From ffaa9cd67d06d3d93a0961bb7a0b33d0eea464ad Mon Sep 17 00:00:00 2001 From: Jamie Pine <32987599+jamiepine@users.noreply.github.com> Date: Sun, 13 Mar 2022 03:50:40 -0700 Subject: [PATCH] wow, SEXY CORE - corrected naming structure of Core/Client related enums - Added CoreResource - Created planned ClientCommands - moved migrations out of mod.rs - created useBridgeQuery --- apps/desktop/src-tauri/src/commands.rs | 24 ++- apps/desktop/src-tauri/src/main.rs | 24 ++- apps/desktop/src/components/file/Sidebar.tsx | 4 +- packages/core/src/db/migrate.rs | 131 ++++++++++++++ packages/core/src/db/mod.rs | 137 +-------------- packages/core/src/lib.rs | 173 ++++++++++++------- packages/core/src/library/loader.rs | 6 +- packages/state/lib/bridge.ts | 9 + packages/state/lib/files/query.ts | 10 +- 9 files changed, 295 insertions(+), 223 deletions(-) create mode 100644 packages/core/src/db/migrate.rs diff --git a/apps/desktop/src-tauri/src/commands.rs b/apps/desktop/src-tauri/src/commands.rs index f443aba3c..18a2cc0e2 100644 --- a/apps/desktop/src-tauri/src/commands.rs +++ b/apps/desktop/src-tauri/src/commands.rs @@ -4,16 +4,24 @@ use sdcorelib::{ state::{client, client::ClientState}, sys, sys::{volumes, volumes::Volume}, - ClientQuery, ClientResponse, Core, + ClientCommand, ClientQuery, Core, CoreResponse, }; -#[tauri::command(async)] -pub async fn client_query_transport(data: ClientQuery) -> Result { - match Core::query(data).await { - Ok(response) => Ok(response), - Err(err) => Err(err.to_string()), - } -} +// #[tauri::command(async)] +// pub async fn client_query_transport(data: ClientQuery) -> Result { +// match Core::query(data).await { +// Ok(response) => Ok(response), +// Err(err) => Err(err.to_string()), +// } +// } + +// #[tauri::command(async)] +// pub async fn client_command_transport(data: ClientCommand) -> Result { +// match Core::command(data).await { +// Ok(response) => Ok(response), +// Err(err) => Err(err.to_string()), +// } +// } #[tauri::command(async)] pub 
async fn scan_dir(path: String) -> Result<(), String> { diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs index decc0ead4..5a0884347 100644 --- a/apps/desktop/src-tauri/src/main.rs +++ b/apps/desktop/src-tauri/src/main.rs @@ -1,4 +1,5 @@ -use sdcorelib::Core; +use once_cell::sync::OnceCell; +use sdcorelib::{ClientCommand, ClientQuery, Core, CoreResponse}; use tauri::api::path; use tauri::Manager; // use tauri_plugin_shadows::Shadows; @@ -6,6 +7,24 @@ use tauri::Manager; mod commands; mod menu; +pub static CORE: OnceCell = OnceCell::new(); + +#[tauri::command(async)] +async fn client_query_transport(data: ClientQuery) -> Result { + match CORE.get().unwrap().query(data).await { + Ok(response) => Ok(response), + Err(err) => Err(err.to_string()), + } +} + +#[tauri::command(async)] +async fn client_command_transport(data: ClientCommand) -> Result { + match CORE.get().unwrap().command(data).await { + Ok(response) => Ok(response), + Err(err) => Err(err.to_string()), + } +} + #[tokio::main] async fn main() { let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./")); @@ -26,7 +45,8 @@ async fn main() { }) .on_menu_event(|event| menu::handle_menu_event(event)) .invoke_handler(tauri::generate_handler![ - commands::client_query_transport, + client_query_transport, + client_command_transport, commands::scan_dir, commands::create_location, commands::get_files, diff --git a/apps/desktop/src/components/file/Sidebar.tsx b/apps/desktop/src/components/file/Sidebar.tsx index 5f9bbad4d..4eb9c9ee6 100644 --- a/apps/desktop/src/components/file/Sidebar.tsx +++ b/apps/desktop/src/components/file/Sidebar.tsx @@ -10,7 +10,7 @@ import { TrafficLights } from '../os/TrafficLights'; import { Button } from '../primitive'; import { Dropdown } from '../primitive/Dropdown'; import { DefaultProps } from '../primitive/types'; -import { useVolumes } from '@sd/state'; +import { useBridgeQuery } from '@sd/state'; interface SidebarProps extends 
DefaultProps {} export const SidebarLink = (props: NavLinkProps) => ( @@ -51,7 +51,7 @@ export function MacOSTrafficLights() { } export const Sidebar: React.FC = (props) => { - const { data: volumes } = useVolumes(); + const { data: volumes } = useBridgeQuery('SysGetVolumes'); const tags = [ { id: 1, name: 'Keepsafe', color: '#FF6788' }, diff --git a/packages/core/src/db/migrate.rs b/packages/core/src/db/migrate.rs new file mode 100644 index 000000000..42c70ae29 --- /dev/null +++ b/packages/core/src/db/migrate.rs @@ -0,0 +1,131 @@ +use crate::file::checksum::sha256_digest; +use crate::{prisma, prisma::Migration}; +use anyhow::Result; +use data_encoding::HEXLOWER; +use include_dir::{include_dir, Dir}; +use std::ffi::OsStr; +use std::io::BufReader; + +const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql"); +static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations"); + +pub async fn run_migrations(db_url: &str) -> Result<()> { + let client = prisma::new_client_with_url(&format!("file:{}", &db_url)).await; + + match client + ._query_raw::( + "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'", + ) + .await + { + Ok(data) => { + if data.len() == 0 { + println!("Migration table does not exist"); + // execute migration + match client._execute_raw(INIT_MIGRATION).await { + Ok(_) => {} + Err(e) => { + println!("Failed to create migration table: {}", e); + } + }; + + let value: Vec = client + ._query_raw( + "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'", + ) + .await + .unwrap(); + + println!("Migration table created: {:?}", value); + } else { + println!("Migration table exists: {:?}", data); + } + + let mut migration_subdirs = MIGRATIONS_DIR + .dirs() + .filter(|subdir| { + subdir + .path() + .file_name() + .map(|name| name != OsStr::new("migration_table")) + .unwrap_or(false) + }) + .collect::>(); + + migration_subdirs.sort_by(|a, b| { + let a_name = 
a.path().file_name().unwrap().to_str().unwrap(); + let b_name = b.path().file_name().unwrap().to_str().unwrap(); + + let a_time = a_name[..14].parse::().unwrap(); + let b_time = b_name[..14].parse::().unwrap(); + + a_time.cmp(&b_time) + }); + + for subdir in migration_subdirs { + println!("{:?}", subdir.path()); + let migration_file = subdir + .get_file(subdir.path().join("./migration.sql")) + .unwrap(); + let migration_sql = migration_file.contents_utf8().unwrap(); + + let digest = sha256_digest(BufReader::new(migration_file.contents()))?; + // create a lowercase hex checksum from the digest + let checksum = HEXLOWER.encode(digest.as_ref()); + let name = subdir.path().file_name().unwrap().to_str().unwrap(); + + // get existing migration by checksum, if it doesn't exist run the migration + let existing_migration = client + .migration() + .find_unique(Migration::checksum().equals(checksum.clone())) + .exec() + .await; + + if existing_migration.is_none() { + println!("Running migration: {}", name); + + let steps = migration_sql.split(";").collect::>(); + let steps = &steps[0..steps.len() - 1]; + + client + .migration() + .create_one( + Migration::name().set(name.to_string()), + Migration::checksum().set(checksum.clone()), + vec![], + ) + .exec() + .await; + + for (i, step) in steps.iter().enumerate() { + match client._execute_raw(&format!("{};", step)).await { + Ok(_) => { + println!("Step {} ran successfully", i); + client + .migration() + .find_unique(Migration::checksum().equals(checksum.clone())) + .update(vec![Migration::steps_applied().set(i as i64 + 1)]) + .exec() + .await; + } + Err(e) => { + println!("Error running migration: {}", name); + println!("{}", e); + break; + } + } + } + + println!("Migration {} recorded successfully", name); + } else { + println!("Migration {} already exists", name); + } + } + } + Err(err) => { + panic!("Failed to check migration table existence: {:?}", err); + } + } + + Ok(()) +} diff --git a/packages/core/src/db/mod.rs 
b/packages/core/src/db/mod.rs index ee18796be..afe92a587 100644 --- a/packages/core/src/db/mod.rs +++ b/packages/core/src/db/mod.rs @@ -1,15 +1,8 @@ -use crate::file::checksum::sha256_digest; -use crate::{ - prisma, - prisma::{Migration, PrismaClient}, -}; -use crate::{state, CoreError}; +use crate::state; +use crate::{prisma, prisma::PrismaClient}; use anyhow::Result; -use data_encoding::HEXLOWER; -use include_dir::{include_dir, Dir}; use once_cell::sync::OnceCell; -use std::ffi::OsStr; -use std::io::BufReader; +pub mod migrate; pub static DB: OnceCell = OnceCell::new(); @@ -34,127 +27,3 @@ pub async fn get() -> Result<&'static PrismaClient, String> { Ok(DB.get().unwrap()) } } - -const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql"); -static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations"); - -pub async fn init(db_url: &str) -> Result<()> { - let client = prisma::new_client_with_url(&format!("file:{}", &db_url)).await; - - match client - ._query_raw::( - "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'", - ) - .await - { - Ok(data) => { - if data.len() == 0 { - println!("Migration table does not exist"); - // execute migration - match client._execute_raw(INIT_MIGRATION).await { - Ok(_) => {} - Err(e) => { - println!("Failed to create migration table: {}", e); - } - }; - - let value: Vec = client - ._query_raw( - "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'", - ) - .await - .unwrap(); - - println!("Migration table created: {:?}", value); - } else { - println!("Migration table exists: {:?}", data); - } - - let mut migration_subdirs = MIGRATIONS_DIR - .dirs() - .filter(|subdir| { - subdir - .path() - .file_name() - .map(|name| name != OsStr::new("migration_table")) - .unwrap_or(false) - }) - .collect::>(); - - migration_subdirs.sort_by(|a, b| { - let a_name = a.path().file_name().unwrap().to_str().unwrap(); - let b_name = 
b.path().file_name().unwrap().to_str().unwrap(); - - let a_time = a_name[..14].parse::().unwrap(); - let b_time = b_name[..14].parse::().unwrap(); - - a_time.cmp(&b_time) - }); - - for subdir in migration_subdirs { - println!("{:?}", subdir.path()); - let migration_file = subdir - .get_file(subdir.path().join("./migration.sql")) - .unwrap(); - let migration_sql = migration_file.contents_utf8().unwrap(); - - let digest = sha256_digest(BufReader::new(migration_file.contents()))?; - // create a lowercase hash from - let checksum = HEXLOWER.encode(digest.as_ref()); - let name = subdir.path().file_name().unwrap().to_str().unwrap(); - - // get existing migration by checksum, if it doesn't exist run the migration - let existing_migration = client - .migration() - .find_unique(Migration::checksum().equals(checksum.clone())) - .exec() - .await; - - if existing_migration.is_none() { - println!("Running migration: {}", name); - - let steps = migration_sql.split(";").collect::>(); - let steps = &steps[0..steps.len() - 1]; - - client - .migration() - .create_one( - Migration::name().set(name.to_string()), - Migration::checksum().set(checksum.clone()), - vec![], - ) - .exec() - .await; - - for (i, step) in steps.iter().enumerate() { - match client._execute_raw(&format!("{};", step)).await { - Ok(_) => { - println!("Step {} ran successfully", i); - client - .migration() - .find_unique(Migration::checksum().equals(checksum.clone())) - .update(vec![Migration::steps_applied().set(i as i64 + 1)]) - .exec() - .await; - } - Err(e) => { - println!("Error running migration: {}", name); - println!("{}", e); - break; - } - } - } - - println!("Migration {} recorded successfully", name); - } else { - println!("Migration {} already exists", name); - } - } - } - Err(err) => { - panic!("Failed to check migration table existence: {:?}", err); - } - } - - Ok(()) -} diff --git a/packages/core/src/lib.rs b/packages/core/src/lib.rs index 1360b69ef..f89ac81c7 100644 --- a/packages/core/src/lib.rs +++ 
b/packages/core/src/lib.rs @@ -21,91 +21,48 @@ pub mod util; // pub mod native; pub struct Core { - pub event_sender: mpsc::Sender, - pub event_receiver: mpsc::Receiver, -} - -#[derive(Error, Debug)] -pub enum CoreError { - #[error("System error")] - SysError(#[from] sys::SysError), -} - -// represents an event this library can emit -#[derive(Serialize, Deserialize, Debug, TS)] -#[serde(rename_all = "snake_case", tag = "key", content = "payload")] -#[ts(export)] -pub enum ClientEvent { - NewFileTypeThumb { file_id: u32, icon_created: bool }, - NewJobCreated { job_id: u32, progress: u8 }, - ResourceChange { key: String, id: String }, - DatabaseDisconnected { reason: Option }, -} - -// represents an event this library can emit -#[derive(Serialize, Deserialize, Debug, TS)] -#[serde(tag = "key", content = "params")] -#[ts(export)] -pub enum ClientQuery { - SysGetVolumes, - ClientGetCurrent, - SysGetLocations { id: String }, - LibGetExplorerDir { path: String, limit: u32 }, -} - -#[derive(Serialize, Deserialize, Debug, TS)] -#[serde(tag = "key", content = "data")] -#[ts(export)] -pub enum ClientResponse { - SysGetVolumes(Vec), + pub event_sender: mpsc::Sender, + pub event_receiver: mpsc::Receiver, + pub state: ClientState, } impl Core { - pub async fn query(query: ClientQuery) -> Result { - println!("Core Query: {:?}", query); - let response = match query { - ClientQuery::SysGetVolumes => ClientResponse::SysGetVolumes(sys::volumes::get()?), - ClientQuery::SysGetLocations { id: _ } => todo!(), - ClientQuery::LibGetExplorerDir { path: _, limit: _ } => todo!(), - ClientQuery::ClientGetCurrent => todo!(), - }; - Ok(response) - } - - pub async fn send(&self, event: ClientEvent) { - self.event_sender.send(event).await.unwrap(); - } - + // create new instance of core, run startup tasks pub async fn new(mut data_dir: std::path::PathBuf) -> Core { let (event_sender, event_receiver) = mpsc::channel(100); - let core = Core { - event_sender, - event_receiver, - }; data_dir = 
data_dir.join("spacedrive"); let data_dir = data_dir.to_str().unwrap(); // create data directory if it doesn't exist fs::create_dir_all(&data_dir).unwrap(); // prepare basic client state - let mut client_config = - ClientState::new(data_dir, "diamond-mastering-space-dragon").unwrap(); + let mut state = ClientState::new(data_dir, "diamond-mastering-space-dragon").unwrap(); // load from disk - client_config + state .read_disk() .unwrap_or(error!("No client state found, creating new one...")); - client_config.save(); + state.save(); - // begin asynchronous startup routines - info!("Starting up... {:?}", client_config); - if client_config.libraries.len() == 0 { + let core = Core { + event_sender, + event_receiver, + state, + }; + core.initializer().await; + core + // activate p2p listeners + // p2p::listener::listen(None); + } + // load library database + initialize client with db + pub async fn initializer(&self) { + if self.state.libraries.len() == 0 { match library::loader::create(None).await { Ok(library) => info!("Created new library: {:?}", library), Err(e) => info!("Error creating library: {:?}", e), } } else { - for library in client_config.libraries.iter() { + for library in self.state.libraries.iter() { // init database for library match library::loader::load(&library.library_path, &library.library_id).await { Ok(library) => info!("Loaded library: {:?}", library), @@ -118,8 +75,90 @@ impl Core { Ok(_) => info!("Spacedrive online"), Err(e) => info!("Error initializing client: {:?}", e), }; - // activate p2p listeners - // p2p::listener::listen(None); - core + } + pub async fn command(&self, cmd: ClientCommand) -> Result { + info!("Core command: {:?}", cmd); + Ok(CoreResponse::Success) + } + // query sources of data + pub async fn query(&self, query: ClientQuery) -> Result { + info!("Core query: {:?}", query); + let response = match query { + ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::volumes::get()?), + ClientQuery::SysGetLocations { id: _ 
} => todo!(), + ClientQuery::LibGetExplorerDir { path: _, limit: _ } => todo!(), + ClientQuery::ClientGetState => todo!(), + }; + Ok(response) + } + // send an event to the client + pub async fn send(&self, event: CoreEvent) { + self.event_sender.send(event).await.unwrap(); } } + +// represents a command the client can send to the core +#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "params")] +#[ts(export)] +pub enum ClientCommand { + LocScanFull { location_id: u32 }, + FileScanQuick { file_id: u32 }, + FileScanFull { file_id: u32 }, + FileDelete { file_id: u32 }, + TagCreate { name: String, color: String }, + TagAssign { file_id: u32, tag_id: u32 }, + TagDelete { tag_id: u32 }, + LocDelete { location_id: u32 }, + LibDelete { library_id: u32 }, + SysVolumeUnmount { volume_id: u32 }, +} + +// represents a query the client can send to the core +#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "params")] +#[ts(export)] +pub enum ClientQuery { + ClientGetState, + SysGetVolumes, + SysGetLocations { id: String }, + LibGetExplorerDir { path: String, limit: u32 }, +} + +// represents an event this library can emit +#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "payload")] +#[ts(export)] +pub enum CoreEvent { + // almost all events should be one of these two + InvalidateQuery(ClientQuery), + InvalidateResource(CoreResource), + + Log { message: String }, + DatabaseDisconnected { reason: Option }, +} + +#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "data")] +#[ts(export)] +pub enum CoreResponse { + Success, + SysGetVolumes(Vec), +} + +#[derive(Error, Debug)] +pub enum CoreError { + #[error("System error")] + SysError(#[from] sys::SysError), +} + +#[derive(Serialize, Deserialize, Debug, TS)] +#[ts(export)] +pub enum CoreResource { + Client, + Library, + Location, + File, + Job, + Tag, +} diff --git a/packages/core/src/library/loader.rs b/packages/core/src/library/loader.rs index 
efb0afedc..7628b7faa 100644 --- a/packages/core/src/library/loader.rs +++ b/packages/core/src/library/loader.rs @@ -3,7 +3,7 @@ use uuid::Uuid; use crate::state::client::LibraryState; use crate::{ - db::{self, init}, + db::{self, migrate}, prisma::{Library, LibraryData}, state, }; @@ -48,7 +48,7 @@ pub async fn load(library_path: &str, library_id: &str) -> Result<()> { config.save(); } // create connection with library database & run migrations - init(&library_path).await?; + migrate::run_migrations(&library_path).await?; // if doesn't exist, mark as offline Ok(()) } @@ -66,7 +66,7 @@ pub async fn create(name: Option) -> Result<()> { ..LibraryState::default() }; - init(&library_state.library_path).await?; + migrate::run_migrations(&library_state.library_path).await?; config.libraries.push(library_state); diff --git a/packages/state/lib/bridge.ts b/packages/state/lib/bridge.ts index fdb277a6a..f805f6ed9 100644 --- a/packages/state/lib/bridge.ts +++ b/packages/state/lib/bridge.ts @@ -1,5 +1,6 @@ import { ClientQuery, ClientResponse } from '@sd/core'; import { EventEmitter } from 'eventemitter3'; +import { useQuery } from 'react-query'; export let transport: BaseTransport | null = null; @@ -19,3 +20,11 @@ export async function bridge< export function setTransport(_transport: BaseTransport) { transport = _transport; } + +export function useBridgeQuery( + key: Parameters[0], + params?: Parameters[1], + options: Parameters[2] = {} +) { + return useQuery([key, params], () => bridge(key, params), options); +} diff --git a/packages/state/lib/files/query.ts b/packages/state/lib/files/query.ts index 6662cef32..3d72c4fa0 100644 --- a/packages/state/lib/files/query.ts +++ b/packages/state/lib/files/query.ts @@ -3,10 +3,6 @@ import { useState } from 'react'; import { useFileExplorerState } from './state'; import { bridge } from '../bridge'; -// export function useBridgeQuery(key: ) { -// return useQuery(['sys_get_volumes'], () => bridge('sys_get_volumes')); -// } - // this hook 
initializes the explorer state and queries the core export function useFileExplorer(initialPath = '/', initialLocation: number | null = null) { const fileState = useFileExplorerState(); @@ -28,6 +24,6 @@ export function useFileExplorer(initialPath = '/', initialLocation: number | nul return { location, files, setPath, setLocationId }; } -export function useVolumes() { - return useQuery(['SysGetVolumes'], () => bridge('SysGetVolumes')); -} +// export function useVolumes() { +// return useQuery(['SysGetVolumes'], () => bridge('SysGetVolumes')); +// }