return types working on useBridgeQuery

- added implementation of SysGetLocation
This commit is contained in:
Jamie Pine
2022-03-13 09:54:58 -07:00
parent ffaa9cd67d
commit b11cc71f00
27 changed files with 257 additions and 608 deletions

View File

Binary file not shown.

View File

@@ -37,6 +37,7 @@ once_cell = "1.8.0"
int-enum = "0.4.0"
async-std = "1.10.0"
tokio = { version = "1.17.0", features = ["sync"] }
lazy_static = "1.4.0"
[features]
default = [ "custom-protocol" ]

View File

@@ -1,28 +1,11 @@
// DEPRECATE EVERYTHING IN THIS FILE
use anyhow::Result;
use sdcorelib::{
file::{indexer, retrieve, retrieve::Directory, watcher::watch_dir},
state::{client, client::ClientState},
sys,
sys::{volumes, volumes::Volume},
ClientCommand, ClientQuery, Core, CoreResponse,
};
// #[tauri::command(async)]
// pub async fn client_query_transport(data: ClientQuery) -> Result<CoreResponse, String> {
// match Core::query(data).await {
// Ok(response) => Ok(response),
// Err(err) => Err(err.to_string()),
// }
// }
// #[tauri::command(async)]
// pub async fn client_command_transport(data: ClientCommand) -> Result<CoreResponse, String> {
// match Core::command(data).await {
// Ok(response) => Ok(response),
// Err(err) => Err(err.to_string()),
// }
// }
#[tauri::command(async)]
pub async fn scan_dir(path: String) -> Result<(), String> {
let files = indexer::scan(&path).await.map_err(|e| e.to_string());

View File

@@ -1,29 +0,0 @@
use serde::Serialize;
use std::fs;
use tauri::api::path;
/// Resolved filesystem paths for the application's persistent data.
/// All paths are derived from the platform data directory in `get_config`.
#[derive(Serialize)]
pub struct AppConfig {
// Path of the primary SQLite database file (`primary.db3` inside `data_dir`).
pub primary_db: std::path::PathBuf,
// Root directory for app data (platform data dir joined with the app name).
pub data_dir: std::path::PathBuf,
// Directory holding generated file-type thumbnails (`file_icons` inside `data_dir`).
pub file_type_thumb_dir: std::path::PathBuf,
}
/// Returns the app config struct with complete values.
///
/// Side effect: ensures the data directory and the thumbnail directory
/// exist on disk before returning paths into them. Falls back to the
/// current working directory when no platform data dir is available.
pub fn get_config() -> AppConfig {
	let data_dir = path::data_dir()
		.unwrap_or_else(|| std::path::PathBuf::from("./"))
		.join("Spacedrive");
	let file_type_thumb_dir = data_dir.join("file_icons");
	// Create both directories if they do not already exist.
	for dir in [&data_dir, &file_type_thumb_dir] {
		fs::create_dir_all(dir).unwrap();
	}
	AppConfig {
		primary_db: data_dir.join("primary.db3"),
		data_dir,
		file_type_thumb_dir,
	}
}

View File

@@ -1,25 +1,27 @@
use once_cell::sync::OnceCell;
use sdcorelib::{ClientCommand, ClientQuery, Core, CoreResponse};
use tauri::api::path;
use tauri::Manager;
// use tauri_plugin_shadows::Shadows;
mod commands;
mod menu;
pub static CORE: OnceCell<Core> = OnceCell::new();
#[tauri::command(async)]
async fn client_query_transport(data: ClientQuery) -> Result<CoreResponse, String> {
match CORE.get().unwrap().query(data).await {
async fn client_query_transport(
core: tauri::State<'_, Core>,
data: ClientQuery,
) -> Result<CoreResponse, String> {
match core.query(data).await {
Ok(response) => Ok(response),
Err(err) => Err(err.to_string()),
}
}
#[tauri::command(async)]
async fn client_command_transport(data: ClientCommand) -> Result<CoreResponse, String> {
match CORE.get().unwrap().command(data).await {
async fn client_command_transport(
core: tauri::State<'_, Core>,
data: ClientCommand,
) -> Result<CoreResponse, String> {
match core.command(data).await {
Ok(response) => Ok(response),
Err(err) => Err(err.to_string()),
}
@@ -28,15 +30,15 @@ async fn client_command_transport(data: ClientCommand) -> Result<CoreResponse, S
#[tokio::main]
async fn main() {
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
let mut core = Core::new(data_dir).await;
let (core, mut event_receiver) = Core::new(data_dir).await;
tauri::Builder::default()
.manage(core)
.setup(|app| {
let app = app.handle();
tauri::async_runtime::spawn(async move {
while let Some(event) = core.event_receiver.recv().await {
// core event transport
tokio::spawn(async move {
while let Some(event) = event_receiver.recv().await {
app.emit_all("core_event", &event).unwrap();
}
});
@@ -47,6 +49,7 @@ async fn main() {
.invoke_handler(tauri::generate_handler![
client_query_transport,
client_command_transport,
// deprecate below
commands::scan_dir,
commands::create_location,
commands::get_files,

View File

@@ -110,7 +110,6 @@ export const Sidebar: React.FC<SidebarProps> = (props) => {
</div>
<div>
<Heading>Locations</Heading>
{/* @ts-ignore */}
{volumes?.map((location, index) => {
return (
<div key={index} className="flex flex-row items-center">

View File

@@ -0,0 +1 @@
max_width = 150

View File

@@ -0,0 +1,2 @@
export type ClientCommand = { key: "LocScanFull", params: { location_id: bigint, } } | { key: "FileScanQuick", params: { file_id: bigint, } } | { key: "FileScanFull", params: { file_id: bigint, } } | { key: "FileDelete", params: { file_id: bigint, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: bigint, tag_id: bigint, } } | { key: "TagDelete", params: { tag_id: bigint, } } | { key: "LocDelete", params: { location_id: bigint, } } | { key: "LibDelete", params: { library_id: bigint, } } | { key: "SysVolumeUnmount", params: { volume_id: bigint, } };

View File

@@ -1,2 +1,2 @@
export type ClientQuery = { key: "SysGetVolumes" } | { key: "ClientGetCurrent" } | { key: "SysGetLocations", params: { id: string, } } | { key: "LibGetExplorerDir", params: { path: string, limit: number, } };
export type ClientQuery = { key: "ClientGetState" } | { key: "SysGetVolumes" } | { key: "SysGetLocation", params: { id: bigint, } } | { key: "LibGetExplorerDir", params: { path: string, limit: bigint, } };

View File

@@ -0,0 +1,4 @@
import type { ClientQuery } from "./ClientQuery";
import type { CoreResource } from "./CoreResource";
export type CoreEvent = { key: "InvalidateQuery", payload: ClientQuery } | { key: "InvalidateResource", payload: CoreResource } | { key: "Log", payload: { message: string, } } | { key: "DatabaseDisconnected", payload: { reason: string | null, } };

View File

@@ -0,0 +1,2 @@
export type CoreResource = "Client" | "Library" | "Location" | "File" | "Job" | "Tag";

View File

@@ -0,0 +1,4 @@
import type { LocationResource } from "./LocationResource";
import type { Volume } from "./Volume";
export type CoreResponse = { key: "Success" } | { key: "SysGetVolumes", data: Array<Volume> } | { key: "SysGetLocations", data: LocationResource };

View File

@@ -1,4 +1,8 @@
export * from './bindings/ClientCommand';
export * from './bindings/ClientEvent';
export * from './bindings/ClientQuery';
export * from './bindings/ClientResponse';
export * from './bindings/CoreEvent';
export * from './bindings/CoreResource';
export * from './bindings/CoreResponse';
export * from './bindings/Volume';

View File

@@ -29,22 +29,14 @@ pub async fn create() -> Result<()> {
_ => Platform::Unknown,
};
let client = match db
.client()
.find_unique(Client::uuid().equals(config.client_id.clone()))
.exec()
.await
{
let client = match db.client().find_unique(Client::uuid().equals(config.client_id.clone())).exec().await {
Some(client) => client,
None => {
db.client()
.create_one(
Client::uuid().set(config.client_id.clone()),
Client::name().set(hostname.clone()),
vec![
Client::platform().set(platform as i64),
Client::online().set(true),
],
vec![Client::platform().set(platform as i64), Client::online().set(true)],
)
.exec()
.await

View File

@@ -13,9 +13,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
let client = prisma::new_client_with_url(&format!("file:{}", &db_url)).await;
match client
._query_raw::<serde_json::Value>(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
._query_raw::<serde_json::Value>("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
.await
{
Ok(data) => {
@@ -30,9 +28,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
};
let value: Vec<serde_json::Value> = client
._query_raw(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
._query_raw("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
.await
.unwrap();
@@ -64,9 +60,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
for subdir in migration_subdirs {
println!("{:?}", subdir.path());
let migration_file = subdir
.get_file(subdir.path().join("./migration.sql"))
.unwrap();
let migration_file = subdir.get_file(subdir.path().join("./migration.sql")).unwrap();
let migration_sql = migration_file.contents_utf8().unwrap();
let digest = sha256_digest(BufReader::new(migration_file.contents()))?;

View File

@@ -10,11 +10,7 @@ pub async fn get() -> Result<&'static PrismaClient, String> {
if DB.get().is_none() {
let config = state::client::get();
let current_library = config
.libraries
.iter()
.find(|l| l.library_id == config.current_library_id)
.unwrap();
let current_library = config.libraries.iter().find(|l| l.library_id == config.current_library_id).unwrap();
let path = current_library.library_path.clone();
// TODO: Error handling when brendan adds it to prisma-client-rust

View File

@@ -6,8 +6,7 @@ use walkdir::{DirEntry, WalkDir};
use super::watcher::watch_dir;
use crate::db;
use crate::prisma::LocationData;
use crate::sys::locations::{create_location, get_location};
use crate::sys::locations::{create_location, get_location, LocationResource};
use crate::util::time;
pub async fn scan_paths(location_id: i64) -> Result<()> {
@@ -54,8 +53,7 @@ pub async fn scan(path: &str) -> Result<()> {
let scan_start = Instant::now();
// walk through directory recursively
for entry in WalkDir::new(path).into_iter().filter_entry(|dir| {
let approved =
!is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
let approved = !is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
approved
}) {
// extract directory entry or log and continue if failed
@@ -68,11 +66,7 @@ pub async fn scan(path: &str) -> Result<()> {
};
let path = entry.path();
let parent_path = path
.parent()
.unwrap_or(Path::new(""))
.to_str()
.unwrap_or("");
let parent_path = path.parent().unwrap_or(Path::new("")).to_str().unwrap_or("");
let parent_dir_id = dirs.get(&*parent_path);
println!("Discovered: {:?}, {:?}", &path, &parent_dir_id);
@@ -95,15 +89,13 @@ pub async fn scan(path: &str) -> Result<()> {
// vector to store active models
let mut files: Vec<String> = Vec::new();
for (file_path, file_id, parent_dir_id) in chunk {
files.push(
match prepare_model(&file_path, *file_id, &location, parent_dir_id) {
Ok(file) => file,
Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e);
continue;
}
},
);
files.push(match prepare_model(&file_path, *file_id, &location, parent_dir_id) {
Ok(file) => file,
Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e);
continue;
}
});
}
let raw_sql = format!(
r#"
@@ -126,12 +118,7 @@ pub async fn scan(path: &str) -> Result<()> {
}
// reads a file at a path and creates an ActiveModel with metadata
fn prepare_model(
file_path: &PathBuf,
id: i64,
location: &LocationData,
parent_id: &Option<i64>,
) -> Result<String> {
fn prepare_model(file_path: &PathBuf, id: i64, location: &LocationResource, parent_id: &Option<i64>) -> Result<String> {
let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str();
let size = metadata.len();
@@ -157,12 +144,8 @@ fn prepare_model(
&name,
&extension,
&size.to_string(),
&time::system_time_to_date_time(metadata.created())
.unwrap()
.to_string(),
&time::system_time_to_date_time(metadata.modified())
.unwrap()
.to_string(),
&time::system_time_to_date_time(metadata.created()).unwrap().to_string(),
&time::system_time_to_date_time(metadata.modified()).unwrap().to_string(),
))
}
@@ -179,19 +162,11 @@ pub async fn test_scan(path: &str) -> Result<()> {
// extract name from OsStr returned by PathBuff
fn extract_name(os_string: Option<&OsStr>) -> String {
os_string
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_owned()
os_string.unwrap_or_default().to_str().unwrap_or_default().to_owned()
}
fn is_hidden(entry: &DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.starts_with("."))
.unwrap_or(false)
entry.file_name().to_str().map(|s| s.starts_with(".")).unwrap_or(false)
}
fn is_library(entry: &DirEntry) -> bool {
@@ -204,11 +179,7 @@ fn is_library(entry: &DirEntry) -> bool {
}
fn is_node_modules(entry: &DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.contains("node_modules"))
.unwrap_or(false)
entry.file_name().to_str().map(|s| s.contains("node_modules")).unwrap_or(false)
}
fn is_app_bundle(entry: &DirEntry) -> bool {
@@ -247,9 +218,7 @@ fn construct_file_sql(
id,
is_dir as u8,
location_id,
parent_id
.map(|id| id.to_string())
.unwrap_or("NULL".to_string()),
parent_id.map(|id| id.to_string()).unwrap_or("NULL".to_string()),
stem,
name,
extension,

View File

@@ -21,21 +21,12 @@ pub async fn get_dir_with_contents(path: &str) -> Result<Directory, String> {
// meta_integrity_hash.truncate(20);
let directory = match db
.file()
.find_unique(File::name().equals(path.into()))
.exec()
.await
{
let directory = match db.file().find_unique(File::name().equals(path.into())).exec().await {
Some(file) => file,
None => return Err("directory_not_found".to_owned()),
};
let files = db
.file()
.find_many(vec![File::parent_id().equals(directory.id)])
.exec()
.await;
let files = db.file().find_many(vec![File::parent_id().equals(directory.id)]).exec().await;
Ok(Directory {
directory: directory.clone(),

View File

@@ -9,8 +9,7 @@ pub async fn create_thumb(path: &str) -> Result<()> {
let file = File::open(path).unwrap();
let reader = BufReader::new(file);
let mut thumbnails =
create_thumbnails(reader, mime::IMAGE_PNG, [ThumbnailSize::Small]).unwrap();
let mut thumbnails = create_thumbnails(reader, mime::IMAGE_PNG, [ThumbnailSize::Small]).unwrap();
let thumbnail = thumbnails.pop().unwrap();

View File

@@ -22,13 +22,12 @@ pub mod util;
pub struct Core {
pub event_sender: mpsc::Sender<CoreEvent>,
pub event_receiver: mpsc::Receiver<CoreEvent>,
pub state: ClientState,
}
impl Core {
// create new instance of core, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> Core {
pub async fn new(mut data_dir: std::path::PathBuf) -> (Core, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_receiver) = mpsc::channel(100);
data_dir = data_dir.join("spacedrive");
@@ -38,19 +37,13 @@ impl Core {
// prepare basic client state
let mut state = ClientState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk
state
.read_disk()
.unwrap_or(error!("No client state found, creating new one..."));
state.read_disk().unwrap_or(error!("No client state found, creating new one..."));
state.save();
let core = Core {
event_sender,
event_receiver,
state,
};
let core = Core { event_sender, state };
core.initializer().await;
core
(core, event_receiver)
// activate p2p listeners
// p2p::listener::listen(None);
}
@@ -85,7 +78,7 @@ impl Core {
info!("Core query: {:?}", query);
let response = match query {
ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::volumes::get()?),
ClientQuery::SysGetLocations { id: _ } => todo!(),
ClientQuery::SysGetLocation { id } => CoreResponse::SysGetLocations(sys::locations::get_location(id).await?),
ClientQuery::LibGetExplorerDir { path: _, limit: _ } => todo!(),
ClientQuery::ClientGetState => todo!(),
};
@@ -102,16 +95,16 @@ impl Core {
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientCommand {
LocScanFull { location_id: u32 },
FileScanQuick { file_id: u32 },
FileScanFull { file_id: u32 },
FileDelete { file_id: u32 },
LocScanFull { location_id: i64 },
FileScanQuick { file_id: i64 },
FileScanFull { file_id: i64 },
FileDelete { file_id: i64 },
TagCreate { name: String, color: String },
TagAssign { file_id: u32, tag_id: u32 },
TagDelete { tag_id: u32 },
LocDelete { location_id: u32 },
LibDelete { library_id: u32 },
SysVolumeUnmount { volume_id: u32 },
TagAssign { file_id: i64, tag_id: i64 },
TagDelete { tag_id: i64 },
LocDelete { location_id: i64 },
LibDelete { library_id: i64 },
SysVolumeUnmount { volume_id: i64 },
}
// represents an event this library can emit
@@ -121,8 +114,8 @@ pub enum ClientCommand {
pub enum ClientQuery {
ClientGetState,
SysGetVolumes,
SysGetLocations { id: String },
LibGetExplorerDir { path: String, limit: u32 },
SysGetLocation { id: i64 },
LibGetExplorerDir { path: String, limit: i64 },
}
// represents an event this library can emit
@@ -133,7 +126,6 @@ pub enum CoreEvent {
// most all events should be once of these two
InvalidateQuery(ClientQuery),
InvalidateResource(CoreResource),
Log { message: String },
DatabaseDisconnected { reason: Option<String> },
}
@@ -144,6 +136,7 @@ pub enum CoreEvent {
pub enum CoreResponse {
Success,
SysGetVolumes(Vec<sys::volumes::Volume>),
SysGetLocations(sys::locations::LocationResource),
}
#[derive(Error, Debug)]

View File

File diff suppressed because it is too large Load Diff

View File

@@ -86,11 +86,7 @@ impl ClientState {
}
pub fn get_current_library(&self) -> LibraryState {
match self
.libraries
.iter()
.find(|lib| lib.library_id == self.current_library_id)
{
match self.libraries.iter().find(|lib| lib.library_id == self.current_library_id) {
Some(lib) => lib.clone(),
None => LibraryState::default(),
}

View File

@@ -9,9 +9,42 @@ use log::info;
use serde::{Deserialize, Serialize};
use std::{fs, io, io::Write};
use thiserror::Error;
use ts_rs::TS;
pub use crate::prisma::LocationData;
/// API-facing representation of a storage location, built from a database
/// `LocationData` row via the `Into`/`From` conversion below. Exported to
/// TypeScript through `ts-rs`.
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
pub struct LocationResource {
pub id: i64,
pub name: Option<String>,
pub path: Option<String>,
// Capacity values copied straight from the DB row — presumably bytes; confirm against the schema.
pub total_capacity: Option<i64>,
pub available_capacity: Option<i64>,
pub is_removable: bool,
pub is_ejectable: bool,
pub is_root_filesystem: bool,
pub is_online: bool,
// Serialized to the client as a plain string rather than a structured date.
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
}
/// Convert a database `LocationData` row into the API-facing `LocationResource`.
///
/// Implemented as `From` rather than a hand-written `Into`: the standard
/// library's blanket `impl<T, U: From<T>> Into<U> for T` still provides
/// `LocationData: Into<LocationResource>`, so existing `.into()` call sites
/// keep working, and `LocationResource::from(data)` becomes available too.
impl From<LocationData> for LocationResource {
	fn from(data: LocationData) -> Self {
		// Straight field-for-field copy; no values are derived or transformed.
		LocationResource {
			id: data.id,
			name: data.name,
			path: data.path,
			total_capacity: data.total_capacity,
			available_capacity: data.available_capacity,
			is_removable: data.is_removable,
			is_ejectable: data.is_ejectable,
			is_root_filesystem: data.is_root_filesystem,
			is_online: data.is_online,
			date_created: data.date_created,
		}
	}
}
#[derive(Serialize, Deserialize, Default)]
pub struct DotSpacedrive {
pub location_uuid: String,
@@ -24,8 +57,7 @@ static DOTFILE_NAME: &str = ".spacedrive";
// - accessible on from the local filesystem
// - already exists in the database
pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError> {
let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME))
{
let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME)) {
Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
Err(e) => return Err(LocationError::DotfileReadFailure(e)),
};
@@ -33,15 +65,13 @@ pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError>
Ok(dotfile)
}
pub async fn get_location(location_id: i64) -> Result<LocationData, LocationError> {
pub async fn get_location(location_id: i64) -> Result<LocationResource, LocationError> {
let db = db::get().await.map_err(|e| LocationError::DBError(e))?;
// get location by location_id from db and include location_paths
let location = match db
.location()
.find_first(vec![
Location::files().some(vec![File::id().equals(location_id)])
])
.find_first(vec![Location::files().some(vec![File::id().equals(location_id.into())])])
.exec()
.await
{
@@ -51,10 +81,10 @@ pub async fn get_location(location_id: i64) -> Result<LocationData, LocationErro
info!("Retrieved location: {:?}", location);
Ok(location)
Ok(location.into())
}
pub async fn create_location(path: &str) -> Result<LocationData, LocationError> {
pub async fn create_location(path: &str) -> Result<LocationResource, LocationError> {
let db = db::get().await.map_err(|e| LocationError::DBError(e))?;
let config = client::get();
@@ -64,18 +94,10 @@ pub async fn create_location(path: &str) -> Result<LocationData, LocationError>
Err(e) => return Err(LocationError::FileReadError(e)),
}
// check if location already exists
let location = match db
.location()
.find_first(vec![Location::path().equals(path.to_string())])
.exec()
.await
{
let location = match db.location().find_first(vec![Location::path().equals(path.to_string())]).exec().await {
Some(location) => location,
None => {
info!(
"Location does not exist, creating new location for '{}'",
&path
);
info!("Location does not exist, creating new location for '{}'", &path);
let uuid = uuid::Uuid::new_v4();
// create new location
let create_location_params = {
@@ -85,9 +107,7 @@ pub async fn create_location(path: &str) -> Result<LocationData, LocationError>
};
info!("Loaded mounted volumes: {:?}", volumes);
// find mount with matching path
let volume = volumes
.into_iter()
.find(|mount| path.starts_with(&mount.mount_point));
let volume = volumes.into_iter().find(|mount| path.starts_with(&mount.mount_point));
let volume_data = match volume {
Some(mount) => mount,
@@ -106,11 +126,7 @@ pub async fn create_location(path: &str) -> Result<LocationData, LocationError>
]
};
let location = db
.location()
.create_one(create_location_params)
.exec()
.await;
let location = db.location().create_one(create_location_params).exec().await;
info!("Created location: {:?}", location);
@@ -139,7 +155,7 @@ pub async fn create_location(path: &str) -> Result<LocationData, LocationError>
}
};
Ok(location)
Ok(location.into())
}
#[derive(Error, Debug)]

View File

@@ -2,6 +2,10 @@ pub mod locations;
pub mod volumes;
use thiserror::Error;
use crate::CoreError;
use self::locations::LocationError;
#[derive(Error, Debug)]
pub enum SysError {
#[error("Location error")]
@@ -9,3 +13,9 @@ pub enum SysError {
#[error("Error with system volumes")]
VolumeError(String),
}
/// Lift a location-level error into the core error type so `?` can propagate
/// `LocationError` out of functions returning `Result<_, CoreError>`.
/// The error is wrapped twice: `LocationError` -> `SysError` -> `CoreError`.
impl From<LocationError> for CoreError {
fn from(e: LocationError) -> Self {
CoreError::SysError(SysError::LocationError(e))
}
}

View File

@@ -39,8 +39,7 @@ pub fn get() -> Result<Vec<Volume>, SysError> {
let mut name = disk.name().to_str().unwrap_or("Volume").to_string();
let is_removable = disk.is_removable();
let file_system = String::from_utf8(disk.file_system().to_vec())
.unwrap_or_else(|_| "Err".to_string());
let file_system = String::from_utf8(disk.file_system().to_vec()).unwrap_or_else(|_| "Err".to_string());
let disk_type = match disk.type_() {
sysinfo::DiskType::SSD => "SSD".to_string(),
@@ -56,15 +55,11 @@ pub fn get() -> Result<Vec<Volume>, SysError> {
let mut caption = mount_point.clone();
caption.pop();
let wmic_process = Command::new("cmd")
.args([
"/C",
&format!("wmic logical disk where Caption='{caption}' get Size"),
])
.args(["/C", &format!("wmic logical disk where Caption='{caption}' get Size")])
.output()
.expect("failed to execute process");
let wmic_process_output = String::from_utf8(wmic_process.stdout).unwrap();
let parsed_size =
wmic_process_output.split("\r\r\n").collect::<Vec<&str>>()[1].to_string();
let parsed_size = wmic_process_output.split("\r\r\n").collect::<Vec<&str>>()[1].to_string();
if let Ok(n) = parsed_size.trim().parse::<u64>() {
total_space = n;

View File

@@ -4,12 +4,12 @@ use std::io;
use std::time::{SystemTime, UNIX_EPOCH};
pub fn system_time_to_date_time(system_time: io::Result<SystemTime>) -> Result<NaiveDateTime> {
// extract system time or resort to current time if failure
let system_time = system_time.unwrap_or(SystemTime::now());
let std_duration = system_time.duration_since(UNIX_EPOCH)?;
let chrono_duration = chrono::Duration::from_std(std_duration)?;
let unix = NaiveDateTime::from_timestamp(0, 0);
let naive = unix + chrono_duration;
// let date_time: DateTime<Utc> = Utc.from_local_datetime(&naive).unwrap();
Ok(naive)
// extract system time or resort to current time if failure
let system_time = system_time.unwrap_or(SystemTime::now());
let std_duration = system_time.duration_since(UNIX_EPOCH)?;
let chrono_duration = chrono::Duration::from_std(std_duration)?;
let unix = NaiveDateTime::from_timestamp(0, 0);
let naive = unix + chrono_duration;
// let date_time: DateTime<Utc> = Utc.from_local_datetime(&naive).unwrap();
Ok(naive)
}

View File

@@ -1,6 +1,6 @@
import { ClientQuery, ClientResponse } from '@sd/core';
import { ClientQuery, CoreResponse } from '@sd/core';
import { EventEmitter } from 'eventemitter3';
import { useQuery } from 'react-query';
import { useQuery, UseQueryOptions, UseQueryResult } from 'react-query';
export let transport: BaseTransport | null = null;
@@ -8,12 +8,19 @@ export abstract class BaseTransport extends EventEmitter {
abstract send(query: ClientQuery): Promise<unknown>;
}
export async function bridge<
K extends ClientQuery['key'],
CQ extends Extract<ClientQuery, { key: K }>
>(key: K, params?: CQ extends { params: any } ? CQ['params'] : never) {
type KeyType = ClientQuery['key'];
type CQType<K> = Extract<ClientQuery, { key: K }>;
type CRType<K> = Extract<CoreResponse, { key: K }>;
type CQParams<CQ> = CQ extends { params: any } ? CQ['params'] : never;
type CRData<CR> = CR extends { data: any } ? CR['data'] : never;
export async function bridge<K extends KeyType, CQ extends CQType<K>, CR extends CRType<K>>(
key: K,
params?: CQParams<CQ>
): Promise<CRData<CR>> {
const result = (await transport?.send({ key, params } as any)) as any;
console.log(`ClientQueryTransport: [${result?.key}]`, result?.data);
// console.log(`Client Query Transport: [${result?.key}]`, result?.data);
return result?.data;
}
@@ -21,10 +28,10 @@ export function setTransport(_transport: BaseTransport) {
transport = _transport;
}
export function useBridgeQuery(
key: Parameters<typeof bridge>[0],
params?: Parameters<typeof bridge>[1],
options: Parameters<typeof useQuery>[2] = {}
export function useBridgeQuery<K extends KeyType, CQ extends CQType<K>, CR extends CRType<K>>(
key: K,
params?: CQParams<CQ>,
options: UseQueryOptions<CRData<CR>> = {}
) {
return useQuery([key, params], () => bridge(key, params), options);
return useQuery<CRData<CR>>([key, params], async () => await bridge(key, params), options);
}