ENG 223 Location Awareness (#468)

* Introducing location online/offline checks and location relink

* Some initial drafts for location watchers

* Location metadata relink and add library

* Many improvements to the job system
Now using prisma batching at identifier job
Removing blocking I/O from extension subcrate
Implementing lazy generation of thumbnails
New current directory identifier job to be used on light rescans

* Some optimizations on identifier and object validator jobs

* merge jamie's identifier PR

* fully repaired identifier job

* properly hooked up object kind

* inspector fix

* fix video badge

* small improvements to libraries settings

* identifier and inspector improvements

* fix feature flags and hook up context menu location utilities

* BETTER CONTEXT MENU x100

* test-files

* style tweaks

* new icon designs

* manifest

* fix thumbnails on web

* media data

* New Location Watcher and some minor fixes

* disable broken media_data extractor, wip

* wip

* function name fix

* Fixing pnpm prep and some warnings

* Solving a race condition between indexer job and FS event handler. Some other minor warnings

* Generating thumbnails on watcher

* Remove event handler on watcher

* Some initial works on modify events and other small fixes

* File update event

* Trying to be more generic with used events and some tests to validate our assumptions

* Turning on location metadata file

* Introducing core unit tests on CI pipeline

* Submitting new unit test assumptions to validate on windows CI

* Fixing unit tests

* Fixing unit tests again

* Fixing unit tests

* Fixing unit tests for macos

* Fixing unit tests for macos again

* New structure for platform dependent event handling
Implementing event handlers for Linux and MacOS

* minor fixes + rustfmt + clippy

* Windows event handling

* Introducing a feature gate to only use location watching on desktop app for now

* Putting more stuff behind feature gates to avoid warnings

* Adding feature to cargo test on CI

* Changing some debug logs to trace logs and removing Jamie specific stuff

* Make location removal from manager less async

* fix build when "location-watcher" feature disabled

* fix types + clippy

* make location manager non-static

* remove uses of `to_string_lossy`

* more invalidate_query calls

* Expose `library_ctx` directly to avoid needless clones

* New materialized_path handling for directories

* Removing cascade delete between file_path and object
- Some other minor stuff

* remove unused `CurrentDirFileIdentifierJob`

Co-authored-by: Jamie Pine <ijamespine@me.com>
Co-authored-by: Oscar Beaumont <oscar@otbeaumont.me>
This commit is contained in:
Ericson "Fogo" Soares
2022-12-30 13:53:24 -03:00
committed by GitHub
parent fc2e8d8cec
commit bcbcd260d4
109 changed files with 5000 additions and 12082 deletions

2
.github/CODEOWNERS vendored
View File

@@ -22,7 +22,7 @@
/apps/mobile/ @jamiepine @Brendonovich @oscartbeaumont @utkubakir
# core logic
/core/ @jamiepine @Brendonovich @oscartbeaumont
/core/ @jamiepine @Brendonovich @oscartbeaumont @fogodev
/packages/macos/ @jamiepine @Brendonovich @oscartbeaumont
# server app

View File

@@ -158,6 +158,9 @@ jobs:
- name: Cargo fetch
run: cargo fetch
- name: Cargo test core
run: cargo test -p sd-core -F location-watcher
- name: Check core
run: cargo check -p sd-core --release

BIN
Cargo.lock generated
View File

Binary file not shown.

View File

@@ -24,7 +24,6 @@
"devDependencies": {
"@sd/config": "workspace:*",
"@tauri-apps/cli": "1.1.1",
"@tauri-apps/tauricon": "github:tauri-apps/tauricon",
"@types/babel-core": "^6.25.7",
"@types/react": "^18.0.21",
"@types/react-dom": "^18.0.6",

View File

@@ -12,7 +12,7 @@ build = "build.rs"
[dependencies]
tauri = { version = "1.1.1", features = ["api-all", "macos-private-api"] }
rspc = { workspace = true, features = ["tauri"] }
sd-core = { path = "../../../core", features = ["ffmpeg"] }
sd-core = { path = "../../../core", features = ["ffmpeg", "location-watcher"] }
tokio = { version = "1.21.2", features = ["sync"] }
window-shadows = "0.2.0"
tracing = "0.1.36"

View File

@@ -59,13 +59,13 @@ async fn main() -> Result<(), Box<dyn Error>> {
.setup(|app| {
let app = app.handle();
app.windows().iter().for_each(|(_, window)| {
window.hide().unwrap();
// window.hide().unwrap();
tokio::spawn({
let window = window.clone();
async move {
sleep(Duration::from_secs(3)).await;
if window.is_visible().unwrap_or(true) == false {
if !window.is_visible().unwrap_or(true) {
println!("Window did not emit `app_ready` event fast enough. Showing window...");
let _ = window.show();
}

View File

Binary file not shown.

Before

Width:  |  Height:  |  Size: 50 KiB

After

Width:  |  Height:  |  Size: 79 KiB

View File

Binary file not shown.

Before

Width:  |  Height:  |  Size: 126 KiB

After

Width:  |  Height:  |  Size: 123 KiB

View File

@@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
sd-core = { path = "../../core", features = [] }
sd-core = { path = "../../core", features = ["ffmpeg"] }
rspc = { workspace = true, features = ["axum"] }
axum = "0.5.16"
tokio = { version = "1.21.2", features = ["sync", "rt-multi-thread", "signal"] }

View File

@@ -4,6 +4,6 @@
"main": "index.js",
"license": "GPL-3.0-only",
"scripts": {
"dev": "cargo watch -x 'run -p server'"
"dev": "RUST_LOG=\"sd_core=info\" cargo watch -x 'run -p server'"
}
}

View File

@@ -39,11 +39,12 @@ async fn main() {
let app = axum::Router::new()
.route("/", get(|| async { "Spacedrive Server!" }))
.route("/health", get(|| async { "OK" }))
.route("/spacedrive/:id", {
.route("/spacedrive/*id", {
let node = node.clone();
get(|extract::Path(path): extract::Path<String>| async move {
let (status_code, content_type, body) =
node.handle_custom_uri(path.split('/').collect()).await;
let (status_code, content_type, body) = node
.handle_custom_uri(path.split('/').skip(1).collect())
.await;
(
StatusCode::from_u16(status_code).unwrap(),

BIN
apps/web/public/favicon.ico Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 31 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

View File

@@ -8,18 +8,18 @@
"type": "image/x-icon"
},
{
"src": "logo192.png",
"src": "logo-192x192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"src": "logo-512x512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
"theme_color": "#101016",
"background_color": "#1C1D26"
}

View File

@@ -23,7 +23,9 @@ const client = hooks.createClient({
const platform: Platform = {
platform: 'web',
getThumbnailUrlById: (casId) =>
`${import.meta.env.VITE_SDSERVER_BASE_URL}/spacedrive/thumbnail/${encodeURIComponent(casId)}`,
`${
import.meta.env.VITE_SDSERVER_BASE_URL || 'http://localhost:8080'
}/spacedrive/thumbnail/${encodeURIComponent(casId)}.webp`,
openLink: (url) => window.open(url, '_blank')?.focus(),
demoMode: true
};

View File

@@ -3,6 +3,7 @@
<head>
<meta charset="utf-8" />
<title>Spacedrive</title>
<link rel="manifest" href="/manifest.json">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>

View File

@@ -19,6 +19,9 @@ ffmpeg = [
"dep:ffmpeg-next",
"dep:sd-ffmpeg",
] # This feature controls whether the Spacedrive Core contains functionality which requires FFmpeg.
location-watcher = [
"dep:notify"
]
[dependencies]
hostname = "0.3.1"
@@ -66,6 +69,7 @@ ctor = "0.1.23"
globset = { version = "^0.4.9", features = ["serde1"] }
itertools = "^0.10.5"
enumflags2 = "0.7.5"
notify = { version = "5.0.0", default-features = false, features = ["macos_kqueue"], optional = true }
[dev-dependencies]
tempfile = "^3.3.0"

View File

@@ -2,7 +2,7 @@ use std::process::Command;
fn main() {
let output = Command::new("git")
.args(&["rev-parse", "--short", "HEAD"])
.args(["rev-parse", "--short", "HEAD"])
.output()
.expect("error getting git hash. Does `git rev-parse --short HEAD` work for you?");
let git_hash = String::from_utf8(output.stdout)

View File

@@ -73,7 +73,7 @@ CREATE TABLE "object" (
"cas_id" TEXT NOT NULL,
"integrity_checksum" TEXT,
"name" TEXT,
"extension" TEXT,
"extension" TEXT COLLATE NOCASE,
"kind" INTEGER NOT NULL DEFAULT 0,
"size_in_bytes" TEXT NOT NULL,
"key_id" INTEGER,
@@ -98,7 +98,7 @@ CREATE TABLE "file_path" (
"location_id" INTEGER NOT NULL,
"materialized_path" TEXT NOT NULL,
"name" TEXT NOT NULL,
"extension" TEXT,
"extension" TEXT COLLATE NOCASE,
"object_id" INTEGER,
"parent_id" INTEGER,
"key_id" INTEGER,
@@ -107,7 +107,7 @@ CREATE TABLE "file_path" (
"date_indexed" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY ("location_id", "id"),
CONSTRAINT "file_path_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "file_path_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT "file_path_location_id_fkey" FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE CASCADE ON UPDATE CASCADE,
CONSTRAINT "file_path_key_id_fkey" FOREIGN KEY ("key_id") REFERENCES "key" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);

View File

@@ -142,9 +142,10 @@ model Object {
model FilePath {
id Int
is_dir Boolean @default(false)
is_dir Boolean @default(false)
// location that owns this path
location_id Int
location Location @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
// a path generated from local file_path ids eg: "34/45/67/890"
materialized_path String
// the name and extension
@@ -152,6 +153,7 @@ model FilePath {
extension String?
// the unique Object for this file path
object_id Int?
object Object? @relation(fields: [object_id], references: [id], onDelete: Restrict)
// the parent in the file tree
parent_id Int?
key_id Int? // replacement for encryption
@@ -162,9 +164,6 @@ model FilePath {
date_modified DateTime @default(now())
date_indexed DateTime @default(now())
object Object? @relation(fields: [object_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
location Location? @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
// NOTE: this self relation for the file tree was causing SQLite to go to forever bed, disabling until workaround
// parent FilePath? @relation("directory_file_paths", fields: [parent_id], references: [id], onDelete: NoAction, onUpdate: NoAction)
// children FilePath[] @relation("directory_file_paths")
@@ -188,31 +187,31 @@ model FileConflict {
// keys allow us to know exactly which files can be decrypted with a given key
// they can be "mounted" to a client, and then used to decrypt files automatically
model Key {
id Int @id @default(autoincrement())
id Int @id @default(autoincrement())
// uuid to identify the key
uuid String @unique
uuid String @unique
// the name that the user sets
name String?
name String?
// is this key the default for encryption?
// was not tagged as unique as i'm not too sure if PCR will handle it
// can always be tagged as unique, the keys API will need updating to use `find_unique()`
default Boolean @default(false)
default Boolean @default(false)
// nullable if concealed for security
date_created DateTime? @default(now())
date_created DateTime? @default(now())
// encryption algorithm used to encrypt the key
algorithm Bytes
algorithm Bytes
// hashing algorithm used for hashing the master password
hashing_algorithm Bytes
// salt used for encrypting data with this key
content_salt Bytes
content_salt Bytes
// the *encrypted* master key (48 bytes)
master_key Bytes
master_key Bytes
// the nonce used for encrypting the master key
master_key_nonce Bytes
master_key_nonce Bytes
// the nonce used for encrypting the key
key_nonce Bytes
key_nonce Bytes
// the *encrypted* key
key Bytes
key Bytes
automount Boolean @default(false)
@@ -236,8 +235,7 @@ model MediaData {
codecs String? // eg: "h264,acc"
streams Int?
// change this relation to Object after testing
objects Object? @relation(fields: [id], references: [id], onDelete: Cascade, onUpdate: Cascade)
object Object? @relation(fields: [id], references: [id], onDelete: Cascade, onUpdate: Cascade)
@@map("media_data")
}

View File

@@ -16,10 +16,19 @@ use super::{utils::LibraryRequest, RouterBuilder};
pub(crate) fn mount() -> RouterBuilder {
<RouterBuilder>::new()
.library_query("readMetadata", |t| {
t(|_, _id: i32, _| async move {
#[allow(unreachable_code)]
Ok(todo!())
.library_query("get", |t| {
#[derive(Type, Deserialize)]
pub struct GetArgs {
pub id: i32,
}
t(|_, args: GetArgs, library| async move {
Ok(library
.db
.object()
.find_unique(object::id::equals(args.id))
.include(object::include!({ file_paths media_data }))
.exec()
.await?)
})
})
.library_mutation("setNote", |t| {
@@ -41,6 +50,7 @@ pub(crate) fn mount() -> RouterBuilder {
.await?;
invalidate_query!(library, "locations.getExplorerData");
invalidate_query!(library, "tags.getExplorerData");
Ok(())
})
@@ -64,6 +74,7 @@ pub(crate) fn mount() -> RouterBuilder {
.await?;
invalidate_query!(library, "locations.getExplorerData");
invalidate_query!(library, "tags.getExplorerData");
Ok(())
})
@@ -94,9 +105,7 @@ pub(crate) fn mount() -> RouterBuilder {
));
}
library
.spawn_job(Job::new(args, Box::new(FileEncryptorJob {})))
.await;
library.spawn_job(Job::new(args, FileEncryptorJob {})).await;
invalidate_query!(library, "locations.getExplorerData");
Ok(())
@@ -115,9 +124,7 @@ pub(crate) fn mount() -> RouterBuilder {
));
}
library
.spawn_job(Job::new(args, Box::new(FileDecryptorJob {})))
.await;
library.spawn_job(Job::new(args, FileDecryptorJob {})).await;
invalidate_query!(library, "locations.getExplorerData");
Ok(())

View File

@@ -2,7 +2,7 @@ use crate::{
job::{Job, JobManager},
location::{fetch_location, LocationError},
object::{
identifier_job::{FileIdentifierJob, FileIdentifierJobInit},
identifier_job::full_identifier_job::{FullFileIdentifierJob, FullFileIdentifierJobInit},
preview::{ThumbnailJob, ThumbnailJobInit},
validation::validator_job::{ObjectValidatorJob, ObjectValidatorJobInit},
},
@@ -26,6 +26,12 @@ pub(crate) fn mount() -> RouterBuilder {
.library_query("getHistory", |t| {
t(|_, _: (), library| async move { Ok(JobManager::get_history(&library).await?) })
})
.library_mutation("clearAll", |t| {
t(|_, _: (), library| async move {
JobManager::clear_all_jobs(&library).await?;
Ok(())
})
})
.library_mutation("generateThumbsForLocation", |t| {
#[derive(Type, Deserialize)]
pub struct GenerateThumbsForLocationArgs {
@@ -49,10 +55,10 @@ pub(crate) fn mount() -> RouterBuilder {
.spawn_job(Job::new(
ThumbnailJobInit {
location_id: args.id,
path: PathBuf::new(),
background: true,
root_path: PathBuf::new(),
background: false,
},
Box::new(ThumbnailJob {}),
ThumbnailJob {},
))
.await;
@@ -82,7 +88,7 @@ pub(crate) fn mount() -> RouterBuilder {
path: args.path,
background: true,
},
Box::new(ObjectValidatorJob {}),
ObjectValidatorJob {},
))
.await;
@@ -106,11 +112,11 @@ pub(crate) fn mount() -> RouterBuilder {
library
.spawn_job(Job::new(
FileIdentifierJobInit {
FullFileIdentifierJobInit {
location_id: args.id,
sub_path: Some(args.path),
},
Box::new(FileIdentifierJob {}),
FullFileIdentifierJob {},
))
.await;

View File

@@ -1,17 +1,18 @@
use crate::{
invalidate_query,
location::{
fetch_location,
delete_location, fetch_location,
indexer::{indexer_job::indexer_job_location, rules::IndexerRuleCreateArgs},
scan_location, LocationCreateArgs, LocationError, LocationUpdateArgs,
relink_location, scan_location, LocationCreateArgs, LocationError, LocationUpdateArgs,
},
object::preview::THUMBNAIL_CACHE_DIR_NAME,
prisma::{file_path, indexer_rule, indexer_rules_in_location, location, object, tag},
};
use std::path::PathBuf;
use rspc::{self, internal::MiddlewareBuilderLike, ErrorCode, Type};
use serde::{Deserialize, Serialize};
use tracing::info;
use tokio::{fs, io};
use super::{utils::LibraryRequest, Ctx, RouterBuilder};
@@ -76,7 +77,7 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
pub cursor: Option<String>,
}
t(|_, args: LocationExplorerArgs, library| async move {
t(|_, mut args: LocationExplorerArgs, library| async move {
let location = library
.db
.location()
@@ -87,6 +88,10 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
rspc::Error::new(ErrorCode::NotFound, "Location not found".into())
})?;
if !args.path.ends_with('/') {
args.path += "/";
}
let directory = library
.db
.file_path()
@@ -112,25 +117,42 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
.exec()
.await?;
// library
// .queue_job(Job::new(
// ThumbnailJobInit {
// location_id: location.id,
// // recursive: false, // TODO: do this
// root_path: PathBuf::from(&directory.materialized_path),
// background: true,
// },
// ThumbnailJob {},
// ))
// .await;
let mut items = Vec::with_capacity(file_paths.len());
for mut file_path in file_paths {
if let Some(object) = &mut file_path.object.as_mut() {
// TODO: Use helper function to build this url as as the Rust file loading layer
let thumb_path = library
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(&object.cas_id)
.with_extension("webp");
object.has_thumbnail = (match fs::metadata(thumb_path).await {
Ok(_) => Ok(true),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(false),
Err(e) => Err(e),
})
.map_err(LocationError::IOError)?;
}
items.push(ExplorerItem::Path(Box::new(file_path)));
}
Ok(ExplorerData {
context: ExplorerContext::Location(location),
items: file_paths
.into_iter()
.map(|mut file_path| {
if let Some(object) = &mut file_path.object.as_mut() {
// TODO: Use helper function to build this url as as the Rust file loading layer
let thumb_path = library
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(&object.cas_id)
.with_extension("webp");
object.has_thumbnail = thumb_path.try_exists().unwrap();
}
ExplorerItem::Path(Box::new(file_path))
})
.collect(),
items,
})
})
})
@@ -148,38 +170,35 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
})
.library_mutation("delete", |t| {
t(|_, location_id: i32, library| async move {
delete_location(&library, location_id)
.await
.map_err(Into::into)
})
})
.library_mutation("relink", |t| {
t(|_, location_path: PathBuf, library| async move {
relink_location(&library, location_path)
.await
.map_err(Into::into)
})
})
.library_mutation("addLibrary", |t| {
t(|_, args: LocationCreateArgs, library| async move {
let location = args.add_library(&library).await?;
scan_location(&library, location).await?;
Ok(())
})
})
.library_mutation("fullRescan", |t| {
t(|_, location_id: i32, library| async move {
// remove existing paths
library
.db
.file_path()
.delete_many(vec![file_path::location_id::equals(location_id)])
.exec()
.await?;
library
.db
.indexer_rules_in_location()
.delete_many(vec![indexer_rules_in_location::location_id::equals(
location_id,
)])
.exec()
.await?;
library
.db
.location()
.delete(location::id::equals(location_id))
.exec()
.await?;
invalidate_query!(library, "locations.list");
info!("Location {} deleted", location_id);
Ok(())
})
})
.library_mutation("fullRescan", |t| {
t(|_, location_id: i32, library| async move {
// rescan location
scan_location(
&library,
fetch_location(&library, location_id)

View File

@@ -78,7 +78,12 @@ pub(crate) fn mount() -> Arc<Router> {
Ok(NodeState {
config: ctx.config.get().await,
// We are taking the assumption here that this value is only used on the frontend for display purposes
data_path: ctx.config.data_directory().to_string_lossy().into_owned(),
data_path: ctx
.config
.data_directory()
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
})
})
})
@@ -101,7 +106,7 @@ pub(crate) fn mount() -> Arc<Router> {
CoreEvent::InvalidateOperation(op) => yield op,
CoreEvent::InvalidateOperationDebounced(op) => {
let current = Instant::now();
if current.duration_since(last) > Duration::from_millis(1000 / 60) {
if current.duration_since(last) > Duration::from_millis(1000 / 10) {
last = current;
yield op;
}

View File

@@ -1,28 +1,33 @@
use crate::{
invalidate_query,
job::{worker::Worker, DynJob, Job, JobError},
library::LibraryContext,
location::indexer::indexer_job::{IndexerJob, INDEXER_JOB_NAME},
object::{
identifier_job::{FileIdentifierJob, IDENTIFIER_JOB_NAME},
identifier_job::full_identifier_job::{FullFileIdentifierJob, FULL_IDENTIFIER_JOB_NAME},
preview::{ThumbnailJob, THUMBNAIL_JOB_NAME},
validation::validator_job::{ObjectValidatorJob, VALIDATOR_JOB_NAME},
},
prisma::{job, node},
};
use std::{
collections::{HashMap, HashSet, VecDeque},
fmt::Debug,
fmt::{Display, Formatter},
sync::Arc,
time::Duration,
};
use int_enum::IntEnum;
use prisma_client_rust::Direction;
use rspc::Type;
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, VecDeque},
fmt::Debug,
fmt::{Display, Formatter},
sync::Arc,
time::Duration,
use tokio::{
sync::{broadcast, mpsc, Mutex, RwLock},
time::sleep,
};
use tokio::sync::{mpsc, Mutex, RwLock};
use tokio::{sync::broadcast, time::sleep};
use tracing::{error, info};
use tracing::{debug, error, info};
use uuid::Uuid;
// db is single threaded, nerd
@@ -36,6 +41,7 @@ pub enum JobManagerEvent {
/// Handling persisting JobReports to the database, pause/resuming, and
///
pub struct JobManager {
current_jobs_hashes: RwLock<HashSet<u64>>,
job_queue: RwLock<VecDeque<Box<dyn DynJob>>>,
running_workers: RwLock<HashMap<Uuid, Arc<Mutex<Worker>>>>,
internal_sender: mpsc::UnboundedSender<JobManagerEvent>,
@@ -47,6 +53,7 @@ impl JobManager {
let (shutdown_tx, _shutdown_rx) = broadcast::channel(1);
let (internal_sender, mut internal_receiver) = mpsc::unbounded_channel();
let this = Arc::new(Self {
current_jobs_hashes: RwLock::new(HashSet::new()),
job_queue: RwLock::new(VecDeque::new()),
running_workers: RwLock::new(HashMap::new()),
internal_sender,
@@ -58,7 +65,9 @@ impl JobManager {
// FIXME: if this task crashes, the entire application is unusable
while let Some(event) = internal_receiver.recv().await {
match event {
JobManagerEvent::IngestJob(ctx, job) => this2.clone().ingest(&ctx, job).await,
JobManagerEvent::IngestJob(ctx, job) => {
this2.clone().dispatch_job(&ctx, job).await
}
}
}
});
@@ -66,41 +75,45 @@ impl JobManager {
this
}
pub async fn ingest(self: Arc<Self>, ctx: &LibraryContext, mut job: Box<dyn DynJob>) {
// create worker to process job
let mut running_workers = self.running_workers.write().await;
if running_workers.len() < MAX_WORKERS {
info!("Running job: {:?}", job.name());
pub async fn ingest(self: Arc<Self>, ctx: &LibraryContext, job: Box<dyn DynJob>) {
let job_hash = job.hash();
debug!(
"Ingesting job: <name='{}', hash='{}'>",
job.name(),
job_hash
);
let job_report = job
.report()
.take()
.expect("critical error: missing job on worker");
let job_id = job_report.id;
let worker = Worker::new(job, job_report);
let wrapped_worker = Arc::new(Mutex::new(worker));
if let Err(e) =
Worker::spawn(Arc::clone(&self), Arc::clone(&wrapped_worker), ctx.clone()).await
{
error!("Error spawning worker: {:?}", e);
} else {
running_workers.insert(job_id, wrapped_worker);
}
if !self.current_jobs_hashes.read().await.contains(&job_hash) {
self.current_jobs_hashes.write().await.insert(job_hash);
self.dispatch_job(ctx, job).await;
} else {
self.job_queue.write().await.push_back(job);
debug!(
"Job already in queue: <name='{}', hash='{}'>",
job.name(),
job_hash
);
}
}
pub async fn ingest_queue(&self, _ctx: &LibraryContext, job: Box<dyn DynJob>) {
self.job_queue.write().await.push_back(job);
pub async fn ingest_queue(&self, job: Box<dyn DynJob>) {
let job_hash = job.hash();
debug!("Queueing job: <name='{}', hash='{}'>", job.name(), job_hash);
if !self.current_jobs_hashes.read().await.contains(&job_hash) {
self.current_jobs_hashes.write().await.insert(job_hash);
self.job_queue.write().await.push_back(job);
} else {
debug!(
"Job already in queue: <name='{}', hash='{}'>",
job.name(),
job_hash
);
}
}
pub async fn complete(self: Arc<Self>, ctx: &LibraryContext, job_id: Uuid) {
// remove worker from running workers
pub async fn complete(self: Arc<Self>, ctx: &LibraryContext, job_id: Uuid, job_hash: u64) {
// remove worker from running workers and from current jobs hashes
self.current_jobs_hashes.write().await.remove(&job_hash);
self.running_workers.write().await.remove(&job_id);
// continue queue
let job = self.job_queue.write().await.pop_front();
@@ -127,16 +140,26 @@ impl JobManager {
pub async fn get_history(
ctx: &LibraryContext,
) -> Result<Vec<JobReport>, prisma_client_rust::QueryError> {
let jobs = ctx
Ok(ctx
.db
.job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())])
.order_by(job::date_created::order(Direction::Desc))
.take(100)
.exec()
.await?;
.await?
.into_iter()
.map(Into::into)
.collect())
}
Ok(jobs.into_iter().map(Into::into).collect())
pub async fn clear_all_jobs(
ctx: &LibraryContext,
) -> Result<(), prisma_client_rust::QueryError> {
ctx.db.job().delete_many(vec![]).exec().await?;
invalidate_query!(ctx, "jobs.getHistory");
Ok(())
}
pub fn shutdown_tx(&self) -> Arc<broadcast::Sender<()>> {
@@ -176,20 +199,22 @@ impl JobManager {
match paused_job.name.as_str() {
THUMBNAIL_JOB_NAME => {
Arc::clone(&self)
.ingest(ctx, Job::resume(paused_job, Box::new(ThumbnailJob {}))?)
.dispatch_job(ctx, Job::resume(paused_job, ThumbnailJob {})?)
.await;
}
INDEXER_JOB_NAME => {
Arc::clone(&self)
.ingest(ctx, Job::resume(paused_job, Box::new(IndexerJob {}))?)
.dispatch_job(ctx, Job::resume(paused_job, IndexerJob {})?)
.await;
}
IDENTIFIER_JOB_NAME => {
FULL_IDENTIFIER_JOB_NAME => {
Arc::clone(&self)
.ingest(
ctx,
Job::resume(paused_job, Box::new(FileIdentifierJob {}))?,
)
.dispatch_job(ctx, Job::resume(paused_job, FullFileIdentifierJob {})?)
.await;
}
VALIDATOR_JOB_NAME => {
Arc::clone(&self)
.dispatch_job(ctx, Job::resume(paused_job, ObjectValidatorJob {})?)
.await;
}
_ => {
@@ -204,6 +229,40 @@ impl JobManager {
Ok(())
}
async fn dispatch_job(self: Arc<Self>, ctx: &LibraryContext, mut job: Box<dyn DynJob>) {
// create worker to process job
let mut running_workers = self.running_workers.write().await;
if running_workers.len() < MAX_WORKERS {
info!("Running job: {:?}", job.name());
let job_report = job
.report()
.take()
.expect("critical error: missing job on worker");
let job_id = job_report.id;
let worker = Worker::new(job, job_report);
let wrapped_worker = Arc::new(Mutex::new(worker));
if let Err(e) =
Worker::spawn(Arc::clone(&self), Arc::clone(&wrapped_worker), ctx.clone()).await
{
error!("Error spawning worker: {:?}", e);
} else {
running_workers.insert(job_id, wrapped_worker);
}
} else {
debug!(
"Queueing job: <name='{}', hash='{}'>",
job.name(),
job.hash()
);
self.job_queue.write().await.push_back(job);
}
}
}
#[derive(Debug)]

View File

@@ -1,10 +1,19 @@
use crate::location::{indexer::IndexerError, LocationError};
use sd_crypto::Error as CryptoError;
use crate::{
location::{indexer::IndexerError, LocationError},
object::{identifier_job::IdentifierJobError, preview::ThumbnailError},
};
use std::{
collections::{hash_map::DefaultHasher, VecDeque},
fmt::Debug,
hash::{Hash, Hasher},
};
use rmp_serde::{decode::Error as DecodeError, encode::Error as EncodeError};
use sd_crypto::Error as CryptoError;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{collections::VecDeque, fmt::Debug};
use thiserror::Error;
use tracing::warn;
use uuid::Uuid;
mod job_manager;
@@ -15,14 +24,13 @@ pub use worker::*;
#[derive(Error, Debug)]
pub enum JobError {
// General errors
#[error("Database error: {0}")]
DatabaseError(#[from] prisma_client_rust::QueryError),
#[error("Location error: {0}")]
LocationError(#[from] LocationError),
#[error("I/O error: {0}")]
IOError(#[from] std::io::Error),
#[error("Failed to join Tokio spawn blocking: {0}")]
JoinError(#[from] tokio::task::JoinError),
JoinTaskError(#[from] tokio::task::JoinError),
#[error("Job state encode error: {0}")]
StateEncode(#[from] EncodeError),
#[error("Job state decode error: {0}")]
@@ -35,8 +43,20 @@ pub enum JobError {
"Tried to resume a job that doesn't have saved state data: job <name='{1}', uuid='{0}'>"
)]
MissingJobDataState(Uuid, String),
// Specific job errors
#[error("Indexer error: {0}")]
IndexerError(#[from] IndexerError),
#[error("Location error: {0}")]
LocationError(#[from] LocationError),
#[error("Thumbnail error: {0}")]
ThumbnailError(#[from] ThumbnailError),
#[error("Identifier error: {0}")]
IdentifierError(#[from] IdentifierJobError),
// Not errors
#[error("Job had a early finish: <name='{name}', reason='{reason}'>")]
EarlyFinish { name: String, reason: String },
#[error("Crypto error: {0}")]
CryptoError(#[from] CryptoError),
#[error("Data needed for job execution not found: job <name='{0}'>")]
@@ -49,29 +69,21 @@ pub type JobResult = Result<JobMetadata, JobError>;
pub type JobMetadata = Option<serde_json::Value>;
#[async_trait::async_trait]
pub trait StatefulJob: Send + Sync {
type Init: Serialize + DeserializeOwned + Send + Sync;
pub trait StatefulJob: Send + Sync + Sized {
type Init: Serialize + DeserializeOwned + Send + Sync + Hash;
type Data: Serialize + DeserializeOwned + Send + Sync;
type Step: Serialize + DeserializeOwned + Send + Sync;
fn name(&self) -> &'static str;
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError>;
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError>;
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError>;
async fn finalize(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult;
async fn finalize(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult;
}
#[async_trait::async_trait]
@@ -79,29 +91,17 @@ pub trait DynJob: Send + Sync {
fn report(&mut self) -> &mut Option<JobReport>;
fn name(&self) -> &'static str;
async fn run(&mut self, ctx: WorkerContext) -> JobResult;
fn hash(&self) -> u64;
}
pub struct Job<Init, Data, Step>
where
Init: Serialize + DeserializeOwned + Send + Sync,
Data: Serialize + DeserializeOwned + Send + Sync,
Step: Serialize + DeserializeOwned + Send + Sync,
{
pub struct Job<SJob: StatefulJob> {
report: Option<JobReport>,
state: JobState<Init, Data, Step>,
stateful_job: Box<dyn StatefulJob<Init = Init, Data = Data, Step = Step>>,
state: JobState<SJob>,
stateful_job: SJob,
}
impl<Init, Data, Step> Job<Init, Data, Step>
where
Init: Serialize + DeserializeOwned + Send + Sync,
Data: Serialize + DeserializeOwned + Send + Sync,
Step: Serialize + DeserializeOwned + Send + Sync,
{
pub fn new(
init: Init,
stateful_job: Box<dyn StatefulJob<Init = Init, Data = Data, Step = Step>>,
) -> Box<Self> {
impl<SJob: StatefulJob> Job<SJob> {
pub fn new(init: SJob::Init, stateful_job: SJob) -> Box<Self> {
Box::new(Self {
report: Some(JobReport::new(
Uuid::new_v4(),
@@ -117,10 +117,7 @@ where
})
}
pub fn resume(
mut report: JobReport,
stateful_job: Box<dyn StatefulJob<Init = Init, Data = Data, Step = Step>>,
) -> Result<Box<Self>, JobError> {
pub fn resume(mut report: JobReport, stateful_job: SJob) -> Result<Box<Self>, JobError> {
let job_state_data = if let Some(data) = report.data.take() {
data
} else {
@@ -135,21 +132,29 @@ where
}
}
impl<State: StatefulJob> Hash for Job<State> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name().hash(state);
self.state.hash(state);
}
}
#[derive(Serialize, Deserialize)]
pub struct JobState<Init, Data, Step> {
pub init: Init,
pub data: Option<Data>,
pub steps: VecDeque<Step>,
pub struct JobState<Job: StatefulJob> {
pub init: Job::Init,
pub data: Option<Job::Data>,
pub steps: VecDeque<Job::Step>,
pub step_number: usize,
}
impl<Job: StatefulJob> Hash for JobState<Job> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.init.hash(state);
}
}
#[async_trait::async_trait]
impl<Init, Data, Step> DynJob for Job<Init, Data, Step>
where
Init: Serialize + DeserializeOwned + Send + Sync,
Data: Serialize + DeserializeOwned + Send + Sync,
Step: Serialize + DeserializeOwned + Send + Sync,
{
impl<State: StatefulJob> DynJob for Job<State> {
fn report(&mut self) -> &mut Option<JobReport> {
&mut self.report
}
@@ -174,7 +179,12 @@ where
ctx.clone(),
&mut self.state,
) => {
step_result?;
if matches!(step_result, Err(JobError::EarlyFinish { .. })) {
warn!("{}", step_result.unwrap_err());
break;
} else {
step_result?;
};
self.state.steps.pop_front();
}
_ = &mut shutdown_rx_fut => {
@@ -192,4 +202,10 @@ where
.finalize(ctx.clone(), &mut self.state)
.await
}
fn hash(&self) -> u64 {
let mut hasher = DefaultHasher::new();
Hash::hash(self, &mut hasher);
hasher.finish()
}
}

View File

@@ -29,7 +29,7 @@ pub enum WorkerEvent {
#[derive(Clone)]
pub struct WorkerContext {
library_ctx: LibraryContext,
pub library_ctx: LibraryContext,
events_tx: UnboundedSender<WorkerEvent>,
shutdown_tx: Arc<broadcast::Sender<()>>,
}
@@ -52,10 +52,6 @@ impl WorkerContext {
.expect("critical error: failed to send worker worker progress event updates");
}
pub fn library_ctx(&self) -> LibraryContext {
self.library_ctx.clone()
}
pub fn shutdown_rx(&self) -> broadcast::Receiver<()> {
self.shutdown_tx.subscribe()
}
@@ -104,17 +100,23 @@ impl Worker {
.take()
.expect("critical error: missing job on worker");
let job_hash = job.hash();
let job_id = worker.report.id;
let old_status = worker.report.status;
worker.report.status = JobStatus::Running;
if matches!(old_status, JobStatus::Queued) {
worker.report.create(&ctx).await?;
} else {
worker.report.update(&ctx).await?;
}
drop(worker);
invalidate_query!(ctx, "jobs.isRunning");
// spawn task to handle receiving events from the worker
let library_ctx = ctx.clone();
// spawn task to handle receiving events from the worker
tokio::spawn(Worker::track_progress(
Arc::clone(&worker_mutex),
worker_events_rx,
@@ -178,7 +180,7 @@ impl Worker {
if let Err(e) = done_rx.await {
error!("failed to wait for worker completion: {:#?}", e);
}
job_manager.complete(&ctx, job_id).await;
job_manager.complete(&ctx, job_id, job_hash).await;
});
Ok(())

View File

@@ -1,7 +1,9 @@
use api::{CoreEvent, Ctx, Router};
use job::JobManager;
use library::LibraryManager;
use location::{LocationManager, LocationManagerError};
use node::NodeConfigManager;
use std::{path::Path, sync::Arc};
use thiserror::Error;
use tokio::{
@@ -27,6 +29,7 @@ pub(crate) mod prisma;
pub struct NodeContext {
pub config: Arc<NodeConfigManager>,
pub jobs: Arc<JobManager>,
pub location_manager: Arc<LocationManager>,
pub event_bus_tx: broadcast::Sender<CoreEvent>,
}
@@ -52,7 +55,9 @@ impl Node {
let data_dir = data_dir.as_ref();
#[cfg(debug_assertions)]
let data_dir = data_dir.join("dev");
let _ = fs::create_dir_all(&data_dir).await; // This error is ignore because it throwing on mobile despite the folder existing.
// This error is ignored because it's throwing on mobile despite the folder existing.
let _ = fs::create_dir_all(&data_dir).await;
// dbg!(get_object_kind_from_extension("png"));
@@ -66,12 +71,12 @@ impl Node {
EnvFilter::from_default_env()
.add_directive("warn".parse().expect("Error invalid tracing directive!"))
.add_directive(
"sd-core=debug"
"sd_core=debug"
.parse()
.expect("Error invalid tracing directive!"),
)
.add_directive(
"sd-core-mobile=debug"
"sd_core_mobile=debug"
.parse()
.expect("Error invalid tracing directive!"),
)
@@ -107,16 +112,39 @@ impl Node {
let config = NodeConfigManager::new(data_dir.to_path_buf()).await?;
let jobs = JobManager::new();
let location_manager = LocationManager::new();
let library_manager = LibraryManager::new(
data_dir.join("libraries"),
NodeContext {
config: Arc::clone(&config),
jobs: Arc::clone(&jobs),
location_manager: Arc::clone(&location_manager),
event_bus_tx: event_bus.0.clone(),
},
)
.await?;
// Adding already existing locations for location management
for library_ctx in library_manager.get_all_libraries_ctx().await {
for location in library_ctx
.db
.location()
.find_many(vec![])
.exec()
.await
.unwrap_or_else(|e| {
error!(
"Failed to get locations from database for location manager: {:#?}",
e
);
vec![]
}) {
if let Err(e) = location_manager.add(location.id, library_ctx.clone()).await {
error!("Failed to add location to location manager: {:#?}", e);
}
}
}
// Trying to resume possible paused jobs
let inner_library_manager = Arc::clone(&library_manager);
let inner_jobs = Arc::clone(&jobs);
@@ -136,6 +164,7 @@ impl Node {
event_bus,
};
info!("Spacedrive online.");
Ok((Arc::new(node), router))
}
@@ -208,4 +237,6 @@ pub enum NodeError {
FailedToInitializeConfig(#[from] node::NodeConfigError),
#[error("Failed to initialize library manager: {0}")]
FailedToInitializeLibraryManager(#[from] library::LibraryManagerError),
#[error("Location manager error: {0}")]
LocationManager(#[from] LocationManagerError),
}

View File

@@ -1,11 +1,17 @@
use crate::job::DynJob;
use crate::{
api::CoreEvent, job::DynJob, location::LocationManager, node::NodeConfigManager,
prisma::PrismaClient, NodeContext,
};
use std::{
fmt::{Debug, Formatter},
sync::Arc,
};
use sd_crypto::keys::keymanager::KeyManager;
use std::sync::Arc;
use tracing::warn;
use uuid::Uuid;
use crate::{api::CoreEvent, node::NodeConfigManager, prisma::PrismaClient, NodeContext};
use super::LibraryConfig;
/// LibraryContext holds context for a library which can be passed around the application.
@@ -25,25 +31,39 @@ pub struct LibraryContext {
pub(super) node_context: NodeContext,
}
impl Debug for LibraryContext {
	fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
		// Manual implementation because `node_context` holds a `DynJob`, which
		// makes deriving `Debug` troublesome; `node_context` is therefore
		// intentionally omitted from the output.
		f.debug_struct("LibraryContext")
			.field("id", &self.id)
			.field("config", &self.config)
			.field("db", &self.db)
			.field("node_local_id", &self.node_local_id)
			.finish()
	}
}
impl LibraryContext {
pub(crate) async fn spawn_job(&self, job: Box<dyn DynJob>) {
self.node_context.jobs.clone().ingest(self, job).await;
}
pub(crate) async fn queue_job(&self, job: Box<dyn DynJob>) {
self.node_context.jobs.ingest_queue(self, job).await;
self.node_context.jobs.ingest_queue(job).await;
}
pub(crate) fn emit(&self, event: CoreEvent) {
match self.node_context.event_bus_tx.send(event) {
Ok(_) => (),
Err(err) => {
warn!("Error sending event to event bus: {:?}", err);
}
if let Err(e) = self.node_context.event_bus_tx.send(event) {
warn!("Error sending event to event bus: {e:?}");
}
}
pub(crate) fn config(&self) -> Arc<NodeConfigManager> {
self.node_context.config.clone()
}
pub(crate) fn location_manager(&self) -> &Arc<LocationManager> {
&self.node_context.location_manager
}
}

View File

@@ -1,9 +1,14 @@
use rspc::{self, ErrorCode};
use crate::LocationManagerError;
use std::path::PathBuf;
use rspc::{self, ErrorCode};
use thiserror::Error;
use tokio::io;
use uuid::Uuid;
use super::metadata::LocationMetadataError;
/// Error type for location related errors
#[derive(Error, Debug)]
pub enum LocationError {
@@ -18,22 +23,31 @@ pub enum LocationError {
// User errors
#[error("Location not a directory (path: {0:?})")]
NotDirectory(PathBuf),
#[error("Could not find directory in Location (path: {0:?})")]
DirectoryNotFound(String),
#[error("Missing local_path (id: {0})")]
MissingLocalPath(i32),
#[error("Library exists in the location metadata file, must relink: (old_path: {old_path:?}, new_path: {new_path:?})")]
NeedRelink {
old_path: PathBuf,
new_path: PathBuf,
},
#[error("Exist a different library in the location metadata file, must add a new library: (path: {0:?})")]
AddLibraryToMetadata(PathBuf),
#[error("Location metadata file not found: (path: {0:?})")]
MetadataNotFound(PathBuf),
#[error("Location already exists (path: {0:?})")]
LocationAlreadyExists(PathBuf),
// Internal Errors
#[error("Failed to create location (uuid {uuid:?})")]
CreateFailure { uuid: Uuid },
#[error("Failed to read location dotfile (path: {1:?}); (error: {0:?})")]
DotfileReadFailure(io::Error, PathBuf),
#[error("Failed to serialize dotfile for location (at path: {1:?}); (error: {0:?})")]
DotfileSerializeFailure(serde_json::Error, PathBuf),
#[error("Dotfile location is read only (at path: {0:?})")]
ReadonlyDotFileLocationFailure(PathBuf),
#[error("Failed to write dotfile (path: {1:?}); (error: {0:?})")]
DotfileWriteFailure(io::Error, PathBuf),
#[error("Location metadata error (error: {0:?})")]
LocationMetadataError(#[from] LocationMetadataError),
#[error("Failed to read location path metadata info (path: {1:?}); (error: {0:?})")]
LocationPathFilesystemMetadataAccess(io::Error, PathBuf),
#[error("Location is read only (at path: {0:?})")]
ReadonlyLocationFailure(PathBuf),
#[error("Missing metadata file for location (path: {0:?})")]
MissingMetadataFile(PathBuf),
#[error("Failed to open file from local os (error: {0:?})")]
FileReadError(io::Error),
#[error("Failed to read mounted volumes from local os (error: {0:?})")]
@@ -42,18 +56,25 @@ pub enum LocationError {
IOError(io::Error),
#[error("Database error (error: {0:?})")]
DatabaseError(#[from] prisma_client_rust::QueryError),
#[error("Location manager error (error: {0:?})")]
LocationManagerError(#[from] LocationManagerError),
}
impl From<LocationError> for rspc::Error {
fn from(err: LocationError) -> Self {
match err {
// Not found errors
LocationError::PathNotFound(_)
| LocationError::UuidNotFound(_)
| LocationError::IdNotFound(_) => {
rspc::Error::with_cause(ErrorCode::NotFound, err.to_string(), err)
}
LocationError::NotDirectory(_) | LocationError::MissingLocalPath(_) => {
// User's fault errors
LocationError::NotDirectory(_)
| LocationError::MissingLocalPath(_)
| LocationError::NeedRelink { .. }
| LocationError::AddLibraryToMetadata(_) => {
rspc::Error::with_cause(ErrorCode::BadRequest, err.to_string(), err)
}

View File

@@ -0,0 +1,141 @@
use crate::{library::LibraryContext, prisma::file_path};
use std::sync::atomic::{AtomicI32, Ordering};
use chrono::{DateTime, Utc};
use prisma_client_rust::{Direction, QueryError};
// Process-wide cache of the highest known `file_path` id; 0 doubles as the
// "not yet fetched from the database" sentinel.
static LAST_FILE_PATH_ID: AtomicI32 = AtomicI32::new(0);

// Prisma selection that fetches only the `id` column.
file_path::select!(file_path_id_only { id });
/// Returns the highest known `file_path` id, caching it in the global atomic
/// so the database is only queried on first use (or until re-seeded via
/// `set_max_file_path_id`).
///
/// NOTE(review): the load/fetch/store sequence is not atomic as a whole, so
/// two concurrent first calls may both query the database and race the store —
/// harmless if ids only grow, but confirm callers tolerate it.
pub async fn get_max_file_path_id(library_ctx: &LibraryContext) -> Result<i32, QueryError> {
	let mut last_id = LAST_FILE_PATH_ID.load(Ordering::Acquire);
	if last_id == 0 {
		// Cache miss: fall back to the database and remember the answer.
		last_id = fetch_max_file_path_id(library_ctx).await?;
		LAST_FILE_PATH_ID.store(last_id, Ordering::Release);
	}
	Ok(last_id)
}
/// Overrides the cached maximum `file_path` id (e.g. after a batch insert has
/// reserved a range of ids).
///
/// Uses `Release` ordering to pair with the `Acquire` loads in
/// `get_max_file_path_id` and `create_file_path`; the previous `Relaxed` store
/// was inconsistent with every other access to `LAST_FILE_PATH_ID`.
pub fn set_max_file_path_id(id: i32) {
	LAST_FILE_PATH_ID.store(id, Ordering::Release);
}
/// Queries the database for the current maximum `file_path` id, selecting only
/// the `id` column; returns 0 when the table is empty.
async fn fetch_max_file_path_id(library_ctx: &LibraryContext) -> Result<i32, QueryError> {
	Ok(library_ctx
		.db
		.file_path()
		.find_first(vec![])
		.order_by(file_path::id::order(Direction::Desc))
		.select(file_path_id_only::select())
		.exec()
		.await?
		.map(|r| r.id)
		.unwrap_or(0))
}
/// Creates a single `file_path` row with the next sequential id, updating the
/// global id cache afterwards. Used by the location watcher when a new file or
/// directory appears on disk.
///
/// NOTE(review): the id reservation (load + later store) is not atomic, so two
/// concurrent calls could compute the same `next_id` — presumably safe because
/// this is only called from the single watcher task; confirm.
#[cfg(feature = "location-watcher")]
pub async fn create_file_path(
	library_ctx: &LibraryContext,
	location_id: i32,
	mut materialized_path: String,
	name: String,
	extension: Option<String>,
	parent_id: Option<i32>,
	is_dir: bool,
) -> Result<file_path::Data, QueryError> {
	use crate::prisma::location;

	let mut last_id = LAST_FILE_PATH_ID.load(Ordering::Acquire);
	if last_id == 0 {
		// Cache not seeded yet; ask the database.
		last_id = fetch_max_file_path_id(library_ctx).await?;
	}

	// If this new file_path is a directory, materialized_path must end with "/"
	// (so folder names containing periods aren't later parsed as extensions).
	if is_dir && !materialized_path.ends_with('/') {
		materialized_path += "/";
	}

	let next_id = last_id + 1;

	let created_path = library_ctx
		.db
		.file_path()
		.create(
			next_id,
			location::id::equals(location_id),
			materialized_path,
			name,
			vec![
				file_path::parent_id::set(parent_id),
				file_path::is_dir::set(is_dir),
				file_path::extension::set(extension),
			],
		)
		.exec()
		.await?;

	// Publish the consumed id only after the insert succeeded.
	LAST_FILE_PATH_ID.store(next_id, Ordering::Release);

	Ok(created_path)
}
/// Plain-data payload describing one `file_path` row for batch insertion via
/// `create_many_file_paths`.
pub struct FilePathBatchCreateEntry {
	// Explicit id, pre-allocated by the caller (see `get_max_file_path_id`).
	pub id: i32,
	pub location_id: i32,
	// Path relative to the location root; a trailing "/" is enforced for dirs.
	pub materialized_path: String,
	pub name: String,
	pub extension: Option<String>,
	// `file_path` id of the parent directory, when known.
	pub parent_id: Option<i32>,
	pub is_dir: bool,
	pub created_at: DateTime<Utc>,
}
/// Batch-inserts `file_path` rows from pre-built entries, skipping duplicates,
/// and returns the number of rows actually inserted.
pub async fn create_many_file_paths(
	library_ctx: &LibraryContext,
	entries: Vec<FilePathBatchCreateEntry>,
) -> Result<i64, QueryError> {
	// Build all the unchecked create params up front so the query-builder call
	// below stays a simple pipeline.
	let mut creates = Vec::with_capacity(entries.len());

	for FilePathBatchCreateEntry {
		id,
		location_id,
		mut materialized_path,
		name,
		extension,
		parent_id,
		is_dir,
		created_at,
	} in entries
	{
		// If this new file_path is a directory, materialized_path must end with "/"
		if is_dir && !materialized_path.ends_with('/') {
			materialized_path += "/";
		}

		creates.push(file_path::create_unchecked(
			id,
			location_id,
			materialized_path,
			name,
			vec![
				file_path::is_dir::set(is_dir),
				file_path::parent_id::set(parent_id),
				file_path::extension::set(extension),
				file_path::date_created::set(created_at.into()),
			],
		));
	}

	library_ctx
		.db
		.file_path()
		.create_many(creates)
		.skip_duplicates()
		.exec()
		.await
}

View File

@@ -3,16 +3,26 @@ use crate::{
prisma::{file_path, location},
};
use std::{
collections::HashMap,
ffi::OsStr,
hash::{Hash, Hasher},
path::PathBuf,
time::Duration,
};
use chrono::{DateTime, Utc};
use itertools::Itertools;
use prisma_client_rust::Direction;
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, ffi::OsStr, path::PathBuf, time::Duration};
use tokio::time::Instant;
use tracing::info;
use super::{
rules::IndexerRule,
super::file_path_helper::{
create_many_file_paths, get_max_file_path_id, set_max_file_path_id,
FilePathBatchCreateEntry,
},
rules::{IndexerRule, RuleKind},
walk::{walk, WalkEntry},
};
@@ -35,6 +45,7 @@ pub struct IndexerJob;
location::include!(indexer_job_location {
indexer_rules: select { indexer_rule }
});
file_path::select!(file_path_id_only { id });
/// `IndexerJobInit` receives a `location::Data` object to be indexed
#[derive(Serialize, Deserialize)]
@@ -42,6 +53,11 @@ pub struct IndexerJobInit {
pub location: indexer_job_location::Data,
}
impl Hash for IndexerJobInit {
fn hash<H: Hasher>(&self, state: &mut H) {
self.location.id.hash(state);
}
}
/// `IndexerJobData` contains the state of the indexer job, which includes a `location_path` that
/// is cached and casted on `PathBuf` from `local_path` column in the `location` table. It also
/// contains some metadata for logging purposes.
@@ -94,11 +110,7 @@ impl StatefulJob for IndexerJob {
}
/// Creates a vector of valid path buffers from a directory, chunked into batches of `BATCH_SIZE`.
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let location_path = state
.init
.location
@@ -107,32 +119,17 @@ impl StatefulJob for IndexerJob {
.map(PathBuf::from)
.unwrap();
// query db to highers id, so we can increment it for the new files indexed
#[derive(Deserialize, Serialize, Debug)]
struct QueryRes {
id: Option<i32>,
}
// TODO: use a select to fetch only the id instead of entire record when prisma supports it
// grab the next id so we can increment in memory for batch inserting
let first_file_id = ctx
.library_ctx()
.db
.file_path()
.find_first(vec![])
.order_by(file_path::id::order(Direction::Desc))
.exec()
.await?
.map(|r| r.id)
.unwrap_or(0);
let first_file_id = get_max_file_path_id(&ctx.library_ctx).await?;
let mut indexer_rules_by_kind = HashMap::new();
let mut indexer_rules_by_kind: HashMap<RuleKind, Vec<IndexerRule>> =
HashMap::with_capacity(state.init.location.indexer_rules.len());
for location_rule in &state.init.location.indexer_rules {
let indexer_rule = IndexerRule::try_from(&location_rule.indexer_rule)?;
indexer_rules_by_kind
.entry(indexer_rule.kind)
.or_insert(vec![])
.or_default()
.push(indexer_rule);
}
@@ -154,10 +151,15 @@ impl StatefulJob for IndexerJob {
.await?;
let total_paths = paths.len();
let last_file_id = first_file_id + total_paths as i32;
// Setting our global state for file_path ids
set_max_file_path_id(last_file_id);
let mut dirs_ids = HashMap::new();
let paths_entries = paths
.into_iter()
.zip(first_file_id..(first_file_id + total_paths as i32))
.zip(first_file_id..last_file_id)
.map(
|(
WalkEntry {
@@ -205,7 +207,7 @@ impl StatefulJob for IndexerJob {
IndexerJobData::on_scan_progress(
ctx.clone(),
vec![
ScanProgress::SavedChunks(i as usize),
ScanProgress::SavedChunks(i),
ScanProgress::Message(format!(
"Writing {} of {} to db",
i * chunk_steps.len(),
@@ -224,7 +226,7 @@ impl StatefulJob for IndexerJob {
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let data = &state
.data
@@ -234,51 +236,44 @@ impl StatefulJob for IndexerJob {
let location_path = &data.location_path;
let location_id = state.init.location.id;
let count = ctx
.library_ctx()
.db
.file_path()
.create_many(
state.steps[0]
.iter()
.map(|entry| {
let name;
let extension;
let entries = state.steps[0]
.iter()
.map(|entry| {
let name;
let extension;
// if 'entry.path' is a directory, set extension to an empty string to
// avoid periods in folder names being interpreted as file extensions
if entry.is_dir {
extension = "".to_string();
name = extract_name(entry.path.file_name());
} else {
// if the 'entry.path' is not a directory, then get the extension and name.
extension = extract_name(entry.path.extension());
name = extract_name(entry.path.file_stem());
}
let materialized_path = entry
.path
.strip_prefix(location_path)
.unwrap()
.to_string_lossy()
.to_string();
// if 'entry.path' is a directory, set extension to an empty string to
// avoid periods in folder names being interpreted as file extensions
if entry.is_dir {
extension = None;
name = extract_name(entry.path.file_name());
} else {
// if the 'entry.path' is not a directory, then get the extension and name.
extension = Some(extract_name(entry.path.extension()).to_lowercase());
name = extract_name(entry.path.file_stem());
}
let materialized_path = entry
.path
.strip_prefix(location_path)
.unwrap()
.to_str()
.expect("Found non-UTF-8 path")
.to_string();
file_path::create_unchecked(
entry.file_id,
location_id,
materialized_path,
name,
vec![
file_path::is_dir::set(entry.is_dir),
file_path::extension::set(Some(extension)),
file_path::parent_id::set(entry.parent_id),
file_path::date_created::set(entry.created_at.into()),
],
)
})
.collect(),
)
.exec()
.await?;
FilePathBatchCreateEntry {
id: entry.file_id,
location_id,
materialized_path,
name,
extension,
parent_id: entry.parent_id,
is_dir: entry.is_dir,
created_at: entry.created_at,
}
})
.collect();
let count = create_many_file_paths(&ctx.library_ctx, entries).await?;
info!("Inserted {count} records");
@@ -286,11 +281,7 @@ impl StatefulJob for IndexerJob {
}
/// Logs some metadata about the indexer job
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
let data = state
.data
.as_ref()

View File

@@ -223,7 +223,7 @@ async fn accept_dir_for_its_children(
let mut read_dir = fs::read_dir(source).await?;
while let Some(entry) = read_dir.next_entry().await? {
if entry.metadata().await?.is_dir()
&& children.contains(entry.file_name().to_string_lossy().as_ref())
&& children.contains(entry.file_name().to_str().expect("Found non-UTF-8 path"))
{
return Ok(true);
}
@@ -240,7 +240,7 @@ async fn reject_dir_for_its_children(
let mut read_dir = fs::read_dir(source).await?;
while let Some(entry) = read_dir.next_entry().await? {
if entry.metadata().await?.is_dir()
&& children.contains(entry.file_name().to_string_lossy().as_ref())
&& children.contains(entry.file_name().to_str().expect("Found non-UTF-8 path"))
{
return Ok(false);
}

View File

@@ -0,0 +1,165 @@
use crate::{library::LibraryContext, prisma::location};
use std::{
collections::HashMap,
path::{Path, PathBuf},
time::Duration,
};
use tokio::{fs, io::ErrorKind, time::sleep};
use tracing::{error, warn};
use uuid::Uuid;
use super::{watcher::LocationWatcher, LocationId};
type LibraryId = Uuid;
// Watchers are keyed by (location, library) so the same location id used in
// two different libraries doesn't collide.
type LocationAndLibraryKey = (LocationId, LibraryId);

// How often each managed location is re-checked for online/offline status.
const LOCATION_CHECK_INTERVAL: Duration = Duration::from_secs(5);
/// Checks whether a location's `local_path` currently exists on disk, syncing
/// the database `is_online` flag whenever the observed state disagrees with
/// the stored one. Returns the observed online state.
pub(super) async fn check_online(location: &location::Data, library_ctx: &LibraryContext) -> bool {
	match &location.local_path {
		Some(local_path) => match fs::metadata(local_path).await {
			Ok(_) => {
				// Path exists: flip the flag on if it was stale.
				if !location.is_online {
					set_location_online(location.id, library_ctx, true).await;
				}
				true
			}
			Err(e) if e.kind() == ErrorKind::NotFound => {
				// Path is gone: flip the flag off if it was stale.
				if location.is_online {
					set_location_online(location.id, library_ctx, false).await;
				}
				false
			}
			Err(e) => {
				// Any other I/O error: report it and treat the location as offline.
				error!("Failed to check if location is online: {:#?}", e);
				false
			}
		},
		None => {
			// In this case, we don't have a `local_path`, but this location was marked as online
			if location.is_online {
				set_location_online(location.id, library_ctx, false).await;
			}
			false
		}
	}
}
/// Persists the `is_online` flag for a location in the database.
///
/// Best-effort: a failed update is only logged, as the periodic location check
/// will retry on its next tick.
pub(super) async fn set_location_online(
	location_id: LocationId,
	library_ctx: &LibraryContext,
	online: bool,
) {
	if let Err(e) = library_ctx
		.db
		.location()
		.update(
			location::id::equals(location_id),
			vec![location::is_online::set(online)],
		)
		.exec()
		.await
	{
		// The previous message always said "to online", which was misleading
		// when marking a location offline; log the target state explicitly.
		error!(
			"Failed to update location online status: (id: {}, online: {}, error: {:#?})",
			location_id, online, e
		);
	}
}
/// Sleeps for `LOCATION_CHECK_INTERVAL` and then yields back the same
/// `(location_id, library_ctx)` pair, so the manager can push these futures
/// into a `FuturesUnordered` and be woken when a location is due for a check.
pub(super) async fn location_check_sleep(
	location_id: LocationId,
	library_ctx: LibraryContext,
) -> (LocationId, LibraryContext) {
	sleep(LOCATION_CHECK_INTERVAL).await;
	(location_id, library_ctx)
}
/// Moves a location's watcher from the unwatched set to the watched set,
/// resuming the underlying filesystem watch.
///
/// No-op when `locations_unwatched` has no watcher for this (location, library)
/// key.
pub(super) fn watch_location(
	location: location::Data,
	library_id: LibraryId,
	location_path: impl AsRef<Path>,
	locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
	locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
) {
	let location_id = location.id;
	if let Some(mut watcher) = locations_unwatched.remove(&(location_id, library_id)) {
		if watcher.check_path(location_path) {
			// Same path as before: just resume watching.
			watcher.watch();
		} else {
			// Path changed: hand the fresh location data to the watcher.
			// NOTE(review): the `true` flag presumably means "start watching
			// after updating" — confirm against `LocationWatcher::update_data`.
			watcher.update_data(location, true);
		}
		locations_watched.insert((location_id, library_id), watcher);
	}
}
/// Moves a location's watcher from the watched set to the unwatched set,
/// suspending the underlying filesystem watch.
///
/// No-op when `locations_watched` has no watcher for this (location, library)
/// key.
pub(super) fn unwatch_location(
	location: location::Data,
	library_id: LibraryId,
	location_path: impl AsRef<Path>,
	locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
	locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
) {
	let location_id = location.id;
	if let Some(mut watcher) = locations_watched.remove(&(location_id, library_id)) {
		if watcher.check_path(location_path) {
			// Same path as before: just stop watching.
			watcher.unwatch();
		} else {
			// Path changed: hand the fresh location data to the watcher.
			// NOTE(review): the `false` flag presumably means "do not watch
			// after updating" — confirm against `LocationWatcher::update_data`.
			watcher.update_data(location, false)
		}
		locations_unwatched.insert((location_id, library_id), watcher);
	}
}
/// Forgets a location entirely: logs `message`, then removes its watcher from
/// whichever set currently holds it, stopping the watch if it was active.
pub(super) fn drop_location(
	location_id: LocationId,
	library_id: LibraryId,
	message: &str,
	locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
	locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
) {
	warn!("{message}: <id='{location_id}', library_id='{library_id}'>",);

	let key = (location_id, library_id);
	match locations_watched.remove(&key) {
		Some(mut watcher) => watcher.unwatch(),
		None => {
			// Not actively watched; it may still be parked in the other set.
			locations_unwatched.remove(&key);
		}
	}
}
/// Fetches a location row by id, returning `None` both when the row doesn't
/// exist and when the query fails (the failure is logged).
pub(super) async fn get_location(
	location_id: i32,
	library_ctx: &LibraryContext,
) -> Option<location::Data> {
	library_ctx
		.db
		.location()
		.find_unique(location::id::equals(location_id))
		.exec()
		.await
		.unwrap_or_else(|err| {
			error!("Failed to get location data from location_id: {:#?}", err);
			None
		})
}
/// Strips the location root from `current_path`, yielding the path relative to
/// the location. Returns `None` (and logs) when `current_path` is not under
/// `location_path`.
pub(super) fn subtract_location_path(
	location_path: impl AsRef<Path>,
	current_path: impl AsRef<Path>,
) -> Option<PathBuf> {
	let location_path = location_path.as_ref();
	let current_path = current_path.as_ref();

	match current_path.strip_prefix(location_path) {
		Ok(relative) => Some(relative.to_path_buf()),
		Err(_) => {
			error!(
				"Failed to strip location root path ({}) from current path ({})",
				location_path.display(),
				current_path.display()
			);
			None
		}
	}
}

View File

@@ -0,0 +1,279 @@
use crate::library::LibraryContext;
use std::{path::PathBuf, sync::Arc};
use thiserror::Error;
use tokio::{
io,
sync::{mpsc, oneshot},
};
use tracing::{debug, error};
#[cfg(feature = "location-watcher")]
mod watcher;
#[cfg(feature = "location-watcher")]
mod helpers;
pub type LocationId = i32;

// Request sent to the manager actor: the location to act on, the library it
// belongs to, and a oneshot channel to report the outcome back to the caller.
type ManagerMessage = (
	LocationId,
	LibraryContext,
	oneshot::Sender<Result<(), LocationManagerError>>,
);
/// Errors produced by the `LocationManager` actor and its filesystem watchers.
#[derive(Error, Debug)]
pub enum LocationManagerError {
	#[error("Unable to send location id to be checked by actor: (error: {0})")]
	ActorSendLocationError(#[from] mpsc::error::SendError<ManagerMessage>),

	#[error("Unable to receive actor response: (error: {0})")]
	ActorResponseError(#[from] oneshot::error::RecvError),

	// Only exists when the watcher is compiled in, as `notify` is feature-gated.
	#[cfg(feature = "location-watcher")]
	#[error("Watcher error: (error: {0})")]
	WatcherError(#[from] notify::Error),

	#[error("Location missing local path: <id='{0}'>")]
	LocationMissingLocalPath(LocationId),
	#[error("Tried to update a non-existing file: <path='{0}'>")]
	UpdateNonExistingFile(PathBuf),
	#[error("Unable to extract materialized path from location: <id='{0}', path='{1:?}'>")]
	UnableToExtractMaterializedPath(LocationId, PathBuf),
	#[error("Database error: {0}")]
	DatabaseError(#[from] prisma_client_rust::QueryError),
	#[error("I/O error: {0}")]
	IOError(#[from] io::Error),
}
/// Handle to the background actor that watches locations on disk and keeps
/// their online status up to date.
#[derive(Debug)]
pub struct LocationManager {
	// Requests the actor to start managing a location.
	add_locations_tx: mpsc::Sender<ManagerMessage>,
	// Requests the actor to stop managing a location.
	remove_locations_tx: mpsc::Sender<ManagerMessage>,
	// Taken on `Drop` to signal the actor to shut down.
	stop_tx: Option<oneshot::Sender<()>>,
}
impl LocationManager {
	/// Creates the manager handle and spawns the background checker task
	/// (only when the `location-watcher` feature is enabled).
	#[allow(unused)]
	pub fn new() -> Arc<Self> {
		let (add_locations_tx, add_locations_rx) = mpsc::channel(128);
		let (remove_locations_tx, remove_locations_rx) = mpsc::channel(128);
		let (stop_tx, stop_rx) = oneshot::channel();

		#[cfg(feature = "location-watcher")]
		tokio::spawn(Self::run_locations_checker(
			add_locations_rx,
			remove_locations_rx,
			stop_rx,
		));

		#[cfg(not(feature = "location-watcher"))]
		tracing::warn!("Location watcher is disabled, locations will not be checked");

		debug!("Location manager initialized");

		Arc::new(Self {
			add_locations_tx,
			remove_locations_tx,
			stop_tx: Some(stop_tx),
		})
	}

	/// Asks the actor to start watching/checking a location; resolves with the
	/// actor's outcome. Always `Ok` when the `location-watcher` feature is off.
	pub async fn add(
		&self,
		location_id: LocationId,
		library_ctx: LibraryContext,
	) -> Result<(), LocationManagerError> {
		if cfg!(feature = "location-watcher") {
			let (tx, rx) = oneshot::channel();
			self.add_locations_tx
				.send((location_id, library_ctx, tx))
				.await?;
			// The actor reports the result back through the oneshot channel.
			rx.await?
		} else {
			Ok(())
		}
	}

	/// Asks the actor to stop watching/checking a location; resolves with the
	/// actor's outcome. Always `Ok` when the `location-watcher` feature is off.
	pub async fn remove(
		&self,
		location_id: LocationId,
		library_ctx: LibraryContext,
	) -> Result<(), LocationManagerError> {
		if cfg!(feature = "location-watcher") {
			let (tx, rx) = oneshot::channel();
			self.remove_locations_tx
				.send((location_id, library_ctx, tx))
				.await?;
			rx.await?
		} else {
			Ok(())
		}
	}

	/// Actor loop: owns every `LocationWatcher`, serves add/remove requests,
	/// and re-checks each managed location every `LOCATION_CHECK_INTERVAL`,
	/// moving watchers between the watched/unwatched sets as locations go
	/// online/offline. Runs until the stop signal from `Drop` arrives.
	#[cfg(feature = "location-watcher")]
	async fn run_locations_checker(
		mut add_locations_rx: mpsc::Receiver<ManagerMessage>,
		mut remove_locations_rx: mpsc::Receiver<ManagerMessage>,
		mut stop_rx: oneshot::Receiver<()>,
	) -> Result<(), LocationManagerError> {
		use std::collections::{HashMap, HashSet};

		use futures::stream::{FuturesUnordered, StreamExt};
		use tokio::select;
		use tracing::{info, warn};

		use helpers::{
			check_online, drop_location, get_location, location_check_sleep, unwatch_location,
			watch_location,
		};
		use watcher::LocationWatcher;

		// Sleeping futures; each completion means a location is due for a check.
		let mut to_check_futures = FuturesUnordered::new();
		// Keys removed while their check was pending; the next tick is ignored.
		let mut to_remove = HashSet::new();
		let mut locations_watched = HashMap::new();
		let mut locations_unwatched = HashMap::new();
		loop {
			select! {
				// To add a new location
				Some((location_id, library_ctx, response_tx)) = add_locations_rx.recv() => {
					if let Some(location) = get_location(location_id, &library_ctx).await {
						let is_online = check_online(&location, &library_ctx).await;
						let _ = response_tx.send(
							LocationWatcher::new(location, library_ctx.clone())
								.await
								.map(|mut watcher| {
									// Park the watcher in the set matching the
									// location's current online state.
									if is_online {
										watcher.watch();
										locations_watched.insert(
											(location_id, library_ctx.id),
											watcher
										);
									} else {
										locations_unwatched.insert(
											(location_id, library_ctx.id),
											watcher
										);
									}

									// Schedule the first periodic check.
									to_check_futures.push(
										location_check_sleep(location_id, library_ctx)
									);
								}
							)
						); // ignore errors, we handle errors on receiver
					} else {
						warn!(
							"Location not found in database to be watched: {}",
							location_id
						);
					}
				}

				// To remove an location
				Some((location_id, library_ctx, response_tx)) = remove_locations_rx.recv() => {
					if let Some(location) = get_location(location_id, &library_ctx).await {
						if let Some(ref local_path_str) = location.local_path.clone() {
							// Stop watching, then forget the parked watcher.
							unwatch_location(
								location,
								library_ctx.id,
								local_path_str,
								&mut locations_watched,
								&mut locations_unwatched,
							);
							locations_unwatched.remove(&(location_id, library_ctx.id));
						} else {
							drop_location(
								location_id,
								library_ctx.id,
								"Dropping location from location manager, because we don't have a `local_path` anymore",
								&mut locations_watched,
								&mut locations_unwatched
							);
						}
					} else {
						drop_location(
							location_id,
							library_ctx.id,
							"Removing location from manager, as we failed to fetch from db",
							&mut locations_watched,
							&mut locations_unwatched
						);
					}

					// Marking location as removed, so we don't try to check it when the time comes
					to_remove.insert((location_id, library_ctx.id));

					let _ = response_tx.send(Ok(())); // ignore errors, we handle errors on receiver
				}

				// Periodically checking locations
				Some((location_id, library_ctx)) = to_check_futures.next() => {
					if to_remove.contains(&(location_id, library_ctx.id)) {
						// The time to check came for an already removed library, so we just ignore it
						to_remove.remove(&(location_id, library_ctx.id));
					} else if let Some(location) = get_location(location_id, &library_ctx).await {
						if let Some(ref local_path_str) = location.local_path.clone() {
							// Flip between the watched/unwatched sets based on
							// whether the path is reachable right now.
							if check_online(&location, &library_ctx).await {
								watch_location(
									location,
									library_ctx.id,
									local_path_str,
									&mut locations_watched,
									&mut locations_unwatched
								);
							} else {
								unwatch_location(
									location,
									library_ctx.id,
									local_path_str,
									&mut locations_watched,
									&mut locations_unwatched
								);
							}
							// Re-arm the periodic check for this location.
							to_check_futures.push(location_check_sleep(location_id, library_ctx));
						} else {
							drop_location(
								location_id,
								library_ctx.id,
								"Dropping location from location manager, because we don't have a `local_path` anymore",
								&mut locations_watched,
								&mut locations_unwatched
							);
						}
					} else {
						drop_location(
							location_id,
							library_ctx.id,
							"Removing location from manager, as we failed to fetch from db",
							&mut locations_watched,
							&mut locations_unwatched
						);
					}
				}

				_ = &mut stop_rx => {
					info!("Stopping location manager");
					break;
				}
			}
		}

		Ok(())
	}
}
impl Drop for LocationManager {
	fn drop(&mut self) {
		// `stop_tx` is `Some` until the first drop; sending through it tells
		// the background checker task to break out of its loop. A send error
		// means the receiver (the task) is already gone.
		if let Some(stop_tx) = self.stop_tx.take() {
			if stop_tx.send(()).is_err() {
				error!("Failed to send stop signal to location manager");
			}
		}
	}
}

View File

@@ -0,0 +1,56 @@
use crate::{
library::LibraryContext,
location::{indexer::indexer_job::indexer_job_location, manager::LocationManagerError},
};
use async_trait::async_trait;
use notify::{
event::{AccessKind, AccessMode, CreateKind, ModifyKind, RenameMode},
Event, EventKind,
};
use tracing::trace;
use super::{
utils::{create_dir, file_creation_or_update, remove_event, rename_both_event},
EventHandler,
};
/// Linux-specific filesystem event handler; stateless, as each `notify` event
/// received here carries enough information to act on directly.
#[derive(Debug)]
pub(super) struct LinuxEventHandler {}
#[async_trait]
impl EventHandler for LinuxEventHandler {
	fn new() -> Self {
		Self {}
	}

	/// Maps raw `notify` events (inotify-style on Linux) onto the shared
	/// location update helpers; unhandled kinds are only traced.
	async fn handle_event(
		&mut self,
		location: indexer_job_location::Data,
		library_ctx: &LibraryContext,
		event: Event,
	) -> Result<(), LocationManagerError> {
		trace!("Received Linux event: {:#?}", event);

		match event.kind {
			EventKind::Access(AccessKind::Close(AccessMode::Write)) => {
				// If a file was closed with write mode, then it was updated or created
				file_creation_or_update(location, event, library_ctx).await?;
			}
			EventKind::Create(CreateKind::Folder) => {
				create_dir(location, event, library_ctx.clone()).await?;
			}
			EventKind::Modify(ModifyKind::Name(RenameMode::Both)) => {
				// Single event carrying both the old and new paths of a rename.
				rename_both_event(location, event, library_ctx).await?;
			}
			EventKind::Remove(remove_kind) => {
				remove_event(location, event, remove_kind, library_ctx).await?;
			}
			other_event_kind => {
				trace!("Other Linux event that we don't handle for now: {other_event_kind:#?}");
			}
		}

		Ok(())
	}
}

View File

@@ -0,0 +1,130 @@
use crate::{
library::LibraryContext,
location::{indexer::indexer_job::indexer_job_location, manager::LocationManagerError},
};
use std::{future::Future, time::Duration};
use async_trait::async_trait;
use notify::{
event::{CreateKind, DataChange, ModifyKind, RenameMode},
Event, EventKind,
};
use tokio::{fs, select, spawn, sync::oneshot, time::sleep};
use tracing::{trace, warn};
use super::{
utils::{create_dir, create_file, remove_event, rename, update_file},
EventHandler,
};
/// MacOS-specific (FSEvents-backed) event handler.
#[derive(Debug, Default)]
pub(super) struct MacOsEventHandler {
	// Set when a create event spawns a deferred `wait_to_create` task; a
	// subsequent rename event is forwarded through this sender so the task can
	// pair them up. NOTE(review): the pairing protocol lives in
	// `wait_to_create` (partially out of view here) — confirm.
	maybe_rename_sender: Option<oneshot::Sender<Event>>,
}
#[async_trait]
impl EventHandler for MacOsEventHandler {
	fn new() -> Self
	where
		Self: Sized,
	{
		Default::default()
	}

	/// Maps FSEvents notifications onto the shared location update helpers.
	///
	/// File/folder creations are handled by a spawned `wait_to_create` task
	/// that also receives a oneshot channel, presumably so a quickly-following
	/// rename event can be associated with the create — confirm against
	/// `wait_to_create`.
	async fn handle_event(
		&mut self,
		location: indexer_job_location::Data,
		library_ctx: &LibraryContext,
		event: Event,
	) -> Result<(), LocationManagerError> {
		trace!("Received MacOS event: {:#?}", event);

		match event.kind {
			EventKind::Create(create_kind) => match create_kind {
				CreateKind::File => {
					let (maybe_rename_tx, maybe_rename_rx) = oneshot::channel();
					spawn(wait_to_create(
						location,
						event,
						library_ctx.clone(),
						create_file,
						maybe_rename_rx,
					));
					// Remember the sender so a later rename can reach the task.
					self.maybe_rename_sender = Some(maybe_rename_tx);
				}
				CreateKind::Folder => {
					let (maybe_rename_tx, maybe_rename_rx) = oneshot::channel();
					spawn(wait_to_create(
						location,
						event,
						library_ctx.clone(),
						create_dir,
						maybe_rename_rx,
					));
					self.maybe_rename_sender = Some(maybe_rename_tx);
				}
				other => {
					trace!("Ignoring other create event: {:#?}", other);
				}
			},
			EventKind::Modify(ref modify_kind) => match modify_kind {
				ModifyKind::Data(DataChange::Any) => {
					if fs::metadata(&event.paths[0]).await?.is_file() {
						update_file(location, event, library_ctx).await?;
					} else {
						trace!("Unexpected MacOS modify event on a directory");
					}
					// We ignore EventKind::Modify(ModifyKind::Data(DataChange::Any)) for directories
					// as they're also used for removing files and directories, being emitted
					// on the parent directory in this case
				}
				ModifyKind::Name(RenameMode::Any) => {
					// Forward the rename to the pending create task, if any;
					// `take()` ensures each sender is used at most once.
					if let Some(rename_sender) = self.maybe_rename_sender.take() {
						if !rename_sender.is_closed() && rename_sender.send(event).is_err() {
							warn!("Failed to send rename event");
						}
					}
				}
				other => {
					trace!("Ignoring other modify event: {:#?}", other);
				}
			},
			EventKind::Remove(remove_kind) => {
				remove_event(location, event, remove_kind, library_ctx).await?;
				// An EventKind::Modify(ModifyKind::Data(DataChange::Any)) - On parent directory
				// is also emitted, but we can ignore it.
			}
			other_event_kind => {
				trace!("Other MacOS event that we don't handle for now: {other_event_kind:#?}");
			}
		}

		Ok(())
	}
}
// FIX-ME: Had some troubles with borrowck, to receive a
// impl FnOnce(indexer_job_location::Data, Event, &LibraryContext) -> Fut
// as a parameter, had to move LibraryContext into the functions
/// Debounces a macOS create event: sleeps one second and then runs
/// `create_fn`, unless a rename event arrives on `maybe_rename_rx` first,
/// in which case the "create" is treated as the second half of a rename and
/// the existing database row is renamed instead.
async fn wait_to_create<Fut>(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: LibraryContext,
    create_fn: impl FnOnce(indexer_job_location::Data, Event, LibraryContext) -> Fut,
    maybe_rename_rx: oneshot::Receiver<Event>,
) -> Result<(), LocationManagerError>
where
    // NOTE(review): the previous `for<'r>` higher-ranked bound was vacuous —
    // no lifetime appeared in the bound — so it is dropped here.
    Fut: Future<Output = Result<(), LocationManagerError>>,
{
    select! {
        // Grace period elapsed with no rename: it was a genuine creation.
        () = sleep(Duration::from_secs(1)) => {
            create_fn(location, event, library_ctx).await
        },
        // Rename detected: the create event carries the new path, while the
        // rename event carries the old one.
        Ok(rename_event) = maybe_rename_rx => {
            trace!("Renaming file or directory instead of creating a new one");
            rename(&event.paths[0], &rename_event.paths[0], location, &library_ctx).await
        }
    }
}

View File

@@ -0,0 +1,697 @@
use crate::{
library::LibraryContext,
prisma::{file_path, location},
};
use std::path::{Path, PathBuf};
use async_trait::async_trait;
use notify::{Config, Event, RecommendedWatcher, RecursiveMode, Watcher};
use tokio::{
runtime::Handle,
select,
sync::{mpsc, oneshot},
task::{block_in_place, JoinHandle},
};
use tracing::{debug, error, warn};
use super::{
super::{fetch_location, indexer::indexer_job::indexer_job_location},
LocationId, LocationManagerError,
};
mod linux;
mod macos;
mod windows;
mod utils;
use utils::{check_event, check_location_online};
#[cfg(target_os = "linux")]
type Handler = linux::LinuxEventHandler;
#[cfg(target_os = "macos")]
type Handler = macos::MacOsEventHandler;
#[cfg(target_os = "windows")]
type Handler = windows::WindowsEventHandler;
file_path::include!(file_path_with_object { object });
/// Platform-specific handling of file system watcher events.
///
/// Each supported OS gets its own implementation (see the `linux`, `macos`
/// and `windows` submodules), selected at compile time via the `Handler`
/// type alias above.
#[async_trait]
trait EventHandler {
    /// Creates a fresh handler (`Self: Sized` keeps the trait object-safe).
    fn new() -> Self
    where
        Self: Sized;

    /// Processes a single `notify` event for an online `location`.
    async fn handle_event(
        &mut self,
        location: indexer_job_location::Data,
        library_ctx: &LibraryContext,
        event: Event,
    ) -> Result<(), LocationManagerError>;
}
/// Watches a single location's directory tree for file system events and
/// keeps the library database in sync with what happens on disk.
#[derive(Debug)]
pub(super) struct LocationWatcher {
    location: location::Data,
    // Root path currently being watched; derived from `location.local_path`.
    path: PathBuf,
    watcher: RecommendedWatcher,
    // Task draining watcher events; joined on drop.
    handle: Option<JoinHandle<()>>,
    // Signals the event-handling task to stop; consumed on drop.
    stop_tx: Option<oneshot::Sender<()>>,
}
impl LocationWatcher {
    /// Creates a watcher for `location` and spawns its event-handling task.
    ///
    /// Fails with `LocationMissingLocalPath` if the location has no
    /// `local_path`. Note that watching only begins once [`Self::watch`]
    /// is called.
    pub(super) async fn new(
        location: location::Data,
        library_ctx: LibraryContext,
    ) -> Result<Self, LocationManagerError> {
        let (events_tx, events_rx) = mpsc::unbounded_channel();
        let (stop_tx, stop_rx) = oneshot::channel();

        // The `notify` callback runs outside the async runtime, so events are
        // forwarded through an unbounded channel to the handler task.
        let watcher = RecommendedWatcher::new(
            move |result| {
                if !events_tx.is_closed() {
                    if events_tx.send(result).is_err() {
                        error!(
                            "Unable to send watcher event to location manager for location: <id='{}'>",
                            location.id
                        );
                    }
                } else {
                    error!(
                        "Tried to send location file system events to a closed channel: <id='{}'",
                        location.id
                    );
                }
            },
            Config::default(),
        )?;

        let handle = tokio::spawn(Self::handle_watch_events(
            location.id,
            library_ctx,
            events_rx,
            stop_rx,
        ));

        let path = PathBuf::from(
            location
                .local_path
                .as_ref()
                .ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?,
        );

        Ok(Self {
            location,
            path,
            watcher,
            handle: Some(handle),
            stop_tx: Some(stop_tx),
        })
    }

    /// Event loop: drains watcher events and dispatches them to the
    /// platform-specific handler until a stop signal arrives.
    async fn handle_watch_events(
        location_id: LocationId,
        library_ctx: LibraryContext,
        mut events_rx: mpsc::UnboundedReceiver<notify::Result<Event>>,
        mut stop_rx: oneshot::Receiver<()>,
    ) {
        let mut event_handler = Handler::new();
        loop {
            select! {
                Some(event) = events_rx.recv() => {
                    match event {
                        Ok(event) => {
                            // Errors are logged rather than propagated so a single
                            // bad event doesn't kill the whole watcher task.
                            if let Err(e) = Self::handle_single_event(
                                location_id,
                                event,
                                &mut event_handler,
                                &library_ctx
                            ).await {
                                error!("Failed to handle location file system event: \
                                <id='{location_id}', error='{e:#?}'>",
                                );
                            }
                        }
                        Err(e) => {
                            error!("watch error: {:#?}", e);
                        }
                    }
                }
                _ = &mut stop_rx => {
                    debug!("Stop Location Manager event handler for location: <id='{}'>", location_id);
                    break
                }
            }
        }
    }

    /// Validates and dispatches one event: filters ignorable events,
    /// re-fetches the location from the database, and only invokes the
    /// handler when the location exists and is online.
    async fn handle_single_event(
        location_id: LocationId,
        event: Event,
        event_handler: &mut impl EventHandler,
        library_ctx: &LibraryContext,
    ) -> Result<(), LocationManagerError> {
        if check_event(&event) {
            if let Some(location) = fetch_location(library_ctx, location_id)
                .include(indexer_job_location::include())
                .exec()
                .await?
            {
                if check_location_online(&location) {
                    return event_handler
                        .handle_event(location, library_ctx, event)
                        .await;
                } else {
                    warn!("Tried to handle event for offline location: <id='{location_id}'>");
                }
            } else {
                warn!("Tried to handle event for unknown location: <id='{location_id}'>");
            }
        }
        Ok(())
    }

    /// Returns whether `path` is the root path this watcher is attached to.
    pub(super) fn check_path(&self, path: impl AsRef<Path>) -> bool {
        self.path == path.as_ref()
    }

    /// Starts (or restarts) recursively watching the location's root path.
    /// Failures are logged, not returned.
    pub(super) fn watch(&mut self) {
        if let Err(e) = self.watcher.watch(&self.path, RecursiveMode::Recursive) {
            error!(
                "Unable to watch location: (path: {}, error: {e:#?})",
                self.path.display()
            );
        } else {
            debug!("Now watching location: (path: {})", self.path.display());
        }
    }

    /// Stops watching the location's root path. Failures are logged, not returned.
    pub(super) fn unwatch(&mut self) {
        if let Err(e) = self.watcher.unwatch(&self.path) {
            /**************************************** TODO: ****************************************
             * According to an unit test, this error may occur when a subdirectory is removed *
             * and we try to unwatch the parent directory then we have to check the implications *
             * of unwatch error for this case. *
             **************************************************************************************/
            error!(
                "Unable to unwatch location: (path: {}, error: {e:#?})",
                self.path.display()
            );
        } else {
            debug!("Stop watching location: (path: {})", self.path.display());
        }
    }

    /// Replaces the cached location data, re-pointing the watcher when the
    /// local path changed (`to_watch` controls whether the new path is watched).
    ///
    /// # Panics
    /// Panics if the ids differ, or if the new data has no `local_path`.
    pub(super) fn update_data(&mut self, location: location::Data, to_watch: bool) {
        assert_eq!(
            self.location.id, location.id,
            "Updated location data must have the same id"
        );
        let path = PathBuf::from(location.local_path.as_ref().unwrap_or_else(|| {
            panic!(
                "Tried to watch a location without local_path: <id='{}'>",
                location.id
            )
        }));
        if self.path != path {
            self.unwatch();
            self.path = path;
            if to_watch {
                self.watch();
            }
        }
        self.location = location;
    }
}
impl Drop for LocationWatcher {
    fn drop(&mut self) {
        // Ask the event-handling task to stop, then synchronously wait for it
        // to finish so no events are processed after the watcher is gone.
        if let Some(stop_tx) = self.stop_tx.take() {
            if stop_tx.send(()).is_err() {
                error!(
                    "Failed to send stop signal to location watcher: <id='{}'>",
                    self.location.id
                );
            }

            // FIXME: change this Drop to async drop in the future
            // `block_in_place` lets us block on the join handle from inside the
            // tokio runtime without starving other tasks on this worker thread.
            if let Some(handle) = self.handle.take() {
                if let Err(e) =
                    block_in_place(move || Handle::current().block_on(async move { handle.await }))
                {
                    error!("Failed to join watcher task: {e:#?}")
                }
            }
        }
    }
}
/***************************************************************************************************
* Some tests to validate our assumptions of events through different file systems *
***************************************************************************************************
* Events dispatched on Linux: *
* Create File: *
* 1) EventKind::Create(CreateKind::File) *
* 2) EventKind::Modify(ModifyKind::Metadata(MetadataKind::Any)) *
* or EventKind::Modify(ModifyKind::Data(DataChange::Any)) *
* 3) EventKind::Access(AccessKind::Close(AccessMode::Write))) *
* Create Directory: *
* 1) EventKind::Create(CreateKind::Folder) *
* Update File: *
* 1) EventKind::Modify(ModifyKind::Data(DataChange::Any)) *
* 2) EventKind::Access(AccessKind::Close(AccessMode::Write))) *
* Update File (rename): *
* 1) EventKind::Modify(ModifyKind::Name(RenameMode::From)) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::To)) *
* 3) EventKind::Modify(ModifyKind::Name(RenameMode::Both)) *
* Update Directory (rename): *
* 1) EventKind::Modify(ModifyKind::Name(RenameMode::From)) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::To)) *
* 3) EventKind::Modify(ModifyKind::Name(RenameMode::Both)) *
* Delete File: *
* 1) EventKind::Remove(RemoveKind::File) *
* Delete Directory: *
* 1) EventKind::Remove(RemoveKind::Folder) *
* *
* Events dispatched on MacOS: *
* Create File: *
* 1) EventKind::Create(CreateKind::File) *
* Create Directory: *
* 1) EventKind::Create(CreateKind::Folder) *
* Update File: *
* 1) EventKind::Modify(ModifyKind::Data(DataChange::Any)) *
* Update File (rename): *
* 1) EventKind::Create(CreateKind::File) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::Any)) *
* Update Directory (rename): *
* 1) EventKind::Create(CreateKind::Folder) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::Any)) *
* Delete File: *
* 1) EventKind::Remove(RemoveKind::Any) *
* 2) EventKind::Modify(ModifyKind::Data(DataChange::Any)) - On parent directory *
* Delete Directory: *
* 1) EventKind::Remove(RemoveKind::Any) *
* 2) EventKind::Modify(ModifyKind::Data(DataChange::Any)) - On parent directory *
* *
* Events dispatched on Windows: *
* Create File: *
* 1) EventKind::Create(CreateKind::Any) *
* 2) EventKind::Modify(ModifyKind::Any) *
* Create Directory: *
* 1) EventKind::Create(CreateKind::Any) *
* Update File: *
* 1) EventKind::Modify(ModifyKind::Any) *
* Update File (rename): *
* 1) EventKind::Modify(ModifyKind::Name(RenameMode::From)) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::To)) *
* Update Directory (rename): *
* 1) EventKind::Modify(ModifyKind::Name(RenameMode::From)) *
* 2) EventKind::Modify(ModifyKind::Name(RenameMode::To)) *
* Delete File: *
* 1) EventKind::Remove(RemoveKind::Any) *
* Delete Directory: *
* 1) EventKind::Remove(RemoveKind::Any) *
* *
* Events dispatched on Android: *
* TODO *
* *
* Events dispatched on iOS: *
* TODO *
* *
**************************************************************************************************/
#[cfg(test)]
#[allow(unused)]
mod tests {
#[cfg(target_os = "macos")]
use notify::event::DataChange;
use notify::{
event::{AccessKind, AccessMode, CreateKind, ModifyKind, RemoveKind, RenameMode},
Config, Event, EventKind, RecommendedWatcher, Watcher,
};
use std::io::ErrorKind;
use std::{path::Path, time::Duration};
use tempfile::{tempdir, TempDir};
use tokio::{fs, io::AsyncWriteExt, sync::mpsc, time::sleep};
use tracing::{debug, error};
use tracing_test::traced_test;
/// Creates a temp dir, a `notify` watcher and the channel its events are
/// forwarded through. The watcher is not yet attached to any path.
async fn setup_watcher() -> (
    TempDir,
    RecommendedWatcher,
    mpsc::UnboundedReceiver<notify::Result<Event>>,
) {
    let (events_tx, events_rx) = mpsc::unbounded_channel();
    let watcher = RecommendedWatcher::new(
        move |result| {
            events_tx
                .send(result)
                .expect("Unable to send watcher event");
        },
        Config::default(),
    )
    .expect("Failed to create watcher");
    (tempdir().unwrap(), watcher, events_rx)
}
async fn expect_event(
mut events_rx: mpsc::UnboundedReceiver<notify::Result<Event>>,
path: impl AsRef<Path>,
expected_event: EventKind,
) {
debug!("Expecting event: {expected_event:#?}");
let path = path.as_ref();
let mut tries = 0;
loop {
match events_rx.try_recv() {
Ok(maybe_event) => {
let event = maybe_event.expect("Failed to receive event");
debug!("Received event: {event:#?}");
// In case of file creation, we expect to see an close event on write mode
if event.paths[0] == path && event.kind == expected_event {
debug!("Received expected event: {expected_event:#?}");
break;
}
}
Err(e) => {
debug!("No event yet: {e:#?}");
tries += 1;
sleep(Duration::from_millis(100)).await;
}
}
if tries == 10 {
panic!("No expected event received after 10 tries");
}
}
}
/// File creation should surface as the platform-specific "file created"
/// event kind (close-write on Linux, create on macOS, modify on Windows).
#[tokio::test]
#[traced_test]
async fn create_file_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    let file_path = root_dir.path().join("test.txt");
    fs::write(&file_path, "test").await.unwrap();
    #[cfg(target_os = "windows")]
    expect_event(events_rx, &file_path, EventKind::Modify(ModifyKind::Any)).await;
    #[cfg(target_os = "macos")]
    expect_event(events_rx, &file_path, EventKind::Create(CreateKind::File)).await;
    #[cfg(target_os = "linux")]
    expect_event(
        events_rx,
        &file_path,
        EventKind::Access(AccessKind::Close(AccessMode::Write)),
    )
    .await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// Directory creation should surface as a create event on the new directory
/// (Folder kind on Linux/macOS, Any on Windows).
#[tokio::test]
#[traced_test]
async fn create_dir_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    let dir_path = root_dir.path().join("inner");
    fs::create_dir(&dir_path)
        .await
        .expect("Failed to create directory");
    #[cfg(target_os = "windows")]
    expect_event(events_rx, &dir_path, EventKind::Create(CreateKind::Any)).await;
    #[cfg(target_os = "macos")]
    expect_event(events_rx, &dir_path, EventKind::Create(CreateKind::Folder)).await;
    #[cfg(target_os = "linux")]
    expect_event(events_rx, &dir_path, EventKind::Create(CreateKind::Folder)).await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// Appending to an existing file should surface as the platform's
/// content-update event (close-write on Linux, data-change on macOS,
/// modify on Windows).
#[tokio::test]
#[traced_test]
async fn update_file_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    // Create the file before watching so only the update is observed.
    let file_path = root_dir.path().join("test.txt");
    fs::write(&file_path, "test").await.unwrap();
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    let mut file = fs::OpenOptions::new()
        .append(true)
        .open(&file_path)
        .await
        .expect("Failed to open file");
    // Writing then sync data before closing the file
    file.write_all(b"\nanother test")
        .await
        .expect("Failed to write to file");
    file.sync_all().await.expect("Failed to flush file");
    drop(file);
    #[cfg(target_os = "windows")]
    expect_event(events_rx, &file_path, EventKind::Modify(ModifyKind::Any)).await;
    #[cfg(target_os = "macos")]
    expect_event(
        events_rx,
        &file_path,
        EventKind::Modify(ModifyKind::Data(DataChange::Any)),
    )
    .await;
    #[cfg(target_os = "linux")]
    expect_event(
        events_rx,
        &file_path,
        EventKind::Access(AccessKind::Close(AccessMode::Write)),
    )
    .await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// Renaming a file: Windows reports `RenameMode::To` on the new path,
/// macOS `RenameMode::Any` on the old path, Linux `RenameMode::Both`.
#[tokio::test]
#[traced_test]
async fn update_file_rename_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    let file_path = root_dir.path().join("test.txt");
    fs::write(&file_path, "test").await.unwrap();
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    let new_file_name = root_dir.path().join("test2.txt");
    fs::rename(&file_path, &new_file_name)
        .await
        .expect("Failed to rename file");
    #[cfg(target_os = "windows")]
    expect_event(
        events_rx,
        &new_file_name,
        EventKind::Modify(ModifyKind::Name(RenameMode::To)),
    )
    .await;
    #[cfg(target_os = "macos")]
    expect_event(
        events_rx,
        &file_path,
        EventKind::Modify(ModifyKind::Name(RenameMode::Any)),
    )
    .await;
    #[cfg(target_os = "linux")]
    expect_event(
        events_rx,
        &file_path,
        EventKind::Modify(ModifyKind::Name(RenameMode::Both)),
    )
    .await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// Renaming a directory follows the same per-platform rename event shape
/// as renaming a file.
#[tokio::test]
#[traced_test]
async fn update_dir_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    let dir_path = root_dir.path().join("inner");
    fs::create_dir(&dir_path)
        .await
        .expect("Failed to create directory");
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    let new_dir_name = root_dir.path().join("inner2");
    fs::rename(&dir_path, &new_dir_name)
        .await
        .expect("Failed to rename directory");
    #[cfg(target_os = "windows")]
    expect_event(
        events_rx,
        &new_dir_name,
        EventKind::Modify(ModifyKind::Name(RenameMode::To)),
    )
    .await;
    #[cfg(target_os = "macos")]
    expect_event(
        events_rx,
        &dir_path,
        EventKind::Modify(ModifyKind::Name(RenameMode::Any)),
    )
    .await;
    #[cfg(target_os = "linux")]
    expect_event(
        events_rx,
        &dir_path,
        EventKind::Modify(ModifyKind::Name(RenameMode::Both)),
    )
    .await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// File removal: Linux/Windows report a remove event on the file itself;
/// macOS only emits a data-change on the parent directory.
#[tokio::test]
#[traced_test]
async fn delete_file_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    let file_path = root_dir.path().join("test.txt");
    fs::write(&file_path, "test").await.unwrap();
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    fs::remove_file(&file_path)
        .await
        .expect("Failed to remove file");
    #[cfg(target_os = "windows")]
    expect_event(events_rx, &file_path, EventKind::Remove(RemoveKind::Any)).await;
    #[cfg(target_os = "macos")]
    expect_event(
        events_rx,
        &root_dir.path(),
        EventKind::Modify(ModifyKind::Data(DataChange::Any)),
    )
    .await;
    #[cfg(target_os = "linux")]
    expect_event(events_rx, &file_path, EventKind::Remove(RemoveKind::File)).await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
/// Directory removal: as with files, macOS only reports a data-change on
/// the parent. The inner directory is unwatched first to exercise the
/// unwatch-before-remove behavior noted in `LocationWatcher::unwatch`.
#[tokio::test]
#[traced_test]
async fn delete_dir_event() {
    let (root_dir, mut watcher, events_rx) = setup_watcher().await;
    let dir_path = root_dir.path().join("inner");
    fs::create_dir(&dir_path)
        .await
        .expect("Failed to create directory");
    // Sanity check that the directory really exists before watching.
    if let Err(e) = fs::metadata(&dir_path).await {
        if e.kind() == ErrorKind::NotFound {
            panic!("Directory not found");
        } else {
            panic!("{e}");
        }
    }
    watcher
        .watch(root_dir.path(), notify::RecursiveMode::Recursive)
        .expect("Failed to watch root directory");
    debug!("Now watching {}", root_dir.path().display());
    debug!("First unwatching the inner directory before removing it");
    if let Err(e) = watcher.unwatch(&dir_path) {
        error!("Failed to unwatch inner directory: {e:#?}");
    }
    fs::remove_dir(&dir_path)
        .await
        .expect("Failed to remove directory");
    #[cfg(target_os = "windows")]
    expect_event(events_rx, &dir_path, EventKind::Remove(RemoveKind::Any)).await;
    #[cfg(target_os = "macos")]
    expect_event(
        events_rx,
        &root_dir.path(),
        EventKind::Modify(ModifyKind::Data(DataChange::Any)),
    )
    .await;
    #[cfg(target_os = "linux")]
    expect_event(events_rx, &dir_path, EventKind::Remove(RemoveKind::Folder)).await;
    debug!("Unwatching root directory: {}", root_dir.path().display());
    if let Err(e) = watcher.unwatch(root_dir.path()) {
        error!("Failed to unwatch root directory: {e:#?}");
    }
}
}

View File

@@ -0,0 +1,596 @@
use crate::{
invalidate_query,
library::LibraryContext,
location::{
delete_directory,
file_path_helper::create_file_path,
indexer::indexer_job::indexer_job_location,
manager::{helpers::subtract_location_path, LocationId, LocationManagerError},
},
object::{
identifier_job::{assemble_object_metadata, ObjectCreationMetadata},
preview::{
can_generate_thumbnail_for_image, generate_image_thumbnail, THUMBNAIL_CACHE_DIR_NAME,
},
validation::hash::file_checksum,
},
prisma::{file_path, object},
};
use std::{
path::{Path, PathBuf},
str::FromStr,
};
use chrono::{FixedOffset, Utc};
use int_enum::IntEnum;
use notify::{event::RemoveKind, Event};
use prisma_client_rust::{raw, PrismaValue};
use sd_file_ext::extensions::ImageExtension;
use tokio::{fs, io::ErrorKind};
use tracing::{error, info, trace, warn};
use super::file_path_with_object;
/// Returns `true` when `location` is flagged online in the database; logs and
/// returns `false` otherwise so the caller can skip handling the event.
pub(super) fn check_location_online(location: &indexer_job_location::Data) -> bool {
    if location.is_online {
        return true;
    }

    info!(
        "Location is offline, skipping event: <id='{}'>",
        location.id
    );
    false
}
/// Pre-filter for watcher events: returns `false` for events we never want to
/// process (currently anything whose path contains `.DS_Store`).
///
/// Uses a lossy UTF-8 conversion instead of panicking: the previous
/// `to_str().expect("Found non-UTF-8 path")` would crash the watcher task on
/// any non-UTF-8 path, which is legal on Linux and macOS file systems.
pub(super) fn check_event(event: &Event) -> bool {
    !event
        .paths
        .iter()
        .any(|p| p.to_string_lossy().contains(".DS_Store"))
}
/// Registers a newly created directory in the database, creating its
/// `file_path` row under the already-indexed parent directory.
/// Events for locations without a `local_path` or paths outside the location
/// root are silently ignored.
pub(super) async fn create_dir(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: LibraryContext,
) -> Result<(), LocationManagerError> {
    if let Some(ref location_local_path) = location.local_path {
        trace!(
            "Location: <root_path ='{location_local_path}'> creating directory: {}",
            event.paths[0].display()
        );
        // Work with the path relative to the location root (the "materialized path").
        if let Some(subpath) = subtract_location_path(location_local_path, &event.paths[0]) {
            let parent_directory = get_parent_dir(location.id, &subpath, &library_ctx).await?;
            trace!("parent_directory: {:?}", parent_directory);
            if let Some(parent_directory) = parent_directory {
                let created_path = create_file_path(
                    &library_ctx,
                    location.id,
                    subpath.to_str().expect("Found non-UTF-8 path").to_string(),
                    subpath
                        .file_stem()
                        .unwrap()
                        .to_str()
                        .expect("Found non-UTF-8 path")
                        .to_string(),
                    None,
                    Some(parent_directory.id),
                    true,
                )
                .await?;
                info!("Created path: {}", created_path.materialized_path);
                // Refresh the explorer so the new directory shows up immediately.
                invalidate_query!(library_ctx, "locations.getExplorerData");
            } else {
                warn!("Watcher found a path without parent");
            }
        }
    }
    Ok(())
}
/// Entry point for "file created" events: delegates to `inner_create_file`
/// once the location's local root path is known.
pub(super) async fn create_file(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: LibraryContext,
) -> Result<(), LocationManagerError> {
    match location.local_path {
        Some(ref local_path) => {
            inner_create_file(location.id, local_path, event, &library_ctx).await
        }
        None => Err(LocationManagerError::LocationMissingLocalPath(location.id)),
    }
}
/// Creates database records for a newly created file: its `file_path` row,
/// an upserted `object` row (content identity keyed by cas_id), and — when
/// possible — a thumbnail.
async fn inner_create_file(
    location_id: LocationId,
    location_local_path: &str,
    event: Event,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    trace!(
        "Location: <root_path ='{location_local_path}'> creating file: {}",
        event.paths[0].display()
    );
    if let Some(materialized_path) = subtract_location_path(location_local_path, &event.paths[0]) {
        if let Some(parent_directory) =
            get_parent_dir(location_id, &materialized_path, library_ctx).await?
        {
            let created_file = create_file_path(
                library_ctx,
                location_id,
                materialized_path
                    .to_str()
                    .expect("Found non-UTF-8 path")
                    .to_string(),
                materialized_path
                    .file_stem()
                    .unwrap_or_default()
                    .to_str()
                    .expect("Found non-UTF-8 path")
                    .to_string(),
                // Empty extensions are normalized to `None`.
                materialized_path.extension().and_then(|ext| {
                    if ext.is_empty() {
                        None
                    } else {
                        Some(ext.to_str().expect("Found non-UTF-8 path").to_string())
                    }
                }),
                Some(parent_directory.id),
                false,
            )
            .await?;
            info!("Created path: {}", created_file.materialized_path);
            // generate provisional object
            let ObjectCreationMetadata {
                cas_id,
                size_str,
                kind,
                date_created,
            } = assemble_object_metadata(location_local_path, &created_file).await?;
            // upsert object because it can be from a file that previously existed and was moved
            let object = library_ctx
                .db
                .object()
                .upsert(
                    object::cas_id::equals(cas_id.clone()),
                    (
                        cas_id.clone(),
                        size_str.clone(),
                        vec![
                            object::date_created::set(date_created),
                            object::kind::set(kind.int_value()),
                        ],
                    ),
                    vec![
                        object::size_in_bytes::set(size_str),
                        object::date_indexed::set(
                            Utc::now().with_timezone(&FixedOffset::east_opt(0).unwrap()),
                        ),
                    ],
                )
                .exec()
                .await?;
            // Link the new file_path row to its (possibly pre-existing) object.
            library_ctx
                .db
                .file_path()
                .update(
                    file_path::location_id_id(location_id, created_file.id),
                    vec![file_path::object_id::set(Some(object.id))],
                )
                .exec()
                .await?;
            trace!("object: {:#?}", object);
            if !object.has_thumbnail {
                if let Some(ref extension) = created_file.extension {
                    generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
                }
            }
            invalidate_query!(library_ctx, "locations.getExplorerData");
        } else {
            warn!("Watcher found a path without parent");
        }
    }
    Ok(())
}
/// Dispatches a "written and closed" event: updates the file when it is
/// already tracked in the database, otherwise creates it.
pub(super) async fn file_creation_or_update(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    let local_path = match location.local_path {
        Some(ref local_path) => local_path,
        None => return Err(LocationManagerError::LocationMissingLocalPath(location.id)),
    };

    match get_existing_file_path(&location, &event.paths[0], false, library_ctx).await? {
        Some(file_path) => inner_update_file(local_path, file_path, event, library_ctx).await,
        // No row for this path yet, so it must be a brand-new file.
        None => inner_create_file(location.id, local_path, event, library_ctx).await,
    }
}
/// Handles a content-update event for a file that must already be tracked;
/// errors with `UpdateNonExistingFile` when it isn't.
pub(super) async fn update_file(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    let local_path = match location.local_path {
        Some(ref local_path) => local_path,
        None => return Err(LocationManagerError::LocationMissingLocalPath(location.id)),
    };

    match get_existing_file_path(&location, &event.paths[0], false, library_ctx).await? {
        Some(file_path) => {
            let ret = inner_update_file(local_path, file_path, event, library_ctx).await;
            // Refresh the explorer regardless of the update's outcome.
            invalidate_query!(library_ctx, "locations.getExplorerData");
            ret
        }
        None => Err(LocationManagerError::UpdateNonExistingFile(
            event.paths[0].clone(),
        )),
    }
}
/// Applies a content update to an existing `file_path`: recomputes the object
/// metadata and, when the cas_id changed, updates the object row (including
/// checksum and thumbnail when they were present before).
async fn inner_update_file(
    location_local_path: &str,
    file_path: file_path_with_object::Data,
    event: Event,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    trace!(
        "Location: <root_path ='{location_local_path}'> updating file: {}",
        event.paths[0].display()
    );
    // We have to separate this object, as the `assemble_object_metadata` doesn't
    // accept `file_path_with_object::Data`
    let file_path_only = file_path::Data {
        id: file_path.id,
        is_dir: file_path.is_dir,
        location_id: file_path.location_id,
        location: None,
        materialized_path: file_path.materialized_path,
        name: file_path.name,
        extension: file_path.extension,
        object_id: file_path.object_id,
        object: None,
        parent_id: file_path.parent_id,
        key_id: file_path.key_id,
        date_created: file_path.date_created,
        date_modified: file_path.date_modified,
        date_indexed: file_path.date_indexed,
        key: None,
    };
    let ObjectCreationMetadata {
        cas_id,
        size_str,
        kind,
        date_created,
    } = assemble_object_metadata(location_local_path, &file_path_only).await?;
    if let Some(ref object) = file_path.object {
        // A differing cas_id means the file content actually changed.
        if object.cas_id != cas_id {
            // file content changed
            library_ctx
                .db
                .object()
                .update(
                    object::id::equals(object.id),
                    vec![
                        object::cas_id::set(cas_id.clone()),
                        object::size_in_bytes::set(size_str),
                        object::kind::set(kind.int_value()),
                        object::date_modified::set(date_created),
                        object::integrity_checksum::set(if object.integrity_checksum.is_some() {
                            // If a checksum was already computed, we need to recompute it
                            Some(file_checksum(&event.paths[0]).await?)
                        } else {
                            None
                        }),
                    ],
                )
                .exec()
                .await?;
            if object.has_thumbnail {
                // if this file had a thumbnail previously, we update it to match the new content
                if let Some(ref extension) = file_path_only.extension {
                    generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
                }
            }
        }
    }
    invalidate_query!(library_ctx, "locations.getExplorerData");
    Ok(())
}
/// Handles `RenameMode::Both` events, where `paths[0]` is the old path and
/// `paths[1]` is the new one.
pub(super) async fn rename_both_event(
    location: indexer_job_location::Data,
    event: Event,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    let (old_path, new_path) = (&event.paths[0], &event.paths[1]);
    rename(new_path, old_path, location, library_ctx).await
}
/// Applies a rename/move to the database: updates the renamed row's
/// materialized path, name and extension, and — for directories — rewrites
/// the materialized paths of every descendant via raw SQL.
pub(super) async fn rename(
    new_path: impl AsRef<Path>,
    old_path: impl AsRef<Path>,
    location: indexer_job_location::Data,
    library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
    let mut old_path_materialized = extract_materialized_path(&location, old_path.as_ref())?
        .to_str()
        .expect("Found non-UTF-8 path")
        .to_string();
    let new_path_materialized = extract_materialized_path(&location, new_path.as_ref())?;
    let mut new_path_materialized_str = new_path_materialized
        .to_str()
        .expect("Found non-UTF-8 path")
        .to_string();
    if let Some(file_path) =
        get_existing_file_or_directory(&location, old_path, library_ctx).await?
    {
        // If the renamed path is a directory, we have to update every successor
        if file_path.is_dir {
            // Directory materialized paths carry a trailing '/'; normalize both
            // sides so the textual REPLACE below matches whole path prefixes.
            if !old_path_materialized.ends_with('/') {
                old_path_materialized += "/";
            }
            if !new_path_materialized_str.ends_with('/') {
                new_path_materialized_str += "/";
            }
            let updated = library_ctx
                .db
                ._execute_raw(
                    raw!(
                        "UPDATE file_path SET materialized_path = REPLACE(materialized_path, {}, {}) WHERE location_id = {}",
                        PrismaValue::String(old_path_materialized),
                        PrismaValue::String(new_path_materialized_str.clone()),
                        PrismaValue::Int(location.id as i64)
                    )
                )
                .exec()
                .await?;
            trace!("Updated {updated} file_paths");
        }
        library_ctx
            .db
            .file_path()
            .update(
                file_path::location_id_id(file_path.location_id, file_path.id),
                vec![
                    file_path::materialized_path::set(new_path_materialized_str),
                    file_path::name::set(
                        new_path_materialized
                            .file_stem()
                            .unwrap()
                            .to_str()
                            .expect("Found non-UTF-8 path")
                            .to_string(),
                    ),
                    file_path::extension::set(
                        new_path_materialized
                            .extension()
                            .map(|s| s.to_str().expect("Found non-UTF-8 path").to_string()),
                    ),
                ],
            )
            .exec()
            .await?;
        invalidate_query!(library_ctx, "locations.getExplorerData");
    }
    Ok(())
}
pub(super) async fn remove_event(
location: indexer_job_location::Data,
event: Event,
remove_kind: RemoveKind,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
trace!("removed {remove_kind:#?}");
// if it doesn't either way, then we don't care
if let Some(file_path) =
get_existing_file_or_directory(&location, &event.paths[0], library_ctx).await?
{
// check file still exists on disk
match fs::metadata(&event.paths[0]).await {
Ok(_) => {
todo!("file has changed in some way, re-identify it")
}
Err(e) if e.kind() == ErrorKind::NotFound => {
// if is doesn't, we can remove it safely from our db
if file_path.is_dir {
delete_directory(library_ctx, location.id, Some(file_path.materialized_path))
.await?;
} else {
library_ctx
.db
.file_path()
.delete(file_path::location_id_id(location.id, file_path.id))
.exec()
.await?;
if let Some(object_id) = file_path.object_id {
library_ctx
.db
.object()
.delete_many(vec![
object::id::equals(object_id),
// https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#none
object::file_paths::none(vec![]),
])
.exec()
.await?;
}
}
}
Err(e) => return Err(e.into()),
}
invalidate_query!(library_ctx, "locations.getExplorerData");
}
Ok(())
}
/// Converts an absolute `path` into a path relative to the location root
/// (the "materialized path" form stored in the database).
fn extract_materialized_path(
    location: &indexer_job_location::Data,
    path: impl AsRef<Path>,
) -> Result<PathBuf, LocationManagerError> {
    let local_path = location
        .local_path
        .as_ref()
        .ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?;

    match subtract_location_path(local_path, &path) {
        Some(materialized) => Ok(materialized),
        // `path` is not inside this location's root.
        None => Err(LocationManagerError::UnableToExtractMaterializedPath(
            location.id,
            path.as_ref().to_path_buf(),
        )),
    }
}
/// Fetches the `file_path` record matching `path` inside `location`, if any,
/// including its related `Object` (needed by callers for orphan checks).
///
/// The lookup key is the materialized path relative to the location root;
/// directory materialized paths are stored with a trailing `/`, so `is_dir`
/// controls whether that suffix is appended before querying.
async fn get_existing_file_path(
    location: &indexer_job_location::Data,
    path: impl AsRef<Path>,
    is_dir: bool,
    library_ctx: &LibraryContext,
) -> Result<Option<file_path_with_object::Data>, LocationManagerError> {
    let mut materialized_path = extract_materialized_path(location, path)?
        .to_str()
        .expect("Found non-UTF-8 path")
        .to_string();
    if is_dir && !materialized_path.ends_with('/') {
        materialized_path += "/";
    }

    library_ctx
        .db
        .file_path()
        .find_first(vec![
            // materialized paths are only unique within a single location, so
            // the query must be constrained by location_id as well (matching
            // what `get_parent_dir` does) to avoid picking up a same-named
            // path from another location
            file_path::location_id::equals(location.id),
            file_path::materialized_path::equals(materialized_path),
        ])
        // include object for orphan check
        .include(file_path_with_object::include())
        .exec()
        .await
        .map_err(Into::into)
}
/// Looks up `path` in the database, first as a file and — failing that — as a
/// directory (directory materialized paths carry a trailing `/`).
async fn get_existing_file_or_directory(
    location: &indexer_job_location::Data,
    path: impl AsRef<Path>,
    library_ctx: &LibraryContext,
) -> Result<Option<file_path_with_object::Data>, LocationManagerError> {
    let path = path.as_ref();

    match get_existing_file_path(location, path, false, library_ctx).await? {
        Some(found_file) => Ok(Some(found_file)),
        // Not registered as a file; it may be a directory instead
        None => get_existing_file_path(location, path, true, library_ctx).await,
    }
}
/// Fetches the `file_path` row of the parent directory of `path` within the
/// given location, if one exists in the database.
async fn get_parent_dir(
    location_id: LocationId,
    path: impl AsRef<Path>,
    library_ctx: &LibraryContext,
) -> Result<Option<file_path::Data>, LocationManagerError> {
    // Each location has an "/" root `materialized_path`, which is used when
    // `path` has no parent at all
    let parent = path.as_ref().parent().unwrap_or_else(|| Path::new("/"));

    let mut parent_path_str = parent
        .to_str()
        .expect("Found non-UTF-8 path")
        .to_string();

    // Directory materialized paths always end with '/'
    if !parent_path_str.ends_with('/') {
        parent_path_str.push('/');
    }

    library_ctx
        .db
        .file_path()
        .find_first(vec![
            file_path::location_id::equals(location_id),
            file_path::materialized_path::equals(parent_path_str),
        ])
        .exec()
        .await
        .map_err(Into::into)
}
async fn generate_thumbnail(
extension: &str,
cas_id: &str,
file_path: impl AsRef<Path>,
library_ctx: &LibraryContext,
) {
let file_path = file_path.as_ref();
let output_path = library_ctx
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(cas_id)
.with_extension("webp");
if let Ok(extension) = ImageExtension::from_str(extension) {
if can_generate_thumbnail_for_image(&extension) {
if let Err(e) = generate_image_thumbnail(file_path, &output_path).await {
error!("Failed to image thumbnail on location manager: {e:#?}");
}
}
}
#[cfg(feature = "ffmpeg")]
{
use crate::object::preview::{can_generate_thumbnail_for_video, generate_video_thumbnail};
use sd_file_ext::extensions::VideoExtension;
if let Ok(extension) = VideoExtension::from_str(extension) {
if can_generate_thumbnail_for_video(&extension) {
if let Err(e) = generate_video_thumbnail(file_path, &output_path).await {
error!("Failed to video thumbnail on location manager: {e:#?}");
}
}
}
}
}

View File

@@ -0,0 +1,84 @@
use crate::{
library::LibraryContext,
location::{indexer::indexer_job::indexer_job_location, manager::LocationManagerError},
};
use async_trait::async_trait;
use notify::{
event::{CreateKind, ModifyKind, RenameMode},
Event, EventKind,
};
use tokio::fs;
use tracing::{trace, warn};
use super::{
utils::{create_dir, create_file, remove_event, rename, update_file},
EventHandler,
};
/// Event handler for Windows, where `notify` reports generic Create/Modify
/// events and rename halves separately; related events must be paired up
/// manually across calls.
#[derive(Debug, Default)]
pub(super) struct WindowsEventHandler {
    // Holds a rename "From" event until its matching "To" event arrives
    rename_stack: Option<Event>,
    // Holds a file-create event until the follow-up modify event signals the
    // file's contents have been written
    create_file_stack: Option<Event>,
}
#[async_trait]
impl EventHandler for WindowsEventHandler {
    fn new() -> Self
    where
        Self: Sized,
    {
        Default::default()
    }

    /// Dispatches a raw Windows filesystem event to the appropriate database
    /// update routine, pairing up split create/modify and rename From/To
    /// events via the handler's internal stacks.
    async fn handle_event(
        &mut self,
        location: indexer_job_location::Data,
        library_ctx: &LibraryContext,
        event: Event,
    ) -> Result<(), LocationManagerError> {
        trace!("Received Windows event: {:#?}", event);

        match event.kind {
            EventKind::Create(CreateKind::Any) => {
                let metadata = fs::metadata(&event.paths[0]).await?;
                if metadata.is_file() {
                    // Windows emits a Modify right after Create once the file
                    // contents are written; defer handling until then
                    self.create_file_stack = Some(event);
                } else {
                    create_dir(location, event, library_ctx.clone()).await?;
                }
            }
            EventKind::Modify(ModifyKind::Any) => {
                let metadata = fs::metadata(&event.paths[0]).await?;
                if metadata.is_file() {
                    if let Some(create_file_event) = self.create_file_stack.take() {
                        create_file(location, create_file_event, library_ctx.clone()).await?;
                    } else {
                        update_file(location, event, library_ctx).await?;
                    }
                } else {
                    warn!("Unexpected Windows modify event on a directory");
                }
            }
            EventKind::Modify(ModifyKind::Name(RenameMode::From)) => {
                self.rename_stack = Some(event);
            }
            EventKind::Modify(ModifyKind::Name(RenameMode::To)) => {
                // A "To" should always be preceded by a "From"; if the pair is
                // broken don't panic the watcher task — log and skip instead
                // (previously this was an `expect`, which would take down the
                // whole event loop on a stray rename event)
                if let Some(from_event) = self.rename_stack.take() {
                    rename(&event.paths[0], &from_event.paths[0], location, library_ctx).await?;
                } else {
                    warn!(
                        "Unexpectedly missing rename \"From\" event to pair with: {:#?}",
                        event
                    );
                }
            }
            EventKind::Remove(remove_kind) => {
                remove_event(location, event, remove_kind, library_ctx).await?;
            }
            other_event_kind => {
                trace!("Other Windows event that we don't handle for now: {other_event_kind:#?}");
            }
        }

        Ok(())
    }
}

View File

@@ -0,0 +1,229 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::{fs, io};
use uuid::Uuid;
static SPACEDRIVE_LOCATION_METADATA_FILE: &str = ".spacedrive";
pub(super) type LibraryId = Uuid;
pub(super) type LocationPubId = Uuid;
/// Per-library metadata about a single location, as stored inside the
/// `.spacedrive` metadata file.
#[derive(Serialize, Deserialize, Default, Debug)]
struct LocationMetadata {
    // Public id of the location row in that library's database
    pub_id: LocationPubId,
    name: String,
    // Absolute path of the location root at the time it was (re)linked
    path: PathBuf,
    created_at: DateTime<Utc>,
    updated_at: DateTime<Utc>,
}
/// On-disk contents of a `.spacedrive` metadata file: one entry per library
/// that has this directory registered as a location, plus file-level
/// timestamps.
#[derive(Serialize, Deserialize, Default, Debug)]
struct SpacedriveLocationMetadata {
    libraries: HashMap<LibraryId, LocationMetadata>,
    created_at: DateTime<Utc>,
    updated_at: DateTime<Utc>,
}
/// Handle to a location's `.spacedrive` metadata file: the parsed metadata
/// plus the path it is (re)written to.
pub(super) struct SpacedriveLocationMetadataFile {
    // Full path of the `.spacedrive` file itself
    path: PathBuf,
    // In-memory copy of the file's JSON contents
    metadata: SpacedriveLocationMetadata,
}
impl SpacedriveLocationMetadataFile {
pub(super) async fn try_load(
location_path: impl AsRef<Path>,
) -> Result<Option<Self>, LocationMetadataError> {
let metadata_file_name = location_path
.as_ref()
.join(SPACEDRIVE_LOCATION_METADATA_FILE);
match fs::read(&metadata_file_name).await {
Ok(data) => Ok(Some(Self {
path: metadata_file_name,
metadata: serde_json::from_slice(&data).map_err(|e| {
LocationMetadataError::Deserialize(e, location_path.as_ref().to_path_buf())
})?,
})),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(None),
Err(e) => Err(LocationMetadataError::Read(
e,
location_path.as_ref().to_path_buf(),
)),
}
}
pub(super) async fn create_and_save(
library_id: LibraryId,
location_pub_id: Uuid,
location_path: impl AsRef<Path>,
location_name: String,
) -> Result<(), LocationMetadataError> {
Self {
path: location_path
.as_ref()
.join(SPACEDRIVE_LOCATION_METADATA_FILE),
metadata: SpacedriveLocationMetadata {
libraries: [(
library_id,
LocationMetadata {
pub_id: location_pub_id,
name: location_name,
path: location_path.as_ref().to_path_buf(),
created_at: Utc::now(),
updated_at: Utc::now(),
},
)]
.into_iter()
.collect(),
created_at: Utc::now(),
updated_at: Utc::now(),
},
}
.write_metadata()
.await
}
pub(super) async fn relink(
&mut self,
library_id: LibraryId,
location_path: impl AsRef<Path>,
) -> Result<(), LocationMetadataError> {
let location_metadata = self
.metadata
.libraries
.get_mut(&library_id)
.ok_or(LocationMetadataError::LibraryNotFound(library_id))?;
let new_path = location_path.as_ref().to_path_buf();
if location_metadata.path == new_path {
return Err(LocationMetadataError::RelinkSamePath(new_path));
}
location_metadata.path = new_path;
location_metadata.updated_at = Utc::now();
self.path = location_path
.as_ref()
.join(SPACEDRIVE_LOCATION_METADATA_FILE);
self.write_metadata().await
}
pub(super) async fn update(
&mut self,
library_id: LibraryId,
location_name: String,
) -> Result<(), LocationMetadataError> {
let location_metadata = self
.metadata
.libraries
.get_mut(&library_id)
.ok_or(LocationMetadataError::LibraryNotFound(library_id))?;
location_metadata.name = location_name;
location_metadata.updated_at = Utc::now();
self.write_metadata().await
}
pub(super) async fn add_library(
&mut self,
library_id: LibraryId,
location_pub_id: Uuid,
location_path: impl AsRef<Path>,
location_name: String,
) -> Result<(), LocationMetadataError> {
self.metadata.libraries.insert(
library_id,
LocationMetadata {
pub_id: location_pub_id,
name: location_name,
path: location_path.as_ref().to_path_buf(),
created_at: Utc::now(),
updated_at: Utc::now(),
},
);
self.metadata.updated_at = Utc::now();
self.write_metadata().await
}
pub(super) fn has_library(&self, library_id: LibraryId) -> bool {
self.metadata.libraries.contains_key(&library_id)
}
pub(super) fn location_path(
&self,
library_id: LibraryId,
) -> Result<&Path, LocationMetadataError> {
self.metadata
.libraries
.get(&library_id)
.map(|l| l.path.as_path())
.ok_or(LocationMetadataError::LibraryNotFound(library_id))
}
pub(super) async fn remove_library(
&mut self,
library_id: LibraryId,
) -> Result<(), LocationMetadataError> {
self.metadata
.libraries
.remove(&library_id)
.ok_or(LocationMetadataError::LibraryNotFound(library_id))?;
self.metadata.updated_at = Utc::now();
if !self.metadata.libraries.is_empty() {
self.write_metadata().await
} else {
fs::remove_file(&self.path)
.await
.map_err(|e| LocationMetadataError::Delete(e, self.path.clone()))
}
}
pub(super) fn location_pub_id(
&self,
library_id: LibraryId,
) -> Result<Uuid, LocationMetadataError> {
self.metadata
.libraries
.get(&library_id)
.ok_or(LocationMetadataError::LibraryNotFound(library_id))
.map(|m| m.pub_id)
}
async fn write_metadata(&self) -> Result<(), LocationMetadataError> {
fs::write(
&self.path,
serde_json::to_vec(&self.metadata)
.map_err(|e| LocationMetadataError::Serialize(e, self.path.clone()))?,
)
.await
.map_err(|e| LocationMetadataError::Write(e, self.path.clone()))
}
}
/// Errors produced while loading, mutating or persisting a location's
/// `.spacedrive` metadata file.
#[derive(Error, Debug)]
pub enum LocationMetadataError {
    // The metadata file has no entry for the given library id
    #[error("Library not found: {0}")]
    LibraryNotFound(LibraryId),
    #[error("Failed to read location metadata file (path: {1:?}); (error: {0:?})")]
    Read(io::Error, PathBuf),
    #[error("Failed to delete location metadata file (path: {1:?}); (error: {0:?})")]
    Delete(io::Error, PathBuf),
    #[error("Failed to serialize metadata file for location (at path: {1:?}); (error: {0:?})")]
    Serialize(serde_json::Error, PathBuf),
    #[error("Failed to write location metadata file (path: {1:?}); (error: {0:?})")]
    Write(io::Error, PathBuf),
    #[error("Failed to deserialize metadata file for location (at path: {1:?}); (error: {0:?})")]
    Deserialize(serde_json::Error, PathBuf),
    // Carries the offending path even though it isn't interpolated in the message
    #[error("Failed to relink, as the new location path is the same as the old path")]
    RelinkSamePath(PathBuf),
}

View File

@@ -3,32 +3,34 @@ use crate::{
job::Job,
library::LibraryContext,
object::{
identifier_job::{FileIdentifierJob, FileIdentifierJobInit},
identifier_job::full_identifier_job::{FullFileIdentifierJob, FullFileIdentifierJobInit},
preview::{ThumbnailJob, ThumbnailJobInit},
validation::validator_job::{ObjectValidatorJob, ObjectValidatorJobInit},
},
prisma::{indexer_rules_in_location, location, node},
prisma::{file_path, indexer_rules_in_location, location, node, object},
};
use rspc::Type;
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, path::PathBuf};
use tokio::{
fs::{metadata, File},
io::AsyncWriteExt,
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
use prisma_client_rust::QueryError;
use rspc::Type;
use serde::Deserialize;
use tokio::{fs, io};
use tracing::{debug, info};
use uuid::Uuid;
mod error;
pub mod file_path_helper;
pub mod indexer;
mod manager;
mod metadata;
pub use error::LocationError;
use indexer::indexer_job::{IndexerJob, IndexerJobInit};
use self::indexer::indexer_job::indexer_job_location;
static DOTFILE_NAME: &str = ".spacedrive";
use indexer::indexer_job::{indexer_job_location, IndexerJob, IndexerJobInit};
pub use manager::{LocationManager, LocationManagerError};
use metadata::SpacedriveLocationMetadataFile;
/// `LocationCreateArgs` is the argument received from the client using `rspc` to create a new location.
/// It has the actual path and a vector of indexer rules ids, to create many-to-many relationships
@@ -44,36 +46,36 @@ impl LocationCreateArgs {
self,
ctx: &LibraryContext,
) -> Result<indexer_job_location::Data, LocationError> {
// check if we have access to this location
if !self.path.try_exists().unwrap() {
return Err(LocationError::PathNotFound(self.path));
}
let path_metadata = metadata(&self.path)
.await
.map_err(|e| LocationError::DotfileReadFailure(e, self.path.clone()))?;
let path_metadata = match fs::metadata(&self.path).await {
Ok(metadata) => metadata,
Err(e) if e.kind() == io::ErrorKind::NotFound => {
return Err(LocationError::PathNotFound(self.path))
}
Err(e) => {
return Err(LocationError::LocationPathFilesystemMetadataAccess(
e, self.path,
));
}
};
if path_metadata.permissions().readonly() {
return Err(LocationError::ReadonlyDotFileLocationFailure(self.path));
return Err(LocationError::ReadonlyLocationFailure(self.path));
}
if !path_metadata.is_dir() {
return Err(LocationError::NotDirectory(self.path));
}
// check if the location already exists
let _location_exists = ctx
.db
.location()
.find_first(vec![location::local_path::equals(Some(
self.path.to_string_lossy().to_string(),
))])
.exec()
.await?
.is_some();
if _location_exists {
return Err(LocationError::LocationAlreadyExists(self.path));
if let Some(metadata) = SpacedriveLocationMetadataFile::try_load(&self.path).await? {
return if metadata.has_library(ctx.id) {
Err(LocationError::NeedRelink {
// SAFETY: This unwrap is ok as we checked that we have this library_id
old_path: metadata.location_path(ctx.id).unwrap().to_path_buf(),
new_path: self.path,
})
} else {
Err(LocationError::AddLibraryToMetadata(self.path))
};
}
debug!(
@@ -82,54 +84,61 @@ impl LocationCreateArgs {
);
let uuid = Uuid::new_v4();
let mut location = ctx
.db
.location()
.create(
uuid.as_bytes().to_vec(),
node::id::equals(ctx.node_local_id),
vec![
location::name::set(Some(
self.path.file_name().unwrap().to_str().unwrap().to_string(),
)),
location::is_online::set(true),
location::local_path::set(Some(self.path.to_string_lossy().to_string())),
],
)
.include(indexer_job_location::include())
.exec()
.await?;
let location = create_location(ctx, uuid, &self.path, &self.indexer_rules_ids).await?;
info!("Created location: {:?}", location);
// Write a location metadata on a .spacedrive file
SpacedriveLocationMetadataFile::create_and_save(
ctx.id,
uuid,
&self.path,
location.name.as_ref().unwrap().clone(),
)
.await?;
if !self.indexer_rules_ids.is_empty() {
link_location_and_indexer_rules(ctx, location.id, &self.indexer_rules_ids).await?;
info!("Created location: {location:?}");
Ok(location)
}
pub async fn add_library(
self,
ctx: &LibraryContext,
) -> Result<indexer_job_location::Data, LocationError> {
let mut metadata = SpacedriveLocationMetadataFile::try_load(&self.path)
.await?
.ok_or_else(|| LocationError::MetadataNotFound(self.path.clone()))?;
if metadata.has_library(ctx.id) {
return Err(LocationError::NeedRelink {
// SAFETY: This unwrap is ok as we checked that we have this library_id
old_path: metadata.location_path(ctx.id).unwrap().to_path_buf(),
new_path: self.path,
});
}
// Updating our location variable to include information about the indexer rules
location = fetch_location(ctx, location.id)
.include(indexer_job_location::include())
.exec()
.await?
.ok_or(LocationError::IdNotFound(location.id))?;
debug!(
"Trying to add a new library (library_id = {}) to an already existing location '{}'",
ctx.id,
self.path.display()
);
// write a file called .spacedrive to path containing the location id in JSON format
let mut dotfile = File::create(self.path.join(DOTFILE_NAME))
.await
.map_err(|e| LocationError::DotfileWriteFailure(e, self.path.clone()))?;
let uuid = Uuid::new_v4();
let json_bytes = serde_json::to_vec(&DotSpacedrive {
location_uuid: uuid,
library_uuid: ctx.id,
})
.map_err(|e| LocationError::DotfileSerializeFailure(e, self.path.clone()))?;
let location = create_location(ctx, uuid, &self.path, &self.indexer_rules_ids).await?;
dotfile
.write_all(&json_bytes)
.await
.map_err(|e| LocationError::DotfileWriteFailure(e, self.path))?;
metadata
.add_library(
ctx.id,
uuid,
&self.path,
location.name.as_ref().unwrap().clone(),
)
.await?;
invalidate_query!(ctx, "locations.list");
info!(
"Added library (library_id = {}) to location: {location:?}",
ctx.id
);
Ok(location)
}
@@ -156,15 +165,23 @@ impl LocationUpdateArgs {
.await?
.ok_or(LocationError::IdNotFound(self.id))?;
if location.name != self.name {
if self.name.is_some() && location.name != self.name {
ctx.db
.location()
.update(
location::id::equals(self.id),
vec![location::name::set(self.name)],
vec![location::name::set(self.name.clone())],
)
.exec()
.await?;
if let Some(ref local_path) = location.local_path {
if let Some(mut metadata) =
SpacedriveLocationMetadataFile::try_load(local_path).await?
{
metadata.update(ctx.id, self.name.unwrap().clone()).await?;
}
}
}
let current_rules_ids = location
@@ -205,25 +222,6 @@ impl LocationUpdateArgs {
}
}
#[derive(Serialize, Deserialize, Default)]
pub struct DotSpacedrive {
pub location_uuid: Uuid,
pub library_uuid: Uuid,
}
// checks to see if a location is:
// - accessible on from the local filesystem
// - already exists in the database
// pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError> {
// let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME))
// {
// Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
// Err(e) => return Err(LocationError::DotfileReadFailure(e)),
// };
// Ok(dotfile)
// }
pub fn fetch_location(ctx: &LibraryContext, location_id: i32) -> location::FindUnique {
ctx.db
.location()
@@ -257,38 +255,201 @@ pub async fn scan_location(
return Err(LocationError::MissingLocalPath(location.id));
};
let location_id = location.id;
ctx.queue_job(Job::new(
FileIdentifierJobInit {
FullFileIdentifierJobInit {
location_id: location.id,
sub_path: None,
},
Box::new(FileIdentifierJob {}),
))
.await;
ctx.spawn_job(Job::new(
IndexerJobInit { location },
Box::new(IndexerJob {}),
))
.await;
ctx.queue_job(Job::new(
ThumbnailJobInit {
location_id,
path: PathBuf::new(),
background: true,
},
Box::new(ThumbnailJob {}),
))
.await;
ctx.queue_job(Job::new(
ObjectValidatorJobInit {
location_id,
path: PathBuf::new(),
background: true,
},
Box::new(ObjectValidatorJob {}),
FullFileIdentifierJob {},
))
.await;
ctx.queue_job(Job::new(
ThumbnailJobInit {
location_id: location.id,
root_path: PathBuf::new(),
background: true,
},
ThumbnailJob {},
))
.await;
ctx.spawn_job(Job::new(IndexerJobInit { location }, IndexerJob {}))
.await;
Ok(())
}
/// Re-attaches an existing location directory (identified by its `.spacedrive`
/// metadata file) to this library: the metadata file is rewritten with the new
/// path, and the matching database row gets its `local_path` updated and is
/// marked online again.
pub async fn relink_location(
    ctx: &LibraryContext,
    location_path: impl AsRef<Path>,
) -> Result<(), LocationError> {
    let location_path = location_path.as_ref();

    let mut metadata = SpacedriveLocationMetadataFile::try_load(location_path)
        .await?
        .ok_or_else(|| LocationError::MissingMetadataFile(location_path.to_path_buf()))?;
    metadata.relink(ctx.id, location_path).await?;

    let pub_id = metadata.location_pub_id(ctx.id)?.as_ref().to_vec();
    let local_path_str = location_path
        .to_str()
        .expect("Found non-UTF-8 path")
        .to_string();

    ctx.db
        .location()
        .update(
            location::pub_id::equals(pub_id),
            vec![
                location::local_path::set(Some(local_path_str)),
                location::is_online::set(true),
            ],
        )
        .exec()
        .await?;

    Ok(())
}
/// Creates a new location row for `location_path` in the library's database,
/// links it to the given indexer rules, registers it with the location manager
/// (to start watching it) and invalidates the locations list query.
///
/// Returns the freshly created location, re-fetched so it includes its
/// indexer-rule relations.
async fn create_location(
    ctx: &LibraryContext,
    location_pub_id: Uuid,
    location_path: impl AsRef<Path>,
    indexer_rules_ids: &[i32],
) -> Result<indexer_job_location::Data, LocationError> {
    // The directory name doubles as the location's display name
    let location_name = location_path
        .as_ref()
        .file_name()
        .unwrap()
        .to_str()
        .unwrap()
        .to_string();

    let mut location = ctx
        .db
        .location()
        .create(
            location_pub_id.as_bytes().to_vec(),
            node::id::equals(ctx.node_local_id),
            vec![
                // moved instead of cloned: the name isn't used again afterwards
                location::name::set(Some(location_name)),
                location::is_online::set(true),
                location::local_path::set(Some(
                    location_path
                        .as_ref()
                        .to_str()
                        .expect("Found non-UTF-8 path")
                        .to_string(),
                )),
            ],
        )
        .include(indexer_job_location::include())
        .exec()
        .await?;

    if !indexer_rules_ids.is_empty() {
        link_location_and_indexer_rules(ctx, location.id, indexer_rules_ids).await?;
    }

    // Updating our location variable to include information about the indexer rules
    location = fetch_location(ctx, location.id)
        .include(indexer_job_location::include())
        .exec()
        .await?
        .ok_or(LocationError::IdNotFound(location.id))?;

    invalidate_query!(ctx, "locations.list");

    // Start watching the new location for filesystem events
    ctx.location_manager().add(location.id, ctx.clone()).await?;

    Ok(location)
}
/// Deletes a location and everything attached to it: stops the filesystem
/// watcher, removes all of its `file_path` rows (and orphaned objects),
/// detaches its indexer rules, deletes the location row itself, and finally
/// removes this library's entry from the on-disk `.spacedrive` metadata file
/// when one can be read.
///
/// NOTE: the order matters — `file_path` rows reference the location and must
/// go before the location row is deleted.
pub async fn delete_location(ctx: &LibraryContext, location_id: i32) -> Result<(), LocationError> {
    // Stop watching before touching the database
    ctx.location_manager()
        .remove(location_id, ctx.clone())
        .await?;

    // Drops every file_path under the location along with orphaned objects
    delete_directory(ctx, location_id, None).await?;

    ctx.db
        .indexer_rules_in_location()
        .delete_many(vec![indexer_rules_in_location::location_id::equals(
            location_id,
        )])
        .exec()
        .await?;

    let location = ctx
        .db
        .location()
        .delete(location::id::equals(location_id))
        .exec()
        .await?;

    // Best-effort: a missing or unreadable metadata file is deliberately
    // ignored here, since the database side is already cleaned up
    if let Some(local_path) = location.local_path {
        if let Ok(Some(mut metadata)) = SpacedriveLocationMetadataFile::try_load(&local_path).await
        {
            metadata.remove_library(ctx.id).await?;
        }
    }

    info!("Location {} deleted", location_id);
    invalidate_query!(ctx, "locations.list");

    Ok(())
}
file_path::select!(file_path_object_id_only { object_id });
/// Will delete a directory recursively with Objects if left as orphans
/// this function is used to delete a location and when ingesting directory deletion events
pub async fn delete_directory(
    ctx: &LibraryContext,
    location_id: i32,
    parent_materialized_path: Option<String>,
) -> Result<(), QueryError> {
    // When a parent path is given only its subtree is affected; otherwise
    // every file_path of the location is
    let children_params = match parent_materialized_path {
        Some(parent) => vec![
            file_path::location_id::equals(location_id),
            file_path::materialized_path::starts_with(parent),
        ],
        None => vec![file_path::location_id::equals(location_id)],
    };

    // Collect the object ids referenced by the file_paths about to be deleted
    let object_ids = ctx
        .db
        .file_path()
        .find_many(children_params.clone())
        .select(file_path_object_id_only::select())
        .exec()
        .await?
        .into_iter()
        .filter_map(|file_path| file_path.object_id)
        .collect();

    // WARNING: file_paths must be deleted before objects, as they reference objects through object_id
    ctx.db
        .file_path()
        .delete_many(children_params)
        .exec()
        .await?;

    // Only objects left with no remaining file_path (orphans) are removed
    ctx.db
        .object()
        .delete_many(vec![
            object::id::in_vec(object_ids),
            // https://www.prisma.io/docs/reference/api-reference/prisma-client-reference#none
            object::file_paths::none(vec![]),
        ])
        .exec()
        .await?;

    invalidate_query!(ctx, "locations.getExplorerData");

    Ok(())
}

View File

@@ -1,5 +1,5 @@
use blake3::Hasher;
use std::path::PathBuf;
use std::path::Path;
use tokio::{
fs::File,
io::{self, AsyncReadExt, AsyncSeekExt, SeekFrom},
@@ -17,7 +17,7 @@ async fn read_at(file: &mut File, offset: u64, size: u64) -> Result<Vec<u8>, io:
Ok(buf)
}
pub async fn generate_cas_id(path: PathBuf, size: u64) -> Result<String, io::Error> {
pub async fn generate_cas_id(path: impl AsRef<Path>, size: u64) -> Result<String, io::Error> {
// open file reference
let mut file = File::open(path).await?;
@@ -43,6 +43,7 @@ pub async fn generate_cas_id(path: PathBuf, size: u64) -> Result<String, io::Err
}
let hex = hasher.finalize().to_hex();
Ok(hex.to_string())
let mut id = hex.to_string();
id.truncate(16);
Ok(id)
}

View File

@@ -13,7 +13,7 @@ pub struct FileDecryptorJob;
pub struct FileDecryptorJobState {}
// decrypt could have an option to restore metadata (and another specific option for file name? - would turn "output file" into "output path" in the UI)
#[derive(Serialize, Deserialize, Debug, Type)]
#[derive(Serialize, Deserialize, Debug, Type, Hash)]
pub struct FileDecryptorJobInit {
pub location_id: i32,
pub object_id: i32,
@@ -40,16 +40,11 @@ impl StatefulJob for FileDecryptorJob {
JOB_NAME
}
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
// enumerate files to decrypt
// populate the steps with them (local file paths)
let library = ctx.library_ctx();
let location = library
let location = ctx
.library_ctx
.db
.location()
.find_unique(location::id::equals(state.init.location_id))
@@ -63,7 +58,8 @@ impl StatefulJob for FileDecryptorJob {
.map(PathBuf::from)
.expect("critical error: issue getting local path as pathbuf");
let item = library
let item = ctx
.library_ctx
.db
.file_path()
.find_first(vec![file_path::object_id::equals(Some(
@@ -91,7 +87,7 @@ impl StatefulJob for FileDecryptorJob {
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
// handle overwriting checks, and making sure there's enough available space
@@ -133,7 +129,7 @@ impl StatefulJob for FileDecryptorJob {
let index = header.find_key_index(password.clone())?;
// inherit the encryption algorithm from the keyslot
ctx.library_ctx().key_manager.add_to_keystore(
ctx.library_ctx.key_manager.add_to_keystore(
password.clone(),
header.algorithm,
header.keyslots[index].hashing_algorithm,
@@ -150,7 +146,7 @@ impl StatefulJob for FileDecryptorJob {
)));
}
} else {
let keys = ctx.library_ctx().key_manager.enumerate_hashed_keys();
let keys = ctx.library_ctx.key_manager.enumerate_hashed_keys();
header.decrypt_master_key_from_prehashed(keys)?
};
@@ -169,11 +165,7 @@ impl StatefulJob for FileDecryptorJob {
Ok(())
}
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
// mark job as successful
Ok(Some(serde_json::to_value(&state.init)?))
}

View File

@@ -26,7 +26,7 @@ enum ObjectType {
#[derive(Serialize, Deserialize, Debug)]
pub struct FileEncryptorJobState {}
#[derive(Serialize, Deserialize, Type)]
#[derive(Serialize, Deserialize, Type, Hash)]
pub struct FileEncryptorJobInit {
pub location_id: i32,
pub object_id: i32,
@@ -68,16 +68,11 @@ impl StatefulJob for FileEncryptorJob {
JOB_NAME
}
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
// enumerate files to encrypt
// populate the steps with them (local file paths)
let library = ctx.library_ctx();
let location = library
let location = ctx
.library_ctx
.db
.location()
.find_unique(location::id::equals(state.init.location_id))
@@ -91,7 +86,8 @@ impl StatefulJob for FileEncryptorJob {
.map(PathBuf::from)
.expect("critical error: issue getting local path as pathbuf");
let item = library
let item = ctx
.library_ctx
.db
.file_path()
.find_first(vec![file_path::object_id::equals(Some(
@@ -128,7 +124,7 @@ impl StatefulJob for FileEncryptorJob {
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
@@ -137,13 +133,13 @@ impl StatefulJob for FileEncryptorJob {
// handle overwriting checks, and making sure there's enough available space
let user_key = ctx
.library_ctx()
.library_ctx
.key_manager
.access_keymount(state.init.key_uuid)?
.hashed_key;
let user_key_details = ctx
.library_ctx()
.library_ctx
.key_manager
.access_keystore(state.init.key_uuid)?;
@@ -184,7 +180,7 @@ impl StatefulJob for FileEncryptorJob {
if state.init.metadata || state.init.preview_media {
// if any are requested, we can make the query as it'll be used at least once
let object = ctx
.library_ctx()
.library_ctx
.db
.object()
.find_unique(object::id::equals(state.init.object_id))
@@ -240,11 +236,7 @@ impl StatefulJob for FileEncryptorJob {
Ok(())
}
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
// mark job as successful
Ok(Some(serde_json::to_value(&state.init)?))
}

View File

@@ -1,422 +0,0 @@
use crate::{
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
library::LibraryContext,
prisma::{file_path, location, object},
};
use chrono::{DateTime, FixedOffset};
use int_enum::IntEnum;
use prisma_client_rust::{prisma_models::PrismaValue, raw::Raw, Direction};
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use tokio::{fs, io};
use tracing::{error, info};
use super::cas::generate_cas_id;
// we break this job into chunks of 100 to improve performance
static CHUNK_SIZE: usize = 100;
pub const IDENTIFIER_JOB_NAME: &str = "file_identifier";
pub struct FileIdentifierJob {}
// FileIdentifierJobInit takes file_paths without a file_id and uniquely identifies them
// first: generating the cas_id and extracting metadata
// finally: creating unique file records, and linking them to their file_paths
#[derive(Serialize, Deserialize, Clone)]
pub struct FileIdentifierJobInit {
pub location_id: i32,
pub sub_path: Option<PathBuf>, // subpath to start from
}
#[derive(Serialize, Deserialize, Debug)]
pub struct FilePathIdAndLocationIdCursor {
file_path_id: i32,
location_id: i32,
}
impl From<&FilePathIdAndLocationIdCursor> for file_path::UniqueWhereParam {
fn from(cursor: &FilePathIdAndLocationIdCursor) -> Self {
file_path::location_id_id(cursor.location_id, cursor.file_path_id)
}
}
#[derive(Serialize, Deserialize)]
pub struct FileIdentifierJobState {
total_count: usize,
task_count: usize,
location: location::Data,
location_path: PathBuf,
cursor: FilePathIdAndLocationIdCursor,
}
#[async_trait::async_trait]
impl StatefulJob for FileIdentifierJob {
type Init = FileIdentifierJobInit;
type Data = FileIdentifierJobState;
type Step = ();
fn name(&self) -> &'static str {
IDENTIFIER_JOB_NAME
}
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
info!("Identifying orphan Paths...");
let library = ctx.library_ctx();
let location_id = state.init.location_id;
let location = library
.db
.location()
.find_unique(location::id::equals(location_id))
.exec()
.await?
.unwrap();
let location_path = location
.local_path
.as_ref()
.map(PathBuf::from)
.unwrap_or_default();
let total_count = count_orphan_file_paths(&library, state.init.location_id).await?;
info!("Found {} orphan file paths", total_count);
let task_count = (total_count as f64 / CHUNK_SIZE as f64).ceil() as usize;
info!(
"Found {} orphan Paths. Will execute {} tasks...",
total_count, task_count
);
// update job with total task count based on orphan file_paths count
ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
let first_path_id = library
.db
.file_path()
.find_first(orphan_path_filters(location_id, None))
.exec()
.await?
.map(|d| d.id)
.unwrap_or(1);
state.data = Some(FileIdentifierJobState {
total_count,
task_count,
location,
location_path,
cursor: FilePathIdAndLocationIdCursor {
file_path_id: first_path_id,
location_id: state.init.location_id,
},
});
state.steps = (0..task_count).map(|_| ()).collect();
Ok(())
}
/// Processes one chunk of orphan file paths: computes metadata and cas_ids,
/// links paths whose cas_id matches an existing Object, bulk-inserts the
/// remaining new Objects via raw SQL, links those too, then advances the
/// cursor and reports progress.
async fn execute_step(
	&self,
	ctx: WorkerContext,
	state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
	let db = ctx.library_ctx().db;
	// link file_path ids to a CreateObject struct containing unique file data
	let mut chunk: HashMap<i32, CreateObject> = HashMap::new();
	// reverse map: generated cas_id -> file_path id that produced it
	let mut cas_lookup: HashMap<String, i32> = HashMap::new();
	let data = state
		.data
		.as_mut()
		.expect("Critical error: missing data on job state");
	// get chunk of orphans to process
	let file_paths =
		get_orphan_file_paths(&ctx.library_ctx(), &data.cursor, data.location.id).await?;
	// if no file paths found, abort entire job early
	if file_paths.is_empty() {
		return Err(JobError::JobDataNotFound(
			"Expected orphan Paths not returned from database query for this chunk".to_string(),
		));
	}
	info!(
		"Processing {:?} orphan Paths. ({} completed of {})",
		file_paths.len(),
		state.step_number,
		data.task_count
	);
	// analyze each file_path; a metadata failure skips that single path
	// rather than failing the whole chunk
	for file_path in &file_paths {
		// get the cas_id and extract metadata
		match assemble_object_metadata(&data.location_path, file_path).await {
			Ok(object) => {
				let cas_id = object.cas_id.clone();
				// create entry into chunks for created file data
				chunk.insert(file_path.id, object);
				cas_lookup.insert(cas_id, file_path.id);
			}
			Err(e) => {
				error!("Error assembling Object metadata: {:#?}", e);
				continue;
			}
		};
	}
	// find all existing files by cas id
	let generated_cas_ids = chunk.values().map(|c| c.cas_id.clone()).collect();
	let existing_objects = db
		.object()
		.find_many(vec![object::cas_id::in_vec(generated_cas_ids)])
		.exec()
		.await?;
	info!("Found {} existing files", existing_objects.len());
	// link each file_path to its pre-existing Object (one update per match;
	// errors are logged and skipped so the chunk keeps going)
	for existing_object in &existing_objects {
		if let Err(e) = db
			.file_path()
			.update(
				file_path::location_id_id(
					state.init.location_id,
					*cas_lookup.get(&existing_object.cas_id).unwrap(),
				),
				vec![file_path::object_id::set(Some(existing_object.id))],
			)
			.exec()
			.await
		{
			error!("Error updating file_id: {:#?}", e);
		}
	}
	let existing_object_cas_ids = existing_objects
		.iter()
		.map(|object| object.cas_id.clone())
		.collect::<HashSet<_>>();
	// extract objects that don't already exist in the database
	let new_objects = chunk
		.iter()
		.map(|(_id, create_file)| create_file)
		.filter(|create_file| !existing_object_cas_ids.contains(&create_file.cas_id))
		.collect::<Vec<_>>();
	if !new_objects.is_empty() {
		// assemble prisma values for new unique files
		// NOTE(review): capacity is `len * 3` but FOUR values are pushed per
		// object below — harmless (Vec regrows), just an inaccurate hint.
		let mut values = Vec::with_capacity(new_objects.len() * 3);
		for object in &new_objects {
			values.extend([
				PrismaValue::String(object.cas_id.clone()),
				PrismaValue::Int(object.size_in_bytes),
				PrismaValue::DateTime(object.date_created),
				PrismaValue::Int(object.kind.int_value() as i64),
			]);
		}
		// create new file records with assembled values
		// TODO: Use create_many with skip_duplicates. Waiting on https://github.com/Brendonovich/prisma-client-rust/issues/143
		let created_files: Vec<FileCreated> = db
			._query_raw(Raw::new(
				&format!(
					"INSERT INTO object (cas_id, size_in_bytes, date_created, kind) VALUES {}
					ON CONFLICT (cas_id) DO NOTHING RETURNING id, cas_id",
					vec!["({}, {}, {}, {})"; new_objects.len()].join(",")
				),
				values,
			))
			.exec()
			.await
			.unwrap_or_else(|e| {
				error!("Error inserting files: {:#?}", e);
				Vec::new()
			});
		for created_file in created_files {
			// associate newly created files with their respective file_paths
			// TODO: this is potentially bottle necking the chunk system, individually linking file_path to file, 100 queries per chunk
			// - insert many could work, but I couldn't find a good way to do this in a single SQL query
			if let Err(e) = ctx
				.library_ctx()
				.db
				.file_path()
				.update(
					file_path::location_id_id(
						state.init.location_id,
						*cas_lookup.get(&created_file.cas_id).unwrap(),
					),
					vec![file_path::object_id::set(Some(created_file.id))],
				)
				.exec()
				.await
			{
				info!("Error updating file_id: {:#?}", e);
			}
		}
	}
	// set the step data cursor to the last row of this chunk
	if let Some(last_row) = file_paths.last() {
		data.cursor.file_path_id = last_row.id;
	}
	ctx.progress(vec![
		JobReportUpdate::CompletedTaskCount(state.step_number),
		JobReportUpdate::Message(format!(
			"Processed {} of {} orphan Paths",
			state.step_number * CHUNK_SIZE,
			data.total_count
		)),
	]);
	// let _remaining = count_orphan_file_paths(&ctx.core_ctx, location_id.into()).await?;
	Ok(())
}
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
let data = state
.data
.as_ref()
.expect("critical error: missing data on job state");
info!(
"Finalizing identifier job at {}, total of {} tasks",
data.location_path.display(),
data.task_count
);
Ok(Some(serde_json::to_value(&state.init)?))
}
}
/// Builds the Prisma filters that select "orphan" file paths: non-directory
/// entries in `location_id` with no Object linked yet. When a cursor id is
/// supplied, only rows at or past that id (`id >= file_path_id`) match.
fn orphan_path_filters(location_id: i32, file_path_id: Option<i32>) -> Vec<file_path::WhereParam> {
	let base = vec![
		file_path::object_id::equals(None),
		file_path::is_dir::equals(false),
		file_path::location_id::equals(location_id),
	];
	// Manual inclusive id filter is a workaround for the cursor not working
	// properly.
	match file_path_id {
		Some(id) => base
			.into_iter()
			.chain([file_path::id::gte(id)])
			.collect(),
		None => base,
	}
}
/// Row shape for deserializing raw `COUNT(...)` query results.
/// NOTE(review): appears unused in this module — the count below uses
/// Prisma's typed `.count()` instead; candidate for removal once confirmed
/// no other code depends on it.
#[derive(Deserialize, Serialize, Debug)]
struct CountRes {
	count: Option<usize>,
}
async fn count_orphan_file_paths(
ctx: &LibraryContext,
location_id: i32,
) -> Result<usize, prisma_client_rust::QueryError> {
let files_count = ctx
.db
.file_path()
.count(vec![
file_path::object_id::equals(None),
file_path::is_dir::equals(false),
file_path::location_id::equals(location_id),
])
.exec()
.await?;
// Is this
Ok(files_count as usize)
}
/// Fetches the next `CHUNK_SIZE` orphan file paths at or past the cursor,
/// ordered by id ascending.
///
/// The cursor filter is inclusive (`id >= cursor.file_path_id`). The former
/// `.skip(1)` dropped the very first orphan of the first chunk, because the
/// cursor is seeded with that row's own id. Skipping is unnecessary anyway:
/// rows linked by a previous chunk gain an `object_id` and no longer match
/// the orphan filter, so they are not returned again.
///
/// # Errors
/// Propagates any Prisma query error.
async fn get_orphan_file_paths(
	ctx: &LibraryContext,
	cursor: &FilePathIdAndLocationIdCursor,
	location_id: i32,
) -> Result<Vec<file_path::Data>, prisma_client_rust::QueryError> {
	info!(
		"Querying {} orphan Paths at cursor: {:?}",
		CHUNK_SIZE, cursor
	);
	ctx.db
		.file_path()
		.find_many(orphan_path_filters(location_id, Some(cursor.file_path_id)))
		.order_by(file_path::id::order(Direction::Asc))
		// Prisma's native cursor misbehaved here; kept for reference.
		// .cursor(cursor.into())
		.take(CHUNK_SIZE as i64)
		.exec()
		.await
}
/// Unique file data assembled per orphan path, pending insertion as an
/// `object` row.
#[derive(Deserialize, Serialize, Debug)]
struct CreateObject {
	// Content-addressable id; truncated to 16 chars, empty for directories.
	pub cas_id: String,
	pub size_in_bytes: i64,
	pub date_created: DateTime<FixedOffset>,
	pub kind: ObjectKind,
}
/// Row shape returned by the raw `INSERT ... RETURNING id, cas_id` query.
#[derive(Deserialize, Serialize, Debug)]
struct FileCreated {
	pub id: i32,
	pub cas_id: String,
}
/// Builds a `CreateObject` for `file_path`: resolves the Object kind from the
/// file extension (sniffing file contents when extensions are ambiguous) and
/// computes a 16-char truncated cas_id for files (empty for directories).
///
/// # Errors
/// Returns any I/O error from reading metadata, opening the file, or hashing.
/// (Previously the open `.unwrap()`ed, panicking on unreadable files even
/// though this fn already returns `io::Error`.)
async fn assemble_object_metadata(
	location_path: impl AsRef<Path>,
	file_path: &file_path::Data,
) -> Result<CreateObject, io::Error> {
	let path = location_path
		.as_ref()
		.join(file_path.materialized_path.as_str());
	info!("Reading path: {:?}", path);
	let metadata = fs::metadata(&path).await?;
	// derive Object kind from the extension
	let object_kind: ObjectKind = match path.extension().and_then(|ext| ext.to_str()) {
		Some(ext) => {
			// NOTE(review): blocking `std::fs::File::open` inside an async fn
			// — consider tokio::fs / spawn_blocking; confirm the sync
			// `resolve_conflicting` signature before changing.
			let mut file = std::fs::File::open(&path)?;
			let resolved_ext = Extension::resolve_conflicting(ext, &mut file, true);
			resolved_ext.map(Into::into).unwrap_or(ObjectKind::Unknown)
		}
		None => ObjectKind::Unknown,
	};
	let size = metadata.len();
	// directories carry no content hash
	let cas_id = if !file_path.is_dir {
		let mut ret = generate_cas_id(path, size).await?;
		ret.truncate(16);
		ret
	} else {
		"".to_string()
	};
	Ok(CreateObject {
		cas_id,
		size_in_bytes: size as i64,
		date_created: file_path.date_created,
		kind: object_kind,
	})
}

View File

@@ -0,0 +1,249 @@
use crate::{
invalidate_query,
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
library::LibraryContext,
prisma::{file_path, location},
};
use std::path::PathBuf;
use prisma_client_rust::Direction;
use serde::{Deserialize, Serialize};
use tracing::info;
use super::{identifier_job_step, IdentifierJobError, CHUNK_SIZE};
/// Job registry name for the full (per-location) file identifier job.
pub const FULL_IDENTIFIER_JOB_NAME: &str = "file_identifier";

/// Stateless marker type; all job state lives in `FullFileIdentifierJobState`.
pub struct FullFileIdentifierJob {}
// FileIdentifierJobInit takes file_paths without a file_id and uniquely identifies them
// first: generating the cas_id and extracting metadata
// finally: creating unique file records, and linking them to their file_paths
/// Serializable job parameters; `Hash` lets the job system deduplicate runs.
#[derive(Serialize, Deserialize, Clone, Hash)]
pub struct FullFileIdentifierJobInit {
	pub location_id: i32,
	pub sub_path: Option<PathBuf>, // subpath to start from
}
/// Pagination cursor over the composite (location_id, file_path id) key,
/// advanced chunk-by-chunk as orphan paths are processed.
#[derive(Serialize, Deserialize, Debug)]
struct FilePathIdAndLocationIdCursor {
	file_path_id: i32,
	location_id: i32,
}
// Lets the cursor be used directly anywhere a unique file_path lookup
// (composite location_id + id key) is expected.
impl From<&FilePathIdAndLocationIdCursor> for file_path::UniqueWhereParam {
	fn from(cursor: &FilePathIdAndLocationIdCursor) -> Self {
		file_path::location_id_id(cursor.location_id, cursor.file_path_id)
	}
}
/// Persistent state between job steps: the location being scanned, the
/// chunk cursor, and the running report.
#[derive(Serialize, Deserialize)]
pub struct FullFileIdentifierJobState {
	location: location::Data,
	location_path: PathBuf,
	cursor: FilePathIdAndLocationIdCursor,
	report: FileIdentifierReport,
}
/// Running totals accumulated across steps; serialized as the job's final
/// result payload in `finalize`.
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct FileIdentifierReport {
	location_path: String,
	total_orphan_paths: usize,
	total_objects_created: usize,
	total_objects_linked: usize,
	// NOTE(review): never incremented in the visible code — confirm intent.
	total_objects_ignored: usize,
}
#[async_trait::async_trait]
impl StatefulJob for FullFileIdentifierJob {
	type Init = FullFileIdentifierJobInit;
	type Data = FullFileIdentifierJobState;
	type Step = ();

	fn name(&self) -> &'static str {
		FULL_IDENTIFIER_JOB_NAME
	}

	/// Resolves the location, counts orphan file paths and prepares one unit
	/// step per `CHUNK_SIZE`-sized chunk of work.
	async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
		info!("Identifying orphan File Paths...");
		let location_id = state.init.location_id;
		let location = ctx
			.library_ctx
			.db
			.location()
			.find_unique(location::id::equals(location_id))
			.exec()
			.await?
			.ok_or(IdentifierJobError::MissingLocation(state.init.location_id))?;
		let location_path = location
			.local_path
			.as_ref()
			.map(PathBuf::from)
			.ok_or(IdentifierJobError::LocationLocalPath(location_id))?;
		let orphan_count = count_orphan_file_paths(&ctx.library_ctx, location_id).await?;
		info!("Found {} orphan file paths", orphan_count);
		// One step per chunk, rounding up so a partial final chunk still runs.
		let task_count = (orphan_count as f64 / CHUNK_SIZE as f64).ceil() as usize;
		info!(
			"Found {} orphan Paths. Will execute {} tasks...",
			orphan_count, task_count
		);
		// update job with total task count based on orphan file_paths count
		ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
		// Seed the cursor with the first orphan's id (fall back to 1 when
		// none exist; the first step will then early-finish).
		let first_path_id = ctx
			.library_ctx
			.db
			.file_path()
			.find_first(orphan_path_filters(location_id, None))
			.exec()
			.await?
			.map(|d| d.id)
			.unwrap_or(1);
		state.data = Some(FullFileIdentifierJobState {
			report: FileIdentifierReport {
				location_path: location_path.to_str().unwrap_or("").to_string(),
				total_orphan_paths: orphan_count,
				..Default::default()
			},
			location,
			location_path,
			cursor: FilePathIdAndLocationIdCursor {
				file_path_id: first_path_id,
				location_id: state.init.location_id,
			},
		});
		state.steps = (0..task_count).map(|_| ()).collect();
		Ok(())
	}

	/// Processes one chunk: fetches orphan paths at the cursor, creates or
	/// links Objects via `identifier_job_step`, advances the cursor, and
	/// reports progress.
	async fn execute_step(
		&self,
		ctx: WorkerContext,
		state: &mut JobState<Self>,
	) -> Result<(), JobError> {
		let data = state
			.data
			.as_mut()
			.expect("Critical error: missing data on job state");
		// get chunk of orphans to process
		let file_paths =
			get_orphan_file_paths(&ctx.library_ctx, &data.cursor, data.location.id).await?;
		// if no file paths found, abort entire job early, there is nothing to do
		// if we hit this error, there is something wrong with the data/query
		if file_paths.is_empty() {
			return Err(JobError::EarlyFinish {
				name: self.name().to_string(),
				reason: "Expected orphan Paths not returned from database query for this chunk"
					.to_string(),
			});
		}
		info!(
			"Processing {:?} orphan Paths. ({} completed of {})",
			file_paths.len(),
			state.step_number,
			data.report.total_orphan_paths
		);
		let (total_objects_created, total_objects_linked) = identifier_job_step(
			&ctx.library_ctx,
			state.init.location_id,
			&data.location_path,
			&file_paths,
		)
		.await?;
		data.report.total_objects_created += total_objects_created;
		data.report.total_objects_linked += total_objects_linked;
		// set the step data cursor to the last row of this chunk
		if let Some(last_row) = file_paths.last() {
			data.cursor.file_path_id = last_row.id;
		}
		ctx.progress(vec![
			JobReportUpdate::CompletedTaskCount(state.step_number),
			JobReportUpdate::Message(format!(
				"Processed {} of {} orphan Paths",
				state.step_number * CHUNK_SIZE,
				data.report.total_orphan_paths
			)),
		]);
		// Refresh the explorer so newly identified objects show up live.
		invalidate_query!(ctx.library_ctx, "locations.getExplorerData");
		// let _remaining = count_orphan_file_paths(&ctx.core_ctx, location_id.into()).await?;
		Ok(())
	}

	/// Emits the accumulated `FileIdentifierReport` as the job's result.
	async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
		let data = state
			.data
			.as_ref()
			.expect("critical error: missing data on job state");
		info!("Finalizing identifier job: {:#?}", data.report);
		Ok(Some(serde_json::to_value(&data.report)?))
	}
}
/// Filters selecting orphan file paths: non-directory rows in `location_id`
/// with no linked Object, optionally restricted to ids at or past the cursor.
fn orphan_path_filters(location_id: i32, file_path_id: Option<i32>) -> Vec<file_path::WhereParam> {
	let mut filters = Vec::with_capacity(4);
	filters.push(file_path::object_id::equals(None));
	filters.push(file_path::is_dir::equals(false));
	filters.push(file_path::location_id::equals(location_id));
	// Inclusive id filter stands in for Prisma's cursor, which misbehaves here.
	if let Some(id) = file_path_id {
		filters.push(file_path::id::gte(id));
	}
	filters
}
/// Counts file paths in `location_id` still awaiting identification:
/// non-directory rows with no Object linked.
async fn count_orphan_file_paths(
	ctx: &LibraryContext,
	location_id: i32,
) -> Result<usize, prisma_client_rust::QueryError> {
	let count = ctx
		.db
		.file_path()
		.count(vec![
			file_path::object_id::equals(None),
			file_path::is_dir::equals(false),
			file_path::location_id::equals(location_id),
		])
		.exec()
		.await?;
	Ok(count as usize)
}
/// Fetches the next `CHUNK_SIZE` orphan file paths at or past the cursor,
/// ordered by id ascending. The `gte` filter is inclusive; rows already
/// linked by a previous chunk gain an `object_id` and so no longer match
/// the orphan filter.
async fn get_orphan_file_paths(
	ctx: &LibraryContext,
	cursor: &FilePathIdAndLocationIdCursor,
	location_id: i32,
) -> Result<Vec<file_path::Data>, prisma_client_rust::QueryError> {
	info!(
		"Querying {} orphan Paths at cursor: {:?}",
		CHUNK_SIZE, cursor
	);
	ctx.db
		.file_path()
		.find_many(orphan_path_filters(location_id, Some(cursor.file_path_id)))
		.order_by(file_path::id::order(Direction::Asc))
		// Prisma's native cursor misbehaved here; kept for reference.
		// .cursor(cursor.into())
		.take(CHUNK_SIZE as i64)
		// .skip(1)
		.exec()
		.await
}

View File

@@ -0,0 +1,287 @@
use crate::{
job::JobError,
library::LibraryContext,
object::cas::generate_cas_id,
prisma::{file_path, object},
};
use chrono::{DateTime, FixedOffset};
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use futures::future::join_all;
use int_enum::IntEnum;
use prisma_client_rust::QueryError;
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
use thiserror::Error;
use tokio::{fs, io};
use tracing::{error, info};
pub mod full_identifier_job;
// we break these jobs into chunks of 100 to improve performance
// `const` rather than `static`: a plain compile-time value with no identity
// or interior mutability has no reason to occupy a fixed memory location.
const CHUNK_SIZE: usize = 100;
/// Errors specific to identifier jobs; each variant carries the offending
/// location id or path for diagnostics.
#[derive(Error, Debug)]
pub enum IdentifierJobError {
	#[error("Location not found: <id = '{0}'>")]
	MissingLocation(i32),
	#[error("Root file path not found: <path = '{0}'>")]
	MissingRootFilePath(PathBuf),
	#[error("Location without local path: <id = '{0}'>")]
	LocationLocalPath(i32),
}
/// Metadata extracted per file before Object creation: content hash, size
/// (stringified for Prisma), resolved kind and creation date.
#[derive(Debug, Clone)]
pub struct ObjectCreationMetadata {
	pub cas_id: String,
	pub size_str: String,
	pub kind: ObjectKind,
	pub date_created: DateTime<FixedOffset>,
}
/// Extracts creation metadata (cas_id, size, kind, creation date) for a
/// single file under `location_path`.
///
/// # Panics
/// Panics if called with a directory — cas_ids only exist for files.
///
/// # Errors
/// Returns any I/O error from reading metadata, opening the file, or hashing.
pub async fn assemble_object_metadata(
	location_path: impl AsRef<Path>,
	file_path: &file_path::Data,
) -> Result<ObjectCreationMetadata, io::Error> {
	assert!(
		!file_path.is_dir,
		"We can't generate cas_id for directories"
	);
	let path = location_path.as_ref().join(&file_path.materialized_path);
	let metadata = fs::metadata(&path).await?;
	// Resolve the Object kind: lowercase the extension, then let the
	// extension crate sniff the file contents to disambiguate conflicts.
	let object_kind = if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
		let mut file = fs::File::open(&path).await?;
		Extension::resolve_conflicting(&ext.to_lowercase(), &mut file, false)
			.await
			.map(Into::into)
			.unwrap_or(ObjectKind::Unknown)
	} else {
		ObjectKind::Unknown
	};
	let size = metadata.len();
	let cas_id = generate_cas_id(&path, size).await?;
	info!("Analyzed file: {:?} {:?} {:?}", path, cas_id, object_kind);
	Ok(ObjectCreationMetadata {
		cas_id,
		size_str: size.to_string(),
		kind: object_kind,
		date_created: file_path.date_created,
	})
}
/// Links every file path that produced a given cas_id to its Object, in a
/// single database batch.
///
/// `cas_id_lookup` must contain an entry for every object's cas_id — both it
/// and `objects` are derived from the same provisional-object set in
/// `identifier_job_step`, so a miss indicates a broken invariant upstream
/// (made explicit via `expect` instead of a bare `unwrap`).
///
/// # Errors
/// Propagates any Prisma batch query error.
async fn batch_update_file_paths(
	library: &LibraryContext,
	location_id: i32,
	objects: &[object::Data],
	cas_id_lookup: &HashMap<String, Vec<i32>>,
) -> Result<Vec<file_path::Data>, QueryError> {
	let file_path_updates = objects
		.iter()
		.flat_map(|object| {
			let file_path_ids = cas_id_lookup
				.get(&object.cas_id)
				.expect("cas_id_lookup must cover every object's cas_id");
			file_path_ids.iter().map(move |file_path_id| {
				info!(
					"Linking: <file_path_id = '{}', object_id = '{}'>",
					file_path_id, object.id
				);
				library.db.file_path().update(
					file_path::location_id_id(location_id, *file_path_id),
					vec![file_path::object_id::set(Some(object.id))],
				)
			})
		})
		.collect::<Vec<_>>();
	info!(
		"Updating {} file paths for {} objects",
		file_path_updates.len(),
		objects.len()
	);
	library.db._batch(file_path_updates).await
}
/// Assembles metadata for all `file_paths` concurrently (via `join_all`) and
/// returns a map of file_path id -> unchecked Prisma object-create args
/// (cas_id, size string, extra set-params). Paths whose metadata extraction
/// fails are logged and omitted.
async fn generate_provisional_objects(
	location_path: impl AsRef<Path>,
	file_paths: &[file_path::Data],
) -> HashMap<i32, (String, String, Vec<object::SetParam>)> {
	let mut provisional_objects = HashMap::with_capacity(file_paths.len());
	// analyze each file_path
	let location_path = location_path.as_ref();
	// all metadata futures are awaited together; results arrive paired with
	// the file_path id that produced them
	for (file_path_id, objects_result) in join_all(file_paths.iter().map(|file_path| async move {
		(
			file_path.id,
			assemble_object_metadata(location_path, file_path).await,
		)
	}))
	.await
	{
		// get the cas_id and extract metadata
		match objects_result {
			Ok(ObjectCreationMetadata {
				cas_id,
				size_str,
				kind,
				date_created,
			}) => {
				// create entry into chunks for created file data
				provisional_objects.insert(
					file_path_id,
					object::create_unchecked(
						cas_id,
						size_str,
						vec![
							object::date_created::set(date_created),
							object::kind::set(kind.int_value()),
						],
					),
				);
			}
			Err(e) => {
				error!("Error assembling Object metadata: {:#?}", e);
				continue;
			}
		};
	}
	provisional_objects
}
/// Core step logic shared by identifier jobs: for one chunk of file paths,
/// computes provisional objects, links paths matching pre-existing Objects,
/// bulk-creates the rest (skipping duplicates), links those too, and returns
/// `(objects_created, objects_linked_to_existing)`.
async fn identifier_job_step(
	library: &LibraryContext,
	location_id: i32,
	location_path: impl AsRef<Path>,
	file_paths: &[file_path::Data],
) -> Result<(usize, usize), JobError> {
	let location_path = location_path.as_ref();
	// generate objects for all file paths
	let provisional_objects = generate_provisional_objects(location_path, file_paths).await;
	// dedupe cas_ids — several paths may hash to the same content
	let unique_cas_ids = provisional_objects
		.values()
		.map(|(cas_id, _, _)| cas_id.clone())
		.collect::<HashSet<_>>()
		.into_iter()
		.collect::<Vec<_>>();
	// allow easy lookup of cas_id to many file_path_ids
	let mut cas_id_lookup: HashMap<String, Vec<i32>> = HashMap::with_capacity(unique_cas_ids.len());
	// populate cas_id_lookup with file_path_ids
	// NOTE(review): `or_insert_with(Vec::new)` could be `or_default()`
	for (file_path_id, (cas_id, _, _)) in provisional_objects.iter() {
		cas_id_lookup
			.entry(cas_id.clone())
			.or_insert_with(Vec::new)
			.push(*file_path_id);
	}
	// info!("{:#?}", cas_id_lookup);
	// get all objects that already exist in the database
	let existing_objects = library
		.db
		.object()
		.find_many(vec![object::cas_id::in_vec(unique_cas_ids)])
		.exec()
		.await?;
	info!(
		"Found {} existing Objects in Library, linking file paths...",
		existing_objects.len()
	);
	// link file_path.object_id to existing objects
	let existing_objects_linked = if !existing_objects.is_empty() {
		batch_update_file_paths(library, location_id, &existing_objects, &cas_id_lookup)
			.await?
			.len()
	} else {
		0
	};
	let existing_object_cas_ids = existing_objects
		.iter()
		.map(|object| object.cas_id.clone())
		.collect::<HashSet<_>>();
	// extract objects that don't already exist in the database
	let new_objects = provisional_objects
		.into_iter()
		.filter(|(_, (cas_id, _, _))| !existing_object_cas_ids.contains(cas_id))
		.collect::<Vec<_>>();
	// keep the cas_ids so the freshly inserted rows can be re-fetched below
	let new_objects_cas_ids = new_objects
		.iter()
		.map(|(_, (cas_id, _, _))| cas_id.clone())
		.collect::<Vec<_>>();
	info!(
		"Creating {} new Objects in Library... {:#?}",
		new_objects.len(),
		new_objects_cas_ids
	);
	let mut total_created: usize = 0;
	if !new_objects.is_empty() {
		// create new object records with assembled values; insert errors are
		// logged and treated as zero rows created rather than failing the job
		let total_created_files = library
			.db
			.object()
			.create_many(
				new_objects
					.into_iter()
					.map(|(_, (cas_id, size, params))| (cas_id, size, params))
					.collect(),
			)
			.skip_duplicates()
			.exec()
			.await
			.unwrap_or_else(|e| {
				error!("Error inserting files: {:#?}", e);
				0
			});
		total_created = total_created_files as usize;
		info!("Created {} new Objects in Library", total_created);
		// fetch newly created objects so we can link them to file_paths by their id
		let created_files = library
			.db
			.object()
			.find_many(vec![object::cas_id::in_vec(new_objects_cas_ids)])
			.exec()
			.await
			.unwrap_or_else(|e| {
				error!("Error finding created files: {:#?}", e);
				vec![]
			});
		info!(
			"Retrieved {} newly created Objects in Library",
			created_files.len()
		);
		if !created_files.is_empty() {
			batch_update_file_paths(library, location_id, &created_files, &cas_id_lookup).await?;
		}
	}
	Ok((total_created, existing_objects_linked))
}

View File

@@ -0,0 +1,140 @@
// #[cfg(feature = "ffmpeg")]
// use std::{ffi::OsStr, path::PathBuf};
//
// #[cfg(feature = "ffmpeg")]
// use ffmpeg_next::{codec::context::Context, format, media::Type};
//
// #[derive(Default, Debug)]
// pub struct MediaItem {
// pub created_at: Option<String>,
// pub brand: Option<String>,
// pub model: Option<String>,
// pub duration_seconds: i32,
// pub best_video_stream_index: usize,
// pub best_audio_stream_index: usize,
// pub best_subtitle_stream_index: usize,
// pub steams: Vec<Stream>,
// }
//
// #[derive(Debug)]
// pub struct Stream {
// pub codec: String,
// pub frames: f64,
// pub duration_seconds: f64,
// #[cfg(feature = "ffmpeg")]
// pub kind: Option<StreamKind>,
// }
//
// #[cfg(feature = "ffmpeg")]
// #[derive(Debug, PartialEq, Eq)]
// pub enum StreamKind {
// Video(VideoStream),
// Audio(AudioStream),
// }
//
// #[derive(Debug, PartialEq, Eq)]
// pub struct VideoStream {
// pub width: u32,
// pub height: u32,
// pub aspect_ratio: String,
// #[cfg(feature = "ffmpeg")]
// pub format: format::Pixel,
// pub bitrate: usize,
// }
//
// #[derive(Debug, PartialEq, Eq)]
// pub struct AudioStream {
// pub channels: u16,
// #[cfg(feature = "ffmpeg")]
// pub format: format::Sample,
// pub bitrate: usize,
// pub rate: u32,
// }
//
// #[cfg(feature = "ffmpeg")]
// fn extract(iter: &mut ffmpeg_next::dictionary::Iter, key: &str) -> Option<String> {
// iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
// }
// #[cfg(feature = "ffmpeg")]
// pub fn extract_media_data(path: &PathBuf) -> Result<MediaItem, ffmpeg_next::Error> {
// use chrono::NaiveDateTime;
//
// ffmpeg_next::init().unwrap();
//
// let mut name = path
// .file_name()
// .and_then(OsStr::to_str)
// .map(ToString::to_string)
// .unwrap_or_default();
//
// // strip to exact potential date length and attempt to parse
// name = name.chars().take(19).collect();
// // specifically OBS uses this format for time, other checks could be added
// let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
//
// let context = format::input(&path)?;
//
// let mut media_item = MediaItem::default();
// let metadata = context.metadata();
// let mut iter = metadata.iter();
//
// // creation_time is usually the creation date of the file
// media_item.created_at = extract(&mut iter, "creation_time");
// // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
// media_item.created_at = extract(&mut iter, "creationdate");
// // fallback to potential time if exists
// if media_item.created_at.is_none() {
// media_item.created_at = potential_date.map(|d| d.to_string()).ok();
// }
// // origin metadata
// media_item.brand = extract(&mut iter, "major_brand");
// media_item.brand = extract(&mut iter, "make");
// media_item.model = extract(&mut iter, "model");
//
// if let Some(stream) = context.streams().best(Type::Video) {
// media_item.best_video_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(Type::Audio) {
// media_item.best_audio_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(Type::Subtitle) {
// media_item.best_subtitle_stream_index = stream.index();
// }
// media_item.duration_seconds = context.duration() as i32 / ffmpeg_next::ffi::AV_TIME_BASE;
//
// for stream in context.streams() {
// let codec = Context::from_parameters(stream.parameters())?;
//
// let mut stream_item = Stream {
// codec: codec.id().name().to_string(),
// frames: stream.frames() as f64,
// duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
// kind: None,
// };
//
// if codec.medium() == Type::Video {
// if let Ok(video) = codec.decoder().video() {
// stream_item.kind = Some(StreamKind::Video(VideoStream {
// bitrate: video.bit_rate(),
// format: video.format(),
// width: video.width(),
// height: video.height(),
// aspect_ratio: video.aspect_ratio().to_string(),
// }));
// }
// } else if codec.medium() == Type::Audio {
// if let Ok(audio) = codec.decoder().audio() {
// stream_item.kind = Some(StreamKind::Audio(AudioStream {
// channels: audio.channels(),
// bitrate: audio.bit_rate(),
// rate: audio.rate(),
// format: audio.format(),
// }));
// }
// }
// media_item.steams.push(stream_item);
// }
//
// Ok(media_item)
// }

View File

@@ -1,137 +0,0 @@
#[cfg(feature = "ffmpeg")]
use ffmpeg_next::format;
/// Aggregated media metadata for a file, populated by the (currently
/// disabled) ffmpeg-based extractor.
#[derive(Default, Debug)]
pub struct MediaItem {
	// Creation timestamp from container metadata, when present.
	pub created_at: Option<String>,
	pub brand: Option<String>,
	pub model: Option<String>,
	pub duration_seconds: f64,
	pub best_video_stream_index: usize,
	pub best_audio_stream_index: usize,
	pub best_subtitle_stream_index: usize,
	// NOTE(review): "steams" looks like a typo for "streams"; renaming this
	// public field is an API change — confirm callers before fixing.
	pub steams: Vec<Stream>,
}
/// Per-stream metadata common to all stream types.
#[derive(Debug)]
pub struct Stream {
	pub codec: String,
	pub frames: f64,
	pub duration_seconds: f64,
	// Video- or audio-specific details; `None` for other stream types.
	pub kind: Option<StreamKind>,
}
/// Discriminates video vs. audio stream details.
#[derive(Debug)]
#[allow(dead_code)] // TODO: Remove this when we start using ffmpeg
pub enum StreamKind {
	Video(VideoStream),
	Audio(AudioStream),
}
/// Video-specific stream properties; pixel format only exists when built
/// with the `ffmpeg` feature.
#[derive(Debug)]
pub struct VideoStream {
	pub width: u32,
	pub height: u32,
	pub aspect_ratio: String,
	#[cfg(feature = "ffmpeg")]
	pub format: format::Pixel,
	pub bitrate: usize,
}
/// Audio-specific stream properties; sample format only exists when built
/// with the `ffmpeg` feature.
#[derive(Debug)]
pub struct AudioStream {
	pub channels: u16,
	#[cfg(feature = "ffmpeg")]
	pub format: format::Sample,
	pub bitrate: usize,
	pub rate: u32,
}
// fn extract(iter: &mut Iter, key: &str) -> Option<String> {
// iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
// }
// pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
// ffmpeg::init().unwrap();
// let mut name = Path::new(path)
// .file_name()
// .and_then(OsStr::to_str)
// .map(ToString::to_string)
// .unwrap_or(String::new());
// // strip to exact potential date length and attempt to parse
// name = name.chars().take(19).collect();
// // specifically OBS uses this format for time, other checks could be added
// let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
// match ffmpeg::format::input(&path) {
// Ok(context) => {
// let mut media_item = MediaItem::default();
// let metadata = context.metadata();
// let mut iter = metadata.iter();
// // creation_time is usually the creation date of the file
// media_item.created_at = extract(&mut iter, "creation_time");
// // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
// media_item.created_at = extract(&mut iter, "creationdate");
// // fallback to potential time if exists
// if media_item.created_at.is_none() {
// media_item.created_at = potential_date.map(|d| d.to_string()).ok();
// }
// // origin metadata
// media_item.brand = extract(&mut iter, "major_brand");
// media_item.brand = extract(&mut iter, "make");
// media_item.model = extract(&mut iter, "model");
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
// media_item.best_video_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
// media_item.best_audio_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
// media_item.best_subtitle_stream_index = stream.index();
// }
// media_item.duration_seconds =
// context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
// for stream in context.streams() {
// let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
// let mut stream_item = Stream {
// codec: codec.id().name().to_string(),
// frames: stream.frames() as f64,
// duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
// kind: None,
// };
// if codec.medium() == ffmpeg::media::Type::Video {
// if let Ok(video) = codec.decoder().video() {
// stream_item.kind = Some(StreamKind::Video(VideoStream {
// bitrate: video.bit_rate(),
// format: video.format(),
// width: video.width(),
// height: video.height(),
// aspect_ratio: video.aspect_ratio().to_string(),
// }));
// }
// } else if codec.medium() == ffmpeg::media::Type::Audio {
// if let Ok(audio) = codec.decoder().audio() {
// stream_item.kind = Some(StreamKind::Audio(AudioStream {
// channels: audio.channels(),
// bitrate: audio.bit_rate(),
// rate: audio.rate(),
// format: audio.format(),
// }));
// }
// }
// media_item.steams.push(stream_item);
// }
// info!("{:#?}", media_item);
// }
// Err(error) => error!("error: {}", error),
// }
// Ok(())
// }

View File

@@ -1,5 +1,5 @@
mod metadata;
mod media_data;
mod thumb;
pub use metadata::*;
pub use media_data::*;
pub use thumb::*;

View File

@@ -5,16 +5,18 @@ use crate::{
library::LibraryContext,
prisma::{file_path, location},
};
use sd_file_ext::extensions::{Extension, ImageExtension, VideoExtension};
use image::{self, imageops, DynamicImage, GenericImageView};
use serde::{Deserialize, Serialize};
use std::collections::VecDeque;
use std::{
collections::VecDeque,
error::Error,
ops::Deref,
path::{Path, PathBuf},
};
use image::{self, imageops, DynamicImage, GenericImageView};
use sd_file_ext::extensions::{Extension, ImageExtension, VideoExtension};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::{fs, task::block_in_place};
use tracing::{error, info, trace, warn};
use webp::Encoder;
@@ -26,10 +28,10 @@ pub const THUMBNAIL_JOB_NAME: &str = "thumbnailer";
pub struct ThumbnailJob {}
#[derive(Serialize, Deserialize, Clone)]
#[derive(Serialize, Deserialize, Clone, Hash)]
pub struct ThumbnailJobInit {
pub location_id: i32,
pub path: PathBuf,
pub root_path: PathBuf,
pub background: bool,
}
@@ -39,7 +41,18 @@ pub struct ThumbnailJobState {
root_path: PathBuf,
}
#[derive(Error, Debug)]
pub enum ThumbnailError {
#[error("Location not found: <id = '{0}'>")]
MissingLocation(i32),
#[error("Root file path not found: <path = '{0}'>")]
MissingRootFilePath(PathBuf),
#[error("Location without local path: <id = '{0}'>")]
LocationLocalPath(i32),
}
file_path::include!(file_path_with_object { object });
file_path::select!(file_path_id_only { id });
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
enum ThumbnailJobStepKind {
@@ -51,6 +64,7 @@ enum ThumbnailJobStepKind {
#[derive(Debug, Serialize, Deserialize)]
pub struct ThumbnailJobStep {
file_path: file_path_with_object::Data,
object_id: i32,
kind: ThumbnailJobStepKind,
}
@@ -64,50 +78,71 @@ impl StatefulJob for ThumbnailJob {
THUMBNAIL_JOB_NAME
}
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
let library_ctx = ctx.library_ctx();
let thumbnail_dir = library_ctx
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let thumbnail_dir = ctx
.library_ctx
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME);
let location = library_ctx
let location = ctx
.library_ctx
.db
.location()
.find_unique(location::id::equals(state.init.location_id))
.exec()
.await?
.unwrap();
.ok_or(ThumbnailError::MissingLocation(state.init.location_id))?;
let root_path_str = state
.init
.root_path
.to_str()
.expect("Found non-UTF-8 path")
.to_string();
let parent_directory_id = ctx
.library_ctx
.db
.file_path()
.find_first(vec![
file_path::location_id::equals(state.init.location_id),
file_path::materialized_path::equals(if !root_path_str.is_empty() {
root_path_str
} else {
"/".to_string()
}),
file_path::is_dir::equals(true),
])
.select(file_path_id_only::select())
.exec()
.await?
.ok_or_else(|| ThumbnailError::MissingRootFilePath(state.init.root_path.clone()))?
.id;
info!(
"Searching for images in location {} at path {}",
location.id,
state.init.path.display()
"Searching for images in location {} at directory {}",
location.id, parent_directory_id
);
// create all necessary directories if they don't exist
fs::create_dir_all(&thumbnail_dir).await?;
let root_path = location.local_path.map(PathBuf::from).unwrap();
let root_path = location
.local_path
.map(PathBuf::from)
.ok_or(ThumbnailError::LocationLocalPath(location.id))?;
// query database for all image files in this location that need thumbnails
let image_files = get_files_by_extensions(
&library_ctx,
&ctx.library_ctx,
state.init.location_id,
&state.init.path,
[
ImageExtension::Png,
ImageExtension::Jpeg,
ImageExtension::Jpg,
ImageExtension::Gif,
ImageExtension::Webp,
]
.into_iter()
.map(Extension::Image)
.collect(),
parent_directory_id,
&sd_file_ext::extensions::ALL_IMAGE_EXTENSIONS
.iter()
.map(Clone::clone)
.filter(can_generate_thumbnail_for_image)
.map(Extension::Image)
.collect::<Vec<_>>(),
ThumbnailJobStepKind::Image,
)
.await?;
@@ -117,15 +152,15 @@ impl StatefulJob for ThumbnailJob {
let all_files = {
// query database for all video files in this location that need thumbnails
let video_files = get_files_by_extensions(
&library_ctx,
&ctx.library_ctx,
state.init.location_id,
&state.init.path,
sd_file_ext::extensions::ALL_VIDEO_EXTENSIONS
parent_directory_id,
&sd_file_ext::extensions::ALL_VIDEO_EXTENSIONS
.iter()
.map(Clone::clone)
.filter(can_generate_thumbnail_for_video)
.map(Extension::Video)
.collect(),
.collect::<Vec<_>>(),
ThumbnailJobStepKind::Video,
)
.await?;
@@ -156,7 +191,7 @@ impl StatefulJob for ThumbnailJob {
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
ctx.progress(vec![JobReportUpdate::Message(format!(
@@ -174,15 +209,14 @@ impl StatefulJob for ThumbnailJob {
trace!("image_file {:?}", step);
// get cas_id, if none found skip
let cas_id = match &step.file_path.object {
Some(f) => f.cas_id.clone(),
_ => {
warn!(
"skipping thumbnail generation for {}",
step.file_path.materialized_path
);
return Ok(());
}
let cas_id = if let Some(ref object) = step.file_path.object {
object.cas_id.clone()
} else {
warn!(
"skipping thumbnail generation for {}",
step.file_path.materialized_path
);
return Ok(());
};
// Define and write the WebP-encoded file to a given path
@@ -200,23 +234,58 @@ impl StatefulJob for ThumbnailJob {
}
#[cfg(feature = "ffmpeg")]
ThumbnailJobStepKind::Video => {
// use crate::{
// object::preview::{extract_media_data, StreamKind},
// prisma::media_data,
// };
// use
if let Err(e) = generate_video_thumbnail(&path, &output_path).await {
error!("Error generating thumb for video: {:?} {:#?}", &path, e);
}
// extract MediaData from video and put in the database
// TODO: this is bad here, maybe give it its own job?
// if let Ok(media_data) = extract_media_data(&path) {
// info!(
// "Extracted media data for object {}: {:?}",
// step.object_id, media_data
// );
// // let primary_video_stream = media_data
// // .steams
// // .iter()
// // .find(|s| s.kind == Some(StreamKind::Video(_)));
// let params = vec![
// media_data::duration_seconds::set(Some(media_data.duration_seconds)),
// // media_data::pixel_width::set(Some(media_data.width)),
// // media_data::pixel_height::set(Some(media_data.height)),
// ];
// let _ = ctx
// .library_ctx()
// .db
// .media_data()
// .upsert(
// media_data::id::equals(step.object_id),
// params.clone(),
// params,
// )
// .exec()
// .await?;
// }
}
}
if !state.init.background {
ctx.library_ctx().emit(CoreEvent::NewThumbnail { cas_id });
ctx.library_ctx.emit(CoreEvent::NewThumbnail { cas_id });
};
// With this invalidate query, we update the user interface to show each new thumbnail
invalidate_query!(ctx.library_ctx, "locations.getExplorerData");
} else {
info!("Thumb exists, skipping... {}", output_path.display());
}
// With this invalidate query, we update the user interface to show each new thumbnail
let library_ctx = ctx.library_ctx();
invalidate_query!(library_ctx, "locations.getExplorerData");
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(
state.step_number + 1,
)]);
@@ -224,11 +293,7 @@ impl StatefulJob for ThumbnailJob {
Ok(())
}
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
let data = state
.data
.as_ref()
@@ -244,7 +309,7 @@ impl StatefulJob for ThumbnailJob {
}
}
async fn generate_image_thumbnail<P: AsRef<Path>>(
pub async fn generate_image_thumbnail<P: AsRef<Path>>(
file_path: P,
output_path: P,
) -> Result<(), Box<dyn Error>> {
@@ -276,7 +341,7 @@ async fn generate_image_thumbnail<P: AsRef<Path>>(
}
#[cfg(feature = "ffmpeg")]
async fn generate_video_thumbnail<P: AsRef<Path>>(
pub async fn generate_video_thumbnail<P: AsRef<Path>>(
file_path: P,
output_path: P,
) -> Result<(), Box<dyn Error>> {
@@ -290,35 +355,38 @@ async fn generate_video_thumbnail<P: AsRef<Path>>(
async fn get_files_by_extensions(
ctx: &LibraryContext,
location_id: i32,
path: impl AsRef<Path>,
extensions: Vec<Extension>,
_parent_file_path_id: i32,
extensions: &[Extension],
kind: ThumbnailJobStepKind,
) -> Result<Vec<ThumbnailJobStep>, JobError> {
let mut params = vec![
file_path::location_id::equals(location_id),
file_path::extension::in_vec(extensions.iter().map(|e| e.to_string()).collect()),
];
let path_str = path.as_ref().to_string_lossy().to_string();
if !path_str.is_empty() {
params.push(file_path::materialized_path::starts_with(path_str));
}
Ok(ctx
.db
.file_path()
.find_many(params)
.find_many(vec![
file_path::location_id::equals(location_id),
file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
// file_path::parent_id::equals(Some(parent_file_path_id)),
])
.include(file_path_with_object::include())
.exec()
.await?
.into_iter()
.map(|file_path| ThumbnailJobStep { file_path, kind })
.map(|file_path| ThumbnailJobStep {
object_id: file_path.object.as_ref().unwrap().id,
file_path,
kind,
})
.collect())
}
#[allow(unused)]
pub fn can_generate_thumbnail_for_video(video_extension: &VideoExtension) -> bool {
use VideoExtension::*;
!matches!(video_extension, Mpg | Swf | M2v)
// File extensions that are specifically not supported by the thumbnailer
!matches!(video_extension, Mpg | Swf | M2v | Hevc)
}
#[allow(unused)]
pub fn can_generate_thumbnail_for_image(image_extension: &ImageExtension) -> bool {
use ImageExtension::*;
matches!(image_extension, Jpg | Jpeg | Png | Webp | Gif)
}

View File

@@ -1,5 +1,5 @@
use blake3::Hasher;
use std::path::PathBuf;
use std::path::Path;
use tokio::{
fs::File,
io::{self, AsyncReadExt},
@@ -7,7 +7,7 @@ use tokio::{
const BLOCK_SIZE: usize = 1048576;
pub async fn file_checksum(path: PathBuf) -> Result<String, io::Error> {
pub async fn file_checksum(path: impl AsRef<Path>) -> Result<String, io::Error> {
let mut reader = File::open(path).await?;
let mut context = Hasher::new();
let mut buffer = vec![0; BLOCK_SIZE].into_boxed_slice();

View File

@@ -4,13 +4,15 @@ use std::{collections::VecDeque, path::PathBuf};
use crate::{
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
prisma::{self, file_path, location, object},
prisma::{file_path, location, object},
};
use tracing::info;
use super::hash::file_checksum;
pub const VALIDATOR_JOB_NAME: &str = "object_validator";
// The Validator is able to:
// - generate a full byte checksum for Objects in a Location
// - generate checksums for all Objects missing without one
@@ -24,46 +26,49 @@ pub struct ObjectValidatorJobState {
}
// The validator can
#[derive(Serialize, Deserialize, Debug)]
#[derive(Serialize, Deserialize, Debug, Hash)]
pub struct ObjectValidatorJobInit {
pub location_id: i32,
pub path: PathBuf,
pub background: bool,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct ObjectValidatorJobStep {
pub path: file_path::Data,
}
file_path::select!(file_path_and_object {
materialized_path
object: select {
id
integrity_checksum
}
});
#[async_trait::async_trait]
impl StatefulJob for ObjectValidatorJob {
type Data = ObjectValidatorJobState;
type Init = ObjectValidatorJobInit;
type Step = ObjectValidatorJobStep;
type Data = ObjectValidatorJobState;
type Step = file_path_and_object::Data;
fn name(&self) -> &'static str {
"object_validator"
VALIDATOR_JOB_NAME
}
async fn init(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> Result<(), JobError> {
let library_ctx = ctx.library_ctx();
state.steps = library_ctx
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
state.steps = ctx
.library_ctx
.db
.file_path()
.find_many(vec![file_path::location_id::equals(state.init.location_id)])
.find_many(vec![
file_path::location_id::equals(state.init.location_id),
file_path::is_dir::equals(false),
file_path::object::is(vec![object::integrity_checksum::equals(None)]),
])
.select(file_path_and_object::select())
.exec()
.await?
.into_iter()
.map(|path| ObjectValidatorJobStep { path })
.collect::<VecDeque<_>>();
let location = library_ctx
let location = ctx
.library_ctx
.db
.location()
.find_unique(location::id::equals(state.init.location_id))
@@ -84,46 +89,29 @@ impl StatefulJob for ObjectValidatorJob {
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
let library_ctx = ctx.library_ctx();
let data = state.data.as_ref().expect("fatal: missing job state");
let path = data.root_path.join(&step.path.materialized_path);
// skip directories
if path.is_dir() {
return Ok(());
}
if let Some(object_id) = step.path.object_id {
// this is to skip files that already have checksums
// i'm unsure what the desired behaviour is in this case
// we can also compare old and new checksums here
let object = library_ctx
.db
.object()
.find_unique(object::id::equals(object_id))
.exec()
.await?
.unwrap();
if object.integrity_checksum.is_some() {
return Ok(());
// this is to skip files that already have checksums
// i'm unsure what the desired behaviour is in this case
// we can also compare old and new checksums here
if let Some(ref object) = step.object {
// This if is just to make sure, we already queried objects where integrity_checksum is null
if object.integrity_checksum.is_none() {
ctx.library_ctx
.db
.object()
.update(
object::id::equals(object.id),
vec![object::SetParam::SetIntegrityChecksum(Some(
file_checksum(data.root_path.join(&step.materialized_path)).await?,
))],
)
.exec()
.await?;
}
let hash = file_checksum(path).await?;
library_ctx
.db
.object()
.update(
object::id::equals(object_id),
vec![prisma::object::SetParam::SetIntegrityChecksum(Some(hash))],
)
.exec()
.await?;
}
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(
@@ -133,11 +121,7 @@ impl StatefulJob for ObjectValidatorJob {
Ok(())
}
async fn finalize(
&self,
_ctx: WorkerContext,
state: &mut JobState<Self::Init, Self::Data, Self::Step>,
) -> JobResult {
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
let data = state
.data
.as_ref()

View File

@@ -1,4 +1,6 @@
//! This module contains the crate's STREAM implementation, and wrappers that allow us to support multiple AEADs.
#![allow(clippy::use_self)] // I think: https://github.com/rust-lang/rust-clippy/issues/3909
use std::io::{Cursor, Read, Write};
use crate::{primitives::BLOCK_SIZE, Error, Protected, Result};
@@ -10,14 +12,13 @@ use aes_gcm::Aes256Gcm;
use chacha20poly1305::XChaCha20Poly1305;
/// These are all possible algorithms that can be used for encryption and decryption
#[derive(Clone, Copy, Eq, PartialEq)]
#[derive(Clone, Copy, Eq, PartialEq, Hash)]
#[cfg_attr(
feature = "serde",
derive(serde::Serialize),
derive(serde::Deserialize)
)]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[allow(clippy::use_self)]
pub enum Algorithm {
XChaCha20Poly1305,
Aes256Gcm,

View File

@@ -10,6 +10,8 @@
//! let salt = generate_salt();
//! let hashed_password = hashing_algorithm.hash(password, salt).unwrap();
//! ```
#![allow(clippy::use_self)] // I think: https://github.com/rust-lang/rust-clippy/issues/3909
use crate::Protected;
use crate::{primitives::SALT_LEN, Error, Result};
use argon2::Argon2;
@@ -24,7 +26,6 @@ use argon2::Argon2;
derive(serde::Deserialize)
)]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[allow(clippy::use_self)]
pub enum Params {
Standard,
Hardened,

View File

@@ -134,9 +134,7 @@ impl MovieDecoder {
return Err(ThumbnailerError::SeekNotAllowed);
}
let timestamp = (AV_TIME_BASE as i64)
.checked_mul(seconds as i64)
.unwrap_or(0);
let timestamp = (AV_TIME_BASE as i64).checked_mul(seconds).unwrap_or(0);
check_error(
unsafe { av_seek_frame(self.format_context, -1, timestamp, 0) },
@@ -334,9 +332,12 @@ impl MovieDecoder {
break;
}
if unsafe {
CString::from_raw((*tag).key).to_string_lossy() == "filename"
CString::from_raw((*tag).key)
.to_str()
.expect("Found non-UTF-8 path") == "filename"
&& CString::from_raw((*tag).value)
.to_string_lossy()
.to_str()
.expect("Found non-UTF-8 path")
.starts_with("cover.")
} {
if embedded_data_streams.is_empty() {
@@ -436,8 +437,8 @@ impl MovieDecoder {
(*self.video_codec_context).width,
(*self.video_codec_context).height,
(*self.video_codec_context).pix_fmt as i32,
(*timebase).num,
(*timebase).den,
timebase.num,
timebase.den,
(*self.video_codec_context).sample_aspect_ratio.num,
i32::max((*self.video_codec_context).sample_aspect_ratio.den, 1)
)

View File

@@ -9,3 +9,9 @@ authors = ["Brendan Allen <brendan@spacedrive.com>", "Jamie Pine <jamie@spacedri
int-enum = "0.5.0"
serde = {version = "1.0.145", features = ["derive"]}
serde_json = "1.0.85"
strum = { version = "0.24", features = ["derive"]}
strum_macros = "0.24"
tokio = { version = "1.21.2", features = ["fs", "rt", "io-util"] }
[dev-dependencies]
tokio = { version = "1.21.2", features = ["fs", "rt", "macros"] }

View File

@@ -10,6 +10,7 @@ use crate::magic::{
extension_enum! {
Extension {
Document(DocumentExtension),
Video(VideoExtension),
Image(ImageExtension),
Audio(AudioExtension),
@@ -34,7 +35,7 @@ extension_category_enum! {
Swf = [0x5A, 0x57, 0x53] | [0x46, 0x57, 0x53],
Mjpeg = [],
Ts = [0x47],
Mts = [0x47, _, _, _] | [_, _, _, 0x47],
Mts = [0x47] | [_, _, _, 0x47],
Mpeg = [0x47] | [0x00, 0x00, 0x01, 0xBA] | [0x00, 0x00, 0x01, 0xB3],
Mxf = [0x06, 0x0E, 0x2B, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0D, 0x01, 0x02, 0x01, 0x01, 0x02],
M2v = [0x00, 0x00, 0x01, 0xBA],
@@ -44,6 +45,7 @@ extension_category_enum! {
Flv = [0x46, 0x4C, 0x56],
Wm = [],
#[serde(rename = "3gp")]
#[strum(serialize = "3gp")]
_3gp = [],
M4v = [0x66, 0x74, 0x79, 0x70, 0x4D, 0x34, 0x56] + 4,
Wmv = [0x30, 0x26, 0xB2, 0x75, 0x8E, 0x66, 0xCF, 0x11, 0xA6, 0xD9, 0x00, 0xAA, 0x00, 0x62, 0xCE, 0x6C],
@@ -54,12 +56,14 @@ extension_category_enum! {
Vob = [0x00, 0x00, 0x01, 0xBA],
Ogv = [0x4F, 0x67, 0x67, 0x53],
Wtv = [0xB7, 0xD8, 0x00],
Hevc = [],
F4v = [0x66, 0x74, 0x79, 0x70, 0x66, 0x72, 0x65, 0x65] + 4,
}
}
// image extensions
extension_category_enum! {
ImageExtension _ALL_IMAGE_EXTENSIONS {
ImageExtension ALL_IMAGE_EXTENSIONS {
Jpg = [0xFF, 0xD8],
Jpeg = [0xFF, 0xD8],
Png = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A],
@@ -116,6 +120,7 @@ extension_category_enum! {
Gz = [0x1F, 0x8B, 0x08],
Bz2 = [0x42, 0x5A, 0x68],
#[serde(rename = "7z")]
#[strum(serialize = "7z")]
_7z = [0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C],
Xz = [0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00],
}
@@ -258,7 +263,8 @@ extension_category_enum! {
#[cfg(test)]
mod test {
use std::{fs::File, path::PathBuf};
use std::path::PathBuf;
use tokio::fs::File;
use super::*;
@@ -283,9 +289,9 @@ mod test {
assert_eq!(Extension::from_str("jeff"), None);
}
#[test]
fn magic_bytes() {
fn test_path(subpath: &str) -> Option<Extension> {
#[tokio::test]
async fn magic_bytes() {
async fn test_path(subpath: &str) -> Option<Extension> {
println!("testing {}...", subpath);
let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.parent()
@@ -295,175 +301,176 @@ mod test {
.join("packages/test-files/files")
.join(subpath);
let mut file = File::open(path).unwrap();
let mut file = File::open(path).await.unwrap();
Extension::resolve_conflicting(&subpath.split(".").last().unwrap(), &mut file, true)
.await
}
// Video extension tests
assert_eq!(
dbg!(test_path("video/video.ts")),
dbg!(test_path("video/video.ts").await),
Some(Extension::Video(VideoExtension::Ts))
);
assert_eq!(
dbg!(test_path("code/typescript.ts")),
dbg!(test_path("code/typescript.ts").await),
Some(Extension::Code(CodeExtension::Ts))
);
assert_eq!(
dbg!(test_path("video/video.3gp")),
dbg!(test_path("video/video.3gp").await),
Some(Extension::Video(VideoExtension::_3gp))
);
assert_eq!(
dbg!(test_path("video/video.mov")),
dbg!(test_path("video/video.mov").await),
Some(Extension::Video(VideoExtension::Mov))
);
assert_eq!(
dbg!(test_path("video/video.asf")),
dbg!(test_path("video/video.asf").await),
Some(Extension::Video(VideoExtension::Asf))
);
assert_eq!(
dbg!(test_path("video/video.avi")),
dbg!(test_path("video/video.avi").await),
Some(Extension::Video(VideoExtension::Avi))
);
assert_eq!(
dbg!(test_path("video/video.flv")),
dbg!(test_path("video/video.flv").await),
Some(Extension::Video(VideoExtension::Flv))
);
assert_eq!(
dbg!(test_path("video/video.m4v")),
dbg!(test_path("video/video.m4v").await),
Some(Extension::Video(VideoExtension::M4v))
);
assert_eq!(
dbg!(test_path("video/video.mkv")),
dbg!(test_path("video/video.mkv").await),
Some(Extension::Video(VideoExtension::Mkv))
);
assert_eq!(
dbg!(test_path("video/video.mpg")),
dbg!(test_path("video/video.mpg").await),
Some(Extension::Video(VideoExtension::Mpg))
);
assert_eq!(
dbg!(test_path("video/video.mpeg")),
dbg!(test_path("video/video.mpeg").await),
Some(Extension::Video(VideoExtension::Mpeg))
);
assert_eq!(
dbg!(test_path("video/video.mts")),
dbg!(test_path("video/video.mts").await),
Some(Extension::Video(VideoExtension::Mts))
);
assert_eq!(
dbg!(test_path("video/video.mxf")),
dbg!(test_path("video/video.mxf").await),
Some(Extension::Video(VideoExtension::Mxf))
);
assert_eq!(
dbg!(test_path("video/video.ogv")),
dbg!(test_path("video/video.ogv").await),
Some(Extension::Video(VideoExtension::Ogv))
);
assert_eq!(
dbg!(test_path("video/video.swf")),
dbg!(test_path("video/video.swf").await),
Some(Extension::Video(VideoExtension::Swf))
);
assert_eq!(
dbg!(test_path("video/video.ts")),
dbg!(test_path("video/video.ts").await),
Some(Extension::Video(VideoExtension::Ts))
);
assert_eq!(
dbg!(test_path("video/video.vob")),
dbg!(test_path("video/video.vob").await),
Some(Extension::Video(VideoExtension::Vob))
);
assert_eq!(
dbg!(test_path("video/video.ogv")),
dbg!(test_path("video/video.ogv").await),
Some(Extension::Video(VideoExtension::Ogv))
);
assert_eq!(
dbg!(test_path("video/video.wmv")),
dbg!(test_path("video/video.wmv").await),
Some(Extension::Video(VideoExtension::Wmv))
);
assert_eq!(
dbg!(test_path("video/video.wtv")),
dbg!(test_path("video/video.wtv").await),
Some(Extension::Video(VideoExtension::Wtv))
);
// Audio extension tests
assert_eq!(
dbg!(test_path("audio/audio.aac")),
dbg!(test_path("audio/audio.aac").await),
Some(Extension::Audio(AudioExtension::Aac))
);
assert_eq!(
dbg!(test_path("audio/audio.adts")),
dbg!(test_path("audio/audio.adts").await),
Some(Extension::Audio(AudioExtension::Adts))
);
assert_eq!(
dbg!(test_path("audio/audio.aif")),
dbg!(test_path("audio/audio.aif").await),
Some(Extension::Audio(AudioExtension::Aif))
);
assert_eq!(
dbg!(test_path("audio/audio.aiff")),
dbg!(test_path("audio/audio.aiff").await),
Some(Extension::Audio(AudioExtension::Aiff))
);
assert_eq!(
dbg!(test_path("audio/audio.aptx")),
dbg!(test_path("audio/audio.aptx").await),
Some(Extension::Audio(AudioExtension::Aptx))
);
assert_eq!(
dbg!(test_path("audio/audio.ast")),
dbg!(test_path("audio/audio.ast").await),
Some(Extension::Audio(AudioExtension::Ast))
);
assert_eq!(
dbg!(test_path("audio/audio.caf")),
dbg!(test_path("audio/audio.caf").await),
Some(Extension::Audio(AudioExtension::Caf))
);
assert_eq!(
dbg!(test_path("audio/audio.flac")),
dbg!(test_path("audio/audio.flac").await),
Some(Extension::Audio(AudioExtension::Flac))
);
assert_eq!(
dbg!(test_path("audio/audio.loas")),
dbg!(test_path("audio/audio.loas").await),
Some(Extension::Audio(AudioExtension::Loas))
);
assert_eq!(
dbg!(test_path("audio/audio.m4a")),
dbg!(test_path("audio/audio.m4a").await),
Some(Extension::Audio(AudioExtension::M4a))
);
// assert_eq!(
// dbg!(test_path("audio/audio.m4b")),
// dbg!(test_path("audio/audio.m4b").await),
// Some(Extension::Audio(AudioExtension::M4b))
// );
assert_eq!(
dbg!(test_path("audio/audio.mp2")),
dbg!(test_path("audio/audio.mp2").await),
Some(Extension::Audio(AudioExtension::Mp2))
);
assert_eq!(
dbg!(test_path("audio/audio.mp3")),
dbg!(test_path("audio/audio.mp3").await),
Some(Extension::Audio(AudioExtension::Mp3))
);
assert_eq!(
dbg!(test_path("audio/audio.oga")),
dbg!(test_path("audio/audio.oga").await),
Some(Extension::Audio(AudioExtension::Oga))
);
assert_eq!(
dbg!(test_path("audio/audio.ogg")),
dbg!(test_path("audio/audio.ogg").await),
Some(Extension::Audio(AudioExtension::Ogg))
);
assert_eq!(
dbg!(test_path("audio/audio.opus")),
dbg!(test_path("audio/audio.opus").await),
Some(Extension::Audio(AudioExtension::Opus))
);
assert_eq!(
dbg!(test_path("audio/audio.tta")),
dbg!(test_path("audio/audio.tta").await),
Some(Extension::Audio(AudioExtension::Tta))
);
assert_eq!(
dbg!(test_path("audio/audio.voc")),
dbg!(test_path("audio/audio.voc").await),
Some(Extension::Audio(AudioExtension::Voc))
);
assert_eq!(
dbg!(test_path("audio/audio.wav")),
dbg!(test_path("audio/audio.wav").await),
Some(Extension::Audio(AudioExtension::Wav))
);
assert_eq!(
dbg!(test_path("audio/audio.wma")),
dbg!(test_path("audio/audio.wma").await),
Some(Extension::Audio(AudioExtension::Wma))
);
assert_eq!(
dbg!(test_path("audio/audio.wv")),
dbg!(test_path("audio/audio.wv").await),
Some(Extension::Audio(AudioExtension::Wv))
);
}

View File

@@ -1,5 +1,12 @@
#![allow(dead_code)]
use crate::extensions::{CodeExtension, Extension, VideoExtension};
use std::io::SeekFrom;
use tokio::{
fs::File,
io::{AsyncReadExt, AsyncSeekExt},
};
#[derive(Debug, PartialEq, Eq)]
pub enum ExtensionPossibility {
@@ -98,8 +105,9 @@ macro_rules! extension_category_enum {
$($(#[$variant_attr:meta])* $variant:ident $(= $( [$($magic_bytes:tt),*] $(+ $offset:literal)? )|+ )? ,)*
}
) => {
#[derive(Debug, ::serde::Serialize, ::serde::Deserialize, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, ::serde::Serialize, ::serde::Deserialize, ::strum::Display, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
$(#[$enum_attr])*
// construct enum
@@ -120,12 +128,6 @@ macro_rules! extension_category_enum {
serde_json::from_value(serde_json::Value::String(s.to_string()))
}
}
// convert to string
impl std::fmt::Display for $enum_name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", serde_json::to_string(self).unwrap()) // SAFETY: This is safe
}
}
};
(@magic_bytes; $enum_name:ident ($($(#[$variant_attr:meta])* $variant:ident = $( [$($magic_bytes:tt),*] $(+ $offset:literal)? )|+ ),*)) => {
@@ -153,14 +155,12 @@ macro_rules! extension_category_enum {
}
pub(crate) use extension_category_enum;
pub fn verify_magic_bytes<T: MagicBytes>(ext: T, file: &mut std::fs::File) -> Option<T> {
use std::io::{Read, Seek, SeekFrom};
pub async fn verify_magic_bytes<T: MagicBytes>(ext: T, file: &mut File) -> Option<T> {
for magic in ext.magic_bytes_meta() {
let mut buf = vec![0; magic.length];
file.seek(SeekFrom::Start(magic.offset as u64)).ok()?;
file.read_exact(&mut buf).ok()?;
file.seek(SeekFrom::Start(magic.offset as u64)).await.ok()?;
file.read_exact(&mut buf).await.ok()?;
if ext.has_magic_bytes(&buf) {
return Some(ext);
@@ -171,9 +171,9 @@ pub fn verify_magic_bytes<T: MagicBytes>(ext: T, file: &mut std::fs::File) -> Op
}
impl Extension {
pub fn resolve_conflicting(
pub async fn resolve_conflicting(
ext_str: &str,
file: &mut std::fs::File,
file: &mut File,
always_check_magic_bytes: bool,
) -> Option<Extension> {
let ext = match Extension::from_str(ext_str) {
@@ -187,11 +187,20 @@ impl Extension {
ExtensionPossibility::Known(e) => {
if always_check_magic_bytes {
match e {
Self::Image(x) => verify_magic_bytes(x, file).map(Self::Image),
Self::Audio(x) => verify_magic_bytes(x, file).map(Self::Audio),
Self::Video(x) => verify_magic_bytes(x, file).map(Self::Video),
Self::Executable(x) => verify_magic_bytes(x, file).map(Self::Executable),
_ => None,
Self::Image(x) => verify_magic_bytes(x, file).await.map(Self::Image),
Self::Audio(x) => verify_magic_bytes(x, file).await.map(Self::Audio),
Self::Video(x) => verify_magic_bytes(x, file).await.map(Self::Video),
Self::Archive(x) => verify_magic_bytes(x, file).await.map(Self::Archive),
Self::Executable(x) => {
verify_magic_bytes(x, file).await.map(Self::Executable)
}
Self::Font(x) => verify_magic_bytes(x, file).await.map(Self::Font),
Self::Encrypted(x) => {
verify_magic_bytes(x, file).await.map(Self::Encrypted)
}
Self::Mesh(x) => verify_magic_bytes(x, file).await.map(Self::Mesh),
Self::Database(x) => verify_magic_bytes(x, file).await.map(Self::Database),
_ => Some(e),
}
} else {
Some(e)
@@ -200,7 +209,9 @@ impl Extension {
ExtensionPossibility::Conflicts(ext) => match ext_str {
"ts" => {
let maybe_video_ext = if ext.iter().any(|e| matches!(e, Extension::Video(_))) {
verify_magic_bytes(VideoExtension::Ts, file).map(Extension::Video)
verify_magic_bytes(VideoExtension::Ts, file)
.await
.map(Extension::Video)
} else {
None
};

View File

@@ -16,7 +16,7 @@ pub struct Ctx {
type Router = rspc::Router<Arc<Mutex<Ctx>>>;
fn to_map(v: &impl serde::Serialize) -> serde_json::Map<String, Value> {
match to_value(&v).unwrap() {
match to_value(v).unwrap() {
Value::Object(m) => m,
_ => unreachable!(),
}
@@ -144,7 +144,7 @@ pub(crate) fn new() -> RouterBuilder<Arc<Mutex<Ctx>>> {
}
}
let mut array = hashmap.into_iter().map(|(_, v)| v).collect::<Vec<_>>();
let mut array = hashmap.into_values().collect::<Vec<_>>();
array.sort_by(|a, b| a.id.partial_cmp(&b.id).unwrap());

View File

@@ -1 +1 @@
<svg id="Calque_1" data-name="Calque 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><defs><style>.cls-1{fill:#3fb37f;}.cls-2{fill:#34475b;}</style></defs><polygon class="cls-1" points="1.5 3.5 8 13 14.5 3.5 11 3.5 8 8 5 3.5 1.5 3.5"/><polygon class="cls-2" points="5 3.5 11 3.5 8 8 5 3.5"/></svg>
<svg id="Calque_1" data-name="Calque 1" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><polygon fill="#3fb37f" points="1.5 3.5 8 13 14.5 3.5 11 3.5 8 8 5 3.5 1.5 3.5"/><polygon fill="#34475b" points="5 3.5 11 3.5 8 8 5 3.5"/></svg>

Before

Width:  |  Height:  |  Size: 307 B

After

Width:  |  Height:  |  Size: 239 B

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 73 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 68 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 65 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 66 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 75 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 67 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 65 KiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 45 KiB

View File

@@ -0,0 +1,75 @@
<svg width="247" height="312" viewBox="0 0 247 312" fill="none" xmlns="http://www.w3.org/2000/svg">
<g filter="url(#filter0_d_1_20)">
<path d="M221.988 72.0685C227.407 77.4872 230.451 84.8366 230.451 92.5V271.715C230.451 287.673 217.515 300.609 201.557 300.609H35.8945C19.9365 300.609 7 287.673 7 271.715L7.00001 40.5584C7.00001 24.6004 19.9366 11.6638 35.8946 11.6638L147.068 10.537C154.732 10.537 162.081 13.5812 167.5 19L221.988 72.0685Z" fill="url(#paint0_linear_1_20)"/>
<path d="M221.988 72.0685C227.407 77.4872 230.451 84.8366 230.451 92.5V271.715C230.451 287.673 217.515 300.609 201.557 300.609H35.8945C19.9365 300.609 7 287.673 7 271.715L7.00001 40.5584C7.00001 24.6004 19.9366 11.6638 35.8946 11.6638L147.068 10.537C154.732 10.537 162.081 13.5812 167.5 19L221.988 72.0685Z" fill="url(#paint1_linear_1_20)" fill-opacity="0.2"/>
<path d="M221.988 72.0685C227.407 77.4872 230.451 84.8366 230.451 92.5V271.715C230.451 287.673 217.515 300.609 201.557 300.609H35.8945C19.9365 300.609 7 287.673 7 271.715L7.00001 40.5584C7.00001 24.6004 19.9366 11.6638 35.8946 11.6638L147.068 10.537C154.732 10.537 162.081 13.5812 167.5 19L221.988 72.0685Z" stroke="url(#paint2_radial_1_20)" stroke-width="5"/>
</g>
<g filter="url(#filter1_d_1_20)">
<path d="M152 55.6936V9C159.404 10.2017 166.255 13.666 171.611 18.9172L197.516 44.3145L223.505 69.7941C228.757 74.9427 231.954 81.8272 232.5 89.1613H185.468C166.984 89.1613 152 74.1773 152 55.6936Z" fill="url(#paint3_linear_1_20)"/>
<path d="M153.5 55.6936V10.8087C159.929 12.1906 165.848 15.368 170.561 19.9883L196.466 45.3856L222.455 70.8652C227.06 75.3797 230.003 81.2983 230.839 87.6613H185.468C167.812 87.6613 153.5 73.3489 153.5 55.6936Z" stroke="url(#paint4_radial_1_20)" stroke-width="3"/>
</g>
<g filter="url(#filter2_d_1_20)">
<path d="M152 55.6936V9C159.404 10.2017 166.255 13.666 171.611 18.9172L197.516 44.3145L223.505 69.7941C228.757 74.9427 231.954 81.8272 232.5 89.1613H185.468C166.984 89.1613 152 74.1773 152 55.6936Z" fill="url(#paint5_linear_1_20)"/>
<path d="M153.5 55.6936V10.8087C159.929 12.1906 165.848 15.368 170.561 19.9883L196.466 45.3856L222.455 70.8652C227.06 75.3797 230.003 81.2983 230.839 87.6613H185.468C167.812 87.6613 153.5 73.3489 153.5 55.6936Z" stroke="url(#paint6_radial_1_20)" stroke-width="3"/>
</g>
<defs>
<filter id="filter0_d_1_20" x="0.5" y="8.03699" width="236.451" height="303.072" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dy="4"/>
<feGaussianBlur stdDeviation="2"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.22 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1_20"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1_20" result="shape"/>
</filter>
<filter id="filter1_d_1_20" x="128" y="0" width="118.5" height="118.161" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-5" dy="10"/>
<feGaussianBlur stdDeviation="9.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1_20"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1_20" result="shape"/>
</filter>
<filter id="filter2_d_1_20" x="128" y="0" width="118.5" height="118.161" filterUnits="userSpaceOnUse" color-interpolation-filters="sRGB">
<feFlood flood-opacity="0" result="BackgroundImageFix"/>
<feColorMatrix in="SourceAlpha" type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0" result="hardAlpha"/>
<feOffset dx="-5" dy="10"/>
<feGaussianBlur stdDeviation="9.5"/>
<feComposite in2="hardAlpha" operator="out"/>
<feColorMatrix type="matrix" values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0"/>
<feBlend mode="normal" in2="BackgroundImageFix" result="effect1_dropShadow_1_20"/>
<feBlend mode="normal" in="SourceGraphic" in2="effect1_dropShadow_1_20" result="shape"/>
</filter>
<linearGradient id="paint0_linear_1_20" x1="119" y1="249" x2="119" y2="301" gradientUnits="userSpaceOnUse">
<stop stop-color="#252633"/>
<stop offset="1" stop-color="#20212A"/>
</linearGradient>
<linearGradient id="paint1_linear_1_20" x1="25.5" y1="28.5" x2="201" y2="80" gradientUnits="userSpaceOnUse">
<stop stop-color="#36364F"/>
<stop offset="1" stop-opacity="0"/>
</linearGradient>
<radialGradient id="paint2_radial_1_20" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(83 109) rotate(81.8057) scale(252.579 194.569)">
<stop stop-color="#4A496B"/>
<stop offset="0.75" stop-color="#20212D"/>
</radialGradient>
<linearGradient id="paint3_linear_1_20" x1="206.045" y1="37.6468" x2="152.845" y2="80.9321" gradientUnits="userSpaceOnUse">
<stop stop-color="#313347"/>
<stop offset="1" stop-color="#20222F"/>
</linearGradient>
<radialGradient id="paint4_radial_1_20" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(138.5 72.5) rotate(-22.0564) scale(62.58 62.8444)">
<stop stop-color="#464967"/>
<stop offset="1" stop-opacity="0"/>
</radialGradient>
<linearGradient id="paint5_linear_1_20" x1="206.045" y1="37.6468" x2="152.845" y2="80.9321" gradientUnits="userSpaceOnUse">
<stop stop-color="#313347"/>
<stop offset="1" stop-color="#20222F"/>
</linearGradient>
<radialGradient id="paint6_radial_1_20" cx="0" cy="0" r="1" gradientUnits="userSpaceOnUse" gradientTransform="translate(138.5 72.5) rotate(-22.0564) scale(62.58 62.8444)">
<stop stop-color="#464967"/>
<stop offset="1" stop-opacity="0"/>
</radialGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 5.8 KiB

View File

@@ -4,7 +4,7 @@
export type Procedures = {
queries:
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "files.readMetadata", input: LibraryArgs<number>, result: null } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number, cas_id: string, integrity_checksum: string | null, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath>, media_data: MediaData | null } | null } |
{ key: "jobs.getHistory", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "jobs.getRunning", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "jobs.isRunning", input: LibraryArgs<null>, result: boolean } |
@@ -37,6 +37,7 @@ export type Procedures = {
{ key: "files.encryptFiles", input: LibraryArgs<FileEncryptorJobInit>, result: null } |
{ key: "files.setFavorite", input: LibraryArgs<SetFavoriteArgs>, result: null } |
{ key: "files.setNote", input: LibraryArgs<SetNoteArgs>, result: null } |
{ key: "jobs.clearAll", input: LibraryArgs<null>, result: null } |
{ key: "jobs.generateThumbsForLocation", input: LibraryArgs<GenerateThumbsForLocationArgs>, result: null } |
{ key: "jobs.identifyUniqueFiles", input: LibraryArgs<IdentifyUniqueFilesArgs>, result: null } |
{ key: "jobs.objectValidator", input: LibraryArgs<ObjectValidatorArgs>, result: null } |
@@ -58,12 +59,14 @@ export type Procedures = {
{ key: "library.create", input: string, result: LibraryConfigWrapped } |
{ key: "library.delete", input: string, result: null } |
{ key: "library.edit", input: EditLibraryArgs, result: null } |
{ key: "locations.addLibrary", input: LibraryArgs<LocationCreateArgs>, result: null } |
{ key: "locations.create", input: LibraryArgs<LocationCreateArgs>, result: null } |
{ key: "locations.delete", input: LibraryArgs<number>, result: null } |
{ key: "locations.fullRescan", input: LibraryArgs<number>, result: null } |
{ key: "locations.indexer_rules.create", input: LibraryArgs<IndexerRuleCreateArgs>, result: IndexerRule } |
{ key: "locations.indexer_rules.delete", input: LibraryArgs<number>, result: null } |
{ key: "locations.quickRescan", input: LibraryArgs<null>, result: null } |
{ key: "locations.relink", input: LibraryArgs<string>, result: null } |
{ key: "locations.update", input: LibraryArgs<LocationUpdateArgs>, result: null } |
{ key: "tags.assign", input: LibraryArgs<TagAssignArgs>, result: null } |
{ key: "tags.create", input: LibraryArgs<TagCreateArgs>, result: Tag } |
@@ -98,6 +101,8 @@ export interface FilePath { id: number, is_dir: boolean, location_id: number, ma
export interface GenerateThumbsForLocationArgs { id: number, path: string }
export interface GetArgs { id: number }
export type HashingAlgorithm = { Argon2id: Params }
export interface IdentifyUniqueFilesArgs { id: number, path: string }
@@ -132,6 +137,8 @@ export interface LocationUpdateArgs { id: number, name: string | null, indexer_r
export interface MasterPasswordChangeArgs { password: string, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm }
export interface MediaData { id: number, pixel_width: number | null, pixel_height: number | null, longitude: number | null, latitude: number | null, fps: number | null, capture_device_make: string | null, capture_device_model: string | null, capture_device_software: string | null, duration_seconds: number | null, codecs: string | null, streams: number | null }
export interface Node { id: number, pub_id: Array<number>, name: string, platform: number, version: string | null, last_seen: string, timezone: string | null, date_created: string }
export interface NodeConfig { version: string | null, id: string, name: string, p2p_port: number | null }

View File

@@ -21,6 +21,7 @@ export function AppLayout() {
className={clsx(
// App level styles
'flex h-screen overflow-hidden text-ink select-none cursor-default',
os === 'browser' && 'bg-app border-t border-app-line/50',
os === 'macOS' && 'rounded-[10px] has-blur-effects',
os !== 'browser' && os !== 'windows' && 'border border-app-frame'
)}

View File

@@ -0,0 +1,48 @@
import { LocationCreateArgs, useBridgeMutation, useLibraryMutation } from '@sd/client';
import { Input } from '@sd/ui';
import { Dialog } from '@sd/ui';
import { useQueryClient } from '@tanstack/react-query';
import { PropsWithChildren, useState } from 'react';
/**
 * Dialog that prompts the user for an absolute directory path to add as a
 * new location. Used on the browser build of Spacedrive, where no native
 * folder picker is available, so the path must be typed manually.
 *
 * Props:
 * - open / setOpen: controlled visibility of the dialog.
 * - onSubmit: optional callback — NOTE(review): currently accepted but never
 *   invoked; confirm whether callers rely on it before wiring it up.
 */
export default function AddLocationDialog({
	children,
	onSubmit,
	open,
	setOpen
}: PropsWithChildren<{ onSubmit?: () => void; open: boolean; setOpen: (state: boolean) => void }>) {
	// Start empty: the previous hard-coded developer test path was explicitly
	// flagged "BEFORE MERGE: Remove default value" and must not ship.
	const [locationUrl, setLocationUrl] = useState('');

	const createLocation = useLibraryMutation('locations.create', {
		// Close the dialog once the location has been created successfully.
		onSuccess: () => setOpen(false)
	});

	return (
		<Dialog
			open={open}
			setOpen={setOpen}
			title="Add Location URL"
			description="As you are using the browser version of Spacedrive you will (for now) need to specify an absolute URL of a directory local to the remote node."
			ctaAction={() =>
				createLocation.mutate({
					path: locationUrl,
					indexer_rules_ids: []
				} as LocationCreateArgs)
			}
			loading={createLocation.isLoading}
			// Require a non-empty path before the CTA can be submitted.
			submitDisabled={!locationUrl}
			ctaLabel="Add"
			trigger={null}
		>
			<Input
				className="flex-grow w-full mt-3"
				value={locationUrl}
				placeholder="/Users/jamie/Movies"
				onChange={(e) => setLocationUrl(e.target.value)}
				required
			/>
		</Dialog>
	);
}

View File

@@ -1,12 +1,12 @@
import { ExplorerData, rspc, useCurrentLibrary } from '@sd/client';
import { useEffect, useState } from 'react';
import { useExplorerStore } from '../../util/explorerStore';
import { useExplorerStore } from '../../hooks/useExplorerStore';
import { AlertDialog, GenericAlertDialogState } from '../dialog/AlertDialog';
import { DecryptFileDialog } from '../dialog/DecryptFileDialog';
import { EncryptFileDialog } from '../dialog/EncryptFileDialog';
import { Inspector } from '../explorer/Inspector';
import ExplorerContextMenu from './ExplorerContextMenu';
import { ExplorerContextMenu } from './ExplorerContextMenu';
import { TopBar } from './ExplorerTopBar';
import { VirtualizedList } from './VirtualizedList';
@@ -47,15 +47,11 @@ export default function Explorer(props: Props) {
return (
<>
<div className="relative">
<ExplorerContextMenu
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
>
<ExplorerContextMenu>
<div className="relative flex flex-col w-full">
<TopBar showSeparator={separateTopBar} />
<div className="relative flex flex-row w-full max-h-full app-background ">
<div className="relative flex flex-row w-full max-h-full app-background">
{props.data && (
<VirtualizedList
data={props.data.items || []}
@@ -68,6 +64,9 @@ export default function Explorer(props: Props) {
};
});
}}
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
/>
)}
{expStore.showInspector && (

View File

@@ -1,23 +1,26 @@
import { useLibraryMutation, useLibraryQuery } from '@sd/client';
import { ExplorerItem, useLibraryMutation, useLibraryQuery } from '@sd/client';
import { ContextMenu as CM } from '@sd/ui';
import {
ArrowBendUpRight,
Image,
LockSimple,
LockSimpleOpen,
Package,
Plus,
Repeat,
Share,
ShieldCheck,
TagSimple,
Trash,
TrashSimple
} from 'phosphor-react';
import { PropsWithChildren, useMemo } from 'react';
import { getExplorerStore } from '../../hooks/useExplorerStore';
import { useOperatingSystem } from '../../hooks/useOperatingSystem';
import { usePlatform } from '../../util/Platform';
import { getExplorerStore } from '../../util/explorerStore';
import { GenericAlertDialogProps } from '../dialog/AlertDialog';
import { EncryptFileDialog } from '../dialog/EncryptFileDialog';
import { isObject } from './utils';
const AssignTagMenuItems = (props: { objectId: number }) => {
const tags = useLibraryQuery(['tags.list'], { suspense: true });
@@ -59,18 +62,9 @@ const AssignTagMenuItems = (props: { objectId: number }) => {
);
};
export interface ExplorerContextMenuProps extends PropsWithChildren {
setShowEncryptDialog: (isShowing: boolean) => void;
setShowDecryptDialog: (isShowing: boolean) => void;
setAlertDialogData: (data: GenericAlertDialogProps) => void;
}
export default function ExplorerContextMenu(props: ExplorerContextMenuProps) {
const store = getExplorerStore();
// const { mutate: generateThumbsForLocation } = useLibraryMutation(
// 'jobs.generateThumbsForLocation'
// );
function OpenInNativeExplorer() {
const platform = usePlatform();
const store = getExplorerStore();
const os = useOperatingSystem();
const osFileBrowserName = useMemo(() => {
@@ -81,6 +75,93 @@ export default function ExplorerContextMenu(props: ExplorerContextMenuProps) {
}
}, [os]);
return (
<>
{platform.openPath && (
<CM.Item
label={`Open in ${osFileBrowserName}`}
keybind="⌘Y"
onClick={() => {
alert('TODO: Open in FS');
// console.log('TODO', store.contextMenuActiveItem);
// platform.openPath!('/Users/oscar/Desktop'); // TODO: Work out the file path from the backend
}}
/>
)}
</>
);
}
export function ExplorerContextMenu(props: PropsWithChildren) {
const store = getExplorerStore();
const generateThumbsForLocation = useLibraryMutation('jobs.generateThumbsForLocation');
const objectValidator = useLibraryMutation('jobs.objectValidator');
const rescanLocation = useLibraryMutation('locations.fullRescan');
return (
<div className="relative">
<CM.ContextMenu trigger={props.children}>
<OpenInNativeExplorer />
<CM.Separator />
<CM.Item
label="Share"
icon={Share}
onClick={(e) => {
e.preventDefault();
navigator.share?.({
title: 'Spacedrive',
text: 'Check out this cool app',
url: 'https://spacedrive.com'
});
}}
/>
<CM.Separator />
<CM.Item
onClick={() => store.locationId && rescanLocation.mutate(store.locationId)}
label="Re-index"
icon={Repeat}
/>
<CM.SubMenu label="More actions..." icon={Plus}>
<CM.Item
onClick={() =>
store.locationId &&
generateThumbsForLocation.mutate({ id: store.locationId, path: '' })
}
label="Regen Thumbnails"
icon={Image}
/>
<CM.Item
onClick={() =>
store.locationId && objectValidator.mutate({ id: store.locationId, path: '' })
}
label="Generate Checksums"
icon={ShieldCheck}
/>
</CM.SubMenu>
<CM.Separator />
</CM.ContextMenu>
</div>
);
}
export interface FileItemContextMenuProps extends PropsWithChildren {
item: ExplorerItem;
setShowEncryptDialog: (isShowing: boolean) => void;
setShowDecryptDialog: (isShowing: boolean) => void;
setAlertDialogData: (data: GenericAlertDialogProps) => void;
}
export function FileItemContextMenu(props: FileItemContextMenuProps) {
const objectData = props.item ? (isObject(props.item) ? props.item : props.item.object) : null;
const hasMasterPasswordQuery = useLibraryQuery(['keys.hasMasterPassword']);
const hasMasterPassword =
hasMasterPasswordQuery.data !== undefined && hasMasterPasswordQuery.data === true
@@ -100,16 +181,7 @@ export default function ExplorerContextMenu(props: ExplorerContextMenuProps) {
<CM.Separator />
<CM.Item label="Quick view" keybind="␣" />
{platform.openPath && (
<CM.Item
label={`Open in ${osFileBrowserName}`}
keybind="⌘Y"
onClick={() => {
console.log('TODO', store.contextMenuActiveObject);
platform.openPath!('/Users/oscar/Desktop'); // TODO: Work out the file path from the backend
}}
/>
)}
<OpenInNativeExplorer />
<CM.Separator />
@@ -134,11 +206,10 @@ export default function ExplorerContextMenu(props: ExplorerContextMenuProps) {
<CM.Separator />
{store.contextMenuObjectId && (
<CM.SubMenu label="Assign tag" icon={TagSimple}>
<AssignTagMenuItems objectId={store.contextMenuObjectId} />
</CM.SubMenu>
)}
<CM.SubMenu label="Assign tag" icon={TagSimple}>
<AssignTagMenuItems objectId={objectData?.id || 0} />
</CM.SubMenu>
<CM.SubMenu label="More actions..." icon={Plus}>
<CM.Item
label="Encrypt"

View File

@@ -1,9 +1,13 @@
import { useLibraryMutation } from '@sd/client';
import { Button, Input, OverlayPanel, cva, tw } from '@sd/ui';
import clsx from 'clsx';
import {
ArrowsClockwise,
CaretLeft,
CaretRight,
ClockCounterClockwise,
Columns,
HourglassSimple,
IconProps,
Key,
List,
@@ -17,8 +21,8 @@ import { forwardRef, useEffect, useRef } from 'react';
import { useForm } from 'react-hook-form';
import { useNavigate } from 'react-router-dom';
import { getExplorerStore, useExplorerStore } from '../../hooks/useExplorerStore';
import { useOperatingSystem } from '../../hooks/useOperatingSystem';
import { getExplorerStore, useExplorerStore } from '../../util/explorerStore';
import { KeybindEvent } from '../../util/keybind';
import { KeyManager } from '../key/KeyManager';
import { Shortcut } from '../primitive/Shortcut';
@@ -123,12 +127,6 @@ export const TopBar: React.FC<TopBarProps> = (props) => {
const store = useExplorerStore();
// const { mutate: generateThumbsForLocation } = useLibraryMutation(
// 'jobs.generateThumbsForLocation'
// );
// const { mutate: identifyUniqueFiles } = useLibraryMutation('jobs.identifyUniqueFiles');
// const { mutate: objectValidator } = useLibraryMutation('jobs.objectValidator');
const navigate = useNavigate();
//create function to focus on search box when cmd+k is pressed
@@ -193,7 +191,7 @@ export const TopBar: React.FC<TopBarProps> = (props) => {
<div
data-tauri-drag-region
className={clsx(
'flex h-[2.95rem] -mt-0.5 max-w z-10 pl-3 flex-shrink-0 items-center border-transparent border-b bg-app overflow-hidden transition-[background-color] transition-[border-color] duration-250 ease-out',
'flex h-[46px] max-w z-20 pl-3 flex-shrink-0 items-center border-transparent border-b bg-app overflow-hidden transition-[background-color] transition-[border-color] duration-250 ease-out',
props.showSeparator && 'top-bar-blur !bg-app/90'
)}
>
@@ -236,6 +234,24 @@ export const TopBar: React.FC<TopBarProps> = (props) => {
<Rows className={TOP_BAR_ICON_STYLE} />
</TopBarButton>
</Tooltip>
<Tooltip label="Columns view">
<TopBarButton
rounding="none"
active={store.layoutMode === 'columns'}
onClick={() => (getExplorerStore().layoutMode = 'columns')}
>
<Columns className={TOP_BAR_ICON_STYLE} />
</TopBarButton>
</Tooltip>
{/* <Tooltip label="Timeline view">
<TopBarButton
rounding="none"
active={store.layoutMode === 'timeline'}
onClick={() => (getExplorerStore().layoutMode = 'timeline')}
>
<ClockCounterClockwise className={TOP_BAR_ICON_STYLE} />
</TopBarButton>
</Tooltip> */}
<Tooltip label="Media view">
<TopBarButton
@@ -271,13 +287,18 @@ export const TopBar: React.FC<TopBarProps> = (props) => {
active={store.tagAssignMode}
>
<Tag
weight={store.tagAssignMode ? 'fill' : 'bold'}
weight={store.tagAssignMode ? 'fill' : 'regular'}
className={TOP_BAR_ICON_STYLE}
/>
</TopBarButton>
</Tooltip>
<Tooltip label="Refresh">
<TopBarButton>
<Tooltip label="Regenerate thumbs (temp)">
<TopBarButton
// onClick={() =>
// store.locationId &&
// generateThumbsForLocation.mutate({ id: store.locationId, path: '' })
// }
>
<ArrowsClockwise className={TOP_BAR_ICON_STYLE} />
</TopBarButton>
</Tooltip>

View File

@@ -3,7 +3,10 @@ import { cva, tw } from '@sd/ui';
import clsx from 'clsx';
import { HTMLAttributes } from 'react';
import { getExplorerStore } from '../../util/explorerStore';
import { getExplorerStore } from '../../hooks/useExplorerStore';
import { ObjectKind } from '../../util/kind';
import { GenericAlertDialogProps } from '../dialog/AlertDialog';
import { FileItemContextMenu } from './ExplorerContextMenu';
import FileThumb from './FileThumb';
import { isObject } from './utils';
@@ -24,64 +27,81 @@ interface Props extends HTMLAttributes<HTMLDivElement> {
data: ExplorerItem;
selected: boolean;
index: number;
setShowEncryptDialog: (isShowing: boolean) => void;
setShowDecryptDialog: (isShowing: boolean) => void;
setAlertDialogData: (data: GenericAlertDialogProps) => void;
}
function FileItem({ data, selected, index, ...rest }: Props) {
function FileItem({
data,
selected,
index,
setShowEncryptDialog,
setShowDecryptDialog,
setAlertDialogData,
...rest
}: Props) {
const objectData = data ? (isObject(data) ? data : data.object) : null;
const isVid = isVideoExt(data.extension || '');
return (
<div
onContextMenu={(e) => {
const objectId = isObject(data) ? data.id : data.object?.id;
if (objectId != undefined) {
getExplorerStore().contextMenuObjectId = objectId;
if (index != undefined) {
getExplorerStore().selectedRowIndex = index;
getExplorerStore().contextMenuActiveObject = isObject(data) ? data : data.object;
}
}
}}
{...rest}
draggable
className={clsx('inline-block w-[100px] mb-3', rest.className)}
<FileItemContextMenu
item={data}
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
>
<div
style={{ width: getExplorerStore().gridItemSize, height: getExplorerStore().gridItemSize }}
className={clsx(
'border-2 border-transparent rounded-lg text-center mb-1 active:translate-y-[1px]',
{
'bg-app-selected/30': selected
onContextMenu={(e) => {
if (index != undefined) {
getExplorerStore().selectedRowIndex = index;
}
)}
}}
{...rest}
draggable
className={clsx('inline-block w-[100px] mb-3', rest.className)}
>
<div
style={{
width: getExplorerStore().gridItemSize,
height: getExplorerStore().gridItemSize
}}
className={clsx(
'flex relative items-center justify-center h-full p-1 rounded border-transparent border-2 shrink-0'
'border-2 border-transparent rounded-lg text-center mb-1 active:translate-y-[1px]',
{
'bg-app-selected/30': selected
}
)}
>
<FileThumb
<div
className={clsx(
'border-4 border-white shadow shadow-black/40 object-cover max-w-full max-h-full w-auto overflow-hidden',
isVid && '!border-black rounded border-x-0 border-y-[9px]'
'flex relative items-center justify-center h-full p-1 rounded border-transparent border-2 shrink-0'
)}
data={data}
kind={data.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
size={getExplorerStore().gridItemSize}
/>
{data?.extension && isVid && (
<div className="absolute bottom-4 font-semibold opacity-70 right-2 py-0.5 px-1 text-[9px] uppercase bg-black/60 rounded">
{data.extension}
</div>
)}
>
<FileThumb
className={clsx(
'border-2 border-app-line rounded-sm shadow shadow-black/40 object-cover max-w-full max-h-full w-auto overflow-hidden',
isVid && '!border-black rounded border-x-0 border-y-[7px]'
)}
data={data}
kind={data.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
size={getExplorerStore().gridItemSize}
/>
{data?.extension && isVid && (
<div className="absolute bottom-4 font-semibold opacity-70 right-2 py-0.5 px-1 text-[9px] uppercase bg-black/60 rounded">
{data.extension}
</div>
)}
</div>
</div>
<NameArea>
<span className={nameContainerStyles({ selected })}>
{data?.name}
{data?.extension && `.${data.extension}`}
</span>
</NameArea>
</div>
<NameArea>
<span className={nameContainerStyles({ selected })}>
{data?.name}
{data?.extension && `.${data.extension}`}
</span>
</NameArea>
</div>
</FileItemContextMenu>
);
}

View File

@@ -1,11 +1,14 @@
import videoSvg from '@sd/assets/svgs/video.svg';
import zipSvg from '@sd/assets/svgs/zip.svg';
import archive from '@sd/assets/images/Archive.png';
import documentPdf from '@sd/assets/images/Document_pdf.png';
import executable from '@sd/assets/images/Executable.png';
import file from '@sd/assets/images/File.png';
import video from '@sd/assets/images/Video.png';
import { ExplorerItem } from '@sd/client';
import clsx from 'clsx';
import { Suspense, lazy, useMemo } from 'react';
import { useExplorerStore } from '../../hooks/useExplorerStore';
import { usePlatform } from '../../util/Platform';
import { useExplorerStore } from '../../util/explorerStore';
import { Folder } from '../icons/Folder';
import { isObject, isPath } from './utils';
@@ -15,7 +18,7 @@ interface Props {
className?: string;
style?: React.CSSProperties;
iconClassNames?: string;
kind?: 'video' | 'image' | 'audio' | 'zip' | 'other';
kind?: string;
}
const icons = import.meta.glob('../../../../assets/icons/*.svg');
@@ -58,55 +61,12 @@ export default function FileThumb({ data, ...props }: Props) {
/>
);
let icon = file;
// Hacky (and temporary) way to integrate thumbnails
if (props.kind === 'video') {
return (
<img src={videoSvg} className={clsx('w-full overflow-hidden h-full', props.iconClassNames)} />
);
}
if (props.kind === 'zip') {
return <img src={zipSvg} className={clsx('w-full overflow-hidden h-full')} />;
}
if (props.kind === 'Archive') icon = archive;
else if (props.kind === 'Video') icon = video;
else if (props.kind === 'Document' && data.extension === 'pdf') icon = documentPdf;
else if (props.kind === 'Executable') icon = executable;
// return default file icon
return (
<div
style={{ width: props.size * 0.8, height: props.size * 0.8 }}
className="relative m-auto transition duration-200 "
>
<svg
// BACKGROUND
className="absolute -translate-x-1/2 -translate-y-1/2 pointer-events-none top-1/2 left-1/2 fill-app-box"
width="100%"
height="100%"
viewBox="0 0 65 81"
style={{ filter: 'drop-shadow(0px 2px 1px rgb(0 0 0 / 0.15))' }}
>
<path d="M0 8C0 3.58172 3.58172 0 8 0H39.6863C41.808 0 43.8429 0.842855 45.3431 2.34315L53.5 10.5L62.6569 19.6569C64.1571 21.1571 65 23.192 65 25.3137V73C65 77.4183 61.4183 81 57 81H8C3.58172 81 0 77.4183 0 73V8Z" />
</svg>
{Icon && (
<div className="absolute flex flex-col items-center justify-center w-full h-full mt-0.5 ">
<Suspense fallback={<></>}>
<Icon
className={clsx('w-full h-full ')}
style={{ width: props.size * 0.45, height: props.size * 0.45 }}
/>
</Suspense>
<span className="text-xs font-bold text-center uppercase cursor-default text-gray-450">
{data.extension}
</span>
</div>
)}
<svg
// PEEL
width="28%"
height="28%"
className="absolute top-0 right-0 -translate-x-[40%] z-0 pointer-events-none fill-app-selected"
viewBox="0 0 40 40"
style={{ filter: 'drop-shadow(-3px 1px 1px rgb(0 0 0 / 0.05))' }}
>
<path d="M41.4116 40.5577H11.234C5.02962 40.5577 0 35.5281 0 29.3238V0L41.4116 40.5577Z" />
</svg>
</div>
);
return <img src={icon} className={clsx('overflow-hidden h-full', props.iconClassNames)} />;
}

View File

@@ -1,22 +1,36 @@
// import types from '../../constants/file-types.json';
import { useLibraryQuery } from '@sd/client';
import { ExplorerContext, ExplorerItem } from '@sd/client';
import { Button } from '@sd/ui';
import { useQuery } from '@tanstack/react-query';
import { ExplorerContext, ExplorerItem, useLibraryQuery } from '@sd/client';
import { Button, tw } from '@sd/ui';
import clsx from 'clsx';
import dayjs from 'dayjs';
import { Link, Share } from 'phosphor-react';
import { Barcode, CircleWavyCheck, Clock, Cube, Link, Lock, Snowflake } from 'phosphor-react';
import { useEffect, useState } from 'react';
import { ObjectKind } from '../../util/kind';
import { DefaultProps } from '../primitive/types';
import { Tooltip } from '../tooltip/Tooltip';
import FileThumb from './FileThumb';
import { Divider } from './inspector/Divider';
import FavoriteButton from './inspector/FavoriteButton';
import { MetaItem } from './inspector/MetaItem';
import Note from './inspector/Note';
import { isObject } from './utils';
// Small rounded badge used in the inspector for kind/extension/tag labels.
export const InfoPill = tw.span`inline border border-transparent px-1 text-[11px] font-medium shadow shadow-app-shade/5 bg-app-selected rounded-md text-ink-dull`;
// Dashed, muted variant of InfoPill used as an "Add Tag" affordance.
export const PlaceholderPill = tw.span`inline border px-1 text-[11px] shadow shadow-app-shade/10 rounded-md bg-transparent border-dashed border-app-active transition hover:text-ink-faint hover:border-ink-faint font-medium text-ink-faint/70`;
// Vertical section wrapper for a group of metadata rows in the inspector.
export const MetaContainer = tw.div`flex flex-col px-4 py-1.5`;
// Heading for a metadata section (e.g. "URI").
export const MetaTitle = tw.h5`text-xs font-bold`;
// Value text for a metadata row; truncates long content.
export const MetaValue = tw.p`text-xs break-all text-ink truncate`;
// Single icon + label + value line inside a metadata section.
const MetaTextLine = tw.div`flex items-center my-0.5 text-xs text-ink-dull`;
const InspectorIcon = ({ component: Icon, ...props }: any) => (
<Icon weight="bold" {...props} className={clsx('mr-2 flex-shrink-0', props.className)} />
);
interface Props extends DefaultProps<HTMLDivElement> {
context?: ExplorerContext;
data?: ExplorerItem;
@@ -25,14 +39,10 @@ interface Props extends DefaultProps<HTMLDivElement> {
export const Inspector = (props: Props) => {
const { context, data, ...elementProps } = props;
const { data: types } = useQuery(
['_file-types'],
() => import('../../constants/file-types.json')
);
const is_dir = props.data?.type === 'Path' ? props.data.is_dir : false;
const objectData = props.data ? (isObject(props.data) ? props.data : props.data.object) : null;
const isDir = props.data?.type === 'Path' ? props.data.is_dir : false;
// this prevents the inspector from fetching data when the user is navigating quickly
const [readyToFetch, setReadyToFetch] = useState(false);
@@ -48,114 +58,135 @@ export const Inspector = (props: Props) => {
enabled: readyToFetch
});
const isVid = isVideo(props.data?.extension || '');
const fullObjectData = useLibraryQuery(['files.get', { id: objectData?.id || -1 }], {
enabled: readyToFetch && objectData?.id !== undefined
});
return (
<div
{...elementProps}
className="-mt-[50px] pt-[55px] pl-1.5 pr-1 w-full h-screen overflow-x-hidden custom-scroll inspector-scroll pb-[55px]"
className="-mt-[50px] pt-[55px] z-10 pl-1.5 pr-1 w-full h-screen overflow-x-hidden custom-scroll inspector-scroll pb-4"
>
{!!props.data && (
<>
<div className="flex bg-sidebar items-center justify-center w-full h-64 mb-[10px] overflow-hidden rounded-lg ">
<div
className={clsx(
'flex h-52 items-center justify-center w-full mb-[10px] overflow-hidden rounded-lg',
objectData?.kind === 7 && objectData?.has_thumbnail && 'bg-black'
)}
>
<FileThumb
iconClassNames="mx-10"
iconClassNames="my-3 max-h-[150px]"
size={230}
kind={props.data.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
className="!m-0 flex bg-green-500 flex-shrink flex-grow-0"
kind={ObjectKind[objectData?.kind || 0]}
className="flex flex-grow-0 flex-shrink bg-green-500"
data={props.data}
/>
</div>
<div className="flex flex-col w-full pt-0.5 pb-1 overflow-hidden bg-app-box rounded-lg select-text shadow-app-shade/10 border border-app-line">
<h3 className="pt-2 pb-1 pl-3 text-base font-bold">
<div className="flex flex-col w-full pt-0.5 pb-0.5 overflow-hidden bg-app-box rounded-lg select-text shadow-app-shade/10 border border-app-line">
<h3 className="px-3 pt-2 pb-1 text-base font-bold truncate">
{props.data?.name}
{props.data?.extension && `.${props.data.extension}`}
</h3>
{objectData && (
<div className="flex flex-row mt-1 mx-3 space-x-0.5">
<div className="flex flex-row mt-1 mb-0.5 mx-3 space-x-0.5">
<Tooltip label="Favorite">
<FavoriteButton data={objectData} />
</Tooltip>
<Tooltip label="Share">
<Tooltip label="Encrypt">
<Button size="icon">
<Share className="w-[18px] h-[18px]" />
<Lock className="w-[18px] h-[18px]" />
</Button>
</Tooltip>
<Tooltip label="Link">
<Tooltip label="Share">
<Button size="icon">
<Link className="w-[18px] h-[18px]" />
</Button>
</Tooltip>
</div>
)}
{tags?.data && tags.data.length > 0 && (
<>
<Divider />
<MetaItem
value={
<div className="flex flex-wrap gap-1.5">
{tags?.data?.map((tag) => (
<div
// onClick={() => setSelectedTag(tag.id === selectedTag ? null : tag.id)}
key={tag.id}
className={clsx(
'flex items-center rounded px-1.5 py-0.5'
// selectedTag === tag.id && 'ring'
)}
style={{ backgroundColor: tag.color + 'CC' }}
>
<span className="text-xs text-white drop-shadow-md">{tag.name}</span>
</div>
))}
</div>
}
/>
</>
)}
{props.context?.type == 'Location' && props.data?.type === 'Path' && (
<>
<Divider />
<MetaItem
title="URI"
value={`${props.context.local_path}/${props.data.materialized_path}`}
/>
</>
<MetaContainer>
<MetaTitle>URI</MetaTitle>
<MetaValue>{`${props.context.local_path}/${props.data.materialized_path}`}</MetaValue>
</MetaContainer>
)}
<Divider />
<MetaItem
title="Date Created"
value={dayjs(props.data?.date_created).format('MMMM Do YYYY, h:mm:ss a')}
/>
<Divider />
<MetaItem
title="Date Indexed"
value={dayjs(props.data?.date_indexed).format('MMMM Do YYYY, h:mm:ss a')}
/>
{!is_dir && (
<>
<Divider />
<div className="flex flex-row items-center px-3 py-2 meta-item">
{props.data?.extension && (
<span className="inline px-1 mr-1 text-xs font-bold uppercase bg-gray-500 rounded-md text-gray-150">
{props.data?.extension}
</span>
)}
<p className="text-xs text-gray-600 break-all truncate dark:text-gray-300">
{props.data?.extension
? //@ts-ignore
types[props.data.extension.toUpperCase()]?.descriptions.join(' / ')
: 'Unknown'}
</p>
{
<MetaContainer>
<div className="flex flex-wrap gap-1">
<InfoPill>{isDir ? 'Folder' : ObjectKind[objectData?.kind || 0]}</InfoPill>
{props.data.extension && <InfoPill>{props.data.extension}</InfoPill>}
{tags?.data?.map((tag) => (
<InfoPill
className="!text-white"
key={tag.id}
style={{ backgroundColor: tag.color + 'CC' }}
>
{tag.name}
</InfoPill>
))}
<PlaceholderPill>Add Tag</PlaceholderPill>
</div>
{objectData && (
<>
<Note data={objectData} />
<Divider />
{objectData.cas_id && (
<MetaItem title="Unique Content ID" value={objectData.cas_id} />
)}
</>
)}
</MetaContainer>
}
<Divider />
<MetaContainer className="!flex-row space-x-2">
<MetaTextLine>
<InspectorIcon component={Cube} />
<span className="mr-1.5">Size</span>
<MetaValue>{formatBytes(Number(objectData?.size_in_bytes || 0))}</MetaValue>
</MetaTextLine>
{fullObjectData.data?.media_data?.duration_seconds && (
<MetaTextLine>
<InspectorIcon component={Clock} />
<span className="mr-1.5">Duration</span>
<MetaValue>{fullObjectData.data.media_data.duration_seconds}</MetaValue>
</MetaTextLine>
)}
</MetaContainer>
<Divider />
<MetaContainer>
<Tooltip label={dayjs(props.data?.date_created).format('h:mm:ss a')}>
<MetaTextLine>
<InspectorIcon component={Clock} />
<span className="mr-1.5">Created</span>
<MetaValue>{dayjs(props.data?.date_created).format('MMM Do YYYY')}</MetaValue>
</MetaTextLine>
</Tooltip>
<Tooltip label={dayjs(props.data?.date_created).format('h:mm:ss a')}>
<MetaTextLine>
<InspectorIcon component={Barcode} />
<span className="mr-1.5">Indexed</span>
<MetaValue>{dayjs(props.data?.date_indexed).format('MMM Do YYYY')}</MetaValue>
</MetaTextLine>
</Tooltip>
</MetaContainer>
{!is_dir && objectData && (
<>
<Note data={objectData} />
<Divider />
<MetaContainer>
<Tooltip label={objectData?.cas_id || ''}>
<MetaTextLine>
<InspectorIcon component={Snowflake} />
<span className="mr-1.5">Content ID</span>
<MetaValue>{objectData?.cas_id || ''}</MetaValue>
</MetaTextLine>
</Tooltip>
{objectData?.integrity_checksum && (
<Tooltip label={objectData?.integrity_checksum || ''}>
<MetaTextLine>
<InspectorIcon component={CircleWavyCheck} />
<span className="mr-1.5">Checksum</span>
<MetaValue>{objectData.integrity_checksum}</MetaValue>
</MetaTextLine>
</Tooltip>
)}
</MetaContainer>
</>
)}
</div>
@@ -165,34 +196,14 @@ export const Inspector = (props: Props) => {
);
};
function isVideo(extension: string) {
return [
'avi',
'asf',
'mpeg',
'mts',
'mpe',
'vob',
'qt',
'mov',
'asf',
'asx',
'mjpeg',
'ts',
'mxf',
'm2ts',
'f4v',
'wm',
'3gp',
'm4v',
'wmv',
'mp4',
'webm',
'flv',
'mpg',
'hevc',
'ogv',
'swf',
'wtv'
].includes(extension);
/**
 * Format a byte count as a human-readable string using binary (1024-based)
 * units, e.g. 1536 -> "1.5 KB".
 *
 * @param bytes    Number of bytes. Non-finite or non-positive values render
 *                 as "0 Bytes" (previously NaN/negative produced garbage
 *                 like "NaN undefined").
 * @param decimals Maximum decimal places (negative values clamp to 0).
 */
function formatBytes(bytes: number, decimals = 2) {
	// Guard: Math.log is undefined/negative for bytes <= 0 and NaN for NaN.
	if (!Number.isFinite(bytes) || bytes <= 0) return '0 Bytes';

	const k = 1024;
	const dm = decimals < 0 ? 0 : decimals;
	const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];

	// Clamp so values beyond the largest known unit still index into `sizes`
	// instead of reading past the end and printing "undefined".
	const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);

	// parseFloat strips trailing zeros from toFixed ("1.50" -> 1.5).
	return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + ' ' + sizes[i];
}

View File

@@ -4,7 +4,12 @@ import { useCallback, useEffect, useLayoutEffect, useRef, useState } from 'react
import { useSearchParams } from 'react-router-dom';
import { useKey, useOnWindowResize } from 'rooks';
import { ExplorerLayoutMode, getExplorerStore, useExplorerStore } from '../../util/explorerStore';
import {
ExplorerLayoutMode,
getExplorerStore,
useExplorerStore
} from '../../hooks/useExplorerStore';
import { GenericAlertDialogProps } from '../dialog/AlertDialog';
import FileItem from './FileItem';
import FileRow from './FileRow';
import { isPath } from './utils';
@@ -16,9 +21,19 @@ interface Props {
context: ExplorerContext;
data: ExplorerItem[];
onScroll?: (posY: number) => void;
setShowEncryptDialog: (isShowing: boolean) => void;
setShowDecryptDialog: (isShowing: boolean) => void;
setAlertDialogData: (data: GenericAlertDialogProps) => void;
}
export const VirtualizedList: React.FC<Props> = ({ data, context, onScroll }) => {
export const VirtualizedList: React.FC<Props> = ({
data,
context,
onScroll,
setShowEncryptDialog,
setShowDecryptDialog,
setAlertDialogData
}) => {
const scrollRef = useRef<HTMLDivElement>(null);
const innerRef = useRef<HTMLDivElement>(null);
@@ -142,6 +157,9 @@ export const VirtualizedList: React.FC<Props> = ({ data, context, onScroll }) =>
isSelected={getExplorerStore().selectedRowIndex === virtualRow.index}
index={virtualRow.index}
item={data[virtualRow.index]}
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
/>
) : (
[...Array(amountOfColumns)].map((_, i) => {
@@ -157,6 +175,9 @@ export const VirtualizedList: React.FC<Props> = ({ data, context, onScroll }) =>
isSelected={isSelected}
index={index}
item={item}
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
/>
)}
</div>
@@ -177,10 +198,21 @@ interface WrappedItemProps {
index: number;
isSelected: boolean;
kind: ExplorerLayoutMode;
setShowEncryptDialog: (isShowing: boolean) => void;
setShowDecryptDialog: (isShowing: boolean) => void;
setAlertDialogData: (data: GenericAlertDialogProps) => void;
}
// Wrap either list item or grid item with click logic as it is the same for both
const WrappedItem: React.FC<WrappedItemProps> = ({ item, index, isSelected, kind }) => {
const WrappedItem: React.FC<WrappedItemProps> = ({
item,
index,
isSelected,
kind,
setShowEncryptDialog,
setShowDecryptDialog,
setAlertDialogData
}) => {
const [_, setSearchParams] = useSearchParams();
const onDoubleClick = useCallback(() => {
@@ -199,6 +231,9 @@ const WrappedItem: React.FC<WrappedItemProps> = ({ item, index, isSelected, kind
onClick={onClick}
onDoubleClick={onDoubleClick}
selected={isSelected}
setShowEncryptDialog={setShowEncryptDialog}
setShowDecryptDialog={setShowDecryptDialog}
setAlertDialogData={setAlertDialogData}
/>
);

View File

@@ -30,7 +30,7 @@ export default function FavoriteButton(props: Props) {
};
return (
<Button onClick={toggleFavorite} size="sm">
<Button onClick={toggleFavorite} size="icon">
<Heart weight={favorite ? 'fill' : 'regular'} className="w-[18px] h-[18px]" />
</Button>
);

View File

@@ -1,17 +0,0 @@
interface MetaItemProps {
	title?: string;
	value: string | React.ReactNode;
}

// Labelled key/value row for the inspector panel. The raw value is mirrored
// into `data-tip` for tooltip display; string values are truncated, while
// node values are rendered as given.
export const MetaItem = ({ title, value }: MetaItemProps) => (
	<div data-tip={value} className="flex flex-col px-4 py-1.5 meta-item">
		{!!title && <h5 className="text-xs font-bold">{title}</h5>}
		{typeof value === 'string' ? <p className="text-xs break-all truncate">{value}</p> : value}
	</div>
);

View File

@@ -4,8 +4,8 @@ import { TextArea } from '@sd/ui';
import { useCallback, useState } from 'react';
import { useDebouncedCallback } from 'use-debounce';
import { MetaContainer, MetaTitle } from '../Inspector';
import { Divider } from './Divider';
import { MetaItem } from './MetaItem';
interface Props {
data: SDObject;
@@ -41,16 +41,14 @@ export default function Note(props: Props) {
return (
<>
<Divider />
<MetaItem
title="Note"
value={
<TextArea
className="mt-2 text-xs leading-snug !py-2"
value={note || ''}
onChange={handleNoteUpdate}
/>
}
/>
<MetaContainer>
<MetaTitle>Note</MetaTitle>
<TextArea
className="mt-2 mb-1 text-xs leading-snug !py-2"
value={note || ''}
onChange={handleNoteUpdate}
/>{' '}
</MetaContainer>
</>
);
}

View File

@@ -1,4 +1,4 @@
import { useLibraryQuery } from '@sd/client';
import { useLibraryMutation, useLibraryQuery } from '@sd/client';
import { JobReport } from '@sd/client';
import { Button, CategoryHeading, tw } from '@sd/ui';
import clsx from 'clsx';
@@ -15,6 +15,7 @@ import {
LockSimpleOpen,
Pause,
Question,
Trash,
X
} from 'phosphor-react';
@@ -23,7 +24,7 @@ import { Tooltip } from '../tooltip/Tooltip';
interface JobNiceData {
name: string;
icon: React.ForwardRefExoticComponent<IconProps & React.RefAttributes<SVGSVGElement>>;
icon: React.ForwardRefExoticComponent<any>;
}
const getNiceData = (job: JobReport): Record<string, JobNiceData> => ({
@@ -38,7 +39,7 @@ const getNiceData = (job: JobReport): Record<string, JobNiceData> => ({
icon: Camera
},
file_identifier: {
name: `Extracted metadata for ${numberWithCommas(job.task_count)} files`,
name: `Extracted metadata for ${numberWithCommas(job.metadata?.total_orphan_paths || 0)} files`,
icon: Eye
},
object_validator: {
@@ -72,11 +73,12 @@ function elapsed(seconds: number) {
return new Date(seconds * 1000).toUTCString().match(/(\d\d:\d\d:\d\d)/)?.[0];
}
const HeaderContainer = tw.div`z-20 flex items-center w-full h-10 px-2 border-b border-app-line/50 rounded-t-md `;
const HeaderContainer = tw.div`z-20 flex items-center w-full h-10 px-2 border-b border-app-line/50 rounded-t-md bg-app-button/70`;
export function JobsManager() {
const runningJobs = useLibraryQuery(['jobs.getRunning']);
const jobs = useLibraryQuery(['jobs.getHistory']);
const clearAllJobs = useLibraryMutation(['jobs.clearAll']);
return (
<div className="h-full pb-10 overflow-hidden">
@@ -84,8 +86,11 @@ export function JobsManager() {
<CategoryHeading className="ml-2">Recent Jobs</CategoryHeading>
<div className="flex-grow" />
<Button onClick={() => clearAllJobs.mutate(null)} size="icon">
<Trash className="w-5 h-5" />
</Button>
<Button size="icon">
<DotsThree className="w-5" />
<X className="w-5 h-5" />
</Button>
</HeaderContainer>
<div className="h-full mr-1 overflow-x-hidden custom-scroll inspector-scroll">
@@ -97,6 +102,9 @@ export function JobsManager() {
{jobs.data?.map((job) => (
<Job key={job.id} job={job} />
))}
{jobs.data?.length === 0 && runningJobs.data?.length === 0 && (
<div className="flex items-center justify-center h-32 text-ink-dull">No jobs.</div>
)}
</div>
</div>
</div>
@@ -113,10 +121,10 @@ function Job({ job }: { job: JobReport }) {
return (
<div className="flex items-center px-2 py-2 pl-4 border-b border-app-line/50 bg-opacity-60">
<Tooltip label={job.status}>
<niceData.icon className={clsx('w-5 mr-3')} />
<niceData.icon className={clsx('w-5 h-5 mr-3')} />
</Tooltip>
<div className="flex flex-col w-full ">
<span className="flex mt-0.5 items-center font-semibold truncate">
<div className="flex flex-col truncate">
<span className="mt-0.5 font-semibold truncate">
{isRunning ? job.message : niceData.name}
</span>
{isRunning && (
@@ -124,7 +132,7 @@ function Job({ job }: { job: JobReport }) {
<ProgressBar value={job.completed_task_count} total={job.task_count} />
</div>
)}
<div className="flex items-center text-ink-faint">
<div className="flex items-center truncate text-ink-faint">
<span className="text-xs">
{isRunning ? 'Elapsed' : job.status === 'Failed' ? 'Failed after' : 'Took'}{' '}
{job.seconds_elapsed
@@ -144,7 +152,7 @@ function Job({ job }: { job: JobReport }) {
<div className="flex flex-row space-x-2 ml-7">
{job.status === 'Running' && (
<Button size="icon">
<Pause className="w-4" />
<Pause className="w-4 h-4" />
</Button>
)}
{job.status === 'Failed' && (
@@ -153,7 +161,7 @@ function Job({ job }: { job: JobReport }) {
</Button>
)}
<Button size="icon">
<X className="w-4" />
<X className="w-4 h-4" />
</Button>
</div>
</div>

View File

@@ -22,21 +22,13 @@ import {
tw
} from '@sd/ui';
import clsx from 'clsx';
import {
CheckCircle,
CirclesFour,
Gear,
GearSix,
Lock,
Planet,
Plus,
ShareNetwork
} from 'phosphor-react';
import { CheckCircle, CirclesFour, Gear, Lock, Planet, Plus, ShareNetwork } from 'phosphor-react';
import React, { PropsWithChildren, useState } from 'react';
import { NavLink, NavLinkProps } from 'react-router-dom';
import { useOperatingSystem } from '../../hooks/useOperatingSystem';
import { usePlatform } from '../../util/Platform';
import AddLocationDialog from '../dialog/AddLocationDialog';
import CreateLibraryDialog from '../dialog/CreateLibraryDialog';
import { Folder } from '../icons/Folder';
import { JobsManager } from '../jobs/JobManager';
@@ -57,13 +49,13 @@ export function Sidebar() {
const [isCreateDialogOpen, setIsCreateDialogOpen] = useState(false);
return (
<SidebarBody className={macOnly(os, 'bg-opacity-[0.80]')}>
<SidebarBody className={macOnly(os, 'bg-opacity-[0.75]')}>
<WindowControls />
<Dropdown.Root
className="mt-2 mx-2.5"
// we override the sidebar dropdown item's hover styles
// because the dark style clashes with the sidebar
itemsClassName="dark:bg-sidebar-box mt-1 dark:divide-menu-selected/30"
itemsClassName="dark:bg-sidebar-box dark:border-sidebar-line mt-1 dark:divide-menu-selected/30 shadow-none"
button={
<Dropdown.Button
variant="gray"
@@ -71,7 +63,7 @@ export function Sidebar() {
`w-full text-ink `,
// these classname overrides are messy
// but they work
`!bg-sidebar-box !border-sidebar-line/50 active:!border-sidebar-line active:!bg-sidebar-button ui-open:!bg-sidebar-button ui-open:!border-sidebar-line`,
`!bg-sidebar-box !border-sidebar-line/50 active:!border-sidebar-line active:!bg-sidebar-button ui-open:!bg-sidebar-button ui-open:!border-sidebar-line ring-offset-sidebar`,
(library === null || isLoadingLibraries) && '!text-ink-faint'
)}
>
@@ -93,11 +85,11 @@ export function Sidebar() {
))}
</Dropdown.Section>
<Dropdown.Section>
<Dropdown.Item icon={GearSix} to="settings/library">
Library Settings
</Dropdown.Item>
<Dropdown.Item icon={Plus} onClick={() => setIsCreateDialogOpen(true)}>
Add Library
New Library
</Dropdown.Item>
<Dropdown.Item icon={Gear} to="settings/library">
Manage Library
</Dropdown.Item>
<Dropdown.Item icon={Lock} onClick={() => alert('TODO: Not implemented yet!')}>
Lock
@@ -110,10 +102,10 @@ export function Sidebar() {
<Icon component={Planet} />
Overview
</SidebarLink>
<SidebarLink to="photos">
{/* <SidebarLink to="photos">
<Icon component={ShareNetwork} />
Nodes
</SidebarLink>
</SidebarLink> */}
<SidebarLink to="content">
<Icon component={CirclesFour} />
Spaces
@@ -129,19 +121,18 @@ export function Sidebar() {
to="/settings/general"
size="icon"
variant="outline"
className="text-ink-faint"
className="text-ink-faint ring-offset-sidebar"
>
<Gear className="w-5 h-5" />
</ButtonLink>
<OverlayPanel
className="focus:outline-none"
transformOrigin="bottom left"
disabled={!library}
trigger={
<Button
size="icon"
variant="outline"
className="radix-state-open:bg-sidebar-selected/50 text-ink-faint"
className="radix-state-open:bg-sidebar-selected/50 text-ink-faint ring-offset-sidebar"
>
{library && <IsRunningJob />}
</Button>
@@ -251,7 +242,7 @@ function DebugPanel() {
}
const sidebarItemClass = cva(
'max-w mb-[2px] rounded px-2 py-1 gap-0.5 flex flex-row flex-grow items-center font-medium truncate text-sm',
'max-w mb-[2px] rounded px-2 py-1 gap-0.5 flex flex-row flex-grow items-center font-medium truncate text-sm outline-none ring-offset-sidebar focus:ring-2 focus:ring-accent focus:ring-offset-2',
{
variants: {
isActive: {
@@ -269,17 +260,13 @@ const sidebarItemClass = cva(
export const SidebarLink = (props: PropsWithChildren<NavLinkProps>) => {
const os = useOperatingSystem();
return (
<NavLink {...props}>
{({ isActive }) => (
<span
className={clsx(
sidebarItemClass({ isActive, isTransparent: os === 'macOS' }),
props.className
)}
>
{props.children}
</span>
)}
<NavLink
{...props}
className={({ isActive }) =>
clsx(sidebarItemClass({ isActive, isTransparent: os === 'macOS' }), props.className)
}
>
{props.children}
</NavLink>
);
};
@@ -318,6 +305,7 @@ function LibraryScopedSection() {
const { data: locations } = useLibraryQuery(['locations.list'], { keepPreviousData: true });
const { data: tags } = useLibraryQuery(['tags.list'], { keepPreviousData: true });
const { mutate: createLocation } = useLibraryMutation('locations.create');
const [textLocationDialogOpen, setTextLocationDialogOpen] = useState(false);
return (
<>
@@ -334,41 +322,40 @@ function LibraryScopedSection() {
{locations?.map((location) => {
return (
<div key={location.id} className="flex flex-row items-center">
<NavLink
<SidebarLink
className="relative w-full group"
to={{
pathname: `location/${location.id}`
}}
>
{({ isActive }) => (
<span className={sidebarItemClass({ isActive })}>
<div className="-mt-0.5 mr-1 flex-grow-0 flex-shrink-0">
<Folder size={18} />
</div>
<div className="-mt-0.5 mr-1 flex-grow-0 flex-shrink-0">
<Folder size={18} />
</div>
<span className="flex-grow flex-shrink-0">{location.name}</span>
</span>
)}
</NavLink>
<span className="flex-grow flex-shrink-0">{location.name}</span>
</SidebarLink>
</div>
);
})}
{(locations?.length || 0) < 4 && (
<button
onClick={() => {
if (!platform.openDirectoryPickerDialog) {
// TODO: Support opening locations on web
alert('Opening a dialogue is not supported on this platform!');
return;
if (platform.platform === 'web') {
setTextLocationDialogOpen(true);
} else {
if (!platform.openDirectoryPickerDialog) {
alert('Opening a dialogue is not supported on this platform!');
return;
}
platform.openDirectoryPickerDialog().then((result) => {
// TODO: Pass indexer rules ids to create location
if (result)
createLocation({
path: result as string,
indexer_rules_ids: []
} as LocationCreateArgs);
});
}
platform.openDirectoryPickerDialog().then((result) => {
// TODO: Pass indexer rules ids to create location
if (result)
createLocation({
path: result as string,
indexer_rules_ids: []
} as LocationCreateArgs);
});
}}
className={clsx(
'w-full px-2 py-1 mt-1 text-xs font-medium text-center',
@@ -380,6 +367,7 @@ function LibraryScopedSection() {
</button>
)}
</SidebarSection>
<AddLocationDialog open={textLocationDialogOpen} setOpen={setTextLocationDialogOpen} />
</div>
{!!tags?.length && (
<SidebarSection

View File

@@ -1,15 +1,16 @@
import clsx from 'clsx';
import { PropsWithChildren, ReactNode } from 'react';
interface SettingsHeaderProps {
interface SettingsHeaderProps extends PropsWithChildren {
title: string;
description: string;
description: string | ReactNode;
rightArea?: ReactNode;
}
export const SettingsHeader: React.FC<SettingsHeaderProps> = (props) => {
return (
<div className="flex mb-3">
{props.children}
<div className="flex-grow">
<h1 className="text-2xl font-bold">{props.title}</h1>
<p className="mt-1 text-sm text-gray-400">{props.description}</p>

View File

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,7 @@ import { proxy, useSnapshot } from 'valtio';
import { resetStore } from '@sd/client/src/stores/util';
export type ExplorerLayoutMode = 'list' | 'grid' | 'media';
export type ExplorerLayoutMode = 'list' | 'grid' | 'columns' | 'media';
export enum ExplorerKind {
Location,

View File

@@ -3,7 +3,7 @@ import { useEffect } from 'react';
import { useParams, useSearchParams } from 'react-router-dom';
import Explorer from '../components/explorer/Explorer';
import { getExplorerStore } from '../util/explorerStore';
import { getExplorerStore } from '../hooks/useExplorerStore';
export function useExplorerParams() {
const { id } = useParams();

View File

@@ -66,7 +66,7 @@ export default function GeneralSettings() {
<InputContainer
mini
title="Debug mode"
description="Enable extra debugging features within the app. Enabling this could have unintended consequences so be warned!"
description="Enable extra debugging features within the app."
>
<Switch
checked={debugState.enabled}

View File

@@ -1,3 +1,4 @@
import Logo from '@sd/assets/images/logo.png';
import { useBridgeQuery } from '@sd/client';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
@@ -8,11 +9,19 @@ export default function AboutSpacedrive() {
return (
<SettingsContainer>
<SettingsHeader title="About Spacedrive" description="The file manager from the future." />
<h1 className="!m-0 text-sm">
Build: v{buildInfo.data?.version || '-.-.-'} - {buildInfo.data?.commit || 'dev'}
</h1>
<SettingsHeader
title="Spacedrive"
description={
<div className="flex flex-col">
<span>The file manager from the future.</span>
<span className="mt-2 text-xs text-ink-faint/80">
v{buildInfo.data?.version || '-.-.-'} - {buildInfo.data?.commit || 'dev'}
</span>
</div>
}
>
<img src={Logo} className="w-[88px] mr-8" />
</SettingsHeader>
</SettingsContainer>
);
}

View File

@@ -2,7 +2,9 @@ import { useLibraryMutation, useLibraryQuery } from '@sd/client';
import { LocationCreateArgs } from '@sd/client';
import { Button, Input } from '@sd/ui';
import { MagnifyingGlass } from 'phosphor-react';
import { useState } from 'react';
import AddLocationDialog from '../../../components/dialog/AddLocationDialog';
import LocationListItem from '../../../components/location/LocationListItem';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
@@ -12,6 +14,7 @@ export default function LocationSettings() {
const platform = usePlatform();
const { data: locations } = useLibraryQuery(['locations.list']);
const { mutate: createLocation } = useLibraryMutation('locations.create');
const [textLocationDialogOpen, setTextLocationDialogOpen] = useState(false);
return (
<SettingsContainer>
@@ -25,24 +28,27 @@ export default function LocationSettings() {
<MagnifyingGlass className="absolute w-[18px] h-auto top-[8px] left-[11px] text-gray-350" />
<Input className="!p-0.5 !pl-9" placeholder="Search locations" />
</div>
<AddLocationDialog open={textLocationDialogOpen} setOpen={setTextLocationDialogOpen} />
<Button
variant="accent"
size="sm"
onClick={() => {
if (!platform.openDirectoryPickerDialog) {
// TODO: Support opening locations on web
alert('Opening a dialogue is not supported on this platform!');
return;
if (platform.platform === 'web') {
setTextLocationDialogOpen(true);
} else {
if (!platform.openDirectoryPickerDialog) {
alert('Opening a dialogue is not supported on this platform!');
return;
}
platform.openDirectoryPickerDialog().then((result) => {
// TODO: Pass indexer rules ids to create location
if (result)
createLocation({
path: result as string,
indexer_rules_ids: []
} as LocationCreateArgs);
});
}
platform.openDirectoryPickerDialog().then((result) => {
// TODO: Pass indexer rules ids to create location
if (result)
createLocation({
path: result as string,
indexer_rules_ids: []
} as LocationCreateArgs);
});
}}
>
Add Location

View File

@@ -1,7 +1,7 @@
import { useBridgeMutation, useBridgeQuery } from '@sd/client';
import { useBridgeMutation, useBridgeQuery, useCurrentLibrary } from '@sd/client';
import { LibraryConfigWrapped } from '@sd/client';
import { Button, ButtonLink, Card } from '@sd/ui';
import { DotsSixVertical, Pen, Trash } from 'phosphor-react';
import { Button, ButtonLink, Card, tw } from '@sd/ui';
import { Database, DotsSixVertical, Link, Pen, Pencil, Trash } from 'phosphor-react';
import { useState } from 'react';
import CreateLibraryDialog from '../../../components/dialog/CreateLibraryDialog';
@@ -9,7 +9,9 @@ import DeleteLibraryDialog from '../../../components/dialog/DeleteLibraryDialog'
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
function LibraryListItem(props: { library: LibraryConfigWrapped }) {
const Pill = tw.span`px-1.5 ml-2 py-[2px] rounded text-xs font-medium bg-accent`;
function LibraryListItem(props: { library: LibraryConfigWrapped; current: boolean }) {
const [openDeleteModal, setOpenDeleteModal] = useState(false);
const deleteLibrary = useBridgeMutation('library.delete', {
@@ -22,15 +24,21 @@ function LibraryListItem(props: { library: LibraryConfigWrapped }) {
<Card>
<DotsSixVertical weight="bold" className="mt-[15px] mr-3 opacity-30" />
<div className="flex-1 my-0.5">
<h3 className="font-semibold">{props.library.config.name}</h3>
<h3 className="font-semibold">
{props.library.config.name}
{props.current && <Pill>Current</Pill>}
</h3>
<p className="mt-0.5 text-xs text-ink-dull">{props.library.uuid}</p>
</div>
<div className="flex flex-row items-center space-x-2">
<ButtonLink size="icon" to="/settings/library" variant="gray">
<Pen className="w-4 h-4" />
<Button className="!p-1.5" onClick={() => {}} variant="gray">
<Database className="w-4 h-4" />
</Button>
<ButtonLink className="!p-1.5" to="/settings/library" variant="gray">
<Pencil className="w-4 h-4" />
</ButtonLink>
<DeleteLibraryDialog libraryUuid={props.library.uuid}>
<Button size="icon" variant="gray">
<Button className="!p-1.5" variant="gray">
<Trash className="w-4 h-4" />
</Button>
</DeleteLibraryDialog>
@@ -43,6 +51,8 @@ export default function LibrarySettings() {
const { data: libraries } = useBridgeQuery(['library.list']);
const [open, setOpen] = useState(false);
const { library: currentLibrary } = useCurrentLibrary();
return (
<SettingsContainer>
<SettingsHeader
@@ -60,9 +70,19 @@ export default function LibrarySettings() {
/>
<div className="space-y-2">
{libraries?.map((library) => (
<LibraryListItem key={library.uuid} library={library} />
))}
{libraries
?.sort((a, b) => {
if (a.uuid === currentLibrary?.uuid) return -1;
if (b.uuid === currentLibrary?.uuid) return 1;
return 0;
})
.map((library) => (
<LibraryListItem
current={library.uuid === currentLibrary?.uuid}
key={library.uuid}
library={library}
/>
))}
</div>
</SettingsContainer>
);

View File

@@ -38,13 +38,19 @@ body {
.explorer-scroll {
&::-webkit-scrollbar {
height: 6px;
width: 8px;
width: 6px;
}
&::-webkit-scrollbar-track {
@apply bg-[#00000000] mt-[53px] rounded-[6px];
}
&::-webkit-scrollbar-thumb {
@apply rounded-[6px] bg-app-box;
@apply rounded-[6px] bg-transparent;
}
&:hover {
&::-webkit-scrollbar-thumb {
@apply bg-app-divider/20;
}
}
}
.default-scroll {

View File

@@ -0,0 +1,26 @@
// Human-readable names for object kinds, looked up by numeric kind value.
// NOTE(review): order matters — the array index presumably mirrors the
// discriminant of the core's ObjectKind enum (Rust side); confirm against
// that definition before reordering or inserting entries.
export const ObjectKind = [
	'Unknown',
	'Document',
	'Folder',
	'Text',
	'Package',
	'Image',
	'Audio',
	'Video',
	'Archive',
	'Executable',
	'Alias',
	'Encrypted',
	'Key',
	'Link',
	'WebPageArchive',
	'Widget',
	'Album',
	'Collection',
	'Font',
	'Mesh',
	'Code',
	'Database'
];

// export type ObjectKinds = keyof ObjectKind;

Some files were not shown because too many files have changed in this diff Show More