diff --git a/apps/cli/src/domains/tag/args.rs b/apps/cli/src/domains/tag/args.rs index c6a94fef3..d236dc3d1 100644 --- a/apps/cli/src/domains/tag/args.rs +++ b/apps/cli/src/domains/tag/args.rs @@ -2,7 +2,9 @@ use clap::Args; use uuid::Uuid; use sd_core::ops::tags::{ - apply::input::ApplyTagsInput, create::input::CreateTagInput, search::input::SearchTagsInput, + apply::input::{ApplyTagsInput, TagTargets}, + create::input::CreateTagInput, + search::input::SearchTagsInput, }; #[derive(Args, Debug)] @@ -34,7 +36,7 @@ pub struct TagApplyArgs { impl From for ApplyTagsInput { fn from(args: TagApplyArgs) -> Self { - ApplyTagsInput::user_tags(args.entries, args.tags) + ApplyTagsInput::user_tags_entry(args.entries, args.tags) } } diff --git a/apps/tauri/src/App.tsx b/apps/tauri/src/App.tsx index b701b0118..c9ad11d5f 100644 --- a/apps/tauri/src/App.tsx +++ b/apps/tauri/src/App.tsx @@ -27,13 +27,13 @@ function App() { useEffect(() => { // React Scan disabled - too heavy for development // Uncomment if you need to debug render performance: - if (import.meta.env.DEV) { - setTimeout(() => { - import("react-scan").then(({ scan }) => { - scan({ enabled: true, log: false }); - }); - }, 2000); - } + // if (import.meta.env.DEV) { + // setTimeout(() => { + // import("react-scan").then(({ scan }) => { + // scan({ enabled: false, log: false }); + // }); + // }, 2000); + // } // Initialize Tauri native context menu handler initializeContextMenuHandler(); diff --git a/core/Cargo.toml b/core/Cargo.toml index aa473c4f0..cbd7eab50 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -106,6 +106,9 @@ rmp = "0.8" # MessagePack core types rmp-serde = "1.3" # MessagePack serialization for job state sd-task-system = { path = "../crates/task-system" } +# Vector database for memory files (optional for now) +# lancedb = "0.15" # Embedded vector database (conflicts with gpui) + # Media processing dependencies blurhash = "0.2" image = "0.25" diff --git a/core/examples/create_memory.rs b/core/examples/create_memory.rs new file mode 100644 index 000000000..b29592ca1 --- /dev/null +++ b/core/examples/create_memory.rs @@ -0,0 +1,181 @@ +//! Create a test memory file for development +//! +//! This example creates a real .memory file demonstrating the format. +//! 
Run with: cargo run --example create_memory + +use sd_core::domain::memory::{DocumentType, FactType, MemoryFile, MemoryScope}; +use std::path::PathBuf; + +#[tokio::main] +async fn main() -> Result<(), Box> { + // Initialize logging + tracing_subscriber::fmt() + .with_env_filter("info") + .init(); + + println!("\n🧠 Creating Spacedrive Memory File\n"); + + // Output path + let output_path = PathBuf::from( + "/Users/jamespine/Projects/spacedrive/workbench/test-memories/memory-file-system.memory", + ); + + // Ensure directory exists + if let Some(parent) = output_path.parent() { + std::fs::create_dir_all(parent)?; + } + + // Create memory file + let mut memory = MemoryFile::create( + "memory-file-system".to_string(), + MemoryScope::Directory { + path: "/Users/jamespine/Projects/spacedrive/core/src/domain/memory".to_string(), + }, + &output_path, + ) + .await?; + + println!("āœ… Created memory archive\n"); + + // Add design documents + println!("šŸ“„ Adding documents..."); + + let design_doc = memory + .add_document( + None, + "MEMORY_FILE_FORMAT_DESIGN.md".to_string(), + Some( + "Complete specification for .memory file format with custom archive".to_string(), + ), + DocumentType::Design, + ) + .await?; + + let impl_doc = memory + .add_document( + None, + "MEMORY_FILE_IMPLEMENTATION_STATUS.md".to_string(), + Some("Implementation status with custom archive format".to_string()), + DocumentType::Documentation, + ) + .await?; + + let agent_doc = memory + .add_document( + None, + "AGENT_MEMORY_ARCHITECTURE_V1.md".to_string(), + Some("Three-type agent memory architecture".to_string()), + DocumentType::Design, + ) + .await?; + + println!(" āœ… {} documents added\n", memory.get_documents().len()); + + // Add learned facts + println!("🧩 Adding facts..."); + + memory + .add_fact( + "Memory files use custom archive format with magic bytes SDMEMORY".to_string(), + FactType::Principle, + 1.0, + Some(design_doc), + ) + .await?; + + memory + .add_fact( + "Archive is append-only with index at end for efficient updates".to_string(), + FactType::Pattern, + 1.0, + Some(impl_doc), + ) + .await?; + + memory + .add_fact( + "Vector store embedded using MessagePack serialization".to_string(), + FactType::Decision, + 0.9, + Some(impl_doc), + ) + .await?; + + memory + .add_fact( + "Agent memory types: temporal (events), associative (knowledge), working (current)".to_string(), + FactType::Pattern, + 1.0, + Some(agent_doc), + ) + .await?; + + memory + .add_fact( + "Memory files solve context-gathering problem for AI agents".to_string(), + FactType::Principle, + 1.0, + Some(design_doc), + ) + .await?; + + println!(" āœ… {} facts added\n", memory.get_facts().len()); + + // Add embeddings + println!("šŸ”¢ Adding embeddings..."); + + // 4D mock vectors (real would be 384D from AI model) + let design_vector = vec![0.9, 0.2, 0.7, 0.1]; + let impl_vector = vec![0.1, 0.9, 0.3, 0.5]; + let agent_vector = vec![0.3, 0.2, 0.95, 0.1]; + + memory.add_embedding(design_doc, design_vector).await?; + memory.add_embedding(impl_doc, impl_vector).await?; + memory.add_embedding(agent_doc, agent_vector).await?; + + println!( + " āœ… {} embeddings added\n", + memory.embedding_count().await? + ); + + // Test search + println!("šŸ” Testing semantic search..."); + let query = vec![0.7, 0.15, 0.85, 0.05]; // Query: design + architecture + let results = memory.search_similar(query, 3).await?; + + println!(" Results:"); + for (i, doc_id) in results.iter().enumerate() { + if let Some(doc) = memory.get_document(*doc_id) { + println!(" {}. 
{}", i + 1, doc.title); + } + } + println!(); + + // Show final statistics + let metadata = memory.metadata(); + let stats = &metadata.statistics; + + println!("šŸ“Š Memory Statistics:"); + println!(" Name: {}", metadata.name); + println!(" Scope: {}", metadata.scope.identifier()); + println!(" Documents: {}", stats.document_count); + println!(" Facts: {}", stats.fact_count); + println!(" Embeddings: {}", stats.embedding_count); + println!(" Total size: {} bytes", stats.file_size_bytes); + println!(); + + println!("āœ… Memory file created successfully!"); + println!("šŸ“ Location: {}", output_path.display()); + println!(); + println!("Verify:"); + println!(" file {}", output_path.display()); + println!(" hexdump -C {} | head -20", output_path.display()); + println!(); + + // Verify single file + assert!(output_path.is_file()); + assert!(!output_path.is_dir()); + println!("āœ… Confirmed: Single file archive\n"); + + Ok(()) +} diff --git a/core/src/domain/content_identity.rs b/core/src/domain/content_identity.rs index 01818e4ff..9ee7af9d1 100644 --- a/core/src/domain/content_identity.rs +++ b/core/src/domain/content_identity.rs @@ -59,6 +59,7 @@ pub enum ContentKind { Shortcut = 23, Package = 24, ModelEntry = 25, + Memory = 26, } // Translate database entity into domain model @@ -113,6 +114,8 @@ impl ContentKind { 22 => Self::Web, 23 => Self::Shortcut, 24 => Self::Package, + 25 => Self::ModelEntry, + 26 => Self::Memory, _ => Self::Unknown, } } @@ -305,6 +308,7 @@ impl std::fmt::Display for ContentKind { ContentKind::Shortcut => "shortcut", ContentKind::Package => "package", ContentKind::ModelEntry => "model_entry", + ContentKind::Memory => "memory", }; write!(f, "{}", s) } diff --git a/core/src/domain/file.rs b/core/src/domain/file.rs index 28dd9ee0e..a29ac64ad 100644 --- a/core/src/domain/file.rs +++ b/core/src/domain/file.rs @@ -116,6 +116,9 @@ impl crate::domain::resource::Identifiable for File { "image_media_data", "video_media_data", "audio_media_data", + "user_metadata", + "user_metadata_tag", + "tag", ] } diff --git a/core/src/domain/memory/archive.rs b/core/src/domain/memory/archive.rs new file mode 100644 index 000000000..2f69a4de9 --- /dev/null +++ b/core/src/domain/memory/archive.rs @@ -0,0 +1,384 @@ +use std::{ + collections::HashMap, + io::{Read, Seek, SeekFrom, Write}, + path::Path, +}; + +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +const MAGIC: &[u8; 8] = b"SDMEMORY"; +const VERSION: u32 = 1; +const HEADER_SIZE: u64 = 64; + +#[derive(Error, Debug)] +pub enum ArchiveError { + #[error("IO error: {0}")] + Io(#[from] std::io::Error), + + #[error("Invalid magic bytes")] + InvalidMagic, + + #[error("Unsupported version: {0}")] + UnsupportedVersion(u32), + + #[error("File not found in archive: {0}")] + FileNotFound(String), + + #[error("Serialization error: {0}")] + Serialization(#[from] rmp_serde::encode::Error), + + #[error("Deserialization error: {0}")] + Deserialization(#[from] rmp_serde::decode::Error), + + #[error("Corrupt index")] + CorruptIndex, +} + +pub type Result = std::result::Result; + +/// Entry in the file index +#[derive(Debug, Clone, Serialize, Deserialize)] +struct FileEntry { + /// Offset in file where data starts + offset: u64, + /// Size of data in bytes + size: u64, + /// Whether data is compressed + compressed: bool, + /// Deleted flag (for soft deletes) + deleted: bool, +} + +/// File index (stored at end of archive) +#[derive(Debug, Clone, Serialize, Deserialize)] +struct FileIndex { + /// Filename -> FileEntry + files: HashMap, +} + +/// 
Custom archive format for memory files +/// +/// Format: +/// - Fixed 64-byte header with magic, version, index offset +/// - Append-only data section with length-prefixed files +/// - MessagePack-encoded index at end +/// +/// Updates: +/// - Append new files to end +/// - Update index with new offsets +/// - Rewrite header with updated index offset +pub struct MemoryArchive { + file: std::fs::File, + index: FileIndex, + index_offset: u64, +} + +impl MemoryArchive { + /// Create new archive + pub fn create(path: &Path) -> Result { + let mut file = std::fs::OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(true) + .open(path)?; + + // Write header + file.write_all(MAGIC)?; + file.write_all(&VERSION.to_le_bytes())?; + file.write_all(&0u32.to_le_bytes())?; // Flags (reserved) + file.write_all(&HEADER_SIZE.to_le_bytes())?; // Index offset (will update) + file.write_all(&vec![0u8; 40])?; // Reserved space + + // Write empty index at position 64 + let index = FileIndex { + files: HashMap::new(), + }; + + let index_bytes = rmp_serde::to_vec(&index)?; + file.write_all(&index_bytes)?; + + let index_offset = HEADER_SIZE; + + Ok(Self { + file, + index, + index_offset, + }) + } + + /// Open existing archive + pub fn open(path: &Path) -> Result { + let mut file = std::fs::OpenOptions::new() + .read(true) + .write(true) + .open(path)?; + + // Read and validate header + let mut magic = [0u8; 8]; + file.read_exact(&mut magic)?; + if &magic != MAGIC { + return Err(ArchiveError::InvalidMagic); + } + + let mut version_bytes = [0u8; 4]; + file.read_exact(&mut version_bytes)?; + let version = u32::from_le_bytes(version_bytes); + if version != VERSION { + return Err(ArchiveError::UnsupportedVersion(version)); + } + + // Skip flags + file.seek(SeekFrom::Current(4))?; + + // Read index offset + let mut offset_bytes = [0u8; 8]; + file.read_exact(&mut offset_bytes)?; + let index_offset = u64::from_le_bytes(offset_bytes); + + // Seek to index and read it + file.seek(SeekFrom::Start(index_offset))?; + let mut index_bytes = Vec::new(); + file.read_to_end(&mut index_bytes)?; + + let index: FileIndex = rmp_serde::from_slice(&index_bytes) + .map_err(|_| ArchiveError::CorruptIndex)?; + + Ok(Self { + file, + index, + index_offset, + }) + } + + /// Add a file to the archive + pub fn add_file(&mut self, name: &str, data: &[u8]) -> Result<()> { + // Seek to current index position (append before index) + self.file.seek(SeekFrom::Start(self.index_offset))?; + + let offset = self.index_offset; + let size = data.len() as u64; + + // Write: [length: u64][data: bytes] + self.file.write_all(&size.to_le_bytes())?; + self.file.write_all(data)?; + + // Update index + self.index.files.insert( + name.to_string(), + FileEntry { + offset: offset + 8, // After length prefix + size, + compressed: false, + deleted: false, + }, + ); + + // New index position + self.index_offset = offset + 8 + size; + + // Write updated index + self.write_index()?; + + Ok(()) + } + + /// Read a file from the archive + pub fn read_file(&mut self, name: &str) -> Result> { + let entry = self + .index + .files + .get(name) + .ok_or_else(|| ArchiveError::FileNotFound(name.to_string()))?; + + if entry.deleted { + return Err(ArchiveError::FileNotFound(name.to_string())); + } + + // Seek to file offset + self.file.seek(SeekFrom::Start(entry.offset))?; + + // Read data + let mut data = vec![0u8; entry.size as usize]; + self.file.read_exact(&mut data)?; + + Ok(data) + } + + /// Update a file (appends new version) + pub fn update_file(&mut self, 
name: &str, data: &[u8]) -> Result<()> { + // Just append as new file (index will point to latest) + self.add_file(name, data) + } + + /// Delete a file (soft delete in index) + pub fn delete_file(&mut self, name: &str) -> Result<()> { + if let Some(entry) = self.index.files.get_mut(name) { + entry.deleted = true; + self.write_index()?; + } + Ok(()) + } + + /// List all files + pub fn list_files(&self) -> Vec { + self.index + .files + .iter() + .filter(|(_, entry)| !entry.deleted) + .map(|(name, _)| name.clone()) + .collect() + } + + /// Check if file exists + pub fn contains(&self, name: &str) -> bool { + self.index + .files + .get(name) + .map(|e| !e.deleted) + .unwrap_or(false) + } + + /// Write index to end of file and update header + fn write_index(&mut self) -> Result<()> { + // Serialize index + let index_bytes = rmp_serde::to_vec(&self.index)?; + + // Write at current index offset + self.file.seek(SeekFrom::Start(self.index_offset))?; + self.file.write_all(&index_bytes)?; + + // Truncate file (remove old index if it was longer) + let new_end = self.index_offset + index_bytes.len() as u64; + self.file.set_len(new_end)?; + + // Update header with new index offset + self.file.seek(SeekFrom::Start(16))?; // Skip magic + version + flags + self.file.write_all(&self.index_offset.to_le_bytes())?; + + self.file.flush()?; + + Ok(()) + } + + /// Get total archive size + pub fn size(&mut self) -> Result { + Ok(self.file.metadata()?.len()) + } + + /// Compact archive (remove deleted files) + pub fn compact(&mut self) -> Result<()> { + // TODO: Implement garbage collection + // Would require rewriting entire file with only non-deleted entries + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::NamedTempFile; + + #[test] + fn test_create_archive() { + let temp_file = NamedTempFile::new().unwrap(); + let _archive = MemoryArchive::create(temp_file.path()).unwrap(); + + // Verify magic bytes + let mut file = std::fs::File::open(temp_file.path()).unwrap(); + let mut magic = [0u8; 8]; + file.read_exact(&mut magic).unwrap(); + assert_eq!(&magic, MAGIC); + } + + #[test] + fn test_add_and_read_file() { + let temp_file = NamedTempFile::new().unwrap(); + let mut archive = MemoryArchive::create(temp_file.path()).unwrap(); + + let test_data = b"Hello, Memory!"; + archive.add_file("test.txt", test_data).unwrap(); + + let read_data = archive.read_file("test.txt").unwrap(); + assert_eq!(read_data, test_data); + } + + #[test] + fn test_update_file() { + let temp_file = NamedTempFile::new().unwrap(); + let mut archive = MemoryArchive::create(temp_file.path()).unwrap(); + + archive.add_file("test.txt", b"Version 1").unwrap(); + archive.update_file("test.txt", b"Version 2").unwrap(); + + let data = archive.read_file("test.txt").unwrap(); + assert_eq!(data, b"Version 2"); + } + + #[test] + fn test_list_files() { + let temp_file = NamedTempFile::new().unwrap(); + let mut archive = MemoryArchive::create(temp_file.path()).unwrap(); + + archive.add_file("file1.txt", b"data1").unwrap(); + archive.add_file("file2.txt", b"data2").unwrap(); + archive.add_file("file3.txt", b"data3").unwrap(); + + let files = archive.list_files(); + assert_eq!(files.len(), 3); + assert!(files.contains(&"file1.txt".to_string())); + } + + #[test] + fn test_delete_file() { + let temp_file = NamedTempFile::new().unwrap(); + let mut archive = MemoryArchive::create(temp_file.path()).unwrap(); + + archive.add_file("test.txt", b"data").unwrap(); + assert!(archive.contains("test.txt")); + + 
archive.delete_file("test.txt").unwrap();
+		assert!(!archive.contains("test.txt"));
+
+		let result = archive.read_file("test.txt");
+		assert!(result.is_err());
+	}
+
+	#[test]
+	fn test_reopen_archive() {
+		let temp_file = NamedTempFile::new().unwrap();
+		let path = temp_file.path().to_path_buf();
+
+		{
+			let mut archive = MemoryArchive::create(&path).unwrap();
+			archive.add_file("persisted.txt", b"test data").unwrap();
+		}
+
+		// Reopen
+		let mut archive = MemoryArchive::open(&path).unwrap();
+		let data = archive.read_file("persisted.txt").unwrap();
+		assert_eq!(data, b"test data");
+	}
+
+	#[test]
+	fn test_multiple_updates() {
+		let temp_file = NamedTempFile::new().unwrap();
+		let mut archive = MemoryArchive::create(temp_file.path()).unwrap();
+
+		// Add initial
+		archive.add_file("metadata.msgpack", b"v1").unwrap();
+
+		// Update multiple times
+		archive.update_file("metadata.msgpack", b"v2").unwrap();
+		archive.update_file("metadata.msgpack", b"v3").unwrap();
+		archive.update_file("metadata.msgpack", b"v4").unwrap();
+
+		// Should read latest
+		let data = archive.read_file("metadata.msgpack").unwrap();
+		assert_eq!(data, b"v4");
+
+		// File should still be single file
+		assert_eq!(archive.list_files().len(), 1);
+	}
+}
diff --git a/core/src/domain/memory/metadata.rs b/core/src/domain/memory/metadata.rs
new file mode 100644
index 000000000..e451d37d3
--- /dev/null
+++ b/core/src/domain/memory/metadata.rs
@@ -0,0 +1,69 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+
+use super::{MemoryScope, MemoryStatistics};
+
+/// Metadata for a memory file
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct MemoryMetadata {
+	/// Memory name
+	pub name: String,
+
+	/// Optional description
+	pub description: Option<String>,
+
+	/// What this memory is scoped to
+	pub scope: MemoryScope,
+
+	/// When memory was created
+	pub created_at: DateTime<Utc>,
+
+	/// Last modification time
+	pub updated_at: DateTime<Utc>,
+
+	/// Last time memory was loaded/used
+	pub last_used_at: Option<DateTime<Utc>>,
+
+	/// Format version
+	pub version: u32,
+
+	/// Embedding model used
+	pub embedding_model: String,
+
+	/// Approximate total tokens
+	pub total_tokens: usize,
+
+	/// Tags for categorization
+	pub tags: Vec<String>,
+
+	/// Statistics (cached)
+	pub statistics: MemoryStatistics,
+}
+
+impl MemoryMetadata {
+	pub fn new(name: String, scope: MemoryScope) -> Self {
+		Self {
+			name,
+			description: None,
+			scope,
+			created_at: Utc::now(),
+			updated_at: Utc::now(),
+			last_used_at: None,
+			version: 1,
+			embedding_model: "all-MiniLM-L6-v2".to_string(),
+			total_tokens: 0,
+			tags: Vec::new(),
+			statistics: MemoryStatistics::default(),
+		}
+	}
+
+	/// Mark memory as used (updates last_used_at)
+	pub fn touch(&mut self) {
+		self.last_used_at = Some(Utc::now());
+	}
+
+	/// Update modification time
+	pub fn mark_updated(&mut self) {
+		self.updated_at = Utc::now();
+	}
+}
diff --git a/core/src/domain/memory/mod.rs b/core/src/domain/memory/mod.rs
new file mode 100644
index 000000000..960b3a999
--- /dev/null
+++ b/core/src/domain/memory/mod.rs
@@ -0,0 +1,27 @@
+//! Memory file format - Modular RAG for AI agents
+//!
+//! Memory files (.memory) are portable knowledge packages that contain:
+//! - Vector embeddings (MessagePack-backed vector store)
+//! - Document references (files relevant to a task)
+//! - Learned facts (extracted knowledge)
+//! - Optional conversation history
+//!
+//! Format: custom single-file archive with MessagePack-encoded components
+//! Storage: a single {name}.memory archive file containing all components
+
+pub mod archive;
+pub mod metadata;
+pub mod scope;
+pub mod storage;
+pub mod types;
+pub mod vector_store;
+
+pub use archive::MemoryArchive;
+pub use metadata::MemoryMetadata;
+pub use scope::MemoryScope;
+pub use storage::MemoryFile;
+pub use types::{
+	AuditEntry, ConversationMessage, Document, DocumentType, Fact, FactType, MemoryStatistics,
+	MessageRole,
+};
+pub use vector_store::{VectorDocument, VectorStore};
diff --git a/core/src/domain/memory/scope.rs b/core/src/domain/memory/scope.rs
new file mode 100644
index 000000000..896765781
--- /dev/null
+++ b/core/src/domain/memory/scope.rs
@@ -0,0 +1,39 @@
+use serde::{Deserialize, Serialize};
+
+/// Defines what a memory file is scoped to
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(tag = "type", rename_all = "snake_case")]
+pub enum MemoryScope {
+	/// Attached to a specific directory
+	Directory { path: String },
+
+	/// Scoped to an entire project/repository
+	Project { root_path: String },
+
+	/// Topic-based (not tied to location)
+	Topic { topic: String },
+
+	/// Standalone portable memory
+	Standalone,
+}
+
+impl MemoryScope {
+	pub fn as_str(&self) -> &'static str {
+		match self {
+			Self::Directory { .. } => "directory",
+			Self::Project { .. } => "project",
+			Self::Topic { .. } => "topic",
+			Self::Standalone => "standalone",
+		}
+	}
+
+	/// Get the scope identifier for display
+	pub fn identifier(&self) -> String {
+		match self {
+			Self::Directory { path } => path.clone(),
+			Self::Project { root_path } => root_path.clone(),
+			Self::Topic { topic } => topic.clone(),
+			Self::Standalone => "standalone".to_string(),
+		}
+	}
+}
diff --git a/core/src/domain/memory/storage.rs b/core/src/domain/memory/storage.rs
new file mode 100644
index 000000000..d72b75465
--- /dev/null
+++ b/core/src/domain/memory/storage.rs
@@ -0,0 +1,439 @@
+use std::path::{Path, PathBuf};
+
+use chrono::Utc;
+use tracing::{debug, info};
+use uuid::Uuid;
+
+use super::{
+	archive::MemoryArchive,
+	metadata::MemoryMetadata,
+	scope::MemoryScope,
+	types::{Document, DocumentType, Fact, FactType, MemoryStatistics},
+	vector_store::VectorStore,
+};
+
+use thiserror::Error;
+
+#[derive(Error, Debug)]
+pub enum MemoryFileError {
+	#[error("Archive error: {0}")]
+	Archive(#[from] super::archive::ArchiveError),
+
+	#[error("Vector store error: {0}")]
+	VectorStore(#[from] super::vector_store::VectorStoreError),
+
+	#[error("Serialization error: {0}")]
+	Serialization(#[from] rmp_serde::encode::Error),
+
+	#[error("Deserialization error: {0}")]
+	Deserialization(#[from] rmp_serde::decode::Error),
+
+	#[error("Document not found: {0}")]
+	DocumentNotFound(i32),
+
+	#[error("Fact not found: {0}")]
+	FactNotFound(i32),
+}
+
+pub type Result<T> = std::result::Result<T, MemoryFileError>;
+
+/// Memory file using custom archive format
+/// Single .memory file containing all data
+pub struct MemoryFile {
+	path: PathBuf,
+	archive: MemoryArchive,
+	metadata: MemoryMetadata,
+	documents: Vec<Document>,
+	facts: Vec<Fact>,
+	vector_store: VectorStore,
+	next_doc_id: i32,
+	next_fact_id: i32,
+}
+
+impl MemoryFile {
+	/// Create new memory file (single file archive)
+	pub async fn create(name: String, scope: MemoryScope, output_path: &Path) -> Result<Self> {
+		info!("Creating memory archive at: {}", output_path.display());
+
+		// Create archive
+		let mut archive = MemoryArchive::create(output_path)?;
+
+		// Initialize metadata
+		let metadata = MemoryMetadata::new(name, scope);
+
+		// Write initial files
+		let metadata_bytes = rmp_serde::to_vec(&metadata)?;
+		archive.add_file("metadata.msgpack", &metadata_bytes)?;
+
+		let documents: Vec<Document> = Vec::new();
+		let documents_bytes = rmp_serde::to_vec(&documents)?;
+		archive.add_file("documents.msgpack", &documents_bytes)?;
+
+		let facts: Vec<Fact> = Vec::new();
+		let facts_bytes = rmp_serde::to_vec(&facts)?;
+		archive.add_file("facts.msgpack", &facts_bytes)?;
+
+		// Create in-memory vector store
+		let vector_store = VectorStore::create_in_memory()?;
+
+		info!("Memory archive created successfully");
+
+		Ok(Self {
+			path: output_path.to_path_buf(),
+			archive,
+			metadata,
+			documents,
+			facts,
+			vector_store,
+			next_doc_id: 1,
+			next_fact_id: 1,
+		})
+	}
+
+	/// Open existing memory file
+	pub async fn open(path: PathBuf) -> Result<Self> {
+		info!("Opening memory archive at: {}", path.display());
+
+		let mut archive = MemoryArchive::open(&path)?;
+
+		// Load metadata
+		let metadata_bytes = archive.read_file("metadata.msgpack")?;
+		let metadata: MemoryMetadata = rmp_serde::from_slice(&metadata_bytes)?;
+
+		// Load documents
+		let documents: Vec<Document> = if archive.contains("documents.msgpack") {
+			let bytes = archive.read_file("documents.msgpack")?;
+			rmp_serde::from_slice(&bytes)?
+		} else {
+			Vec::new()
+		};
+
+		// Load facts
+		let facts: Vec<Fact> = if archive.contains("facts.msgpack") {
+			let bytes = archive.read_file("facts.msgpack")?;
+			rmp_serde::from_slice(&bytes)?
+		} else {
+			Vec::new()
+		};
+
+		// Load vector store
+		let vector_store = if archive.contains("embeddings.msgpack") {
+			let bytes = archive.read_file("embeddings.msgpack")?;
+			VectorStore::from_bytes(&bytes)?
+		} else {
+			VectorStore::create_in_memory()?
+		};
+
+		// Compute next IDs
+		let next_doc_id = documents
+			.iter()
+			.map(|d: &Document| d.id)
+			.max()
+			.unwrap_or(0)
+			+ 1;
+		let next_fact_id = facts.iter().map(|f: &Fact| f.id).max().unwrap_or(0) + 1;
+
+		debug!("Loaded memory: {} docs, {} facts", documents.len(), facts.len());
+
+		Ok(Self {
+			path,
+			archive,
+			metadata,
+			documents,
+			facts,
+			vector_store,
+			next_doc_id,
+			next_fact_id,
+		})
+	}
+
+	/// Add document
+	pub async fn add_document(
+		&mut self,
+		content_uuid: Option<Uuid>,
+		title: String,
+		summary: Option<String>,
+		doc_type: DocumentType,
+	) -> Result<i32> {
+		let doc = Document {
+			id: self.next_doc_id,
+			content_uuid,
+			file_path: None,
+			title,
+			summary,
+			relevance_score: 1.0,
+			added_at: Utc::now(),
+			added_by: "user".to_string(),
+			doc_type,
+			metadata: None,
+		};
+
+		self.next_doc_id += 1;
+		self.documents.push(doc.clone());
+
+		self.persist_documents().await?;
+		self.update_statistics().await?;
+
+		debug!("Added document: {} (id: {})", doc.title, doc.id);
+
+		Ok(doc.id)
+	}
+
+	/// Add fact
+	pub async fn add_fact(
+		&mut self,
+		text: String,
+		fact_type: FactType,
+		confidence: f32,
+		source_document_id: Option<i32>,
+	) -> Result<i32> {
+		let fact = Fact {
+			id: self.next_fact_id,
+			text,
+			fact_type,
+			confidence,
+			source_document_id,
+			created_at: Utc::now(),
+			verified: false,
+		};
+
+		self.next_fact_id += 1;
+		self.facts.push(fact.clone());
+
+		self.persist_facts().await?;
+		self.update_statistics().await?;
+
+		debug!("Added fact: {} (id: {})", fact.text, fact.id);
+
+		Ok(fact.id)
+	}
+
+	/// Add embedding
+	pub async fn add_embedding(&mut self, doc_id: i32, vector: Vec<f32>) -> Result<()> {
+		let (content_uuid, title, metadata_val) = {
+			let doc = self
+				.get_document(doc_id)
+				.ok_or(MemoryFileError::DocumentNotFound(doc_id))?;
+			(doc.content_uuid, doc.title.clone(), doc.metadata.clone())
+		};
+
+		self.vector_store
+			.add_embedding(doc_id, content_uuid, title, vector, metadata_val)
+			.await?;
+
+		self.persist_vector_store().await?;
+		self.update_statistics().await?;
+
+		Ok(())
+	}
+
+	/// Search similar documents
+	pub async fn search_similar(&self, query_vector: Vec<f32>, limit: usize) -> Result<Vec<i32>> {
+		let results = self.vector_store.search(query_vector, limit).await?;
+		Ok(results.into_iter().map(|r| r.id).collect())
+	}
+
+	/// Get documents
+	pub fn get_documents(&self) -> &[Document] {
+		&self.documents
+	}
+
+	/// Get document by ID
+	pub fn get_document(&self, id: i32) -> Option<&Document> {
+		self.documents.iter().find(|d| d.id == id)
+	}
+
+	/// Get facts
+	pub fn get_facts(&self) -> &[Fact] {
+		&self.facts
+	}
+
+	/// Get metadata
+	pub fn metadata(&self) -> &MemoryMetadata {
+		&self.metadata
+	}
+
+	/// Get path
+	pub fn path(&self) -> &Path {
+		&self.path
+	}
+
+	/// Get embedding count
+	pub async fn embedding_count(&self) -> Result<usize> {
+		self.vector_store.count().await.map_err(Into::into)
+	}
+
+	/// Get facts sorted by confidence
+	pub fn get_facts_sorted(&self) -> Vec<&Fact> {
+		let mut sorted = self.facts.iter().collect::<Vec<_>>();
+		sorted.sort_by(|a, b| {
+			match (a.verified, b.verified) {
+				(true, false) => std::cmp::Ordering::Less,
+				(false, true) => std::cmp::Ordering::Greater,
+				_ => b
+					.confidence
+					.partial_cmp(&a.confidence)
+					.unwrap_or(std::cmp::Ordering::Equal),
+			}
+		});
+		sorted
+	}
+
+	/// Persist documents to archive
+	async fn persist_documents(&mut self) -> Result<()> {
+		let bytes = rmp_serde::to_vec(&self.documents)?;
+		self.archive.update_file("documents.msgpack", &bytes)?;
+		Ok(())
+	}
+
+	/// Persist facts to archive
+	async fn persist_facts(&mut self) -> Result<()> {
+		let bytes = rmp_serde::to_vec(&self.facts)?;
+		self.archive.update_file("facts.msgpack", &bytes)?;
+		Ok(())
+	}
+
+	/// Persist vector store to archive
+	async fn persist_vector_store(&mut self) -> Result<()> {
+		let bytes = self.vector_store.to_bytes()?;
+		self.archive.update_file("embeddings.msgpack", &bytes)?;
+		Ok(())
+	}
+
+	/// Persist metadata to archive
+	async fn persist_metadata(&mut self) -> Result<()> {
+		let bytes = rmp_serde::to_vec(&self.metadata)?;
+		self.archive.update_file("metadata.msgpack", &bytes)?;
+		Ok(())
+	}
+
+	/// Update statistics
+	async fn update_statistics(&mut self) -> Result<()> {
+		let embedding_count = self.vector_store.count().await?;
+		let file_size = self.archive.size()?;
+
+		self.metadata.statistics = MemoryStatistics {
+			document_count: self.documents.len(),
+			fact_count: self.facts.len(),
+			conversation_message_count: 0,
+			embedding_count,
+			file_size_bytes: file_size,
+		};
+
+		self.persist_metadata().await?;
+
+		Ok(())
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+	use tempfile::NamedTempFile;
+
+	#[tokio::test]
+	async fn test_create_single_file_memory() {
+		let temp_file = NamedTempFile::new().unwrap();
+
+		let memory = MemoryFile::create(
+			"test".to_string(),
+			MemoryScope::Standalone,
+			temp_file.path(),
+		)
+		.await
+		.unwrap();
+
+		// Should be a single file
+		assert!(memory.path().exists());
+		assert!(memory.path().is_file());
+	}
+
+	#[tokio::test]
+	async fn test_add_and_retrieve() {
+		let temp_file = NamedTempFile::new().unwrap();
+
+		let mut memory = MemoryFile::create(
+			"test".to_string(),
+			MemoryScope::Standalone,
+			temp_file.path(),
+		)
+		.await
+		.unwrap();
+
+		// Add document
+		let doc_id = memory
+			.add_document(None, "Test Doc".to_string(), None, DocumentType::Note)
+			.await
+			.unwrap();
+
+		assert_eq!(memory.get_documents().len(), 1);
+		assert_eq!(memory.get_document(doc_id).unwrap().title, "Test Doc");
+
+		// Add fact
+		memory
+			.add_fact("Test fact".to_string(), FactType::General, 1.0, Some(doc_id))
+			.await
+			.unwrap();
+
+		assert_eq!(memory.get_facts().len(), 1);
+	}
+
+	#[tokio::test]
+	async fn test_persistence() {
+		let temp_file = NamedTempFile::new().unwrap();
+		let path = temp_file.path().to_path_buf();
+
+		{
+			let mut memory = MemoryFile::create(
+				"test".to_string(),
+				MemoryScope::Standalone,
+				&path,
+			)
+			.await
+			.unwrap();
+
+			memory
+				.add_document(None, "Doc".to_string(), None, DocumentType::Note)
+				.await
+				.unwrap();
+
+			memory
+				.add_fact("Fact".to_string(), FactType::General, 1.0, None)
+				.await
+				.unwrap();
+		}
+
+		// Reopen
+		let memory = MemoryFile::open(path).await.unwrap();
+
+		assert_eq!(memory.get_documents().len(), 1);
+		assert_eq!(memory.get_facts().len(), 1);
+	}
+
+	#[tokio::test]
+	async fn test_embeddings_in_archive() {
+		let temp_file = NamedTempFile::new().unwrap();
+
+		let mut memory = MemoryFile::create(
+			"test".to_string(),
+			MemoryScope::Standalone,
+			temp_file.path(),
+		)
+		.await
+		.unwrap();
+
+		let doc_id = memory
+			.add_document(None, "Doc".to_string(), None, DocumentType::Code)
+			.await
+			.unwrap();
+
+		// Add embedding
+		let vector = vec![0.1, 0.2, 0.3, 0.4];
+		memory.add_embedding(doc_id, vector.clone()).await.unwrap();
+
+		// Search
+		let results = memory.search_similar(vector, 10).await.unwrap();
+		assert_eq!(results.len(), 1);
+		assert_eq!(results[0], doc_id);
+	}
+}
diff --git a/core/src/domain/memory/types.rs b/core/src/domain/memory/types.rs
new file mode 100644
index 000000000..af3cef9bd
--- /dev/null
+++ b/core/src/domain/memory/types.rs
@@ -0,0 +1,176 @@
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+/// A document reference in a memory file
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Document {
+	/// Internal ID within memory
+	pub id: i32,
+
+	/// Spacedrive content UUID (if file is in VDFS)
+	pub content_uuid: Option<Uuid>,
+
+	/// Physical path (for non-VDFS files or reference)
+	pub file_path: Option<String>,
+
+	/// Document title
+	pub title: String,
+
+	/// AI-generated or user-written summary
+	pub summary: Option<String>,
+
+	/// Relevance score (0.0-1.0)
+	pub relevance_score: f32,
+
+	/// When document was added to memory
+	pub added_at: DateTime<Utc>,
+
+	/// Who added it
+	pub added_by: String,
+
+	/// Document type classification
+	pub doc_type: DocumentType,
+
+	/// Additional metadata
+	pub metadata: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum DocumentType {
+	Code,
+	Documentation,
+	Reference,
+	Note,
+	Design,
+	Test,
+	Config,
+	Other,
+}
+
+impl std::fmt::Display for DocumentType {
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		let s = match self {
+			Self::Code => "Code",
+			Self::Documentation => "Documentation",
+			Self::Reference => "Reference",
+			Self::Note => "Note",
+			Self::Design => "Design",
+			Self::Test => "Test",
+			Self::Config => "Config",
+			Self::Other => "Other",
+		};
+		write!(f, "{}", s)
+	}
+}
+
+/// A learned fact in a memory file
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Fact {
+	/// Internal ID within memory
+	pub id: i32,
+
+	/// The fact text
+	pub text: String,
+
+	/// Type of fact
+	pub fact_type: FactType,
+
+	/// Confidence score (0.0-1.0)
+	pub confidence: f32,
+
+	/// Source document ID (if extracted from document)
+	pub source_document_id: Option<i32>,
+
+	/// When fact was created
+	pub created_at: DateTime<Utc>,
+
+	/// Whether fact has been verified by user
+	pub verified: bool,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum FactType {
+	/// Core principle or pattern
+	Principle,
+
+	/// Decision made during development
+	Decision,
+
+	/// Observed pattern or behavior
+	Pattern,
+
+	/// Known issue or limitation
+	Issue,
+
+	/// Implementation detail
+	Detail,
+
+	/// General knowledge
+	General,
+}
+
+impl std::fmt::Display for FactType {
+	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+		let s = match self {
+			Self::Principle => "Principle",
+			Self::Decision => "Decision",
+			Self::Pattern => "Pattern",
+			Self::Issue => "Issue",
+			Self::Detail => "Detail",
+			Self::General => "General",
+		};
+		write!(f, "{}", s)
+	}
+}
+
+/// Statistics about a memory file
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
+pub struct MemoryStatistics {
+	/// Number of documents
+	pub document_count: usize,
+
+	/// Number of facts
+	pub fact_count: usize,
+
+	/// Number of conversation messages (if history enabled)
+	pub conversation_message_count: usize,
+
+	/// Number of embeddings in vector store
+	pub embedding_count: usize,
+
+	/// Total size on disk (bytes)
+	pub file_size_bytes: u64,
+}
+
+/// Conversation message (optional history)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ConversationMessage {
+	pub id: i32,
+	pub session_id: Uuid,
+	pub role: MessageRole,
+	pub content: String,
+	pub tokens: Option<usize>,
+	pub created_at: DateTime<Utc>,
+	pub metadata: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum MessageRole {
+	User,
+	Assistant,
+	System,
+}
+
+/// Audit log entry
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct AuditEntry {
+	pub id: i32,
+	pub action: String,
+	pub actor: String,
+	pub details: Option<serde_json::Value>,
+	pub timestamp: DateTime<Utc>,
+}
diff --git a/core/src/domain/memory/vector_store.rs b/core/src/domain/memory/vector_store.rs
new file mode 100644
index 000000000..eef8a065f
--- /dev/null
+++ b/core/src/domain/memory/vector_store.rs
@@ -0,0 +1,291 @@
+use std::{collections::HashMap, path::Path};
+
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+use tokio::fs;
+use tracing::{debug, info};
+use uuid::Uuid;
+
+#[derive(Error, Debug)]
+pub enum VectorStoreError {
+	#[error("Vector store error: {0}")]
+	Store(String),
+
+	#[error("IO error: {0}")]
+	Io(#[from] std::io::Error),
+
+	#[error("Serialization error: {0}")]
+	Serialization(#[from] rmp_serde::encode::Error),
+
+	#[error("Deserialization error: {0}")]
+	Deserialization(#[from] rmp_serde::decode::Error),
+}
+
+pub type Result<T> = std::result::Result<T, VectorStoreError>;
+
+/// Simple MessagePack-based vector store
+/// TODO: Replace with LanceDB once dependency conflicts resolved
+pub struct VectorStore {
+	storage_path: std::path::PathBuf,
+	embeddings: HashMap<i32, VectorDocument>,
+}
+
+/// Document with embedding for storage
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct VectorDocument {
+	/// Document ID (maps to documents table)
+	pub id: i32,
+
+	/// Content UUID (if from Spacedrive)
+	pub content_uuid: Option<String>,
+
+	/// Document title
+	pub title: String,
+
+	/// Embedding vector
+	pub vector: Vec<f32>,
+
+	/// Additional metadata
+	pub metadata: Option<serde_json::Value>,
+}
+
+impl VectorStore {
+	/// Create new vector store in memory directory (old directory format)
+	pub async fn create(memory_path: &Path) -> Result<Self> {
+		let storage_path = memory_path.join("embeddings.msgpack");
+		info!("Creating vector store at: {}", storage_path.display());
+
+		let store = Self {
+			storage_path: storage_path.clone(),
+			embeddings: HashMap::new(),
+		};
+
+		// Write empty embeddings file
+		store.persist().await?;
+
+		Ok(store)
+	}
+
+	/// Create in-memory vector store (for archive format)
+	pub fn create_in_memory() -> Result<Self> {
+		Ok(Self {
+			storage_path: std::path::PathBuf::new(),
+			embeddings: HashMap::new(),
+		})
+	}
+
+	/// Load from bytes (for archive format)
+	pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
+		let embeddings = rmp_serde::from_slice(bytes)?;
+		Ok(Self {
+			storage_path: std::path::PathBuf::new(),
+			embeddings,
+		})
+	}
+
+	/// Serialize to bytes (for archive format)
+	pub fn to_bytes(&self) -> Result<Vec<u8>> {
+		let bytes = rmp_serde::to_vec(&self.embeddings)?;
+		Ok(bytes)
+	}
+
+	/// Open existing vector store
+	pub async fn open(memory_path: &Path) -> Result<Self> {
+		let storage_path = memory_path.join("embeddings.msgpack");
+		debug!("Opening vector store at: {}", storage_path.display());
+
+		let embeddings = if storage_path.exists() {
+			let bytes = fs::read(&storage_path).await?;
+			rmp_serde::from_slice(&bytes)?
+		} else {
+			HashMap::new()
+		};
+
+		Ok(Self {
+			storage_path,
+			embeddings,
+		})
+	}
+
+	/// Persist to disk (only for directory-based format)
+	async fn persist(&self) -> Result<()> {
+		// Skip if in-memory mode (empty path)
+		if self.storage_path.as_os_str().is_empty() {
+			return Ok(());
+		}
+
+		let bytes = rmp_serde::to_vec(&self.embeddings)?;
+		fs::write(&self.storage_path, bytes).await?;
+		Ok(())
+	}
+
+	/// Add embedding for a document
+	pub async fn add_embedding(
+		&mut self,
+		doc_id: i32,
+		content_uuid: Option<Uuid>,
+		title: String,
+		vector: Vec<f32>,
+		metadata: Option<serde_json::Value>,
+	) -> Result<()> {
+		let doc = VectorDocument {
+			id: doc_id,
+			content_uuid: content_uuid.map(|u| u.to_string()),
+			title,
+			vector,
+			metadata,
+		};
+
+		self.embeddings.insert(doc_id, doc);
+		self.persist().await?;
+
+		Ok(())
+	}
+
+	/// Search for similar documents (simple cosine similarity)
+	pub async fn search(
+		&self,
+		query_vector: Vec<f32>,
+		limit: usize,
+	) -> Result<Vec<VectorDocument>> {
+		let mut results: Vec<(VectorDocument, f32)> = self
+			.embeddings
+			.values()
+			.map(|doc| {
+				let similarity = cosine_similarity(&query_vector, &doc.vector);
+				(doc.clone(), similarity)
+			})
+			.collect();
+
+		// Sort by similarity (descending)
+		results.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
+
+		Ok(results.into_iter().take(limit).map(|(doc, _)| doc).collect())
+	}
+
+	/// Get embedding count
+	pub async fn count(&self) -> Result<usize> {
+		Ok(self.embeddings.len())
+	}
+
+	/// Remove embedding by document ID
+	pub async fn remove_embedding(&mut self, doc_id: i32) -> Result<()> {
+		self.embeddings.remove(&doc_id);
+		self.persist().await?;
+		Ok(())
+	}
+}
+
+/// Calculate cosine similarity between two vectors
+fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
+	if a.len() != b.len() {
+		return 0.0;
+	}
+
+	let dot: f32 = a.iter().zip(b.iter()).map(|(x, y)| x * y).sum();
+	let mag_a: f32 = a.iter().map(|x| x * x).sum::<f32>().sqrt();
+	let mag_b: f32 = b.iter().map(|x| x * x).sum::<f32>().sqrt();
+
+	if mag_a == 0.0 || mag_b == 0.0 {
+		return 0.0;
+	}
+
+	dot / (mag_a * mag_b)
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+	use tempfile::tempdir;
+
+	#[tokio::test]
+	async fn test_create_vector_store() {
+		let temp_dir = tempdir().unwrap();
+		let memory_path = temp_dir.path().join("test.memory");
+		std::fs::create_dir_all(&memory_path).unwrap();
+
+		let _store = 
VectorStore::create(&memory_path).await.unwrap(); + + assert!(memory_path.join("embeddings.msgpack").exists()); + } + + #[tokio::test] + async fn test_add_and_search() { + let temp_dir = tempdir().unwrap(); + let memory_path = temp_dir.path().join("test.memory"); + std::fs::create_dir_all(&memory_path).unwrap(); + + let mut store = VectorStore::create(&memory_path).await.unwrap(); + + // Add test embeddings + let vector1 = vec![0.1, 0.2, 0.3, 0.4]; + let vector2 = vec![0.2, 0.3, 0.4, 0.5]; + + store + .add_embedding(1, None, "Doc 1".to_string(), vector1.clone(), None) + .await + .unwrap(); + + store + .add_embedding(2, None, "Doc 2".to_string(), vector2, None) + .await + .unwrap(); + + // Search with query similar to vector1 + let results = store.search(vector1, 10).await.unwrap(); + + assert_eq!(results.len(), 2); + assert_eq!(results[0].id, 1); // Most similar should be first + assert_eq!(results[0].title, "Doc 1"); + } + + #[tokio::test] + async fn test_count() { + let temp_dir = tempdir().unwrap(); + let memory_path = temp_dir.path().join("test.memory"); + std::fs::create_dir_all(&memory_path).unwrap(); + + let mut store = VectorStore::create(&memory_path).await.unwrap(); + + assert_eq!(store.count().await.unwrap(), 0); + + store + .add_embedding( + 1, + None, + "Doc 1".to_string(), + vec![0.1, 0.2, 0.3], + None, + ) + .await + .unwrap(); + + assert_eq!(store.count().await.unwrap(), 1); + } + + #[tokio::test] + async fn test_remove_embedding() { + let temp_dir = tempdir().unwrap(); + let memory_path = temp_dir.path().join("test.memory"); + std::fs::create_dir_all(&memory_path).unwrap(); + + let mut store = VectorStore::create(&memory_path).await.unwrap(); + + store + .add_embedding( + 1, + None, + "Doc 1".to_string(), + vec![0.1, 0.2, 0.3], + None, + ) + .await + .unwrap(); + + assert_eq!(store.count().await.unwrap(), 1); + + store.remove_embedding(1).await.unwrap(); + + assert_eq!(store.count().await.unwrap(), 0); + } +} diff --git a/core/src/domain/mod.rs b/core/src/domain/mod.rs index 59ba377bd..99282e368 100644 --- a/core/src/domain/mod.rs +++ b/core/src/domain/mod.rs @@ -11,6 +11,7 @@ pub mod device; pub mod file; pub mod location; pub mod media_data; +pub mod memory; pub mod resource; pub mod resource_manager; pub mod resource_registry; @@ -26,6 +27,7 @@ pub use device::{Device, OperatingSystem}; pub use file::{EntryKind, File, Sidecar}; pub use location::{IndexMode, Location, ScanState}; pub use media_data::{AudioMediaData, ImageMediaData, VideoMediaData}; +pub use memory::{MemoryFile, MemoryMetadata, MemoryScope}; pub use resource::Identifiable; pub use resource_manager::ResourceManager; pub use space::{ diff --git a/core/src/domain/tag.rs b/core/src/domain/tag.rs index 7367c58ae..af87097c9 100644 --- a/core/src/domain/tag.rs +++ b/core/src/domain/tag.rs @@ -10,6 +10,8 @@ use specta::Type; use std::collections::HashMap; use uuid::Uuid; +use super::resource::Identifiable; + /// A tag with advanced capabilities for contextual organization #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Type)] pub struct Tag { @@ -438,3 +440,18 @@ pub enum TagError { #[error("Database error: {0}")] DatabaseError(String), } + +// Implement Identifiable for normalized cache support +impl Identifiable for Tag { + fn id(&self) -> Uuid { + self.id + } + + fn resource_type() -> &'static str { + "tag" + } + + fn sync_dependencies() -> &'static [&'static str] { + &[] // Tags are a simple resource backed by the tags table + } +} diff --git a/core/src/filetype/definitions/documents.toml 
b/core/src/filetype/definitions/documents.toml index ec55362dc..c086f61a8 100644 --- a/core/src/filetype/definitions/documents.toml +++ b/core/src/filetype/definitions/documents.toml @@ -209,3 +209,21 @@ priority = 100 [file_types.metadata] text_based = true +# Spacedrive Memory Files +[[file_types]] +id = "application/x-spacedrive-memory" +name = "Spacedrive Memory" +extensions = ["memory"] +mime_types = ["application/x-spacedrive-memory"] +category = "document" +priority = 100 + +[[file_types.magic_bytes]] +pattern = "53 44 4D 45 4D 01" # "SDMEM\x01" +offset = 0 +priority = 100 + +[file_types.metadata] +spacedrive = true +ai_knowledge = true +memory_file = true diff --git a/core/src/filetype/definitions/misc.toml b/core/src/filetype/definitions/misc.toml index 86d48c9f9..e5f8e8dab 100644 --- a/core/src/filetype/definitions/misc.toml +++ b/core/src/filetype/definitions/misc.toml @@ -332,7 +332,6 @@ priority = 85 text_file = true typescript = true -# Encrypted/Spacedrive specific [[file_types]] id = "application/x-spacedrive-encrypted" name = "Spacedrive Encrypted" diff --git a/core/src/library/sync_helpers.rs b/core/src/library/sync_helpers.rs index a0253a028..8db8ca887 100644 --- a/core/src/library/sync_helpers.rs +++ b/core/src/library/sync_helpers.rs @@ -42,13 +42,35 @@ impl Library { .to_sync_json() .map_err(|e| anyhow::anyhow!("Failed to serialize model: {}", e))?; - if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await { - self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data) + let result = if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await { + self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data.clone()) .await } else { - self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data) + self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data.clone()) .await + }; + + // Emit resource event for frontend reactivity + if result.is_ok() { + use crate::infra::sync::ChangeType as CT; + match change_type { + CT::Delete => { + self.event_bus().emit(Event::ResourceDeleted { + resource_type: M::SYNC_MODEL.to_string(), + resource_id: model.sync_id(), + }); + } + CT::Insert | CT::Update => { + self.event_bus().emit(Event::ResourceChanged { + resource_type: M::SYNC_MODEL.to_string(), + resource: data, + metadata: None, + }); + } + } } + + result } /// Sync a model with FK conversion (for models with relationships) @@ -119,13 +141,35 @@ impl Library { } } - if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await { - self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data) + let result = if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await { + self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data.clone()) .await } else { - self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data) + self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data.clone()) .await + }; + + // Emit resource event for frontend reactivity + if result.is_ok() { + use crate::infra::sync::ChangeType as CT; + match change_type { + CT::Delete => { + self.event_bus().emit(Event::ResourceDeleted { + resource_type: M::SYNC_MODEL.to_string(), + resource_id: model.sync_id(), + }); + } + CT::Insert | CT::Update => { + self.event_bus().emit(Event::ResourceChanged { + resource_type: M::SYNC_MODEL.to_string(), + resource: data, + metadata: None, + }); + } + } } + + result } /// Batch sync multiple models (optimized for bulk operations) diff --git 
a/core/src/ops/files/query/directory_listing.rs b/core/src/ops/files/query/directory_listing.rs index f6c831390..0944ad872 100644 --- a/core/src/ops/files/query/directory_listing.rs +++ b/core/src/ops/files/query/directory_listing.rs @@ -279,6 +279,106 @@ impl DirectoryListingQuery { }); } + // Collect entry UUIDs for tag lookup + let entry_uuids: Vec = rows + .iter() + .filter_map(|row| { + row.try_get::>("", "entry_uuid") + .ok() + .flatten() + }) + .collect(); + + // Batch fetch tags for these entries (both entry-scoped and content-scoped) + let mut tags_by_entry: HashMap> = HashMap::new(); + + if !entry_uuids.is_empty() || !content_uuids.is_empty() { + use sea_orm::{ColumnTrait, EntityTrait, QueryFilter}; + + tracing::debug!("Loading tags for {} entries and {} content identities", entry_uuids.len(), content_uuids.len()); + + // Load user_metadata for entries and content + let mut metadata_records = user_metadata::Entity::find() + .filter( + user_metadata::Column::EntryUuid.is_in(entry_uuids.clone()) + .or(user_metadata::Column::ContentIdentityUuid.is_in(content_uuids.clone())) + ) + .all(db) + .await?; + + tracing::debug!("Found {} metadata records", metadata_records.len()); + + if !metadata_records.is_empty() { + let metadata_ids: Vec = metadata_records.iter().map(|m| m.id).collect(); + + // Load user_metadata_tag records + let metadata_tags = user_metadata_tag::Entity::find() + .filter(user_metadata_tag::Column::UserMetadataId.is_in(metadata_ids)) + .all(db) + .await?; + + // Get all unique tag IDs + let tag_ids: Vec = metadata_tags.iter().map(|mt| mt.tag_id).collect(); + let unique_tag_ids: std::collections::HashSet = tag_ids.iter().cloned().collect(); + + tracing::debug!("Found {} user_metadata_tag records with {} unique tags", metadata_tags.len(), unique_tag_ids.len()); + + // Load tag entities + let tag_models = tag::Entity::find() + .filter(tag::Column::Id.is_in(tag_ids)) + .all(db) + .await?; + + tracing::debug!("Loaded {} tag models", tag_models.len()); + + // Build tag_db_id -> Tag mapping + let tag_map: HashMap = tag_models + .into_iter() + .filter_map(|t| { + let db_id = t.id; + crate::ops::tags::manager::model_to_domain(t).ok().map(|tag| (db_id, tag)) + }) + .collect(); + + tracing::debug!("Built tag map with {} entries", tag_map.len()); + + // Build metadata_id -> Vec mapping + let mut tags_by_metadata: HashMap> = HashMap::new(); + for mt in metadata_tags { + if let Some(tag) = tag_map.get(&mt.tag_id) { + tags_by_metadata + .entry(mt.user_metadata_id) + .or_insert_with(Vec::new) + .push(tag.clone()); + } + } + + // Map tags to entries (prioritize entry-scoped, fall back to content-scoped) + for metadata in metadata_records { + if let Some(tags) = tags_by_metadata.get(&metadata.id) { + // Entry-scoped metadata (higher priority) + if let Some(entry_uuid) = metadata.entry_uuid { + tags_by_entry.insert(entry_uuid, tags.clone()); + } + // Content-scoped metadata (applies to all entries with this content) + else if let Some(content_uuid) = metadata.content_identity_uuid { + // Apply to all entries with this content_uuid + for row in &rows { + if let Some(ci_uuid) = row.try_get::>("", "content_identity_uuid").ok().flatten() { + if ci_uuid == content_uuid { + if let Some(entry_uuid) = row.try_get::>("", "entry_uuid").ok().flatten() { + // Only set if not already set by entry-scoped metadata + tags_by_entry.entry(entry_uuid).or_insert_with(|| tags.clone()); + } + } + } + } + } + } + } + } + } + // Convert to File objects let mut files = Vec::new(); for row in rows { @@ -447,6 
+547,14 @@ impl DirectoryListingQuery { }); } + // Add tags from batch lookup + if let Some(entry_uuid_val) = entry_uuid { + if let Some(tags) = tags_by_entry.get(&entry_uuid_val) { + tracing::debug!("Adding {} tags to entry {}", tags.len(), entry_uuid_val); + file.tags = tags.clone(); + } + } + files.push(file); } diff --git a/core/src/ops/files/query/file_by_id.rs b/core/src/ops/files/query/file_by_id.rs index 30b49a819..fa6f4bb5a 100644 --- a/core/src/ops/files/query/file_by_id.rs +++ b/core/src/ops/files/query/file_by_id.rs @@ -6,7 +6,7 @@ use crate::{ domain::{addressing::SdPath, File}, infra::db::entities::{ audio_media_data, content_identity, device, directory_paths, entry, image_media_data, - location, sidecar, tag, user_metadata_tag, video_media_data, + location, sidecar, tag, user_metadata, user_metadata_tag, video_media_data, }, infra::query::LibraryQuery, }; @@ -165,7 +165,7 @@ impl LibraryQuery for FileByIdQuery { }; // Convert to File using from_entity_model - let mut file = File::from_entity_model(entry_model, sd_path); + let mut file = File::from_entity_model(entry_model.clone(), sd_path); file.sidecars = sidecars; file.content_identity = content_identity_domain; file.image_media_data = image_media; @@ -176,6 +176,55 @@ impl LibraryQuery for FileByIdQuery { file.content_kind = ci.kind; } + // Load tags for this entry + if let Some(entry_uuid) = entry_model.uuid { + use std::collections::HashMap; + + // Load user_metadata for this entry (both entry-scoped and content-scoped) + let mut metadata_filter = user_metadata::Column::EntryUuid.eq(entry_uuid); + + // Also check for content-scoped metadata if content identity exists + if let Some(ref ci) = file.content_identity { + metadata_filter = metadata_filter.or(user_metadata::Column::ContentIdentityUuid.eq(ci.uuid)); + } + + let metadata_records = user_metadata::Entity::find() + .filter(metadata_filter) + .all(db.conn()) + .await?; + + if !metadata_records.is_empty() { + let metadata_ids: Vec = metadata_records.iter().map(|m| m.id).collect(); + + // Load user_metadata_tag records + let metadata_tags = user_metadata_tag::Entity::find() + .filter(user_metadata_tag::Column::UserMetadataId.is_in(metadata_ids)) + .all(db.conn()) + .await?; + + if !metadata_tags.is_empty() { + let tag_ids: Vec = metadata_tags.iter().map(|mt| mt.tag_id).collect(); + + // Load tag entities + let tag_models = tag::Entity::find() + .filter(tag::Column::Id.is_in(tag_ids)) + .all(db.conn()) + .await?; + + // Convert to domain tags + let mut tags = Vec::new(); + for tag_model in tag_models { + if let Ok(domain_tag) = crate::ops::tags::manager::model_to_domain(tag_model) { + tags.push(domain_tag); + } + } + + file.tags = tags; + tracing::debug!("Loaded {} tags for entry {}", file.tags.len(), entry_uuid); + } + } + } + Ok(Some(file)) } } diff --git a/core/src/ops/indexing/job.rs b/core/src/ops/indexing/job.rs index 7ad529c07..9d98339c5 100644 --- a/core/src/ops/indexing/job.rs +++ b/core/src/ops/indexing/job.rs @@ -126,7 +126,7 @@ impl IndexerJobConfig { Self { location_id: None, path, - mode: IndexMode::Shallow, + mode: IndexMode::Content, // Enable content identification for ephemeral browsing scope, persistence: IndexPersistence::Ephemeral, max_depth: if scope == IndexScope::Current { @@ -759,9 +759,14 @@ impl IndexerJob { while let Some(batch) = state.entry_batches.pop() { for entry in batch { // Store entry (this will emit ResourceChanged events) - persistence + let entry_id = persistence .store_entry(&entry, None, &root_path) .await?; + + // Queue files 
for content identification + if entry.kind == super::state::EntryKind::File && entry.size > 0 { + state.entries_for_content.push((entry_id, entry.path.clone())); + } } } @@ -775,15 +780,104 @@ impl IndexerJob { async fn run_ephemeral_content_phase_static( state: &mut IndexerState, ctx: &JobContext<'_>, - _ephemeral_index: Arc>, + ephemeral_index: Arc>, ) -> JobResult<()> { - ctx.log("Starting ephemeral content identification"); + use crate::domain::content_identity::ContentHashGenerator; + use crate::ops::indexing::persistence::PersistenceFactory; - // For ephemeral jobs, we can skip heavy content processing or do it lightly - // This is mainly for demonstration - in practice you might generate CAS IDs + ctx.log(format!( + "Starting ephemeral content identification for {} files", + state.entries_for_content.len() + )); + + if state.entries_for_content.is_empty() { + state.phase = Phase::Complete; + return Ok(()); + } + + // Get root path and event bus + let (root_path, event_bus) = { + let index = ephemeral_index.read().await; + (index.root_path.clone(), Some(ctx.library().event_bus().clone())) + }; + + // Create ephemeral persistence for event emission + let persistence = PersistenceFactory::ephemeral( + ephemeral_index.clone(), + event_bus, + root_path, + ); + + // Process files for content identification + let mut success_count = 0; + let mut error_count = 0; + + // Process in chunks to emit progress + const CHUNK_SIZE: usize = 50; + let total = state.entries_for_content.len(); + + while !state.entries_for_content.is_empty() { + ctx.check_interrupt().await?; + + let chunk_size = CHUNK_SIZE.min(state.entries_for_content.len()); + let chunk: Vec<_> = state.entries_for_content.drain(..chunk_size).collect(); + + // Process chunk in parallel + let hash_futures: Vec<_> = chunk + .iter() + .map(|(entry_id, path)| async move { + let hash_result = ContentHashGenerator::generate_content_hash(path).await; + (*entry_id, path.clone(), hash_result) + }) + .collect(); + + let results = futures::future::join_all(hash_futures).await; + + // Store results and emit events + for (entry_id, path, hash_result) in results { + match hash_result { + Ok(cas_id) => { + // Store via persistence (this emits ResourceChanged event with content_identity) + if let Err(e) = persistence.store_content_identity(entry_id, &path, cas_id.clone()).await { + ctx.add_non_critical_error(format!( + "Failed to store content identity for {}: {}", + path.display(), + e + )); + error_count += 1; + } else { + success_count += 1; + } + } + Err(e) => { + // Skip empty files or errors + if !matches!(e, crate::domain::ContentHashError::EmptyFile) { + ctx.add_non_critical_error(format!( + "Failed to hash {}: {}", + path.display(), + e + )); + error_count += 1; + } + } + } + } + + ctx.log(format!( + "Content identification progress: {}/{} (success: {}, errors: {})", + total - state.entries_for_content.len(), + total, + success_count, + error_count + )); + } state.phase = Phase::Complete; - ctx.log("Ephemeral content identification complete"); + ctx.log(format!( + "Ephemeral content identification complete: {} files processed, {} errors", + success_count, + error_count + )); Ok(()) } diff --git a/core/src/ops/indexing/persistence.rs b/core/src/ops/indexing/persistence.rs index cbe3bd7ca..aa92c16e2 100644 --- a/core/src/ops/indexing/persistence.rs +++ b/core/src/ops/indexing/persistence.rs @@ -539,7 +539,7 @@ impl IndexPersistence for EphemeralPersistence { async fn store_content_identity( &self, - _entry_id: i32, + entry_id: i32, path: &Path, 
cas_id: String, ) -> JobResult<()> { @@ -559,14 +559,83 @@ impl IndexPersistence for EphemeralPersistence { let content_identity = EphemeralContentIdentity { cas_id: cas_id.clone(), - mime_type, + mime_type: mime_type.clone(), file_size, entry_count: 1, }; + // Store in ephemeral index { let mut index = self.index.write().await; - index.add_content_identity(cas_id, content_identity); + index.add_content_identity(cas_id.clone(), content_identity); + } + + // Emit ResourceChanged event with updated content_identity + if let Some(event_bus) = &self.event_bus { + use crate::device::get_current_device_slug; + use crate::domain::addressing::SdPath; + use crate::domain::content_identity::ContentIdentity; + use crate::domain::file::File; + use crate::infra::event::{Event, ResourceMetadata}; + + // Get the stored metadata for this entry + let metadata_opt = { + let index = self.index.read().await; + index.entries.get(path).cloned() + }; + + if let Some(metadata) = metadata_opt { + // Build SdPath + let device_slug = get_current_device_slug(); + let sd_path = SdPath::Physical { + device_slug: device_slug.clone(), + path: path.to_path_buf(), + }; + + // Generate UUID for this file (use entry_id as seed for consistency) + let entry_uuid = uuid::Uuid::from_u128(entry_id as u128); + + // Build File with content_identity + let mut file = File::from_ephemeral(entry_uuid, &metadata, sd_path); + + // Add content identity + file.content_identity = Some(ContentIdentity { + uuid: uuid::Uuid::new_v4(), + kind: crate::domain::ContentKind::Unknown, // TODO: detect from mime_type + content_hash: cas_id.clone(), + integrity_hash: None, + mime_type_id: None, + text_content: None, + total_size: file_size as i64, + entry_count: 1, + first_seen_at: chrono::Utc::now(), + last_verified_at: chrono::Utc::now(), + }); + + // Emit event with updated file + let parent_path = path.parent().map(|p| SdPath::Physical { + device_slug, + path: p.to_path_buf(), + }); + + let affected_paths = if let Some(parent) = parent_path { + vec![parent] + } else { + vec![] + }; + + if let Ok(resource_json) = serde_json::to_value(&file) { + event_bus.emit(Event::ResourceChanged { + resource_type: "file".to_string(), + resource: resource_json, + metadata: Some(ResourceMetadata { + no_merge_fields: vec!["sd_path".to_string()], + alternate_ids: vec![], + affected_paths, + }), + }); + } + } } Ok(()) diff --git a/core/src/ops/metadata/manager.rs b/core/src/ops/metadata/manager.rs index fa656aab6..21ba3db72 100644 --- a/core/src/ops/metadata/manager.rs +++ b/core/src/ops/metadata/manager.rs @@ -35,8 +35,13 @@ impl UserMetadataManager { } } - /// Get user metadata for an entry (creates if doesn't exist) + /// Get or create entry-scoped metadata (legacy method) pub async fn get_or_create_metadata(&self, entry_uuid: Uuid) -> Result { + self.get_or_create_entry_metadata(entry_uuid).await + } + + /// Get or create entry-scoped metadata (tags specific to this file instance) + pub async fn get_or_create_entry_metadata(&self, entry_uuid: Uuid) -> Result { let db = &*self.db; // First try to find existing metadata @@ -44,7 +49,7 @@ impl UserMetadataManager { return Ok(metadata); } - // Create new metadata if it doesn't exist + // Create new entry-scoped metadata if it doesn't exist let metadata_uuid = Uuid::new_v4(); let new_metadata = user_metadata::ActiveModel { id: NotSet, @@ -59,14 +64,43 @@ impl UserMetadataManager { updated_at: Set(Utc::now()), }; - let result = new_metadata + new_metadata .insert(&*db) .await .map_err(|e| 
TagError::DatabaseError(e.to_string()))?; - // No need to update entry - the metadata is linked via entry_uuid + Ok(UserMetadata::new(metadata_uuid)) + } + + /// Get or create content-scoped metadata (tags apply to all instances of this content) + pub async fn get_or_create_content_metadata(&self, content_identity_uuid: Uuid) -> Result { + let db = &*self.db; + + // First try to find existing metadata + if let Some(metadata) = self.get_metadata_by_content_uuid(content_identity_uuid).await? { + return Ok(metadata); + } + + // Create new content-scoped metadata if it doesn't exist + let metadata_uuid = Uuid::new_v4(); + let new_metadata = user_metadata::ActiveModel { + id: NotSet, + uuid: Set(metadata_uuid), + entry_uuid: Set(None), + content_identity_uuid: Set(Some(content_identity_uuid)), + notes: Set(None), + favorite: Set(false), + hidden: Set(false), + custom_data: Set(serde_json::json!({})), + created_at: Set(Utc::now()), + updated_at: Set(Utc::now()), + }; + + new_metadata + .insert(&*db) + .await + .map_err(|e| TagError::DatabaseError(e.to_string()))?; - // Return the new metadata Ok(UserMetadata::new(metadata_uuid)) } @@ -91,17 +125,39 @@ impl UserMetadataManager { Ok(None) } - /// Apply semantic tags to an entry - pub async fn apply_semantic_tags( + /// Get user metadata for content by content identity UUID + pub async fn get_metadata_by_content_uuid( &self, - entry_uuid: Uuid, - tag_applications: Vec, - device_uuid: Uuid, - ) -> Result<(), TagError> { + content_identity_uuid: Uuid, + ) -> Result, TagError> { let db = &*self.db; - // Ensure metadata exists for this entry - let metadata = self.get_or_create_metadata(entry_uuid).await?; + // Find metadata by content identity UUID + let metadata_model = user_metadata::Entity::find() + .filter(user_metadata::Column::ContentIdentityUuid.eq(content_identity_uuid)) + .one(&*db) + .await + .map_err(|e| TagError::DatabaseError(e.to_string()))?; + + if let Some(model) = metadata_model { + return Ok(Some(self.model_to_domain(model).await?)); + } + + Ok(None) + } + + /// Apply semantic tags to a content identity (tags all instances of this content) + /// Returns the created user_metadata_tag models for syncing + pub async fn apply_semantic_tags_to_content( + &self, + content_identity_uuid: Uuid, + tag_applications: Vec, + device_uuid: Uuid, + ) -> Result, TagError> { + let db = &*self.db; + + // Get or create content-scoped metadata + let metadata = self.get_or_create_content_metadata(content_identity_uuid).await?; // Get the database ID for the user metadata let metadata_model = user_metadata::Entity::find() @@ -113,6 +169,58 @@ impl UserMetadataManager { "UserMetadata not found".to_string(), ))?; + self.apply_tags_to_metadata(metadata_model.id, &tag_applications, device_uuid).await + } + + /// Apply semantic tags to a specific entry (tags only this instance) + /// Returns the created user_metadata_tag models for syncing + pub async fn apply_semantic_tags_to_entry( + &self, + entry_uuid: Uuid, + tag_applications: Vec, + device_uuid: Uuid, + ) -> Result, TagError> { + let db = &*self.db; + + // Get or create entry-scoped metadata + let metadata = self.get_or_create_entry_metadata(entry_uuid).await?; + + // Get the database ID for the user metadata + let metadata_model = user_metadata::Entity::find() + .filter(user_metadata::Column::Uuid.eq(metadata.id)) + .one(&*db) + .await + .map_err(|e| TagError::DatabaseError(e.to_string()))? 
+ .ok_or(TagError::DatabaseError( + "UserMetadata not found".to_string(), + ))?; + + self.apply_tags_to_metadata(metadata_model.id, &tag_applications, device_uuid).await + } + + /// Apply semantic tags to an entry (legacy method - uses entry-scoped) + /// Returns the created user_metadata_tag models for syncing + pub async fn apply_semantic_tags( + &self, + entry_uuid: Uuid, + tag_applications: Vec, + device_uuid: Uuid, + ) -> Result, TagError> { + self.apply_semantic_tags_to_entry(entry_uuid, tag_applications, device_uuid).await + } + + /// Internal: Apply tags to a metadata record (shared logic) + /// Returns the created/updated user_metadata_tag models for syncing + async fn apply_tags_to_metadata( + &self, + metadata_db_id: i32, + tag_applications: &[TagApplication], + device_uuid: Uuid, + ) -> Result, TagError> { + let db = &*self.db; + + let mut created_models = Vec::new(); + // Convert tag UUIDs to database IDs let tag_uuids: Vec = tag_applications.iter().map(|app| app.tag_id).collect(); let tag_models = crate::infra::db::entities::Tag::find() @@ -125,11 +233,11 @@ impl UserMetadataManager { tag_models.into_iter().map(|m| (m.uuid, m.id)).collect(); // Insert tag applications - for app in &tag_applications { + for app in tag_applications { if let Some(&tag_db_id) = uuid_to_db_id.get(&app.tag_id) { let tag_application = user_metadata_tag::ActiveModel { id: NotSet, - user_metadata_id: Set(metadata_model.id), + user_metadata_id: Set(metadata_db_id), tag_id: Set(tag_db_id), applied_context: Set(app.applied_context.clone()), applied_variant: Set(app.applied_variant.clone()), @@ -152,41 +260,49 @@ impl UserMetadataManager { }; // Insert or update if exists - if let Err(_) = tag_application.insert(&*db).await { - // If insert fails due to unique constraint, update existing - let existing = user_metadata_tag::Entity::find() - .filter(user_metadata_tag::Column::UserMetadataId.eq(metadata_model.id)) - .filter(user_metadata_tag::Column::TagId.eq(tag_db_id)) - .one(&*db) - .await - .map_err(|e| TagError::DatabaseError(e.to_string()))?; - - if let Some(existing_model) = existing { - let mut update_model: user_metadata_tag::ActiveModel = - existing_model.into(); - update_model.applied_context = Set(app.applied_context.clone()); - update_model.applied_variant = Set(app.applied_variant.clone()); - update_model.confidence = Set(app.confidence); - update_model.source = Set(app.source.as_str().to_string()); - update_model.instance_attributes = - Set(if app.instance_attributes.is_empty() { - None - } else { - Some( - serde_json::to_value(&app.instance_attributes) - .unwrap() - .into(), - ) - }); - update_model.updated_at = Set(Utc::now()); - update_model.device_uuid = Set(device_uuid); - - update_model - .update(&*db) + let model = match tag_application.clone().insert(&*db).await { + Ok(model) => model, + Err(_) => { + // If insert fails due to unique constraint, update existing + let existing = user_metadata_tag::Entity::find() + .filter(user_metadata_tag::Column::UserMetadataId.eq(metadata_db_id)) + .filter(user_metadata_tag::Column::TagId.eq(tag_db_id)) + .one(&*db) .await .map_err(|e| TagError::DatabaseError(e.to_string()))?; + + if let Some(existing_model) = existing { + let mut update_model: user_metadata_tag::ActiveModel = + existing_model.into(); + update_model.applied_context = Set(app.applied_context.clone()); + update_model.applied_variant = Set(app.applied_variant.clone()); + update_model.confidence = Set(app.confidence); + update_model.source = Set(app.source.as_str().to_string()); + 
update_model.instance_attributes = + Set(if app.instance_attributes.is_empty() { + None + } else { + Some( + serde_json::to_value(&app.instance_attributes) + .unwrap() + .into(), + ) + }); + update_model.updated_at = Set(Utc::now()); + update_model.device_uuid = Set(device_uuid); + update_model.version = Set(update_model.version.unwrap() + 1); + + update_model + .update(&*db) + .await + .map_err(|e| TagError::DatabaseError(e.to_string()))? + } else { + continue; + } } - } + }; + + created_models.push(model); } } @@ -195,7 +311,7 @@ impl UserMetadataManager { .record_tag_usage(&tag_applications) .await?; - Ok(()) + Ok(created_models) } /// Remove semantic tags from an entry @@ -403,7 +519,8 @@ impl UserMetadataManager { }; self.apply_semantic_tags(Uuid::new_v4(), vec![tag_application], device_uuid) - .await // TODO: Look up actual UUID + .await + .map(|_| ()) // TODO: Look up actual UUID and sync models } /// Apply multiple semantic tags to an entry (user-applied) @@ -419,7 +536,8 @@ impl UserMetadataManager { .collect(); self.apply_semantic_tags(Uuid::new_v4(), tag_applications, device_uuid) - .await // TODO: Look up actual UUID + .await + .map(|_| ()) // TODO: Look up actual UUID and sync models } /// Apply AI-suggested semantic tags with confidence scores @@ -439,7 +557,8 @@ impl UserMetadataManager { .collect(); self.apply_semantic_tags(Uuid::new_v4(), tag_applications, device_uuid) - .await // TODO: Look up actual UUID + .await + .map(|_| ()) // TODO: Look up actual UUID and sync models } /// Find entries by semantic tags (supports hierarchy) diff --git a/core/src/ops/tags/apply/action.rs b/core/src/ops/tags/apply/action.rs index fb560dd27..de8884b03 100644 --- a/core/src/ops/tags/apply/action.rs +++ b/core/src/ops/tags/apply/action.rs @@ -1,6 +1,6 @@ //! 
Apply semantic tags action -use super::{input::ApplyTagsInput, output::ApplyTagsOutput}; +use super::{input::{ApplyTagsInput, TagTargets}, output::ApplyTagsOutput}; use crate::{ context::CoreContext, domain::tag::{TagApplication, TagSource}, @@ -45,7 +45,7 @@ impl LibraryAction for ApplyTagsAction { let device_id = library.id(); // Use library ID as device ID let mut warnings = Vec::new(); - let mut successfully_tagged_entries = Vec::new(); + let mut successfully_tagged_count = 0; // Create tag applications from input let tag_applications: Vec = self @@ -71,32 +71,67 @@ impl LibraryAction for ApplyTagsAction { }) .collect(); - // Apply tags to each entry - for entry_id in &self.input.entry_ids { - // Look up actual entry UUID from entry ID - let entry_uuid = lookup_entry_uuid(&db.conn(), *entry_id) - .await - .map_err(|e| { - ActionError::Internal(format!("Failed to lookup entry UUID: {}", e)) - })?; - match metadata_manager - .apply_semantic_tags(entry_uuid, tag_applications.clone(), device_id) - .await - { - Ok(()) => { - successfully_tagged_entries.push(*entry_id); + // Handle both content-based and entry-based tagging + match &self.input.targets { + TagTargets::Content(content_ids) => { + // Content-based tagging: apply to content identity (tags all instances) + for &content_id in content_ids { + match metadata_manager + .apply_semantic_tags_to_content(content_id, tag_applications.clone(), device_id) + .await + { + Ok(models) => { + successfully_tagged_count += 1; + // Sync each user_metadata_tag model + for model in models { + library + .sync_model(&model, crate::infra::sync::ChangeType::Insert) + .await + .map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?; + } + } + Err(e) => { + warnings.push(format!("Failed to tag content {}: {}", content_id, e)); + } + } } - Err(e) => { - warnings.push(format!("Failed to tag entry {}: {}", entry_id, e)); + } + TagTargets::Entry(entry_ids) => { + // Entry-based tagging: apply to specific entry instance + for &entry_id in entry_ids { + // Look up actual entry UUID from entry ID + let entry_uuid = lookup_entry_uuid(&db.conn(), entry_id) + .await + .map_err(|e| { + ActionError::Internal(format!("Failed to lookup entry UUID: {}", e)) + })?; + match metadata_manager + .apply_semantic_tags_to_entry(entry_uuid, tag_applications.clone(), device_id) + .await + { + Ok(models) => { + successfully_tagged_count += 1; + // Sync each user_metadata_tag model + for model in models { + library + .sync_model(&model, crate::infra::sync::ChangeType::Insert) + .await + .map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?; + } + } + Err(e) => { + warnings.push(format!("Failed to tag entry {}: {}", entry_id, e)); + } + } } } } let output = ApplyTagsOutput::success( - successfully_tagged_entries.len(), + successfully_tagged_count, self.input.tag_ids.len(), self.input.tag_ids.clone(), - successfully_tagged_entries, + vec![], // TODO: Return target IDs if needed ); if !warnings.is_empty() { diff --git a/core/src/ops/tags/apply/input.rs b/core/src/ops/tags/apply/input.rs index 2e4e178f0..99309ce12 100644 --- a/core/src/ops/tags/apply/input.rs +++ b/core/src/ops/tags/apply/input.rs @@ -6,10 +6,23 @@ use specta::Type; use std::collections::HashMap; use uuid::Uuid; +/// Specifies what to tag: content (all instances) or specific entries +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +#[serde(tag = "type", content = "ids")] +pub enum TagTargets { + /// Tag by content identity (applies to ALL instances of 
this content across devices) + /// This is the preferred/default approach + Content(Vec<Uuid>), + + /// Tag by entry ID (applies to ONLY this specific file instance) + /// Use when you want instance-specific tags + Entry(Vec<i32>), +} + #[derive(Debug, Clone, Serialize, Deserialize, Type)] pub struct ApplyTagsInput { - /// Entry IDs to apply tags to - pub entry_ids: Vec<i32>, + /// What to tag: content identities or specific entries + pub targets: TagTargets, /// Tag IDs to apply pub tag_ids: Vec<Uuid>, @@ -28,10 +41,22 @@ pub struct ApplyTagsInput { } impl ApplyTagsInput { - /// Create a simple user tag application - pub fn user_tags(entry_ids: Vec<i32>, tag_ids: Vec<Uuid>) -> Self { + /// Create a content-scoped user tag application (tags all instances) + pub fn user_tags_content(content_ids: Vec<Uuid>, tag_ids: Vec<Uuid>) -> Self { Self { - entry_ids, + targets: TagTargets::Content(content_ids), + tag_ids, + source: Some(TagSource::User), + confidence: Some(1.0), + applied_context: None, + instance_attributes: None, + } + } + + /// Create an entry-scoped user tag application (tags specific instance only) + pub fn user_tags_entry(entry_ids: Vec<i32>, tag_ids: Vec<Uuid>) -> Self { + Self { + targets: TagTargets::Entry(entry_ids), tag_ids, source: Some(TagSource::User), confidence: Some(1.0), @@ -42,13 +67,13 @@ /// Create an AI tag application with confidence pub fn ai_tags( - entry_ids: Vec<i32>, + content_ids: Vec<Uuid>, tag_ids: Vec<Uuid>, confidence: f32, context: String, ) -> Self { Self { - entry_ids, + targets: TagTargets::Content(content_ids), tag_ids, source: Some(TagSource::AI), confidence: Some(confidence), @@ -59,16 +84,27 @@ /// Validate the input pub fn validate(&self) -> Result<(), String> { - if self.entry_ids.is_empty() { - return Err("entry_ids cannot be empty".to_string()); - } + let target_count = match &self.targets { + TagTargets::Content(ids) => { + if ids.is_empty() { + return Err("content identity IDs cannot be empty".to_string()); + } + ids.len() + } + TagTargets::Entry(ids) => { + if ids.is_empty() { + return Err("entry IDs cannot be empty".to_string()); + } + ids.len() + } + }; if self.tag_ids.is_empty() { return Err("tag_ids cannot be empty".to_string()); } - if self.entry_ids.len() > 1000 { - return Err("Cannot apply tags to more than 1000 entries at once".to_string()); + if target_count > 1000 { + return Err("Cannot apply tags to more than 1000 targets at once".to_string()); } if self.tag_ids.len() > 50 { diff --git a/core/src/ops/tags/create/action.rs index ce77a6d79..9503d9706 100644 --- a/core/src/ops/tags/create/action.rs +++ b/core/src/ops/tags/create/action.rs @@ -1,14 +1,16 @@ //!
Create semantic tag action -use super::{input::CreateTagInput, output::CreateTagOutput}; +use super::{input::{ApplyToTargets, CreateTagInput}, output::CreateTagOutput}; use crate::infra::sync::ChangeType; use crate::{ context::CoreContext, - domain::tag::{PrivacyLevel, Tag, TagType}, + domain::tag::{PrivacyLevel, Tag, TagApplication, TagSource, TagType}, infra::action::{error::ActionError, LibraryAction}, library::Library, ops::tags::manager::TagManager, + ops::metadata::manager::UserMetadataManager, }; +use chrono::Utc; use serde::{Deserialize, Serialize}; use std::sync::Arc; use uuid::Uuid; @@ -70,6 +72,66 @@ impl LibraryAction for CreateTagAction { .await .map_err(|e| ActionError::Internal(format!("Failed to sync tag: {}", e)))?; + // If apply_to is provided, apply the tag to those targets + if let Some(targets) = &self.input.apply_to { + let metadata_manager = UserMetadataManager::new(Arc::new(library.db().conn().clone())); + + // Create a tag application for this newly created tag + let tag_application = TagApplication { + tag_id: tag_entity.uuid, + applied_context: None, + applied_variant: None, + confidence: 1.0, + source: TagSource::User, + instance_attributes: Default::default(), + created_at: Utc::now(), + device_uuid: device_id, + }; + + match targets { + ApplyToTargets::Content(content_ids) => { + // Apply to content identities (all instances) + for &content_id in content_ids { + let models = metadata_manager + .apply_semantic_tags_to_content(content_id, vec![tag_application.clone()], device_id) + .await + .map_err(|e| ActionError::Internal(format!("Failed to apply tag to content: {}", e)))?; + + // Sync each user_metadata_tag model + for model in models { + library + .sync_model(&model, ChangeType::Insert) + .await + .map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?; + } + } + } + ApplyToTargets::Entry(entry_ids) => { + // Apply to specific entries + for &entry_id in entry_ids { + // Look up entry UUID from database ID + let entry_uuid = lookup_entry_uuid(&library.db().conn(), entry_id) + .await + .map_err(|e| ActionError::Internal(format!("Failed to lookup entry UUID: {}", e)))?; + + // Apply the tag + let models = metadata_manager + .apply_semantic_tags_to_entry(entry_uuid, vec![tag_application.clone()], device_id) + .await + .map_err(|e| ActionError::Internal(format!("Failed to apply tag to entry: {}", e)))?; + + // Sync each user_metadata_tag model + for model in models { + library + .sync_model(&model, ChangeType::Insert) + .await + .map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?; + } + } + } + } + } + Ok(CreateTagOutput::from_entity(&tag_entity)) } @@ -80,3 +142,19 @@ impl LibraryAction for CreateTagAction { // Register library action crate::register_library_action!(CreateTagAction, "tags.create"); + +/// Look up entry UUID from entry database ID +async fn lookup_entry_uuid(db: &sea_orm::DatabaseConnection, entry_id: i32) -> Result { + use crate::infra::db::entities::entry; + use sea_orm::EntityTrait; + + let entry_model = entry::Entity::find_by_id(entry_id) + .one(db) + .await + .map_err(|e| format!("Database error: {}", e))? 
+ .ok_or_else(|| format!("Entry with ID {} not found", entry_id))?; + + entry_model + .uuid + .ok_or_else(|| format!("Entry {} has no UUID assigned", entry_id)) +} diff --git a/core/src/ops/tags/create/input.rs b/core/src/ops/tags/create/input.rs index 6923cc108..a2944ea36 100644 --- a/core/src/ops/tags/create/input.rs +++ b/core/src/ops/tags/create/input.rs @@ -35,6 +35,19 @@ pub struct CreateTagInput { /// Initial attributes pub attributes: Option>, + + /// Optional: Targets to immediately apply this tag to after creation + pub apply_to: Option, +} + +/// Targets for immediately applying a newly created tag +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +#[serde(tag = "type", content = "ids")] +pub enum ApplyToTargets { + /// Apply to content identities (all instances) + Content(Vec), + /// Apply to specific entries (single instance) + Entry(Vec), } impl CreateTagInput { @@ -55,6 +68,7 @@ impl CreateTagInput { privacy_level: None, search_weight: None, attributes: None, + apply_to: None, } } diff --git a/core/src/ops/tags/manager.rs b/core/src/ops/tags/manager.rs index 055f21535..e1b1fa5a6 100644 --- a/core/src/ops/tags/manager.rs +++ b/core/src/ops/tags/manager.rs @@ -31,7 +31,7 @@ pub struct TagManager { } // Helper function to convert database model to domain model -fn model_to_domain(model: tag::Model) -> Result { +pub(crate) fn model_to_domain(model: tag::Model) -> Result { let aliases: Vec = model .aliases .as_ref() @@ -689,43 +689,53 @@ impl TagManager { ) -> Result, TagError> { let db = &*self.db; - // Try FTS5 search first, fall back to LIKE patterns if FTS5 is not available let mut tag_db_ids = Vec::new(); - // Attempt FTS5 search (skip if FTS5 table doesn't exist) - if let Ok(fts_results) = db.query_all( - sea_orm::Statement::from_string( - sea_orm::DatabaseBackend::Sqlite, - format!( - "SELECT rowid FROM tag_search_fts WHERE tag_search_fts MATCH '{}' ORDER BY bm25(tag_search_fts)", - query.replace("\"", "\"\"") - ) - ) - ).await { - for row in fts_results { - if let Ok(tag_id) = row.try_get::("", "rowid") { - tag_db_ids.push(tag_id); - } - } - } - - // If FTS5 didn't return results, fall back to LIKE patterns - if tag_db_ids.is_empty() { - let search_pattern = format!("%{}%", query); - let like_models = tag::Entity::find() - .filter( - tag::Column::CanonicalName - .like(&search_pattern) - .or(tag::Column::DisplayName.like(&search_pattern)) - .or(tag::Column::FormalName.like(&search_pattern)) - .or(tag::Column::Abbreviation.like(&search_pattern)) - .or(tag::Column::Description.like(&search_pattern)), - ) + // If query is empty, return all tags (with filters applied) + if query.trim().is_empty() { + let all_models = tag::Entity::find() .all(&*db) .await .map_err(|e| TagError::DatabaseError(e.to_string()))?; - tag_db_ids = like_models.into_iter().map(|m| m.id).collect(); + tag_db_ids = all_models.into_iter().map(|m| m.id).collect(); + } else { + // Try FTS5 search first, fall back to LIKE patterns if FTS5 is not available + // Attempt FTS5 search (skip if FTS5 table doesn't exist) + if let Ok(fts_results) = db.query_all( + sea_orm::Statement::from_string( + sea_orm::DatabaseBackend::Sqlite, + format!( + "SELECT rowid FROM tag_search_fts WHERE tag_search_fts MATCH '{}' ORDER BY bm25(tag_search_fts)", + query.replace("\"", "\"\"") + ) + ) + ).await { + for row in fts_results { + if let Ok(tag_id) = row.try_get::("", "rowid") { + tag_db_ids.push(tag_id); + } + } + } + + // If FTS5 didn't return results, fall back to LIKE patterns + if tag_db_ids.is_empty() { + let 
search_pattern = format!("%{}%", query); + let like_models = tag::Entity::find() + .filter( + tag::Column::CanonicalName + .like(&search_pattern) + .or(tag::Column::DisplayName.like(&search_pattern)) + .or(tag::Column::FormalName.like(&search_pattern)) + .or(tag::Column::Abbreviation.like(&search_pattern)) + .or(tag::Column::Description.like(&search_pattern)), + ) + .all(&*db) + .await + .map_err(|e| TagError::DatabaseError(e.to_string()))?; + + tag_db_ids = like_models.into_iter().map(|m| m.id).collect(); + } } if tag_db_ids.is_empty() { diff --git a/core/src/ops/tags/search/input.rs b/core/src/ops/tags/search/input.rs index 312ccca94..e320d4a2f 100644 --- a/core/src/ops/tags/search/input.rs +++ b/core/src/ops/tags/search/input.rs @@ -70,10 +70,7 @@ impl SearchTagsInput { /// Validate the input pub fn validate(&self) -> Result<(), String> { - if self.query.trim().is_empty() { - return Err("query cannot be empty".to_string()); - } - + // Empty query is allowed (returns all tags) if self.query.len() > 1000 { return Err("query cannot exceed 1000 characters".to_string()); } diff --git a/core/src/ops/volumes/list/output.rs b/core/src/ops/volumes/list/output.rs index 794f46cb0..b3bd9170b 100644 --- a/core/src/ops/volumes/list/output.rs +++ b/core/src/ops/volumes/list/output.rs @@ -32,6 +32,8 @@ pub struct VolumeItem { pub write_speed_mbps: Option, /// Device ID that owns this volume pub device_id: Uuid, + /// Device slug for constructing SdPaths + pub device_slug: String, } #[derive(Debug, Clone, Serialize, Deserialize, Type)] diff --git a/core/src/ops/volumes/list/query.rs b/core/src/ops/volumes/list/query.rs index e8fbb1c30..cbe790289 100644 --- a/core/src/ops/volumes/list/query.rs +++ b/core/src/ops/volumes/list/query.rs @@ -13,6 +13,7 @@ use sea_orm::{ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelec use serde::{Deserialize, Serialize}; use specta::Type; use std::{collections::HashMap, sync::Arc}; +use uuid::Uuid; #[derive(Debug, Clone, Serialize, Deserialize, Type)] pub enum VolumeFilter { @@ -164,6 +165,13 @@ impl LibraryQuery for VolumeListQuery { .all(db) .await?; + // Fetch all devices to get slugs + let devices = entities::device::Entity::find().all(db).await?; + let device_slug_map: HashMap = devices + .into_iter() + .map(|d| (d.uuid, d.slug)) + .collect(); + // Create a map of tracked volumes by fingerprint let mut tracked_map: HashMap = tracked_volumes .into_iter() @@ -184,6 +192,12 @@ impl LibraryQuery for VolumeListQuery { let disk_type = Self::infer_disk_type(&tracked_vol.device_model, &tracked_vol.volume_type); + // Get device slug for this volume + let device_slug = device_slug_map + .get(&tracked_vol.device_id) + .cloned() + .unwrap_or_else(|| "unknown".to_string()); + volume_items.push(super::output::VolumeItem { id: tracked_vol.uuid, name: tracked_vol @@ -206,6 +220,7 @@ impl LibraryQuery for VolumeListQuery { read_speed_mbps: tracked_vol.read_speed_mbps.map(|s| s as u32), write_speed_mbps: tracked_vol.write_speed_mbps.map(|s| s as u32), device_id: tracked_vol.device_id, + device_slug, }); } @@ -215,6 +230,11 @@ impl LibraryQuery for VolumeListQuery { for vol in all_volumes { // Only show user-visible volumes if !tracked_map.contains_key(&vol.fingerprint.0) && vol.is_user_visible { + let device_slug = device_slug_map + .get(&vol.device_id) + .cloned() + .unwrap_or_else(|| "unknown".to_string()); + volume_items.push(super::output::VolumeItem { id: vol.id, name: vol.name.clone(), @@ -231,6 +251,7 @@ impl LibraryQuery for VolumeListQuery { read_speed_mbps: 
vol.read_speed_mbps.map(|s| s as u32), write_speed_mbps: vol.write_speed_mbps.map(|s| s as u32), device_id: vol.device_id, + device_slug, }); } } @@ -243,6 +264,11 @@ impl LibraryQuery for VolumeListQuery { // Only return volumes that are NOT tracked and are user-visible for vol in all_volumes { if !tracked_map.contains_key(&vol.fingerprint.0) && vol.is_user_visible { + let device_slug = device_slug_map + .get(&vol.device_id) + .cloned() + .unwrap_or_else(|| "unknown".to_string()); + volume_items.push(super::output::VolumeItem { id: vol.id, name: vol.name.clone(), @@ -259,6 +285,7 @@ impl LibraryQuery for VolumeListQuery { read_speed_mbps: vol.read_speed_mbps.map(|s| s as u32), write_speed_mbps: vol.write_speed_mbps.map(|s| s as u32), device_id: vol.device_id, + device_slug, }); } } diff --git a/packages/assets/icons/Document_memory.png b/packages/assets/icons/Document_memory.png new file mode 100644 index 000000000..d32fcbe43 Binary files /dev/null and b/packages/assets/icons/Document_memory.png differ diff --git a/packages/assets/icons/index.ts b/packages/assets/icons/index.ts index 3eeef25bb..8e9aca1fc 100644 --- a/packages/assets/icons/index.ts +++ b/packages/assets/icons/index.ts @@ -1,392 +1,394 @@ /* * This file was automatically generated by a script. - * To regenerate this file, run: pnpm assets gen + * To regenerate this file, run: bun assets gen */ -import Album_Light from './Album_Light.png'; -import Album20 from './Album-20.png'; -import Album from './Album.png'; -import Alias_Light from './Alias_Light.png'; -import Alias20 from './Alias-20.png'; -import Alias from './Alias.png'; -import AmazonS3 from './AmazonS3.png'; -import AndroidPhotos from './AndroidPhotos.png'; -import AppleFiles from './AppleFiles.png'; -import ApplePhotos from './ApplePhotos.png'; -import Application_Light from './Application_Light.png'; -import Application from './Application.png'; -import Archive_Light from './Archive_Light.png'; -import Archive20 from './Archive-20.png'; -import Archive from './Archive.png'; -import Audio_Light from './Audio_Light.png'; -import Audio20 from './Audio-20.png'; -import Audio from './Audio.png'; -import BackBlaze from './BackBlaze.png'; -import Ball from './Ball.png'; -import Book_Light from './Book_Light.png'; -import Book20 from './Book-20.png'; -import Book from './Book.png'; -import BookBlue from './BookBlue.png'; -import Box from './Box.png'; -import CloudSync_Light from './CloudSync_Light.png'; -import CloudSync from './CloudSync.png'; -import Code20 from './Code-20.png'; -import Collection_Light from './Collection_Light.png'; -import Collection20 from './Collection-20.png'; -import Collection from './Collection.png'; -import CollectionSparkle_Light from './CollectionSparkle_Light.png'; -import CollectionSparkle from './CollectionSparkle.png'; -import Config20 from './Config-20.png'; -import Database_Light from './Database_Light.png'; -import Database20 from './Database-20.png'; -import Database from './Database.png'; -import DAV from './DAV.png'; -import DeleteLocation from './DeleteLocation.png'; -import Document_doc_Light from './Document_doc_Light.png'; -import Document_doc from './Document_doc.png'; -import Document_Light from './Document_Light.png'; -import Document_pdf_Light from './Document_pdf_Light.png'; -import Document_pdf from './Document_pdf.png'; -import Document_srt from './Document_srt.png'; -import Document_xls_Light from './Document_xls_Light.png'; -import Document_xls from './Document_xls.png'; -import Document_xmp from './Document_xmp.png'; 
-import Document20 from './Document-20.png'; -import Document from './Document.png'; -import Dotfile20 from './Dotfile-20.png'; -import Drive_Light from './Drive_Light.png'; -import DriveAmazonS3_Light from './Drive-AmazonS3_Light.png'; -import DriveAmazonS3 from './Drive-AmazonS3.png'; -import DriveBackBlaze_Light from './Drive-BackBlaze_Light.png'; -import DriveBackBlaze from './Drive-BackBlaze.png'; -import Drivebox_Light from './Drive-box_Light.png'; -import DriveBox from './Drive-Box.png'; -import DriveDarker from './Drive-Darker.png'; -import DriveDAV_Light from './Drive-DAV_Light.png'; -import DriveDAV from './Drive-DAV.png'; -import DriveDropbox_Light from './Drive-Dropbox_Light.png'; -import DriveDropbox from './Drive-Dropbox.png'; -import DriveGoogleDrive_Light from './Drive-GoogleDrive_Light.png'; -import DriveGoogleDrive from './Drive-GoogleDrive.png'; -import DriveMega_Light from './Drive-Mega_Light.png'; -import DriveMega from './Drive-Mega.png'; -import DriveOneDrive_Light from './Drive-OneDrive_Light.png'; -import DriveOneDrive from './Drive-OneDrive.png'; -import DriveOpenStack_Light from './Drive-OpenStack_Light.png'; -import DriveOpenStack from './Drive-OpenStack.png'; -import DrivePCloud_Light from './Drive-PCloud_Light.png'; -import DrivePCloud from './Drive-PCloud.png'; -import Drive from './Drive.png'; -import Dropbox from './Dropbox.png'; -import Encrypted_Light from './Encrypted_Light.png'; -import Encrypted20 from './Encrypted-20.png'; -import Encrypted from './Encrypted.png'; -import Entity_Light from './Entity_Light.png'; -import Entity from './Entity.png'; -import Executable_Light_old from './Executable_Light_old.png'; -import Executable_Light from './Executable_Light.png'; -import Executable_old from './Executable_old.png'; -import Executable20 from './Executable-20.png'; -import Executable from './Executable.png'; -import Face_Light from './Face_Light.png'; -import Folder_Light from './Folder_Light.png'; -import Folder20 from './Folder-20.png'; -import Foldertagxmp from './Folder-tag-xmp.png'; -import Folder from './Folder.png'; -import FolderGrey_Light from './FolderGrey_Light.png'; -import FolderGrey from './FolderGrey.png'; -import FolderNoSpace_Light from './FolderNoSpace_Light.png'; -import FolderNoSpace from './FolderNoSpace.png'; -import Font20 from './Font-20.png'; -import Game_Light from './Game_Light.png'; -import Game from './Game.png'; -import Globe_Light from './Globe_Light.png'; -import Globe from './Globe.png'; -import GlobeAlt from './GlobeAlt.png'; -import GoogleDrive from './GoogleDrive.png'; -import HDD_Light from './HDD_Light.png'; -import HDD from './HDD.png'; -import Heart_Light from './Heart_Light.png'; -import Heart from './Heart.png'; -import Home_Light from './Home_Light.png'; -import Home from './Home.png'; -import Image_Light from './Image_Light.png'; -import Image20 from './Image-20.png'; -import Image from './Image.png'; -import Key_Light from './Key_Light.png'; -import Key20 from './Key-20.png'; -import Key from './Key.png'; -import Keys_Light from './Keys_Light.png'; -import Keys from './Keys.png'; -import Laptop_Light from './Laptop_Light.png'; -import Laptop from './Laptop.png'; -import Link_Light from './Link_Light.png'; -import Link20 from './Link-20.png'; -import Link from './Link.png'; -import Location from './Location.png'; -import LocationManaged from './LocationManaged.png'; -import LocationReplica from './LocationReplica.png'; -import Lock_Light from './Lock_Light.png'; -import Lock from './Lock.png'; -import Mega 
from './Mega.png'; -import Mesh_Light from './Mesh_Light.png'; -import Mesh20 from './Mesh-20.png'; -import Mesh from './Mesh.png'; -import MiniSilverBox from './MiniSilverBox.png'; -import Mobile_Light from './Mobile_Light.png'; -import MobileAndroid from './Mobile-Android.png'; -import Mobile from './Mobile.png'; -import MoveLocation_Light from './MoveLocation_Light.png'; -import MoveLocation from './MoveLocation.png'; -import Movie_Light from './Movie_Light.png'; -import Movie from './Movie.png'; -import NewLocation from './NewLocation.png'; -import Node_Light from './Node_Light.png'; -import Node from './Node.png'; -import OneDrive from './OneDrive.png'; -import OpenStack from './OpenStack.png'; -import Package_Light from './Package_Light.png'; -import Package20 from './Package-20.png'; -import Package from './Package.png'; -import PC from './PC.png'; -import PCloud from './PCloud.png'; -import Scrapbook_Light from './Scrapbook_Light.png'; -import Scrapbook from './Scrapbook.png'; -import Screenshot_Light from './Screenshot_Light.png'; -import Screenshot20 from './Screenshot-20.png'; -import Screenshot from './Screenshot.png'; -import ScreenshotAlt from './ScreenshotAlt.png'; -import SD_Light from './SD_Light.png'; -import SD from './SD.png'; -import Search_Light from './Search_Light.png'; -import Search from './Search.png'; -import SearchAlt from './SearchAlt.png'; -import Server_Light from './Server_Light.png'; -import Server from './Server.png'; -import SilverBox from './SilverBox.png'; -import Spacedrop_Light from './Spacedrop_Light.png'; -import Spacedrop1 from './Spacedrop-1.png'; -import Spacedrop from './Spacedrop.png'; -import Sync_Light from './Sync_Light.png'; -import Sync from './Sync.png'; -import Tablet_Light from './Tablet_Light.png'; -import Tablet from './Tablet.png'; -import Tags_Light from './Tags_Light.png'; -import Tags from './Tags.png'; -import Terminal_Light from './Terminal_Light.png'; -import Terminal from './Terminal.png'; -import Text_Light from './Text_Light.png'; -import Text_txt from './Text_txt.png'; -import Text20 from './Text-20.png'; -import Text from './Text.png'; -import TextAlt_Light from './TextAlt_Light.png'; -import TextAlt from './TextAlt.png'; -import TexturedMesh_Light from './TexturedMesh_Light.png'; -import TexturedMesh from './TexturedMesh.png'; -import Trash_Light from './Trash_Light.png'; -import Trash from './Trash.png'; -import Undefined_Light from './Undefined_Light.png'; -import Undefined from './Undefined.png'; -import Unknown20 from './Unknown-20.png'; -import Video_Light from './Video_Light.png'; -import Video20 from './Video-20.png'; -import Video from './Video.png'; -import WebPageArchive20 from './WebPageArchive-20.png'; -import Widget_Light from './Widget_Light.png'; -import Widget20 from './Widget-20.png'; -import Widget from './Widget.png'; +import Album_Light from "./Album_Light.png"; +import Album20 from "./Album-20.png"; +import Album from "./Album.png"; +import Alias_Light from "./Alias_Light.png"; +import Alias20 from "./Alias-20.png"; +import Alias from "./Alias.png"; +import AmazonS3 from "./AmazonS3.png"; +import AndroidPhotos from "./AndroidPhotos.png"; +import AppleFiles from "./AppleFiles.png"; +import ApplePhotos from "./ApplePhotos.png"; +import Application_Light from "./Application_Light.png"; +import Application from "./Application.png"; +import Archive_Light from "./Archive_Light.png"; +import Archive20 from "./Archive-20.png"; +import Archive from "./Archive.png"; +import Audio_Light from 
"./Audio_Light.png"; +import Audio20 from "./Audio-20.png"; +import Audio from "./Audio.png"; +import BackBlaze from "./BackBlaze.png"; +import Ball from "./Ball.png"; +import Book_Light from "./Book_Light.png"; +import Book20 from "./Book-20.png"; +import Book from "./Book.png"; +import BookBlue from "./BookBlue.png"; +import Box from "./Box.png"; +import CloudSync_Light from "./CloudSync_Light.png"; +import CloudSync from "./CloudSync.png"; +import Code20 from "./Code-20.png"; +import Collection_Light from "./Collection_Light.png"; +import Collection20 from "./Collection-20.png"; +import Collection from "./Collection.png"; +import CollectionSparkle_Light from "./CollectionSparkle_Light.png"; +import CollectionSparkle from "./CollectionSparkle.png"; +import Config20 from "./Config-20.png"; +import Database_Light from "./Database_Light.png"; +import Database20 from "./Database-20.png"; +import Database from "./Database.png"; +import DAV from "./DAV.png"; +import DeleteLocation from "./DeleteLocation.png"; +import Document_doc_Light from "./Document_doc_Light.png"; +import Document_doc from "./Document_doc.png"; +import Document_Light from "./Document_Light.png"; +import Document_pdf_Light from "./Document_pdf_Light.png"; +import Document_pdf from "./Document_pdf.png"; +import Document_srt from "./Document_srt.png"; +import Document_xls_Light from "./Document_xls_Light.png"; +import Document_xls from "./Document_xls.png"; +import Document_xmp from "./Document_xmp.png"; +import Document_memory from "./Document_memory.png"; +import Document20 from "./Document-20.png"; +import Document from "./Document.png"; +import Dotfile20 from "./Dotfile-20.png"; +import Drive_Light from "./Drive_Light.png"; +import DriveAmazonS3_Light from "./Drive-AmazonS3_Light.png"; +import DriveAmazonS3 from "./Drive-AmazonS3.png"; +import DriveBackBlaze_Light from "./Drive-BackBlaze_Light.png"; +import DriveBackBlaze from "./Drive-BackBlaze.png"; +import Drivebox_Light from "./Drive-box_Light.png"; +import DriveBox from "./Drive-Box.png"; +import DriveDarker from "./Drive-Darker.png"; +import DriveDAV_Light from "./Drive-DAV_Light.png"; +import DriveDAV from "./Drive-DAV.png"; +import DriveDropbox_Light from "./Drive-Dropbox_Light.png"; +import DriveDropbox from "./Drive-Dropbox.png"; +import DriveGoogleDrive_Light from "./Drive-GoogleDrive_Light.png"; +import DriveGoogleDrive from "./Drive-GoogleDrive.png"; +import DriveMega_Light from "./Drive-Mega_Light.png"; +import DriveMega from "./Drive-Mega.png"; +import DriveOneDrive_Light from "./Drive-OneDrive_Light.png"; +import DriveOneDrive from "./Drive-OneDrive.png"; +import DriveOpenStack_Light from "./Drive-OpenStack_Light.png"; +import DriveOpenStack from "./Drive-OpenStack.png"; +import DrivePCloud_Light from "./Drive-PCloud_Light.png"; +import DrivePCloud from "./Drive-PCloud.png"; +import Drive from "./Drive.png"; +import Dropbox from "./Dropbox.png"; +import Encrypted_Light from "./Encrypted_Light.png"; +import Encrypted20 from "./Encrypted-20.png"; +import Encrypted from "./Encrypted.png"; +import Entity_Light from "./Entity_Light.png"; +import Entity from "./Entity.png"; +import Executable_Light_old from "./Executable_Light_old.png"; +import Executable_Light from "./Executable_Light.png"; +import Executable_old from "./Executable_old.png"; +import Executable20 from "./Executable-20.png"; +import Executable from "./Executable.png"; +import Face_Light from "./Face_Light.png"; +import Folder_Light from "./Folder_Light.png"; +import Folder20 from 
"./Folder-20.png"; +import Foldertagxmp from "./Folder-tag-xmp.png"; +import Folder from "./Folder.png"; +import FolderGrey_Light from "./FolderGrey_Light.png"; +import FolderGrey from "./FolderGrey.png"; +import FolderNoSpace_Light from "./FolderNoSpace_Light.png"; +import FolderNoSpace from "./FolderNoSpace.png"; +import Font20 from "./Font-20.png"; +import Game_Light from "./Game_Light.png"; +import Game from "./Game.png"; +import Globe_Light from "./Globe_Light.png"; +import Globe from "./Globe.png"; +import GlobeAlt from "./GlobeAlt.png"; +import GoogleDrive from "./GoogleDrive.png"; +import HDD_Light from "./HDD_Light.png"; +import HDD from "./HDD.png"; +import Heart_Light from "./Heart_Light.png"; +import Heart from "./Heart.png"; +import Home_Light from "./Home_Light.png"; +import Home from "./Home.png"; +import Image_Light from "./Image_Light.png"; +import Image20 from "./Image-20.png"; +import Image from "./Image.png"; +import Key_Light from "./Key_Light.png"; +import Key20 from "./Key-20.png"; +import Key from "./Key.png"; +import Keys_Light from "./Keys_Light.png"; +import Keys from "./Keys.png"; +import Laptop_Light from "./Laptop_Light.png"; +import Laptop from "./Laptop.png"; +import Link_Light from "./Link_Light.png"; +import Link20 from "./Link-20.png"; +import Link from "./Link.png"; +import Location from "./Location.png"; +import LocationManaged from "./LocationManaged.png"; +import LocationReplica from "./LocationReplica.png"; +import Lock_Light from "./Lock_Light.png"; +import Lock from "./Lock.png"; +import Mega from "./Mega.png"; +import Mesh_Light from "./Mesh_Light.png"; +import Mesh20 from "./Mesh-20.png"; +import Mesh from "./Mesh.png"; +import MiniSilverBox from "./MiniSilverBox.png"; +import Mobile_Light from "./Mobile_Light.png"; +import MobileAndroid from "./Mobile-Android.png"; +import Mobile from "./Mobile.png"; +import MoveLocation_Light from "./MoveLocation_Light.png"; +import MoveLocation from "./MoveLocation.png"; +import Movie_Light from "./Movie_Light.png"; +import Movie from "./Movie.png"; +import NewLocation from "./NewLocation.png"; +import Node_Light from "./Node_Light.png"; +import Node from "./Node.png"; +import OneDrive from "./OneDrive.png"; +import OpenStack from "./OpenStack.png"; +import Package_Light from "./Package_Light.png"; +import Package20 from "./Package-20.png"; +import Package from "./Package.png"; +import PC from "./PC.png"; +import PCloud from "./PCloud.png"; +import Scrapbook_Light from "./Scrapbook_Light.png"; +import Scrapbook from "./Scrapbook.png"; +import Screenshot_Light from "./Screenshot_Light.png"; +import Screenshot20 from "./Screenshot-20.png"; +import Screenshot from "./Screenshot.png"; +import ScreenshotAlt from "./ScreenshotAlt.png"; +import SD_Light from "./SD_Light.png"; +import SD from "./SD.png"; +import Search_Light from "./Search_Light.png"; +import Search from "./Search.png"; +import SearchAlt from "./SearchAlt.png"; +import Server_Light from "./Server_Light.png"; +import Server from "./Server.png"; +import SilverBox from "./SilverBox.png"; +import Spacedrop_Light from "./Spacedrop_Light.png"; +import Spacedrop1 from "./Spacedrop-1.png"; +import Spacedrop from "./Spacedrop.png"; +import Sync_Light from "./Sync_Light.png"; +import Sync from "./Sync.png"; +import Tablet_Light from "./Tablet_Light.png"; +import Tablet from "./Tablet.png"; +import Tags_Light from "./Tags_Light.png"; +import Tags from "./Tags.png"; +import Terminal_Light from "./Terminal_Light.png"; +import Terminal from "./Terminal.png"; 
+import Text_Light from "./Text_Light.png"; +import Text_txt from "./Text_txt.png"; +import Text20 from "./Text-20.png"; +import Text from "./Text.png"; +import TextAlt_Light from "./TextAlt_Light.png"; +import TextAlt from "./TextAlt.png"; +import TexturedMesh_Light from "./TexturedMesh_Light.png"; +import TexturedMesh from "./TexturedMesh.png"; +import Trash_Light from "./Trash_Light.png"; +import Trash from "./Trash.png"; +import Undefined_Light from "./Undefined_Light.png"; +import Undefined from "./Undefined.png"; +import Unknown20 from "./Unknown-20.png"; +import Video_Light from "./Video_Light.png"; +import Video20 from "./Video-20.png"; +import Video from "./Video.png"; +import WebPageArchive20 from "./WebPageArchive-20.png"; +import Widget_Light from "./Widget_Light.png"; +import Widget20 from "./Widget-20.png"; +import Widget from "./Widget.png"; export { - Album20, - Album, - Album_Light, - Alias20, - Alias, - Alias_Light, - AmazonS3, - AndroidPhotos, - AppleFiles, - ApplePhotos, - Application, - Application_Light, - Archive20, - Archive, - Archive_Light, - Audio20, - Audio, - Audio_Light, - BackBlaze, - Ball, - Book20, - Book, - BookBlue, - Book_Light, - Box, - CloudSync, - CloudSync_Light, - Code20, - Collection20, - Collection, - CollectionSparkle, - CollectionSparkle_Light, - Collection_Light, - Config20, - DAV, - Database20, - Database, - Database_Light, - DeleteLocation, - Document20, - Document, - Document_Light, - Document_doc, - Document_doc_Light, - Document_pdf, - Document_pdf_Light, - Document_srt, - Document_xls, - Document_xls_Light, - Document_xmp, - Dotfile20, - DriveAmazonS3, - DriveAmazonS3_Light, - DriveBackBlaze, - DriveBackBlaze_Light, - DriveBox, - DriveDAV, - DriveDAV_Light, - DriveDarker, - DriveDropbox, - DriveDropbox_Light, - DriveGoogleDrive, - DriveGoogleDrive_Light, - DriveMega, - DriveMega_Light, - DriveOneDrive, - DriveOneDrive_Light, - DriveOpenStack, - DriveOpenStack_Light, - DrivePCloud, - DrivePCloud_Light, - Drivebox_Light, - Drive, - Drive_Light, - Dropbox, - Encrypted20, - Encrypted, - Encrypted_Light, - Entity, - Entity_Light, - Executable20, - Executable, - Executable_Light, - Executable_Light_old, - Executable_old, - Face_Light, - Folder20, - Foldertagxmp, - Folder, - FolderGrey, - FolderGrey_Light, - FolderNoSpace, - FolderNoSpace_Light, - Folder_Light, - Font20, - Game, - Game_Light, - Globe, - GlobeAlt, - Globe_Light, - GoogleDrive, - HDD, - HDD_Light, - Heart, - Heart_Light, - Home, - Home_Light, - Image20, - Image, - Image_Light, - Key20, - Key, - Key_Light, - Keys, - Keys_Light, - Laptop, - Laptop_Light, - Link20, - Link, - Link_Light, - Location, - LocationManaged, - LocationReplica, - Lock, - Lock_Light, - Mega, - Mesh20, - Mesh, - Mesh_Light, - MiniSilverBox, - MobileAndroid, - Mobile, - Mobile_Light, - MoveLocation, - MoveLocation_Light, - Movie, - Movie_Light, - NewLocation, - Node, - Node_Light, - OneDrive, - OpenStack, - PC, - PCloud, - Package20, - Package, - Package_Light, - SD, - SD_Light, - Scrapbook, - Scrapbook_Light, - Screenshot20, - Screenshot, - ScreenshotAlt, - Screenshot_Light, - Search, - SearchAlt, - Search_Light, - Server, - Server_Light, - SilverBox, - Spacedrop1, - Spacedrop, - Spacedrop_Light, - Sync, - Sync_Light, - Tablet, - Tablet_Light, - Tags, - Tags_Light, - Terminal, - Terminal_Light, - Text20, - Text, - TextAlt, - TextAlt_Light, - Text_Light, - Text_txt, - TexturedMesh, - TexturedMesh_Light, - Trash, - Trash_Light, - Undefined, - Undefined_Light, - Unknown20, - Video20, - Video, - Video_Light, - 
WebPageArchive20, - Widget20, - Widget, - Widget_Light + Album20, + Album, + Album_Light, + Alias20, + Alias, + Alias_Light, + AmazonS3, + AndroidPhotos, + AppleFiles, + ApplePhotos, + Application, + Application_Light, + Archive20, + Archive, + Archive_Light, + Audio20, + Audio, + Audio_Light, + BackBlaze, + Ball, + Book20, + Book, + BookBlue, + Book_Light, + Box, + CloudSync, + CloudSync_Light, + Code20, + Collection20, + Collection, + CollectionSparkle, + CollectionSparkle_Light, + Collection_Light, + Config20, + DAV, + Database20, + Database, + Database_Light, + DeleteLocation, + Document20, + Document, + Document_Light, + Document_doc, + Document_doc_Light, + Document_pdf, + Document_pdf_Light, + Document_srt, + Document_xls, + Document_xls_Light, + Document_xmp, + Document_memory, + Dotfile20, + DriveAmazonS3, + DriveAmazonS3_Light, + DriveBackBlaze, + DriveBackBlaze_Light, + DriveBox, + DriveDAV, + DriveDAV_Light, + DriveDarker, + DriveDropbox, + DriveDropbox_Light, + DriveGoogleDrive, + DriveGoogleDrive_Light, + DriveMega, + DriveMega_Light, + DriveOneDrive, + DriveOneDrive_Light, + DriveOpenStack, + DriveOpenStack_Light, + DrivePCloud, + DrivePCloud_Light, + Drivebox_Light, + Drive, + Drive_Light, + Dropbox, + Encrypted20, + Encrypted, + Encrypted_Light, + Entity, + Entity_Light, + Executable20, + Executable, + Executable_Light, + Executable_Light_old, + Executable_old, + Face_Light, + Folder20, + Foldertagxmp, + Folder, + FolderGrey, + FolderGrey_Light, + FolderNoSpace, + FolderNoSpace_Light, + Folder_Light, + Font20, + Game, + Game_Light, + Globe, + GlobeAlt, + Globe_Light, + GoogleDrive, + HDD, + HDD_Light, + Heart, + Heart_Light, + Home, + Home_Light, + Image20, + Image, + Image_Light, + Key20, + Key, + Key_Light, + Keys, + Keys_Light, + Laptop, + Laptop_Light, + Link20, + Link, + Link_Light, + Location, + LocationManaged, + LocationReplica, + Lock, + Lock_Light, + Mega, + Mesh20, + Mesh, + Mesh_Light, + MiniSilverBox, + MobileAndroid, + Mobile, + Mobile_Light, + MoveLocation, + MoveLocation_Light, + Movie, + Movie_Light, + NewLocation, + Node, + Node_Light, + OneDrive, + OpenStack, + PC, + PCloud, + Package20, + Package, + Package_Light, + SD, + SD_Light, + Scrapbook, + Scrapbook_Light, + Screenshot20, + Screenshot, + ScreenshotAlt, + Screenshot_Light, + Search, + SearchAlt, + Search_Light, + Server, + Server_Light, + SilverBox, + Spacedrop1, + Spacedrop, + Spacedrop_Light, + Sync, + Sync_Light, + Tablet, + Tablet_Light, + Tags, + Tags_Light, + Terminal, + Terminal_Light, + Text20, + Text, + TextAlt, + TextAlt_Light, + Text_Light, + Text_txt, + TexturedMesh, + TexturedMesh_Light, + Trash, + Trash_Light, + Undefined, + Undefined_Light, + Unknown20, + Video20, + Video, + Video_Light, + WebPageArchive20, + Widget20, + Widget, + Widget_Light, }; diff --git a/packages/interface/src/Explorer.tsx b/packages/interface/src/Explorer.tsx index 373e0f200..1527ec1d0 100644 --- a/packages/interface/src/Explorer.tsx +++ b/packages/interface/src/Explorer.tsx @@ -43,10 +43,12 @@ export function ExplorerLayout() { goToPreviousPreview, tagModeActive, setTagModeActive, + viewMode, } = useExplorer(); - // Check if we're on Overview (hide inspector) + // Check if we're on Overview (hide inspector) or in Knowledge view (has its own inspector) const isOverview = location.pathname === "/"; + const isKnowledgeView = viewMode === "knowledge"; // Fetch locations to get current location info const locationsQuery = useNormalizedCache< @@ -112,7 +114,7 @@ export function ExplorerLayout() {
@@ -145,8 +147,8 @@ export function ExplorerLayout() { /> - {/* Hide inspector on Overview screen */} - {inspectorVisible && !isOverview && ( + {/* Hide inspector on Overview screen and Knowledge view (has its own) */} + {inspectorVisible && !isOverview && !isKnowledgeView && ( { + const pathParam = searchParams.get("path"); + if (pathParam) { + try { + const sdPath = JSON.parse(decodeURIComponent(pathParam)); + const currentPathStr = JSON.stringify(currentPath); + const newPathStr = JSON.stringify(sdPath); + + if (currentPathStr !== newPathStr) { + console.log("Setting currentPath from query param:", sdPath); + setCurrentPath(sdPath); + } + } catch (e) { + console.error("Failed to parse path query parameter:", e); + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [searchParams]); + // Set currentPath from location ID (only when location changes) useEffect(() => { if (locationId && locationsQuery.data?.locations) { @@ -141,6 +163,8 @@ export function ExplorerView() { ) : viewMode === "size" ? ( + ) : viewMode === "knowledge" ? ( + ) : ( )} diff --git a/packages/interface/src/components/Explorer/TagAssignmentMode.tsx b/packages/interface/src/components/Explorer/TagAssignmentMode.tsx index 139ad7888..cca5cdbef 100644 --- a/packages/interface/src/components/Explorer/TagAssignmentMode.tsx +++ b/packages/interface/src/components/Explorer/TagAssignmentMode.tsx @@ -29,12 +29,13 @@ export function TagAssignmentMode({ isActive, onExit }: TagAssignmentModeProps) // Fetch all tags (for now, we'll use the first 10 as the default palette) // TODO: Implement user-defined palettes const { data: tagsData } = useNormalizedQuery({ - wireMethod: 'query:tags.list', - input: null, + wireMethod: 'query:tags.search', + input: { query: '' }, resourceType: 'tag' }); - const allTags = tagsData?.tags ?? []; + // Extract tags from search results (tags is an array of { tag, relevance, ... }) + const allTags = tagsData?.tags?.map((result: any) => result.tag) ?? []; const paletteTags = allTags.slice(0, 10); // First 10 tags for now // Keyboard shortcuts diff --git a/packages/interface/src/components/Explorer/ViewModeMenu.tsx b/packages/interface/src/components/Explorer/ViewModeMenu.tsx index c836fb7ad..7bfab10c2 100644 --- a/packages/interface/src/components/Explorer/ViewModeMenu.tsx +++ b/packages/interface/src/components/Explorer/ViewModeMenu.tsx @@ -9,11 +9,12 @@ import { ChartPieSlice, Clock, SquaresFour, + Sparkle, } from "@phosphor-icons/react"; import clsx from "clsx"; import { TopBarButton } from "@sd/ui"; -type ViewMode = "list" | "grid" | "column" | "media" | "size"; +type ViewMode = "list" | "grid" | "column" | "media" | "size" | "knowledge"; interface ViewOption { id: ViewMode | "timeline"; @@ -59,12 +60,19 @@ const viewOptions: ViewOption[] = [ color: "bg-green-500", keybind: "⌘5", }, + { + id: "knowledge", + label: "Knowledge", + icon: Sparkle, + color: "bg-purple-500", + keybind: "⌘6", + }, { id: "timeline", label: "Timeline", icon: Clock, color: "bg-yellow-500", - keybind: "⌘6", + keybind: "⌘7", }, ]; @@ -136,9 +144,9 @@ export function ViewModeMenu({ top: `${position.top}px`, right: `${position.right}px`, }} - className="w-[280px] rounded-lg bg-menu border border-menu-line shadow-2xl p-2 z-50" + className="w-[240px] rounded-lg bg-menu border border-menu-line shadow-2xl p-2 z-50" > -
+
{viewOptions.map((option) => ( + ))} +
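For readers following the ExplorerView changes above: the new effect keeps currentPath in sync with a "path" query parameter for deep linking. Below is a self-contained sketch of that logic; it is illustrative and not part of the patch, and it assumes useSearchParams from react-router-dom plus a store shape like the one useExplorer() returns.

import { useEffect } from "react";
import { useSearchParams } from "react-router-dom";

// Hypothetical shape; the real values come from useExplorer() in the patch.
interface ExplorerPathState {
	currentPath: unknown;
	setCurrentPath: (path: unknown) => void;
}

// Mirrors the new effect in ExplorerView: read ?path=<encoded JSON SdPath>,
// parse it, and update the store only when the value actually changed.
export function usePathQueryParamSync({ currentPath, setCurrentPath }: ExplorerPathState) {
	const [searchParams] = useSearchParams();

	useEffect(() => {
		const pathParam = searchParams.get("path");
		if (!pathParam) return;
		try {
			const sdPath = JSON.parse(decodeURIComponent(pathParam));
			// Structural comparison via JSON.stringify, as in the hunk, to avoid redundant updates.
			if (JSON.stringify(currentPath) !== JSON.stringify(sdPath)) {
				setCurrentPath(sdPath);
			}
		} catch (e) {
			console.error("Failed to parse path query parameter:", e);
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [searchParams]);
}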
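The TagAssignmentMode hunk above switches from query:tags.list to query:tags.search with an empty query, so each result now wraps the tag. A minimal sketch of the unwrap step follows; the result and tag shapes are assumptions based only on the inline "{ tag, relevance, ... }" comment, not on the real API types.

// Assumed result shape for query:tags.search hits.
interface SearchTagResult {
	tag: { id: string; name: string; color?: string };
	relevance: number;
}

// The palette unwraps each hit and keeps the first ten tags,
// matching allTags.map(...).slice(0, 10) in the patch.
function toPaletteTags(results: SearchTagResult[] | undefined, limit = 10) {
	return (results ?? []).map((result) => result.tag).slice(0, limit);
}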
+
+		);
+	}
+
+	// Render provider selection for cloud
+	if (step === "provider" && selectedCategory === "cloud") {
+		return (
+			}
+				description="Choose your cloud storage service"
+				className="w-[640px]"
+				onCancelled={true}
+				hideButtons={true}
+				buttonsSideContent={
+				}
+			>
+				{cloudProviders.map((provider) => (
+				))}
+
+
+		);
+	}
+
+	// Render provider selection for network
+	if (step === "provider" && selectedCategory === "network") {
+		return (
+			}
+				description="Choose your network file protocol"
+				className="w-[640px]"
+				onCancelled={true}
+				hideButtons={true}
+				buttonsSideContent={
+				}
+			>
+
+					Coming Soon
+
+					Network protocol support (SMB, NFS, SFTP, WebDAV) is currently in
+					development. Check back in a future update!
+
+
+
+				{networkProtocols.map((protocol) => (
+				))}
+
+
+
+		);
+	}
+
+	// Render provider selection for external
+	if (step === "provider" && selectedCategory === "external") {
+		return (
+			}
+				description="Select a connected drive to track"
+				className="w-[640px]"
+				onCancelled={true}
+				hideButtons={true}
+				buttonsSideContent={
+				}
+			>
+				{volumes && volumes.length > 0 ? (
+
+					{volumes.map((volume) => (
+					))}
+
+				) : (
+
+
+						No untracked external drives found. Connect a drive and refresh
+						to see it here.
+
+
+				)}
+
+
+		);
+	}
+
+	// Render local folder configuration (browse + suggested + settings)
+	if (step === "provider" && selectedCategory === "local") {
+		return (
+			}
+				description="Choose a folder to index and manage"
+				className="w-[640px]"
+				onCancelled={true}
+				hideButtons={true}
+				buttonsSideContent={
+				}
+			>
+
+
+
+					 localForm.setValue("path", e.target.value)}
+						placeholder="Select a custom folder"
+						size="lg"
+						className="pr-14"
+					/>
+
+
+
+
+				{suggestedLocations && suggestedLocations.locations.length > 0 && (
+
+
+						{suggestedLocations.locations.map((loc) => (
+						))}
+
+				)}
+
+
+		);
+	}
+
+	// Render local folder settings (after path selected)
+	if (step === "local-config") {
+		return (
+			}
+				description={localForm.watch("path")}
+				ctaLabel="Add Location"
+				onCancelled={true}
+				loading={addLocation.isPending}
+				className="w-[640px]"
+				buttonsSideContent={
+				}
+			>
+
+
+
+
+			 setTab(v as SettingsTab)}>
+
+					Preset
+
+					Jobs {selectedJobs.size > 0 && `(${selectedJobs.size})`}
+
+
+
+
+
+
+				{indexModes.map((mode) => {
+					const isSelected = currentMode === mode.value;
+					return (
+
+					);
+				})}
+
+
+
+
+
+
+
+					Select which jobs to run after indexing. Extensions can add
+					more jobs.
+
+
+
+				{jobOptions.map((job) => {
+					const isSelected = selectedJobs.has(job.id);
+					return (
+
+					);
+				})}
+
+
+
+
+
+			{localForm.formState.errors.root && (
+
+					{localForm.formState.errors.root.message}
+
+			)}
+
+		);
+	}
+
+	// Render cloud configuration form
+	if (step === "cloud-config" && selectedProvider) {
+		const provider = selectedProvider;
+		const isS3Type =
+			provider.cloudServiceType === "s3" ||
+			provider.cloudServiceType === "b2" ||
+			provider.cloudServiceType === "wasabi" ||
+			provider.cloudServiceType === "spaces";
+		const isOAuthType =
+			provider.cloudServiceType === "gdrive" ||
+			provider.cloudServiceType === "dropbox" ||
+			provider.cloudServiceType === "onedrive";
+		const isAzureType = provider.cloudServiceType === "azblob";
+		const isGCSType = provider.cloudServiceType === "gcs";
+
+		return (
+			}
+				description="Configure your cloud storage connection"
+				ctaLabel="Add Storage"
+				onCancelled={true}
+				loading={addCloudVolume.isPending}
+				className="w-[640px]"
+				buttonsSideContent={
+				}
+			>
+
+ + +
+ + {isS3Type && ( + <> +
+ + +
+
+ + +
+
+ + +
+
+ + +
+					{(provider.id === "r2" ||
+						provider.id === "minio" ||
+						provider.id === "wasabi" ||
+						provider.id === "spaces") && (
+
+ + +
+ )} + + )} + + {isOAuthType && ( + <> +
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+ + )} + + {isAzureType && ( + <> +
+ + +
+
+ + +
+
+ + +
+
+ + +
+ + )} + + {isGCSType && ( + <> +
+ + +
+
+ +
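
For orientation, the cloud-config step above derives which credential fields to render from provider.cloudServiceType. A condensed sketch of that branching follows; it is illustrative only and not part of the patch, the service-type strings are copied from the hunk, and the returned labels are invented names.

// Service-type strings taken from the diff; the grouping mirrors the
// isS3Type / isOAuthType / isAzureType / isGCSType flags in the component.
type CloudServiceType =
	| "s3" | "b2" | "wasabi" | "spaces"
	| "gdrive" | "dropbox" | "onedrive"
	| "azblob"
	| "gcs";

function credentialFormKind(serviceType: CloudServiceType): "s3" | "oauth" | "azure" | "gcs" {
	const isS3Type =
		serviceType === "s3" || serviceType === "b2" || serviceType === "wasabi" || serviceType === "spaces";
	const isOAuthType =
		serviceType === "gdrive" || serviceType === "dropbox" || serviceType === "onedrive";
	// S3-compatible services share key/secret/bucket/region fields; per the diff,
	// providers with id r2/minio/wasabi/spaces also get an extra endpoint field.
	if (isS3Type) return "s3";
	if (isOAuthType) return "oauth";
	if (serviceType === "azblob") return "azure";
	return "gcs";
}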
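Taken together, the AddLocationDialog hunks in this diff walk through a small step machine: pick a category, pick a provider or folder, then configure it. A rough sketch of the state it implies is below; the type and field names are inferred from the conditions visible in the diff, not taken from the real component types, and the initial category-picker step's identifier is not visible in this excerpt.

// Step and category identifiers observed in this diff.
type AddLocationCategory = "local" | "external" | "cloud" | "network";
type AddLocationStep = "provider" | "local-config" | "cloud-config";

interface AddLocationDialogState {
	step: AddLocationStep;
	selectedCategory?: AddLocationCategory;
	selectedProvider?: { id: string; cloudServiceType: string };
}

// Rough summary of the if-chain that picks which screen to render.
function describeScreen({ step, selectedCategory }: AddLocationDialogState): string {
	if (step === "provider") return `choose a ${selectedCategory ?? "unknown"} provider or folder`;
	if (step === "local-config") return "configure indexing and follow-up jobs for the chosen folder";
	return "enter credentials for the selected cloud provider";
}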