Add memory system with archive and UI integration

This commit is contained in:
Jamie Pine
2025-11-23 03:33:19 -08:00
parent d2163d842d
commit 8a3387ca69
49 changed files with 4979 additions and 778 deletions

View File

@@ -2,7 +2,9 @@ use clap::Args;
use uuid::Uuid;
use sd_core::ops::tags::{
apply::input::ApplyTagsInput, create::input::CreateTagInput, search::input::SearchTagsInput,
apply::input::{ApplyTagsInput, TagTargets},
create::input::CreateTagInput,
search::input::SearchTagsInput,
};
#[derive(Args, Debug)]
@@ -34,7 +36,7 @@ pub struct TagApplyArgs {
impl From<TagApplyArgs> for ApplyTagsInput {
fn from(args: TagApplyArgs) -> Self {
ApplyTagsInput::user_tags(args.entries, args.tags)
ApplyTagsInput::user_tags_entry(args.entries, args.tags)
}
}

View File

@@ -27,13 +27,13 @@ function App() {
useEffect(() => {
// React Scan disabled - too heavy for development
// Uncomment if you need to debug render performance:
if (import.meta.env.DEV) {
setTimeout(() => {
import("react-scan").then(({ scan }) => {
scan({ enabled: true, log: false });
});
}, 2000);
}
// if (import.meta.env.DEV) {
// setTimeout(() => {
// import("react-scan").then(({ scan }) => {
// scan({ enabled: false, log: false });
// });
// }, 2000);
// }
// Initialize Tauri native context menu handler
initializeContextMenuHandler();

View File

@@ -106,6 +106,9 @@ rmp = "0.8" # MessagePack core types
rmp-serde = "1.3" # MessagePack serialization for job state
sd-task-system = { path = "../crates/task-system" }
# Vector database for memory files (optional for now)
# lancedb = "0.15" # Embedded vector database (conflicts with gpui)
# Media processing dependencies
blurhash = "0.2"
image = "0.25"

View File

@@ -0,0 +1,181 @@
//! Create a test memory file for development
//!
//! This example creates a real .memory file demonstrating the format.
//! Run with: cargo run --example create_memory
use sd_core::domain::memory::{DocumentType, FactType, MemoryFile, MemoryScope};
use std::path::PathBuf;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Initialize logging
    tracing_subscriber::fmt()
        .with_env_filter("info")
        .init();
    println!("\n🧠 Creating Spacedrive Memory File\n");
    // Output path: first CLI argument, else a repo-relative default.
    // (Previously a hard-coded absolute path under one developer's home
    // directory, which made this example fail on every other machine.)
    let output_path = std::env::args()
        .nth(1)
        .map(PathBuf::from)
        .unwrap_or_else(|| PathBuf::from("workbench/test-memories/memory-file-system.memory"));
    // Ensure directory exists
    if let Some(parent) = output_path.parent() {
        std::fs::create_dir_all(parent)?;
    }
    // Create memory file scoped to the module this example demonstrates
    // (relative path instead of a machine-specific absolute one).
    let mut memory = MemoryFile::create(
        "memory-file-system".to_string(),
        MemoryScope::Directory {
            path: "core/src/domain/memory".to_string(),
        },
        &output_path,
    )
    .await?;
    println!("✅ Created memory archive\n");
    // Add design documents
    println!("📄 Adding documents...");
    let design_doc = memory
        .add_document(
            None,
            "MEMORY_FILE_FORMAT_DESIGN.md".to_string(),
            Some(
                "Complete specification for .memory file format with custom archive".to_string(),
            ),
            DocumentType::Design,
        )
        .await?;
    let impl_doc = memory
        .add_document(
            None,
            "MEMORY_FILE_IMPLEMENTATION_STATUS.md".to_string(),
            Some("Implementation status with custom archive format".to_string()),
            DocumentType::Documentation,
        )
        .await?;
    let agent_doc = memory
        .add_document(
            None,
            "AGENT_MEMORY_ARCHITECTURE_V1.md".to_string(),
            Some("Three-type agent memory architecture".to_string()),
            DocumentType::Design,
        )
        .await?;
    println!("{} documents added\n", memory.get_documents().len());
    // Add learned facts
    println!("🧩 Adding facts...");
    memory
        .add_fact(
            "Memory files use custom archive format with magic bytes SDMEMORY".to_string(),
            FactType::Principle,
            1.0,
            Some(design_doc),
        )
        .await?;
    memory
        .add_fact(
            "Archive is append-only with index at end for efficient updates".to_string(),
            FactType::Pattern,
            1.0,
            Some(impl_doc),
        )
        .await?;
    memory
        .add_fact(
            "Vector store embedded using MessagePack serialization".to_string(),
            FactType::Decision,
            0.9,
            Some(impl_doc),
        )
        .await?;
    memory
        .add_fact(
            "Agent memory types: temporal (events), associative (knowledge), working (current)".to_string(),
            FactType::Pattern,
            1.0,
            Some(agent_doc),
        )
        .await?;
    memory
        .add_fact(
            "Memory files solve context-gathering problem for AI agents".to_string(),
            FactType::Principle,
            1.0,
            Some(design_doc),
        )
        .await?;
    println!("{} facts added\n", memory.get_facts().len());
    // Add embeddings
    println!("🔢 Adding embeddings...");
    // 4D mock vectors (real would be 384D from AI model)
    let design_vector = vec![0.9, 0.2, 0.7, 0.1];
    let impl_vector = vec![0.1, 0.9, 0.3, 0.5];
    let agent_vector = vec![0.3, 0.2, 0.95, 0.1];
    memory.add_embedding(design_doc, design_vector).await?;
    memory.add_embedding(impl_doc, impl_vector).await?;
    memory.add_embedding(agent_doc, agent_vector).await?;
    println!(
        "{} embeddings added\n",
        memory.embedding_count().await?
    );
    // Test search
    println!("🔍 Testing semantic search...");
    let query = vec![0.7, 0.15, 0.85, 0.05]; // Query: design + architecture
    let results = memory.search_similar(query, 3).await?;
    println!(" Results:");
    for (i, doc_id) in results.iter().enumerate() {
        if let Some(doc) = memory.get_document(*doc_id) {
            println!(" {}. {}", i + 1, doc.title);
        }
    }
    println!();
    // Show final statistics
    let metadata = memory.metadata();
    let stats = &metadata.statistics;
    println!("📊 Memory Statistics:");
    println!(" Name: {}", metadata.name);
    println!(" Scope: {}", metadata.scope.identifier());
    println!(" Documents: {}", stats.document_count);
    println!(" Facts: {}", stats.fact_count);
    println!(" Embeddings: {}", stats.embedding_count);
    println!(" Total size: {} bytes", stats.file_size_bytes);
    println!();
    println!("✅ Memory file created successfully!");
    println!("📁 Location: {}", output_path.display());
    println!();
    println!("Verify:");
    println!(" file {}", output_path.display());
    println!(" hexdump -C {} | head -20", output_path.display());
    println!();
    // Verify single file
    assert!(output_path.is_file());
    assert!(!output_path.is_dir());
    println!("✅ Confirmed: Single file archive\n");
    Ok(())
}

View File

@@ -59,6 +59,7 @@ pub enum ContentKind {
Shortcut = 23,
Package = 24,
ModelEntry = 25,
Memory = 26,
}
// Translate database entity into domain model
@@ -113,6 +114,8 @@ impl ContentKind {
22 => Self::Web,
23 => Self::Shortcut,
24 => Self::Package,
25 => Self::ModelEntry,
26 => Self::Memory,
_ => Self::Unknown,
}
}
@@ -305,6 +308,7 @@ impl std::fmt::Display for ContentKind {
ContentKind::Shortcut => "shortcut",
ContentKind::Package => "package",
ContentKind::ModelEntry => "model_entry",
ContentKind::Memory => "memory",
};
write!(f, "{}", s)
}

View File

@@ -116,6 +116,9 @@ impl crate::domain::resource::Identifiable for File {
"image_media_data",
"video_media_data",
"audio_media_data",
"user_metadata",
"user_metadata_tag",
"tag",
]
}

View File

@@ -0,0 +1,384 @@
use std::{
collections::HashMap,
io::{Read, Seek, SeekFrom, Write},
path::Path,
};
use serde::{Deserialize, Serialize};
use thiserror::Error;
const MAGIC: &[u8; 8] = b"SDMEMORY";
const VERSION: u32 = 1;
const HEADER_SIZE: u64 = 64;
/// Errors produced by [`MemoryArchive`] operations.
#[derive(Error, Debug)]
pub enum ArchiveError {
    /// Underlying filesystem read/write/seek failure.
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// The first 8 bytes of the file were not `SDMEMORY`.
    #[error("Invalid magic bytes")]
    InvalidMagic,
    /// Header declared a format version this build does not understand.
    #[error("Unsupported version: {0}")]
    UnsupportedVersion(u32),
    /// Named entry is absent from the index, or has been soft-deleted.
    #[error("File not found in archive: {0}")]
    FileNotFound(String),
    /// MessagePack encoding of the index failed.
    #[error("Serialization error: {0}")]
    Serialization(#[from] rmp_serde::encode::Error),
    /// MessagePack decoding failed.
    #[error("Deserialization error: {0}")]
    Deserialization(#[from] rmp_serde::decode::Error),
    /// Index bytes at the recorded offset could not be decoded.
    #[error("Corrupt index")]
    CorruptIndex,
}
/// Convenience alias used throughout the archive module.
pub type Result<T> = std::result::Result<T, ArchiveError>;
/// Entry in the file index
#[derive(Debug, Clone, Serialize, Deserialize)]
struct FileEntry {
    /// Offset in file where data starts (points past the 8-byte length prefix)
    offset: u64,
    /// Size of data in bytes
    size: u64,
    /// Whether data is compressed
    // NOTE(review): always written as `false` today; reserved for future use.
    compressed: bool,
    /// Deleted flag (for soft deletes)
    deleted: bool,
}
/// File index (stored at end of archive)
///
/// NOTE: rmp_serde encodes structs positionally, so the field order of
/// `FileEntry`/`FileIndex` is part of the on-disk format — do not reorder.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct FileIndex {
    /// Filename -> FileEntry
    files: HashMap<String, FileEntry>,
}
/// Custom archive format for memory files
///
/// Format:
/// - Fixed 64-byte header with magic, version, index offset
/// - Append-only data section with length-prefixed files
/// - MessagePack-encoded index at end
///
/// Updates:
/// - Append new files to end
/// - Update index with new offsets
/// - Rewrite header with updated index offset
pub struct MemoryArchive {
    /// Underlying archive file, opened read+write.
    file: std::fs::File,
    /// In-memory copy of the trailing index; authoritative while open.
    index: FileIndex,
    /// Byte offset where the serialized index currently begins (also the
    /// append position for the next data record).
    index_offset: u64,
}
impl MemoryArchive {
    /// Create a new, empty archive at `path` (truncating any existing file).
    ///
    /// Writes the fixed 64-byte header (8 magic + 4 version + 4 flags +
    /// 8 index offset + 40 reserved), followed immediately by an empty
    /// MessagePack index at offset 64.
    pub fn create(path: &Path) -> Result<Self> {
        let mut file = std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .truncate(true)
            .open(path)?;
        // Write header
        file.write_all(MAGIC)?;
        file.write_all(&VERSION.to_le_bytes())?;
        file.write_all(&0u32.to_le_bytes())?; // Flags (reserved)
        file.write_all(&HEADER_SIZE.to_le_bytes())?; // Index offset (will update)
        file.write_all(&vec![0u8; 40])?; // Reserved space (pads header to 64 bytes)
        // Write empty index at position 64
        let index = FileIndex {
            files: HashMap::new(),
        };
        let index_bytes = rmp_serde::to_vec(&index)?;
        file.write_all(&index_bytes)?;
        let index_offset = HEADER_SIZE;
        Ok(Self {
            file,
            index,
            index_offset,
        })
    }
    /// Open and validate an existing archive, loading its index into memory.
    ///
    /// # Errors
    /// - [`ArchiveError::InvalidMagic`] if the file does not start with `SDMEMORY`
    /// - [`ArchiveError::UnsupportedVersion`] on a version mismatch
    /// - [`ArchiveError::CorruptIndex`] if the trailing index fails to decode
    pub fn open(path: &Path) -> Result<Self> {
        let mut file = std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(path)?;
        // Read and validate header
        let mut magic = [0u8; 8];
        file.read_exact(&mut magic)?;
        if &magic != MAGIC {
            return Err(ArchiveError::InvalidMagic);
        }
        let mut version_bytes = [0u8; 4];
        file.read_exact(&mut version_bytes)?;
        let version = u32::from_le_bytes(version_bytes);
        if version != VERSION {
            return Err(ArchiveError::UnsupportedVersion(version));
        }
        // Skip flags
        file.seek(SeekFrom::Current(4))?;
        // Read index offset
        let mut offset_bytes = [0u8; 8];
        file.read_exact(&mut offset_bytes)?;
        let index_offset = u64::from_le_bytes(offset_bytes);
        // Seek to index and read it (the index always occupies the file tail)
        file.seek(SeekFrom::Start(index_offset))?;
        let mut index_bytes = Vec::new();
        file.read_to_end(&mut index_bytes)?;
        let index: FileIndex = rmp_serde::from_slice(&index_bytes)
            .map_err(|_| ArchiveError::CorruptIndex)?;
        Ok(Self {
            file,
            index,
            index_offset,
        })
    }
    /// Add a file to the archive
    ///
    /// Appends `[length: u64 LE][data]` at the current index position (the old
    /// index region is overwritten and rewritten after the new data), records
    /// the entry, then persists the updated index and header.
    pub fn add_file(&mut self, name: &str, data: &[u8]) -> Result<()> {
        // Seek to current index position (append before index)
        self.file.seek(SeekFrom::Start(self.index_offset))?;
        let offset = self.index_offset;
        let size = data.len() as u64;
        // Write: [length: u64][data: bytes]
        self.file.write_all(&size.to_le_bytes())?;
        self.file.write_all(data)?;
        // Update index (re-adding a deleted name resurrects it)
        self.index.files.insert(
            name.to_string(),
            FileEntry {
                offset: offset + 8, // After length prefix
                size,
                compressed: false,
                deleted: false,
            },
        );
        // New index position
        self.index_offset = offset + 8 + size;
        // Write updated index
        self.write_index()?;
        Ok(())
    }
    /// Read a file from the archive
    ///
    /// Soft-deleted entries behave as missing ([`ArchiveError::FileNotFound`]).
    pub fn read_file(&mut self, name: &str) -> Result<Vec<u8>> {
        let entry = self
            .index
            .files
            .get(name)
            .ok_or_else(|| ArchiveError::FileNotFound(name.to_string()))?;
        if entry.deleted {
            return Err(ArchiveError::FileNotFound(name.to_string()));
        }
        // Seek to file offset
        self.file.seek(SeekFrom::Start(entry.offset))?;
        // Read data
        let mut data = vec![0u8; entry.size as usize];
        self.file.read_exact(&mut data)?;
        Ok(data)
    }
    /// Update a file (appends new version)
    ///
    /// Old bytes remain in the data section until [`Self::compact`]; only the
    /// index entry moves to the new copy.
    pub fn update_file(&mut self, name: &str, data: &[u8]) -> Result<()> {
        // Just append as new file (index will point to latest)
        self.add_file(name, data)
    }
    /// Delete a file (soft delete in index)
    ///
    /// No-op when the name is unknown; data bytes stay on disk until compaction.
    pub fn delete_file(&mut self, name: &str) -> Result<()> {
        if let Some(entry) = self.index.files.get_mut(name) {
            entry.deleted = true;
            self.write_index()?;
        }
        Ok(())
    }
    /// List all files (soft-deleted entries excluded; order is unspecified)
    pub fn list_files(&self) -> Vec<String> {
        self.index
            .files
            .iter()
            .filter(|(_, entry)| !entry.deleted)
            .map(|(name, _)| name.clone())
            .collect()
    }
    /// Check if file exists (and is not soft-deleted)
    pub fn contains(&self, name: &str) -> bool {
        self.index
            .files
            .get(name)
            .map(|e| !e.deleted)
            .unwrap_or(false)
    }
    /// Write index to end of file and update header
    ///
    /// The index is serialized at `index_offset`, the file is truncated right
    /// after it, and the header's index-offset field (bytes 16..24) is updated
    /// so a later [`Self::open`] finds the new index.
    fn write_index(&mut self) -> Result<()> {
        // Serialize index
        let index_bytes = rmp_serde::to_vec(&self.index)?;
        // Write at current index offset
        self.file.seek(SeekFrom::Start(self.index_offset))?;
        self.file.write_all(&index_bytes)?;
        // Truncate file (remove old index if it was longer)
        let new_end = self.index_offset + index_bytes.len() as u64;
        self.file.set_len(new_end)?;
        // Update header with new index offset
        self.file.seek(SeekFrom::Start(16))?; // Skip magic + version + flags
        self.file.write_all(&self.index_offset.to_le_bytes())?;
        self.file.flush()?;
        Ok(())
    }
    /// Get total archive size
    pub fn size(&mut self) -> Result<u64> {
        Ok(self.file.metadata()?.len())
    }
    /// Compact archive (remove deleted files)
    pub fn compact(&mut self) -> Result<()> {
        // TODO: Implement garbage collection
        // Would require rewriting entire file with only non-deleted entries
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::NamedTempFile;

    /// Fresh empty archive backed by a temp file; the handle is returned so
    /// the file outlives the archive in the test body.
    fn new_archive() -> (NamedTempFile, MemoryArchive) {
        let tmp = NamedTempFile::new().unwrap();
        let archive = MemoryArchive::create(tmp.path()).unwrap();
        (tmp, archive)
    }

    #[test]
    fn test_create_archive() {
        let (tmp, _archive) = new_archive();
        // A freshly created archive must begin with the SDMEMORY magic bytes.
        let mut raw = std::fs::File::open(tmp.path()).unwrap();
        let mut head = [0u8; 8];
        raw.read_exact(&mut head).unwrap();
        assert_eq!(&head, MAGIC);
    }

    #[test]
    fn test_add_and_read_file() {
        let (_tmp, mut archive) = new_archive();
        let payload = b"Hello, Memory!";
        archive.add_file("test.txt", payload).unwrap();
        assert_eq!(archive.read_file("test.txt").unwrap(), payload);
    }

    #[test]
    fn test_update_file() {
        let (_tmp, mut archive) = new_archive();
        archive.add_file("test.txt", b"Version 1").unwrap();
        archive.update_file("test.txt", b"Version 2").unwrap();
        assert_eq!(archive.read_file("test.txt").unwrap(), b"Version 2");
    }

    #[test]
    fn test_list_files() {
        let (_tmp, mut archive) = new_archive();
        for (name, data) in [
            ("file1.txt", b"data1" as &[u8]),
            ("file2.txt", b"data2"),
            ("file3.txt", b"data3"),
        ] {
            archive.add_file(name, data).unwrap();
        }
        let names = archive.list_files();
        assert_eq!(names.len(), 3);
        assert!(names.contains(&"file1.txt".to_string()));
    }

    #[test]
    fn test_delete_file() {
        let (_tmp, mut archive) = new_archive();
        archive.add_file("test.txt", b"data").unwrap();
        assert!(archive.contains("test.txt"));
        archive.delete_file("test.txt").unwrap();
        // Soft-deleted entries are invisible to contains() and read_file().
        assert!(!archive.contains("test.txt"));
        assert!(archive.read_file("test.txt").is_err());
    }

    #[test]
    fn test_reopen_archive() {
        let tmp = NamedTempFile::new().unwrap();
        let path = tmp.path().to_path_buf();
        {
            let mut writer = MemoryArchive::create(&path).unwrap();
            writer.add_file("persisted.txt", b"test data").unwrap();
        } // writer dropped: data and index are already flushed to disk
        let mut reader = MemoryArchive::open(&path).unwrap();
        assert_eq!(reader.read_file("persisted.txt").unwrap(), b"test data");
    }

    #[test]
    fn test_multiple_updates() {
        let (_tmp, mut archive) = new_archive();
        // Add initial revision, then overwrite it repeatedly.
        archive.add_file("metadata.msgpack", b"v1").unwrap();
        for rev in [b"v2" as &[u8], b"v3", b"v4"] {
            archive.update_file("metadata.msgpack", rev).unwrap();
        }
        // The index always points at the most recent revision...
        assert_eq!(archive.read_file("metadata.msgpack").unwrap(), b"v4");
        // ...and there is still only one logical entry.
        assert_eq!(archive.list_files().len(), 1);
    }
}

View File

@@ -0,0 +1,69 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use super::{MemoryScope, MemoryStatistics};
/// Metadata for a memory file
///
/// Serialized with rmp_serde (positional encoding) into `metadata.msgpack`,
/// so field order is part of the on-disk format — do not reorder fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MemoryMetadata {
    /// Memory name
    pub name: String,
    /// Optional description
    pub description: Option<String>,
    /// What this memory is scoped to
    pub scope: MemoryScope,
    /// When memory was created
    pub created_at: DateTime<Utc>,
    /// Last modification time
    pub updated_at: DateTime<Utc>,
    /// Last time memory was loaded/used
    pub last_used_at: Option<DateTime<Utc>>,
    /// Format version
    pub version: u32,
    /// Embedding model used
    pub embedding_model: String,
    /// Approximate total tokens
    pub total_tokens: usize,
    /// Tags for categorization
    pub tags: Vec<String>,
    /// Statistics (cached)
    pub statistics: MemoryStatistics,
}
impl MemoryMetadata {
    /// Build metadata for a brand-new memory with sensible defaults.
    ///
    /// `created_at` and `updated_at` come from a single clock read, so a
    /// freshly created memory never appears to have been modified a moment
    /// after its creation instant (the previous two separate `Utc::now()`
    /// calls could differ).
    pub fn new(name: String, scope: MemoryScope) -> Self {
        let now = Utc::now();
        Self {
            name,
            description: None,
            scope,
            created_at: now,
            updated_at: now,
            last_used_at: None,
            version: 1,
            embedding_model: "all-MiniLM-L6-v2".to_string(),
            total_tokens: 0,
            tags: Vec::new(),
            statistics: MemoryStatistics::default(),
        }
    }
    /// Mark memory as used (updates `last_used_at`; callers persist separately)
    pub fn touch(&mut self) {
        self.last_used_at = Some(Utc::now());
    }
    /// Record a modification by bumping `updated_at` to the current time
    pub fn mark_updated(&mut self) {
        self.updated_at = Utc::now();
    }
}

View File

@@ -0,0 +1,27 @@
//! Memory file format - Modular RAG for AI agents
//!
//! Memory files (.memory) are portable knowledge packages that contain:
//! - Vector embeddings (MessagePack-backed [`VectorStore`])
//! - Document references (files relevant to a task)
//! - Learned facts (extracted knowledge)
//! - Optional conversation history
//!
//! Format: a single-file custom archive (`SDMEMORY` magic) holding
//! MessagePack-encoded sections — see [`archive::MemoryArchive`].
//! Storage: one `{name}.memory` file containing all components.
pub mod archive;
pub mod metadata;
pub mod scope;
pub mod storage;
pub mod types;
pub mod vector_store;
pub use archive::MemoryArchive;
pub use metadata::MemoryMetadata;
pub use scope::MemoryScope;
pub use storage::MemoryFile;
pub use types::{
    AuditEntry, ConversationMessage, Document, DocumentType, Fact, FactType, MemoryStatistics,
    MessageRole,
};
pub use vector_store::{VectorDocument, VectorStore};

View File

@@ -0,0 +1,39 @@
use serde::{Deserialize, Serialize};
/// Defines what a memory file is scoped to
///
/// Internally tagged for serde (`type` field, snake_case), so the variants
/// serialize as `"directory"`, `"project"`, `"topic"`, `"standalone"`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum MemoryScope {
    /// Attached to a specific directory
    Directory { path: String },
    /// Scoped to an entire project/repository
    Project { root_path: String },
    /// Topic-based (not tied to location)
    Topic { topic: String },
    /// Standalone portable memory
    Standalone,
}
impl MemoryScope {
pub fn as_str(&self) -> &'static str {
match self {
Self::Directory { .. } => "directory",
Self::Project { .. } => "project",
Self::Topic { .. } => "topic",
Self::Standalone => "standalone",
}
}
/// Get the scope identifier for display
pub fn identifier(&self) -> String {
match self {
Self::Directory { path } => path.clone(),
Self::Project { root_path } => root_path.clone(),
Self::Topic { topic } => topic.clone(),
Self::Standalone => "standalone".to_string(),
}
}
}

View File

@@ -0,0 +1,439 @@
use std::path::{Path, PathBuf};
use chrono::Utc;
use tracing::{debug, info};
use uuid::Uuid;
use super::{
archive::MemoryArchive,
metadata::MemoryMetadata,
scope::MemoryScope,
types::{Document, DocumentType, Fact, FactType, MemoryStatistics},
vector_store::VectorStore,
};
use thiserror::Error;
/// Errors surfaced by [`MemoryFile`] operations.
#[derive(Error, Debug)]
pub enum MemoryFileError {
    /// Failure in the underlying single-file archive.
    #[error("Archive error: {0}")]
    Archive(#[from] super::archive::ArchiveError),
    /// Failure in the embedded vector store.
    #[error("Vector store error: {0}")]
    VectorStore(#[from] super::vector_store::VectorStoreError),
    /// MessagePack encoding failure for a section.
    #[error("Serialization error: {0}")]
    Serialization(#[from] rmp_serde::encode::Error),
    /// MessagePack decoding failure for a section.
    #[error("Deserialization error: {0}")]
    Deserialization(#[from] rmp_serde::decode::Error),
    /// Referenced document id does not exist in this memory.
    #[error("Document not found: {0}")]
    DocumentNotFound(i32),
    /// Referenced fact id does not exist in this memory.
    #[error("Fact not found: {0}")]
    FactNotFound(i32),
}
/// Convenience alias for memory-file results.
pub type Result<T> = std::result::Result<T, MemoryFileError>;
/// Memory file using custom archive format
/// Single .memory file containing all data
pub struct MemoryFile {
    /// On-disk location of the .memory archive
    path: PathBuf,
    /// Underlying single-file archive (open read+write)
    archive: MemoryArchive,
    /// Cached metadata section (`metadata.msgpack`)
    metadata: MemoryMetadata,
    /// Cached documents section (`documents.msgpack`)
    documents: Vec<Document>,
    /// Cached facts section (`facts.msgpack`)
    facts: Vec<Fact>,
    /// In-memory vector store, persisted to `embeddings.msgpack`
    vector_store: VectorStore,
    /// Next document id to assign (max existing + 1)
    next_doc_id: i32,
    /// Next fact id to assign (max existing + 1)
    next_fact_id: i32,
}
impl MemoryFile {
    /// Create new memory file (single file archive)
    ///
    /// Writes empty `metadata`/`documents`/`facts` sections into a fresh
    /// [`MemoryArchive`] so the on-disk file is immediately valid and
    /// re-openable. The embeddings section is only written once the first
    /// embedding is added.
    pub async fn create(name: String, scope: MemoryScope, output_path: &Path) -> Result<Self> {
        info!("Creating memory archive at: {}", output_path.display());
        // Create archive
        let mut archive = MemoryArchive::create(output_path)?;
        // Initialize metadata
        let metadata = MemoryMetadata::new(name, scope);
        // Write initial files
        let metadata_bytes = rmp_serde::to_vec(&metadata)?;
        archive.add_file("metadata.msgpack", &metadata_bytes)?;
        let documents: Vec<Document> = Vec::new();
        let documents_bytes = rmp_serde::to_vec(&documents)?;
        archive.add_file("documents.msgpack", &documents_bytes)?;
        let facts: Vec<Fact> = Vec::new();
        let facts_bytes = rmp_serde::to_vec(&facts)?;
        archive.add_file("facts.msgpack", &facts_bytes)?;
        // Create in-memory vector store
        let vector_store = VectorStore::create_in_memory()?;
        info!("Memory archive created successfully");
        Ok(Self {
            path: output_path.to_path_buf(),
            archive,
            metadata,
            documents,
            facts,
            vector_store,
            next_doc_id: 1,
            next_fact_id: 1,
        })
    }
    /// Open existing memory file
    ///
    /// Missing `documents`/`facts`/`embeddings` sections fall back to empty,
    /// so partially written files still load. Only `metadata.msgpack` is
    /// mandatory.
    pub async fn open(path: PathBuf) -> Result<Self> {
        info!("Opening memory archive at: {}", path.display());
        let mut archive = MemoryArchive::open(&path)?;
        // Load metadata
        let metadata_bytes = archive.read_file("metadata.msgpack")?;
        let metadata: MemoryMetadata = rmp_serde::from_slice(&metadata_bytes)?;
        // Load documents
        let documents: Vec<Document> = if archive.contains("documents.msgpack") {
            let bytes = archive.read_file("documents.msgpack")?;
            rmp_serde::from_slice(&bytes)?
        } else {
            Vec::new()
        };
        // Load facts
        let facts: Vec<Fact> = if archive.contains("facts.msgpack") {
            let bytes = archive.read_file("facts.msgpack")?;
            rmp_serde::from_slice(&bytes)?
        } else {
            Vec::new()
        };
        // Load vector store
        let vector_store = if archive.contains("embeddings.msgpack") {
            let bytes = archive.read_file("embeddings.msgpack")?;
            VectorStore::from_bytes(&bytes)?
        } else {
            VectorStore::create_in_memory()?
        };
        // Compute next IDs (max existing id + 1; 1 when the lists are empty)
        let next_doc_id = documents
            .iter()
            .map(|d: &Document| d.id)
            .max()
            .unwrap_or(0)
            + 1;
        let next_fact_id = facts.iter().map(|f: &Fact| f.id).max().unwrap_or(0) + 1;
        debug!("Loaded memory: {} docs, {} facts", documents.len(), facts.len());
        Ok(Self {
            path,
            archive,
            metadata,
            documents,
            facts,
            vector_store,
            next_doc_id,
            next_fact_id,
        })
    }
    /// Add document
    ///
    /// Assigns the next internal id, persists the documents section plus the
    /// refreshed statistics, and returns the new document's id.
    pub async fn add_document(
        &mut self,
        content_uuid: Option<Uuid>,
        title: String,
        summary: Option<String>,
        doc_type: DocumentType,
    ) -> Result<i32> {
        let doc = Document {
            id: self.next_doc_id,
            content_uuid,
            file_path: None,
            title,
            summary,
            relevance_score: 1.0,
            added_at: Utc::now(),
            added_by: "user".to_string(),
            doc_type,
            metadata: None,
        };
        self.next_doc_id += 1;
        self.documents.push(doc.clone());
        self.persist_documents().await?;
        self.update_statistics().await?;
        debug!("Added document: {} (id: {})", doc.title, doc.id);
        Ok(doc.id)
    }
    /// Add fact
    ///
    /// `confidence` is expected in 0.0-1.0 but is not clamped here; new facts
    /// start unverified.
    pub async fn add_fact(
        &mut self,
        text: String,
        fact_type: FactType,
        confidence: f32,
        source_document_id: Option<i32>,
    ) -> Result<i32> {
        let fact = Fact {
            id: self.next_fact_id,
            text,
            fact_type,
            confidence,
            source_document_id,
            created_at: Utc::now(),
            verified: false,
        };
        self.next_fact_id += 1;
        self.facts.push(fact.clone());
        self.persist_facts().await?;
        self.update_statistics().await?;
        debug!("Added fact: {} (id: {})", fact.text, fact.id);
        Ok(fact.id)
    }
    /// Add embedding
    ///
    /// Fails with [`MemoryFileError::DocumentNotFound`] when `doc_id` does not
    /// reference an existing document.
    pub async fn add_embedding(&mut self, doc_id: i32, vector: Vec<f32>) -> Result<()> {
        // Copy the needed fields out first so the immutable borrow of `self`
        // ends before the mutable calls below.
        let (content_uuid, title, metadata_val) = {
            let doc = self
                .get_document(doc_id)
                .ok_or(MemoryFileError::DocumentNotFound(doc_id))?;
            (doc.content_uuid, doc.title.clone(), doc.metadata.clone())
        };
        self.vector_store
            .add_embedding(doc_id, content_uuid, title, vector, metadata_val)
            .await?;
        self.persist_vector_store().await?;
        self.update_statistics().await?;
        Ok(())
    }
    /// Search similar documents
    ///
    /// Returns document ids in the vector store's similarity ranking order.
    pub async fn search_similar(&self, query_vector: Vec<f32>, limit: usize) -> Result<Vec<i32>> {
        let results = self.vector_store.search(query_vector, limit).await?;
        Ok(results.into_iter().map(|r| r.id).collect())
    }
    /// Get documents
    pub fn get_documents(&self) -> &[Document] {
        &self.documents
    }
    /// Get document by ID
    pub fn get_document(&self, id: i32) -> Option<&Document> {
        self.documents.iter().find(|d| d.id == id)
    }
    /// Get facts
    pub fn get_facts(&self) -> &[Fact] {
        &self.facts
    }
    /// Get metadata
    pub fn metadata(&self) -> &MemoryMetadata {
        &self.metadata
    }
    /// Get path
    pub fn path(&self) -> &Path {
        &self.path
    }
    /// Get embedding count
    pub async fn embedding_count(&self) -> Result<usize> {
        self.vector_store.count().await.map_err(Into::into)
    }
    /// Get facts sorted by confidence
    ///
    /// Verified facts sort ahead of unverified ones; within the same verified
    /// state, higher confidence comes first (NaN confidences compare equal).
    pub fn get_facts_sorted(&self) -> Vec<&Fact> {
        let mut sorted = self.facts.iter().collect::<Vec<_>>();
        sorted.sort_by(|a, b| {
            match (a.verified, b.verified) {
                (true, false) => std::cmp::Ordering::Less,
                (false, true) => std::cmp::Ordering::Greater,
                _ => b
                    .confidence
                    .partial_cmp(&a.confidence)
                    .unwrap_or(std::cmp::Ordering::Equal),
            }
        });
        sorted
    }
    /// Persist documents to archive
    async fn persist_documents(&mut self) -> Result<()> {
        let bytes = rmp_serde::to_vec(&self.documents)?;
        self.archive.update_file("documents.msgpack", &bytes)?;
        Ok(())
    }
    /// Persist facts to archive
    async fn persist_facts(&mut self) -> Result<()> {
        let bytes = rmp_serde::to_vec(&self.facts)?;
        self.archive.update_file("facts.msgpack", &bytes)?;
        Ok(())
    }
    /// Persist vector store to archive
    async fn persist_vector_store(&mut self) -> Result<()> {
        let bytes = self.vector_store.to_bytes()?;
        self.archive.update_file("embeddings.msgpack", &bytes)?;
        Ok(())
    }
    /// Persist metadata to archive
    async fn persist_metadata(&mut self) -> Result<()> {
        let bytes = rmp_serde::to_vec(&self.metadata)?;
        self.archive.update_file("metadata.msgpack", &bytes)?;
        Ok(())
    }
    /// Update statistics
    ///
    /// Recomputes the cached counts/size and persists metadata; called after
    /// every mutation so the on-disk statistics never go stale.
    async fn update_statistics(&mut self) -> Result<()> {
        let embedding_count = self.vector_store.count().await?;
        let file_size = self.archive.size()?;
        self.metadata.statistics = MemoryStatistics {
            document_count: self.documents.len(),
            fact_count: self.facts.len(),
            conversation_message_count: 0,
            embedding_count,
            file_size_bytes: file_size,
        };
        self.persist_metadata().await?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::NamedTempFile;
    #[tokio::test]
    async fn test_create_single_file_memory() {
        let temp_file = NamedTempFile::new().unwrap();
        let memory = MemoryFile::create(
            "test".to_string(),
            MemoryScope::Standalone,
            temp_file.path(),
        )
        .await
        .unwrap();
        // Should be a single file
        assert!(memory.path().exists());
        assert!(memory.path().is_file());
    }
    #[tokio::test]
    async fn test_add_and_retrieve() {
        let temp_file = NamedTempFile::new().unwrap();
        let mut memory = MemoryFile::create(
            "test".to_string(),
            MemoryScope::Standalone,
            temp_file.path(),
        )
        .await
        .unwrap();
        // Add document
        let doc_id = memory
            .add_document(None, "Test Doc".to_string(), None, DocumentType::Note)
            .await
            .unwrap();
        assert_eq!(memory.get_documents().len(), 1);
        assert_eq!(memory.get_document(doc_id).unwrap().title, "Test Doc");
        // Add fact
        memory
            .add_fact("Test fact".to_string(), FactType::General, 1.0, Some(doc_id))
            .await
            .unwrap();
        assert_eq!(memory.get_facts().len(), 1);
    }
    #[tokio::test]
    async fn test_persistence() {
        let temp_file = NamedTempFile::new().unwrap();
        let path = temp_file.path().to_path_buf();
        // Write in an inner scope so the archive handle is dropped before reopen.
        {
            let mut memory = MemoryFile::create(
                "test".to_string(),
                MemoryScope::Standalone,
                &path,
            )
            .await
            .unwrap();
            memory
                .add_document(None, "Doc".to_string(), None, DocumentType::Note)
                .await
                .unwrap();
            memory
                .add_fact("Fact".to_string(), FactType::General, 1.0, None)
                .await
                .unwrap();
        }
        // Reopen
        let memory = MemoryFile::open(path).await.unwrap();
        assert_eq!(memory.get_documents().len(), 1);
        assert_eq!(memory.get_facts().len(), 1);
    }
    #[tokio::test]
    async fn test_embeddings_in_archive() {
        let temp_file = NamedTempFile::new().unwrap();
        let mut memory = MemoryFile::create(
            "test".to_string(),
            MemoryScope::Standalone,
            temp_file.path(),
        )
        .await
        .unwrap();
        let doc_id = memory
            .add_document(None, "Doc".to_string(), None, DocumentType::Code)
            .await
            .unwrap();
        // Add embedding
        let vector = vec![0.1, 0.2, 0.3, 0.4];
        memory.add_embedding(doc_id, vector.clone()).await.unwrap();
        // Search: the identical vector must rank its own document first.
        let results = memory.search_similar(vector, 10).await.unwrap();
        assert_eq!(results.len(), 1);
        assert_eq!(results[0], doc_id);
    }
}

View File

@@ -0,0 +1,176 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// A document reference in a memory file
///
/// Serialized with rmp_serde (positional encoding) into `documents.msgpack`,
/// so field order is part of the on-disk format — do not reorder fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Document {
    /// Internal ID within memory (assigned sequentially starting at 1)
    pub id: i32,
    /// Spacedrive content UUID (if file is in VDFS)
    pub content_uuid: Option<Uuid>,
    /// Physical path (for non-VDFS files or reference)
    pub file_path: Option<String>,
    /// Document title
    pub title: String,
    /// AI-generated or user-written summary
    pub summary: Option<String>,
    /// Relevance score (0.0-1.0)
    pub relevance_score: f32,
    /// When document was added to memory
    pub added_at: DateTime<Utc>,
    /// Who added it
    pub added_by: String,
    /// Document type classification
    pub doc_type: DocumentType,
    /// Additional metadata
    pub metadata: Option<serde_json::Value>,
}
/// Classification of a document referenced by a memory file
/// (serialized snake_case).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DocumentType {
    /// Source code
    Code,
    /// Documentation / manual content
    Documentation,
    /// External reference material
    Reference,
    /// Free-form note
    Note,
    /// Design document or specification
    Design,
    /// Test file or test plan
    Test,
    /// Configuration file
    Config,
    /// Anything that fits no other category
    Other,
}
impl std::fmt::Display for DocumentType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
Self::Code => "Code",
Self::Documentation => "Documentation",
Self::Reference => "Reference",
Self::Note => "Note",
Self::Design => "Design",
Self::Test => "Test",
Self::Config => "Config",
Self::Other => "Other",
};
write!(f, "{}", s)
}
}
/// A learned fact in a memory file
///
/// Serialized with rmp_serde (positional encoding) into `facts.msgpack`;
/// field order is part of the on-disk format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Fact {
    /// Internal ID within memory
    pub id: i32,
    /// The fact text
    pub text: String,
    /// Type of fact
    pub fact_type: FactType,
    /// Confidence score (0.0-1.0)
    pub confidence: f32,
    /// Source document ID (if extracted from document)
    pub source_document_id: Option<i32>,
    /// When fact was created
    pub created_at: DateTime<Utc>,
    /// Whether fact has been verified by user (starts false)
    pub verified: bool,
}
/// Kind of knowledge a [`Fact`] captures (serialized snake_case).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum FactType {
    /// Core principle or pattern
    Principle,
    /// Decision made during development
    Decision,
    /// Observed pattern or behavior
    Pattern,
    /// Known issue or limitation
    Issue,
    /// Implementation detail
    Detail,
    /// General knowledge
    General,
}
impl std::fmt::Display for FactType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let s = match self {
Self::Principle => "Principle",
Self::Decision => "Decision",
Self::Pattern => "Pattern",
Self::Issue => "Issue",
Self::Detail => "Detail",
Self::General => "General",
};
write!(f, "{}", s)
}
}
/// Statistics about a memory file
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct MemoryStatistics {
/// Number of documents
pub document_count: usize,
/// Number of facts
pub fact_count: usize,
/// Number of conversation messages (if history enabled)
pub conversation_message_count: usize,
/// Number of embeddings in vector store
pub embedding_count: usize,
/// Total size on disk (bytes)
pub file_size_bytes: u64,
}
/// Conversation message (optional history)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConversationMessage {
pub id: i32,
pub session_id: Uuid,
pub role: MessageRole,
pub content: String,
pub tokens: Option<usize>,
pub created_at: DateTime<Utc>,
pub metadata: Option<serde_json::Value>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum MessageRole {
User,
Assistant,
System,
}
/// Audit log entry
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AuditEntry {
    /// Internal ID within memory
    pub id: i32,
    /// What happened (free-form action name)
    pub action: String,
    /// Who performed the action
    pub actor: String,
    /// Optional structured details about the action
    pub details: Option<serde_json::Value>,
    /// When the action occurred
    pub timestamp: DateTime<Utc>,
}

View File

@@ -0,0 +1,291 @@
use std::{collections::HashMap, path::Path};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::fs;
use tracing::{debug, info};
use uuid::Uuid;
/// Errors produced by the MessagePack-backed vector store
#[derive(Error, Debug)]
pub enum VectorStoreError {
    /// Generic store-level failure
    #[error("Vector store error: {0}")]
    Store(String),
    /// Underlying filesystem failure
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// Failed to encode embeddings to MessagePack
    #[error("Serialization error: {0}")]
    Serialization(#[from] rmp_serde::encode::Error),
    /// Failed to decode embeddings from MessagePack
    #[error("Deserialization error: {0}")]
    Deserialization(#[from] rmp_serde::decode::Error),
}
/// Convenience alias for results using [`VectorStoreError`]
pub type Result<T> = std::result::Result<T, VectorStoreError>;
/// Simple MessagePack-based vector store
/// TODO: Replace with LanceDB once dependency conflicts resolved
pub struct VectorStore {
    /// Backing file path; an empty path marks in-memory (archive) mode,
    /// in which `persist` is a no-op
    storage_path: std::path::PathBuf,
    /// All embeddings, keyed by document ID
    embeddings: HashMap<i32, VectorDocument>,
}
/// Document with embedding for storage
///
/// Serialized to MessagePack as part of the store's embeddings map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VectorDocument {
    /// Document ID (maps to documents table)
    pub id: i32,
    /// Content UUID (if from Spacedrive)
    pub content_uuid: Option<String>,
    /// Document title
    pub title: String,
    /// Embedding vector
    pub vector: Vec<f32>,
    /// Additional metadata
    pub metadata: Option<serde_json::Value>,
}
impl VectorStore {
    /// Create new vector store in memory directory (old directory format).
    ///
    /// Eagerly writes an empty `embeddings.msgpack` so a later
    /// [`Self::open`] on the same directory finds a valid file.
    pub async fn create(memory_path: &Path) -> Result<Self> {
        let storage_path = memory_path.join("embeddings.msgpack");
        info!("Creating vector store at: {}", storage_path.display());

        let store = Self {
            storage_path,
            embeddings: HashMap::new(),
        };

        // Write empty embeddings file
        store.persist().await?;

        Ok(store)
    }

    /// Create in-memory vector store (for archive format).
    ///
    /// The empty `storage_path` marks in-memory mode; `persist` is a no-op.
    pub fn create_in_memory() -> Result<Self> {
        Ok(Self {
            storage_path: std::path::PathBuf::new(),
            embeddings: HashMap::new(),
        })
    }

    /// Load from MessagePack bytes (for archive format).
    ///
    /// The resulting store is in-memory and never persists to disk.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
        let embeddings = rmp_serde::from_slice(bytes)?;
        Ok(Self {
            storage_path: std::path::PathBuf::new(),
            embeddings,
        })
    }

    /// Serialize all embeddings to MessagePack bytes (for archive format).
    pub fn to_bytes(&self) -> Result<Vec<u8>> {
        Ok(rmp_serde::to_vec(&self.embeddings)?)
    }

    /// Open existing vector store; starts empty if the file doesn't exist yet.
    pub async fn open(memory_path: &Path) -> Result<Self> {
        let storage_path = memory_path.join("embeddings.msgpack");
        debug!("Opening vector store at: {}", storage_path.display());

        let embeddings = if storage_path.exists() {
            let bytes = fs::read(&storage_path).await?;
            rmp_serde::from_slice(&bytes)?
        } else {
            HashMap::new()
        };

        Ok(Self {
            storage_path,
            embeddings,
        })
    }

    /// Persist to disk (only for directory-based format)
    async fn persist(&self) -> Result<()> {
        // Skip if in-memory mode (empty path)
        if self.storage_path.as_os_str().is_empty() {
            return Ok(());
        }

        let bytes = rmp_serde::to_vec(&self.embeddings)?;
        fs::write(&self.storage_path, bytes).await?;
        Ok(())
    }

    /// Add (or replace) the embedding for a document and persist immediately.
    pub async fn add_embedding(
        &mut self,
        doc_id: i32,
        content_uuid: Option<Uuid>,
        title: String,
        vector: Vec<f32>,
        metadata: Option<serde_json::Value>,
    ) -> Result<()> {
        let doc = VectorDocument {
            id: doc_id,
            content_uuid: content_uuid.map(|u| u.to_string()),
            title,
            vector,
            metadata,
        };

        self.embeddings.insert(doc_id, doc);
        self.persist().await?;

        Ok(())
    }

    /// Search for similar documents (brute-force cosine similarity).
    ///
    /// Returns up to `limit` documents ordered by descending similarity.
    /// Stored vectors whose dimension differs from the query score 0.0.
    pub async fn search(
        &self,
        query_vector: Vec<f32>,
        limit: usize,
    ) -> Result<Vec<VectorDocument>> {
        // Score by reference so only the returned top-`limit` documents
        // are cloned (previously every stored document was cloned).
        let mut scored: Vec<(&VectorDocument, f32)> = self
            .embeddings
            .values()
            .map(|doc| (doc, cosine_similarity(&query_vector, &doc.vector)))
            .collect();

        // Sort by similarity (descending); NaN scores compare as equal.
        scored.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));

        Ok(scored
            .into_iter()
            .take(limit)
            .map(|(doc, _)| doc.clone())
            .collect())
    }

    /// Get embedding count
    pub async fn count(&self) -> Result<usize> {
        Ok(self.embeddings.len())
    }

    /// Remove embedding by document ID; persists even if the ID was absent.
    pub async fn remove_embedding(&mut self, doc_id: i32) -> Result<()> {
        self.embeddings.remove(&doc_id);
        self.persist().await?;
        Ok(())
    }
}
/// Calculate cosine similarity between two vectors.
///
/// Returns 0.0 when the lengths differ or when either vector has zero
/// magnitude (cosine is undefined in those cases).
fn cosine_similarity(a: &[f32], b: &[f32]) -> f32 {
    if a.len() != b.len() {
        return 0.0;
    }

    // Accumulate the dot product and both squared norms in a single pass.
    let mut dot = 0.0_f32;
    let mut norm_a = 0.0_f32;
    let mut norm_b = 0.0_f32;
    for (&x, &y) in a.iter().zip(b.iter()) {
        dot += x * y;
        norm_a += x * x;
        norm_b += y * y;
    }

    let mag_a = norm_a.sqrt();
    let mag_b = norm_b.sqrt();
    if mag_a == 0.0 || mag_b == 0.0 {
        return 0.0;
    }

    dot / (mag_a * mag_b)
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[tokio::test]
    async fn test_create_vector_store() {
        let dir = tempdir().unwrap();
        let store_dir = dir.path().join("test.memory");
        std::fs::create_dir_all(&store_dir).unwrap();

        let _store = VectorStore::create(&store_dir).await.unwrap();

        // `create` must eagerly write the backing file.
        assert!(store_dir.join("embeddings.msgpack").exists());
    }

    #[tokio::test]
    async fn test_add_and_search() {
        let dir = tempdir().unwrap();
        let store_dir = dir.path().join("test.memory");
        std::fs::create_dir_all(&store_dir).unwrap();
        let mut store = VectorStore::create(&store_dir).await.unwrap();

        // Two embeddings; the first matches the query exactly.
        let query = vec![0.1_f32, 0.2, 0.3, 0.4];
        store
            .add_embedding(1, None, String::from("Doc 1"), query.clone(), None)
            .await
            .unwrap();
        store
            .add_embedding(2, None, String::from("Doc 2"), vec![0.2, 0.3, 0.4, 0.5], None)
            .await
            .unwrap();

        let hits = store.search(query, 10).await.unwrap();
        assert_eq!(hits.len(), 2);
        // The exact match must rank first.
        assert_eq!(hits[0].id, 1);
        assert_eq!(hits[0].title, "Doc 1");
    }

    #[tokio::test]
    async fn test_count() {
        let dir = tempdir().unwrap();
        let store_dir = dir.path().join("test.memory");
        std::fs::create_dir_all(&store_dir).unwrap();
        let mut store = VectorStore::create(&store_dir).await.unwrap();

        assert_eq!(store.count().await.unwrap(), 0);

        store
            .add_embedding(1, None, String::from("Doc 1"), vec![0.1, 0.2, 0.3], None)
            .await
            .unwrap();

        assert_eq!(store.count().await.unwrap(), 1);
    }

    #[tokio::test]
    async fn test_remove_embedding() {
        let dir = tempdir().unwrap();
        let store_dir = dir.path().join("test.memory");
        std::fs::create_dir_all(&store_dir).unwrap();
        let mut store = VectorStore::create(&store_dir).await.unwrap();

        store
            .add_embedding(1, None, String::from("Doc 1"), vec![0.1, 0.2, 0.3], None)
            .await
            .unwrap();
        assert_eq!(store.count().await.unwrap(), 1);

        store.remove_embedding(1).await.unwrap();
        assert_eq!(store.count().await.unwrap(), 0);
    }
}

View File

@@ -11,6 +11,7 @@ pub mod device;
pub mod file;
pub mod location;
pub mod media_data;
pub mod memory;
pub mod resource;
pub mod resource_manager;
pub mod resource_registry;
@@ -26,6 +27,7 @@ pub use device::{Device, OperatingSystem};
pub use file::{EntryKind, File, Sidecar};
pub use location::{IndexMode, Location, ScanState};
pub use media_data::{AudioMediaData, ImageMediaData, VideoMediaData};
pub use memory::{MemoryFile, MemoryMetadata, MemoryScope};
pub use resource::Identifiable;
pub use resource_manager::ResourceManager;
pub use space::{

View File

@@ -10,6 +10,8 @@ use specta::Type;
use std::collections::HashMap;
use uuid::Uuid;
use super::resource::Identifiable;
/// A tag with advanced capabilities for contextual organization
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Type)]
pub struct Tag {
@@ -438,3 +440,18 @@ pub enum TagError {
#[error("Database error: {0}")]
DatabaseError(String),
}
// Implement Identifiable for normalized cache support
// Implement Identifiable for normalized cache support
impl Identifiable for Tag {
    /// Stable identity: the tag's own UUID.
    fn id(&self) -> Uuid {
        self.id
    }

    /// Resource-type key used by the normalized cache.
    fn resource_type() -> &'static str {
        "tag"
    }

    fn sync_dependencies() -> &'static [&'static str] {
        &[] // Tags are a simple resource backed by the tags table
    }
}

View File

@@ -209,3 +209,21 @@ priority = 100
[file_types.metadata]
text_based = true
# Spacedrive Memory Files
[[file_types]]
id = "application/x-spacedrive-memory"
name = "Spacedrive Memory"
extensions = ["memory"]
mime_types = ["application/x-spacedrive-memory"]
category = "document"
priority = 100
[[file_types.magic_bytes]]
pattern = "53 44 4D 45 4D 01" # "SDMEM\x01"
offset = 0
priority = 100
[file_types.metadata]
spacedrive = true
ai_knowledge = true
memory_file = true

View File

@@ -332,7 +332,6 @@ priority = 85
text_file = true
typescript = true
# Encrypted/Spacedrive specific
[[file_types]]
id = "application/x-spacedrive-encrypted"
name = "Spacedrive Encrypted"

View File

@@ -42,13 +42,35 @@ impl Library {
.to_sync_json()
.map_err(|e| anyhow::anyhow!("Failed to serialize model: {}", e))?;
if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await {
self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data)
let result = if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await {
self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data.clone())
.await
} else {
self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data)
self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data.clone())
.await
};
// Emit resource event for frontend reactivity
if result.is_ok() {
use crate::infra::sync::ChangeType as CT;
match change_type {
CT::Delete => {
self.event_bus().emit(Event::ResourceDeleted {
resource_type: M::SYNC_MODEL.to_string(),
resource_id: model.sync_id(),
});
}
CT::Insert | CT::Update => {
self.event_bus().emit(Event::ResourceChanged {
resource_type: M::SYNC_MODEL.to_string(),
resource: data,
metadata: None,
});
}
}
}
result
}
/// Sync a model with FK conversion (for models with relationships)
@@ -119,13 +141,35 @@ impl Library {
}
}
if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await {
self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data)
let result = if crate::infra::sync::is_device_owned(M::SYNC_MODEL).await {
self.sync_device_owned_internal(M::SYNC_MODEL, model.sync_id(), data.clone())
.await
} else {
self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data)
self.sync_shared_internal(M::SYNC_MODEL, model.sync_id(), change_type, data.clone())
.await
};
// Emit resource event for frontend reactivity
if result.is_ok() {
use crate::infra::sync::ChangeType as CT;
match change_type {
CT::Delete => {
self.event_bus().emit(Event::ResourceDeleted {
resource_type: M::SYNC_MODEL.to_string(),
resource_id: model.sync_id(),
});
}
CT::Insert | CT::Update => {
self.event_bus().emit(Event::ResourceChanged {
resource_type: M::SYNC_MODEL.to_string(),
resource: data,
metadata: None,
});
}
}
}
result
}
/// Batch sync multiple models (optimized for bulk operations)

View File

@@ -279,6 +279,106 @@ impl DirectoryListingQuery {
});
}
// Collect entry UUIDs for tag lookup
let entry_uuids: Vec<Uuid> = rows
.iter()
.filter_map(|row| {
row.try_get::<Option<Uuid>>("", "entry_uuid")
.ok()
.flatten()
})
.collect();
// Batch fetch tags for these entries (both entry-scoped and content-scoped)
let mut tags_by_entry: HashMap<Uuid, Vec<crate::domain::tag::Tag>> = HashMap::new();
if !entry_uuids.is_empty() || !content_uuids.is_empty() {
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
tracing::debug!("Loading tags for {} entries and {} content identities", entry_uuids.len(), content_uuids.len());
// Load user_metadata for entries and content
let mut metadata_records = user_metadata::Entity::find()
.filter(
user_metadata::Column::EntryUuid.is_in(entry_uuids.clone())
.or(user_metadata::Column::ContentIdentityUuid.is_in(content_uuids.clone()))
)
.all(db)
.await?;
tracing::debug!("Found {} metadata records", metadata_records.len());
if !metadata_records.is_empty() {
let metadata_ids: Vec<i32> = metadata_records.iter().map(|m| m.id).collect();
// Load user_metadata_tag records
let metadata_tags = user_metadata_tag::Entity::find()
.filter(user_metadata_tag::Column::UserMetadataId.is_in(metadata_ids))
.all(db)
.await?;
// Get all unique tag IDs
let tag_ids: Vec<i32> = metadata_tags.iter().map(|mt| mt.tag_id).collect();
let unique_tag_ids: std::collections::HashSet<i32> = tag_ids.iter().cloned().collect();
tracing::debug!("Found {} user_metadata_tag records with {} unique tags", metadata_tags.len(), unique_tag_ids.len());
// Load tag entities
let tag_models = tag::Entity::find()
.filter(tag::Column::Id.is_in(tag_ids))
.all(db)
.await?;
tracing::debug!("Loaded {} tag models", tag_models.len());
// Build tag_db_id -> Tag mapping
let tag_map: HashMap<i32, crate::domain::tag::Tag> = tag_models
.into_iter()
.filter_map(|t| {
let db_id = t.id;
crate::ops::tags::manager::model_to_domain(t).ok().map(|tag| (db_id, tag))
})
.collect();
tracing::debug!("Built tag map with {} entries", tag_map.len());
// Build metadata_id -> Vec<Tag> mapping
let mut tags_by_metadata: HashMap<i32, Vec<crate::domain::tag::Tag>> = HashMap::new();
for mt in metadata_tags {
if let Some(tag) = tag_map.get(&mt.tag_id) {
tags_by_metadata
.entry(mt.user_metadata_id)
.or_insert_with(Vec::new)
.push(tag.clone());
}
}
// Map tags to entries (prioritize entry-scoped, fall back to content-scoped)
for metadata in metadata_records {
if let Some(tags) = tags_by_metadata.get(&metadata.id) {
// Entry-scoped metadata (higher priority)
if let Some(entry_uuid) = metadata.entry_uuid {
tags_by_entry.insert(entry_uuid, tags.clone());
}
// Content-scoped metadata (applies to all entries with this content)
else if let Some(content_uuid) = metadata.content_identity_uuid {
// Apply to all entries with this content_uuid
for row in &rows {
if let Some(ci_uuid) = row.try_get::<Option<Uuid>>("", "content_identity_uuid").ok().flatten() {
if ci_uuid == content_uuid {
if let Some(entry_uuid) = row.try_get::<Option<Uuid>>("", "entry_uuid").ok().flatten() {
// Only set if not already set by entry-scoped metadata
tags_by_entry.entry(entry_uuid).or_insert_with(|| tags.clone());
}
}
}
}
}
}
}
}
}
// Convert to File objects
let mut files = Vec::new();
for row in rows {
@@ -447,6 +547,14 @@ impl DirectoryListingQuery {
});
}
// Add tags from batch lookup
if let Some(entry_uuid_val) = entry_uuid {
if let Some(tags) = tags_by_entry.get(&entry_uuid_val) {
tracing::debug!("Adding {} tags to entry {}", tags.len(), entry_uuid_val);
file.tags = tags.clone();
}
}
files.push(file);
}

View File

@@ -6,7 +6,7 @@ use crate::{
domain::{addressing::SdPath, File},
infra::db::entities::{
audio_media_data, content_identity, device, directory_paths, entry, image_media_data,
location, sidecar, tag, user_metadata_tag, video_media_data,
location, sidecar, tag, user_metadata, user_metadata_tag, video_media_data,
},
infra::query::LibraryQuery,
};
@@ -165,7 +165,7 @@ impl LibraryQuery for FileByIdQuery {
};
// Convert to File using from_entity_model
let mut file = File::from_entity_model(entry_model, sd_path);
let mut file = File::from_entity_model(entry_model.clone(), sd_path);
file.sidecars = sidecars;
file.content_identity = content_identity_domain;
file.image_media_data = image_media;
@@ -176,6 +176,55 @@ impl LibraryQuery for FileByIdQuery {
file.content_kind = ci.kind;
}
// Load tags for this entry
if let Some(entry_uuid) = entry_model.uuid {
use std::collections::HashMap;
// Load user_metadata for this entry (both entry-scoped and content-scoped)
let mut metadata_filter = user_metadata::Column::EntryUuid.eq(entry_uuid);
// Also check for content-scoped metadata if content identity exists
if let Some(ref ci) = file.content_identity {
metadata_filter = metadata_filter.or(user_metadata::Column::ContentIdentityUuid.eq(ci.uuid));
}
let metadata_records = user_metadata::Entity::find()
.filter(metadata_filter)
.all(db.conn())
.await?;
if !metadata_records.is_empty() {
let metadata_ids: Vec<i32> = metadata_records.iter().map(|m| m.id).collect();
// Load user_metadata_tag records
let metadata_tags = user_metadata_tag::Entity::find()
.filter(user_metadata_tag::Column::UserMetadataId.is_in(metadata_ids))
.all(db.conn())
.await?;
if !metadata_tags.is_empty() {
let tag_ids: Vec<i32> = metadata_tags.iter().map(|mt| mt.tag_id).collect();
// Load tag entities
let tag_models = tag::Entity::find()
.filter(tag::Column::Id.is_in(tag_ids))
.all(db.conn())
.await?;
// Convert to domain tags
let mut tags = Vec::new();
for tag_model in tag_models {
if let Ok(domain_tag) = crate::ops::tags::manager::model_to_domain(tag_model) {
tags.push(domain_tag);
}
}
file.tags = tags;
tracing::debug!("Loaded {} tags for entry {}", file.tags.len(), entry_uuid);
}
}
}
Ok(Some(file))
}
}

View File

@@ -126,7 +126,7 @@ impl IndexerJobConfig {
Self {
location_id: None,
path,
mode: IndexMode::Shallow,
mode: IndexMode::Content, // Enable content identification for ephemeral browsing
scope,
persistence: IndexPersistence::Ephemeral,
max_depth: if scope == IndexScope::Current {
@@ -759,9 +759,14 @@ impl IndexerJob {
while let Some(batch) = state.entry_batches.pop() {
for entry in batch {
// Store entry (this will emit ResourceChanged events)
persistence
let entry_id = persistence
.store_entry(&entry, None, &root_path)
.await?;
// Queue files for content identification
if entry.kind == super::state::EntryKind::File && entry.size > 0 {
state.entries_for_content.push((entry_id, entry.path.clone()));
}
}
}
@@ -775,15 +780,104 @@ impl IndexerJob {
async fn run_ephemeral_content_phase_static(
state: &mut IndexerState,
ctx: &JobContext<'_>,
_ephemeral_index: Arc<RwLock<EphemeralIndex>>,
ephemeral_index: Arc<RwLock<EphemeralIndex>>,
) -> JobResult<()> {
ctx.log("Starting ephemeral content identification");
use crate::domain::content_identity::ContentHashGenerator;
use crate::ops::indexing::persistence::PersistenceFactory;
// For ephemeral jobs, we can skip heavy content processing or do it lightly
// This is mainly for demonstration - in practice you might generate CAS IDs
ctx.log(format!(
"Starting ephemeral content identification for {} files",
state.entries_for_content.len()
));
if state.entries_for_content.is_empty() {
state.phase = Phase::Complete;
return Ok(());
}
// Get root path and event bus
let (root_path, event_bus) = {
let index = ephemeral_index.read().await;
(index.root_path.clone(), Some(ctx.library().event_bus().clone()))
};
// Create ephemeral persistence for event emission
let persistence = PersistenceFactory::ephemeral(
ephemeral_index.clone(),
event_bus,
root_path,
);
// Process files for content identification
let mut success_count = 0;
let mut error_count = 0;
// Process in chunks to emit progress
const CHUNK_SIZE: usize = 50;
let total = state.entries_for_content.len();
while !state.entries_for_content.is_empty() {
ctx.check_interrupt().await?;
let chunk_size = CHUNK_SIZE.min(state.entries_for_content.len());
let chunk: Vec<_> = state.entries_for_content.drain(..chunk_size).collect();
// Process chunk in parallel
let hash_futures: Vec<_> = chunk
.iter()
.map(|(entry_id, path)| async move {
let hash_result = ContentHashGenerator::generate_content_hash(path).await;
(*entry_id, path.clone(), hash_result)
})
.collect();
let results = futures::future::join_all(hash_futures).await;
// Store results and emit events
for (entry_id, path, hash_result) in results {
match hash_result {
Ok(cas_id) => {
// Store via persistence (this emits ResourceChanged event with content_identity)
if let Err(e) = persistence.store_content_identity(entry_id, &path, cas_id.clone()).await {
ctx.add_non_critical_error(format!(
"Failed to store content identity for {}: {}",
path.display(),
e
));
error_count += 1;
} else {
success_count += 1;
}
}
Err(e) => {
// Skip empty files or errors
if !matches!(e, crate::domain::ContentHashError::EmptyFile) {
ctx.add_non_critical_error(format!(
"Failed to hash {}: {}",
path.display(),
e
));
error_count += 1;
}
}
}
}
ctx.log(format!(
"Content identification progress: {}/{} (success: {}, errors: {})",
total - state.entries_for_content.len(),
total,
success_count,
error_count
));
}
state.phase = Phase::Complete;
ctx.log("Ephemeral content identification complete");
ctx.log(format!(
"Ephemeral content identification complete: {} files processed, {} errors",
success_count,
error_count
));
Ok(())
}

View File

@@ -539,7 +539,7 @@ impl IndexPersistence for EphemeralPersistence {
async fn store_content_identity(
&self,
_entry_id: i32,
entry_id: i32,
path: &Path,
cas_id: String,
) -> JobResult<()> {
@@ -559,14 +559,83 @@ impl IndexPersistence for EphemeralPersistence {
let content_identity = EphemeralContentIdentity {
cas_id: cas_id.clone(),
mime_type,
mime_type: mime_type.clone(),
file_size,
entry_count: 1,
};
// Store in ephemeral index
{
let mut index = self.index.write().await;
index.add_content_identity(cas_id, content_identity);
index.add_content_identity(cas_id.clone(), content_identity);
}
// Emit ResourceChanged event with updated content_identity
if let Some(event_bus) = &self.event_bus {
use crate::device::get_current_device_slug;
use crate::domain::addressing::SdPath;
use crate::domain::content_identity::ContentIdentity;
use crate::domain::file::File;
use crate::infra::event::{Event, ResourceMetadata};
// Get the stored metadata for this entry
let metadata_opt = {
let index = self.index.read().await;
index.entries.get(path).cloned()
};
if let Some(metadata) = metadata_opt {
// Build SdPath
let device_slug = get_current_device_slug();
let sd_path = SdPath::Physical {
device_slug: device_slug.clone(),
path: path.to_path_buf(),
};
// Generate UUID for this file (use entry_id as seed for consistency)
let entry_uuid = uuid::Uuid::from_u128(entry_id as u128);
// Build File with content_identity
let mut file = File::from_ephemeral(entry_uuid, &metadata, sd_path);
// Add content identity
file.content_identity = Some(ContentIdentity {
uuid: uuid::Uuid::new_v4(),
kind: crate::domain::ContentKind::Unknown, // TODO: detect from mime_type
content_hash: cas_id.clone(),
integrity_hash: None,
mime_type_id: None,
text_content: None,
total_size: file_size as i64,
entry_count: 1,
first_seen_at: chrono::Utc::now(),
last_verified_at: chrono::Utc::now(),
});
// Emit event with updated file
let parent_path = path.parent().map(|p| SdPath::Physical {
device_slug,
path: p.to_path_buf(),
});
let affected_paths = if let Some(parent) = parent_path {
vec![parent]
} else {
vec![]
};
if let Ok(resource_json) = serde_json::to_value(&file) {
event_bus.emit(Event::ResourceChanged {
resource_type: "file".to_string(),
resource: resource_json,
metadata: Some(ResourceMetadata {
no_merge_fields: vec!["sd_path".to_string()],
alternate_ids: vec![],
affected_paths,
}),
});
}
}
}
Ok(())

View File

@@ -35,8 +35,13 @@ impl UserMetadataManager {
}
}
/// Get user metadata for an entry (creates if doesn't exist)
/// Get or create entry-scoped metadata (legacy method)
pub async fn get_or_create_metadata(&self, entry_uuid: Uuid) -> Result<UserMetadata, TagError> {
self.get_or_create_entry_metadata(entry_uuid).await
}
/// Get or create entry-scoped metadata (tags specific to this file instance)
pub async fn get_or_create_entry_metadata(&self, entry_uuid: Uuid) -> Result<UserMetadata, TagError> {
let db = &*self.db;
// First try to find existing metadata
@@ -44,7 +49,7 @@ impl UserMetadataManager {
return Ok(metadata);
}
// Create new metadata if it doesn't exist
// Create new entry-scoped metadata if it doesn't exist
let metadata_uuid = Uuid::new_v4();
let new_metadata = user_metadata::ActiveModel {
id: NotSet,
@@ -59,14 +64,43 @@ impl UserMetadataManager {
updated_at: Set(Utc::now()),
};
let result = new_metadata
new_metadata
.insert(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
// No need to update entry - the metadata is linked via entry_uuid
Ok(UserMetadata::new(metadata_uuid))
}
/// Get or create content-scoped metadata (tags apply to all instances of this content)
pub async fn get_or_create_content_metadata(&self, content_identity_uuid: Uuid) -> Result<UserMetadata, TagError> {
let db = &*self.db;
// First try to find existing metadata
if let Some(metadata) = self.get_metadata_by_content_uuid(content_identity_uuid).await? {
return Ok(metadata);
}
// Create new content-scoped metadata if it doesn't exist
let metadata_uuid = Uuid::new_v4();
let new_metadata = user_metadata::ActiveModel {
id: NotSet,
uuid: Set(metadata_uuid),
entry_uuid: Set(None),
content_identity_uuid: Set(Some(content_identity_uuid)),
notes: Set(None),
favorite: Set(false),
hidden: Set(false),
custom_data: Set(serde_json::json!({})),
created_at: Set(Utc::now()),
updated_at: Set(Utc::now()),
};
new_metadata
.insert(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
// Return the new metadata
Ok(UserMetadata::new(metadata_uuid))
}
@@ -91,17 +125,39 @@ impl UserMetadataManager {
Ok(None)
}
/// Apply semantic tags to an entry
pub async fn apply_semantic_tags(
/// Get user metadata for content by content identity UUID
pub async fn get_metadata_by_content_uuid(
&self,
entry_uuid: Uuid,
tag_applications: Vec<TagApplication>,
device_uuid: Uuid,
) -> Result<(), TagError> {
content_identity_uuid: Uuid,
) -> Result<Option<UserMetadata>, TagError> {
let db = &*self.db;
// Ensure metadata exists for this entry
let metadata = self.get_or_create_metadata(entry_uuid).await?;
// Find metadata by content identity UUID
let metadata_model = user_metadata::Entity::find()
.filter(user_metadata::Column::ContentIdentityUuid.eq(content_identity_uuid))
.one(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
if let Some(model) = metadata_model {
return Ok(Some(self.model_to_domain(model).await?));
}
Ok(None)
}
/// Apply semantic tags to a content identity (tags all instances of this content)
/// Returns the created user_metadata_tag models for syncing
pub async fn apply_semantic_tags_to_content(
&self,
content_identity_uuid: Uuid,
tag_applications: Vec<TagApplication>,
device_uuid: Uuid,
) -> Result<Vec<user_metadata_tag::Model>, TagError> {
let db = &*self.db;
// Get or create content-scoped metadata
let metadata = self.get_or_create_content_metadata(content_identity_uuid).await?;
// Get the database ID for the user metadata
let metadata_model = user_metadata::Entity::find()
@@ -113,6 +169,58 @@ impl UserMetadataManager {
"UserMetadata not found".to_string(),
))?;
self.apply_tags_to_metadata(metadata_model.id, &tag_applications, device_uuid).await
}
/// Apply semantic tags to a specific entry (tags only this instance)
/// Returns the created user_metadata_tag models for syncing
pub async fn apply_semantic_tags_to_entry(
&self,
entry_uuid: Uuid,
tag_applications: Vec<TagApplication>,
device_uuid: Uuid,
) -> Result<Vec<user_metadata_tag::Model>, TagError> {
let db = &*self.db;
// Get or create entry-scoped metadata
let metadata = self.get_or_create_entry_metadata(entry_uuid).await?;
// Get the database ID for the user metadata.
// NOTE(review): `metadata.id` appears to be the row's public UUID, not
// the numeric primary key, hence the second lookup — confirm.
let metadata_model = user_metadata::Entity::find()
.filter(user_metadata::Column::Uuid.eq(metadata.id))
.one(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?
.ok_or(TagError::DatabaseError(
"UserMetadata not found".to_string(),
))?;
self.apply_tags_to_metadata(metadata_model.id, &tag_applications, device_uuid).await
}
/// Apply semantic tags to an entry (legacy method - uses entry-scoped)
/// Returns the created user_metadata_tag models for syncing
///
/// Kept for backward compatibility; delegates directly to
/// `apply_semantic_tags_to_entry` with identical arguments.
pub async fn apply_semantic_tags(
&self,
entry_uuid: Uuid,
tag_applications: Vec<TagApplication>,
device_uuid: Uuid,
) -> Result<Vec<user_metadata_tag::Model>, TagError> {
self.apply_semantic_tags_to_entry(entry_uuid, tag_applications, device_uuid).await
}
/// Internal: Apply tags to a metadata record (shared logic)
/// Returns the created/updated user_metadata_tag models for syncing
async fn apply_tags_to_metadata(
&self,
metadata_db_id: i32,
tag_applications: &[TagApplication],
device_uuid: Uuid,
) -> Result<Vec<user_metadata_tag::Model>, TagError> {
let db = &*self.db;
let mut created_models = Vec::new();
// Convert tag UUIDs to database IDs
let tag_uuids: Vec<Uuid> = tag_applications.iter().map(|app| app.tag_id).collect();
let tag_models = crate::infra::db::entities::Tag::find()
@@ -125,11 +233,11 @@ impl UserMetadataManager {
tag_models.into_iter().map(|m| (m.uuid, m.id)).collect();
// Insert tag applications
for app in &tag_applications {
for app in tag_applications {
if let Some(&tag_db_id) = uuid_to_db_id.get(&app.tag_id) {
let tag_application = user_metadata_tag::ActiveModel {
id: NotSet,
user_metadata_id: Set(metadata_model.id),
user_metadata_id: Set(metadata_db_id),
tag_id: Set(tag_db_id),
applied_context: Set(app.applied_context.clone()),
applied_variant: Set(app.applied_variant.clone()),
@@ -152,41 +260,49 @@ impl UserMetadataManager {
};
// Insert or update if exists
if let Err(_) = tag_application.insert(&*db).await {
// If insert fails due to unique constraint, update existing
let existing = user_metadata_tag::Entity::find()
.filter(user_metadata_tag::Column::UserMetadataId.eq(metadata_model.id))
.filter(user_metadata_tag::Column::TagId.eq(tag_db_id))
.one(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
if let Some(existing_model) = existing {
let mut update_model: user_metadata_tag::ActiveModel =
existing_model.into();
update_model.applied_context = Set(app.applied_context.clone());
update_model.applied_variant = Set(app.applied_variant.clone());
update_model.confidence = Set(app.confidence);
update_model.source = Set(app.source.as_str().to_string());
update_model.instance_attributes =
Set(if app.instance_attributes.is_empty() {
None
} else {
Some(
serde_json::to_value(&app.instance_attributes)
.unwrap()
.into(),
)
});
update_model.updated_at = Set(Utc::now());
update_model.device_uuid = Set(device_uuid);
update_model
.update(&*db)
let model = match tag_application.clone().insert(&*db).await {
Ok(model) => model,
Err(_) => {
// If insert fails due to unique constraint, update existing
let existing = user_metadata_tag::Entity::find()
.filter(user_metadata_tag::Column::UserMetadataId.eq(metadata_db_id))
.filter(user_metadata_tag::Column::TagId.eq(tag_db_id))
.one(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
if let Some(existing_model) = existing {
let mut update_model: user_metadata_tag::ActiveModel =
existing_model.into();
update_model.applied_context = Set(app.applied_context.clone());
update_model.applied_variant = Set(app.applied_variant.clone());
update_model.confidence = Set(app.confidence);
update_model.source = Set(app.source.as_str().to_string());
update_model.instance_attributes =
Set(if app.instance_attributes.is_empty() {
None
} else {
Some(
serde_json::to_value(&app.instance_attributes)
.unwrap()
.into(),
)
});
update_model.updated_at = Set(Utc::now());
update_model.device_uuid = Set(device_uuid);
update_model.version = Set(update_model.version.unwrap() + 1);
update_model
.update(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?
} else {
continue;
}
}
}
};
created_models.push(model);
}
}
@@ -195,7 +311,7 @@ impl UserMetadataManager {
.record_tag_usage(&tag_applications)
.await?;
Ok(())
Ok(created_models)
}
/// Remove semantic tags from an entry
@@ -403,7 +519,8 @@ impl UserMetadataManager {
};
self.apply_semantic_tags(Uuid::new_v4(), vec![tag_application], device_uuid)
.await // TODO: Look up actual UUID
.await
.map(|_| ()) // TODO: Look up actual UUID and sync models
}
/// Apply multiple semantic tags to an entry (user-applied)
@@ -419,7 +536,8 @@ impl UserMetadataManager {
.collect();
self.apply_semantic_tags(Uuid::new_v4(), tag_applications, device_uuid)
.await // TODO: Look up actual UUID
.await
.map(|_| ()) // TODO: Look up actual UUID and sync models
}
/// Apply AI-suggested semantic tags with confidence scores
@@ -439,7 +557,8 @@ impl UserMetadataManager {
.collect();
self.apply_semantic_tags(Uuid::new_v4(), tag_applications, device_uuid)
.await // TODO: Look up actual UUID
.await
.map(|_| ()) // TODO: Look up actual UUID and sync models
}
/// Find entries by semantic tags (supports hierarchy)

View File

@@ -1,6 +1,6 @@
//! Apply semantic tags action
use super::{input::ApplyTagsInput, output::ApplyTagsOutput};
use super::{input::{ApplyTagsInput, TagTargets}, output::ApplyTagsOutput};
use crate::{
context::CoreContext,
domain::tag::{TagApplication, TagSource},
@@ -45,7 +45,7 @@ impl LibraryAction for ApplyTagsAction {
let device_id = library.id(); // Use library ID as device ID
let mut warnings = Vec::new();
let mut successfully_tagged_entries = Vec::new();
let mut successfully_tagged_count = 0;
// Create tag applications from input
let tag_applications: Vec<TagApplication> = self
@@ -71,32 +71,67 @@ impl LibraryAction for ApplyTagsAction {
})
.collect();
// Apply tags to each entry
for entry_id in &self.input.entry_ids {
// Look up actual entry UUID from entry ID
let entry_uuid = lookup_entry_uuid(&db.conn(), *entry_id)
.await
.map_err(|e| {
ActionError::Internal(format!("Failed to lookup entry UUID: {}", e))
})?;
match metadata_manager
.apply_semantic_tags(entry_uuid, tag_applications.clone(), device_id)
.await
{
Ok(()) => {
successfully_tagged_entries.push(*entry_id);
// Handle both content-based and entry-based tagging
match &self.input.targets {
TagTargets::Content(content_ids) => {
// Content-based tagging: apply to content identity (tags all instances)
for &content_id in content_ids {
match metadata_manager
.apply_semantic_tags_to_content(content_id, tag_applications.clone(), device_id)
.await
{
Ok(models) => {
successfully_tagged_count += 1;
// Sync each user_metadata_tag model
for model in models {
library
.sync_model(&model, crate::infra::sync::ChangeType::Insert)
.await
.map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?;
}
}
Err(e) => {
warnings.push(format!("Failed to tag content {}: {}", content_id, e));
}
}
}
Err(e) => {
warnings.push(format!("Failed to tag entry {}: {}", entry_id, e));
}
TagTargets::Entry(entry_ids) => {
// Entry-based tagging: apply to specific entry instance
for &entry_id in entry_ids {
// Look up actual entry UUID from entry ID
let entry_uuid = lookup_entry_uuid(&db.conn(), entry_id)
.await
.map_err(|e| {
ActionError::Internal(format!("Failed to lookup entry UUID: {}", e))
})?;
match metadata_manager
.apply_semantic_tags_to_entry(entry_uuid, tag_applications.clone(), device_id)
.await
{
Ok(models) => {
successfully_tagged_count += 1;
// Sync each user_metadata_tag model
for model in models {
library
.sync_model(&model, crate::infra::sync::ChangeType::Insert)
.await
.map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?;
}
}
Err(e) => {
warnings.push(format!("Failed to tag entry {}: {}", entry_id, e));
}
}
}
}
}
let output = ApplyTagsOutput::success(
successfully_tagged_entries.len(),
successfully_tagged_count,
self.input.tag_ids.len(),
self.input.tag_ids.clone(),
successfully_tagged_entries,
vec![], // TODO: Return target IDs if needed
);
if !warnings.is_empty() {

View File

@@ -6,10 +6,23 @@ use specta::Type;
use std::collections::HashMap;
use uuid::Uuid;
/// Specifies what to tag: content identities (all instances) or specific entries.
///
/// Serialized as an adjacently tagged enum, i.e. `{ "type": "Content" | "Entry", "ids": [...] }`,
/// so frontend callers send the discriminator under `type` and the ID list under `ids`.
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
#[serde(tag = "type", content = "ids")]
pub enum TagTargets {
/// Tag by content identity (applies to ALL instances of this content across devices).
/// This is the preferred/default approach.
Content(Vec<Uuid>),
/// Tag by entry database ID (applies to ONLY this specific file instance).
/// Use when you want instance-specific tags rather than content-wide ones.
Entry(Vec<i32>),
}
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub struct ApplyTagsInput {
/// Entry IDs to apply tags to
pub entry_ids: Vec<i32>,
/// What to tag: content identities or specific entries
pub targets: TagTargets,
/// Tag IDs to apply
pub tag_ids: Vec<Uuid>,
@@ -28,10 +41,22 @@ pub struct ApplyTagsInput {
}
impl ApplyTagsInput {
/// Create a simple user tag application
pub fn user_tags(entry_ids: Vec<i32>, tag_ids: Vec<Uuid>) -> Self {
/// Create a content-scoped user tag application (tags all instances)
pub fn user_tags_content(content_ids: Vec<Uuid>, tag_ids: Vec<Uuid>) -> Self {
Self {
entry_ids,
targets: TagTargets::Content(content_ids),
tag_ids,
source: Some(TagSource::User),
confidence: Some(1.0),
applied_context: None,
instance_attributes: None,
}
}
/// Create an entry-scoped user tag application (tags specific instance only)
pub fn user_tags_entry(entry_ids: Vec<i32>, tag_ids: Vec<Uuid>) -> Self {
Self {
targets: TagTargets::Entry(entry_ids),
tag_ids,
source: Some(TagSource::User),
confidence: Some(1.0),
@@ -42,13 +67,13 @@ impl ApplyTagsInput {
/// Create an AI tag application with confidence
pub fn ai_tags(
entry_ids: Vec<i32>,
content_ids: Vec<Uuid>,
tag_ids: Vec<Uuid>,
confidence: f32,
context: String,
) -> Self {
Self {
entry_ids,
targets: TagTargets::Content(content_ids),
tag_ids,
source: Some(TagSource::AI),
confidence: Some(confidence),
@@ -59,16 +84,27 @@ impl ApplyTagsInput {
/// Validate the input
pub fn validate(&self) -> Result<(), String> {
if self.entry_ids.is_empty() {
return Err("entry_ids cannot be empty".to_string());
}
let target_count = match &self.targets {
TagTargets::Content(ids) => {
if ids.is_empty() {
return Err("content identity IDs cannot be empty".to_string());
}
ids.len()
}
TagTargets::Entry(ids) => {
if ids.is_empty() {
return Err("entry IDs cannot be empty".to_string());
}
ids.len()
}
};
if self.tag_ids.is_empty() {
return Err("tag_ids cannot be empty".to_string());
}
if self.entry_ids.len() > 1000 {
return Err("Cannot apply tags to more than 1000 entries at once".to_string());
if target_count > 1000 {
return Err("Cannot apply tags to more than 1000 targets at once".to_string());
}
if self.tag_ids.len() > 50 {

View File

@@ -1,14 +1,16 @@
//! Create semantic tag action
use super::{input::CreateTagInput, output::CreateTagOutput};
use super::{input::{ApplyToTargets, CreateTagInput}, output::CreateTagOutput};
use crate::infra::sync::ChangeType;
use crate::{
context::CoreContext,
domain::tag::{PrivacyLevel, Tag, TagType},
domain::tag::{PrivacyLevel, Tag, TagApplication, TagSource, TagType},
infra::action::{error::ActionError, LibraryAction},
library::Library,
ops::tags::manager::TagManager,
ops::metadata::manager::UserMetadataManager,
};
use chrono::Utc;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use uuid::Uuid;
@@ -70,6 +72,66 @@ impl LibraryAction for CreateTagAction {
.await
.map_err(|e| ActionError::Internal(format!("Failed to sync tag: {}", e)))?;
// If apply_to is provided, apply the tag to those targets
if let Some(targets) = &self.input.apply_to {
let metadata_manager = UserMetadataManager::new(Arc::new(library.db().conn().clone()));
// Create a tag application for this newly created tag
let tag_application = TagApplication {
tag_id: tag_entity.uuid,
applied_context: None,
applied_variant: None,
confidence: 1.0,
source: TagSource::User,
instance_attributes: Default::default(),
created_at: Utc::now(),
device_uuid: device_id,
};
match targets {
ApplyToTargets::Content(content_ids) => {
// Apply to content identities (all instances)
for &content_id in content_ids {
let models = metadata_manager
.apply_semantic_tags_to_content(content_id, vec![tag_application.clone()], device_id)
.await
.map_err(|e| ActionError::Internal(format!("Failed to apply tag to content: {}", e)))?;
// Sync each user_metadata_tag model
for model in models {
library
.sync_model(&model, ChangeType::Insert)
.await
.map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?;
}
}
}
ApplyToTargets::Entry(entry_ids) => {
// Apply to specific entries
for &entry_id in entry_ids {
// Look up entry UUID from database ID
let entry_uuid = lookup_entry_uuid(&library.db().conn(), entry_id)
.await
.map_err(|e| ActionError::Internal(format!("Failed to lookup entry UUID: {}", e)))?;
// Apply the tag
let models = metadata_manager
.apply_semantic_tags_to_entry(entry_uuid, vec![tag_application.clone()], device_id)
.await
.map_err(|e| ActionError::Internal(format!("Failed to apply tag to entry: {}", e)))?;
// Sync each user_metadata_tag model
for model in models {
library
.sync_model(&model, ChangeType::Insert)
.await
.map_err(|e| ActionError::Internal(format!("Failed to sync tag association: {}", e)))?;
}
}
}
}
}
Ok(CreateTagOutput::from_entity(&tag_entity))
}
@@ -80,3 +142,19 @@ impl LibraryAction for CreateTagAction {
// Register library action
crate::register_library_action!(CreateTagAction, "tags.create");
/// Look up entry UUID from entry database ID.
///
/// Fetches the `entry` row by primary key and returns its UUID.
///
/// # Errors
/// Returns a human-readable `String` error when the query fails, when no entry
/// with `entry_id` exists, or when the entry row has a NULL UUID column.
async fn lookup_entry_uuid(db: &sea_orm::DatabaseConnection, entry_id: i32) -> Result<Uuid, String> {
    use crate::infra::db::entities::entry;
    use sea_orm::EntityTrait;

    // Primary-key lookup; a database failure is distinct from "row not found".
    let row = entry::Entity::find_by_id(entry_id)
        .one(db)
        .await
        .map_err(|e| format!("Database error: {}", e))?;

    match row {
        // The UUID column is nullable, so a found entry may still lack one.
        Some(model) => model
            .uuid
            .ok_or_else(|| format!("Entry {} has no UUID assigned", entry_id)),
        None => Err(format!("Entry with ID {} not found", entry_id)),
    }
}

View File

@@ -35,6 +35,19 @@ pub struct CreateTagInput {
/// Initial attributes
pub attributes: Option<HashMap<String, serde_json::Value>>,
/// Optional: Targets to immediately apply this tag to after creation
pub apply_to: Option<ApplyToTargets>,
}
/// Targets for immediately applying a newly created tag after creation.
///
/// Serialized as an adjacently tagged enum, i.e. `{ "type": "Content" | "Entry", "ids": [...] }`,
/// mirroring the wire format of the apply-tags input's target enum.
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
#[serde(tag = "type", content = "ids")]
pub enum ApplyToTargets {
/// Apply to content identities (tags all instances of each content).
Content(Vec<Uuid>),
/// Apply to specific entry database IDs (tags a single file instance each).
Entry(Vec<i32>),
}
impl CreateTagInput {
@@ -55,6 +68,7 @@ impl CreateTagInput {
privacy_level: None,
search_weight: None,
attributes: None,
apply_to: None,
}
}

View File

@@ -31,7 +31,7 @@ pub struct TagManager {
}
// Helper function to convert database model to domain model
fn model_to_domain(model: tag::Model) -> Result<Tag, TagError> {
pub(crate) fn model_to_domain(model: tag::Model) -> Result<Tag, TagError> {
let aliases: Vec<String> = model
.aliases
.as_ref()
@@ -689,43 +689,53 @@ impl TagManager {
) -> Result<Vec<Tag>, TagError> {
let db = &*self.db;
// Try FTS5 search first, fall back to LIKE patterns if FTS5 is not available
let mut tag_db_ids = Vec::new();
// Attempt FTS5 search (skip if FTS5 table doesn't exist)
if let Ok(fts_results) = db.query_all(
sea_orm::Statement::from_string(
sea_orm::DatabaseBackend::Sqlite,
format!(
"SELECT rowid FROM tag_search_fts WHERE tag_search_fts MATCH '{}' ORDER BY bm25(tag_search_fts)",
query.replace("\"", "\"\"")
)
)
).await {
for row in fts_results {
if let Ok(tag_id) = row.try_get::<i32>("", "rowid") {
tag_db_ids.push(tag_id);
}
}
}
// If FTS5 didn't return results, fall back to LIKE patterns
if tag_db_ids.is_empty() {
let search_pattern = format!("%{}%", query);
let like_models = tag::Entity::find()
.filter(
tag::Column::CanonicalName
.like(&search_pattern)
.or(tag::Column::DisplayName.like(&search_pattern))
.or(tag::Column::FormalName.like(&search_pattern))
.or(tag::Column::Abbreviation.like(&search_pattern))
.or(tag::Column::Description.like(&search_pattern)),
)
// If query is empty, return all tags (with filters applied)
if query.trim().is_empty() {
let all_models = tag::Entity::find()
.all(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
tag_db_ids = like_models.into_iter().map(|m| m.id).collect();
tag_db_ids = all_models.into_iter().map(|m| m.id).collect();
} else {
// Try FTS5 search first, fall back to LIKE patterns if FTS5 is not available
// Attempt FTS5 search (skip if FTS5 table doesn't exist)
if let Ok(fts_results) = db.query_all(
sea_orm::Statement::from_string(
sea_orm::DatabaseBackend::Sqlite,
format!(
"SELECT rowid FROM tag_search_fts WHERE tag_search_fts MATCH '{}' ORDER BY bm25(tag_search_fts)",
query.replace("\"", "\"\"")
)
)
).await {
for row in fts_results {
if let Ok(tag_id) = row.try_get::<i32>("", "rowid") {
tag_db_ids.push(tag_id);
}
}
}
// If FTS5 didn't return results, fall back to LIKE patterns
if tag_db_ids.is_empty() {
let search_pattern = format!("%{}%", query);
let like_models = tag::Entity::find()
.filter(
tag::Column::CanonicalName
.like(&search_pattern)
.or(tag::Column::DisplayName.like(&search_pattern))
.or(tag::Column::FormalName.like(&search_pattern))
.or(tag::Column::Abbreviation.like(&search_pattern))
.or(tag::Column::Description.like(&search_pattern)),
)
.all(&*db)
.await
.map_err(|e| TagError::DatabaseError(e.to_string()))?;
tag_db_ids = like_models.into_iter().map(|m| m.id).collect();
}
}
if tag_db_ids.is_empty() {

View File

@@ -70,10 +70,7 @@ impl SearchTagsInput {
/// Validate the input
pub fn validate(&self) -> Result<(), String> {
if self.query.trim().is_empty() {
return Err("query cannot be empty".to_string());
}
// Empty query is allowed (returns all tags)
if self.query.len() > 1000 {
return Err("query cannot exceed 1000 characters".to_string());
}

View File

@@ -32,6 +32,8 @@ pub struct VolumeItem {
pub write_speed_mbps: Option<u32>,
/// Device ID that owns this volume
pub device_id: Uuid,
/// Device slug for constructing SdPaths
pub device_slug: String,
}
#[derive(Debug, Clone, Serialize, Deserialize, Type)]

View File

@@ -13,6 +13,7 @@ use sea_orm::{ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter, QuerySelec
use serde::{Deserialize, Serialize};
use specta::Type;
use std::{collections::HashMap, sync::Arc};
use uuid::Uuid;
#[derive(Debug, Clone, Serialize, Deserialize, Type)]
pub enum VolumeFilter {
@@ -164,6 +165,13 @@ impl LibraryQuery for VolumeListQuery {
.all(db)
.await?;
// Fetch all devices to get slugs
let devices = entities::device::Entity::find().all(db).await?;
let device_slug_map: HashMap<Uuid, String> = devices
.into_iter()
.map(|d| (d.uuid, d.slug))
.collect();
// Create a map of tracked volumes by fingerprint
let mut tracked_map: HashMap<String, entities::volume::Model> = tracked_volumes
.into_iter()
@@ -184,6 +192,12 @@ impl LibraryQuery for VolumeListQuery {
let disk_type =
Self::infer_disk_type(&tracked_vol.device_model, &tracked_vol.volume_type);
// Get device slug for this volume
let device_slug = device_slug_map
.get(&tracked_vol.device_id)
.cloned()
.unwrap_or_else(|| "unknown".to_string());
volume_items.push(super::output::VolumeItem {
id: tracked_vol.uuid,
name: tracked_vol
@@ -206,6 +220,7 @@ impl LibraryQuery for VolumeListQuery {
read_speed_mbps: tracked_vol.read_speed_mbps.map(|s| s as u32),
write_speed_mbps: tracked_vol.write_speed_mbps.map(|s| s as u32),
device_id: tracked_vol.device_id,
device_slug,
});
}
@@ -215,6 +230,11 @@ impl LibraryQuery for VolumeListQuery {
for vol in all_volumes {
// Only show user-visible volumes
if !tracked_map.contains_key(&vol.fingerprint.0) && vol.is_user_visible {
let device_slug = device_slug_map
.get(&vol.device_id)
.cloned()
.unwrap_or_else(|| "unknown".to_string());
volume_items.push(super::output::VolumeItem {
id: vol.id,
name: vol.name.clone(),
@@ -231,6 +251,7 @@ impl LibraryQuery for VolumeListQuery {
read_speed_mbps: vol.read_speed_mbps.map(|s| s as u32),
write_speed_mbps: vol.write_speed_mbps.map(|s| s as u32),
device_id: vol.device_id,
device_slug,
});
}
}
@@ -243,6 +264,11 @@ impl LibraryQuery for VolumeListQuery {
// Only return volumes that are NOT tracked and are user-visible
for vol in all_volumes {
if !tracked_map.contains_key(&vol.fingerprint.0) && vol.is_user_visible {
let device_slug = device_slug_map
.get(&vol.device_id)
.cloned()
.unwrap_or_else(|| "unknown".to_string());
volume_items.push(super::output::VolumeItem {
id: vol.id,
name: vol.name.clone(),
@@ -259,6 +285,7 @@ impl LibraryQuery for VolumeListQuery {
read_speed_mbps: vol.read_speed_mbps.map(|s| s as u32),
write_speed_mbps: vol.write_speed_mbps.map(|s| s as u32),
device_id: vol.device_id,
device_slug,
});
}
}

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 92 KiB

View File

@@ -1,392 +1,394 @@
/*
* This file was automatically generated by a script.
* To regenerate this file, run: pnpm assets gen
* To regenerate this file, run: bun assets gen
*/
import Album_Light from './Album_Light.png';
import Album20 from './Album-20.png';
import Album from './Album.png';
import Alias_Light from './Alias_Light.png';
import Alias20 from './Alias-20.png';
import Alias from './Alias.png';
import AmazonS3 from './AmazonS3.png';
import AndroidPhotos from './AndroidPhotos.png';
import AppleFiles from './AppleFiles.png';
import ApplePhotos from './ApplePhotos.png';
import Application_Light from './Application_Light.png';
import Application from './Application.png';
import Archive_Light from './Archive_Light.png';
import Archive20 from './Archive-20.png';
import Archive from './Archive.png';
import Audio_Light from './Audio_Light.png';
import Audio20 from './Audio-20.png';
import Audio from './Audio.png';
import BackBlaze from './BackBlaze.png';
import Ball from './Ball.png';
import Book_Light from './Book_Light.png';
import Book20 from './Book-20.png';
import Book from './Book.png';
import BookBlue from './BookBlue.png';
import Box from './Box.png';
import CloudSync_Light from './CloudSync_Light.png';
import CloudSync from './CloudSync.png';
import Code20 from './Code-20.png';
import Collection_Light from './Collection_Light.png';
import Collection20 from './Collection-20.png';
import Collection from './Collection.png';
import CollectionSparkle_Light from './CollectionSparkle_Light.png';
import CollectionSparkle from './CollectionSparkle.png';
import Config20 from './Config-20.png';
import Database_Light from './Database_Light.png';
import Database20 from './Database-20.png';
import Database from './Database.png';
import DAV from './DAV.png';
import DeleteLocation from './DeleteLocation.png';
import Document_doc_Light from './Document_doc_Light.png';
import Document_doc from './Document_doc.png';
import Document_Light from './Document_Light.png';
import Document_pdf_Light from './Document_pdf_Light.png';
import Document_pdf from './Document_pdf.png';
import Document_srt from './Document_srt.png';
import Document_xls_Light from './Document_xls_Light.png';
import Document_xls from './Document_xls.png';
import Document_xmp from './Document_xmp.png';
import Document20 from './Document-20.png';
import Document from './Document.png';
import Dotfile20 from './Dotfile-20.png';
import Drive_Light from './Drive_Light.png';
import DriveAmazonS3_Light from './Drive-AmazonS3_Light.png';
import DriveAmazonS3 from './Drive-AmazonS3.png';
import DriveBackBlaze_Light from './Drive-BackBlaze_Light.png';
import DriveBackBlaze from './Drive-BackBlaze.png';
import Drivebox_Light from './Drive-box_Light.png';
import DriveBox from './Drive-Box.png';
import DriveDarker from './Drive-Darker.png';
import DriveDAV_Light from './Drive-DAV_Light.png';
import DriveDAV from './Drive-DAV.png';
import DriveDropbox_Light from './Drive-Dropbox_Light.png';
import DriveDropbox from './Drive-Dropbox.png';
import DriveGoogleDrive_Light from './Drive-GoogleDrive_Light.png';
import DriveGoogleDrive from './Drive-GoogleDrive.png';
import DriveMega_Light from './Drive-Mega_Light.png';
import DriveMega from './Drive-Mega.png';
import DriveOneDrive_Light from './Drive-OneDrive_Light.png';
import DriveOneDrive from './Drive-OneDrive.png';
import DriveOpenStack_Light from './Drive-OpenStack_Light.png';
import DriveOpenStack from './Drive-OpenStack.png';
import DrivePCloud_Light from './Drive-PCloud_Light.png';
import DrivePCloud from './Drive-PCloud.png';
import Drive from './Drive.png';
import Dropbox from './Dropbox.png';
import Encrypted_Light from './Encrypted_Light.png';
import Encrypted20 from './Encrypted-20.png';
import Encrypted from './Encrypted.png';
import Entity_Light from './Entity_Light.png';
import Entity from './Entity.png';
import Executable_Light_old from './Executable_Light_old.png';
import Executable_Light from './Executable_Light.png';
import Executable_old from './Executable_old.png';
import Executable20 from './Executable-20.png';
import Executable from './Executable.png';
import Face_Light from './Face_Light.png';
import Folder_Light from './Folder_Light.png';
import Folder20 from './Folder-20.png';
import Foldertagxmp from './Folder-tag-xmp.png';
import Folder from './Folder.png';
import FolderGrey_Light from './FolderGrey_Light.png';
import FolderGrey from './FolderGrey.png';
import FolderNoSpace_Light from './FolderNoSpace_Light.png';
import FolderNoSpace from './FolderNoSpace.png';
import Font20 from './Font-20.png';
import Game_Light from './Game_Light.png';
import Game from './Game.png';
import Globe_Light from './Globe_Light.png';
import Globe from './Globe.png';
import GlobeAlt from './GlobeAlt.png';
import GoogleDrive from './GoogleDrive.png';
import HDD_Light from './HDD_Light.png';
import HDD from './HDD.png';
import Heart_Light from './Heart_Light.png';
import Heart from './Heart.png';
import Home_Light from './Home_Light.png';
import Home from './Home.png';
import Image_Light from './Image_Light.png';
import Image20 from './Image-20.png';
import Image from './Image.png';
import Key_Light from './Key_Light.png';
import Key20 from './Key-20.png';
import Key from './Key.png';
import Keys_Light from './Keys_Light.png';
import Keys from './Keys.png';
import Laptop_Light from './Laptop_Light.png';
import Laptop from './Laptop.png';
import Link_Light from './Link_Light.png';
import Link20 from './Link-20.png';
import Link from './Link.png';
import Location from './Location.png';
import LocationManaged from './LocationManaged.png';
import LocationReplica from './LocationReplica.png';
import Lock_Light from './Lock_Light.png';
import Lock from './Lock.png';
import Mega from './Mega.png';
import Mesh_Light from './Mesh_Light.png';
import Mesh20 from './Mesh-20.png';
import Mesh from './Mesh.png';
import MiniSilverBox from './MiniSilverBox.png';
import Mobile_Light from './Mobile_Light.png';
import MobileAndroid from './Mobile-Android.png';
import Mobile from './Mobile.png';
import MoveLocation_Light from './MoveLocation_Light.png';
import MoveLocation from './MoveLocation.png';
import Movie_Light from './Movie_Light.png';
import Movie from './Movie.png';
import NewLocation from './NewLocation.png';
import Node_Light from './Node_Light.png';
import Node from './Node.png';
import OneDrive from './OneDrive.png';
import OpenStack from './OpenStack.png';
import Package_Light from './Package_Light.png';
import Package20 from './Package-20.png';
import Package from './Package.png';
import PC from './PC.png';
import PCloud from './PCloud.png';
import Scrapbook_Light from './Scrapbook_Light.png';
import Scrapbook from './Scrapbook.png';
import Screenshot_Light from './Screenshot_Light.png';
import Screenshot20 from './Screenshot-20.png';
import Screenshot from './Screenshot.png';
import ScreenshotAlt from './ScreenshotAlt.png';
import SD_Light from './SD_Light.png';
import SD from './SD.png';
import Search_Light from './Search_Light.png';
import Search from './Search.png';
import SearchAlt from './SearchAlt.png';
import Server_Light from './Server_Light.png';
import Server from './Server.png';
import SilverBox from './SilverBox.png';
import Spacedrop_Light from './Spacedrop_Light.png';
import Spacedrop1 from './Spacedrop-1.png';
import Spacedrop from './Spacedrop.png';
import Sync_Light from './Sync_Light.png';
import Sync from './Sync.png';
import Tablet_Light from './Tablet_Light.png';
import Tablet from './Tablet.png';
import Tags_Light from './Tags_Light.png';
import Tags from './Tags.png';
import Terminal_Light from './Terminal_Light.png';
import Terminal from './Terminal.png';
import Text_Light from './Text_Light.png';
import Text_txt from './Text_txt.png';
import Text20 from './Text-20.png';
import Text from './Text.png';
import TextAlt_Light from './TextAlt_Light.png';
import TextAlt from './TextAlt.png';
import TexturedMesh_Light from './TexturedMesh_Light.png';
import TexturedMesh from './TexturedMesh.png';
import Trash_Light from './Trash_Light.png';
import Trash from './Trash.png';
import Undefined_Light from './Undefined_Light.png';
import Undefined from './Undefined.png';
import Unknown20 from './Unknown-20.png';
import Video_Light from './Video_Light.png';
import Video20 from './Video-20.png';
import Video from './Video.png';
import WebPageArchive20 from './WebPageArchive-20.png';
import Widget_Light from './Widget_Light.png';
import Widget20 from './Widget-20.png';
import Widget from './Widget.png';
import Album_Light from "./Album_Light.png";
import Album20 from "./Album-20.png";
import Album from "./Album.png";
import Alias_Light from "./Alias_Light.png";
import Alias20 from "./Alias-20.png";
import Alias from "./Alias.png";
import AmazonS3 from "./AmazonS3.png";
import AndroidPhotos from "./AndroidPhotos.png";
import AppleFiles from "./AppleFiles.png";
import ApplePhotos from "./ApplePhotos.png";
import Application_Light from "./Application_Light.png";
import Application from "./Application.png";
import Archive_Light from "./Archive_Light.png";
import Archive20 from "./Archive-20.png";
import Archive from "./Archive.png";
import Audio_Light from "./Audio_Light.png";
import Audio20 from "./Audio-20.png";
import Audio from "./Audio.png";
import BackBlaze from "./BackBlaze.png";
import Ball from "./Ball.png";
import Book_Light from "./Book_Light.png";
import Book20 from "./Book-20.png";
import Book from "./Book.png";
import BookBlue from "./BookBlue.png";
import Box from "./Box.png";
import CloudSync_Light from "./CloudSync_Light.png";
import CloudSync from "./CloudSync.png";
import Code20 from "./Code-20.png";
import Collection_Light from "./Collection_Light.png";
import Collection20 from "./Collection-20.png";
import Collection from "./Collection.png";
import CollectionSparkle_Light from "./CollectionSparkle_Light.png";
import CollectionSparkle from "./CollectionSparkle.png";
import Config20 from "./Config-20.png";
import Database_Light from "./Database_Light.png";
import Database20 from "./Database-20.png";
import Database from "./Database.png";
import DAV from "./DAV.png";
import DeleteLocation from "./DeleteLocation.png";
import Document_doc_Light from "./Document_doc_Light.png";
import Document_doc from "./Document_doc.png";
import Document_Light from "./Document_Light.png";
import Document_pdf_Light from "./Document_pdf_Light.png";
import Document_pdf from "./Document_pdf.png";
import Document_srt from "./Document_srt.png";
import Document_xls_Light from "./Document_xls_Light.png";
import Document_xls from "./Document_xls.png";
import Document_xmp from "./Document_xmp.png";
import Document_memory from "./Document_memory.png";
import Document20 from "./Document-20.png";
import Document from "./Document.png";
import Dotfile20 from "./Dotfile-20.png";
import Drive_Light from "./Drive_Light.png";
import DriveAmazonS3_Light from "./Drive-AmazonS3_Light.png";
import DriveAmazonS3 from "./Drive-AmazonS3.png";
import DriveBackBlaze_Light from "./Drive-BackBlaze_Light.png";
import DriveBackBlaze from "./Drive-BackBlaze.png";
import Drivebox_Light from "./Drive-box_Light.png";
import DriveBox from "./Drive-Box.png";
import DriveDarker from "./Drive-Darker.png";
import DriveDAV_Light from "./Drive-DAV_Light.png";
import DriveDAV from "./Drive-DAV.png";
import DriveDropbox_Light from "./Drive-Dropbox_Light.png";
import DriveDropbox from "./Drive-Dropbox.png";
import DriveGoogleDrive_Light from "./Drive-GoogleDrive_Light.png";
import DriveGoogleDrive from "./Drive-GoogleDrive.png";
import DriveMega_Light from "./Drive-Mega_Light.png";
import DriveMega from "./Drive-Mega.png";
import DriveOneDrive_Light from "./Drive-OneDrive_Light.png";
import DriveOneDrive from "./Drive-OneDrive.png";
import DriveOpenStack_Light from "./Drive-OpenStack_Light.png";
import DriveOpenStack from "./Drive-OpenStack.png";
import DrivePCloud_Light from "./Drive-PCloud_Light.png";
import DrivePCloud from "./Drive-PCloud.png";
import Drive from "./Drive.png";
import Dropbox from "./Dropbox.png";
import Encrypted_Light from "./Encrypted_Light.png";
import Encrypted20 from "./Encrypted-20.png";
import Encrypted from "./Encrypted.png";
import Entity_Light from "./Entity_Light.png";
import Entity from "./Entity.png";
import Executable_Light_old from "./Executable_Light_old.png";
import Executable_Light from "./Executable_Light.png";
import Executable_old from "./Executable_old.png";
import Executable20 from "./Executable-20.png";
import Executable from "./Executable.png";
import Face_Light from "./Face_Light.png";
import Folder_Light from "./Folder_Light.png";
import Folder20 from "./Folder-20.png";
import Foldertagxmp from "./Folder-tag-xmp.png";
import Folder from "./Folder.png";
import FolderGrey_Light from "./FolderGrey_Light.png";
import FolderGrey from "./FolderGrey.png";
import FolderNoSpace_Light from "./FolderNoSpace_Light.png";
import FolderNoSpace from "./FolderNoSpace.png";
import Font20 from "./Font-20.png";
import Game_Light from "./Game_Light.png";
import Game from "./Game.png";
import Globe_Light from "./Globe_Light.png";
import Globe from "./Globe.png";
import GlobeAlt from "./GlobeAlt.png";
import GoogleDrive from "./GoogleDrive.png";
import HDD_Light from "./HDD_Light.png";
import HDD from "./HDD.png";
import Heart_Light from "./Heart_Light.png";
import Heart from "./Heart.png";
import Home_Light from "./Home_Light.png";
import Home from "./Home.png";
import Image_Light from "./Image_Light.png";
import Image20 from "./Image-20.png";
import Image from "./Image.png";
import Key_Light from "./Key_Light.png";
import Key20 from "./Key-20.png";
import Key from "./Key.png";
import Keys_Light from "./Keys_Light.png";
import Keys from "./Keys.png";
import Laptop_Light from "./Laptop_Light.png";
import Laptop from "./Laptop.png";
import Link_Light from "./Link_Light.png";
import Link20 from "./Link-20.png";
import Link from "./Link.png";
import Location from "./Location.png";
import LocationManaged from "./LocationManaged.png";
import LocationReplica from "./LocationReplica.png";
import Lock_Light from "./Lock_Light.png";
import Lock from "./Lock.png";
import Mega from "./Mega.png";
import Mesh_Light from "./Mesh_Light.png";
import Mesh20 from "./Mesh-20.png";
import Mesh from "./Mesh.png";
import MiniSilverBox from "./MiniSilverBox.png";
import Mobile_Light from "./Mobile_Light.png";
import MobileAndroid from "./Mobile-Android.png";
import Mobile from "./Mobile.png";
import MoveLocation_Light from "./MoveLocation_Light.png";
import MoveLocation from "./MoveLocation.png";
import Movie_Light from "./Movie_Light.png";
import Movie from "./Movie.png";
import NewLocation from "./NewLocation.png";
import Node_Light from "./Node_Light.png";
import Node from "./Node.png";
import OneDrive from "./OneDrive.png";
import OpenStack from "./OpenStack.png";
import Package_Light from "./Package_Light.png";
import Package20 from "./Package-20.png";
import Package from "./Package.png";
import PC from "./PC.png";
import PCloud from "./PCloud.png";
import Scrapbook_Light from "./Scrapbook_Light.png";
import Scrapbook from "./Scrapbook.png";
import Screenshot_Light from "./Screenshot_Light.png";
import Screenshot20 from "./Screenshot-20.png";
import Screenshot from "./Screenshot.png";
import ScreenshotAlt from "./ScreenshotAlt.png";
import SD_Light from "./SD_Light.png";
import SD from "./SD.png";
import Search_Light from "./Search_Light.png";
import Search from "./Search.png";
import SearchAlt from "./SearchAlt.png";
import Server_Light from "./Server_Light.png";
import Server from "./Server.png";
import SilverBox from "./SilverBox.png";
import Spacedrop_Light from "./Spacedrop_Light.png";
import Spacedrop1 from "./Spacedrop-1.png";
import Spacedrop from "./Spacedrop.png";
import Sync_Light from "./Sync_Light.png";
import Sync from "./Sync.png";
import Tablet_Light from "./Tablet_Light.png";
import Tablet from "./Tablet.png";
import Tags_Light from "./Tags_Light.png";
import Tags from "./Tags.png";
import Terminal_Light from "./Terminal_Light.png";
import Terminal from "./Terminal.png";
import Text_Light from "./Text_Light.png";
import Text_txt from "./Text_txt.png";
import Text20 from "./Text-20.png";
import Text from "./Text.png";
import TextAlt_Light from "./TextAlt_Light.png";
import TextAlt from "./TextAlt.png";
import TexturedMesh_Light from "./TexturedMesh_Light.png";
import TexturedMesh from "./TexturedMesh.png";
import Trash_Light from "./Trash_Light.png";
import Trash from "./Trash.png";
import Undefined_Light from "./Undefined_Light.png";
import Undefined from "./Undefined.png";
import Unknown20 from "./Unknown-20.png";
import Video_Light from "./Video_Light.png";
import Video20 from "./Video-20.png";
import Video from "./Video.png";
import WebPageArchive20 from "./WebPageArchive-20.png";
import Widget_Light from "./Widget_Light.png";
import Widget20 from "./Widget-20.png";
import Widget from "./Widget.png";
export {
Album20,
Album,
Album_Light,
Alias20,
Alias,
Alias_Light,
AmazonS3,
AndroidPhotos,
AppleFiles,
ApplePhotos,
Application,
Application_Light,
Archive20,
Archive,
Archive_Light,
Audio20,
Audio,
Audio_Light,
BackBlaze,
Ball,
Book20,
Book,
BookBlue,
Book_Light,
Box,
CloudSync,
CloudSync_Light,
Code20,
Collection20,
Collection,
CollectionSparkle,
CollectionSparkle_Light,
Collection_Light,
Config20,
DAV,
Database20,
Database,
Database_Light,
DeleteLocation,
Document20,
Document,
Document_Light,
Document_doc,
Document_doc_Light,
Document_pdf,
Document_pdf_Light,
Document_srt,
Document_xls,
Document_xls_Light,
Document_xmp,
Dotfile20,
DriveAmazonS3,
DriveAmazonS3_Light,
DriveBackBlaze,
DriveBackBlaze_Light,
DriveBox,
DriveDAV,
DriveDAV_Light,
DriveDarker,
DriveDropbox,
DriveDropbox_Light,
DriveGoogleDrive,
DriveGoogleDrive_Light,
DriveMega,
DriveMega_Light,
DriveOneDrive,
DriveOneDrive_Light,
DriveOpenStack,
DriveOpenStack_Light,
DrivePCloud,
DrivePCloud_Light,
Drivebox_Light,
Drive,
Drive_Light,
Dropbox,
Encrypted20,
Encrypted,
Encrypted_Light,
Entity,
Entity_Light,
Executable20,
Executable,
Executable_Light,
Executable_Light_old,
Executable_old,
Face_Light,
Folder20,
Foldertagxmp,
Folder,
FolderGrey,
FolderGrey_Light,
FolderNoSpace,
FolderNoSpace_Light,
Folder_Light,
Font20,
Game,
Game_Light,
Globe,
GlobeAlt,
Globe_Light,
GoogleDrive,
HDD,
HDD_Light,
Heart,
Heart_Light,
Home,
Home_Light,
Image20,
Image,
Image_Light,
Key20,
Key,
Key_Light,
Keys,
Keys_Light,
Laptop,
Laptop_Light,
Link20,
Link,
Link_Light,
Location,
LocationManaged,
LocationReplica,
Lock,
Lock_Light,
Mega,
Mesh20,
Mesh,
Mesh_Light,
MiniSilverBox,
MobileAndroid,
Mobile,
Mobile_Light,
MoveLocation,
MoveLocation_Light,
Movie,
Movie_Light,
NewLocation,
Node,
Node_Light,
OneDrive,
OpenStack,
PC,
PCloud,
Package20,
Package,
Package_Light,
SD,
SD_Light,
Scrapbook,
Scrapbook_Light,
Screenshot20,
Screenshot,
ScreenshotAlt,
Screenshot_Light,
Search,
SearchAlt,
Search_Light,
Server,
Server_Light,
SilverBox,
Spacedrop1,
Spacedrop,
Spacedrop_Light,
Sync,
Sync_Light,
Tablet,
Tablet_Light,
Tags,
Tags_Light,
Terminal,
Terminal_Light,
Text20,
Text,
TextAlt,
TextAlt_Light,
Text_Light,
Text_txt,
TexturedMesh,
TexturedMesh_Light,
Trash,
Trash_Light,
Undefined,
Undefined_Light,
Unknown20,
Video20,
Video,
Video_Light,
WebPageArchive20,
Widget20,
Widget,
Widget_Light
Album20,
Album,
Album_Light,
Alias20,
Alias,
Alias_Light,
AmazonS3,
AndroidPhotos,
AppleFiles,
ApplePhotos,
Application,
Application_Light,
Archive20,
Archive,
Archive_Light,
Audio20,
Audio,
Audio_Light,
BackBlaze,
Ball,
Book20,
Book,
BookBlue,
Book_Light,
Box,
CloudSync,
CloudSync_Light,
Code20,
Collection20,
Collection,
CollectionSparkle,
CollectionSparkle_Light,
Collection_Light,
Config20,
DAV,
Database20,
Database,
Database_Light,
DeleteLocation,
Document20,
Document,
Document_Light,
Document_doc,
Document_doc_Light,
Document_pdf,
Document_pdf_Light,
Document_srt,
Document_xls,
Document_xls_Light,
Document_xmp,
Document_memory,
Dotfile20,
DriveAmazonS3,
DriveAmazonS3_Light,
DriveBackBlaze,
DriveBackBlaze_Light,
DriveBox,
DriveDAV,
DriveDAV_Light,
DriveDarker,
DriveDropbox,
DriveDropbox_Light,
DriveGoogleDrive,
DriveGoogleDrive_Light,
DriveMega,
DriveMega_Light,
DriveOneDrive,
DriveOneDrive_Light,
DriveOpenStack,
DriveOpenStack_Light,
DrivePCloud,
DrivePCloud_Light,
Drivebox_Light,
Drive,
Drive_Light,
Dropbox,
Encrypted20,
Encrypted,
Encrypted_Light,
Entity,
Entity_Light,
Executable20,
Executable,
Executable_Light,
Executable_Light_old,
Executable_old,
Face_Light,
Folder20,
Foldertagxmp,
Folder,
FolderGrey,
FolderGrey_Light,
FolderNoSpace,
FolderNoSpace_Light,
Folder_Light,
Font20,
Game,
Game_Light,
Globe,
GlobeAlt,
Globe_Light,
GoogleDrive,
HDD,
HDD_Light,
Heart,
Heart_Light,
Home,
Home_Light,
Image20,
Image,
Image_Light,
Key20,
Key,
Key_Light,
Keys,
Keys_Light,
Laptop,
Laptop_Light,
Link20,
Link,
Link_Light,
Location,
LocationManaged,
LocationReplica,
Lock,
Lock_Light,
Mega,
Mesh20,
Mesh,
Mesh_Light,
MiniSilverBox,
MobileAndroid,
Mobile,
Mobile_Light,
MoveLocation,
MoveLocation_Light,
Movie,
Movie_Light,
NewLocation,
Node,
Node_Light,
OneDrive,
OpenStack,
PC,
PCloud,
Package20,
Package,
Package_Light,
SD,
SD_Light,
Scrapbook,
Scrapbook_Light,
Screenshot20,
Screenshot,
ScreenshotAlt,
Screenshot_Light,
Search,
SearchAlt,
Search_Light,
Server,
Server_Light,
SilverBox,
Spacedrop1,
Spacedrop,
Spacedrop_Light,
Sync,
Sync_Light,
Tablet,
Tablet_Light,
Tags,
Tags_Light,
Terminal,
Terminal_Light,
Text20,
Text,
TextAlt,
TextAlt_Light,
Text_Light,
Text_txt,
TexturedMesh,
TexturedMesh_Light,
Trash,
Trash_Light,
Undefined,
Undefined_Light,
Unknown20,
Video20,
Video,
Video_Light,
WebPageArchive20,
Widget20,
Widget,
Widget_Light,
};

View File

@@ -43,10 +43,12 @@ export function ExplorerLayout() {
goToPreviousPreview,
tagModeActive,
setTagModeActive,
viewMode,
} = useExplorer();
// Check if we're on Overview (hide inspector)
// Check if we're on Overview (hide inspector) or in Knowledge view (has its own inspector)
const isOverview = location.pathname === "/";
const isKnowledgeView = viewMode === "knowledge";
// Fetch locations to get current location info
const locationsQuery = useNormalizedCache<
@@ -112,7 +114,7 @@ export function ExplorerLayout() {
<div className="relative flex h-screen select-none overflow-hidden text-sidebar-ink bg-app rounded-[10px] border border-transparent frame">
<TopBar
sidebarWidth={sidebarVisible ? 224 : 0}
inspectorWidth={inspectorVisible && !isOverview ? 284 : 0}
inspectorWidth={inspectorVisible && !isOverview && !isKnowledgeView ? 284 : 0}
/>
<AnimatePresence initial={false} mode="popLayout">
@@ -145,8 +147,8 @@ export function ExplorerLayout() {
/>
<AnimatePresence initial={false}>
{/* Hide inspector on Overview screen */}
{inspectorVisible && !isOverview && (
{/* Hide inspector on Overview screen and Knowledge view (has its own) */}
{inspectorVisible && !isOverview && !isKnowledgeView && (
<motion.div
initial={{ width: 0 }}
animate={{ width: 280 }}

View File

@@ -1,5 +1,5 @@
import { useEffect } from "react";
import { useParams } from "react-router-dom";
import { useParams, useSearchParams } from "react-router-dom";
import { useExplorer } from "./context";
import { useNormalizedCache } from "../../context";
import { GridView } from "./views/GridView";
@@ -7,6 +7,7 @@ import { ListView } from "./views/ListView";
import { MediaView } from "./views/MediaView";
import { ColumnView } from "./views/ColumnView";
import { SizeView } from "./views/SizeView";
import { KnowledgeView } from "./views/KnowledgeView";
import { EmptyView } from "./views/EmptyView";
import { TopBarPortal } from "../../TopBar";
import {
@@ -24,6 +25,7 @@ import { ViewModeMenu } from "./ViewModeMenu";
export function ExplorerView() {
const { locationId } = useParams();
const [searchParams] = useSearchParams();
const {
sidebarVisible,
setSidebarVisible,
@@ -51,6 +53,26 @@ export function ExplorerView() {
resourceType: "location",
});
// Set currentPath from query parameter (for direct path navigation like volumes)
useEffect(() => {
const pathParam = searchParams.get("path");
if (pathParam) {
try {
const sdPath = JSON.parse(decodeURIComponent(pathParam));
const currentPathStr = JSON.stringify(currentPath);
const newPathStr = JSON.stringify(sdPath);
if (currentPathStr !== newPathStr) {
console.log("Setting currentPath from query param:", sdPath);
setCurrentPath(sdPath);
}
} catch (e) {
console.error("Failed to parse path query parameter:", e);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [searchParams]);
// Set currentPath from location ID (only when location changes)
useEffect(() => {
if (locationId && locationsQuery.data?.locations) {
@@ -141,6 +163,8 @@ export function ExplorerView() {
<ColumnView />
) : viewMode === "size" ? (
<SizeView />
) : viewMode === "knowledge" ? (
<KnowledgeView />
) : (
<MediaView />
)}

View File

@@ -29,12 +29,13 @@ export function TagAssignmentMode({ isActive, onExit }: TagAssignmentModeProps)
// Fetch all tags (for now, we'll use the first 10 as the default palette)
// TODO: Implement user-defined palettes
const { data: tagsData } = useNormalizedQuery({
wireMethod: 'query:tags.list',
input: null,
wireMethod: 'query:tags.search',
input: { query: '' },
resourceType: 'tag'
});
const allTags = tagsData?.tags ?? [];
// Extract tags from search results (tags is an array of { tag, relevance, ... })
const allTags = tagsData?.tags?.map((result: any) => result.tag) ?? [];
const paletteTags = allTags.slice(0, 10); // First 10 tags for now
// Keyboard shortcuts

View File

@@ -9,11 +9,12 @@ import {
ChartPieSlice,
Clock,
SquaresFour,
Sparkle,
} from "@phosphor-icons/react";
import clsx from "clsx";
import { TopBarButton } from "@sd/ui";
type ViewMode = "list" | "grid" | "column" | "media" | "size";
type ViewMode = "list" | "grid" | "column" | "media" | "size" | "knowledge";
interface ViewOption {
id: ViewMode | "timeline";
@@ -59,12 +60,19 @@ const viewOptions: ViewOption[] = [
color: "bg-green-500",
keybind: "⌘5",
},
{
id: "knowledge",
label: "Knowledge",
icon: Sparkle,
color: "bg-purple-500",
keybind: "⌘6",
},
{
id: "timeline",
label: "Timeline",
icon: Clock,
color: "bg-yellow-500",
keybind: "⌘6",
keybind: "⌘7",
},
];
@@ -136,9 +144,9 @@ export function ViewModeMenu({
top: `${position.top}px`,
right: `${position.right}px`,
}}
className="w-[280px] rounded-lg bg-menu border border-menu-line shadow-2xl p-2 z-50"
className="w-[240px] rounded-lg bg-menu border border-menu-line shadow-2xl p-2 z-50"
>
<div className="grid grid-cols-2 gap-1.5">
<div className="grid grid-cols-3 gap-1">
{viewOptions.map((option) => (
<button
key={`${option.id}-${option.label}`}
@@ -149,8 +157,8 @@ export function ViewModeMenu({
setIsOpen(false);
}}
className={clsx(
"flex items-center gap-3 px-2.5 py-2 rounded-md",
"transition-colors text-left",
"flex flex-col items-center gap-1.5 px-2 py-2 rounded-md",
"transition-colors",
option.id === "timeline" &&
"opacity-50 cursor-not-allowed",
viewMode === option.id
@@ -158,22 +166,15 @@ export function ViewModeMenu({
: "hover:bg-menu-hover",
)}
>
<div
className={clsx(
"flex items-center justify-center size-8 rounded-md",
option.color,
)}
>
<option.icon
className="size-4 text-white"
weight="bold"
/>
</div>
<div className="flex-1 min-w-0">
<div className="text-sm font-medium text-menu-ink">
<option.icon
className="size-6 text-menu-ink"
weight={viewMode === option.id ? "fill" : "bold"}
/>
<div className="flex flex-col items-center gap-0.5">
<div className="text-xs font-medium text-menu-ink">
{option.label}
</div>
<div className="text-[11px] text-menu-faint">
<div className="text-[10px] text-menu-faint">
{option.keybind}
</div>
</div>

View File

File diff suppressed because it is too large Load Diff

View File

@@ -36,8 +36,8 @@ interface ExplorerState {
canGoBack: boolean;
canGoForward: boolean;
viewMode: "grid" | "list" | "media" | "column" | "size";
setViewMode: (mode: "grid" | "list" | "media" | "column" | "size") => void;
viewMode: "grid" | "list" | "media" | "column" | "size" | "knowledge";
setViewMode: (mode: "grid" | "list" | "media" | "column" | "size" | "knowledge") => void;
sortBy: DirectorySortBy | MediaSortBy;
setSortBy: (sort: DirectorySortBy | MediaSortBy) => void;
@@ -70,7 +70,7 @@ export function ExplorerProvider({ children }: { children: ReactNode }) {
const [currentPath, setCurrentPathInternal] = useState<SdPath | null>(null);
const [history, setHistory] = useState<SdPath[]>([]);
const [historyIndex, setHistoryIndex] = useState(-1);
const [viewMode, setViewMode] = useState<"grid" | "list" | "media" | "column" | "size">("grid");
const [viewMode, setViewMode] = useState<"grid" | "list" | "media" | "column" | "size" | "knowledge">("grid");
const [sortBy, setSortBy] = useState<DirectorySortBy | MediaSortBy>("name");
// Update sort when switching to media view

View File

@@ -0,0 +1,394 @@
import {
Sparkle,
Tag as TagIcon,
Chat,
Database,
FilmStrip,
Image,
MusicNote,
File as FileIcon,
Folder,
FileText,
} from "@phosphor-icons/react";
import { KnowledgeInspector } from "../../../inspectors/KnowledgeInspector";
import { useExplorer } from "../context";
import { useNormalizedCache } from "../../../context";
import type { File, ContentKind } from "@sd/ts-client";
import { useMemo } from "react";
import clsx from "clsx";
import { File as FileComponent } from "../File";
// Phosphor icon component used as the fallback/empty-state glyph for each
// content kind in the Knowledge view (see ContentPile). Several kinds share
// a generic icon (FileText / FileIcon) where no closer match exists.
const CONTENT_KIND_ICONS: Record<ContentKind, React.ElementType> = {
image: Image,
video: FilmStrip,
audio: MusicNote,
document: FileText,
archive: Folder,
code: FileText,
text: FileText,
database: Database,
book: FileText,
font: FileText,
mesh: FileIcon,
config: FileText,
encrypted: FileIcon,
key: FileIcon,
executable: FileIcon,
binary: FileIcon,
spreadsheet: FileText,
presentation: FileText,
email: FileText,
calendar: FileText,
contact: FileText,
web: FileText,
shortcut: FileIcon,
package: Folder,
model_entry: FileIcon,
unknown: FileIcon,
};
// Human-readable (plural) section label for each content kind, shown under
// the content piles and in the summary text of the Knowledge view.
// Keys must stay in sync with CONTENT_KIND_ICONS and the ContentKind type.
const CONTENT_KIND_LABELS: Record<ContentKind, string> = {
image: "Images",
video: "Videos",
audio: "Audio",
document: "Documents",
archive: "Archives",
code: "Code",
text: "Text",
database: "Databases",
book: "Books",
font: "Fonts",
mesh: "3D Models",
config: "Config",
encrypted: "Encrypted",
key: "Keys",
executable: "Apps",
binary: "Binary",
spreadsheet: "Spreadsheets",
presentation: "Presentations",
email: "Emails",
calendar: "Calendar",
contact: "Contacts",
web: "Web",
shortcut: "Shortcuts",
package: "Packages",
model_entry: "Models",
unknown: "Other",
};
// Explorer "Knowledge" view: lists the current directory, groups files by
// content kind, aggregates tag usage, and renders summary / conversation /
// sidecar panels. The Conversations and Intelligence sections are hard-coded
// placeholder data in this revision — presumably to be wired to real
// backends later (TODO confirm). Renders its own inspector column
// (KnowledgeInspector) instead of the shared explorer inspector.
export function KnowledgeView() {
const { inspectorVisible, currentPath, sortBy } = useExplorer();
// Full (unpaginated: limit=null) listing of the current directory.
// Disabled until a path is selected, so `files` is empty on first render.
const directoryQuery = useNormalizedCache({
wireMethod: "query:files.directory_listing",
input: currentPath
? {
path: currentPath,
limit: null,
offset: 0,
sort_by: sortBy,
}
: null,
resourceType: "file",
enabled: !!currentPath,
});
const files = (directoryQuery.data?.files || []) as File[];
// Group files by content kind
// Files without a content identity fall into the "unknown" bucket.
// Only the 6 most populated kinds are kept for the pile grid.
const filesByKind = useMemo(() => {
const groups = new Map<ContentKind, File[]>();
files.forEach((file) => {
const kind = file.content_identity?.kind || "unknown";
if (!groups.has(kind)) {
groups.set(kind, []);
}
groups.get(kind)!.push(file);
});
// Sort by count and return top categories
return Array.from(groups.entries())
.sort((a, b) => b[1].length - a[1].length)
.slice(0, 6);
}, [files]);
// Collect all unique tags
// Deduplicates by tag id, counting occurrences across files; result is
// sorted by usage count (descending). Color falls back to a default blue.
const allTags = useMemo(() => {
const tagMap = new Map<string, { id: string; name: string; color: string; count: number }>();
files.forEach((file) => {
file.tags?.forEach((tag) => {
if (tagMap.has(tag.id)) {
tagMap.get(tag.id)!.count++;
} else {
tagMap.set(tag.id, {
id: tag.id,
name: tag.canonical_name,
color: tag.color || "#3B82F6",
count: 1,
});
}
});
});
return Array.from(tagMap.values()).sort((a, b) => b.count - a.count);
}, [files]);
return (
<div className="flex h-full gap-2">
{/* Main content area */}
<div className="flex-1 overflow-y-auto no-scrollbar px-6 py-4">
<div className="max-w-5xl space-y-6">
{/* Header */}
<div className="flex items-center gap-3">
<Sparkle className="size-8 text-accent" weight="fill" />
<div>
<h1 className="text-2xl font-semibold text-ink">Knowledge View</h1>
<p className="text-sm text-ink-dull">
AI-powered insights for {files.length} items
</p>
</div>
</div>
{/* Content Piles */}
<Section title="Content" icon={Folder}>
<div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-6 gap-4">
{filesByKind.map(([kind, kindFiles]) => (
<ContentPile
key={kind}
kind={kind}
files={kindFiles.slice(0, 3)}
totalCount={kindFiles.length}
/>
))}
</div>
</Section>
{/* Tags */}
{allTags.length > 0 && (
<Section title="Tags" icon={TagIcon}>
<div className="flex flex-wrap gap-2">
{allTags.map((tag) => (
<button
key={tag.id}
className="flex items-center gap-2 px-3 py-1.5 rounded-full bg-app-box hover:bg-app-hover border border-app-line transition-colors"
>
<div
className="size-2 rounded-full"
style={{ backgroundColor: tag.color }}
/>
<span className="text-xs font-medium text-ink">{tag.name}</span>
<span className="text-xs text-ink-dull">({tag.count})</span>
</button>
))}
</div>
</Section>
)}
{/* Summary & Conversations */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-4">
{/* Summary */}
<Section title="Summary" icon={Sparkle}>
<div className="space-y-2 text-sm text-ink-dull">
<p>
This directory contains {files.length} items across{" "}
{filesByKind.length} content types.
</p>
{filesByKind.length > 0 && (
<p>
Most common type: {CONTENT_KIND_LABELS[filesByKind[0][0]]} (
{filesByKind[0][1].length} items)
</p>
)}
{allTags.length > 0 && (
<p>Tagged items: {allTags.reduce((sum, tag) => sum + tag.count, 0)}</p>
)}
</div>
</Section>
{/* Conversations — static placeholder cards, not backed by data */}
<Section title="Conversations" icon={Chat}>
<div className="grid grid-cols-2 gap-2">
<ConversationCard
title="Organize photos"
preview="Can you help sort these by date?"
time="2h ago"
/>
<ConversationCard
title="Find duplicates"
preview="Looking for duplicate files in..."
time="Yesterday"
/>
</div>
</Section>
</div>
{/* Intelligence Sidecars — static placeholder entries, not backed by data */}
<Section title="Intelligence" icon={Database}>
<div className="grid grid-cols-1 md:grid-cols-2 gap-3">
<SidecarItem
kind="OCR Text"
variant="Extracted text from 12 images"
status="ready"
size="2.4 MB"
/>
<SidecarItem
kind="Thumbnails"
variant="Generated for 48 media files"
status="ready"
size="8.1 MB"
/>
<SidecarItem
kind="Video Transcripts"
variant="Speech-to-text from 3 videos"
status="pending"
size="—"
/>
<SidecarItem
kind="Embeddings"
variant="Semantic vectors for search"
status="ready"
size="14.2 MB"
/>
</div>
</Section>
</div>
</div>
{/* Dedicated Knowledge Inspector */}
{inspectorVisible && (
<div className="w-96 h-full shrink-0 pr-2 pb-2">
<div className="h-full rounded-lg overflow-hidden bg-sidebar/65">
<KnowledgeInspector />
</div>
</div>
)}
</div>
);
}
/**
 * Labeled section: an icon + heading row, with arbitrary children rendered
 * below it. Purely presentational.
 */
function Section(props: {
	title: string;
	icon: React.ElementType;
	children: React.ReactNode;
}) {
	const { title, icon: HeaderIcon, children } = props;
	return (
		<div className="space-y-3">
			<div className="flex items-center gap-2">
				<HeaderIcon className="size-4 text-ink-dull" weight="bold" />
				<h2 className="text-sm font-semibold text-ink">{title}</h2>
			</div>
			{children}
		</div>
	);
}
/**
 * Clickable "pile" of up to a few stacked file thumbnails for one content
 * kind, with the kind label and total item count beneath. Falls back to the
 * kind's icon when there are no files to preview.
 */
function ContentPile(props: {
	kind: ContentKind;
	files: File[];
	totalCount: number;
}) {
	const { kind, files, totalCount } = props;
	const FallbackIcon = CONTENT_KIND_ICONS[kind];
	return (
		<button className="group flex flex-col items-center gap-2 p-3 rounded-lg hover:bg-app-box/40 transition-colors">
			{/* Stacked file previews */}
			<div className="relative w-full aspect-square">
				{files.length === 0 ? (
					<div className="flex items-center justify-center w-full h-full">
						<FallbackIcon className="size-12 text-ink-faint" weight="thin" />
					</div>
				) : (
					files.map((file, index) => {
						// Fan each preview out with a small rotation and vertical
						// offset so the stack reads as a pile; earlier files sit on top.
						const tilt = (index - 1) * 3;
						const drop = index * 2;
						return (
							<div
								key={file.id}
								className="absolute inset-0"
								style={{
									transform: `rotate(${tilt}deg) translateY(${drop}px)`,
									zIndex: files.length - index,
								}}
							>
								<FileComponent.Thumb
									file={file}
									size={120}
									iconScale={0.5}
									className="w-full h-full rounded-md shadow-sm"
								/>
							</div>
						);
					})
				)}
			</div>
			{/* Label */}
			<div className="text-center">
				<div className="text-xs font-medium text-ink">{CONTENT_KIND_LABELS[kind]}</div>
				<div className="text-[10px] text-ink-dull">{totalCount} items</div>
			</div>
		</button>
	);
}
function ConversationCard({
title,
preview,
time,
}: {
title: string;
preview: string;
time: string;
}) {
return (
<button className="flex flex-col gap-1.5 p-3 rounded-lg bg-app-box/40 hover:bg-app-box border border-app-line/50 hover:border-app-line transition-colors text-left">
<div className="flex items-start justify-between gap-2">
<div className="text-xs font-medium text-ink truncate">{title}</div>
<Sparkle className="size-3 text-accent shrink-0" weight="fill" />
</div>
<p className="text-[11px] text-ink-dull line-clamp-2">{preview}</p>
<span className="text-[10px] text-ink-faint">{time}</span>
</button>
);
}
/**
 * Row describing one intelligence sidecar artifact (e.g. OCR text,
 * thumbnails): kind, variant description, size, and a status badge.
 */
function SidecarItem(props: {
	kind: string;
	variant: string;
	status: "ready" | "pending";
	size: string;
}) {
	const { kind, variant, status, size } = props;
	// Badge coloring derived from the (two-valued) processing status.
	const badgeClass =
		status === "ready"
			? "bg-accent/20 text-accent"
			: "bg-ink-faint/20 text-ink-dull";
	return (
		<div className="flex items-start gap-3 p-3 rounded-lg bg-app-box/40 border border-app-line/50">
			<div className="size-10 shrink-0 rounded-md bg-accent/10 border border-accent/20 flex items-center justify-center">
				<Database className="size-5 text-accent" weight="bold" />
			</div>
			<div className="flex-1 min-w-0">
				<div className="text-xs font-medium text-ink">{kind}</div>
				<div className="text-[11px] text-ink-dull">{variant}</div>
				<div className="text-[10px] text-ink-faint mt-1">{size}</div>
			</div>
			<span
				className={clsx(
					"text-[10px] font-semibold px-2 py-0.5 rounded-full shrink-0",
					badgeClass,
				)}
			>
				{status}
			</span>
		</div>
	);
}

View File

@@ -22,6 +22,10 @@ interface SpaceItemProps {
className?: string;
/** Optional icon weight (default: "bold") */
iconWeight?: "thin" | "light" | "regular" | "bold" | "fill" | "duotone";
/** Optional onClick handler to override default navigation */
onClick?: () => void;
/** Volume data for constructing explorer path */
volumeData?: { device_slug: string; mount_path: string };
}
function getItemIcon(itemType: ItemType): any {
@@ -64,14 +68,25 @@ function getItemLabel(itemType: ItemType): string {
return "Unknown";
}
function getItemPath(itemType: ItemType): string | null {
function getItemPath(itemType: ItemType, volumeData?: { device_slug: string; mount_path: string }): string | null {
if (itemType === "Overview") return "/";
if (itemType === "Recents") return "/recents";
if (itemType === "Favorites") return "/favorites";
if (typeof itemType === "object" && "Location" in itemType)
return `/location/${itemType.Location.location_id}`;
if (typeof itemType === "object" && "Volume" in itemType)
if (typeof itemType === "object" && "Volume" in itemType) {
// Navigate to explorer with volume's root path
if (volumeData) {
const sdPath = {
Physical: {
device_slug: volumeData.device_slug,
path: volumeData.mount_path || "/",
},
};
return `/explorer?path=${encodeURIComponent(JSON.stringify(sdPath))}`;
}
return `/volume/${itemType.Volume.volume_id}`;
}
if (typeof itemType === "object" && "Tag" in itemType)
return `/tag/${itemType.Tag.tag_id}`;
return null;
@@ -82,6 +97,8 @@ export function SpaceItem({
rightComponent,
className,
iconWeight = "bold",
onClick,
volumeData,
}: SpaceItemProps) {
const navigate = useNavigate();
const location = useLocation();
@@ -101,13 +118,15 @@ export function SpaceItem({
// Handle proper SpaceItem
iconData = getItemIcon(item.item_type);
label = getItemLabel(item.item_type);
path = getItemPath(item.item_type);
path = getItemPath(item.item_type, volumeData);
}
const isActive = location.pathname === path;
const handleClick = () => {
if (path) {
if (onClick) {
onClick();
} else if (path) {
navigate(path);
}
};

View File

@@ -85,14 +85,15 @@ export function TagsGroup({ isCollapsed, onToggle }: TagsGroupProps) {
const createTag = useLibraryMutation('tags.create');
// Fetch tags with real-time updates
// Fetch tags with real-time updates using search with empty query
const { data: tagsData, isLoading } = useNormalizedQuery({
wireMethod: 'query:tags.list',
input: null,
wireMethod: 'query:tags.search',
input: { query: '' },
resourceType: 'tag'
});
const tags = tagsData?.tags ?? [];
// Extract tags from search results (tags is an array of { tag, relevance, ... })
const tags = tagsData?.tags?.map((result: any) => result.tag) ?? [];
const handleCreateTag = async () => {
if (!newTagName.trim()) return;
@@ -100,6 +101,7 @@ export function TagsGroup({ isCollapsed, onToggle }: TagsGroupProps) {
try {
const result = await createTag.mutateAsync({
canonical_name: newTagName.trim(),
aliases: [],
color: `#${Math.floor(Math.random() * 16777215).toString(16).padStart(6, '0')}`,
});

View File

@@ -1,5 +1,6 @@
import { CaretRight } from "@phosphor-icons/react";
import clsx from "clsx";
import { useNavigate } from "react-router-dom";
import { useNormalizedCache } from "@sd/ts-client";
import { SpaceItem } from "./SpaceItem";
import type { VolumeItem } from "@sd/ts-client";
@@ -16,6 +17,8 @@ export function VolumesGroup({
onToggle,
filter = "All",
}: VolumesGroupProps) {
const navigate = useNavigate();
const { data: volumesData } = useNormalizedCache({
wireMethod: "query:volumes.list",
input: { filter },
@@ -76,6 +79,10 @@ export function VolumesGroup({
},
} as any
}
volumeData={{
device_slug: volume.device_slug,
mount_path: volume.mount_point || "/",
}}
rightComponent={getVolumeBadges(volume)}
className={
volume.is_tracked

View File

@@ -1,7 +1,7 @@
import { useState, useEffect } from 'react';
import { AnimatePresence, motion } from 'framer-motion';
import { MagnifyingGlass, Plus } from '@phosphor-icons/react';
import clsx from 'clsx';
import { Popover, usePopover } from '@sd/ui';
import { useNormalizedQuery, useLibraryMutation } from '../../context';
import type { Tag } from '@sd/ts-client';
@@ -11,6 +11,10 @@ interface TagSelectorProps {
contextTags?: Tag[];
autoFocus?: boolean;
className?: string;
/** Optional file ID to apply newly created tags to */
fileId?: string;
/** Optional content identity UUID (preferred for content-based tagging) */
contentId?: string;
}
/**
@@ -22,21 +26,24 @@ export function TagSelector({
onClose,
contextTags = [],
autoFocus = true,
className
className,
fileId,
contentId
}: TagSelectorProps) {
const [query, setQuery] = useState('');
const [selectedIndex, setSelectedIndex] = useState(0);
const createTag = useLibraryMutation('tags.create');
// Fetch all tags
// Fetch all tags using search with empty query
const { data: tagsData } = useNormalizedQuery({
wireMethod: 'query:tags.list',
input: null,
wireMethod: 'query:tags.search',
input: { query: '' },
resourceType: 'tag'
});
const allTags = tagsData?.tags ?? [];
// Extract tags from search results (tags is an array of { tag, relevance, ... })
const allTags = tagsData?.tags?.map((result: any) => result.tag) ?? [];
// Check if query matches an existing tag
const exactMatch = allTags.find(
@@ -93,7 +100,13 @@ export function TagSelector({
try {
const newTag = await createTag.mutateAsync({
canonical_name: query.trim(),
aliases: [],
color: `#${Math.floor(Math.random() * 16777215).toString(16).padStart(6, '0')}`, // Random color
apply_to: contentId
? { type: 'Content', ids: [contentId] }
: fileId
? { type: 'Entry', ids: [parseInt(fileId)] }
: undefined,
});
// Select the newly created tag
@@ -108,10 +121,10 @@ export function TagSelector({
};
return (
<div className={clsx('flex flex-col bg-menu border border-menu-line rounded-lg shadow-lg overflow-hidden', className)}>
<div className={clsx('flex flex-col overflow-hidden', className)}>
{/* Search Input */}
<div className="flex items-center gap-2 px-3 py-2 border-b border-menu-line">
<MagnifyingGlass size={16} className="text-menu-ink-dull flex-shrink-0" />
<div className="flex items-center gap-2 px-3 py-2 border-b border-app-line">
<MagnifyingGlass size={16} className="text-ink-dull flex-shrink-0" />
<input
type="text"
value={query}
@@ -119,7 +132,7 @@ export function TagSelector({
onKeyDown={handleKeyDown}
placeholder="Search tags..."
autoFocus={autoFocus}
className="flex-1 bg-transparent text-sm text-menu-ink placeholder:text-menu-ink-faint outline-none"
className="flex-1 bg-transparent text-sm text-ink placeholder:text-ink-faint outline-none"
/>
</div>
@@ -131,24 +144,24 @@ export function TagSelector({
onClick={handleCreateTag}
onMouseEnter={() => setSelectedIndex(-1)}
className={clsx(
'flex items-center gap-2 w-full px-3 py-2 text-sm transition-colors border-b border-menu-line',
'flex items-center gap-2 w-full px-3 py-2 text-sm transition-colors border-b border-app-line',
selectedIndex === -1
? 'bg-menu-hover text-menu-ink'
: 'text-menu-ink-dull hover:bg-menu-hover hover:text-menu-ink'
? 'bg-app-hover text-ink'
: 'text-ink-dull hover:bg-app-hover hover:text-ink'
)}
>
<Plus size={16} weight="bold" className="flex-shrink-0" />
<span className="flex-1 text-left">
Create tag "<strong>{query}</strong>"
</span>
<kbd className="text-xs text-menu-ink-faint px-1.5 py-0.5 rounded bg-menu-line">
<kbd className="text-xs text-ink-faint px-1.5 py-0.5 rounded bg-app-line">
</kbd>
</button>
)}
{filteredTags.length === 0 && !query.trim() ? (
<div className="px-3 py-4 text-sm text-menu-ink-dull text-center">
<div className="px-3 py-4 text-sm text-ink-dull text-center">
No tags yet
</div>
) : filteredTags.length === 0 && query.trim() ? null : (
@@ -160,8 +173,8 @@ export function TagSelector({
className={clsx(
'flex items-center gap-2 w-full px-3 py-2 text-sm transition-colors',
index === selectedIndex
? 'bg-menu-hover text-menu-ink'
: 'text-menu-ink-dull hover:bg-menu-hover hover:text-menu-ink'
? 'bg-app-hover text-ink'
: 'text-ink-dull hover:bg-app-hover hover:text-ink'
)}
>
{/* Color dot */}
@@ -175,7 +188,7 @@ export function TagSelector({
{/* Namespace badge */}
{tag.namespace && (
<span className="text-xs text-menu-ink-faint px-1.5 py-0.5 rounded bg-menu-line">
<span className="text-xs text-ink-faint px-1.5 py-0.5 rounded bg-app-line">
{tag.namespace}
</span>
)}
@@ -191,49 +204,34 @@ interface TagSelectorButtonProps {
onSelect: (tag: Tag) => void;
trigger: React.ReactNode;
contextTags?: Tag[];
/** Optional file ID to apply newly created tags to */
fileId?: string;
/** Optional content identity UUID (preferred for content-based tagging) */
contentId?: string;
}
/**
* Wrapper component that shows TagSelector in a dropdown when trigger is clicked
*/
export function TagSelectorButton({ onSelect, trigger, contextTags }: TagSelectorButtonProps) {
const [isOpen, setIsOpen] = useState(false);
export function TagSelectorButton({ onSelect, trigger, contextTags, fileId, contentId }: TagSelectorButtonProps) {
const popover = usePopover();
return (
<div className="relative">
<div onClick={() => setIsOpen(!isOpen)}>
{trigger}
</div>
<AnimatePresence>
{isOpen && (
<>
{/* Backdrop */}
<div
className="fixed inset-0 z-40"
onClick={() => setIsOpen(false)}
/>
{/* Dropdown */}
<motion.div
initial={{ opacity: 0, y: -8 }}
animate={{ opacity: 1, y: 0 }}
exit={{ opacity: 0, y: -8 }}
transition={{ duration: 0.15 }}
className="absolute top-full left-0 mt-1 w-64 z-50"
>
<TagSelector
onSelect={(tag) => {
onSelect(tag);
setIsOpen(false);
}}
onClose={() => setIsOpen(false)}
contextTags={contextTags}
/>
</motion.div>
</>
)}
</AnimatePresence>
</div>
<Popover
popover={popover}
trigger={trigger}
className="w-64 p-0"
>
<TagSelector
onSelect={(tag) => {
onSelect(tag);
popover.setOpen(false);
}}
onClose={() => popover.setOpen(false)}
contextTags={contextTags}
fileId={fileId}
contentId={contentId}
/>
</Popover>
);
}

View File

@@ -47,9 +47,6 @@ interface FileInspectorProps {
export function FileInspector({ file }: FileInspectorProps) {
const [activeTab, setActiveTab] = useState("overview");
const applyTag = useLibraryMutation('tags.apply');
const removeTag = useLibraryMutation('tags.remove');
const fileQuery = useNormalizedCache<{ file_id: string }, File>({
wireMethod: "query:files.by_id",
input: { file_id: file?.id || "" },
@@ -114,6 +111,9 @@ function OverviewTab({ file }: { file: File }) {
});
};
// Tag mutations
const applyTag = useLibraryMutation("tags.apply");
// AI Processing mutations
const extractText = useLibraryMutation("media.ocr.extract");
const transcribeAudio = useLibraryMutation("media.speech.transcribe");
@@ -150,6 +150,7 @@ function OverviewTab({ file }: { file: File }) {
<div className="px-2 text-center">
<h4 className="text-sm font-semibold text-sidebar-ink truncate">
{file.name}
{file.extension ? `.${file.extension}` : ""}
</h4>
<p className="text-xs text-sidebar-inkDull mt-1">{fileKind}</p>
</div>
@@ -317,29 +318,31 @@ function OverviewTab({ file }: { file: File }) {
{/* Tags */}
<Section title="Tags" icon={TagIcon}>
<div className="flex flex-wrap gap-1.5">
{file.tags && file.tags.length > 0 && file.tags.map((tag) => (
<Tag
key={tag.id}
color={tag.color || "#3B82F6"}
size="sm"
>
{tag.canonical_name}
</Tag>
))}
{file.tags &&
file.tags.length > 0 &&
file.tags.map((tag) => (
<Tag key={tag.id} color={tag.color || "#3B82F6"} size="sm">
{tag.canonical_name}
</Tag>
))}
{/* Add Tag Button */}
<TagSelectorButton
onSelect={async (tag) => {
// Use content-based tagging by default (tags all instances)
// Fall back to entry-based if no content identity
await applyTag.mutateAsync({
file_ids: [file.id],
tag_applications: [{
tag_id: tag.id,
source: 'User',
confidence: 1.0,
}],
targets: file.content_identity?.uuid
? { type: "Content", ids: [file.content_identity.uuid] }
: { type: "Entry", ids: [parseInt(file.id)] },
tag_ids: [tag.id],
source: "User",
confidence: 1.0,
});
}}
contextTags={file.tags || []}
fileId={file.id}
contentId={file.content_identity?.uuid}
trigger={
<button className="px-2 py-0.5 text-xs font-medium rounded-full bg-app-box hover:bg-app-hover border border-app-line text-ink-dull hover:text-ink transition-colors">
+ Add tags

View File

@@ -0,0 +1,180 @@
import { Sparkle, PaperPlaneRight, Paperclip } from "@phosphor-icons/react";
import { useState } from "react";
import clsx from "clsx";
/** A single entry in the KnowledgeInspector chat thread. */
interface Message {
	// Monotonically increasing id, used as the React list key.
	id: number;
	// Who authored the message; controls avatar and bubble styling.
	role: "user" | "assistant";
	content: string;
	// When the message was created; rendered as HH:MM under the bubble.
	timestamp: Date;
}
/**
 * Prototype AI chat panel for the inspector sidebar.
 *
 * Keeps an in-memory message thread seeded with a greeting and echoes a
 * canned assistant reply after a short delay — real AI responses are not
 * wired up yet (see the placeholder reply text below).
 */
export function KnowledgeInspector() {
	const [message, setMessage] = useState("");
	const [messages, setMessages] = useState<Message[]>([
		{
			id: 1,
			role: "assistant",
			content:
				"Hi! I'm your AI assistant for Spacedrive. I can help you organize files, search your library, and answer questions about your data.",
			timestamp: new Date(),
		},
	]);

	// Append the typed message, then simulate an assistant reply after 500ms.
	const handleSend = () => {
		if (!message.trim()) return;

		const content = message;
		setMessage("");

		// Functional updates derive the next id from the latest state, so
		// rapid sends (or the delayed reply below) can't reuse a stale
		// `messages.length` captured by this closure.
		setMessages((prev) => [
			...prev,
			{ id: prev.length + 1, role: "user", content, timestamp: new Date() },
		]);

		// Simulate AI response
		setTimeout(() => {
			setMessages((prev) => [
				...prev,
				{
					id: prev.length + 1,
					role: "assistant",
					content: "This is a prototype. AI responses will be implemented soon!",
					timestamp: new Date(),
				},
			]);
		}, 500);
	};

	return (
		<div className="flex flex-col h-full">
			{/* Header */}
			<div className="px-3 py-2.5 border-b border-sidebar-line">
				<div className="flex items-center gap-2">
					<div className="size-7 rounded-full bg-accent/20 flex items-center justify-center">
						<Sparkle className="size-4 text-accent" weight="fill" />
					</div>
					<div>
						<div className="text-sm font-semibold text-sidebar-ink">
							AI Assistant
						</div>
						<div className="text-[10px] text-sidebar-inkDull">
							Knowledge & Insights
						</div>
					</div>
				</div>
			</div>
			{/* Messages */}
			<div className="flex-1 overflow-y-auto px-3 py-4 space-y-4 no-scrollbar">
				{messages.map((msg) => (
					<div
						key={msg.id}
						className={clsx(
							"flex gap-2.5",
							msg.role === "user" ? "flex-row-reverse" : "flex-row",
						)}
					>
						{/* Avatar */}
						<div
							className={clsx(
								"size-7 rounded-full shrink-0 flex items-center justify-center",
								msg.role === "assistant"
									? "bg-accent/20"
									: "bg-sidebar-selected",
							)}
						>
							{msg.role === "assistant" ? (
								<Sparkle className="size-3.5 text-accent" weight="fill" />
							) : (
								<div className="text-[10px] font-bold text-sidebar-ink">U</div>
							)}
						</div>
						{/* Message content */}
						<div
							className={clsx(
								"flex flex-col max-w-[80%]",
								msg.role === "user" ? "items-end" : "items-start",
							)}
						>
							<div
								className={clsx(
									"px-3 py-2 rounded-lg",
									msg.role === "assistant"
										? "bg-app-box/60 border border-app-line/50"
										: "bg-accent/10 border border-accent/20",
								)}
							>
								<p className="text-xs text-sidebar-ink leading-relaxed">
									{msg.content}
								</p>
							</div>
							<span className="text-[10px] text-sidebar-inkDull mt-1 px-1">
								{msg.timestamp.toLocaleTimeString([], {
									hour: "2-digit",
									minute: "2-digit",
								})}
							</span>
						</div>
					</div>
				))}
			</div>
			{/* Input */}
			<div className="border-t border-sidebar-line p-3 space-y-2">
				<div className="flex items-end gap-2">
					<button
						className="p-2 rounded-lg hover:bg-sidebar-selected transition-colors text-sidebar-inkDull hover:text-sidebar-ink"
						title="Attach file"
					>
						<Paperclip className="size-4" weight="bold" />
					</button>
					<div className="flex-1 flex items-center gap-2 bg-app-box border border-app-line rounded-lg px-3 py-2">
						<input
							type="text"
							value={message}
							onChange={(e) => setMessage(e.target.value)}
							// onKeyDown instead of the deprecated onKeyPress
							// (removed from the DOM spec; React discourages it).
							onKeyDown={(e) => {
								if (e.key === "Enter" && !e.shiftKey) {
									e.preventDefault();
									handleSend();
								}
							}}
							placeholder="Ask me anything..."
							className="flex-1 bg-transparent text-sm text-sidebar-ink placeholder:text-sidebar-inkDull outline-none"
						/>
					</div>
					<button
						onClick={handleSend}
						disabled={!message.trim()}
						className={clsx(
							"p-2 rounded-lg transition-colors",
							message.trim()
								? "bg-accent hover:bg-accent/90 text-white"
								: "bg-app-box text-sidebar-inkDull cursor-not-allowed",
						)}
						title="Send message"
					>
						<PaperPlaneRight className="size-4" weight="fill" />
					</button>
				</div>
				{/* Quick actions */}
				<div className="flex flex-wrap gap-1.5">
					<button className="px-2.5 py-1.5 text-[11px] font-medium text-sidebar-inkDull hover:text-sidebar-ink bg-app-box/40 hover:bg-app-box/60 rounded-md transition-colors">
						Organize files
					</button>
					<button className="px-2.5 py-1.5 text-[11px] font-medium text-sidebar-inkDull hover:text-sidebar-ink bg-app-box/40 hover:bg-app-box/60 rounded-md transition-colors">
						Find duplicates
					</button>
					<button className="px-2.5 py-1.5 text-[11px] font-medium text-sidebar-inkDull hover:text-sidebar-ink bg-app-box/40 hover:bg-app-box/60 rounded-md transition-colors">
						Smart search
					</button>
				</div>
			</div>
		</div>
	);
}

View File

@@ -19,6 +19,10 @@ export function createExplorerRouter() {
index: true,
element: <Overview />,
},
{
path: "explorer",
element: <ExplorerView />,
},
{
path: "location/:locationId",
element: <ExplorerView />,

View File

@@ -13,7 +13,7 @@ import { TopBarButton, Popover, usePopover } from "@sd/ui";
import clsx from "clsx";
import { TopBarPortal } from "../../TopBar";
import { PairingModal } from "../../components/PairingModal";
import { useAddLocationDialog } from "../../components/explorer/components/AddLocationModal";
import { useAddStorageDialog } from "../../components/Explorer/components/AddStorageModal";
import { useSyncSetupDialog } from "../../components/SyncSetupModal";
import { useSpacedriveClient } from "../../context";
import { useLibraries } from "../../hooks/useLibraries";
@@ -86,9 +86,9 @@ export function OverviewTopBar({ libraryName }: OverviewTopBarProps) {
(lib) => lib.id === currentLibraryId,
);
const handleAddLocation = () => {
useAddLocationDialog((locationId) => {
navigate(`/location/${locationId}`);
const handleAddStorage = () => {
useAddStorageDialog((id) => {
navigate(`/location/${id}`);
});
};
@@ -222,9 +222,9 @@ export function OverviewTopBar({ libraryName }: OverviewTopBarProps) {
<TopBarButton
icon={Plus}
className="!bg-accent hover:!bg-accent-deep !text-white"
onClick={handleAddLocation}
onClick={handleAddStorage}
>
Add Location
Add Storage
</TopBarButton>
</div>
}

View File

@@ -3,8 +3,6 @@
// This file has been generated by Specta. DO NOT EDIT.
export type Empty = Record<string, never>;
export type ActionContextInfo = { action_type: string; initiated_at: string; initiated_by: string | null; action_input: JsonValue; context: JsonValue };
export type AddGroupInput = { space_id: string; name: string; group_type: GroupType };
@@ -32,9 +30,9 @@ export type ApfsVolumeRole = "System" | "Data" | "Preboot" | "Recovery" | "VM" |
export type ApplyTagsInput = {
/**
* Entry IDs to apply tags to
* What to tag: content identities or specific entries
*/
entry_ids: number[];
targets: TagTargets;
/**
* Tag IDs to apply
*/
@@ -82,6 +80,19 @@ warnings: string[];
*/
message: string };
/**
* Targets for immediately applying a newly created tag
*/
export type ApplyToTargets =
/**
* Apply to content identities (all instances)
*/
{ type: "Content"; ids: string[] } |
/**
* Apply to specific entries (single instance)
*/
{ type: "Entry"; ids: number[] };
/**
* Audio metadata extracted from FFmpeg
*/
@@ -128,7 +139,7 @@ export type ContentIdentity = { uuid: string; kind: ContentKind; content_hash: s
/**
* Type of content
*/
// NOTE(review): the diff left both the old and the new declaration of this
// alias in place (a duplicate-identifier error); kept the updated union,
// which adds the "memory" kind introduced by this commit.
export type ContentKind = "unknown" | "image" | "video" | "audio" | "document" | "archive" | "code" | "text" | "database" | "book" | "font" | "mesh" | "config" | "encrypted" | "key" | "executable" | "binary" | "spreadsheet" | "presentation" | "email" | "calendar" | "contact" | "web" | "shortcut" | "package" | "model_entry" | "memory";
/**
* Copy method preference for file operations
@@ -177,7 +188,11 @@ is_organizational_anchor: boolean | null; privacy_level: PrivacyLevel | null; se
/**
* Initial attributes
*/
attributes: { [key in string]: JsonValue } | null };
attributes: { [key in string]: JsonValue } | null;
/**
* Optional: Targets to immediately apply this tag to after creation
*/
apply_to: ApplyToTargets | null };
export type CreateTagOutput = {
/**
@@ -2747,6 +2762,21 @@ export type TagSource =
*/
"Sync";
/**
* Specifies what to tag: content (all instances) or specific entries
*/
export type TagTargets =
/**
* Tag by content identity (applies to ALL instances of this content across devices)
* This is the preferred/default approach
*/
{ type: "Content"; ids: string[] } |
/**
* Tag by entry ID (applies to ONLY this specific file instance)
* Use when you want instance-specific tags
*/
{ type: "Entry"; ids: number[] };
/**
* Types of semantic tags with different behaviors
*/
@@ -3043,7 +3073,11 @@ write_speed_mbps: number | null;
/**
* Device ID that owns this volume
*/
device_id: string };
device_id: string;
/**
* Device slug for constructing SdPaths
*/
device_slug: string };
export type VolumeListOutput = { volumes: VolumeItem[] };
@@ -3179,185 +3213,185 @@ success: boolean };
// ===== API Type Unions =====
export type CoreAction =
{ type: 'network.stop'; input: NetworkStopInput; output: NetworkStopOutput }
| { type: 'network.start'; input: NetworkStartInput; output: NetworkStartOutput }
| { type: 'network.pair.join'; input: PairJoinInput; output: PairJoinOutput }
| { type: 'network.pair.generate'; input: PairGenerateInput; output: PairGenerateOutput }
| { type: 'libraries.open'; input: LibraryOpenInput; output: LibraryOpenOutput }
| { type: 'libraries.create'; input: LibraryCreateInput; output: LibraryCreateOutput }
| { type: 'models.whisper.delete'; input: DeleteWhisperModelInput; output: DeleteWhisperModelOutput }
{ type: 'models.whisper.delete'; input: DeleteWhisperModelInput; output: DeleteWhisperModelOutput }
| { type: 'models.whisper.download'; input: DownloadWhisperModelInput; output: DownloadWhisperModelOutput }
| { type: 'network.sync_setup'; input: LibrarySyncSetupInput; output: LibrarySyncSetupOutput }
| { type: 'network.device.revoke'; input: DeviceRevokeInput; output: DeviceRevokeOutput }
| { type: 'libraries.delete'; input: LibraryDeleteInput; output: LibraryDeleteOutput }
| { type: 'network.pair.cancel'; input: PairCancelInput; output: PairCancelOutput }
| { type: 'network.stop'; input: NetworkStopInput; output: NetworkStopOutput }
| { type: 'network.sync_setup'; input: LibrarySyncSetupInput; output: LibrarySyncSetupOutput }
| { type: 'libraries.open'; input: LibraryOpenInput; output: LibraryOpenOutput }
| { type: 'network.pair.generate'; input: PairGenerateInput; output: PairGenerateOutput }
| { type: 'network.spacedrop.send'; input: SpacedropSendInput; output: SpacedropSendOutput }
| { type: 'network.pair.join'; input: PairJoinInput; output: PairJoinOutput }
| { type: 'network.pair.cancel'; input: PairCancelInput; output: PairCancelOutput }
| { type: 'network.start'; input: NetworkStartInput; output: NetworkStartOutput }
| { type: 'libraries.create'; input: LibraryCreateInput; output: LibraryCreateOutput }
;
export type LibraryAction =
{ type: 'jobs.resume'; input: JobResumeInput; output: JobResumeOutput }
| { type: 'media.thumbnail.regenerate'; input: RegenerateThumbnailInput; output: RegenerateThumbnailOutput }
| { type: 'media.thumbnail'; input: ThumbnailInput; output: JobReceipt }
| { type: 'libraries.rename'; input: LibraryRenameInput; output: LibraryRenameOutput }
| { type: 'spaces.delete_group'; input: DeleteGroupInput; output: DeleteGroupOutput }
| { type: 'tags.apply'; input: ApplyTagsInput; output: ApplyTagsOutput }
| { type: 'locations.rescan'; input: LocationRescanInput; output: LocationRescanOutput }
| { type: 'volumes.add_cloud'; input: VolumeAddCloudInput; output: VolumeAddCloudOutput }
| { type: 'libraries.export'; input: LibraryExportInput; output: LibraryExportOutput }
| { type: 'volumes.speed_test'; input: VolumeSpeedTestInput; output: VolumeSpeedTestOutput }
| { type: 'spaces.create'; input: SpaceCreateInput; output: SpaceCreateOutput }
| { type: 'spaces.delete'; input: SpaceDeleteInput; output: SpaceDeleteOutput }
| { type: 'locations.add'; input: LocationAddInput; output: LocationAddOutput }
| { type: 'indexing.start'; input: IndexInput; output: JobReceipt }
| { type: 'spaces.add_group'; input: AddGroupInput; output: AddGroupOutput }
| { type: 'tags.create'; input: CreateTagInput; output: CreateTagOutput }
| { type: 'spaces.delete_item'; input: DeleteItemInput; output: DeleteItemOutput }
| { type: 'jobs.cancel'; input: JobCancelInput; output: JobCancelOutput }
| { type: 'volumes.track'; input: VolumeTrackInput; output: VolumeTrackOutput }
| { type: 'spaces.add_item'; input: AddItemInput; output: AddItemOutput }
| { type: 'spaces.update'; input: SpaceUpdateInput; output: SpaceUpdateOutput }
| { type: 'jobs.pause'; input: JobPauseInput; output: JobPauseOutput }
| { type: 'media.proxy.generate'; input: GenerateProxyInput; output: GenerateProxyOutput }
| { type: 'volumes.remove_cloud'; input: VolumeRemoveCloudInput; output: VolumeRemoveCloudOutput }
| { type: 'media.ocr.extract'; input: ExtractTextInput; output: ExtractTextOutput }
| { type: 'volumes.refresh'; input: VolumeRefreshInput; output: VolumeRefreshOutput }
| { type: 'volumes.untrack'; input: VolumeUntrackInput; output: VolumeUntrackOutput }
| { type: 'locations.update'; input: LocationUpdateInput; output: LocationUpdateOutput }
| { type: 'locations.triggerJob'; input: LocationTriggerJobInput; output: LocationTriggerJobOutput }
| { type: 'files.copy'; input: FileCopyInput; output: JobReceipt }
| { type: 'media.speech.transcribe'; input: TranscribeAudioInput; output: TranscribeAudioOutput }
| { type: 'indexing.verify'; input: IndexVerifyInput; output: IndexVerifyOutput }
| { type: 'locations.remove'; input: LocationRemoveInput; output: LocationRemoveOutput }
| { type: 'files.delete'; input: FileDeleteInput; output: JobReceipt }
| { type: 'media.thumbstrip.generate'; input: GenerateThumbstripInput; output: GenerateThumbstripOutput }
{ type: 'spaces.add_group'; input: AddGroupInput; output: AddGroupOutput }
| { type: 'spaces.update_group'; input: UpdateGroupInput; output: UpdateGroupOutput }
| { type: 'locations.remove'; input: LocationRemoveInput; output: LocationRemoveOutput }
| { type: 'indexing.verify'; input: IndexVerifyInput; output: IndexVerifyOutput }
| { type: 'spaces.reorder_items'; input: ReorderItemsInput; output: ReorderOutput }
| { type: 'spaces.reorder_groups'; input: ReorderGroupsInput; output: ReorderOutput }
| { type: 'spaces.update'; input: SpaceUpdateInput; output: SpaceUpdateOutput }
| { type: 'volumes.track'; input: VolumeTrackInput; output: VolumeTrackOutput }
| { type: 'spaces.create'; input: SpaceCreateInput; output: SpaceCreateOutput }
| { type: 'spaces.delete_item'; input: DeleteItemInput; output: DeleteItemOutput }
| { type: 'media.thumbstrip.generate'; input: GenerateThumbstripInput; output: GenerateThumbstripOutput }
| { type: 'tags.create'; input: CreateTagInput; output: CreateTagOutput }
| { type: 'indexing.start'; input: IndexInput; output: JobReceipt }
| { type: 'jobs.cancel'; input: JobCancelInput; output: JobCancelOutput }
| { type: 'jobs.pause'; input: JobPauseInput; output: JobPauseOutput }
| { type: 'files.delete'; input: FileDeleteInput; output: JobReceipt }
| { type: 'media.speech.transcribe'; input: TranscribeAudioInput; output: TranscribeAudioOutput }
| { type: 'volumes.speed_test'; input: VolumeSpeedTestInput; output: VolumeSpeedTestOutput }
| { type: 'locations.update'; input: LocationUpdateInput; output: LocationUpdateOutput }
| { type: 'jobs.resume'; input: JobResumeInput; output: JobResumeOutput }
| { type: 'spaces.add_item'; input: AddItemInput; output: AddItemOutput }
| { type: 'media.proxy.generate'; input: GenerateProxyInput; output: GenerateProxyOutput }
| { type: 'locations.add'; input: LocationAddInput; output: LocationAddOutput }
| { type: 'locations.triggerJob'; input: LocationTriggerJobInput; output: LocationTriggerJobOutput }
| { type: 'libraries.export'; input: LibraryExportInput; output: LibraryExportOutput }
| { type: 'tags.apply'; input: ApplyTagsInput; output: ApplyTagsOutput }
| { type: 'libraries.rename'; input: LibraryRenameInput; output: LibraryRenameOutput }
| { type: 'volumes.untrack'; input: VolumeUntrackInput; output: VolumeUntrackOutput }
| { type: 'media.thumbnail.regenerate'; input: RegenerateThumbnailInput; output: RegenerateThumbnailOutput }
| { type: 'media.thumbnail'; input: ThumbnailInput; output: JobReceipt }
| { type: 'locations.rescan'; input: LocationRescanInput; output: LocationRescanOutput }
| { type: 'volumes.add_cloud'; input: VolumeAddCloudInput; output: VolumeAddCloudOutput }
| { type: 'spaces.delete_group'; input: DeleteGroupInput; output: DeleteGroupOutput }
| { type: 'spaces.delete'; input: SpaceDeleteInput; output: SpaceDeleteOutput }
| { type: 'files.copy'; input: FileCopyInput; output: JobReceipt }
| { type: 'volumes.refresh'; input: VolumeRefreshInput; output: VolumeRefreshOutput }
| { type: 'volumes.remove_cloud'; input: VolumeRemoveCloudInput; output: VolumeRemoveCloudOutput }
| { type: 'media.ocr.extract'; input: ExtractTextInput; output: ExtractTextOutput }
;
// NOTE(review): deduplicated diff-merged members (identical input/output
// types on both occurrences of each variant).
export type CoreQuery =
	{ type: 'core.status'; input: Empty; output: CoreStatus }
	| { type: 'core.events.list'; input: ListEventsInput; output: ListEventsOutput }
	| { type: 'libraries.list'; input: ListLibrariesInput; output: [LibraryInfo] }
	| { type: 'network.status'; input: NetworkStatusQueryInput; output: NetworkStatus }
	| { type: 'network.devices.list'; input: ListPairedDevicesInput; output: ListPairedDevicesOutput }
	| { type: 'network.pair.status'; input: PairStatusQueryInput; output: PairStatusOutput }
	| { type: 'network.sync_setup.discover'; input: DiscoverRemoteLibrariesInput; output: DiscoverRemoteLibrariesOutput }
	| { type: 'models.whisper.list'; input: ListWhisperModelsInput; output: ListWhisperModelsOutput }
	;
export type LibraryQuery =
{ type: 'files.media_listing'; input: MediaListingInput; output: MediaListingOutput }
| { type: 'tags.search'; input: SearchTagsInput; output: SearchTagsOutput }
| { type: 'spaces.get'; input: SpaceGetQueryInput; output: SpaceGetOutput }
| { type: 'spaces.list'; input: SpacesListQueryInput; output: SpacesListOutput }
| { type: 'libraries.info'; input: LibraryInfoQueryInput; output: LibraryInfoOutput }
{ type: 'libraries.info'; input: LibraryInfoQueryInput; output: LibraryInfoOutput }
| { type: 'files.directory_listing'; input: DirectoryListingInput; output: DirectoryListingOutput }
| { type: 'jobs.list'; input: JobListInput; output: JobListOutput }
| { type: 'jobs.info'; input: JobInfoQueryInput; output: JobInfoOutput }
| { type: 'search.files'; input: FileSearchInput; output: FileSearchOutput }
| { type: 'sync.metrics'; input: GetSyncMetricsInput; output: GetSyncMetricsOutput }
| { type: 'jobs.info'; input: JobInfoQueryInput; output: JobInfoOutput }
| { type: 'files.unique_to_location'; input: UniqueToLocationInput; output: UniqueToLocationOutput }
| { type: 'jobs.list'; input: JobListInput; output: JobListOutput }
| { type: 'volumes.list'; input: VolumeListQueryInput; output: VolumeListOutput }
| { type: 'locations.list'; input: LocationsListQueryInput; output: LocationsListOutput }
| { type: 'locations.suggested'; input: SuggestedLocationsQueryInput; output: SuggestedLocationsOutput }
| { type: 'files.by_path'; input: FileByPathQuery; output: File }
| { type: 'files.directory_listing'; input: DirectoryListingInput; output: DirectoryListingOutput }
| { type: 'devices.list'; input: ListLibraryDevicesInput; output: [LibraryDeviceInfo] }
| { type: 'files.by_id'; input: FileByIdQuery; output: File }
| { type: 'test.ping'; input: PingInput; output: PingOutput }
| { type: 'files.by_path'; input: FileByPathQuery; output: File }
| { type: 'files.unique_to_location'; input: UniqueToLocationInput; output: UniqueToLocationOutput }
| { type: 'devices.list'; input: ListLibraryDevicesInput; output: [LibraryDeviceInfo] }
| { type: 'spaces.list'; input: SpacesListQueryInput; output: SpacesListOutput }
| { type: 'locations.suggested'; input: SuggestedLocationsQueryInput; output: SuggestedLocationsOutput }
| { type: 'tags.search'; input: SearchTagsInput; output: SearchTagsOutput }
| { type: 'spaces.get'; input: SpaceGetQueryInput; output: SpaceGetOutput }
| { type: 'files.media_listing'; input: MediaListingInput; output: MediaListingOutput }
| { type: 'spaces.get_layout'; input: SpaceLayoutQueryInput; output: SpaceLayout }
| { type: 'files.by_id'; input: FileByIdQuery; output: File }
;
// ===== Wire Method Mappings =====
// NOTE(review): the diff-merged source listed every key twice (old + new
// ordering), which is a TypeScript error (TS1117: duplicate property in an
// object literal). Every duplicate mapped to an identical value — keys map
// mechanically to 'action:<key>.input' / 'query:<key>' — so deduplication
// is exactly behavior-preserving.
export const WIRE_METHODS = {
	coreActions: {
		'network.stop': 'action:network.stop.input',
		'network.start': 'action:network.start.input',
		'network.pair.join': 'action:network.pair.join.input',
		'network.pair.generate': 'action:network.pair.generate.input',
		'network.pair.cancel': 'action:network.pair.cancel.input',
		'network.sync_setup': 'action:network.sync_setup.input',
		'network.device.revoke': 'action:network.device.revoke.input',
		'network.spacedrop.send': 'action:network.spacedrop.send.input',
		'libraries.open': 'action:libraries.open.input',
		'libraries.create': 'action:libraries.create.input',
		'libraries.delete': 'action:libraries.delete.input',
		'models.whisper.delete': 'action:models.whisper.delete.input',
		'models.whisper.download': 'action:models.whisper.download.input',
	},
	libraryActions: {
		'jobs.resume': 'action:jobs.resume.input',
		'jobs.cancel': 'action:jobs.cancel.input',
		'jobs.pause': 'action:jobs.pause.input',
		'libraries.rename': 'action:libraries.rename.input',
		'libraries.export': 'action:libraries.export.input',
		'tags.apply': 'action:tags.apply.input',
		'tags.create': 'action:tags.create.input',
		'locations.add': 'action:locations.add.input',
		'locations.remove': 'action:locations.remove.input',
		'locations.update': 'action:locations.update.input',
		'locations.rescan': 'action:locations.rescan.input',
		'locations.triggerJob': 'action:locations.triggerJob.input',
		'indexing.start': 'action:indexing.start.input',
		'indexing.verify': 'action:indexing.verify.input',
		'files.copy': 'action:files.copy.input',
		'files.delete': 'action:files.delete.input',
		'media.thumbnail': 'action:media.thumbnail.input',
		'media.thumbnail.regenerate': 'action:media.thumbnail.regenerate.input',
		'media.thumbstrip.generate': 'action:media.thumbstrip.generate.input',
		'media.proxy.generate': 'action:media.proxy.generate.input',
		'media.ocr.extract': 'action:media.ocr.extract.input',
		'media.speech.transcribe': 'action:media.speech.transcribe.input',
		'spaces.create': 'action:spaces.create.input',
		'spaces.update': 'action:spaces.update.input',
		'spaces.delete': 'action:spaces.delete.input',
		'spaces.add_group': 'action:spaces.add_group.input',
		'spaces.update_group': 'action:spaces.update_group.input',
		'spaces.delete_group': 'action:spaces.delete_group.input',
		'spaces.add_item': 'action:spaces.add_item.input',
		'spaces.delete_item': 'action:spaces.delete_item.input',
		'spaces.reorder_items': 'action:spaces.reorder_items.input',
		'spaces.reorder_groups': 'action:spaces.reorder_groups.input',
		'volumes.track': 'action:volumes.track.input',
		'volumes.untrack': 'action:volumes.untrack.input',
		'volumes.refresh': 'action:volumes.refresh.input',
		'volumes.speed_test': 'action:volumes.speed_test.input',
		'volumes.add_cloud': 'action:volumes.add_cloud.input',
		'volumes.remove_cloud': 'action:volumes.remove_cloud.input',
	},
	coreQueries: {
		'core.status': 'query:core.status',
		'core.events.list': 'query:core.events.list',
		'libraries.list': 'query:libraries.list',
		'network.status': 'query:network.status',
		'network.devices.list': 'query:network.devices.list',
		'network.pair.status': 'query:network.pair.status',
		'network.sync_setup.discover': 'query:network.sync_setup.discover',
		'models.whisper.list': 'query:models.whisper.list',
	},
	libraryQueries: {
		'libraries.info': 'query:libraries.info',
		'files.by_id': 'query:files.by_id',
		'files.by_path': 'query:files.by_path',
		'files.directory_listing': 'query:files.directory_listing',
		'files.media_listing': 'query:files.media_listing',
		'files.unique_to_location': 'query:files.unique_to_location',
		'search.files': 'query:search.files',
		'sync.metrics': 'query:sync.metrics',
		'jobs.list': 'query:jobs.list',
		'jobs.info': 'query:jobs.info',
		'volumes.list': 'query:volumes.list',
		'locations.list': 'query:locations.list',
		'locations.suggested': 'query:locations.suggested',
		'devices.list': 'query:devices.list',
		'spaces.list': 'query:spaces.list',
		'spaces.get': 'query:spaces.get',
		'spaces.get_layout': 'query:spaces.get_layout',
		'tags.search': 'query:tags.search',
		'test.ping': 'query:test.ping',
	},
} as const;