mirror of
https://github.com/spacedriveapp/spacedrive.git
synced 2026-02-20 07:37:26 -05:00
Improve ephemeral walker (#1877)
* `non_indexed::walk` as `impl Stream`
* wip
* this is gooooood
* savepoint
* remove
* Batched stream
* `unsafe_streamed_query`
* nightmare nightmare nightmare nightmare nightmare nightmare
* JS Mutex
* cleanup
* proper error handling
* myCode satisfies Typescript
* Move to npm fork of rspc
* fixes
* rspc more crashy crashy
* Typescript is very disappointed
```diff
@@ -12,8 +12,8 @@
 	},
 	"dependencies": {
 		"@remix-run/router": "^1.13.1",
-		"@rspc/client": "=0.0.0-main-799eec5d",
-		"@rspc/tauri": "=0.0.0-main-799eec5d",
+		"@oscartbeaumont-sd/rspc-client": "=0.0.0-main-dc31e5b2",
+		"@oscartbeaumont-sd/rspc-tauri": "=0.0.0-main-dc31e5b2",
 		"@sd/client": "workspace:*",
 		"@sd/interface": "workspace:*",
 		"@sd/ui": "workspace:*",
```
```diff
@@ -23,7 +23,7 @@ futures = { workspace = true }
 http = { workspace = true }
 prisma-client-rust = { workspace = true }
 rand = { workspace = true }
-rspc = { workspace = true, features = ["tauri"] }
+rspc = { workspace = true, features = ["tauri", "tracing"] }
 serde = { workspace = true }
 specta = { workspace = true }
 tokio = { workspace = true, features = ["sync"] }
```
```diff
@@ -1,4 +1,4 @@
-import { tauriLink } from '@rspc/tauri/v2';
+import { tauriLink } from '@oscartbeaumont-sd/rspc-tauri/v2';
 
 globalThis.isDev = import.meta.env.DEV;
 globalThis.rspcLinks = [
```
```diff
@@ -5,8 +5,8 @@ const path = require('path');
 // Needed for transforming svgs from @sd/assets
 const [reactSVGPath, reactSVGExclude] = resolveUniqueModule('react-native-svg');
 
-const [rspcClientPath, rspcClientExclude] = resolveUniqueModule('@rspc/client');
-const [rspcReactPath, rspcReactExclude] = resolveUniqueModule('@rspc/react');
+const [rspcClientPath, rspcClientExclude] = resolveUniqueModule('@oscartbeaumont-sd/rspc-client');
+const [rspcReactPath, rspcReactExclude] = resolveUniqueModule('@oscartbeaumont-sd/rspc-react');
 
 const { getDefaultConfig } = require('expo/metro-config');
 const expoDefaultConfig = getDefaultConfig(__dirname);
```
```diff
@@ -32,13 +32,13 @@ const metroConfig = makeMetroConfig({
 		path.resolve(workspaceRoot, 'node_modules')
 	],
 	resolveRequest: (context, moduleName, platform) => {
-		if (moduleName.startsWith('@rspc/client/v2')) {
+		if (moduleName.startsWith('@oscartbeaumont-sd/rspc-client/v2')) {
 			return {
 				filePath: path.resolve(rspcClientPath, 'dist', 'v2.js'),
 				type: 'sourceFile'
 			};
 		}
-		if (moduleName.startsWith('@rspc/react/v2')) {
+		if (moduleName.startsWith('@oscartbeaumont-sd/rspc-react/v2')) {
 			return {
 				filePath: path.resolve(rspcReactPath, 'dist', 'v2.js'),
 				type: 'sourceFile'
```
```diff
@@ -1,4 +1,4 @@
-import { AlphaRSPCError, Link, RspcRequest } from '@rspc/client/v2';
+import { AlphaRSPCError, Link, RspcRequest } from '@oscartbeaumont-sd/rspc-client/v2';
 import { EventEmitter, requireNativeModule } from 'expo-modules-core';
 
 // It loads the native module object from the JSI or falls back to
```
```diff
@@ -24,8 +24,8 @@
 		"@react-navigation/drawer": "^6.6.3",
 		"@react-navigation/native": "^6.1.7",
 		"@react-navigation/stack": "^6.3.17",
-		"@rspc/client": "=0.0.0-main-799eec5d",
-		"@rspc/react": "=0.0.0-main-799eec5d",
+		"@oscartbeaumont-sd/rspc-client": "=0.0.0-main-dc31e5b2",
+		"@oscartbeaumont-sd/rspc-react": "=0.0.0-main-dc31e5b2",
 		"@sd/assets": "workspace:*",
 		"@sd/client": "workspace:*",
 		"@shopify/flash-list": "1.4.3",
```
```diff
@@ -11,7 +11,7 @@
 	},
 	"dependencies": {
 		"@fontsource/inter": "^4.5.15",
-		"@rspc/client": "=0.0.0-main-799eec5d",
+		"@oscartbeaumont-sd/rspc-client": "=0.0.0-main-dc31e5b2",
 		"@sd/client": "workspace:*",
 		"@sd/interface": "workspace:*",
 		"@tanstack/react-query": "^4.36.1",
```
```diff
@@ -1,4 +1,4 @@
-import { wsBatchLink } from '@rspc/client/v2';
+import { wsBatchLink } from '@oscartbeaumont-sd/rspc-client/v2';
 
 globalThis.isDev = import.meta.env.DEV;
 globalThis.rspcLinks = [
```
```diff
@@ -6,6 +6,7 @@ use crate::{
 	library::Library,
 	location::{non_indexed, LocationError},
 	object::media::thumbnail::get_indexed_thumb_key,
+	util::{unsafe_streamed_query, BatchedStream},
 };
 
 use sd_cache::{CacheNode, Model, Normalise, Reference};
```
```diff
@@ -13,6 +14,9 @@ use sd_prisma::prisma::{self, PrismaClient};
 
 use std::path::PathBuf;
 
+use async_stream::stream;
+use futures::StreamExt;
+use itertools::Either;
 use rspc::{alpha::AlphaRouter, ErrorCode};
 use serde::{Deserialize, Serialize};
 use specta::Type;
```
```diff
@@ -99,63 +103,82 @@ pub fn mount() -> AlphaRouter<Ctx> {
 			#[specta(optional)]
 			order: Option<EphemeralPathOrder>,
 		}
 
 		#[derive(Serialize, Type, Debug)]
-		struct EphemeralPathsResult {
+		struct EphemeralPathsResultItem {
 			pub entries: Vec<Reference<ExplorerItem>>,
 			pub errors: Vec<rspc::Error>,
 			pub nodes: Vec<CacheNode>,
 		}
 
-		R.with2(library()).query(
+		R.with2(library()).subscription(
 			|(node, library),
 			 EphemeralPathSearchArgs {
 				 path,
 				 with_hidden_files,
 				 order,
 			 }| async move {
-				let mut paths =
-					non_indexed::walk(path, with_hidden_files, node, library).await?;
-
-				macro_rules! order_match {
-					($order:ident, [$(($variant:ident, |$i:ident| $func:expr)),+]) => {{
-						match $order {
-							$(EphemeralPathOrder::$variant(order) => {
-								paths.entries.sort_unstable_by(|path1, path2| {
-									let func = |$i: &ExplorerItem| $func;
-
-									let one = func(path1);
-									let two = func(path2);
-
-									match order {
-										SortOrder::Desc => two.cmp(&one),
-										SortOrder::Asc => one.cmp(&two),
-									}
-								});
-							})+
-						}
-					}};
-				}
-
-				if let Some(order) = order {
-					order_match!(
-						order,
-						[
-							(Name, |p| p.name().to_lowercase()),
-							(SizeInBytes, |p| p.size_in_bytes()),
-							(DateCreated, |p| p.date_created()),
-							(DateModified, |p| p.date_modified())
-						]
-					)
-				}
-
-				let (nodes, entries) = paths.entries.normalise(|item| item.id());
-
-				Ok(EphemeralPathsResult {
-					entries,
-					errors: paths.errors,
-					nodes,
-				})
+				let paths =
+					non_indexed::walk(path, with_hidden_files, node, library, |entries| {
+						macro_rules! order_match {
+							($order:ident, [$(($variant:ident, |$i:ident| $func:expr)),+]) => {{
+								match $order {
+									$(EphemeralPathOrder::$variant(order) => {
+										entries.sort_unstable_by(|path1, path2| {
+											let func = |$i: &non_indexed::Entry| $func;
+
+											let one = func(path1);
+											let two = func(path2);
+
+											match order {
+												SortOrder::Desc => two.cmp(&one),
+												SortOrder::Asc => one.cmp(&two),
+											}
+										});
+									})+
+								}
+							}};
+						}
+
+						if let Some(order) = order {
+							order_match!(
+								order,
+								[
+									(Name, |p| p.name().to_lowercase()),
+									(SizeInBytes, |p| p.size_in_bytes()),
+									(DateCreated, |p| p.date_created()),
+									(DateModified, |p| p.date_modified())
+								]
+							)
+						}
+					})
+					.await?;
+
+				let mut stream = BatchedStream::new(paths);
+				Ok(unsafe_streamed_query(stream! {
+					while let Some(result) = stream.next().await {
+						// We optimise for the case of no errors because it should be way more common.
+						let mut entries = Vec::with_capacity(result.len());
+						let mut errors = Vec::with_capacity(0);
+
+						for item in result {
+							match item {
+								Ok(item) => entries.push(item),
+								Err(e) => match e {
+									Either::Left(e) => errors.push(e),
+									Either::Right(e) => errors.push(e.into()),
+								},
+							}
+						}
+
+						let (nodes, entries) = entries.normalise(|item: &ExplorerItem| item.id());
+
+						yield EphemeralPathsResultItem {
+							entries,
+							errors,
+							nodes,
+						};
+					}
+				}))
 			},
 		)
 	})
```
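For orientation, here is that pipeline in isolation: the walker yields a stream of results, a batching adapter groups them, and a final sentinel tells the client the "query" has finished. This is a minimal, self-contained sketch (assuming `tokio`, `futures`, and `async-stream` as dependencies); `Output` and `streamed` are illustrative stand-ins for the crate's `unsafe_streamed_query`, and a fixed-size `chunks(3)` stands in for `BatchedStream`, which actually batches by readiness rather than by count.

```rust
use std::pin::pin;

use async_stream::stream;
use futures::{Stream, StreamExt};

/// Stand-in for the `Output` enum in `unsafe_streamed_query`: either a batch
/// of data or an explicit "the stream is done" marker.
#[derive(Debug)]
enum Output<T> {
    Data(T),
    Complete,
}

/// Wrap a per-item stream into per-batch messages plus a completion marker.
fn streamed<S: Stream>(inner: S) -> impl Stream<Item = Output<Vec<S::Item>>> {
    stream! {
        // `chunks(3)` plays the role of `BatchedStream` here; the real type
        // flushes however many items were ready within one wake-up instead.
        let mut batches = pin!(inner.chunks(3));
        while let Some(batch) = batches.next().await {
            yield Output::Data(batch);
        }
        // The sentinel the frontend uses to resolve the pending "query".
        yield Output::Complete;
    }
}

#[tokio::main]
async fn main() {
    let walked = futures::stream::iter(0..8); // pretend these are ExplorerItems
    let mut out = pin!(streamed(walked));
    while let Some(msg) = out.next().await {
        match msg {
            Output::Data(batch) => println!("batch: {batch:?}"),
            Output::Complete => println!("__stream_complete"),
        }
    }
}
```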
```diff
@@ -88,6 +88,12 @@ impl InvalidRequests {
 
 			let queries = r.queries();
 			for req in &invalidate_requests.queries {
+				// This is a subscription in Rust but is a query in React, where it needs revalidation.
+				// We also don't check that its arguments are valid because we can't, lol.
+				if req.key == "search.ephemeralPaths" {
+					continue;
+				}
+
 				if let Some(query_ty) = queries.get(req.key) {
 					if let Some(arg) = &req.arg_ty {
 						if &query_ty.ty.input != arg {
```
```diff
@@ -147,7 +147,7 @@ impl Libraries {
 	}
 
 	/// create creates a new library with the given config and mounts it into the running [LibraryManager].
-	pub(crate) async fn create(
+	pub async fn create(
 		self: &Arc<Self>,
 		name: LibraryName,
 		description: Option<String>,
```
```diff
@@ -8,6 +8,8 @@ use crate::{
 	Node,
 };
 
+use futures::Stream;
+use itertools::Either;
 use sd_file_ext::{extensions::Extension, kind::ObjectKind};
 use sd_file_path_helper::{path_is_hidden, MetadataExt};
 use sd_prisma::prisma::location;
```
```diff
@@ -15,6 +17,7 @@ use sd_utils::{chain_optional_iter, error::FileIOError};
 
 use std::{
 	collections::HashMap,
+	io::ErrorKind,
 	path::{Path, PathBuf},
 	sync::Arc,
 };
```
```diff
@@ -24,8 +27,9 @@ use rspc::ErrorCode;
 use serde::Serialize;
 use specta::Type;
 use thiserror::Error;
-use tokio::{fs, io};
-use tracing::{error, warn};
+use tokio::{io, sync::mpsc, task::JoinError};
+use tokio_stream::wrappers::ReceiverStream;
+use tracing::{error, span, warn, Level};
 
 use super::{
 	indexer::rules::{
```
```diff
@@ -45,6 +49,18 @@ pub enum NonIndexedLocationError {
 
 	#[error("database error: {0}")]
 	Database(#[from] prisma_client_rust::QueryError),
+
+	#[error("error joining tokio task: {0}")]
+	TaskJoinError(#[from] JoinError),
+
+	#[error("receiver shutdown error")]
+	SendError,
 }
 
+impl<T> From<mpsc::error::SendError<T>> for NonIndexedLocationError {
+	fn from(_: mpsc::error::SendError<T>) -> Self {
+		Self::SendError
+	}
+}
+
 impl From<NonIndexedLocationError> for rspc::Error {
```
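Worth noting why `SendError` gets a hand-written impl while the other variants use `#[from]`: `mpsc::error::SendError<T>` is generic over the unsent message, so a derive on a single variant cannot cover every `T`; the blanket impl simply discards the payload. A self-contained sketch of the same pattern, with a hypothetical `WalkError` standing in (assuming `tokio` and `thiserror`):

```rust
use tokio::sync::mpsc;

/// Hypothetical stand-in for `NonIndexedLocationError`, trimmed to the one
/// variant this pattern needs.
#[derive(Debug, thiserror::Error)]
enum WalkError {
    #[error("receiver shutdown error")]
    Send,
}

// `SendError<T>` carries the unsent message and is generic, so `#[from]` on a
// single variant can't cover every `T`; a manual blanket impl erases the payload.
impl<T> From<mpsc::error::SendError<T>> for WalkError {
    fn from(_: mpsc::error::SendError<T>) -> Self {
        Self::Send
    }
}

#[tokio::main]
async fn main() {
    let (tx, rx) = mpsc::channel::<u32>(1);
    drop(rx); // the consumer went away, e.g. the UI dropped the subscription

    // `send` now fails; the blanket impl lets `?` (or `.into()`) convert the
    // error inside the walker task without naming `T`.
    let err: WalkError = tx.send(1).await.unwrap_err().into();
    println!("{err}");
}
```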
```diff
@@ -68,12 +84,6 @@ impl<P: AsRef<Path>> From<(P, io::Error)> for NonIndexedLocationError {
 	}
 }
 
-#[derive(Serialize, Type, Debug)]
-pub struct NonIndexedFileSystemEntries {
-	pub entries: Vec<ExplorerItem>,
-	pub errors: Vec<rspc::Error>,
-}
-
 #[derive(Serialize, Type, Debug)]
 pub struct NonIndexedPathItem {
 	pub path: String,
```
```diff
@@ -87,192 +97,294 @@ pub struct NonIndexedPathItem {
 	pub hidden: bool,
 }
 
-pub async fn walk(
-	full_path: impl AsRef<Path>,
-	with_hidden_files: bool,
-	node: Arc<Node>,
-	library: Arc<Library>,
-) -> Result<NonIndexedFileSystemEntries, NonIndexedLocationError> {
-	let path = full_path.as_ref();
-	let mut read_dir = fs::read_dir(path).await.map_err(|e| (path, e))?;
-
-	let mut directories = vec![];
-	let mut errors = vec![];
-	let mut entries = vec![];
-
-	let rules = chain_optional_iter(
-		[IndexerRule::from(no_os_protected())],
-		[(!with_hidden_files).then(|| IndexerRule::from(no_hidden()))],
-	);
-
-	let mut thumbnails_to_generate = vec![];
-	// Generating thumbnails for PDFs is kinda slow, so we're leaving them for last in the batch
-	let mut document_thumbnails_to_generate = vec![];
-
-	while let Some(entry) = read_dir.next_entry().await.map_err(|e| (path, e))? {
-		let Ok((entry_path, name)) = normalize_path(entry.path())
-			.map_err(|e| errors.push(NonIndexedLocationError::from((path, e)).into()))
-		else {
-			continue;
-		};
-
-		if let Ok(rule_results) = IndexerRule::apply_all(&rules, &entry_path)
-			.await
-			.map_err(|e| errors.push(e.into()))
-		{
-			// No OS Protected and No Hidden rules, must always be from this kind, should panic otherwise
-			if rule_results[&RuleKind::RejectFilesByGlob]
-				.iter()
-				.any(|reject| !reject)
-			{
-				continue;
-			}
-		} else {
-			continue;
-		}
-
-		let Ok(metadata) = entry
-			.metadata()
-			.await
-			.map_err(|e| errors.push(NonIndexedLocationError::from((path, e)).into()))
-		else {
-			continue;
-		};
-
-		if metadata.is_dir() {
-			directories.push((entry_path, name, metadata));
-		} else {
-			let path = Path::new(&entry_path);
-
-			let Some(name) = path
-				.file_stem()
-				.and_then(|s| s.to_str().map(str::to_string))
-			else {
-				warn!("Failed to extract name from path: {}", &entry_path);
-				continue;
-			};
-
-			let extension = path
-				.extension()
-				.and_then(|s| s.to_str().map(str::to_string))
-				.unwrap_or_default();
-
-			let kind = Extension::resolve_conflicting(&path, false)
-				.await
-				.map(Into::into)
-				.unwrap_or(ObjectKind::Unknown);
-
-			let should_generate_thumbnail = {
-				#[cfg(feature = "ffmpeg")]
-				{
-					matches!(
-						kind,
-						ObjectKind::Image | ObjectKind::Video | ObjectKind::Document
-					)
-				}
-
-				#[cfg(not(feature = "ffmpeg"))]
-				{
-					matches!(kind, ObjectKind::Image | ObjectKind::Document)
-				}
-			};
-
-			let thumbnail_key = if should_generate_thumbnail {
-				if let Ok(cas_id) = generate_cas_id(&path, metadata.len())
-					.await
-					.map_err(|e| errors.push(NonIndexedLocationError::from((path, e)).into()))
-				{
-					if kind == ObjectKind::Document {
-						document_thumbnails_to_generate.push(GenerateThumbnailArgs::new(
-							extension.clone(),
-							cas_id.clone(),
-							path.to_path_buf(),
-						));
-					} else {
-						thumbnails_to_generate.push(GenerateThumbnailArgs::new(
-							extension.clone(),
-							cas_id.clone(),
-							path.to_path_buf(),
-						));
-					}
-
-					Some(get_ephemeral_thumb_key(&cas_id))
-				} else {
-					None
-				}
-			} else {
-				None
-			};
-
-			entries.push(ExplorerItem::NonIndexedPath {
-				has_local_thumbnail: thumbnail_key.is_some(),
-				thumbnail_key,
-				item: NonIndexedPathItem {
-					hidden: path_is_hidden(Path::new(&entry_path), &metadata),
-					path: entry_path,
-					name,
-					extension,
-					kind: kind as i32,
-					is_dir: false,
-					date_created: metadata.created_or_now().into(),
-					date_modified: metadata.modified_or_now().into(),
-					size_in_bytes_bytes: metadata.len().to_be_bytes().to_vec(),
-				},
-			});
-		}
-	}
-
-	thumbnails_to_generate.extend(document_thumbnails_to_generate);
-
-	node.thumbnailer
-		.new_ephemeral_thumbnails_batch(BatchToProcess::new(thumbnails_to_generate, false, false))
-		.await;
-
-	let mut locations = library
-		.db
-		.location()
-		.find_many(vec![location::path::in_vec(
-			directories
-				.iter()
-				.map(|(path, _, _)| path.clone())
-				.collect(),
-		)])
-		.exec()
-		.await?
-		.into_iter()
-		.flat_map(|location| {
-			location
-				.path
-				.clone()
-				.map(|location_path| (location_path, location))
-		})
-		.collect::<HashMap<_, _>>();
-
-	for (directory, name, metadata) in directories {
-		if let Some(location) = locations.remove(&directory) {
-			entries.push(ExplorerItem::Location {
-				has_local_thumbnail: false,
-				thumbnail_key: None,
-				item: location,
-			});
-		} else {
-			entries.push(ExplorerItem::NonIndexedPath {
-				has_local_thumbnail: false,
-				thumbnail_key: None,
-				item: NonIndexedPathItem {
-					hidden: path_is_hidden(Path::new(&directory), &metadata),
-					path: directory,
-					name,
-					extension: String::new(),
-					kind: ObjectKind::Folder as i32,
-					is_dir: true,
-					date_created: metadata.created_or_now().into(),
-					date_modified: metadata.modified_or_now().into(),
-					size_in_bytes_bytes: metadata.len().to_be_bytes().to_vec(),
-				},
-			});
-		}
-	}
-
-	Ok(NonIndexedFileSystemEntries { entries, errors })
-}
+// #[instrument(name = "non_indexed::walk", skip(sort_fn))]
+pub async fn walk(
+	path: PathBuf,
+	with_hidden_files: bool,
+	node: Arc<Node>,
+	library: Arc<Library>,
+	sort_fn: impl FnOnce(&mut Vec<Entry>) + Send,
+) -> Result<
+	impl Stream<Item = Result<ExplorerItem, Either<rspc::Error, NonIndexedLocationError>>> + Send,
+	NonIndexedLocationError,
+> {
+	let mut entries = get_all_entries(path.clone()).await?;
+
+	{
+		let span = span!(Level::INFO, "sort_fn");
+		let _enter = span.enter();
+
+		sort_fn(&mut entries);
+	}
+
+	let (tx, rx) = mpsc::channel(128);
+	let tx2 = tx.clone();
+
+	// We wanna process and let the caller use the stream.
+	let task = tokio::spawn(async move {
+		let path = &path;
+		let rules = chain_optional_iter(
+			[IndexerRule::from(no_os_protected())],
+			[(!with_hidden_files).then(|| IndexerRule::from(no_hidden()))],
+		);
+
+		let mut thumbnails_to_generate = vec![];
+		// Generating thumbnails for PDFs is kinda slow, so we're leaving them for last in the batch
+		let mut document_thumbnails_to_generate = vec![];
+		let mut directories = vec![];
+
+		for entry in entries.into_iter() {
+			let (entry_path, name) = match normalize_path(entry.path) {
+				Ok(v) => v,
+				Err(e) => {
+					tx.send(Err(Either::Left(
+						NonIndexedLocationError::from((path, e)).into(),
+					)))
+					.await?;
+					continue;
+				}
+			};
+
+			match IndexerRule::apply_all(&rules, &entry_path).await {
+				Ok(rule_results) => {
+					// No OS Protected and No Hidden rules, must always be from this kind, should panic otherwise
+					if rule_results[&RuleKind::RejectFilesByGlob]
+						.iter()
+						.any(|reject| !reject)
+					{
+						continue;
+					}
+				}
+				Err(e) => {
+					tx.send(Err(Either::Left(e.into()))).await?;
+					continue;
+				}
+			};
+
+			if entry.metadata.is_dir() {
+				directories.push((entry_path, name, entry.metadata));
+			} else {
+				let path = Path::new(&entry_path);
+
+				let Some(name) = path
+					.file_stem()
+					.and_then(|s| s.to_str().map(str::to_string))
+				else {
+					warn!("Failed to extract name from path: {}", &entry_path);
+					continue;
+				};
+
+				let extension = path
+					.extension()
+					.and_then(|s| s.to_str().map(str::to_string))
+					.unwrap_or_default();
+
+				let kind = Extension::resolve_conflicting(&path, false)
+					.await
+					.map(Into::into)
+					.unwrap_or(ObjectKind::Unknown);
+
+				let should_generate_thumbnail = {
+					#[cfg(feature = "ffmpeg")]
+					{
+						matches!(
+							kind,
+							ObjectKind::Image | ObjectKind::Video | ObjectKind::Document
+						)
+					}
+
+					#[cfg(not(feature = "ffmpeg"))]
+					{
+						matches!(kind, ObjectKind::Image | ObjectKind::Document)
+					}
+				};
+
+				let thumbnail_key = if should_generate_thumbnail {
+					if let Ok(cas_id) =
+						generate_cas_id(&path, entry.metadata.len())
+							.await
+							.map_err(|e| {
+								tx.send(Err(Either::Left(
+									NonIndexedLocationError::from((path, e)).into(),
+								)))
+							}) {
+						if kind == ObjectKind::Document {
+							document_thumbnails_to_generate.push(GenerateThumbnailArgs::new(
+								extension.clone(),
+								cas_id.clone(),
+								path.to_path_buf(),
+							));
+						} else {
+							thumbnails_to_generate.push(GenerateThumbnailArgs::new(
+								extension.clone(),
+								cas_id.clone(),
+								path.to_path_buf(),
+							));
+						}
+
+						Some(get_ephemeral_thumb_key(&cas_id))
+					} else {
+						None
+					}
+				} else {
+					None
+				};
+
+				tx.send(Ok(ExplorerItem::NonIndexedPath {
+					has_local_thumbnail: thumbnail_key.is_some(),
+					thumbnail_key,
+					item: NonIndexedPathItem {
+						hidden: path_is_hidden(Path::new(&entry_path), &entry.metadata),
+						path: entry_path,
+						name,
+						extension,
+						kind: kind as i32,
+						is_dir: false,
+						date_created: entry.metadata.created_or_now().into(),
+						date_modified: entry.metadata.modified_or_now().into(),
+						size_in_bytes_bytes: entry.metadata.len().to_be_bytes().to_vec(),
+					},
+				}))
+				.await?;
+			}
+		}
+
+		thumbnails_to_generate.extend(document_thumbnails_to_generate);
+
+		node.thumbnailer
+			.new_ephemeral_thumbnails_batch(BatchToProcess::new(
+				thumbnails_to_generate,
+				false,
+				false,
+			))
+			.await;
+
+		let mut locations = library
+			.db
+			.location()
+			.find_many(vec![location::path::in_vec(
+				directories
+					.iter()
+					.map(|(path, _, _)| path.clone())
+					.collect(),
+			)])
+			.exec()
+			.await?
+			.into_iter()
+			.flat_map(|location| {
+				location
+					.path
+					.clone()
+					.map(|location_path| (location_path, location))
+			})
+			.collect::<HashMap<_, _>>();
+
+		for (directory, name, metadata) in directories {
+			if let Some(location) = locations.remove(&directory) {
+				tx.send(Ok(ExplorerItem::Location {
+					has_local_thumbnail: false,
+					thumbnail_key: None,
+					item: location,
+				}))
+				.await?;
+			} else {
+				tx.send(Ok(ExplorerItem::NonIndexedPath {
+					has_local_thumbnail: false,
+					thumbnail_key: None,
+					item: NonIndexedPathItem {
+						hidden: path_is_hidden(Path::new(&directory), &metadata),
+						path: directory,
+						name,
+						extension: String::new(),
+						kind: ObjectKind::Folder as i32,
+						is_dir: true,
+						date_created: metadata.created_or_now().into(),
+						date_modified: metadata.modified_or_now().into(),
+						size_in_bytes_bytes: metadata.len().to_be_bytes().to_vec(),
+					},
+				}))
+				.await?;
+			}
+		}
+
+		Ok::<_, NonIndexedLocationError>(())
+	});
+
+	tokio::spawn(async move {
+		match task.await {
+			Ok(Ok(())) => {}
+			Ok(Err(e)) => {
+				let _ = tx2.send(Err(Either::Left(e.into()))).await;
+			}
+			Err(e) => error!("error joining tokio task: {}", e),
+		}
+	});
+
+	Ok(ReceiverStream::new(rx))
+}
+
+#[derive(Debug)]
+pub struct Entry {
+	path: PathBuf,
+	name: String,
+	// size_in_bytes: u64,
+	// date_created:
+	metadata: std::fs::Metadata,
+}
+
+impl Entry {
+	pub fn name(&self) -> &str {
+		&self.name
+	}
+
+	pub fn size_in_bytes(&self) -> u64 {
+		self.metadata.len()
+	}
+
+	pub fn date_created(&self) -> DateTime<Utc> {
+		self.metadata.created_or_now().into()
+	}
+
+	pub fn date_modified(&self) -> DateTime<Utc> {
+		self.metadata.modified_or_now().into()
+	}
+}
+
+/// We get all of the FS entries first before we start processing each of them.
+///
+/// On my M1 MacBook Pro this:
+/// - takes 11ms per 10 000 files, and
+/// - consumes 0.16MB of RAM per 10 000 entries.
+///
+/// The reason we collect these all up is so we can apply ordering, and then begin streaming the data to the frontend as it's processed.
+// #[instrument(name = "get_all_entries")]
+pub async fn get_all_entries(path: PathBuf) -> Result<Vec<Entry>, NonIndexedLocationError> {
+	tokio::task::spawn_blocking(move || {
+		let path = &path;
+		let mut dir = std::fs::read_dir(&path).map_err(|e| (path, e))?;
+		let mut entries = Vec::new();
+		while let Some(entry) = dir.next() {
+			let entry = entry.map_err(|e| (path, e))?;
+
+			// We must not keep `entry` around as we will quickly hit the OS limit on open file descriptors
+			entries.push(Entry {
+				path: entry.path(),
+				name: entry
+					.file_name()
+					.to_str()
+					.ok_or_else(|| {
+						(
+							path,
+							io::Error::new(ErrorKind::Other, "error non UTF-8 path"),
+						)
+					})?
+					.to_string(),
+				metadata: entry.metadata().map_err(|e| (path, e))?,
+			});
+		}
+
+		Ok(entries)
+	})
+	.await?
+}
```
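Stripped to its skeleton, the rewritten walker is one pattern: enumerate synchronously on the blocking pool, process on a spawned task, and hand the caller a `ReceiverStream` over a bounded channel so the frontend can render entries while the scan is still running. A stripped-down sketch of that shape with hypothetical names (`produce`), assuming `tokio` and `tokio-stream` as dependencies:

```rust
use tokio::{sync::mpsc, task};
use tokio_stream::{wrappers::ReceiverStream, StreamExt};

/// Shape of the new `walk`: a bounded channel decouples the producer task
/// from however fast the consumer drains the stream (capacity 128 in the
/// real code, 4 here to keep the example small).
async fn produce() -> ReceiverStream<Result<String, String>> {
    // Heavy, synchronous directory scanning belongs on the blocking pool,
    // as `get_all_entries` does above.
    let names = task::spawn_blocking(|| vec!["a.txt", "b.txt", "c.txt"])
        .await
        .expect("blocking task panicked");

    let (tx, rx) = mpsc::channel(4);
    task::spawn(async move {
        for name in names {
            // Each processed entry is sent as soon as it's ready, so the
            // consumer starts rendering before the scan finishes.
            if tx.send(Ok(name.to_string())).await.is_err() {
                break; // receiver dropped, stop early
            }
        }
    });

    ReceiverStream::new(rx)
}

#[tokio::main]
async fn main() {
    let mut stream = produce().await;
    while let Some(item) = stream.next().await {
        println!("{item:?}");
    }
}
```

The bounded channel is the backpressure knob: if the consumer stalls, `send` suspends the producer instead of buffering the whole directory in memory.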
new file: core/src/util/batched_stream.rs (67 lines)
```diff
@@ -0,0 +1,67 @@
+use std::{
+	pin::Pin,
+	task::{Context, Poll},
+};
+
+use futures::Stream;
+use pin_project_lite::pin_project;
+
+// We limit the number of polls to prevent starvation of other tasks.
+// This number is chosen arbitrarily but it is set smaller than `FuturesUnordered` or `StreamUnordered`.
+const MAX_POLLS: usize = 15;
+
+pin_project! {
+	#[project = BatchedStreamProj]
+	pub enum BatchedStream<S> where S: Stream {
+		Active {
+			#[pin]
+			stream: S,
+			batch: Vec<S::Item>,
+		},
+		Complete
+	}
+}
+
+impl<S: Stream> BatchedStream<S> {
+	pub fn new(stream: S) -> Self {
+		Self::Active {
+			stream,
+			batch: Vec::with_capacity(MAX_POLLS),
+		}
+	}
+}
+
+impl<S: Stream + Unpin> Stream for BatchedStream<S> {
+	type Item = Vec<S::Item>;
+
+	fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+		match self.as_mut().project() {
+			BatchedStreamProj::Active { mut stream, batch } => {
+				for _ in 0..MAX_POLLS {
+					match stream.as_mut().poll_next(cx) {
+						Poll::Ready(Some(item)) => batch.push(item),
+						Poll::Ready(None) => {
+							if batch.is_empty() {
+								return Poll::Ready(None);
+							} else {
+								let batch = std::mem::take(batch);
+								self.as_mut().set(BatchedStream::Complete);
+								return Poll::Ready(Some(batch));
+							}
+						}
+						Poll::Pending => break,
+					}
+				}
+
+				if batch.is_empty() {
+					cx.waker().wake_by_ref();
+					Poll::Pending
+				} else {
+					let batch = std::mem::take(batch);
+					Poll::Ready(Some(batch))
+				}
+			}
+			BatchedStreamProj::Complete => Poll::Ready(None),
+		}
+	}
+}
```
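A usage sketch for the adapter above, assuming it runs inside this crate so `crate::util::BatchedStream` is in scope (it is not published on its own). With an always-ready source every poll succeeds immediately, so batches come out at the `MAX_POLLS` cap:

```rust
use futures::StreamExt;

use crate::util::BatchedStream;

async fn demo() {
    // An always-ready source: every poll yields immediately, so batches
    // fill up to the MAX_POLLS cap of 15 before being flushed.
    let items = futures::stream::iter(0..40);

    let mut batched = BatchedStream::new(items);
    while let Some(batch) = batched.next().await {
        println!("{} items in this batch", batch.len()); // 15, 15, 10
    }
}
```

Batch sizes shrink naturally when the inner stream returns `Pending`, since whatever has accumulated so far is flushed immediately rather than waiting for a full batch.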
```diff
@@ -1,4 +1,5 @@
 mod abort_on_drop;
+mod batched_stream;
 #[cfg(debug_assertions)]
 pub mod debug_initializer;
 pub mod http;

@@ -6,9 +7,12 @@ mod infallible_request;
 mod maybe_undefined;
 pub mod mpscrr;
 mod observable;
+mod unsafe_streamed_query;
 pub mod version_manager;
 
 pub use abort_on_drop::*;
+pub use batched_stream::*;
 pub use infallible_request::*;
 pub use maybe_undefined::*;
 pub use observable::*;
+pub use unsafe_streamed_query::*;
```
new file: core/src/util/unsafe_streamed_query.rs (39 lines)
```diff
@@ -0,0 +1,39 @@
+use std::pin::pin;
+
+use async_stream::stream;
+use futures::{Stream, StreamExt};
+use serde::Serialize;
+use specta::{reference::Reference, DataType, Type, TypeMap};
+
+#[derive(Serialize)]
+#[serde(untagged)]
+pub enum Output<T> {
+	Data(T),
+	Complete { __stream_complete: () },
+}
+
+impl<T: Type> Type for Output<T> {
+	fn inline(type_map: &mut TypeMap, generics: &[DataType]) -> DataType {
+		T::inline(type_map, generics)
+	}
+
+	fn definition(type_map: &mut TypeMap) -> DataType {
+		T::definition(type_map)
+	}
+
+	fn reference(type_map: &mut TypeMap, generics: &[DataType]) -> Reference {
+		T::reference(type_map, generics)
+	}
+}
+
+// Marked as unsafe as the types are a lie and this should always be used with `useUnsafeStreamedQuery`
+pub fn unsafe_streamed_query<S: Stream>(stream: S) -> impl Stream<Item = Output<S::Item>> {
+	stream! {
+		let mut stream = pin!(stream);
+		while let Some(v) = stream.next().await {
+			yield Output::Data(v);
+		}
+
+		yield Output::Complete { __stream_complete: () };
+	}
+}
```
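The "types are a lie" part: the hand-written `Type` impl forwards to `T`, so the generated TypeScript bindings describe only the data payload, while `#[serde(untagged)]` means payloads serialize bare and the final marker becomes `{"__stream_complete": null}`, exactly what `useUnsafeStreamedQuery` watches for on the frontend. A small sketch verifying that wire format, re-declaring the enum locally for illustration and assuming `serde`/`serde_json`:

```rust
use serde::Serialize;

// Local re-declaration of the enum above, just to check the wire format.
#[derive(Serialize)]
#[serde(untagged)]
enum Output<T> {
    Data(T),
    Complete { __stream_complete: () },
}

fn main() {
    let data = Output::Data(vec![1, 2, 3]);
    let done: Output<Vec<i32>> = Output::Complete { __stream_complete: () };

    // `untagged` serializes `Data` as the bare payload...
    assert_eq!(serde_json::to_string(&data).unwrap(), "[1,2,3]");
    // ...and the unit field becomes `null`, giving the sentinel object.
    assert_eq!(
        serde_json::to_string(&done).unwrap(),
        r#"{"__stream_complete":null}"#
    );
    println!("wire format checks out");
}
```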
```diff
@@ -1,5 +1,5 @@
-// import { createClient, httpLink } from '@rspc/client';
-// import { createReactHooks } from '@rspc/react';
+// import { createClient, httpLink } from '@oscartbeaumont-sd/rspc-client';
+// import { createReactHooks } from '@oscartbeaumont-sd/rspc-react';
 // import { QueryClient } from '@tanstack/react-query';
 // import type { Procedures } from './bindings';
```
```diff
@@ -1,4 +1,4 @@
-import { inferSubscriptionResult } from '@rspc/client';
+import { inferSubscriptionResult } from '@oscartbeaumont-sd/rspc-client';
 import { useMemo, useState } from 'react';
 import { Procedures, useLibraryMutation, useLibrarySubscription } from '@sd/client';
 import { Button } from '@sd/ui';
```
```diff
@@ -3,13 +3,12 @@ import * as Dialog from '@radix-ui/react-dialog';
 import { iconNames } from '@sd/assets/util';
 import clsx from 'clsx';
 import { memo, Suspense, useDeferredValue, useMemo } from 'react';
 import { useLocation } from 'react-router';
 import {
 	ExplorerItem,
 	getExplorerItemData,
-	useCache,
 	useLibraryQuery,
-	useNodes,
+	useLibraryContext,
+	useNormalisedCache,
+	useUnsafeStreamedQuery,
 	type EphemeralPathOrder
 } from '@sd/client';
 import { Button, Tooltip } from '@sd/ui';
```
```diff
@@ -176,23 +175,36 @@ const EphemeralExplorer = memo((props: { args: PathParams }) => {
 
 	const settingsSnapshot = explorerSettings.useSettingsSnapshot();
 
-	const query = useLibraryQuery(
+	const libraryCtx = useLibraryContext();
+	const cache = useNormalisedCache();
+	const query = useUnsafeStreamedQuery(
 		[
 			'search.ephemeralPaths',
 			{
-				path: path ?? (os === 'windows' ? 'C:\\' : '/'),
-				withHiddenFiles: settingsSnapshot.showHiddenFiles,
-				order: settingsSnapshot.order
+				library_id: libraryCtx.library.uuid,
+				arg: {
+					path: path ?? (os === 'windows' ? 'C:\\' : '/'),
+					withHiddenFiles: settingsSnapshot.showHiddenFiles,
+					order: settingsSnapshot.order
+				}
 			}
 		],
 		{
 			enabled: path != null,
 			suspense: true,
-			onSuccess: () => getExplorerStore().resetNewThumbnails()
+			onSuccess: () => getExplorerStore().resetNewThumbnails(),
+			onBatch: (item) => {
+				cache.withNodes(item.nodes);
+			}
 		}
 	);
-	useNodes(query.data?.nodes);
-	const entries = useCache(query.data?.entries);
+
+	const entries = useMemo(() => {
+		return cache.withCache(
+			query.data?.flatMap((item) => item.entries) ||
+				query.streaming.flatMap((item) => item.entries)
+		);
+	}, [cache, query.streaming, query.data]);
 
 	const items = useMemo(() => {
 		if (!entries) return [];
```
```diff
@@ -11,8 +11,8 @@
 		"typecheck": "tsc -b"
 	},
 	"dependencies": {
-		"@rspc/client": "=0.0.0-main-799eec5d",
-		"@rspc/react": "=0.0.0-main-799eec5d",
+		"@oscartbeaumont-sd/rspc-client": "=0.0.0-main-dc31e5b2",
+		"@oscartbeaumont-sd/rspc-react": "=0.0.0-main-dc31e5b2",
 		"@tanstack/react-query": "^4.36.1",
 		"@zxcvbn-ts/core": "^3.0.4",
 		"@zxcvbn-ts/language-common": "^3.0.4",
```
```diff
@@ -12,6 +12,7 @@ import {
 import { proxy, snapshot, subscribe } from 'valtio';
 
 import { type CacheNode } from './core';
+import { getPermits } from './rspc-cursed';
 
 declare global {
 	interface Window {
```
```diff
@@ -65,6 +66,12 @@ export function CacheProvider({ cache, children }: PropsWithChildren<{ cache: No
 	const queryClient = useQueryClient();
 	useEffect(() => {
 		const interval = setInterval(() => {
+			const permits = getPermits();
+			if (permits !== 0) {
+				console.warn(`Not safe to cleanup cache. ${permits} permits currently held.`);
+				return;
+			}
+
 			const requiredKeys = new StableSet<[string, string]>();
 			for (const query of queryClient.getQueryCache().getAll()) {
 				if (query.state.data) scanDataForKeys(cache.cache, requiredKeys, query.state.data);
```
```diff
@@ -36,7 +36,6 @@ export type Procedures = {
 		{ key: "notifications.dismissAll", input: never, result: null } |
 		{ key: "notifications.get", input: never, result: Notification[] } |
 		{ key: "preferences.get", input: LibraryArgs<null>, result: LibraryPreferences } |
-		{ key: "search.ephemeralPaths", input: LibraryArgs<EphemeralPathSearchArgs>, result: EphemeralPathsResult } |
 		{ key: "search.objects", input: LibraryArgs<ObjectSearchArgs>, result: SearchData<ExplorerItem> } |
 		{ key: "search.objectsCount", input: LibraryArgs<{ filters?: SearchFilterArgs[] }>, result: number } |
 		{ key: "search.paths", input: LibraryArgs<FilePathSearchArgs>, result: SearchData<ExplorerItem> } |

@@ -124,6 +123,7 @@ export type Procedures = {
 		{ key: "locations.quickRescan", input: LibraryArgs<LightScanArgs>, result: null } |
 		{ key: "notifications.listen", input: never, result: Notification } |
 		{ key: "p2p.events", input: never, result: P2PEvent } |
+		{ key: "search.ephemeralPaths", input: LibraryArgs<EphemeralPathSearchArgs>, result: EphemeralPathsResultItem } |
 		{ key: "sync.newMessage", input: LibraryArgs<null>, result: null }
 	};
```
```diff
@@ -202,7 +202,7 @@ export type EphemeralPathOrder = { field: "name"; value: SortOrder } | { field:
 
 export type EphemeralPathSearchArgs = { path: string; withHiddenFiles: boolean; order?: EphemeralPathOrder | null }
 
-export type EphemeralPathsResult = { entries: Reference<ExplorerItem>[]; errors: Error[]; nodes: CacheNode[] }
+export type EphemeralPathsResultItem = { entries: Reference<ExplorerItem>[]; errors: Error[]; nodes: CacheNode[] }
 
 export type EphemeralRenameFileArgs = { kind: EphemeralRenameKind }
```
```diff
@@ -1,4 +1,4 @@
-import { Link } from '@rspc/client/v2';
+import { Link } from '@oscartbeaumont-sd/rspc-client/v2';
 
 declare global {
 	// eslint-disable-next-line

@@ -23,6 +23,7 @@ declare global {
 export * from './hooks';
 export * from './stores';
 export * from './rspc';
+export * from './rspc-cursed';
 export * from './core';
 export * from './utils';
 export * from './lib';
```
new file: packages/client/src/rspc-cursed.ts (73 lines)
```diff
@@ -0,0 +1,73 @@
+import { _inferProcedureHandlerInput, inferProcedureResult } from '@oscartbeaumont-sd/rspc-client';
+import { useQuery, UseQueryOptions, UseQueryResult } from '@tanstack/react-query';
+import { useRef } from 'react';
+
+import { Procedures } from './core';
+import { useRspcContext } from './rspc';
+
+// If permits is > 0 then we're currently streaming data.
+// This means it would be unsafe to cleanup the normalised cache.
+let permits = 0; // A Mutex in JS, lmao
+
+export const getPermits = () => permits;
+
+if (import.meta.hot) {
+	import.meta.hot.dispose(() => (permits = 0)); // Reset the mutex on HMR
+}
+
+// A query where the data is streamed in.
+// Also basically a subscription with support for React Suspense and proper loading states, invalidation, etc.
+// Be aware this lacks proper type safety and is an absolutely cursed abomination of code.
+//
+// It requires you to use `unsafe_streamed_query` on the backend and will not type error if you don't, hence unsafe.
+// It also requires special modification to the invalidation system to work correctly.
+//
+// Be aware `.streaming` will be emptied on a refetch so you should only use it when `.data` is not available.
+export function useUnsafeStreamedQuery<
+	K extends Procedures['subscriptions']['key'] & string,
+	TData = inferProcedureResult<Procedures, 'subscriptions', K>
+>(
+	keyAndInput: [K, ..._inferProcedureHandlerInput<Procedures, 'subscriptions', K>],
+	opts: UseQueryOptions<TData[]> & {
+		onBatch(item: TData): void;
+	}
+): UseQueryResult<TData[], unknown> & { streaming: TData[] } {
+	const data = useRef<TData[]>([]);
+	const rspc = useRspcContext();
+
+	// TODO: The normalised cache might cleanup nodes for this query before it's finished streaming. We need a global mutex on the cleanup routine.
+
+	const query = useQuery({
+		queryKey: keyAndInput,
+		queryFn: ({ signal }) =>
+			new Promise((resolve) => {
+				permits += 1;
+
+				try {
+					data.current = [];
+					const shutdown = rspc.client.addSubscription(keyAndInput as any, {
+						onData: (item) => {
+							if (item === null || item === undefined) return;
+
+							if (typeof item === 'object' && '__stream_complete' in item) {
+								resolve(data.current as any);
+								return;
+							}
+
+							opts.onBatch(item as any);
+							data.current.push(item as any);
+						}
+					});
+					signal?.addEventListener('abort', () => shutdown());
+				} finally {
+					permits -= 1;
+				}
+			}),
+		...opts
+	});
+
+	return {
+		...query,
+		streaming: data.current
+	};
+}
```
```diff
@@ -1,6 +1,6 @@
-import { ProcedureDef } from '@rspc/client';
-import { AlphaRSPCError, initRspc } from '@rspc/client/v2';
-import { Context, createReactQueryHooks } from '@rspc/react/v2';
+import { ProcedureDef } from '@oscartbeaumont-sd/rspc-client';
+import { AlphaRSPCError, initRspc } from '@oscartbeaumont-sd/rspc-client/v2';
+import { Context, createReactQueryHooks } from '@oscartbeaumont-sd/rspc-react/v2';
 import { QueryClient } from '@tanstack/react-query';
 import { createContext, PropsWithChildren, useContext } from 'react';
 import { match, P } from 'ts-pattern';

@@ -42,9 +42,11 @@ export type LibraryProceduresDef = {
 	subscriptions: StripLibraryArgsFromInput<LibraryProcedures<'subscriptions'>, true>;
 };
 
-const context = createContext<Context<LibraryProceduresDef>>(undefined!);
+const context = createContext<Context<Procedures>>(undefined!);
+const context2 = createContext<Context<LibraryProceduresDef>>(undefined!);
 
-export const useRspcLibraryContext = () => useContext(context);
+export const useRspcContext = () => useContext(context);
+export const useRspcLibraryContext = () => useContext(context2);
 
 export const rspc = initRspc<Procedures>({
 	links: globalThis.rspcLinks

@@ -56,7 +58,7 @@ export const rspc2 = initRspc<Procedures>({
 export const nonLibraryClient = rspc.dangerouslyHookIntoInternals<NonLibraryProceduresDef>();
 // @ts-expect-error // TODO: Fix
 const nonLibraryHooks = createReactQueryHooks<NonLibraryProceduresDef>(nonLibraryClient, {
-	// context // TODO: Shared context
+	context // TODO: Shared context
 });

@@ -69,7 +71,7 @@ export const libraryClient = rspc2.dangerouslyHookIntoInternals<LibraryProcedure
 });
 // @ts-expect-error // TODO: idk
 const libraryHooks = createReactQueryHooks<LibraryProceduresDef>(libraryClient, {
-	context
+	context: context2
 });
 
 // TODO: Allow both hooks to use a unified context -> Right now they override each others local state
```
```diff
@@ -1,4 +1,4 @@
-import { RSPCError } from '@rspc/client';
+import { RSPCError } from '@oscartbeaumont-sd/rspc-client';
 import { proxy, useSnapshot } from 'valtio';
 
 import { nonLibraryClient } from '../rspc';
```
pnpm-lock.yaml (generated; binary diff not shown)