[ENG-288, ENG-601] Media data (image) extraction, thumbnail orientation fix (#1099)

* basic layout

* lock

* add codec to image

* very messy wip

* rm that

* lock and toml

* working perfect exif extraction

* formatting

* migration and formatting

* mostly working

* fix

* set date created properly

* fix tsc

* working media data creation

* fix bad main merge? sorry brendan

* schema, migrations, bindings

* working exif data extraction

* why won't it work

* update migrations

* fix bad merge

* final cleanup

* cleanup migrations

* remove test (that was purely used for testing my code)

* working media data pulling, correct thumbnail orientation

* slightly optimise thumbnail rotation

* rename location to prevent specta clashes

* further improvements (location parsing is still broken)

* fix coordinate parsing i think

* rspc add some todos regarding final cleanup

* further thoughts

* major upgrades

* Some improved handling of errors and async calls

* accordion component

* heavily wip flash refactor

* fix builds all because of a (terrible) merge/move

* annoying missing newline

* i really hate exif

* remove dead code

* further flash progress :D

* docs(media-data): clarification

* minor cleanup

* cleanup and some async removal

* fix location parsing

* remove async (will do proper impl for async eventually) and major cleanup

* big W

* clippy and `FlashMode::Unknown` instead of `Invalid`

* add `NIKON` gps ref support

* comments about gps refs

* commit the submodule -_-

* major cleanup & composite image support

* remove old test image

* major cleanup and easier to use API

* remove old consts

* move `ExifReader` to dedicated module

* Media Data Extractor job and shallow job

* Extracting media data on watcher

* report no exif data on file gracefully

* cleanup errors and doctests

* Merging thumbnailer and media data extractor

* Job name and some strings in UI

* remove reliance on `sd-prisma` within the media data crate

* rename query to be more specific

* custom serializer for `MediaTime`

* tweak to format utc -> naive correctly

* generate migrations

* comment out duration in mobile

* delete test-assets folder

* all optional fields

* fix migrations again

* make requested name changes

* make further requested changes

* remove erroneous files from another wip branch

* updates procedures

* use strings where appropriate

* regen pnpm-lock

* add base layouts for video and audio media data

* use appropriate data types in schema and add audio media data framework

* make requested changes

* general cleanup and renaming of enum

* cleanup media data api

* rename media metadata type

* finishing touches

---------

Co-authored-by: Ericson Soares <ericson.ds999@gmail.com>
Co-authored-by: ameer2468 <33054370+ameer2468@users.noreply.github.com>
Co-authored-by: Jamie Pine <32987599+jamiepine@users.noreply.github.com>
Co-authored-by: Oscar Beaumont <oscar@otbeaumont.me>
Co-authored-by: Utku <74243531+utkubakir@users.noreply.github.com>
Co-authored-by: Brendan Allan <brendonovich@outlook.com>
This commit is contained in:
jake
2023-08-29 18:02:55 +01:00
committed by GitHub
parent dfceb3c307
commit cd339a7812
65 changed files with 2913 additions and 987 deletions

BIN
Cargo.lock generated
View File

Binary file not shown.

View File

@@ -99,13 +99,13 @@ const FileInfoModal = forwardRef<ModalRef, FileInfoModalProps>((props, ref) => {
value={`${byteSize(filePathData?.size_in_bytes_bytes)}`}
/>
{/* Duration */}
{fullObjectData.data?.media_data?.duration_seconds && (
{/* {fullObjectData.data?.media_data?.duration && (
<MetaItem
title="Duration"
value={fullObjectData.data.media_data.duration_seconds}
value={fullObjectData.data.media_data.duration}
icon={Clock}
/>
)}
)} */}
{/* Created */}
<MetaItem
icon={Clock}

View File

@@ -1,21 +0,0 @@
import { zodResolver } from '@hookform/resolvers/zod';
import { UseFormProps, useForm } from 'react-hook-form';
import { z } from 'zod';
interface UseZodFormProps<S extends z.ZodSchema>
extends Exclude<UseFormProps<z.infer<S>>, 'resolver'> {
schema?: S;
}
export const useZodForm = <S extends z.ZodSchema = z.ZodObject<Record<string, never>>>(
props?: UseZodFormProps<S>
) => {
const { schema, ...formProps } = props ?? {};
return useForm({
...formProps,
resolver: zodResolver(schema || z.object({}))
});
};
export { z } from 'zod';

View File

@@ -18,6 +18,8 @@ location-watcher = ["dep:notify"]
heif = ["dep:sd-heif"]
[dependencies]
sd-media-metadata = { path = "../crates/media-metadata" }
sd-prisma = { path = "../crates/prisma" }
sd-ffmpeg = { path = "../crates/ffmpeg", optional = true }
sd-crypto = { path = "../crates/crypto", features = [
"rspc",
@@ -29,9 +31,7 @@ sd-heif = { path = "../crates/heif", optional = true }
sd-file-ext = { path = "../crates/file-ext" }
sd-sync = { path = "../crates/sync" }
sd-p2p = { path = "../crates/p2p", features = ["specta", "serde"] }
sd-prisma = { path = "../crates/prisma" }
sd-utils = { path = "../crates/utils" }
sd-core-sync = { path = "./crates/sync" }
rspc = { workspace = true, features = [
@@ -52,8 +52,9 @@ tokio = { workspace = true, features = [
"time",
"process",
] }
kamadak-exif = "0.5.5"
base64 = "0.21.2"
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4.25", features = ["serde"] }
serde_json = { workspace = true }
@@ -71,9 +72,7 @@ async-trait = "^0.1.68"
image = "0.24.6"
webp = "0.2.2"
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = [
"env-filter",
] }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
async-stream = "0.3.5"
once_cell = "1.17.2"
ctor = "0.1.26"

View File

@@ -0,0 +1,152 @@
/*
Warnings:
- You are about to drop the column `capture_device_make` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `capture_device_model` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `capture_device_software` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `codecs` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `duration_seconds` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `fps` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `latitude` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `longitude` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `pixel_height` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `pixel_width` on the `media_data` table. All the data in the column will be lost.
- You are about to drop the column `streams` on the `media_data` table. All the data in the column will be lost.
- Added the required column `object_id` to the `media_data` table without a default value. This is not possible if the table is not empty.
*/
-- RedefineTables
PRAGMA foreign_keys=OFF;
CREATE TABLE "new_label_on_object" (
"date_created" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"label_id" INTEGER NOT NULL,
"object_id" INTEGER NOT NULL,
PRIMARY KEY ("label_id", "object_id"),
CONSTRAINT "label_on_object_label_id_fkey" FOREIGN KEY ("label_id") REFERENCES "label" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT "label_on_object_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_label_on_object" ("date_created", "label_id", "object_id") SELECT "date_created", "label_id", "object_id" FROM "label_on_object";
DROP TABLE "label_on_object";
ALTER TABLE "new_label_on_object" RENAME TO "label_on_object";
CREATE TABLE "new_tag_on_object" (
"tag_id" INTEGER NOT NULL,
"object_id" INTEGER NOT NULL,
PRIMARY KEY ("tag_id", "object_id"),
CONSTRAINT "tag_on_object_tag_id_fkey" FOREIGN KEY ("tag_id") REFERENCES "tag" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT "tag_on_object_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_tag_on_object" ("object_id", "tag_id") SELECT "object_id", "tag_id" FROM "tag_on_object";
DROP TABLE "tag_on_object";
ALTER TABLE "new_tag_on_object" RENAME TO "tag_on_object";
CREATE TABLE "new_file_path" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"pub_id" BLOB NOT NULL,
"is_dir" BOOLEAN,
"cas_id" TEXT,
"integrity_checksum" TEXT,
"location_id" INTEGER,
"materialized_path" TEXT,
"name" TEXT,
"extension" TEXT,
"size_in_bytes" TEXT,
"size_in_bytes_bytes" BLOB,
"inode" BLOB,
"device" BLOB,
"object_id" INTEGER,
"key_id" INTEGER,
"date_created" DATETIME,
"date_modified" DATETIME,
"date_indexed" DATETIME,
CONSTRAINT "file_path_location_id_fkey" FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT "file_path_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_file_path" ("cas_id", "date_created", "date_indexed", "date_modified", "device", "extension", "id", "inode", "integrity_checksum", "is_dir", "key_id", "location_id", "materialized_path", "name", "object_id", "pub_id", "size_in_bytes", "size_in_bytes_bytes") SELECT "cas_id", "date_created", "date_indexed", "date_modified", "device", "extension", "id", "inode", "integrity_checksum", "is_dir", "key_id", "location_id", "materialized_path", "name", "object_id", "pub_id", "size_in_bytes", "size_in_bytes_bytes" FROM "file_path";
DROP TABLE "file_path";
ALTER TABLE "new_file_path" RENAME TO "file_path";
CREATE UNIQUE INDEX "file_path_pub_id_key" ON "file_path"("pub_id");
CREATE INDEX "file_path_location_id_idx" ON "file_path"("location_id");
CREATE INDEX "file_path_location_id_materialized_path_idx" ON "file_path"("location_id", "materialized_path");
CREATE UNIQUE INDEX "file_path_location_id_materialized_path_name_extension_key" ON "file_path"("location_id", "materialized_path", "name", "extension");
CREATE UNIQUE INDEX "file_path_location_id_inode_device_key" ON "file_path"("location_id", "inode", "device");
CREATE TABLE "new_location" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"pub_id" BLOB NOT NULL,
"name" TEXT,
"path" TEXT,
"total_capacity" INTEGER,
"available_capacity" INTEGER,
"is_archived" BOOLEAN,
"generate_preview_media" BOOLEAN,
"sync_preview_media" BOOLEAN,
"hidden" BOOLEAN,
"date_created" DATETIME,
"instance_id" INTEGER,
CONSTRAINT "location_instance_id_fkey" FOREIGN KEY ("instance_id") REFERENCES "instance" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_location" ("available_capacity", "date_created", "generate_preview_media", "hidden", "id", "instance_id", "is_archived", "name", "path", "pub_id", "sync_preview_media", "total_capacity") SELECT "available_capacity", "date_created", "generate_preview_media", "hidden", "id", "instance_id", "is_archived", "name", "path", "pub_id", "sync_preview_media", "total_capacity" FROM "location";
DROP TABLE "location";
ALTER TABLE "new_location" RENAME TO "location";
CREATE UNIQUE INDEX "location_pub_id_key" ON "location"("pub_id");
CREATE TABLE "new_job" (
"id" BLOB NOT NULL PRIMARY KEY,
"name" TEXT,
"action" TEXT,
"status" INTEGER,
"errors_text" TEXT,
"data" BLOB,
"metadata" BLOB,
"parent_id" BLOB,
"task_count" INTEGER,
"completed_task_count" INTEGER,
"date_estimated_completion" DATETIME,
"date_created" DATETIME,
"date_started" DATETIME,
"date_completed" DATETIME,
CONSTRAINT "job_parent_id_fkey" FOREIGN KEY ("parent_id") REFERENCES "job" ("id") ON DELETE SET NULL ON UPDATE CASCADE
);
INSERT INTO "new_job" ("action", "completed_task_count", "data", "date_completed", "date_created", "date_estimated_completion", "date_started", "errors_text", "id", "metadata", "name", "parent_id", "status", "task_count") SELECT "action", "completed_task_count", "data", "date_completed", "date_created", "date_estimated_completion", "date_started", "errors_text", "id", "metadata", "name", "parent_id", "status", "task_count" FROM "job";
DROP TABLE "job";
ALTER TABLE "new_job" RENAME TO "job";
CREATE TABLE "new_indexer_rule_in_location" (
"location_id" INTEGER NOT NULL,
"indexer_rule_id" INTEGER NOT NULL,
PRIMARY KEY ("location_id", "indexer_rule_id"),
CONSTRAINT "indexer_rule_in_location_location_id_fkey" FOREIGN KEY ("location_id") REFERENCES "location" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT "indexer_rule_in_location_indexer_rule_id_fkey" FOREIGN KEY ("indexer_rule_id") REFERENCES "indexer_rule" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_indexer_rule_in_location" ("indexer_rule_id", "location_id") SELECT "indexer_rule_id", "location_id" FROM "indexer_rule_in_location";
DROP TABLE "indexer_rule_in_location";
ALTER TABLE "new_indexer_rule_in_location" RENAME TO "indexer_rule_in_location";
CREATE TABLE "new_media_data" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"dimensions" BLOB,
"media_date" BLOB,
"media_location" BLOB,
"camera_data" BLOB,
"artist" TEXT,
"description" TEXT,
"copyright" TEXT,
"exif_version" TEXT,
"object_id" INTEGER NOT NULL,
CONSTRAINT "media_data_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE CASCADE ON UPDATE CASCADE
);
INSERT INTO "new_media_data" ("id") SELECT "id" FROM "media_data";
DROP TABLE "media_data";
ALTER TABLE "new_media_data" RENAME TO "media_data";
CREATE UNIQUE INDEX "media_data_object_id_key" ON "media_data"("object_id");
CREATE TABLE "new_object_in_space" (
"space_id" INTEGER NOT NULL,
"object_id" INTEGER NOT NULL,
PRIMARY KEY ("space_id", "object_id"),
CONSTRAINT "object_in_space_space_id_fkey" FOREIGN KEY ("space_id") REFERENCES "space" ("id") ON DELETE RESTRICT ON UPDATE CASCADE,
CONSTRAINT "object_in_space_object_id_fkey" FOREIGN KEY ("object_id") REFERENCES "object" ("id") ON DELETE RESTRICT ON UPDATE CASCADE
);
INSERT INTO "new_object_in_space" ("object_id", "space_id") SELECT "object_id", "space_id" FROM "object_in_space";
DROP TABLE "object_in_space";
ALTER TABLE "new_object_in_space" RENAME TO "object_in_space";
PRAGMA foreign_key_check;
PRAGMA foreign_keys=ON;

View File

@@ -293,20 +293,25 @@ model Object {
// }
model MediaData {
id Int @id
pixel_width Int?
pixel_height Int?
longitude Float?
latitude Float?
fps Int?
capture_device_make String? // eg: "Apple"
capture_device_model String? // eg: "iPhone 12"
capture_device_software String? // eg: "12.1.1"
duration_seconds Int?
codecs String? // eg: "h264,acc"
streams Int?
id Int @id @default(autoincrement())
object Object? @relation(fields: [id], references: [id], onDelete: Cascade)
dimensions Bytes?
media_date Bytes?
media_location Bytes?
camera_data Bytes?
artist String?
description String?
copyright String?
exif_version String?
// video-specific
// duration Int?
// fps Int?
// streams Int?
// codecs String? // eg: "h264,acc"
object_id Int @unique
object Object @relation(fields: [object_id], references: [id], onDelete: Cascade)
@@map("media_data")
}

View File

@@ -9,9 +9,12 @@ use crate::{
},
find_location, LocationError,
},
object::fs::{
copy::FileCopierJobInit, cut::FileCutterJobInit, delete::FileDeleterJobInit,
erase::FileEraserJobInit,
object::{
fs::{
copy::FileCopierJobInit, cut::FileCutterJobInit, delete::FileDeleterJobInit,
erase::FileEraserJobInit,
},
media::media_data_image_from_prisma_data,
},
prisma::{file_path, location, object},
util::{db::maybe_missing, error::FileIOError},
@@ -23,6 +26,8 @@ use chrono::Utc;
use futures::future::join_all;
use regex::Regex;
use rspc::{alpha::AlphaRouter, ErrorCode};
use sd_file_ext::kind::ObjectKind;
use sd_media_metadata::MediaMetadata;
use serde::Deserialize;
use specta::Type;
use tokio::{fs, io};
@@ -43,11 +48,36 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.db
.object()
.find_unique(object::id::equals(args.id))
.include(object::include!({ file_paths media_data }))
.include(object::include!({ file_paths }))
.exec()
.await?)
})
})
.procedure("getMediaData", {
R.with2(library())
.query(|(_, library), args: object::id::Type| async move {
library
.db
.object()
.find_unique(object::id::equals(args))
.select(object::select!({ id kind media_data }))
.exec()
.await?
.and_then(|obj| {
Some(match obj.kind {
Some(v) if v == ObjectKind::Image as i32 => {
MediaMetadata::Image(Box::new(
media_data_image_from_prisma_data(obj.media_data?).ok()?,
))
}
_ => return None, // TODO(brxken128): audio and video
})
})
.ok_or_else(|| {
rspc::Error::new(ErrorCode::NotFound, "Object not found".to_string())
})
})
})
.procedure("getPath", {
R.with2(library())
.query(|(_, library), id: i32| async move {

View File

@@ -3,8 +3,7 @@ use crate::{
job::{job_without_data, Job, JobReport, JobStatus, Jobs},
location::{find_location, LocationError},
object::{
file_identifier::file_identifier_job::FileIdentifierJobInit,
preview::thumbnailer_job::ThumbnailerJobInit,
file_identifier::file_identifier_job::FileIdentifierJobInit, media::MediaProcessorJobInit,
validation::validator_job::ObjectValidatorJobInit,
},
prisma::{job, location, SortOrder},
@@ -236,7 +235,7 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
return Err(LocationError::IdNotFound(args.id).into());
};
Job::new(ThumbnailerJobInit {
Job::new(MediaProcessorJobInit {
location,
sub_path: Some(args.path),
})

View File

@@ -8,7 +8,7 @@ use crate::{
file_path_helper::{check_file_path_exists, IsolatedFilePathData},
non_indexed, LocationError,
},
object::preview::get_thumb_key,
object::media::thumbnail::get_thumb_key,
prisma::{self, file_path, location, object, tag, tag_on_object, PrismaClient},
};

View File

@@ -2,7 +2,7 @@ use crate::{
location::{indexer::IndexerError, LocationError},
object::{
file_identifier::FileIdentifierJobError, fs::error::FileSystemJobsError,
preview::ThumbnailerError, validation::ValidatorError,
media::media_processor::MediaProcessorError, validation::ValidatorError,
},
util::{db::MissingFieldError, error::FileIOError},
};
@@ -43,20 +43,6 @@ pub enum JobError {
FileIO(#[from] FileIOError),
#[error("Location error: {0}")]
Location(#[from] LocationError),
// Specific job errors
#[error(transparent)]
Indexer(#[from] IndexerError),
#[error(transparent)]
ThumbnailError(#[from] ThumbnailerError),
#[error(transparent)]
IdentifierError(#[from] FileIdentifierJobError),
#[error(transparent)]
Validator(#[from] ValidatorError),
#[error(transparent)]
FileSystemJobsError(#[from] FileSystemJobsError),
#[error(transparent)]
CryptoError(#[from] CryptoError),
#[error("missing-field: {0}")]
MissingField(#[from] MissingFieldError),
#[error("item of type '{0}' with id '{1}' is missing from the db")]
@@ -64,6 +50,20 @@ pub enum JobError {
#[error("Thumbnail skipped")]
ThumbnailSkipped,
// Specific job errors
#[error(transparent)]
Indexer(#[from] IndexerError),
#[error(transparent)]
MediaProcessor(#[from] MediaProcessorError),
#[error(transparent)]
FileIdentifier(#[from] FileIdentifierJobError),
#[error(transparent)]
Validator(#[from] ValidatorError),
#[error(transparent)]
FileSystemJobsError(#[from] FileSystemJobsError),
#[error(transparent)]
CryptoError(#[from] CryptoError),
// Not errors
#[error("job had a early finish: <name='{name}', reason='{reason}'>")]
EarlyFinish { name: String, reason: String },

View File

@@ -8,7 +8,7 @@ use crate::{
copy::FileCopierJobInit, cut::FileCutterJobInit, delete::FileDeleterJobInit,
erase::FileEraserJobInit,
},
preview::thumbnailer_job::ThumbnailerJobInit,
media::media_processor::MediaProcessorJobInit,
validation::validator_job::ObjectValidatorJobInit,
},
prisma::job,
@@ -388,7 +388,7 @@ fn initialize_resumable_job(
Err(JobError::UnknownJobName(job_report.id, job_report.name))
},
jobs = [
ThumbnailerJobInit,
MediaProcessorJobInit,
IndexerJobInit,
FileIdentifierJobInit,
ObjectValidatorJobInit,

View File

@@ -30,9 +30,15 @@ pub type JobMetadata = Option<serde_json::Value>;
#[derive(Debug, Default)]
pub struct JobRunErrors(pub Vec<String>);
impl From<Vec<String>> for JobRunErrors {
fn from(errors: Vec<String>) -> Self {
Self(errors)
impl<I: IntoIterator<Item = String>> From<I> for JobRunErrors {
fn from(errors: I) -> Self {
Self(errors.into_iter().collect())
}
}
impl fmt::Display for JobRunErrors {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.join("\n"))
}
}
@@ -63,6 +69,7 @@ pub trait StatefulJob:
/// The name of the job is a unique human readable identifier for the job.
const NAME: &'static str;
const IS_BACKGROUND: bool = false;
const IS_BATCHED: bool = false;
/// initialize the steps for the job
async fn init(
@@ -290,10 +297,13 @@ impl<RunMetadata, Step> From<(RunMetadata, Vec<Step>)> for JobInitOutput<RunMeta
}
}
impl<Step> From<Vec<Step>> for JobInitOutput<(), Step> {
impl<RunMetadata, Step> From<Vec<Step>> for JobInitOutput<RunMetadata, Step>
where
RunMetadata: Default,
{
fn from(steps: Vec<Step>) -> Self {
Self {
run_metadata: (),
run_metadata: RunMetadata::default(),
steps: VecDeque::from(steps),
errors: Default::default(),
}
@@ -365,6 +375,18 @@ impl<Step, RunMetadata: JobRunMetadata> From<(Vec<Step>, RunMetadata)>
}
}
impl<Step, RunMetadata: JobRunMetadata> From<(RunMetadata, JobRunErrors)>
for JobStepOutput<Step, RunMetadata>
{
fn from((more_metadata, errors): (RunMetadata, JobRunErrors)) -> Self {
Self {
maybe_more_steps: None,
maybe_more_metadata: Some(more_metadata),
errors,
}
}
}
impl<Step, RunMetadata: JobRunMetadata> From<(Vec<Step>, RunMetadata, JobRunErrors)>
for JobStepOutput<Step, RunMetadata>
{
@@ -456,11 +478,9 @@ impl<SJob: StatefulJob> DynJob for Job<SJob> {
let res = stateful_job.init(&inner_ctx, &mut new_data).await;
if let Ok(res) = res.as_ref() {
inner_ctx.progress(vec![JobReportUpdate::TaskCount(res.steps.len())]);
}
if let Ok(res) = res.as_ref() {
inner_ctx.progress(vec![JobReportUpdate::TaskCount(res.steps.len())]);
if !<SJob as StatefulJob>::IS_BATCHED {
inner_ctx.progress(vec![JobReportUpdate::TaskCount(res.steps.len())]);
}
}
(new_data, res)
@@ -801,7 +821,9 @@ impl<SJob: StatefulJob> DynJob for Job<SJob> {
run_metadata.update(more_metadata);
}
ctx.progress(events);
if !<SJob as StatefulJob>::IS_BATCHED {
ctx.progress(events);
}
if !new_errors.is_empty() {
warn!("Job<id='{job_id}', name='{job_name}'> had a step with errors");

View File

@@ -5,7 +5,7 @@ use crate::{
},
location::file_path_helper::{file_path_to_full_path, IsolatedFilePathData},
notifications,
object::{orphan_remover::OrphanRemoverActor, preview::get_thumbnail_path},
object::{media::thumbnail::get_thumbnail_path, orphan_remover::OrphanRemoverActor},
prisma::{file_path, location, PrismaClient},
sync,
util::{db::maybe_missing, error::FileIOError},

View File

@@ -14,7 +14,7 @@ use regex::RegexSet;
use serde::{Deserialize, Serialize};
use super::{
file_path_for_file_identifier, file_path_for_object_validator, file_path_for_thumbnailer,
file_path_for_file_identifier, file_path_for_media_processor, file_path_for_object_validator,
file_path_to_full_path, file_path_to_handle_custom_uri, file_path_to_isolate,
file_path_to_isolate_with_id, file_path_walker, file_path_with_object, FilePathError,
};
@@ -454,7 +454,7 @@ impl_from_db!(
impl_from_db_without_location_id!(
file_path_for_file_identifier,
file_path_to_full_path,
file_path_for_thumbnailer,
file_path_for_media_processor,
file_path_for_object_validator,
file_path_to_handle_custom_uri
);

View File

@@ -45,12 +45,14 @@ file_path::select!(file_path_for_object_validator {
extension
integrity_checksum
});
file_path::select!(file_path_for_thumbnailer {
file_path::select!(file_path_for_media_processor {
id
materialized_path
is_dir
name
extension
cas_id
object_id
});
file_path::select!(file_path_to_isolate {
location_id

View File

@@ -134,6 +134,7 @@ impl StatefulJob for IndexerJobInit {
type RunMetadata = IndexerJobRunMetadata;
const NAME: &'static str = "indexer";
const IS_BATCHED: bool = true;
/// Creates a vector of valid path buffers from a directory, chunked into batches of `BATCH_SIZE`.
async fn init(

View File

@@ -15,7 +15,13 @@ use crate::{
scan_location_sub_path,
},
object::{
file_identifier::FileMetadata, preview::get_thumbnail_path, validation::hash::file_checksum,
file_identifier::FileMetadata,
media::{
media_data_extractor::{can_extract_media_data_for_image, extract_media_data},
media_data_image_to_query,
thumbnail::get_thumbnail_path,
},
validation::hash::file_checksum,
},
prisma::{file_path, location, object},
util::{
@@ -36,9 +42,12 @@ use std::{
ffi::OsStr,
fs::Metadata,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use sd_file_ext::{extensions::ImageExtension, kind::ObjectKind};
use chrono::{DateTime, Local, Utc};
use notify::Event;
use prisma_client_rust::{raw, PrismaValue};
@@ -46,7 +55,7 @@ use sd_prisma::prisma_sync;
use sd_sync::OperationFactory;
use serde_json::json;
use tokio::{fs, io::ErrorKind};
use tracing::{debug, trace, warn};
use tracing::{debug, error, trace, warn};
use uuid::Uuid;
use super::INodeAndDevice;
@@ -309,14 +318,36 @@ async fn inner_create_file(
.exec()
.await?;
if !extension.is_empty() {
if !extension.is_empty() && matches!(kind, ObjectKind::Image | ObjectKind::Video) {
// Running in a detached task as thumbnail generation can take a while and we don't want to block the watcher
let path = path.to_path_buf();
let inner_path = path.to_path_buf();
let node = node.clone();
let inner_extension = extension.clone();
tokio::spawn(async move {
generate_thumbnail(&extension, &cas_id, path, &node).await;
generate_thumbnail(&inner_extension, &cas_id, inner_path, &node).await;
});
// TODO: Currently we only extract media data for images, remove this if later
if matches!(kind, ObjectKind::Image) {
if let Ok(image_extension) = ImageExtension::from_str(&extension) {
if can_extract_media_data_for_image(&image_extension) {
if let Ok(media_data) = extract_media_data(path)
.await
.map_err(|e| error!("Failed to extract media data: {e:#?}"))
{
if let Ok(media_data_params) =
media_data_image_to_query(media_data, object.id).map_err(|e| {
error!("Failed to prepare media data create params: {e:#?}")
}) {
db.media_data()
.create_many(vec![media_data_params])
.exec()
.await?;
}
}
}
}
}
}
invalidate_query!(library, "search.paths");
@@ -533,8 +564,13 @@ async fn inner_update_file(
if let Some(ref object) = file_path.object {
// if this file had a thumbnail previously, we update it to match the new content
if library.thumbnail_exists(node, old_cas_id).await? {
if let Some(ext) = &file_path.extension {
generate_thumbnail(ext, &cas_id, full_path, node).await;
if let Some(ext) = file_path.extension.clone() {
// Running in a detached task as thumbnail generation can take a while and we don't want to block the watcher
let inner_path = full_path.to_path_buf();
let inner_node = node.clone();
tokio::spawn(async move {
generate_thumbnail(&ext, &cas_id, inner_path, &inner_node).await;
});
// remove the old thumbnail as we're generating a new one
let thumb_path = get_thumbnail_path(node, old_cas_id);
@@ -563,6 +599,30 @@ async fn inner_update_file(
)
.await?;
}
// TODO: Change this if to include ObjectKind::Video in the future
if let Some(ext) = &file_path.extension {
if let Ok(image_extension) = ImageExtension::from_str(ext) {
if can_extract_media_data_for_image(&image_extension)
&& matches!(kind, ObjectKind::Image)
{
if let Ok(media_data) = extract_media_data(full_path)
.await
.map_err(|e| error!("Failed to extract media data: {e:#?}"))
{
if let Ok(media_data_params) =
media_data_image_to_query(media_data, object.id).map_err(|e| {
error!("Failed to prepare media data create params: {e:#?}")
}) {
db.media_data()
.create_many(vec![media_data_params])
.exec()
.await?;
}
}
}
}
}
}
invalidate_query!(library, "search.paths");

View File

@@ -6,9 +6,13 @@ use crate::{
location::file_path_helper::filter_existing_file_path_params,
object::{
file_identifier::{self, file_identifier_job::FileIdentifierJobInit},
preview::{
can_generate_thumbnail_for_image, generate_image_thumbnail, get_thumb_key,
get_thumbnail_path, shallow_thumbnailer, thumbnailer_job::ThumbnailerJobInit,
media::{
media_processor,
thumbnail::{
can_generate_thumbnail_for_image, generate_image_thumbnail, get_thumb_key,
get_thumbnail_path,
},
MediaProcessorJobInit,
},
},
prisma::{file_path, indexer_rules_in_location, location, PrismaClient},
@@ -405,7 +409,7 @@ pub async fn scan_location(
location: location_base_data.clone(),
sub_path: None,
})
.queue_next(ThumbnailerJobInit {
.queue_next(MediaProcessorJobInit {
location: location_base_data,
sub_path: None,
})
@@ -443,7 +447,7 @@ pub async fn scan_location_sub_path(
location: location_base_data.clone(),
sub_path: Some(sub_path.clone()),
})
.queue_next(ThumbnailerJobInit {
.queue_next(MediaProcessorJobInit {
location: location_base_data,
sub_path: Some(sub_path),
})
@@ -469,7 +473,7 @@ pub async fn light_scan_location(
indexer::shallow(&location, &sub_path, &node, &library).await?;
file_identifier::shallow(&location_base_data, &sub_path, &library).await?;
shallow_thumbnailer(&location_base_data, &sub_path, &library, &node).await?;
media_processor::shallow(&location_base_data, &sub_path, &library, &node).await?;
Ok(())
}
@@ -873,7 +877,9 @@ pub(super) async fn generate_thumbnail(
#[cfg(feature = "ffmpeg")]
{
use crate::object::preview::{can_generate_thumbnail_for_video, generate_video_thumbnail};
use crate::object::media::thumbnail::{
can_generate_thumbnail_for_video, generate_video_thumbnail,
};
use sd_file_ext::extensions::VideoExtension;
if let Ok(extension) = VideoExtension::from_str(extension) {

View File

@@ -1,7 +1,7 @@
use crate::{
api::locations::ExplorerItem,
library::Library,
object::{cas::generate_cas_id, preview::get_thumb_key},
object::{cas::generate_cas_id, media::thumbnail::get_thumb_key},
prisma::location,
util::error::FileIOError,
Node,

View File

@@ -1,7 +1,7 @@
use crate::{
job::{
CurrentStep, JobError, JobInitOutput, JobResult, JobRunMetadata, JobStepOutput,
StatefulJob, WorkerContext,
CurrentStep, JobError, JobInitOutput, JobReportUpdate, JobResult, JobRunMetadata,
JobStepOutput, StatefulJob, WorkerContext,
},
library::Library,
location::file_path_helper::{
@@ -75,6 +75,7 @@ impl StatefulJob for FileIdentifierJobInit {
type RunMetadata = FileIdentifierJobRunMetadata;
const NAME: &'static str = "file_identifier";
const IS_BATCHED: bool = true;
async fn init(
&self,
@@ -152,7 +153,12 @@ impl StatefulJob for FileIdentifierJobInit {
.select(file_path::select!({ id }))
.exec()
.await?
.expect("We already validated before that there are orphans `file_path`s"); // SAFETY: We already validated before that there are orphans `file_path`s
.expect("We already validated before that there are orphans `file_path`s");
ctx.progress(vec![
JobReportUpdate::TaskCount(orphan_count),
JobReportUpdate::Message(format!("Found {orphan_count} files to be identified")),
]);
Ok((
FileIdentifierJobRunMetadata {
@@ -211,11 +217,14 @@ impl StatefulJob for FileIdentifierJobInit {
new_metadata.total_objects_linked = total_objects_linked;
new_metadata.cursor = new_cursor;
ctx.progress_msg(format!(
"Processed {} of {} orphan Paths",
step_number * CHUNK_SIZE,
run_metadata.total_orphan_paths
));
ctx.progress(vec![
JobReportUpdate::CompletedTaskCount(step_number * CHUNK_SIZE + file_paths.len()),
JobReportUpdate::Message(format!(
"Processed {} of {} orphan Paths",
step_number * CHUNK_SIZE,
run_metadata.total_orphan_paths
)),
]);
Ok(new_metadata.into())
}

View File

@@ -10,17 +10,19 @@ use crate::{
};
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
use sd_prisma::prisma_sync;
use sd_sync::{CRDTOperation, OperationFactory};
use sd_utils::uuid_to_bytes;
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
path::Path,
};
use futures::future::join_all;
use serde_json::json;
use thiserror::Error;
use tokio::fs;
use tracing::{error, trace};
use uuid::Uuid;
@@ -33,7 +35,7 @@ pub use shallow::*;
// we break these jobs into chunks of 100 to improve performance
const CHUNK_SIZE: usize = 100;
#[derive(Error, Debug)]
#[derive(thiserror::Error, Debug)]
pub enum FileIdentifierJobError {
#[error("received sub path not in database: <path='{}'>", .0.display())]
SubPathNotFound(Box<Path>),
@@ -227,7 +229,6 @@ async fn identifier_job_step(
.iter()
.map(|(file_path_pub_id, (meta, fp))| {
let object_pub_id = Uuid::new_v4();
let sync_id = || prisma_sync::object::SyncId {
pub_id: sd_utils::uuid_to_bytes(object_pub_id),
};
@@ -249,7 +250,7 @@ async fn identifier_job_step(
let object_creation_args = (
sync.shared_create(sync_id(), sync_params),
object::create_unchecked(sd_utils::uuid_to_bytes(object_pub_id), db_params),
object::create_unchecked(uuid_to_bytes(object_pub_id), db_params),
);
(object_creation_args, {

View File

@@ -0,0 +1,167 @@
use crate::{
job::JobRunErrors,
location::file_path_helper::{file_path_for_media_processor, IsolatedFilePathData},
prisma::{location, media_data, PrismaClient},
util::error::FileIOError,
};
use sd_file_ext::extensions::{Extension, ImageExtension, ALL_IMAGE_EXTENSIONS};
use sd_media_metadata::ImageMetadata;
use std::{collections::HashSet, path::Path};
use futures_concurrency::future::Join;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::task::spawn_blocking;
use tracing::error;
use super::media_data_image_to_query;
/// Errors that can occur while extracting media (EXIF) data from image files.
#[derive(Error, Debug)]
pub enum MediaDataError {
	// Internal errors
	/// Prisma query failed while reading or writing `media_data` rows.
	#[error("database error: {0}")]
	Database(#[from] prisma_client_rust::QueryError),
	/// Filesystem I/O failure (the offending path travels inside `FileIOError`).
	#[error(transparent)]
	FileIO(#[from] FileIOError),
	/// Error bubbled up from the `sd-media-metadata` EXIF reader.
	#[error(transparent)]
	MediaData(#[from] sd_media_metadata::Error),
	/// The blocking task used for EXIF parsing panicked or was cancelled.
	#[error("failed to join tokio task: {0}")]
	TokioJoinHandle(#[from] tokio::task::JoinError),
}
/// Per-run counters reported by the media data extractor.
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct MediaDataExtractorMetadata {
	// Files whose EXIF data was successfully extracted and stored.
	pub extracted: u32,
	// Files skipped: already had media data, carried no EXIF data, or failed.
	pub skipped: u32,
}
/// Image extensions eligible for media data extraction, filtered once lazily
/// so database queries can reuse the same list without re-filtering.
pub(super) static FILTERED_IMAGE_EXTENSIONS: Lazy<Vec<Extension>> = Lazy::new(|| {
	ALL_IMAGE_EXTENSIONS
		.iter()
		.cloned()
		.filter(can_extract_media_data_for_image)
		.map(Extension::Image)
		.collect()
});
/// Whether EXIF/media data extraction is supported for this image extension.
pub const fn can_extract_media_data_for_image(image_extension: &ImageExtension) -> bool {
	use ImageExtension::*;
	// Explicit match over the supported EXIF-carrying formats.
	match image_extension {
		Tiff | Dng | Jpeg | Jpg | Heif | Heifs | Heic | Avif | Avcs | Avci | Hif | Png | Webp => {
			true
		}
		_ => false,
	}
}
/// Reads `ImageMetadata` (EXIF data) from the file at `path`.
///
/// The parse runs on a dedicated blocking thread because the underlying EXIF
/// library is synchronous; this keeps the async executor responsive.
pub async fn extract_media_data(path: impl AsRef<Path>) -> Result<ImageMetadata, MediaDataError> {
	// Owned copy so the path can move into the blocking closure.
	let path = path.as_ref().to_path_buf();

	// Running in a separated blocking thread due to MediaData blocking behavior (due to sync exif lib)
	spawn_blocking(|| ImageMetadata::from_path(path))
		.await?
		.map_err(Into::into)
}
/// Extracts media data for a batch of image `file_path`s and persists it.
///
/// Flow:
/// 1. Query which objects already have a `media_data` row and skip them.
/// 2. Concurrently extract EXIF data from the remaining files.
/// 3. Bulk-insert the successfully extracted rows with `create_many`.
///
/// Per-file failures are collected into `JobRunErrors` (non-fatal); only
/// database errors abort the whole run.
pub async fn process(
	files_paths: impl IntoIterator<Item = &file_path_for_media_processor::Data>,
	location_id: location::id::Type,
	location_path: impl AsRef<Path>,
	db: &PrismaClient,
) -> Result<(MediaDataExtractorMetadata, JobRunErrors), MediaDataError> {
	let mut run_metadata = MediaDataExtractorMetadata::default();

	let files_paths = files_paths.into_iter().collect::<Vec<_>>();
	if files_paths.is_empty() {
		// Nothing to process: empty counters, no errors.
		return Ok((run_metadata, JobRunErrors::default()));
	}

	let location_path = location_path.as_ref();

	// Object ids that already have media data; these are skipped below.
	let objects_already_with_media_data = db
		.media_data()
		.find_many(vec![media_data::object_id::in_vec(
			files_paths
				.iter()
				.filter_map(|file_path| file_path.object_id)
				.collect(),
		)])
		.select(media_data::select!({ object_id }))
		.exec()
		.await?
		.into_iter()
		.map(|media_data| media_data.object_id)
		.collect::<HashSet<_>>();

	run_metadata.skipped = objects_already_with_media_data.len() as u32;

	let (media_datas, errors) = {
		// Run all extractions concurrently; each future yields
		// (extraction result, absolute path, object id).
		let maybe_media_data = files_paths
			.into_iter()
			.filter_map(|file_path| {
				// Keep only paths with an object id not already covered above.
				file_path.object_id.and_then(|object_id| {
					(!objects_already_with_media_data.contains(&object_id))
						.then_some((file_path, object_id))
				})
			})
			.filter_map(|(file_path, object_id)| {
				// Resolve the absolute path; log and drop unresolvable entries.
				IsolatedFilePathData::try_from((location_id, file_path))
					.map_err(|e| error!("{e:#?}"))
					.ok()
					.map(|iso_file_path| (location_path.join(iso_file_path), object_id))
			})
			.map(
				|(path, object_id)| async move { (extract_media_data(&path).await, path, object_id) },
			)
			.collect::<Vec<_>>()
			.join()
			.await;

		let total_media_data = maybe_media_data.len();

		// Partition into successful extractions and per-file errors.
		maybe_media_data.into_iter().fold(
			// In the good case, all media data were extracted
			(Vec::with_capacity(total_media_data), Vec::new()),
			|(mut media_datas, mut errors), (maybe_media_data, path, object_id)| {
				match maybe_media_data {
					Ok(media_data) => media_datas.push((media_data, object_id)),
					Err(MediaDataError::MediaData(sd_media_metadata::Error::NoExifDataOnPath(
						_,
					))) => {
						// No exif data on path, skipping
						run_metadata.skipped += 1;
					}
					Err(e) => errors.push((e, path)),
				}
				(media_datas, errors)
			},
		)
	};

	// Bulk-insert; rows that fail conversion to a query are logged and dropped.
	let created = db
		.media_data()
		.create_many(
			media_datas
				.into_iter()
				.filter_map(|(media_data, object_id)| {
					media_data_image_to_query(media_data, object_id)
						.map_err(|e| error!("{e:#?}"))
						.ok()
				})
				.collect(),
		)
		.exec()
		.await?;

	run_metadata.extracted = created as u32;
	run_metadata.skipped += errors.len() as u32;

	Ok((
		run_metadata,
		errors
			.into_iter()
			.map(|(e, path)| format!("Couldn't process file: \"{}\"; Error: {e}", path.display()))
			.collect::<Vec<_>>()
			.into(),
	))
}

View File

@@ -0,0 +1,270 @@
use crate::{
invalidate_query,
job::{
CurrentStep, JobError, JobInitOutput, JobReportUpdate, JobResult, JobStepOutput,
StatefulJob, WorkerContext,
},
library::Library,
location::file_path_helper::{
ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location,
file_path_for_media_processor, IsolatedFilePathData,
},
object::media::media_data_extractor,
object::media::thumbnail::{self, init_thumbnail_dir},
prisma::{location, PrismaClient},
util::db::maybe_missing,
};
use std::{
collections::HashMap,
hash::Hash,
path::{Path, PathBuf},
};
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::{debug, info};
use super::{
get_all_children_files_by_extensions, process, MediaProcessorEntry, MediaProcessorEntryKind,
MediaProcessorError, MediaProcessorMetadata, ThumbnailerEntryKind,
};
const BATCH_SIZE: usize = 10;
/// Input for the media processor job: a location plus an optional sub path
/// restricting processing to a subtree of that location.
#[derive(Serialize, Deserialize, Debug)]
pub struct MediaProcessorJobInit {
	pub location: location::Data,
	pub sub_path: Option<PathBuf>,
}

impl Hash for MediaProcessorJobInit {
	// Only the location id and sub path identify the job; other location
	// fields are deliberately excluded from the hash.
	fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
		self.location.id.hash(state);
		if let Some(ref sub_path) = self.sub_path {
			sub_path.hash(state);
		}
	}
}

/// State computed once in `init` and reused by every step.
#[derive(Debug, Serialize, Deserialize)]
pub struct MediaProcessorJobData {
	// Root directory where thumbnail shards are written.
	thumbnails_base_dir: PathBuf,
	// Absolute on-disk path of the location.
	location_path: PathBuf,
	// Absolute path actually being processed (location root or sub path).
	to_process_path: PathBuf,
}

// Each job step is one batch of entries.
type MediaProcessorJobStep = Vec<MediaProcessorEntry>;
#[async_trait::async_trait]
impl StatefulJob for MediaProcessorJobInit {
	type Data = MediaProcessorJobData;
	type Step = MediaProcessorJobStep;
	type RunMetadata = MediaProcessorMetadata;

	const NAME: &'static str = "media_processor";
	// Steps are pre-chunked vectors, so the job system batches them.
	const IS_BATCHED: bool = true;

	/// Resolves the directory to process, queries candidate files for both
	/// thumbnailing and media data extraction, merges the two sets, and
	/// splits the result into `BATCH_SIZE` chunks (the job steps).
	async fn init(
		&self,
		ctx: &WorkerContext,
		data: &mut Option<Self::Data>,
	) -> Result<JobInitOutput<Self::RunMetadata, Self::Step>, JobError> {
		let Library { db, .. } = ctx.library.as_ref();

		let thumbnails_base_dir = init_thumbnail_dir(ctx.node.config.data_directory())
			.await
			.map_err(MediaProcessorError::from)?;

		let location_id = self.location.id;
		let location_path =
			maybe_missing(&self.location.path, "location.path").map(PathBuf::from)?;

		// With a non-empty sub path: validate it and scope processing to it;
		// otherwise process the entire location.
		let (to_process_path, iso_file_path) = match &self.sub_path {
			Some(sub_path) if sub_path != Path::new("") => {
				let full_path = ensure_sub_path_is_in_location(&location_path, sub_path)
					.await
					.map_err(MediaProcessorError::from)?;
				ensure_sub_path_is_directory(&location_path, sub_path)
					.await
					.map_err(MediaProcessorError::from)?;

				let sub_iso_file_path =
					IsolatedFilePathData::new(location_id, &location_path, &full_path, true)
						.map_err(MediaProcessorError::from)?;

				ensure_file_path_exists(
					sub_path,
					&sub_iso_file_path,
					db,
					MediaProcessorError::SubPathNotFound,
				)
				.await?;

				(full_path, sub_iso_file_path)
			}
			_ => (
				location_path.to_path_buf(),
				IsolatedFilePathData::new(location_id, &location_path, &location_path, true)
					.map_err(MediaProcessorError::from)?,
			),
		};

		debug!(
			"Searching for media files in location {location_id} at directory \"{iso_file_path}\""
		);

		let thumbnailer_files = get_files_for_thumbnailer(db, &iso_file_path).await?;

		// Keyed by file_path id so thumbnailer entries can claim their
		// matching media-data entry in the merge below.
		let mut media_data_files_map = get_files_for_media_data_extraction(db, &iso_file_path)
			.await?
			.map(|file_path| (file_path.id, file_path))
			.collect::<HashMap<_, _>>();

		let mut total_files_for_thumbnailer = 0;

		// Merge: a file needing both operations becomes
		// `MediaDataAndThumbnailer`; leftover media-data-only files are
		// appended after the thumbnailer ones, then everything is chunked.
		let chunked_files = thumbnailer_files
			.into_iter()
			.map(|(file_path, thumb_kind)| {
				total_files_for_thumbnailer += 1;
				MediaProcessorEntry {
					operation_kind: if media_data_files_map.remove(&file_path.id).is_some() {
						MediaProcessorEntryKind::MediaDataAndThumbnailer(thumb_kind)
					} else {
						MediaProcessorEntryKind::Thumbnailer(thumb_kind)
					},
					file_path,
				}
			})
			.collect::<Vec<_>>()
			.into_iter()
			.chain(
				media_data_files_map
					.into_values()
					.map(|file_path| MediaProcessorEntry {
						operation_kind: MediaProcessorEntryKind::MediaData,
						file_path,
					}),
			)
			.chunks(BATCH_SIZE)
			.into_iter()
			.map(|chunk| chunk.collect::<Vec<_>>())
			.collect::<Vec<_>>();

		// NOTE(review): task count only covers thumbnailer files, matching
		// the per-file progress reporting in `execute_step`.
		ctx.progress(vec![
			JobReportUpdate::TaskCount(total_files_for_thumbnailer),
			JobReportUpdate::Message(format!(
				"Preparing to process {total_files_for_thumbnailer} files in {} chunks",
				chunked_files.len()
			)),
		]);

		*data = Some(MediaProcessorJobData {
			thumbnails_base_dir,
			location_path,
			to_process_path,
		});

		Ok(chunked_files.into())
	}

	/// Processes one chunk of entries, reporting completed-task progress as
	/// individual files finish.
	async fn execute_step(
		&self,
		ctx: &WorkerContext,
		CurrentStep { step, step_number }: CurrentStep<'_, Self::Step>,
		data: &Self::Data,
		_: &Self::RunMetadata,
	) -> Result<JobStepOutput<Self::Step, Self::RunMetadata>, JobError> {
		process(
			step,
			self.location.id,
			&data.location_path,
			&data.thumbnails_base_dir,
			&ctx.library,
			|completed_count| {
				ctx.progress(vec![JobReportUpdate::CompletedTaskCount(
					step_number * BATCH_SIZE + completed_count,
				)]);
			},
		)
		.await
		.map(Into::into)
		.map_err(Into::into)
	}

	/// Logs a run summary and invalidates the paths search query when
	/// anything was actually produced.
	async fn finalize(
		&self,
		ctx: &WorkerContext,
		data: &Option<Self::Data>,
		run_metadata: &Self::RunMetadata,
	) -> JobResult {
		info!(
			"Finished media processing for location {} at {}",
			self.location.id,
			data.as_ref()
				.expect("critical error: missing data on job state")
				.to_process_path
				.display()
		);

		if run_metadata.thumbnailer.created > 0 || run_metadata.media_data.extracted > 0 {
			invalidate_query!(ctx.library, "search.paths");
		}

		Ok(Some(json!({"init: ": self, "run_metadata": run_metadata})))
	}
}
/// Fetches every file path under `parent_iso_file_path` (recursively) eligible
/// for thumbnail generation, tagging each with its thumbnailer kind.
/// Video candidates are only included when the `ffmpeg` feature is enabled.
async fn get_files_for_thumbnailer(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
) -> Result<
	impl Iterator<Item = (file_path_for_media_processor::Data, ThumbnailerEntryKind)>,
	MediaProcessorError,
> {
	// query database for all image files in this location that need thumbnails
	let image_thumb_files = get_all_children_files_by_extensions(
		db,
		parent_iso_file_path,
		&thumbnail::FILTERED_IMAGE_EXTENSIONS,
	)
	.await?
	.into_iter()
	.map(|file_path| (file_path, ThumbnailerEntryKind::Image));

	#[cfg(feature = "ffmpeg")]
	let all_files = {
		// query database for all video files in this location that need thumbnails
		let video_files = get_all_children_files_by_extensions(
			db,
			parent_iso_file_path,
			&thumbnail::FILTERED_VIDEO_EXTENSIONS,
		)
		.await?;

		image_thumb_files.chain(
			video_files
				.into_iter()
				.map(|file_path| (file_path, ThumbnailerEntryKind::Video)),
		)
	};
	#[cfg(not(feature = "ffmpeg"))]
	let all_files = { image_thumb_files };

	Ok(all_files)
}
/// Fetches every file path under `parent_iso_file_path` (recursively) whose
/// extension supports media data (EXIF) extraction.
async fn get_files_for_media_data_extraction(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
) -> Result<impl Iterator<Item = file_path_for_media_processor::Data>, MediaProcessorError> {
	get_all_children_files_by_extensions(
		db,
		parent_iso_file_path,
		&media_data_extractor::FILTERED_IMAGE_EXTENSIONS,
	)
	.await
	.map(|file_paths| file_paths.into_iter())
	.map_err(Into::into)
}

View File

@@ -0,0 +1,188 @@
use crate::{
job::{JobRunErrors, JobRunMetadata},
library::Library,
location::file_path_helper::{
file_path_for_media_processor, FilePathError, IsolatedFilePathData,
},
};
use sd_file_ext::extensions::Extension;
use sd_prisma::prisma::{file_path, location, PrismaClient};
use std::path::Path;
use futures_concurrency::future::TryJoin;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use super::{
media_data_extractor::{self, MediaDataError, MediaDataExtractorMetadata},
thumbnail::{self, ThumbnailerEntryKind, ThumbnailerError, ThumbnailerMetadata},
};
mod job;
mod shallow;
pub use job::MediaProcessorJobInit;
pub use shallow::shallow;
/// Top-level error for the media processor, unifying failures from the
/// thumbnailer and the media data extractor.
#[derive(Error, Debug)]
pub enum MediaProcessorError {
	/// The requested sub path has no matching `file_path` row.
	#[error("sub path not found: <path='{}'>", .0.display())]
	SubPathNotFound(Box<Path>),
	/// Prisma query failure.
	#[error("database error: {0}")]
	Database(#[from] prisma_client_rust::QueryError),
	/// Failure while building or validating isolated file paths.
	#[error(transparent)]
	FilePath(#[from] FilePathError),
	/// Error from the thumbnail generation sub-processor.
	#[error(transparent)]
	Thumbnailer(#[from] ThumbnailerError),
	/// Error from the media data extraction sub-processor.
	#[error(transparent)]
	MediaDataExtractor(#[from] MediaDataError),
}
/// Which operation(s) a single file needs during processing.
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
pub enum MediaProcessorEntryKind {
	// Media data (EXIF) extraction only.
	MediaData,
	// Thumbnail generation only (image or video).
	Thumbnailer(ThumbnailerEntryKind),
	// Both media data extraction and thumbnail generation.
	MediaDataAndThumbnailer(ThumbnailerEntryKind),
}

/// A single unit of work: one file path and the operations it needs.
#[derive(Serialize, Deserialize, Debug)]
pub struct MediaProcessorEntry {
	file_path: file_path_for_media_processor::Data,
	operation_kind: MediaProcessorEntryKind,
}

/// Combined run counters from both sub-processors.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct MediaProcessorMetadata {
	media_data: MediaDataExtractorMetadata,
	thumbnailer: ThumbnailerMetadata,
}
impl JobRunMetadata for MediaProcessorMetadata {
	/// Accumulates the counters of a newly finished step into the job total.
	fn update(&mut self, new_data: Self) {
		self.media_data.extracted += new_data.media_data.extracted;
		self.media_data.skipped += new_data.media_data.skipped;
		self.thumbnailer.created += new_data.thumbnailer.created;
		self.thumbnailer.skipped += new_data.thumbnailer.skipped;
	}
}
/// Queries all file paths under `parent_iso_file_path` — RECURSIVELY, via a
/// `materialized_path` prefix match — whose extension is in `extensions`.
///
/// Panics if `parent_iso_file_path` is not a directory (see `expect` below).
async fn get_all_children_files_by_extensions(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
	extensions: &[Extension],
) -> Result<Vec<file_path_for_media_processor::Data>, MediaProcessorError> {
	db.file_path()
		.find_many(vec![
			file_path::location_id::equals(Some(parent_iso_file_path.location_id())),
			file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
			// Prefix match: includes all descendants, not just direct children.
			file_path::materialized_path::starts_with(
				parent_iso_file_path
					.materialized_path_for_children()
					.expect("sub path iso_file_path must be a directory"),
			),
		])
		.select(file_path_for_media_processor::select())
		.exec()
		.await
		.map_err(Into::into)
}
/// Queries only the DIRECT children of `parent_iso_file_path` — exact
/// `materialized_path` match, used by the shallow processor — whose extension
/// is in `extensions`.
///
/// Panics if `parent_iso_file_path` is not a directory (see `expect` below).
async fn get_files_by_extensions(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
	extensions: &[Extension],
) -> Result<Vec<file_path_for_media_processor::Data>, MediaDataError> {
	db.file_path()
		.find_many(vec![
			file_path::location_id::equals(Some(parent_iso_file_path.location_id())),
			file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
			// Exact match: direct children only (contrast with the
			// `starts_with` variant used by the full job).
			file_path::materialized_path::equals(Some(
				parent_iso_file_path
					.materialized_path_for_children()
					.expect("sub path iso_file_path must be a directory"),
			)),
		])
		.select(file_path_for_media_processor::select())
		.exec()
		.await
		.map_err(Into::into)
}
/// Runs media data extraction and thumbnail generation concurrently over one
/// batch of entries, then merges their run metadata and error lists.
///
/// `ctx_update_fn` receives per-file progress callbacks from the thumbnailer.
/// Either sub-processor failing aborts the whole batch (via `try_join`).
async fn process(
	entries: &[MediaProcessorEntry],
	location_id: location::id::Type,
	location_path: impl AsRef<Path>,
	thumbnails_base_dir: impl AsRef<Path>,
	library: &Library,
	ctx_update_fn: impl Fn(usize),
) -> Result<(MediaProcessorMetadata, JobRunErrors), MediaProcessorError> {
	let location_path = location_path.as_ref();

	// Both sub-processors run concurrently on the same batch.
	let ((media_data_metadata, mut media_data_errors), (thumbnailer_metadata, thumbnailer_errors)) =
		(
			async {
				media_data_extractor::process(
					// Entries flagged for media data extraction (alone or combined).
					entries.iter().filter_map(
						|MediaProcessorEntry {
						     file_path,
						     operation_kind,
						 }| {
							matches!(
								operation_kind,
								MediaProcessorEntryKind::MediaDataAndThumbnailer(_)
									| MediaProcessorEntryKind::MediaData
							)
							.then_some(file_path)
						},
					),
					location_id,
					location_path,
					&library.db,
				)
				.await
				.map_err(MediaProcessorError::from)
			},
			async {
				thumbnail::process(
					// Entries flagged for thumbnailing (alone or combined).
					entries.iter().filter_map(
						|MediaProcessorEntry {
						     file_path,
						     operation_kind,
						 }| {
							if let MediaProcessorEntryKind::Thumbnailer(thumb_kind)
							| MediaProcessorEntryKind::MediaDataAndThumbnailer(thumb_kind) =
								operation_kind
							{
								Some((file_path, *thumb_kind))
							} else {
								None
							}
						},
					),
					location_id,
					location_path,
					thumbnails_base_dir,
					library,
					ctx_update_fn,
				)
				.await
				.map_err(MediaProcessorError::from)
			},
		)
			.try_join()
			.await?;

	// Merge the thumbnailer's errors into the media data error list.
	// `extend` accepts any `IntoIterator`, so the previous explicit
	// `.into_iter()` was redundant (clippy::useless_conversion).
	media_data_errors.0.extend(thumbnailer_errors.0);

	Ok((
		MediaProcessorMetadata {
			media_data: media_data_metadata,
			thumbnailer: thumbnailer_metadata,
		},
		media_data_errors,
	))
}

View File

@@ -0,0 +1,196 @@
use crate::{
invalidate_query,
job::{JobError, JobRunMetadata},
library::Library,
location::file_path_helper::{
ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location,
file_path_for_media_processor, IsolatedFilePathData,
},
object::media::{
media_data_extractor,
thumbnail::{self, init_thumbnail_dir, ThumbnailerEntryKind},
},
prisma::{location, PrismaClient},
util::db::maybe_missing,
Node,
};
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use itertools::Itertools;
use tracing::{debug, error, info};
use super::{
get_files_by_extensions, process, MediaProcessorEntry, MediaProcessorEntryKind,
MediaProcessorError, MediaProcessorMetadata,
};
const BATCH_SIZE: usize = 10;
/// Shallow (direct-children-only, non-job) media processing for `sub_path`
/// inside `location` — a lighter-weight path than the full job, run inline.
///
/// Fixes relative to the original: error chunks are only logged when the
/// error list is non-empty, and the needless `&sub_path` double-borrows are
/// dropped to match the equivalent calls in the full job's `init`.
pub async fn shallow(
	location: &location::Data,
	sub_path: &PathBuf,
	library: &Library,
	node: &Node,
) -> Result<(), JobError> {
	let Library { db, .. } = library;

	let thumbnails_base_dir = init_thumbnail_dir(node.config.data_directory())
		.await
		.map_err(MediaProcessorError::from)?;

	let location_id = location.id;
	let location_path = maybe_missing(&location.path, "location.path").map(PathBuf::from)?;

	// With a non-empty sub path: validate it; otherwise scan the location root.
	let iso_file_path = if sub_path != Path::new("") {
		let full_path = ensure_sub_path_is_in_location(&location_path, sub_path)
			.await
			.map_err(MediaProcessorError::from)?;
		ensure_sub_path_is_directory(&location_path, sub_path)
			.await
			.map_err(MediaProcessorError::from)?;

		let sub_iso_file_path =
			IsolatedFilePathData::new(location_id, &location_path, &full_path, true)
				.map_err(MediaProcessorError::from)?;

		ensure_file_path_exists(
			sub_path,
			&sub_iso_file_path,
			db,
			MediaProcessorError::SubPathNotFound,
		)
		.await?;

		sub_iso_file_path
	} else {
		IsolatedFilePathData::new(location_id, &location_path, &location_path, true)
			.map_err(MediaProcessorError::from)?
	};

	debug!("Searching for images in location {location_id} at path {iso_file_path}");

	let thumbnailer_files = get_files_for_thumbnailer(db, &iso_file_path).await?;

	// Keyed by file_path id so thumbnailer entries can claim their matching
	// media-data entry when a file needs both operations.
	let mut media_data_files_map = get_files_for_media_data_extraction(db, &iso_file_path)
		.await?
		.map(|file_path| (file_path.id, file_path))
		.collect::<HashMap<_, _>>();

	let mut total_files = 0;

	// Merge both candidate sets, then chunk into batches of BATCH_SIZE.
	let chunked_files = thumbnailer_files
		.into_iter()
		.map(|(file_path, thumb_kind)| MediaProcessorEntry {
			operation_kind: if media_data_files_map.remove(&file_path.id).is_some() {
				MediaProcessorEntryKind::MediaDataAndThumbnailer(thumb_kind)
			} else {
				MediaProcessorEntryKind::Thumbnailer(thumb_kind)
			},
			file_path,
		})
		.collect::<Vec<_>>()
		.into_iter()
		.chain(
			media_data_files_map
				.into_values()
				.map(|file_path| MediaProcessorEntry {
					operation_kind: MediaProcessorEntryKind::MediaData,
					file_path,
				}),
		)
		.chunks(BATCH_SIZE)
		.into_iter()
		.map(|chunk| {
			let chunk = chunk.collect::<Vec<_>>();
			total_files += chunk.len();
			chunk
		})
		.collect::<Vec<_>>();

	debug!(
		"Preparing to process {total_files} files in {} chunks",
		chunked_files.len()
	);

	let mut run_metadata = MediaProcessorMetadata::default();
	for files in chunked_files {
		let (more_run_metadata, errors) = process(
			&files,
			location.id,
			&location_path,
			&thumbnails_base_dir,
			library,
			|_| {},
		)
		.await?;

		run_metadata.update(more_run_metadata);

		// Only log when a chunk actually produced errors; the original
		// emitted an (empty) error line for every single chunk.
		if !errors.0.is_empty() {
			error!("Errors processing chunk of media data shallow extraction:\n{errors}");
		}
	}

	info!("Media shallow processor run metadata: {run_metadata:#?}");

	if run_metadata.media_data.extracted > 0 || run_metadata.thumbnailer.created > 0 {
		invalidate_query!(library, "search.paths");
	}

	Ok(())
}
/// Fetches the DIRECT children of `parent_iso_file_path` eligible for
/// thumbnail generation (shallow variant — no recursion), tagging each with
/// its thumbnailer kind. Videos require the `ffmpeg` feature.
async fn get_files_for_thumbnailer(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
) -> Result<
	impl Iterator<Item = (file_path_for_media_processor::Data, ThumbnailerEntryKind)>,
	MediaProcessorError,
> {
	// query database for all image files in this location that need thumbnails
	let image_thumb_files = get_files_by_extensions(
		db,
		parent_iso_file_path,
		&thumbnail::FILTERED_IMAGE_EXTENSIONS,
	)
	.await?
	.into_iter()
	.map(|file_path| (file_path, ThumbnailerEntryKind::Image));

	#[cfg(feature = "ffmpeg")]
	let all_files = {
		// query database for all video files in this location that need thumbnails
		let video_files = get_files_by_extensions(
			db,
			parent_iso_file_path,
			&thumbnail::FILTERED_VIDEO_EXTENSIONS,
		)
		.await?;

		image_thumb_files.chain(
			video_files
				.into_iter()
				.map(|file_path| (file_path, ThumbnailerEntryKind::Video)),
		)
	};
	#[cfg(not(feature = "ffmpeg"))]
	let all_files = { image_thumb_files };

	Ok(all_files)
}
/// Fetches the DIRECT children of `parent_iso_file_path` (shallow variant)
/// whose extension supports media data (EXIF) extraction.
async fn get_files_for_media_data_extraction(
	db: &PrismaClient,
	parent_iso_file_path: &IsolatedFilePathData<'_>,
) -> Result<impl Iterator<Item = file_path_for_media_processor::Data>, MediaProcessorError> {
	get_files_by_extensions(
		db,
		parent_iso_file_path,
		&media_data_extractor::FILTERED_IMAGE_EXTENSIONS,
	)
	.await
	.map(|file_paths| file_paths.into_iter())
	.map_err(Into::into)
}

View File

@@ -0,0 +1,61 @@
pub mod media_data_extractor;
pub mod media_processor;
pub mod thumbnail;
pub use media_processor::MediaProcessorJobInit;
use sd_media_metadata::ImageMetadata;
use sd_prisma::prisma::media_data::*;
use self::media_data_extractor::MediaDataError;
/// Converts extracted `ImageMetadata` into a Prisma `media_data::CreateUnchecked`
/// for the given object.
///
/// Fields that fail JSON serialization are stored as `None` instead of failing
/// the conversion — so as written this never actually returns `Err`; the
/// `Result` is kept for interface symmetry with the rest of the module.
pub fn media_data_image_to_query(
	mdi: ImageMetadata,
	object_id: object_id::Type,
) -> Result<CreateUnchecked, MediaDataError> {
	Ok(CreateUnchecked {
		object_id,
		_params: vec![
			// Structured fields are persisted as JSON byte blobs...
			camera_data::set(serde_json::to_vec(&mdi.camera_data).ok()),
			media_date::set(serde_json::to_vec(&mdi.date_taken).ok()),
			dimensions::set(serde_json::to_vec(&mdi.dimensions).ok()),
			media_location::set(serde_json::to_vec(&mdi.location).ok()),
			// ...while scalar-ish fields are persisted as JSON strings.
			artist::set(serde_json::to_string(&mdi.artist).ok()),
			description::set(serde_json::to_string(&mdi.description).ok()),
			copyright::set(serde_json::to_string(&mdi.copyright).ok()),
			exif_version::set(serde_json::to_string(&mdi.exif_version).ok()),
		],
	})
}
/// Rebuilds `ImageMetadata` from a `media_data` database row — the inverse of
/// `media_data_image_to_query`.
///
/// Unparseable or missing JSON falls back to `Default` for the structured
/// fields and `None` for the optional string fields, so this never actually
/// returns `Err` as written.
pub fn media_data_image_from_prisma_data(
	data: sd_prisma::prisma::media_data::Data,
) -> Result<ImageMetadata, MediaDataError> {
	Ok(ImageMetadata {
		dimensions: from_slice_option_to_option(data.dimensions).unwrap_or_default(),
		camera_data: from_slice_option_to_option(data.camera_data).unwrap_or_default(),
		date_taken: from_slice_option_to_option(data.media_date).unwrap_or_default(),
		description: from_string_option_to_option(data.description),
		copyright: from_string_option_to_option(data.copyright),
		artist: from_string_option_to_option(data.artist),
		location: from_slice_option_to_option(data.media_location),
		exif_version: from_string_option_to_option(data.exif_version),
	})
}
/// Deserializes an optional JSON byte blob from the database, collapsing a
/// missing value or a parse failure to `None`.
#[must_use]
fn from_slice_option_to_option<T: serde::Serialize + serde::de::DeserializeOwned>(
	value: Option<Vec<u8>>,
) -> Option<T> {
	// `and_then` flattens the `Option<Option<T>>` from the fallible parse
	// directly, replacing the previous `map(..).unwrap_or_default()` detour.
	value.and_then(|bytes| serde_json::from_slice(&bytes).ok())
}
/// Deserializes an optional JSON string from the database, collapsing a
/// missing value or a parse failure to `None`.
#[must_use]
fn from_string_option_to_option<T: serde::Serialize + serde::de::DeserializeOwned>(
	value: Option<String>,
) -> Option<T> {
	// `and_then` flattens the `Option<Option<T>>` from the fallible parse
	// directly, replacing the previous `map(..).unwrap_or_default()` detour.
	value.and_then(|s| serde_json::from_str(&s).ok())
}

View File

@@ -1,16 +1,16 @@
use crate::util::{error::FileIOError, version_manager::VersionManager};
use std::path::PathBuf;
use tokio::fs as async_fs;
use int_enum::IntEnum;
use tokio::fs;
use tracing::{debug, error, trace};
use crate::util::{error::FileIOError, version_manager::VersionManager};
use super::{get_shard_hex, ThumbnailerError, THUMBNAIL_CACHE_DIR_NAME};
#[derive(IntEnum, Debug, Clone, Copy, Eq, PartialEq)]
#[repr(i32)]
pub enum ThumbnailVersion {
enum ThumbnailVersion {
V1 = 1,
V2 = 2,
Unknown = 0,
@@ -27,7 +27,7 @@ pub async fn init_thumbnail_dir(data_dir: PathBuf) -> Result<PathBuf, Thumbnaile
debug!("Thumbnail directory: {:?}", thumbnail_dir);
// create all necessary directories if they don't exist
async_fs::create_dir_all(&thumbnail_dir)
fs::create_dir_all(&thumbnail_dir)
.await
.map_err(|e| FileIOError::from((&thumbnail_dir, e)))?;
@@ -63,7 +63,7 @@ pub async fn init_thumbnail_dir(data_dir: PathBuf) -> Result<PathBuf, Thumbnaile
/// This function moves all webp files in the thumbnail directory to their respective shard folders.
/// It is used to migrate from V1 to V2.
async fn move_webp_files(dir: &PathBuf) -> Result<(), ThumbnailerError> {
let mut dir_entries = async_fs::read_dir(dir)
let mut dir_entries = fs::read_dir(dir)
.await
.map_err(|source| FileIOError::from((dir, source)))?;
let mut count = 0;
@@ -81,12 +81,12 @@ async fn move_webp_files(dir: &PathBuf) -> Result<(), ThumbnailerError> {
let shard_folder = get_shard_hex(filename);
let new_dir = dir.join(shard_folder);
async_fs::create_dir_all(&new_dir)
fs::create_dir_all(&new_dir)
.await
.map_err(|source| FileIOError::from((new_dir.clone(), source)))?;
let new_path = new_dir.join(filename);
async_fs::rename(&path, &new_path)
fs::rename(&path, &new_path)
.await
.map_err(|source| FileIOError::from((path.clone(), source)))?;
count += 1;

View File

@@ -0,0 +1,382 @@
use crate::{
api::CoreEvent,
job::JobRunErrors,
library::Library,
location::file_path_helper::{file_path_for_media_processor, IsolatedFilePathData},
prisma::location,
util::{error::FileIOError, version_manager::VersionManagerError},
Node,
};
use sd_file_ext::extensions::{Extension, ImageExtension, ALL_IMAGE_EXTENSIONS};
use sd_media_metadata::image::Orientation;
#[cfg(feature = "ffmpeg")]
use sd_file_ext::extensions::{VideoExtension, ALL_VIDEO_EXTENSIONS};
use std::{
collections::HashMap,
error::Error,
ops::Deref,
path::{Path, PathBuf},
};
use futures_concurrency::future::{Join, TryJoin};
use image::{self, imageops, DynamicImage, GenericImageView};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::{fs, io, task::block_in_place};
use tracing::{error, trace, warn};
use webp::Encoder;
mod directory;
mod shard;
pub use directory::init_thumbnail_dir;
pub use shard::get_shard_hex;
// Thumbnails are rendered at 20% of the source image's pixel dimensions.
const THUMBNAIL_SIZE_FACTOR: f32 = 0.2;
// WebP encoding quality, on the encoder's 0-100 scale.
const THUMBNAIL_QUALITY: f32 = 30.0;
/// Directory name under the node data dir where thumbnails are stored.
pub const THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
/// This does not check if a thumbnail exists, it just returns the path that it would exist at
/// Builds the on-disk path where the thumbnail for `cas_id` would live.
/// This does not check if a thumbnail exists, it just returns the path that it would exist at
pub fn get_thumbnail_path(node: &Node, cas_id: &str) -> PathBuf {
	let mut thumbnail = node
		.config
		.data_directory()
		.join(THUMBNAIL_CACHE_DIR_NAME)
		.join(get_shard_hex(cas_id))
		.join(cas_id);
	thumbnail.set_extension("webp");
	thumbnail
}
// this is used to pass the relevant data to the frontend so it can request the thumbnail
// it supports extending the shard hex to support deeper directory structures in the future
// this is used to pass the relevant data to the frontend so it can request the thumbnail
// it supports extending the shard hex to support deeper directory structures in the future
pub fn get_thumb_key(cas_id: &str) -> Vec<String> {
	let shard = get_shard_hex(cas_id);
	let mut key = Vec::with_capacity(2);
	key.push(shard);
	key.push(cas_id.to_owned());
	key
}
/// Video extensions the thumbnailer supports, filtered once lazily
/// (only built when the `ffmpeg` feature is enabled).
#[cfg(feature = "ffmpeg")]
pub(super) static FILTERED_VIDEO_EXTENSIONS: Lazy<Vec<Extension>> = Lazy::new(|| {
	ALL_VIDEO_EXTENSIONS
		.iter()
		.cloned()
		.filter(can_generate_thumbnail_for_video)
		.map(Extension::Video)
		.collect()
});

/// Image extensions the thumbnailer supports, filtered once lazily.
pub(super) static FILTERED_IMAGE_EXTENSIONS: Lazy<Vec<Extension>> = Lazy::new(|| {
	ALL_IMAGE_EXTENSIONS
		.iter()
		.cloned()
		.filter(can_generate_thumbnail_for_image)
		.map(Extension::Image)
		.collect()
});
/// Errors that can occur while generating or migrating thumbnails.
#[derive(Error, Debug)]
pub enum ThumbnailerError {
	// Internal errors
	/// Prisma query failure.
	#[error("database error: {0}")]
	Database(#[from] prisma_client_rust::QueryError),
	/// Filesystem I/O failure (offending path travels inside `FileIOError`).
	#[error(transparent)]
	FileIO(#[from] FileIOError),
	/// Thumbnail directory version migration failure.
	#[error(transparent)]
	VersionManager(#[from] VersionManagerError),
}

/// Whether a file is thumbnailed as an image or (with `ffmpeg`) a video.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub enum ThumbnailerEntryKind {
	Image,
	#[cfg(feature = "ffmpeg")]
	Video,
}

/// Per-run counters reported by the thumbnailer.
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct ThumbnailerMetadata {
	// Thumbnails newly written this run.
	pub created: u32,
	// Files skipped: thumbnail already on disk, or missing `cas_id`.
	pub skipped: u32,
}
// TODO(brxken128): validate avci and avcs
/// Extensions decoded through the dedicated HEIF path instead of `image::open`.
#[cfg(all(feature = "heif", not(target_os = "linux")))]
const HEIF_EXTENSIONS: [&str; 7] = ["heif", "heifs", "heic", "heics", "avif", "avci", "avcs"];
/// Generates a WebP thumbnail for the image at `file_path` and writes it to
/// `output_path`.
///
/// Decoding, EXIF-orientation correction, resizing and WebP encoding run
/// inside `block_in_place` (the `image`/`webp` crates are synchronous); only
/// the shard-directory creation and the final write are async.
pub async fn generate_image_thumbnail<P: AsRef<Path>>(
	file_path: P,
	output_path: P,
) -> Result<(), Box<dyn Error>> {
	// Webp creation has blocking code
	let webp = block_in_place(|| -> Result<Vec<u8>, Box<dyn Error>> {
		// With HEIF support enabled, route HEIF-family extensions through the
		// dedicated decoder; everything else goes through `image::open`.
		#[cfg(all(feature = "heif", not(target_os = "linux")))]
		let img = {
			let ext = file_path
				.as_ref()
				.extension()
				.unwrap_or_default()
				.to_ascii_lowercase();
			if HEIF_EXTENSIONS
				.iter()
				.any(|e| ext == std::ffi::OsStr::new(e))
			{
				sd_heif::heif_to_dynamic_image(file_path.as_ref())?
			} else {
				image::open(file_path.as_ref())?
			}
		};
		#[cfg(not(all(feature = "heif", not(target_os = "linux"))))]
		let img = image::open(file_path.as_ref())?;

		// EXIF orientation, read before the pixels are resized.
		let orientation = Orientation::source_orientation(&file_path);

		let (w, h) = img.dimensions();
		// Optionally, resize the existing photo and convert back into DynamicImage
		let mut img = DynamicImage::ImageRgba8(imageops::resize(
			&img,
			// FIXME : Think of a better heuristic to get the thumbnail size
			(w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
			(h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
			imageops::FilterType::Triangle,
		));

		// this corrects the rotation/flip of the image based on the available exif data
		if let Some(x) = orientation {
			img = x.correct_thumbnail(img);
		}

		// Create the WebP encoder for the above image
		let encoder = Encoder::from_image(&img)?;

		// Encode the image at a specified quality 0-100
		// Type WebPMemory is !Send, which makes the Future in this function !Send,
		// this make us `deref` to have a `&[u8]` and then `to_owned` to make a Vec<u8>
		// which implies on a unwanted clone...
		Ok(encoder.encode(THUMBNAIL_QUALITY).deref().to_owned())
	})?;

	let output_path = output_path.as_ref();

	// Make sure the shard directory exists before writing the thumbnail.
	if let Some(shard_dir) = output_path.parent() {
		fs::create_dir_all(shard_dir)
			.await
			.map_err(|e| FileIOError::from((shard_dir, e)))?;
	} else {
		return Err(io::Error::new(
			io::ErrorKind::InvalidInput,
			"Cannot determine parent shard directory for thumbnail",
		)
		.into());
	}

	fs::write(output_path, &webp)
		.await
		.map_err(|e| FileIOError::from((output_path, e)))
		.map_err(Into::into)
}
/// Generates a video thumbnail via ffmpeg (256px wide, same WebP quality as
/// image thumbnails). Only compiled with the `ffmpeg` feature.
#[cfg(feature = "ffmpeg")]
pub async fn generate_video_thumbnail<P: AsRef<Path>>(
	file_path: P,
	output_path: P,
) -> Result<(), Box<dyn Error>> {
	use sd_ffmpeg::to_thumbnail;

	to_thumbnail(file_path, output_path, 256, THUMBNAIL_QUALITY).await?;

	Ok(())
}
/// Whether the thumbnailer can handle this video extension.
#[cfg(feature = "ffmpeg")]
pub const fn can_generate_thumbnail_for_video(video_extension: &VideoExtension) -> bool {
	use VideoExtension::*;
	// File extensions that are specifically not supported by the thumbnailer
	match video_extension {
		Mpg | Swf | M2v | Hevc | M2ts | Mts | Ts => false,
		_ => true,
	}
}
/// Whether the thumbnailer can handle this image extension.
/// HEIF-family formats are only supported with the `heif` feature enabled,
/// and never on Linux.
pub const fn can_generate_thumbnail_for_image(image_extension: &ImageExtension) -> bool {
	use ImageExtension::*;
	#[cfg(all(feature = "heif", not(target_os = "linux")))]
	let res = matches!(
		image_extension,
		Jpg | Jpeg | Png | Webp | Gif | Heic | Heics | Heif | Heifs | Avif
	);
	#[cfg(not(all(feature = "heif", not(target_os = "linux"))))]
	let res = matches!(image_extension, Jpg | Jpeg | Png | Webp | Gif);
	res
}
/// Generates thumbnails for a batch of file paths within a single location.
///
/// Per-entry failures (bad path, missing cas_id, generation errors) are
/// collected into `JobRunErrors` rather than aborting the whole batch; only
/// shard-directory creation failures abort early. Entries whose thumbnail
/// already exists, or which lack a `cas_id`, are counted as skipped.
/// `ctx_update_fn` is invoked with the 1-based index of each processed entry.
pub(super) async fn process(
	entries: impl IntoIterator<Item = (&file_path_for_media_processor::Data, ThumbnailerEntryKind)>,
	location_id: location::id::Type,
	location_path: impl AsRef<Path>,
	thumbnails_base_dir: impl AsRef<Path>,
	library: &Library,
	ctx_update_fn: impl Fn(usize),
) -> Result<(ThumbnailerMetadata, JobRunErrors), ThumbnailerError> {
	let mut run_metadata = ThumbnailerMetadata::default();

	let location_path = location_path.as_ref();
	let thumbnails_base_dir = thumbnails_base_dir.as_ref();

	// Non-fatal, per-entry error messages reported back to the job system.
	let mut errors = vec![];

	// shard dir -> future that creates it; keyed so each dir is created once.
	let mut to_create_dirs = HashMap::new();

	// Per-entry work unit assembled by the pipeline below.
	struct WorkTable<'a> {
		kind: ThumbnailerEntryKind,
		input_path: PathBuf,
		cas_id: &'a str,
		output_path: PathBuf,
		// Result of probing whether the thumbnail already exists on disk.
		metadata_res: io::Result<()>,
	}

	let entries = entries
		.into_iter()
		// Resolve each file_path to an absolute input path; path-building
		// failures become reported errors and drop the entry.
		.filter_map(|(file_path, kind)| {
			IsolatedFilePathData::try_from((location_id, file_path))
				.map(|iso_file_path| (file_path, kind, location_path.join(iso_file_path)))
				.map_err(|e| {
					errors.push(format!(
						"Failed to build path for file with id {}: {e}",
						file_path.id
					))
				})
				.ok()
		})
		// Entries without a cas_id can't be addressed in the cache: skip them.
		.filter_map(|(file_path, kind, path)| {
			if let Some(cas_id) = &file_path.cas_id {
				Some((kind, path, cas_id))
			} else {
				warn!(
					"Skipping thumbnail generation for {} due to missing cas_id",
					path.display()
				);
				run_metadata.skipped += 1;
				None
			}
		})
		.map(|(kind, input_path, cas_id)| {
			let thumbnails_shard_dir = thumbnails_base_dir.join(get_shard_hex(cas_id));
			let output_path = thumbnails_shard_dir.join(format!("{cas_id}.webp"));

			// Putting all sharding directories in a map to avoid trying to create repeated ones
			to_create_dirs
				.entry(thumbnails_shard_dir.clone())
				.or_insert_with(|| async move {
					fs::create_dir_all(&thumbnails_shard_dir)
						.await
						.map_err(|e| FileIOError::from((thumbnails_shard_dir, e)))
				});

			async move {
				WorkTable {
					kind,
					input_path,
					cas_id,
					// Discarding the ok part as we don't actually care about metadata here, maybe avoiding extra space
					metadata_res: fs::metadata(&output_path).await.map(|_| ()),
					output_path,
				}
			}
		})
		.collect::<Vec<_>>();

	if entries.is_empty() {
		return Ok((run_metadata, errors.into()));
	}

	// Resolving these futures first, as we want to fail early if we can't create the directories
	to_create_dirs
		.into_values()
		.collect::<Vec<_>>()
		.try_join()
		.await?;

	// Running thumbs generation sequentially to don't overload the system, if we're wasting too much time on I/O we can
	// try to run them in parallel
	for (
		idx,
		WorkTable {
			kind,
			input_path,
			cas_id,
			output_path,
			metadata_res,
		},
	) in entries.join().await.into_iter().enumerate()
	{
		ctx_update_fn(idx + 1);
		match metadata_res {
			// Thumbnail file already on disk: nothing to do.
			Ok(_) => {
				trace!(
					"Thumb already exists, skipping generation for {}",
					output_path.display()
				);
				run_metadata.skipped += 1;
				continue;
			}
			// Not found is the expected "needs generating" case.
			Err(e) if e.kind() == io::ErrorKind::NotFound => {
				trace!(
					"Writing {} to {}",
					input_path.display(),
					output_path.display()
				);
				match kind {
					ThumbnailerEntryKind::Image => {
						if let Err(e) = generate_image_thumbnail(&input_path, &output_path).await {
							error!(
								"Error generating thumb for image \"{}\": {e:#?}",
								input_path.display()
							);
							errors.push(format!(
								"Had an error generating thumbnail for \"{}\"",
								input_path.display()
							));
							continue;
						}
					}
					#[cfg(feature = "ffmpeg")]
					ThumbnailerEntryKind::Video => {
						if let Err(e) = generate_video_thumbnail(&input_path, &output_path).await {
							error!(
								"Error generating thumb for video \"{}\": {e:#?}",
								input_path.display()
							);
							errors.push(format!(
								"Had an error generating thumbnail for \"{}\"",
								input_path.display()
							));
							continue;
						}
					}
				}

				// Tell the frontend a new thumbnail is available for this cas_id.
				trace!("Emitting new thumbnail event");
				library.emit(CoreEvent::NewThumbnail {
					thumb_key: get_thumb_key(cas_id),
				});
				run_metadata.created += 1;
			}
			// Any other metadata error is reported but doesn't abort the batch.
			Err(e) => {
				error!(
					"Error getting metadata for thumb: {:#?}",
					FileIOError::from((output_path, e))
				);
				errors.push(format!(
					"Had an error generating thumbnail for \"{}\"",
					input_path.display()
				));
			}
		}
	}

	Ok((run_metadata, errors.into()))
}

View File

@@ -6,8 +6,8 @@ use specta::Type;
pub mod cas;
pub mod file_identifier;
pub mod fs;
pub mod media;
pub mod orphan_remover;
pub mod preview;
pub mod tag;
pub mod thumbnail_remover;
pub mod validation;
@@ -19,7 +19,7 @@ pub mod validation;
// Object selectables!
object::select!(object_for_file_identifier {
pub_id
file_paths: select { pub_id cas_id }
file_paths: select { pub_id cas_id extension is_dir materialized_path name }
});
// The response to provide the Explorer when looking at Objects

View File

@@ -1,140 +0,0 @@
// #[cfg(feature = "ffmpeg")]
// use std::{ffi::OsStr, path::PathBuf};
//
// #[cfg(feature = "ffmpeg")]
// use ffmpeg_next::{codec::context::Context, format, media::Type};
//
// #[derive(Default, Debug)]
// pub struct MediaItem {
// pub created_at: Option<String>,
// pub brand: Option<String>,
// pub model: Option<String>,
// pub duration_seconds: i32,
// pub best_video_stream_index: usize,
// pub best_audio_stream_index: usize,
// pub best_subtitle_stream_index: usize,
// pub steams: Vec<Stream>,
// }
//
// #[derive(Debug)]
// pub struct Stream {
// pub codec: String,
// pub frames: f64,
// pub duration_seconds: f64,
// #[cfg(feature = "ffmpeg")]
// pub kind: Option<StreamKind>,
// }
//
// #[cfg(feature = "ffmpeg")]
// #[derive(Debug, PartialEq, Eq)]
// pub enum StreamKind {
// Video(VideoStream),
// Audio(AudioStream),
// }
//
// #[derive(Debug, PartialEq, Eq)]
// pub struct VideoStream {
// pub width: u32,
// pub height: u32,
// pub aspect_ratio: String,
// #[cfg(feature = "ffmpeg")]
// pub format: format::Pixel,
// pub bitrate: usize,
// }
//
// #[derive(Debug, PartialEq, Eq)]
// pub struct AudioStream {
// pub channels: u16,
// #[cfg(feature = "ffmpeg")]
// pub format: format::Sample,
// pub bitrate: usize,
// pub rate: u32,
// }
//
// #[cfg(feature = "ffmpeg")]
// fn extract(iter: &mut ffmpeg_next::dictionary::Iter, key: &str) -> Option<String> {
// iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
// }
// #[cfg(feature = "ffmpeg")]
// pub fn extract_media_data(path: &PathBuf) -> Result<MediaItem, ffmpeg_next::Error> {
// use chrono::NaiveDateTime;
//
// ffmpeg_next::init().unwrap();
//
// let mut name = path
// .file_name()
// .and_then(OsStr::to_str)
// .map(ToString::to_string)
// .unwrap_or_default();
//
// // strip to exact potential date length and attempt to parse
// name = name.chars().take(19).collect();
// // specifically OBS uses this format for time, other checks could be added
// let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
//
// let context = format::input(&path)?;
//
// let mut media_item = MediaItem::default();
// let metadata = context.metadata();
// let mut iter = metadata.iter();
//
// // creation_time is usually the creation date of the file
// media_item.created_at = extract(&mut iter, "creation_time");
// // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
// media_item.created_at = extract(&mut iter, "creationdate");
// // fallback to potential time if exists
// if media_item.created_at.is_none() {
// media_item.created_at = potential_date.map(|d| d.to_string()).ok();
// }
// // origin metadata
// media_item.brand = extract(&mut iter, "major_brand");
// media_item.brand = extract(&mut iter, "make");
// media_item.model = extract(&mut iter, "model");
//
// if let Some(stream) = context.streams().best(Type::Video) {
// media_item.best_video_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(Type::Audio) {
// media_item.best_audio_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(Type::Subtitle) {
// media_item.best_subtitle_stream_index = stream.index();
// }
// media_item.duration_seconds = context.duration() as i32 / ffmpeg_next::ffi::AV_TIME_BASE;
//
// for stream in context.streams() {
// let codec = Context::from_parameters(stream.parameters())?;
//
// let mut stream_item = Stream {
// codec: codec.id().name().to_string(),
// frames: stream.frames() as f64,
// duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
// kind: None,
// };
//
// if codec.medium() == Type::Video {
// if let Ok(video) = codec.decoder().video() {
// stream_item.kind = Some(StreamKind::Video(VideoStream {
// bitrate: video.bit_rate(),
// format: video.format(),
// width: video.width(),
// height: video.height(),
// aspect_ratio: video.aspect_ratio().to_string(),
// }));
// }
// } else if codec.medium() == Type::Audio {
// if let Ok(audio) = codec.decoder().audio() {
// stream_item.kind = Some(StreamKind::Audio(AudioStream {
// channels: audio.channels(),
// bitrate: audio.bit_rate(),
// rate: audio.rate(),
// format: audio.format(),
// }));
// }
// }
// media_item.steams.push(stream_item);
// }
//
// Ok(media_item)
// }

View File

View File

@@ -1,5 +0,0 @@
mod media_data;
mod thumbnail;
pub use media_data::*;
pub use thumbnail::*;

View File

@@ -1,269 +0,0 @@
use crate::{
api::CoreEvent,
job::JobError,
library::Library,
location::file_path_helper::{file_path_for_thumbnailer, FilePathError, IsolatedFilePathData},
prisma::location,
util::{db::maybe_missing, error::FileIOError, version_manager::VersionManagerError},
Node,
};
use std::{
error::Error,
ops::Deref,
path::{Path, PathBuf},
};
use sd_file_ext::extensions::{Extension, ImageExtension};
#[cfg(feature = "ffmpeg")]
use sd_file_ext::extensions::VideoExtension;
use image::{self, imageops, DynamicImage, GenericImageView};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::{fs, io, task::block_in_place};
use tracing::{error, trace, warn};
use webp::Encoder;
mod directory;
mod shallow;
mod shard;
pub mod thumbnailer_job;
pub use directory::*;
pub use shallow::*;
pub use shard::*;
/// Thumbnail edge length as a fraction of the source image's dimensions.
const THUMBNAIL_SIZE_FACTOR: f32 = 0.2;
/// WebP encoding quality, on the encoder's 0-100 scale.
const THUMBNAIL_QUALITY: f32 = 30.0;
/// Directory (under the node data directory) where thumbnails are cached.
pub const THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
/// This does not check if a thumbnail exists, it just returns the path that it would exist at
/// This does not check if a thumbnail exists, it just returns the path that it would exist at
pub fn get_thumbnail_path(node: &Node, cas_id: &str) -> PathBuf {
	// <data dir>/thumbnails/<shard>/<cas_id>.webp
	let mut thumb_path = node
		.config
		.data_directory()
		.join(THUMBNAIL_CACHE_DIR_NAME)
		.join(get_shard_hex(cas_id))
		.join(cas_id);
	thumb_path.set_extension("webp");
	thumb_path
}
// this is used to pass the relevant data to the frontend so it can request the thumbnail
// it supports extending the shard hex to support deeper directory structures in the future
// this is used to pass the relevant data to the frontend so it can request the thumbnail
// it supports extending the shard hex to support deeper directory structures in the future
pub fn get_thumb_key(cas_id: &str) -> Vec<String> {
	let shard = get_shard_hex(cas_id);
	let id = String::from(cas_id);
	vec![shard, id]
}
/// Video extensions the thumbnailer will accept, pre-filtered once at startup.
#[cfg(feature = "ffmpeg")]
static FILTERED_VIDEO_EXTENSIONS: Lazy<Vec<Extension>> = Lazy::new(|| {
	sd_file_ext::extensions::ALL_VIDEO_EXTENSIONS
		.iter()
		// `.cloned()` is the idiomatic form of `.map(Clone::clone)`
		.cloned()
		.filter(can_generate_thumbnail_for_video)
		.map(Extension::Video)
		.collect()
});

/// Image extensions the thumbnailer will accept, pre-filtered once at startup.
static FILTERED_IMAGE_EXTENSIONS: Lazy<Vec<Extension>> = Lazy::new(|| {
	sd_file_ext::extensions::ALL_IMAGE_EXTENSIONS
		.iter()
		.cloned()
		.filter(can_generate_thumbnail_for_image)
		.map(Extension::Image)
		.collect()
});
/// Errors the thumbnailer can produce, convertible into `JobError` by callers.
#[derive(Error, Debug)]
pub enum ThumbnailerError {
	/// The user-supplied sub-path doesn't exist inside the location.
	#[error("sub path not found: <path='{}'>", .0.display())]
	SubPathNotFound(Box<Path>),

	// Internal errors
	#[error("database error: {0}")]
	Database(#[from] prisma_client_rust::QueryError),
	#[error(transparent)]
	FilePath(#[from] FilePathError),
	#[error(transparent)]
	FileIO(#[from] FileIOError),
	#[error(transparent)]
	VersionManager(#[from] VersionManagerError),
}
/// Whether a job step is for an image or (with ffmpeg) a video; selects the
/// generation path in `inner_process_step`.
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
enum ThumbnailerJobStepKind {
	Image,
	#[cfg(feature = "ffmpeg")]
	Video,
}

/// One unit of thumbnailer work: a file path row plus its media kind.
#[derive(Debug, Serialize, Deserialize)]
pub struct ThumbnailerJobStep {
	file_path: file_path_for_thumbnailer::Data,
	kind: ThumbnailerJobStepKind,
}
// TODO(brxken128): validate avci and avcs
#[cfg(all(feature = "heif", not(target_os = "linux")))]
const HEIF_EXTENSIONS: [&str; 7] = ["heif", "heifs", "heic", "heics", "avif", "avci", "avcs"];
pub async fn generate_image_thumbnail<P: AsRef<Path>>(
file_path: P,
output_path: P,
) -> Result<(), Box<dyn Error>> {
// Webp creation has blocking code
let webp = block_in_place(|| -> Result<Vec<u8>, Box<dyn Error>> {
#[cfg(all(feature = "heif", not(target_os = "linux")))]
let img = {
let ext = file_path
.as_ref()
.extension()
.unwrap_or_default()
.to_ascii_lowercase();
if HEIF_EXTENSIONS
.iter()
.any(|e| ext == std::ffi::OsStr::new(e))
{
sd_heif::heif_to_dynamic_image(file_path.as_ref())?
} else {
image::open(file_path)?
}
};
#[cfg(not(all(feature = "heif", not(target_os = "linux"))))]
let img = image::open(file_path)?;
let (w, h) = img.dimensions();
// Optionally, resize the existing photo and convert back into DynamicImage
let img = DynamicImage::ImageRgba8(imageops::resize(
&img,
// FIXME : Think of a better heuristic to get the thumbnail size
(w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
(h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
imageops::FilterType::Triangle,
));
// Create the WebP encoder for the above image
let encoder = Encoder::from_image(&img)?;
// Encode the image at a specified quality 0-100
// Type WebPMemory is !Send, which makes the Future in this function !Send,
// this make us `deref` to have a `&[u8]` and then `to_owned` to make a Vec<u8>
// which implies on a unwanted clone...
Ok(encoder.encode(THUMBNAIL_QUALITY).deref().to_owned())
})?;
fs::create_dir_all(output_path.as_ref().parent().ok_or(io::Error::new(
io::ErrorKind::InvalidInput,
"Cannot determine parent directory",
))?)
.await?;
fs::write(output_path, &webp).await.map_err(Into::into)
}
/// Generates a WebP thumbnail for the video at `file_path` by delegating frame
/// extraction and encoding to the ffmpeg helper crate.
#[cfg(feature = "ffmpeg")]
pub async fn generate_video_thumbnail<P: AsRef<Path>>(
	file_path: P,
	output_path: P,
) -> Result<(), Box<dyn Error>> {
	use sd_ffmpeg::to_thumbnail;

	to_thumbnail(file_path, output_path, 256, THUMBNAIL_QUALITY)
		.await
		.map_err(Into::into)
}
/// Whether the thumbnailer supports this video extension.
#[cfg(feature = "ffmpeg")]
pub const fn can_generate_thumbnail_for_video(video_extension: &VideoExtension) -> bool {
	use VideoExtension::*;
	// File extensions that are specifically not supported by the thumbnailer
	!matches!(video_extension, Mpg | Swf | M2v | Hevc | M2ts | Mts | Ts)
}

/// Whether the thumbnailer supports this image extension; the HEIF family is
/// only accepted when the `heif` feature is enabled and we're not on Linux.
pub const fn can_generate_thumbnail_for_image(image_extension: &ImageExtension) -> bool {
	use ImageExtension::*;

	#[cfg(all(feature = "heif", not(target_os = "linux")))]
	let res = matches!(
		image_extension,
		Jpg | Jpeg | Png | Webp | Gif | Heic | Heics | Heif | Heifs | Avif
	);
	#[cfg(not(all(feature = "heif", not(target_os = "linux"))))]
	let res = matches!(image_extension, Jpg | Jpeg | Png | Webp | Gif);

	res
}
/// Processes a single thumbnailer step: builds the input path, checks whether
/// a thumbnail already exists, and generates one if not.
///
/// Returns `Ok(true)` when the step was processed (thumbnail written or
/// generation attempted) and `Ok(false)` when it was skipped (missing cas_id
/// or thumbnail already present).
pub async fn inner_process_step(
	step: &ThumbnailerJobStep,
	location_path: impl AsRef<Path>,
	thumbnail_dir: impl AsRef<Path>,
	location: &location::Data,
	library: &Library,
) -> Result<bool, JobError> {
	let ThumbnailerJobStep { file_path, kind } = step;
	let location_path = location_path.as_ref();
	let thumbnail_dir = thumbnail_dir.as_ref();

	// assemble the file path
	let path = location_path.join(IsolatedFilePathData::try_from((location.id, file_path))?);
	trace!("image_file {:?}", file_path);

	// get cas_id, if none found skip
	let Some(cas_id) = &file_path.cas_id else {
		warn!(
			"skipping thumbnail generation for {}",
			maybe_missing(&file_path.materialized_path, "file_path.materialized_path")?
		);
		return Ok(false);
	};

	let thumb_dir = thumbnail_dir.join(get_shard_hex(cas_id));

	// Create the directory if it doesn't exist
	// (failure is only logged; the write below will surface a real error)
	if let Err(e) = fs::create_dir_all(&thumb_dir).await {
		error!("Error creating thumbnail directory {:#?}", e);
	}

	// Define and write the WebP-encoded file to a given path
	let output_path = thumb_dir.join(format!("{cas_id}.webp"));
	match fs::metadata(&output_path).await {
		Ok(_) => {
			trace!(
				"Thumb already exists, skipping generation for {}",
				output_path.display()
			);
			return Ok(false);
		}
		Err(e) if e.kind() == io::ErrorKind::NotFound => {
			trace!("Writing {:?} to {:?}", path, output_path);
			match kind {
				ThumbnailerJobStepKind::Image => {
					// NOTE(review): generation failures are only logged; the
					// NewThumbnail event below still fires — confirm intended.
					if let Err(e) = generate_image_thumbnail(&path, &output_path).await {
						error!("Error generating thumb for image {:#?}", e);
					}
				}
				#[cfg(feature = "ffmpeg")]
				ThumbnailerJobStepKind::Video => {
					if let Err(e) = generate_video_thumbnail(&path, &output_path).await {
						error!("Error generating thumb for video: {:?} {:#?}", &path, e);
					}
				}
			}

			trace!("Emitting new thumbnail event");
			library.emit(CoreEvent::NewThumbnail {
				thumb_key: get_thumb_key(cas_id),
			});
		}
		Err(e) => return Err(ThumbnailerError::from(FileIOError::from((output_path, e))).into()),
	}

	Ok(true)
}

View File

@@ -1,152 +0,0 @@
use super::{
ThumbnailerError, ThumbnailerJobStep, ThumbnailerJobStepKind, FILTERED_IMAGE_EXTENSIONS,
};
use crate::{
invalidate_query,
job::JobError,
library::Library,
location::file_path_helper::{
ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location,
file_path_for_thumbnailer, IsolatedFilePathData,
},
object::preview::thumbnail,
prisma::{file_path, location, PrismaClient},
util::error::FileIOError,
Node,
};
use sd_file_ext::extensions::Extension;
use std::path::{Path, PathBuf};
use thumbnail::init_thumbnail_dir;
use tokio::fs;
use tracing::{debug, trace};
#[cfg(feature = "ffmpeg")]
use super::FILTERED_VIDEO_EXTENSIONS;
/// Runs a non-recursive thumbnail pass over the direct children of
/// `sub_path` inside `location` (or the location root when `sub_path` is
/// empty), then invalidates the `search.paths` query.
///
/// Locations with no `path` set are silently skipped (returns `Ok(())`).
pub async fn shallow_thumbnailer(
	location: &location::Data,
	sub_path: &PathBuf,
	library: &Library,
	node: &Node,
) -> Result<(), JobError> {
	let Library { db, .. } = library;

	let thumbnail_dir = init_thumbnail_dir(node.config.data_directory()).await?;

	let location_id = location.id;
	let location_path = match &location.path {
		Some(v) => PathBuf::from(v),
		None => return Ok(()),
	};

	// Validate the sub-path (must be inside the location and a directory),
	// or fall back to the location root.
	let (path, iso_file_path) = if sub_path != Path::new("") {
		let full_path = ensure_sub_path_is_in_location(&location_path, &sub_path)
			.await
			.map_err(ThumbnailerError::from)?;
		ensure_sub_path_is_directory(&location_path, &sub_path)
			.await
			.map_err(ThumbnailerError::from)?;

		let sub_iso_file_path =
			IsolatedFilePathData::new(location_id, &location_path, &full_path, true)
				.map_err(ThumbnailerError::from)?;

		ensure_file_path_exists(
			&sub_path,
			&sub_iso_file_path,
			db,
			ThumbnailerError::SubPathNotFound,
		)
		.await?;

		(full_path, sub_iso_file_path)
	} else {
		(
			location_path.to_path_buf(),
			IsolatedFilePathData::new(location_id, &location_path, &location_path, true)
				.map_err(ThumbnailerError::from)?,
		)
	};

	debug!(
		"Searching for images in location {location_id} at path {}",
		path.display()
	);

	// create all necessary directories if they don't exist
	fs::create_dir_all(&thumbnail_dir)
		.await
		.map_err(|e| FileIOError::from((&thumbnail_dir, e)))?;

	// query database for all image files in this location that need thumbnails
	let image_files = get_files_by_extensions(
		&library.db,
		location_id,
		&iso_file_path,
		&FILTERED_IMAGE_EXTENSIONS,
		ThumbnailerJobStepKind::Image,
	)
	.await?;
	trace!("Found {:?} image files", image_files.len());

	#[cfg(feature = "ffmpeg")]
	let video_files = {
		// query database for all video files in this location that need thumbnails
		let video_files = get_files_by_extensions(
			&library.db,
			location_id,
			&iso_file_path,
			&FILTERED_VIDEO_EXTENSIONS,
			ThumbnailerJobStepKind::Video,
		)
		.await?;
		trace!("Found {:?} video files", video_files.len());
		video_files
	};

	let all_files = [
		image_files,
		#[cfg(feature = "ffmpeg")]
		video_files,
	]
	.into_iter()
	.flatten();

	// Sequentially generate thumbnails for every matched file.
	for file in all_files {
		thumbnail::inner_process_step(&file, &location_path, &thumbnail_dir, location, library)
			.await?;
	}

	invalidate_query!(library, "search.paths");

	Ok(())
}
/// Fetches the *direct children* of the given directory (note
/// `materialized_path::equals`, unlike the job's recursive `starts_with`
/// variant) whose extension is in `extensions`, wrapping each row as a job
/// step of the given `kind`.
async fn get_files_by_extensions(
	db: &PrismaClient,
	location_id: location::id::Type,
	parent_isolated_file_path_data: &IsolatedFilePathData<'_>,
	extensions: &[Extension],
	kind: ThumbnailerJobStepKind,
) -> Result<Vec<ThumbnailerJobStep>, JobError> {
	Ok(db
		.file_path()
		.find_many(vec![
			file_path::location_id::equals(Some(location_id)),
			file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
			// Exact match => only the immediate children of this directory.
			file_path::materialized_path::equals(Some(
				parent_isolated_file_path_data
					.materialized_path_for_children()
					.expect("sub path iso_file_path must be a directory"),
			)),
		])
		.select(file_path_for_thumbnailer::select())
		.exec()
		.await?
		.into_iter()
		.map(|file_path| ThumbnailerJobStep { file_path, kind })
		.collect())
}

View File

@@ -1,259 +0,0 @@
use crate::{
invalidate_query,
job::{
CurrentStep, JobError, JobInitOutput, JobResult, JobRunMetadata, JobStepOutput,
StatefulJob, WorkerContext,
},
library::Library,
location::file_path_helper::{
ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location,
file_path_for_thumbnailer, IsolatedFilePathData,
},
object::preview::thumbnail::directory::init_thumbnail_dir,
prisma::{file_path, location, PrismaClient},
util::db::maybe_missing,
};
use std::{
hash::Hash,
path::{Path, PathBuf},
};
use sd_file_ext::extensions::Extension;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::{debug, info, trace};
use super::{
inner_process_step, ThumbnailerError, ThumbnailerJobStep, ThumbnailerJobStepKind,
FILTERED_IMAGE_EXTENSIONS,
};
#[cfg(feature = "ffmpeg")]
use super::FILTERED_VIDEO_EXTENSIONS;
/// Initialization data for the thumbnailer job: the location to scan and an
/// optional sub-path restricting the scan.
#[derive(Serialize, Deserialize, Debug)]
pub struct ThumbnailerJobInit {
	pub location: location::Data,
	pub sub_path: Option<PathBuf>,
}
impl Hash for ThumbnailerJobInit {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.location.id.hash(state);
if let Some(ref sub_path) = self.sub_path {
sub_path.hash(state);
}
}
}
/// State computed during `init` and reused by later job phases.
#[derive(Debug, Serialize, Deserialize)]
pub struct ThumbnailerJobData {
	// Root of the thumbnail cache on disk.
	thumbnail_dir: PathBuf,
	// Absolute path of the location being processed.
	location_path: PathBuf,
	// Absolute path actually scanned (location root or validated sub-path).
	path: PathBuf,
}

/// Counters accumulated across job steps.
#[derive(Serialize, Deserialize, Default, Debug)]
pub struct ThumbnailerJobRunMetadata {
	thumbnails_created: u32,
	thumbnails_skipped: u32,
}
impl JobRunMetadata for ThumbnailerJobRunMetadata {
	/// Folds a step's counters into the running totals.
	fn update(&mut self, new_data: Self) {
		let Self {
			thumbnails_created,
			thumbnails_skipped,
		} = new_data;
		self.thumbnails_created += thumbnails_created;
		self.thumbnails_skipped += thumbnails_skipped;
	}
}
/// Stateful job implementation: `init` resolves the target directory and
/// queries candidate files, `execute_step` generates one thumbnail, and
/// `finalize` logs and invalidates the explorer query.
#[async_trait::async_trait]
impl StatefulJob for ThumbnailerJobInit {
	type Data = ThumbnailerJobData;
	type Step = ThumbnailerJobStep;
	type RunMetadata = ThumbnailerJobRunMetadata;

	const NAME: &'static str = "thumbnailer";

	async fn init(
		&self,
		ctx: &WorkerContext,
		data: &mut Option<Self::Data>,
	) -> Result<JobInitOutput<Self::RunMetadata, Self::Step>, JobError> {
		let init = self;
		let Library { db, .. } = &*ctx.library;

		let thumbnail_dir = init_thumbnail_dir(ctx.node.config.data_directory()).await?;

		let location_id = init.location.id;
		let location_path =
			maybe_missing(&init.location.path, "location.path").map(PathBuf::from)?;

		// Validate the optional sub-path (must exist inside the location and
		// be a directory), otherwise scan the location root.
		let (path, iso_file_path) = match &init.sub_path {
			Some(sub_path) if sub_path != Path::new("") => {
				let full_path = ensure_sub_path_is_in_location(&location_path, sub_path)
					.await
					.map_err(ThumbnailerError::from)?;
				ensure_sub_path_is_directory(&location_path, sub_path)
					.await
					.map_err(ThumbnailerError::from)?;

				let sub_iso_file_path =
					IsolatedFilePathData::new(location_id, &location_path, &full_path, true)
						.map_err(ThumbnailerError::from)?;

				ensure_file_path_exists(
					sub_path,
					&sub_iso_file_path,
					db,
					ThumbnailerError::SubPathNotFound,
				)
				.await?;

				(full_path, sub_iso_file_path)
			}
			_ => (
				location_path.to_path_buf(),
				IsolatedFilePathData::new(location_id, &location_path, &location_path, true)
					.map_err(ThumbnailerError::from)?,
			),
		};

		debug!("Searching for images in location {location_id} at directory {iso_file_path}");

		// query database for all image files in this location that need thumbnails
		let image_files = get_files_by_extensions(
			db,
			&iso_file_path,
			&FILTERED_IMAGE_EXTENSIONS,
			ThumbnailerJobStepKind::Image,
		)
		.await?;
		trace!("Found {:?} image files", image_files.len());

		#[cfg(feature = "ffmpeg")]
		let all_files = {
			// query database for all video files in this location that need thumbnails
			let video_files = get_files_by_extensions(
				db,
				&iso_file_path,
				&FILTERED_VIDEO_EXTENSIONS,
				ThumbnailerJobStepKind::Video,
			)
			.await?;
			trace!("Found {:?} video files", video_files.len());

			image_files
				.into_iter()
				.chain(video_files)
				.collect::<Vec<_>>()
		};
		#[cfg(not(feature = "ffmpeg"))]
		let all_files = { image_files.into_iter().collect::<Vec<_>>() };

		ctx.progress_msg(format!("Preparing to process {} files", all_files.len()));

		*data = Some(ThumbnailerJobData {
			thumbnail_dir,
			location_path,
			path,
		});

		Ok((
			ThumbnailerJobRunMetadata {
				thumbnails_created: 0,
				thumbnails_skipped: 0,
			},
			all_files,
		)
			.into())
	}

	async fn execute_step(
		&self,
		ctx: &WorkerContext,
		CurrentStep { step, .. }: CurrentStep<'_, Self::Step>,
		data: &Self::Data,
		_: &Self::RunMetadata,
	) -> Result<JobStepOutput<Self::Step, Self::RunMetadata>, JobError> {
		let init = self;
		ctx.progress_msg(format!(
			"Processing {}",
			maybe_missing(
				&step.file_path.materialized_path,
				"file_path.materialized_path"
			)?
		));

		// Each step processes exactly one file; the returned bool indicates
		// whether a thumbnail was created (true) or skipped (false).
		let mut new_metadata = Self::RunMetadata::default();

		let step_result = inner_process_step(
			step,
			&data.location_path,
			&data.thumbnail_dir,
			&init.location,
			&ctx.library,
		)
		.await;

		step_result.map(|thumbnail_was_created| {
			if thumbnail_was_created {
				new_metadata.thumbnails_created += 1;
			} else {
				new_metadata.thumbnails_skipped += 1;
			}
		})?;

		Ok(new_metadata.into())
	}

	async fn finalize(
		&self,
		ctx: &WorkerContext,
		data: &Option<Self::Data>,
		run_metadata: &Self::RunMetadata,
	) -> JobResult {
		let init = self;
		info!(
			"Finished thumbnail generation for location {} at {}",
			init.location.id,
			data.as_ref()
				.expect("critical error: missing data on job state")
				.path
				.display()
		);

		// Only refresh the explorer when something actually changed.
		if run_metadata.thumbnails_created > 0 {
			invalidate_query!(ctx.library, "search.paths");
		}

		Ok(Some(json!({"init: ": init, "run_metadata": run_metadata})))
	}
}
/// Fetches all files *recursively* under the given directory (note
/// `materialized_path::starts_with`) whose extension is in `extensions`,
/// wrapping each row as a job step of the given `kind`.
async fn get_files_by_extensions(
	db: &PrismaClient,
	iso_file_path: &IsolatedFilePathData<'_>,
	extensions: &[Extension],
	kind: ThumbnailerJobStepKind,
) -> Result<Vec<ThumbnailerJobStep>, JobError> {
	Ok(db
		.file_path()
		.find_many(vec![
			file_path::location_id::equals(Some(iso_file_path.location_id())),
			file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
			// Prefix match => the whole subtree, not just direct children.
			file_path::materialized_path::starts_with(
				iso_file_path
					.materialized_path_for_children()
					.expect("sub path iso_file_path must be a directory"),
			),
		])
		.select(file_path_for_thumbnailer::select())
		.exec()
		.await?
		.into_iter()
		.map(|file_path| ThumbnailerJobStep { file_path, kind })
		.collect())
}

View File

@@ -26,7 +26,7 @@ use tokio_util::sync::{CancellationToken, DropGuard};
use tracing::{debug, error, trace};
use uuid::Uuid;
use super::preview::THUMBNAIL_CACHE_DIR_NAME;
use super::media::thumbnail::THUMBNAIL_CACHE_DIR_NAME;
const THIRTY_SECS: Duration = Duration::from_secs(30);
const HALF_HOUR: Duration = Duration::from_secs(30 * 60);

View File

@@ -80,6 +80,7 @@ extension_category_enum! {
Heics = [0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70, 0x68, 0x65, 0x69, 0x63],
Heif = [],
Heifs = [],
Hif = [],
Avif = [],
Avci = [],
Avcs = [],

View File

@@ -21,8 +21,8 @@ pub enum HeifError {
LibHeif(#[from] libheif_rs::HeifError),
#[error("error while loading the image (via the `image` crate): {0}")]
Image(#[from] image::ImageError),
#[error("io error: {0}")]
Io(#[from] std::io::Error),
#[error("io error: {0} at {}", .1.display())]
Io(std::io::Error, Box<Path>),
#[error("there was an error while converting the image to an `RgbImage`")]
RgbImageConversion,
#[error("the image provided is unsupported")]
@@ -36,7 +36,10 @@ pub enum HeifError {
}
pub fn heif_to_dynamic_image(path: &Path) -> HeifResult<DynamicImage> {
if fs::metadata(path)?.len() > HEIF_MAXIMUM_FILE_SIZE {
if fs::metadata(path)
.map_err(|e| HeifError::Io(e, path.to_path_buf().into_boxed_path()))?
.len() > HEIF_MAXIMUM_FILE_SIZE
{
return Err(HeifError::TooLarge);
}
@@ -65,10 +68,14 @@ pub fn heif_to_dynamic_image(path: &Path) -> HeifResult<DynamicImage> {
// this is the interpolation stuff, it essentially just makes the image correct
// in regards to stretching/resolution, etc
for y in 0..img.height() {
reader.seek(SeekFrom::Start((i.stride * y as usize) as u64))?;
reader
.seek(SeekFrom::Start((i.stride * y as usize) as u64))
.map_err(|e| HeifError::Io(e, path.to_path_buf().into_boxed_path()))?;
for _ in 0..img.width() {
reader.read_exact(&mut buffer)?;
reader
.read_exact(&mut buffer)
.map_err(|e| HeifError::Io(e, path.to_path_buf().into_boxed_path()))?;
sequence.extend_from_slice(&buffer);
}
}

View File

@@ -0,0 +1,16 @@
[package]
name = "sd-media-metadata"
version = "0.0.0"
authors = ["Jake Robinson <jake@spacedrive.com>"]
edition = "2021"
[dependencies]
kamadak-exif = "0.5.5"
thiserror = "1.0.43"
image-rs = { package = "image", version = '0.24.6' }
serde = { version = "1.0.183", features = ["derive"] }
serde_json = { version = "1.0.104" }
specta = { workspace = true, features = ["chrono"] }
chrono = { version = "0.4.26", features = ["serde"] }
# symphonia crate looks great for audio metadata

View File

@@ -0,0 +1 @@
# Spacedrive's EXIF/media data parsing library

View File

@@ -0,0 +1 @@
allow-unwrap-in-tests = true

View File

@@ -0,0 +1,19 @@
use std::path::Path;
use crate::Result;
/// Audio metadata container; extraction is not implemented yet.
#[derive(
	Default, Clone, PartialEq, Eq, Debug, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub struct AudioMetadata {
	duration: Option<i32>, // can't use `Duration` due to bigint
	audio_codec: Option<String>,
}

impl AudioMetadata {
	/// Placeholder for audio metadata extraction from a file path.
	///
	/// Currently always panics via `todo!()` — do not call yet.
	#[allow(clippy::missing_errors_doc)]
	#[allow(clippy::missing_panics_doc)]
	pub fn from_path(_path: impl AsRef<Path>) -> Result<Self> {
		todo!()
	}
}

View File

@@ -0,0 +1,28 @@
use std::{num::ParseFloatError, path::PathBuf};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("there was an i/o error")]
Io(#[from] std::io::Error),
#[error("error from the exif crate: {0}")]
Exif(#[from] exif::Error),
#[error("there was an error while parsing time with chrono: {0}")]
Chrono(#[from] chrono::ParseError),
#[error("there was an error while converting between types")]
Conversion,
#[error("there was an error while parsing the location of an image")]
MediaLocationParse,
#[error("there was an error while parsing a float")]
FloatParse(#[from] ParseFloatError),
#[error("there was an error while initializing the exif reader")]
Init,
#[error("the file provided at ({0}) contains no exif data")]
NoExifDataOnPath(PathBuf),
#[error("the slice provided contains no exif data")]
NoExifDataOnSlice,
#[error("serde error {0}")]
Serde(#[from] serde_json::Error),
}
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -0,0 +1,45 @@
use std::path::Path;
use exif::Tag;
use super::ExifReader;
#[derive(
	Default, Clone, PartialEq, Eq, Debug, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub enum Composite {
	/// The data is present, but we're unable to determine what they mean
	#[default]
	Unknown,
	/// Not a composite image
	False,
	/// A general composite image
	General,
	/// The composite image was captured while shooting
	Live,
}

impl Composite {
	/// Quickly sources [`Composite`] data straight from a path on disk.
	#[allow(clippy::future_not_send)]
	pub fn source_composite(path: impl AsRef<Path>) -> Option<Self> {
		ExifReader::from_path(path)
			.ok()
			.and_then(|reader| reader.get_tag_int(Tag::CompositeImage))
			.map(Self::from)
	}

	/// Sources a [`Composite`] type from an existing [`ExifReader`].
	pub fn from_reader(reader: &ExifReader) -> Option<Self> {
		reader.get_tag_int(Tag::CompositeImage).map(Self::from)
	}
}

impl From<u32> for Composite {
	fn from(value: u32) -> Self {
		if value == 1 {
			Self::False
		} else if value == 2 {
			Self::General
		} else if value == 3 {
			Self::Live
		} else {
			Self::Unknown
		}
	}
}

View File

@@ -0,0 +1,31 @@
use exif::Tag;
/// Used for converting DMS to decimal coordinates, and is the amount to divide by.
///
/// # Examples:
///
/// ```
/// use sd_media_metadata::image::DMS_DIVISION;
///
/// let latitude = [53_f64, 19_f64, 35.11_f64]; // in DMS
/// latitude.iter().zip(DMS_DIVISION.iter());
/// ```
pub const DMS_DIVISION: [f64; 3] = [1_f64, 60_f64, 3600_f64];

/// The amount of significant figures we wish to retain after the decimal point.
///
/// This is currently 8 digits (after the integer) as that is precise enough for most
/// applications.
///
/// This is calculated with `10^n`, where `n` is the desired amount of SFs.
pub const DECIMAL_SF: f64 = 100_000_000_f64;

/// All possible time tags, to be zipped with [`OFFSET_TAGS`]
pub const TIME_TAGS: [Tag; 3] = [Tag::DateTime, Tag::DateTimeOriginal, Tag::DateTimeDigitized];

/// All possible time offset tags, to be zipped with [`TIME_TAGS`]
pub const OFFSET_TAGS: [Tag; 3] = [
	Tag::OffsetTime,
	Tag::OffsetTimeOriginal,
	Tag::OffsetTimeDigitized,
];

View File

@@ -0,0 +1,47 @@
use std::fmt::Display;
use exif::Tag;
use super::ExifReader;
#[derive(
	Default, Clone, PartialEq, Eq, Debug, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub struct Dimensions {
	pub width: i32,
	pub height: i32,
}

impl Dimensions {
	/// Creates a new width and height container
	///
	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::Dimensions;
	///
	/// Dimensions::new(1920, 1080);
	/// ```
	#[must_use]
	pub const fn new(width: i32, height: i32) -> Self {
		Self { width, height }
	}

	/// Reads pixel dimensions from EXIF, falling back to the resolution tags
	/// (and finally to zero) when the pixel-dimension tags are absent.
	// NOTE(review): X/YResolution are DPI-style values, not pixel counts —
	// confirm this fallback is intentional.
	#[must_use]
	pub fn from_reader(reader: &ExifReader) -> Self {
		let width = reader
			.get_tag(Tag::PixelXDimension)
			.or_else(|| reader.get_tag(Tag::XResolution))
			.unwrap_or_default();
		let height = reader
			.get_tag(Tag::PixelYDimension)
			.or_else(|| reader.get_tag(Tag::YResolution))
			.unwrap_or_default();
		Self { width, height }
	}
}

impl Display for Dimensions {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f, "{}x{}", self.width, self.height)
	}
}

View File

@@ -0,0 +1,13 @@
use super::FlashMode;

// Each table below lists the raw EXIF `Flash` tag codes that share one overall
// flash mode; `FLASH_MODES` pairs each table with its mode for lookup.

/// Raw EXIF flash values paired with [`FlashMode::Auto`].
pub const FLASH_AUTO: [u32; 8] = [0x18, 0x19, 0x1d, 0x1f, 0x58, 0x59, 0x5d, 0x5f];
/// Raw EXIF flash values paired with [`FlashMode::On`].
pub const FLASH_ENABLED: [u32; 7] = [0x08, 0x09, 0x0d, 0x0f, 0x49, 0x4d, 0x4f];
/// Raw EXIF flash values paired with [`FlashMode::Off`].
pub const FLASH_DISABLED: [u32; 4] = [0x10, 0x14, 0x30, 0x50];
/// Raw EXIF flash values paired with [`FlashMode::Forced`].
pub const FLASH_FORCED: [u32; 3] = [0x41, 0x45, 0x47];

/// Lookup table pairing each [`FlashMode`] with its known raw EXIF values.
pub const FLASH_MODES: [(FlashMode, &[u32]); 4] = [
	(FlashMode::Auto, &FLASH_AUTO),
	(FlashMode::On, &FLASH_ENABLED),
	(FlashMode::Off, &FLASH_DISABLED),
	(FlashMode::Forced, &FLASH_FORCED),
];

View File

@@ -0,0 +1,168 @@
use exif::Tag;
use super::FlashValue;
use crate::image::{flash::consts::FLASH_MODES, ExifReader};
#[derive(
	Default, Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub struct Flash {
	/// Specifies how flash was used (on, auto, off, forced, unknown)
	///
	/// [`FlashMode::Unknown`] isn't a valid EXIF state, but it's included as the default,
	/// just in case we're unable to correctly match it to a known (valid) state.
	///
	/// This type should only ever be evaluated if flash EXIF data is present, so having this as a non-option shouldn't be an issue.
	pub mode: FlashMode,
	/// Did the flash actually fire?
	pub fired: Option<bool>,
	/// Did flash return to the camera? (Unsure of the meaning)
	pub returned: Option<bool>,
	/// Was red eye reduction used?
	pub red_eye_reduction: Option<bool>,
}
impl Flash {
	/// Builds a [`Flash`] from the raw EXIF `Flash` tag, returning `None` when
	/// the tag is absent or carries no usable flash information.
	#[must_use]
	pub fn from_reader(reader: &ExifReader) -> Option<Self> {
		let raw = reader.get_tag_int(Tag::Flash)?;
		let parsed = FlashValue::try_from(raw).ok()?;
		parsed.into()
	}
}
#[derive(
	Default, Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub enum FlashMode {
	/// The data is present, but we're unable to determine what they mean
	#[default]
	Unknown,
	/// Flash was on
	On,
	/// Flash was off
	Off,
	/// Flash was set to automatically fire in certain conditions
	Auto,
	/// Flash was forcefully fired
	Forced,
}
impl From<u32> for FlashMode {
	/// Looks the raw EXIF flash code up in each mode's table of known values,
	/// defaulting to [`FlashMode::Unknown`] when no table contains it.
	fn from(value: u32) -> Self {
		for (mode, known_values) in FLASH_MODES {
			if known_values.contains(&value) {
				return mode;
			}
		}
		Self::default()
	}
}
impl From<FlashValue> for Option<Flash> {
	// TODO(brxken128): This can be heavily optimised with bitwise AND
	// e.g. to see if flash was fired, `(value & 1) != 0`
	// or to see if red eye reduction was enabled, `(value & 64) != 0`
	// May not be worth it as some states may be invalid according to `https://www.awaresystems.be/imaging/tiff/tifftags/privateifd/exif/flash.html`
	/// Expands a raw [`FlashValue`] into its component parts (mode, fired,
	/// returned, red eye reduction).
	///
	/// Returns `None` when the value only tells us the device has no flash
	/// functionality (or is entirely unknown).
	fn from(value: FlashValue) -> Self {
		// Map the variant to its overall mode directly, rather than casting the
		// enum to `u32` and looking that number up in the `FLASH_MODES` tables.
		// `FlashValue` has no explicit discriminants, so the old
		// `FlashMode::from(value as u32)` fed the variant's *declaration index*
		// (0..=27) into tables that contain EXIF hex codes (0x00..=0x5f),
		// resolving most values to the wrong mode. The grouping below mirrors
		// the `FLASH_AUTO`/`FLASH_ENABLED`/`FLASH_DISABLED`/`FLASH_FORCED`
		// tables exactly.
		let mode = match value {
			FlashValue::OnNoFire
			| FlashValue::OnFired
			| FlashValue::OnNoReturn
			| FlashValue::OnReturn
			| FlashValue::OnRedEyeReduction
			| FlashValue::OnRedEyeReductionNoReturn
			| FlashValue::OnRedEyeReductionReturn => FlashMode::On,
			FlashValue::OffNoFire
			| FlashValue::OffNoFireNoReturn
			| FlashValue::OffNoFlashFunction
			| FlashValue::OffRedEyeReduction => FlashMode::Off,
			FlashValue::AutoNoFire
			| FlashValue::AutoFired
			| FlashValue::AutoFiredNoReturn
			| FlashValue::AutoFiredReturn
			| FlashValue::AutoNoFireRedEyeReduction
			| FlashValue::AutoFiredRedEyeReduction
			| FlashValue::AutoFiredRedEyeReductionNoReturn
			| FlashValue::AutoFiredRedEyeReductionReturn => FlashMode::Auto,
			FlashValue::FiredRedEyeReduction
			| FlashValue::FiredRedEyeReductionNoReturn
			| FlashValue::FiredRedEyeReductionReturn => FlashMode::Forced,
			_ => FlashMode::Unknown,
		};

		let mut data = Flash {
			mode,
			..Default::default()
		};

		#[allow(clippy::match_same_arms)]
		match value {
			FlashValue::Fired => {
				data.fired = Some(true);
			}
			FlashValue::NoFire => {
				data.fired = Some(false);
			}
			FlashValue::FiredReturn => {
				data.fired = Some(true);
				data.returned = Some(true);
			}
			FlashValue::FiredNoReturn => {
				data.fired = Some(true);
				data.returned = Some(false);
			}
			FlashValue::AutoFired => {
				data.fired = Some(true);
			}
			FlashValue::AutoFiredNoReturn => {
				data.fired = Some(true);
				data.returned = Some(false);
			}
			FlashValue::OffNoFire => data.fired = Some(false),
			FlashValue::AutoNoFire => data.fired = Some(false),
			FlashValue::NoFlashFunction | FlashValue::OffNoFlashFunction | FlashValue::Unknown => {
				data = Flash::default();
			}
			FlashValue::AutoFiredRedEyeReduction => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
			}
			FlashValue::AutoFiredRedEyeReductionNoReturn => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
				data.returned = Some(false);
			}
			FlashValue::AutoFiredRedEyeReductionReturn => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
				data.returned = Some(true);
			}
			FlashValue::OnFired => {
				data.fired = Some(true);
			}
			FlashValue::OnNoFire => {
				data.fired = Some(false);
			}
			FlashValue::AutoFiredReturn => {
				data.fired = Some(true);
				data.returned = Some(true);
			}
			FlashValue::OnReturn => {
				data.returned = Some(true);
			}
			FlashValue::OnNoReturn => data.returned = Some(false),
			FlashValue::AutoNoFireRedEyeReduction => {
				data.fired = Some(false);
				data.red_eye_reduction = Some(true);
			}
			FlashValue::OffNoFireNoReturn => {
				data.fired = Some(false);
				data.returned = Some(false);
			}
			FlashValue::OffRedEyeReduction => data.red_eye_reduction = Some(true),
			FlashValue::OnRedEyeReduction => data.red_eye_reduction = Some(true),
			FlashValue::FiredRedEyeReductionNoReturn => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
				data.returned = Some(false);
			}
			FlashValue::FiredRedEyeReduction => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
			}
			FlashValue::FiredRedEyeReductionReturn => {
				data.fired = Some(true);
				data.red_eye_reduction = Some(true);
				// Fixed: this is the "return detected" variant, so `returned`
				// must be `Some(true)` (it previously copied `Some(false)`
				// from the `NoReturn` arm).
				data.returned = Some(true);
			}
			FlashValue::OnRedEyeReductionReturn => {
				data.red_eye_reduction = Some(true);
				data.returned = Some(true);
			}
			FlashValue::OnRedEyeReductionNoReturn => {
				data.red_eye_reduction = Some(true);
				data.returned = Some(false);
			}
		}

		// this means it had a value of Flash::NoFlashFunctionality
		if data == Flash::default() {
			None
		} else {
			Some(data)
		}
	}
}

View File

@@ -0,0 +1,6 @@
//! EXIF flash handling: raw tag values ([`FlashValue`]), parsed data
//! ([`Flash`] / [`FlashMode`]) and the lookup tables in [`consts`].

pub mod consts;

mod data;
mod values;

pub use data::{Flash, FlashMode};
pub use values::FlashValue;

View File

@@ -0,0 +1,136 @@
use std::fmt::Display;
// https://exiftool.org/TagNames/EXIF.html scroll to bottom to get codes
/// Every raw value of the EXIF `Flash` tag that we recognise.
///
/// Each variant corresponds to one hex code in the table linked above;
/// unrecognised codes fall back to [`FlashValue::Unknown`] (the default).
#[derive(
	Clone, Copy, Default, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize, specta::Type,
)]
pub enum FlashValue {
	#[default]
	Unknown,
	NoFire,
	Fired,
	FiredNoReturn,
	FiredReturn,
	OnNoFire,
	OnFired,
	OnNoReturn,
	OnReturn,
	OffNoFire,
	OffNoFireNoReturn,
	AutoNoFire,
	AutoFired,
	AutoFiredNoReturn,
	AutoFiredReturn,
	NoFlashFunction,
	OffNoFlashFunction,
	FiredRedEyeReduction,
	FiredRedEyeReductionNoReturn,
	FiredRedEyeReductionReturn,
	OnRedEyeReduction,
	OnRedEyeReductionNoReturn,
	OnRedEyeReductionReturn,
	OffRedEyeReduction,
	AutoNoFireRedEyeReduction,
	AutoFiredRedEyeReduction,
	AutoFiredRedEyeReductionNoReturn,
	AutoFiredRedEyeReductionReturn,
}
impl FlashValue {
	/// Parses a raw EXIF flash code. Unrecognised codes map to
	/// [`FlashValue::Unknown`], so this currently never returns `None`.
	#[must_use]
	pub fn new(value: u32) -> Option<Self> {
		Self::try_from(value).ok()
	}
}
impl From<u32> for FlashValue {
	// Maps a raw EXIF `Flash` tag code to its variant; anything unrecognised
	// becomes `Unknown` via `Self::default()`.
	fn from(value: u32) -> Self {
		match value {
			0x00 => Self::NoFire,
			0x01 => Self::Fired,
			0x05 => Self::FiredNoReturn,
			0x07 => Self::FiredReturn,
			0x08 => Self::OnNoFire,
			0x09 => Self::OnFired,
			0x0d => Self::OnNoReturn,
			0x0f => Self::OnReturn,
			0x10 => Self::OffNoFire,
			0x14 => Self::OffNoFireNoReturn,
			0x18 => Self::AutoNoFire,
			0x19 => Self::AutoFired,
			0x1d => Self::AutoFiredNoReturn,
			0x1f => Self::AutoFiredReturn,
			0x20 => Self::NoFlashFunction,
			0x30 => Self::OffNoFlashFunction,
			0x41 => Self::FiredRedEyeReduction,
			0x45 => Self::FiredRedEyeReductionNoReturn,
			0x47 => Self::FiredRedEyeReductionReturn,
			0x49 => Self::OnRedEyeReduction,
			0x4d => Self::OnRedEyeReductionNoReturn,
			0x4f => Self::OnRedEyeReductionReturn,
			0x50 => Self::OffRedEyeReduction,
			0x58 => Self::AutoNoFireRedEyeReduction,
			0x59 => Self::AutoFiredRedEyeReduction,
			0x5d => Self::AutoFiredRedEyeReductionNoReturn,
			0x5f => Self::AutoFiredRedEyeReductionReturn,
			_ => Self::default(),
		}
	}
}
impl Display for FlashValue {
	/// Human-readable description of the raw flash state.
	///
	/// Fixed two typos in user-facing strings: "FLash was disabled" and
	/// "return was detecteed".
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		let description = match self {
			Self::Unknown => "Flash data was present but we were unable to parse it",
			Self::NoFire => "Flash didn't fire",
			Self::Fired => "Flash fired",
			Self::FiredNoReturn => "Flash fired but no return detected",
			Self::FiredReturn => "Flash fired and return was detected",
			Self::OnNoFire => "Flash was enabled but not fired",
			Self::OnFired => "Flash was enabled and fired",
			Self::OnNoReturn => "Flash was enabled but no return detected",
			Self::OnReturn => "Flash was enabled and return was detected",
			Self::OffNoFire => "Flash was disabled",
			Self::OffNoFireNoReturn => {
				"Flash was disabled, did not fire and no return was detected"
			}
			Self::AutoNoFire => "Auto was enabled but flash did not fire",
			Self::AutoFired => "Auto was enabled and fired",
			Self::AutoFiredNoReturn => "Auto was enabled and fired, no return was detected",
			Self::AutoFiredReturn => "Auto was enabled and fired, return was detected",
			Self::NoFlashFunction => "Device has no flash function",
			Self::OffNoFlashFunction => "Off as device has no flash function",
			Self::FiredRedEyeReduction => "Flash fired with red eye reduction",
			Self::FiredRedEyeReductionNoReturn => {
				"Flash fired with red eye reduction, no return was detected"
			}
			Self::FiredRedEyeReductionReturn => {
				"Flash fired with red eye reduction, return was detected"
			}
			Self::OnRedEyeReduction => "Flash was enabled with red eye reduction",
			Self::OnRedEyeReductionNoReturn => {
				"Flash was enabled with red eye reduction, no return was detected"
			}
			Self::OnRedEyeReductionReturn => {
				"Flash was enabled with red eye reduction, return was detected"
			}
			Self::OffRedEyeReduction => {
				"Flash was disabled, but red eye reduction was enabled"
			}
			Self::AutoNoFireRedEyeReduction => {
				"Auto was enabled but didn't fire, and red eye reduction was used"
			}
			Self::AutoFiredRedEyeReduction => {
				"Auto was enabled and fired, and red eye reduction was used"
			}
			Self::AutoFiredRedEyeReductionNoReturn => {
				"Auto was enabled and fired, and red eye reduction was enabled but did not return"
			}
			Self::AutoFiredRedEyeReductionReturn => {
				"Auto was enabled and fired, and red eye reduction was enabled and returned"
			}
		};

		f.write_str(description)
	}
}

View File

@@ -0,0 +1,197 @@
use super::{
consts::{DECIMAL_SF, DMS_DIVISION},
ExifReader,
};
use crate::{Error, Result};
use exif::Tag;
use std::{fmt::Display, ops::Neg};
#[derive(Default, Clone, PartialEq, Debug, serde::Serialize, serde::Deserialize, specta::Type)]
/// A GPS position sourced from (or destined for) media metadata.
pub struct MediaLocation {
	/// Decimal latitude, clamped to ±90 and rounded to 8 decimal places.
	latitude: f64,
	/// Decimal longitude, clamped to ±180 and rounded to 8 decimal places.
	longitude: f64,
	/// Altitude, if present — presumably metres; TODO confirm units.
	altitude: Option<i32>,
	direction: Option<i32>, // the direction that the image was taken in, as a bearing (should always be >= 0 && <= 360)
}

/// Maximum absolute latitude, in degrees.
const LAT_MAX_POS: f64 = 90_f64;
/// Maximum absolute longitude, in degrees.
const LONG_MAX_POS: f64 = 180_f64;
impl MediaLocation {
	/// This is used to clamp and format coordinates. They are rounded to 8 significant figures after the decimal point.
	///
	/// `max` must be a positive `f64`, and it should be the maximum distance allowed (e.g. 90 or 180 degrees)
	#[must_use]
	fn format_coordinate(v: f64, max: f64) -> f64 {
		(v.clamp(max.neg(), max) * DECIMAL_SF).round() / DECIMAL_SF
	}

	/// Create a new [`MediaLocation`] from a latitude and longitude pair.
	///
	/// Both of the provided values will be rounded to 8 digits after the decimal point ([`DECIMAL_SF`]),
	///
	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::MediaLocation;
	///
	/// let x = MediaLocation::new(38.89767633, -7.36560353, Some(32), Some(20));
	/// ```
	#[must_use]
	pub fn new(lat: f64, long: f64, altitude: Option<i32>, direction: Option<i32>) -> Self {
		let latitude = Self::format_coordinate(lat, LAT_MAX_POS);
		let longitude = Self::format_coordinate(long, LONG_MAX_POS);

		Self {
			latitude,
			longitude,
			altitude,
			direction,
		}
	}

	/// Create a new [`MediaLocation`] from an [`ExifReader`] instance.
	///
	/// Both of the provided values will be rounded to 8 digits after the decimal point ([`DECIMAL_SF`]),
	///
	/// # Examples
	///
	/// ```ignore
	/// use sd_media_metadata::image::{ExifReader, Location};
	///
	/// let mut reader = ExifReader::from_path("path").unwrap();
	/// MediaLocation::from_exif_reader(&mut reader).unwrap();
	/// ```
	pub fn from_exif_reader(reader: &ExifReader) -> Result<Self> {
		let res = [
			(
				reader.get_tag(Tag::GPSLatitude),
				reader.get_tag(Tag::GPSLatitudeRef),
			),
			(
				reader.get_tag(Tag::GPSLongitude),
				reader.get_tag(Tag::GPSLongitudeRef),
			),
		]
		.into_iter()
		.filter_map(|(item, reference)| {
			let mut item: String = item.unwrap_or_default();
			let reference: String = reference.unwrap_or_default();
			// Keep only the characters that can appear in a DMS triple.
			item.retain(|x| {
				x.is_numeric() || x.is_whitespace() || x == '.' || x == '/' || x == '-'
			});
			let i = item
				.split_whitespace()
				.filter_map(|x| x.parse::<f64>().ok());
			// A usable coordinate must be a full degrees/minutes/seconds triple.
			(i.clone().count() == 3)
				.then(|| i.zip(DMS_DIVISION.iter()).map(|(x, y)| x / y).sum::<f64>())
				.map(|coordinate| {
					// West/South references are negative in decimal notation;
					// "3"/"1" appear to be NIKON's numeric equivalents — TODO confirm.
					if reference == "W"
						|| reference == "S" || reference == "3"
						|| reference == "1"
					{
						coordinate.neg()
					} else {
						coordinate
					}
				})
		})
		.collect::<Vec<_>>();

		// Both latitude and longitude must have parsed successfully.
		// (`new` clamps and rounds, so no extra formatting is needed here;
		// the previous `!res.is_empty()` check was redundant with `len() == 2`.)
		(res.len() == 2)
			.then(|| {
				Self::new(
					res[0],
					res[1],
					reader.get_tag(Tag::GPSAltitude),
					reader
						.get_tag(Tag::GPSImgDirection)
						.map(|x: i32| x.clamp(0, 360)),
				)
			})
			.ok_or(Error::MediaLocationParse)
	}

	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::MediaLocation;
	///
	/// let mut home = MediaLocation::new(38.89767633, -7.36560353, Some(32), Some(20));
	/// home.update_latitude(60_f64);
	/// ```
	pub fn update_latitude(&mut self, lat: f64) {
		self.latitude = Self::format_coordinate(lat, LAT_MAX_POS);
	}

	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::MediaLocation;
	///
	/// let mut home = MediaLocation::new(38.89767633, -7.36560353, Some(32), Some(20));
	/// home.update_longitude(20_f64);
	/// ```
	pub fn update_longitude(&mut self, long: f64) {
		self.longitude = Self::format_coordinate(long, LONG_MAX_POS);
	}

	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::MediaLocation;
	///
	/// let mut home = MediaLocation::new(38.89767633, -7.36560353, Some(32), Some(20));
	/// home.update_altitude(20);
	/// ```
	pub fn update_altitude(&mut self, altitude: i32) {
		self.altitude = Some(altitude);
	}

	/// # Examples
	///
	/// ```
	/// use sd_media_metadata::image::MediaLocation;
	///
	/// let mut home = MediaLocation::new(38.89767633, -7.36560353, Some(32), Some(20));
	/// home.update_direction(233);
	/// ```
	pub fn update_direction(&mut self, bearing: i32) {
		self.direction = Some(bearing.clamp(0, 360));
	}
}
impl TryFrom<String> for MediaLocation {
type Error = Error;
/// This tries to parse a standard "34.2493458, -23.4923843" string to a [`MediaLocation`]
///
/// # Examples:
///
/// ```
/// use sd_media_metadata::image::MediaLocation;
///
/// let s = String::from("32.47583923, -28.49238495");
/// MediaLocation::try_from(s).unwrap();
///
/// ```
fn try_from(value: String) -> std::result::Result<Self, Self::Error> {
let iter = value
.split_terminator(", ")
.filter_map(|x| x.parse::<f64>().ok());
if iter.clone().count() == 2 {
let items = iter.collect::<Vec<_>>();
Ok(Self::new(
Self::format_coordinate(items[0], LAT_MAX_POS),
Self::format_coordinate(items[1], LONG_MAX_POS),
None,
None,
))
} else {
Err(Error::Conversion)
}
}
}
impl Display for MediaLocation {
	/// Formats as `"latitude, longitude"`.
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f, "{}, {}", self.latitude, self.longitude)
	}
}

View File

@@ -0,0 +1,124 @@
use exif::Tag;
use std::path::Path;
mod composite;
mod consts;
mod dimensions;
mod flash;
mod location;
mod orientation;
mod profile;
mod reader;
mod time;
pub use composite::Composite;
pub use consts::DMS_DIVISION;
pub use dimensions::Dimensions;
pub use flash::{Flash, FlashMode, FlashValue};
pub use location::MediaLocation;
pub use orientation::Orientation;
pub use profile::ColorProfile;
pub use reader::ExifReader;
pub use time::MediaTime;
use crate::Result;
#[derive(Default, Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize, specta::Type)]
/// All metadata we extract from an image's EXIF data.
pub struct ImageMetadata {
	/// Pixel width and height.
	pub dimensions: Dimensions,
	/// Best-effort capture time (UTC with offset, naive, or undefined).
	pub date_taken: MediaTime,
	/// GPS position, when the image has usable GPS tags.
	pub location: Option<MediaLocation>,
	/// Camera/device-specific details.
	pub camera_data: ImageData,
	pub artist: Option<String>,
	pub description: Option<String>,
	pub copyright: Option<String>,
	pub exif_version: Option<String>,
}
#[derive(Default, Clone, PartialEq, Debug, serde::Serialize, serde::Deserialize, specta::Type)]
/// Camera/device-specific EXIF data; every field is best-effort and may be absent.
pub struct ImageData {
	pub device_make: Option<String>,
	pub device_model: Option<String>,
	pub color_space: Option<String>,
	pub color_profile: Option<ColorProfile>,
	pub focal_length: Option<f64>,
	pub shutter_speed: Option<f64>,
	pub flash: Option<Flash>,
	/// EXIF orientation, defaulting to `Normal` when the tag is missing.
	pub orientation: Orientation,
	pub lens_make: Option<String>,
	pub lens_model: Option<String>,
	pub bit_depth: Option<i32>,
	pub red_eye: Option<bool>,
	pub zoom: Option<f64>,
	pub iso: Option<i32>,
	pub software: Option<String>,
	pub serial_number: Option<String>,
	pub lens_serial_number: Option<String>,
	pub contrast: Option<i32>,
	pub saturation: Option<i32>,
	pub sharpness: Option<i32>,
	pub composite: Option<Composite>,
}
impl ImageMetadata {
pub fn from_path(path: impl AsRef<Path>) -> Result<Self> {
Self::from_reader(&ExifReader::from_path(path)?)
}
pub fn from_slice(bytes: &[u8]) -> Result<Self> {
Self::from_reader(&ExifReader::from_slice(bytes)?)
}
#[allow(clippy::field_reassign_with_default)]
pub fn from_reader(reader: &ExifReader) -> Result<Self> {
let mut data = Self::default();
let camera_data = &mut data.camera_data;
data.date_taken = MediaTime::from_reader(reader);
data.dimensions = Dimensions::from_reader(reader);
data.artist = reader.get_tag(Tag::Artist);
data.description = reader.get_tag(Tag::ImageDescription);
data.copyright = reader.get_tag(Tag::Copyright);
data.exif_version = reader.get_tag(Tag::ExifVersion);
data.location = MediaLocation::from_exif_reader(reader).ok();
camera_data.device_make = reader.get_tag(Tag::Make);
camera_data.device_model = reader.get_tag(Tag::Model);
camera_data.focal_length = reader.get_tag(Tag::FocalLength);
camera_data.shutter_speed = reader.get_tag(Tag::ShutterSpeedValue);
camera_data.color_space = reader.get_tag(Tag::ColorSpace);
camera_data.color_profile = ColorProfile::from_reader(reader);
camera_data.lens_make = reader.get_tag(Tag::LensMake);
camera_data.lens_model = reader.get_tag(Tag::LensModel);
camera_data.iso = reader.get_tag(Tag::PhotographicSensitivity);
camera_data.zoom = reader
.get_tag(Tag::DigitalZoomRatio)
.map(|x: String| x.replace("unused", "1").parse().ok())
.unwrap_or_default();
camera_data.bit_depth = reader.get_tag::<String>(Tag::BitsPerSample).map_or_else(
|| {
reader
.get_tag::<String>(Tag::CompressedBitsPerPixel)
.unwrap_or_default()
.parse()
.ok()
},
|x| x.parse::<i32>().ok(),
);
camera_data.orientation = Orientation::from_reader(reader).unwrap_or_default();
camera_data.flash = Flash::from_reader(reader);
camera_data.software = reader.get_tag(Tag::Software);
camera_data.serial_number = reader.get_tag(Tag::BodySerialNumber);
camera_data.lens_serial_number = reader.get_tag(Tag::LensSerialNumber);
camera_data.software = reader.get_tag(Tag::Software);
camera_data.contrast = reader.get_tag(Tag::Contrast);
camera_data.saturation = reader.get_tag(Tag::Saturation);
camera_data.sharpness = reader.get_tag(Tag::Sharpness);
camera_data.composite = Composite::from_reader(reader);
Ok(data)
}
}

View File

@@ -0,0 +1,63 @@
use super::ExifReader;
use exif::Tag;
use image_rs::DynamicImage;
use std::path::Path;
#[derive(
	Default, Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize, specta::Type,
)]
/// The EXIF orientation of an image.
///
/// Variant names appear to describe the transform applied by
/// `correct_thumbnail` to fix the image (see that method) — rotations are
/// clockwise. `Normal` is the default when the tag is absent or unrecognised.
pub enum Orientation {
	#[default]
	Normal,
	MirroredHorizontal,
	CW90,
	MirroredVertical,
	MirroredHorizontalAnd270CW,
	MirroredHorizontalAnd90CW,
	CW180,
	CW270,
}
impl Orientation {
	/// This is used for quickly sourcing [`Orientation`] data from a path, to be later used by one of the modification functions.
	///
	/// Returns `None` if the file can't be opened, has no EXIF data, or has no
	/// orientation tag.
	///
	/// Note: the stale `#[allow(clippy::future_not_send)]` was removed — this
	/// function is synchronous, so that (async-only) lint does not apply.
	pub fn source_orientation(path: impl AsRef<Path>) -> Option<Self> {
		let reader = ExifReader::from_path(path).ok()?;
		reader.get_tag_int(Tag::Orientation).map(Into::into)
	}

	/// This is used for quickly sourcing an [`Orientation`] data from an [`ExifReader`]
	pub fn from_reader(reader: &ExifReader) -> Option<Self> {
		reader.get_tag_int(Tag::Orientation).map(Into::into)
	}

	/// This is used to correct thumbnails in the thumbnailer, if we are able to source orientation data for the file at hand.
	#[must_use]
	pub fn correct_thumbnail(&self, img: DynamicImage) -> DynamicImage {
		match self {
			Self::Normal => img,
			Self::CW180 => img.rotate180(),
			Self::CW270 => img.rotate270(),
			Self::CW90 => img.rotate90(),
			Self::MirroredHorizontal => img.fliph(),
			Self::MirroredVertical => img.flipv(),
			Self::MirroredHorizontalAnd90CW => img.fliph().rotate90(),
			Self::MirroredHorizontalAnd270CW => img.fliph().rotate270(),
		}
	}
}
impl From<u32> for Orientation {
	// Maps the raw EXIF `Orientation` tag value (1-8) to a variant.
	// Value 1 ("normal") and any unrecognised value fall through to `Normal`.
	fn from(value: u32) -> Self {
		match value {
			2 => Self::MirroredHorizontal,
			3 => Self::CW180,
			4 => Self::MirroredVertical,
			5 => Self::MirroredHorizontalAnd270CW,
			6 => Self::CW90,
			7 => Self::MirroredHorizontalAnd90CW,
			8 => Self::CW270,
			_ => Self::Normal,
		}
	}
}

View File

@@ -0,0 +1,55 @@
use super::ExifReader;
use exif::Tag;
use std::fmt::Display;
#[derive(
	Default, Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize, specta::Type,
)]
/// The rendering profile of an image, sourced from the EXIF `CustomRendered`
/// tag (standard values plus vendor HDR/portrait extensions — see the
/// exiftool EXIF tag reference).
pub enum ColorProfile {
	#[default]
	Normal,
	Custom,
	HDRNoOriginal,
	HDRWithOriginal,
	OriginalForHDR,
	Panorama,
	PortraitHDR,
	Portrait,
}
impl ColorProfile {
	/// Sources a [`ColorProfile`] from the `CustomRendered` tag of an [`ExifReader`].
	pub fn from_reader(reader: &ExifReader) -> Option<Self> {
		let raw = reader.get_tag_int(Tag::CustomRendered)?;
		Some(Self::from(raw))
	}
}
impl From<u32> for ColorProfile {
	// Maps the raw EXIF `CustomRendered` value to a profile, per the exiftool
	// EXIF tag reference: 0 = Normal, 1 = Custom, 2 = HDR (no original saved),
	// 3 = HDR (original saved), 4 = Original (for HDR), 6 = Panorama,
	// 7 = Portrait HDR, 8 = Portrait.
	//
	// Fixed: 0/1 were previously inverted (0 mapped to `Custom` and 1 fell
	// through to `Normal`), and 7/8 were swapped.
	fn from(value: u32) -> Self {
		match value {
			1 => Self::Custom,
			2 => Self::HDRNoOriginal,
			3 => Self::HDRWithOriginal,
			4 => Self::OriginalForHDR,
			6 => Self::Panorama,
			7 => Self::PortraitHDR,
			8 => Self::Portrait,
			// 0 and anything unrecognised are treated as a normal rendering.
			_ => Self::Normal,
		}
	}
}
impl Display for ColorProfile {
	/// Human-readable name for the profile.
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		let name = match self {
			Self::Normal => "Normal",
			Self::Custom => "Custom",
			Self::HDRNoOriginal => "HDR (with no original saved)",
			Self::HDRWithOriginal => "HDR (with original saved)",
			Self::OriginalForHDR => "Original for HDR image",
			Self::Panorama => "Panorama",
			Self::Portrait => "Portrait",
			Self::PortraitHDR => "HDR Portrait",
		};
		f.write_str(name)
	}
}

View File

@@ -0,0 +1,54 @@
use std::{
fs::File,
io::{BufReader, Cursor},
path::Path,
str::FromStr,
};
use exif::{Exif, In, Tag};
use crate::{Error, Result};
/// An [`ExifReader`]. This can get exif tags from images (either files or slices).
pub struct ExifReader(Exif);
impl ExifReader {
pub fn from_path(path: impl AsRef<Path>) -> Result<Self> {
exif::Reader::new()
.read_from_container(&mut BufReader::new(File::open(&path)?))
.map_or_else(
|_| Err(Error::NoExifDataOnPath(path.as_ref().to_path_buf())),
|reader| Ok(Self(reader)),
)
}
pub fn from_slice(slice: &[u8]) -> Result<Self> {
exif::Reader::new()
.read_from_container(&mut Cursor::new(slice))
.map_or_else(|_| Err(Error::NoExifDataOnSlice), |reader| Ok(Self(reader)))
}
/// A helper function which gets the target `Tag` as `T`, provided `T` impls `FromStr`.
///
/// This function strips any erroneous newlines
#[must_use]
pub fn get_tag<T>(&self, tag: Tag) -> Option<T>
where
T: FromStr,
{
self.0.get_field(tag, In::PRIMARY).map(|x| {
x.display_value()
.to_string()
.replace(['\\', '\"'], "")
.parse::<T>()
.ok()
})?
}
pub(crate) fn get_tag_int(&self, tag: Tag) -> Option<u32> {
self.0
.get_field(tag, In::PRIMARY)
.map(|x| x.value.get_uint(0))
.unwrap_or_default()
}
}

View File

@@ -0,0 +1,101 @@
use super::{
consts::{OFFSET_TAGS, TIME_TAGS},
ExifReader,
};
use crate::Error;
use chrono::{DateTime, FixedOffset, NaiveDateTime};
use std::fmt::Display;
/// The naive, timezone-less timestamp format used by [`MediaTime`]
/// (`YYYY-MM-DD HH:MM:SS`).
pub const NAIVE_FORMAT_STR: &str = "%Y-%m-%d %H:%M:%S";

#[derive(Default, Clone, Debug, PartialEq, Eq, serde::Deserialize, specta::Type)]
/// This can be either naive with no TZ (`YYYY-MM-DD HH-MM-SS`) or UTC with a fixed offset (`rfc3339`).
///
/// This may also be `undefined`.
///
/// Note: `Serialize` is intentionally not derived — a manual implementation
/// elsewhere in this file serialises everything to the naive string format.
pub enum MediaTime {
	/// A timestamp with no timezone information.
	Naive(NaiveDateTime),
	/// A timestamp with a fixed UTC offset.
	Utc(DateTime<FixedOffset>),
	#[default]
	Undefined,
}
impl MediaTime {
	/// This iterates over all 3 pairs of time/offset tags in an attempt to create a UTC time.
	///
	/// If the above fails, we fall back to Naive time - if that's not present this is `Undefined`.
	pub fn from_reader(reader: &ExifReader) -> Self {
		TIME_TAGS
			.into_iter()
			.zip(OFFSET_TAGS)
			.find_map(|(time_tag, offset_tag)| {
				let time = reader.get_tag::<String>(time_tag);
				let offset = reader.get_tag::<String>(offset_tag);

				match (time, offset) {
					// Time plus offset can form a fixed-offset (UTC) timestamp.
					(Some(time), Some(offset)) => {
						DateTime::parse_and_remainder(&format!("{time} {offset}"), "%F %X %#z")
							.ok()
							.map(|(parsed, _remainder)| Self::Utc(parsed))
					}
					// Time alone can only ever be naive.
					(Some(time), None) => NaiveDateTime::parse_from_str(&time, NAIVE_FORMAT_STR)
						.ok()
						.map(Self::Naive),
					_ => None,
				}
			})
			.unwrap_or(Self::Undefined)
	}
}
impl TryFrom<String> for MediaTime {
	type Error = Error;

	/// Parses a previously serialised [`MediaTime`]: the literal `"Undefined"`,
	/// an RFC 3339 timestamp, or a naive timestamp ([`NAIVE_FORMAT_STR`]).
	/// Unparseable input yields [`MediaTime::Undefined`] rather than an error.
	fn try_from(value: String) -> Result<Self, Self::Error> {
		if value == "Undefined" {
			return Ok(Self::Undefined);
		}

		match DateTime::parse_from_rfc3339(&value) {
			Ok(utc) => Ok(Self::Utc(utc)),
			Err(_) => Ok(NaiveDateTime::parse_from_str(&value, NAIVE_FORMAT_STR)
				.map_or(Self::Undefined, Self::Naive)),
		}
	}
}
impl Display for MediaTime {
	/// Naive times use their default string form, UTC times use RFC 3339,
	/// and the undefined state is the literal `Undefined`.
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		let repr = match self {
			Self::Undefined => "Undefined".to_string(),
			Self::Naive(naive) => naive.to_string(),
			Self::Utc(utc) => utc.to_rfc3339(),
		};
		f.write_str(&repr)
	}
}
impl serde::Serialize for MediaTime {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
match self {
Self::Naive(t) => serializer.collect_str(&t.to_string()),
Self::Utc(t) => {
let local = NaiveDateTime::from_timestamp_millis(t.timestamp_millis()).ok_or_else(
|| serde::ser::Error::custom("Error converting UTC to Naive time"),
)?;
serializer.collect_str(&local.format("%Y-%m-%d %H:%M:%S").to_string())
}
Self::Undefined => serializer.collect_str("Undefined"),
}
}
}

View File

@@ -0,0 +1,40 @@
#![doc = include_str!("../README.md")]
#![warn(
clippy::all,
clippy::pedantic,
clippy::correctness,
clippy::perf,
clippy::style,
clippy::suspicious,
clippy::complexity,
clippy::nursery,
clippy::unwrap_used,
unused_qualifications,
rust_2018_idioms,
clippy::expect_used,
trivial_casts,
trivial_numeric_casts,
unused_allocation,
clippy::as_conversions,
clippy::dbg_macro
)]
#![forbid(unsafe_code)]
#![allow(clippy::missing_errors_doc, clippy::module_name_repetitions)]
pub mod audio;
mod error;
pub mod image;
pub mod video;
pub use audio::AudioMetadata;
pub use error::{Error, Result};
pub use image::ImageMetadata;
pub use video::VideoMetadata;
#[derive(Clone, PartialEq, Debug, serde::Serialize, serde::Deserialize, specta::Type)]
#[serde(tag = "type")]
/// Metadata for any supported media kind, internally tagged with `"type"`
/// when serialised. Variants are boxed to keep the enum itself small.
pub enum MediaMetadata {
	Image(Box<ImageMetadata>),
	Video(Box<VideoMetadata>),
	Audio(Box<AudioMetadata>),
}

View File

@@ -0,0 +1,20 @@
use std::path::Path;
use crate::Result;
#[derive(
	Default, Clone, PartialEq, Eq, Debug, serde::Serialize, serde::Deserialize, specta::Type,
)]
/// Metadata extracted from a video file.
pub struct VideoMetadata {
	duration: Option<i32>, // bigint
	video_codec: Option<String>,
	audio_codec: Option<String>,
}

impl VideoMetadata {
	#[allow(clippy::missing_errors_doc)]
	#[allow(clippy::missing_panics_doc)]
	/// Extracts video metadata from the file at `path`.
	///
	/// NOTE: not yet implemented — this currently always panics via `todo!()`.
	pub fn from_path(_path: impl AsRef<Path>) -> Result<Self> {
		todo!()
	}
}

View File

@@ -13,6 +13,7 @@ import {
} from '@sd/client';
import { Dialog, ErrorMessage, InputField, UseDialogProps, useDialog, z } from '@sd/ui';
import { showAlertDialog } from '~/components';
import Accordion from '~/components/Accordion';
import { useCallbackToWatchForm } from '~/hooks';
import { Platform, usePlatform } from '~/util/Platform';
import IndexerRuleEditor from './IndexerRuleEditor';
@@ -68,7 +69,6 @@ export const AddLocationDialog = ({
const relinkLocation = useLibraryMutation('locations.relink');
const listIndexerRules = useLibraryQuery(['locations.indexer_rules.list']);
const addLocationToLibrary = useLibraryMutation('locations.addLibrary');
const [toggleSettings, setToggleSettings] = useState(false);
// This is required because indexRules is undefined on first render
const indexerRulesIds = useMemo(
@@ -217,7 +217,7 @@ export const AddLocationDialog = ({
: ''
}
>
<ErrorMessage name={REMOTE_ERROR_FORM_FIELD} variant="large" className="mb-4 mt-2" />
<ErrorMessage name={REMOTE_ERROR_FORM_FIELD} variant="large" className="mt-2 mb-4" />
<InputField
size="md"
@@ -234,37 +234,21 @@ export const AddLocationDialog = ({
<input type="hidden" {...form.register('method')} />
<div className="rounded-md border border-app-line bg-app-darkBox">
<div
onClick={() => setToggleSettings((t) => !t)}
className="flex items-center justify-between px-3 py-2"
>
<p className="text-sm">Advanced settings</p>
<CaretDown
className={clsx(
toggleSettings && 'rotate-180',
'transition-all duration-200'
)}
/>
</div>
{toggleSettings && (
<div className="rounded-b-md border-t border-app-line bg-app-box p-3 pt-2">
<Controller
name="indexerRulesIds"
render={({ field }) => (
<IndexerRuleEditor
field={field}
label="File indexing rules:"
className="relative flex flex-col"
rulesContainerClass="grid grid-cols-2 gap-1"
ruleButtonClass="w-full"
/>
)}
control={form.control}
<Accordion title="Advanced settings">
<Controller
name="indexerRulesIds"
render={({ field }) => (
<IndexerRuleEditor
field={field}
label="File indexing rules:"
className="relative flex flex-col"
rulesContainerClass="grid grid-cols-2 gap-1"
ruleButtonClass="w-full"
/>
</div>
)}
</div>
)}
control={form.control}
/>
</Accordion>
</Dialog>
);
};

View File

@@ -0,0 +1,34 @@
import clsx from 'clsx';
import { CaretDown } from 'phosphor-react';
import { useState } from 'react';
interface Props {
	children: React.ReactNode;
	className?: string;
	title: string;
}

/**
 * A simple collapsible section: a clickable header row with a rotating caret
 * that expands or collapses its children. Starts collapsed.
 */
const Accordion = ({ title, className, children }: Props) => {
	const [isOpen, setIsOpen] = useState(false);

	return (
		<div className={clsx(className, 'rounded-md border border-app-line bg-app-darkBox')}>
			<div
				onClick={() => setIsOpen((open) => !open)}
				className="flex items-center justify-between px-3 py-2"
			>
				<p className="text-xs">{title}</p>
				<CaretDown
					className={clsx(isOpen && 'rotate-180', 'transition-all duration-200')}
				/>
			</div>
			{isOpen && (
				<div className="p-3 pt-2 border-t rounded-b-md border-app-line bg-app-box">
					{children}
				</div>
			)}
		</div>
	);
};

export default Accordion;

View File

@@ -22,6 +22,7 @@
"@zxcvbn-ts/language-common": "^2.0.1",
"@zxcvbn-ts/language-en": "^2.1.0",
"plausible-tracker": "^0.3.8",
"react-hook-form": "~7.45.2",
"valtio": "^1.7.4",
"zod": "~3.22.2"
},

View File

@@ -6,7 +6,8 @@ export type Procedures = {
{ key: "backups.getAll", input: never, result: GetAll } |
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "categories.list", input: LibraryArgs<null>, result: { [key in Category]: number } } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[]; media_data: MediaData | null } | null } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[] } | null } |
{ key: "files.getMediaData", input: LibraryArgs<number>, result: MediaMetadata } |
{ key: "files.getPath", input: LibraryArgs<number>, result: string | null } |
{ key: "invalidation.test-invalidate", input: never, result: number } |
{ key: "jobs.isActive", input: LibraryArgs<null>, result: boolean } |
@@ -96,6 +97,8 @@ export type Procedures = {
{ key: "sync.newMessage", input: LibraryArgs<null>, result: null }
};
export type AudioMetadata = { duration: number | null; audio_codec: string | null }
export type Backup = ({ id: string; timestamp: string; library_id: string; library_name: string }) & { path: string }
export type BuildInfo = { version: string; commit: string }
@@ -111,8 +114,14 @@ export type Category = "Recents" | "Favorites" | "Albums" | "Photos" | "Videos"
export type ChangeNodeNameArgs = { name: string | null }
export type ColorProfile = "Normal" | "Custom" | "HDRNoOriginal" | "HDRWithOriginal" | "OriginalForHDR" | "Panorama" | "PortraitHDR" | "Portrait"
export type Composite = "Unknown" | "False" | "General" | "Live"
export type CreateLibraryArgs = { name: LibraryName }
export type Dimensions = { width: number; height: number }
export type DiskType = "SSD" | "HDD" | "Removable"
export type DoubleClickAction = "openFile" | "quickPreview"
@@ -152,6 +161,10 @@ export type FilePathSearchOrdering = { field: "name"; value: SortOrder } | { fie
export type FilePathWithObject = { id: number; pub_id: number[]; is_dir: boolean | null; cas_id: string | null; integrity_checksum: string | null; location_id: number | null; materialized_path: string | null; name: string | null; extension: string | null; size_in_bytes: string | null; size_in_bytes_bytes: number[] | null; inode: number[] | null; device: number[] | null; object_id: number | null; key_id: number | null; date_created: string | null; date_modified: string | null; date_indexed: string | null; object: Object | null }
export type Flash = { mode: FlashMode; fired: boolean | null; returned: boolean | null; red_eye_reduction: boolean | null }
export type FlashMode = "Unknown" | "On" | "Off" | "Auto" | "Forced"
export type FromPattern = { pattern: string; replace_all: boolean }
export type FullRescanArgs = { location_id: number; reidentify_objects: boolean }
@@ -166,6 +179,10 @@ export type Header = { id: string; timestamp: string; library_id: string; librar
export type IdentifyUniqueFilesArgs = { id: number; path: string }
export type ImageData = { device_make: string | null; device_model: string | null; color_space: string | null; color_profile: ColorProfile | null; focal_length: number | null; shutter_speed: number | null; flash: Flash | null; orientation: Orientation; lens_make: string | null; lens_model: string | null; bit_depth: number | null; red_eye: boolean | null; zoom: number | null; iso: number | null; software: string | null; serial_number: string | null; lens_serial_number: string | null; contrast: number | null; saturation: number | null; sharpness: number | null; composite: Composite | null }
export type ImageMetadata = { dimensions: Dimensions; date_taken: MediaTime; location: MediaLocation | null; camera_data: ImageData; artist: string | null; description: string | null; copyright: string | null; exif_version: string | null }
export type IndexerRule = { id: number; pub_id: number[]; name: string | null; default: boolean | null; rules_per_kind: number[] | null; date_created: string | null; date_modified: string | null }
/**
@@ -239,7 +256,16 @@ export type MaybeNot<T> = T | { not: T }
export type MaybeUndefined<T> = null | null | T
export type MediaData = { id: number; pixel_width: number | null; pixel_height: number | null; longitude: number | null; latitude: number | null; fps: number | null; capture_device_make: string | null; capture_device_model: string | null; capture_device_software: string | null; duration_seconds: number | null; codecs: string | null; streams: number | null }
export type MediaLocation = { latitude: number; longitude: number; altitude: number | null; direction: number | null }
export type MediaMetadata = ({ type: "Image" } & ImageMetadata) | ({ type: "Video" } & VideoMetadata) | ({ type: "Audio" } & AudioMetadata)
/**
* This can be either naive with no TZ (`YYYY-MM-DD HH-MM-SS`) or UTC with a fixed offset (`rfc3339`).
*
* This may also be `undefined`.
*/
export type MediaTime = { Naive: string } | { Utc: string } | "Undefined"
export type NodeState = ({ id: string; name: string; p2p_port: number | null; p2p_email: string | null; p2p_img_url: string | null }) & { data_path: string }
@@ -288,6 +314,8 @@ export type OperatingSystem = "Windows" | "Linux" | "MacOS" | "Ios" | "Android"
export type OptionalRange<T> = { from: T | null; to: T | null }
export type Orientation = "Normal" | "MirroredHorizontal" | "CW90" | "MirroredVertical" | "MirroredHorizontalAnd270CW" | "MirroredHorizontalAnd90CW" | "CW180" | "CW270"
/**
* TODO: P2P event for the frontend
*/
@@ -343,4 +371,6 @@ export type TagCreateArgs = { name: string; color: string }
export type TagUpdateArgs = { id: number; name: string | null; color: string | null }
export type VideoMetadata = { duration: number | null; video_codec: string | null; audio_codec: string | null }
export type Volume = { name: string; mount_points: string[]; total_capacity: string; available_capacity: string; disk_type: DiskType; file_system: string | null; is_root_filesystem: boolean }

Submodule packages/test-files updated: 58edee8a34...146fbb543f

BIN
pnpm-lock.yaml generated
View File

Binary file not shown.