mirror of
https://github.com/booklore-app/booklore.git
synced 2025-12-23 22:28:11 -05:00
Merge pull request #1939 from booklore-app/develop
Merge develop into master for release
This commit is contained in:
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
25
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -2,20 +2,13 @@
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
# 🐛 Bug Report for Booklore
|
||||
# 🐛 Bug Report
|
||||
|
||||
Thank you for taking the time to report this bug. Your feedback helps make Booklore better for everyone!
|
||||
|
||||
Let's squash this bug together! 🔨
|
||||
|
||||
---
|
||||
|
||||
## 📝 What happened?
|
||||
## 📝 Description
|
||||
<!-- Describe what went wrong -->
|
||||
|
||||
|
||||
@@ -28,11 +21,12 @@ ## 🔄 Steps to Reproduce
|
||||
|
||||
**Result:** <!-- What happened after these steps? -->
|
||||
|
||||
|
||||
## ✅ Expected Behavior
|
||||
<!-- Describe what should have happened instead -->
|
||||
|
||||
|
||||
## 📸 Screenshots / Error Messages
|
||||
## 📸 Screenshots / Error Messages _(Optional)_
|
||||
<!-- Share any screenshots or error messages here (just drag & drop) -->
|
||||
|
||||
|
||||
@@ -44,9 +38,10 @@ ## 💻 Environment
|
||||
- **Installation:** (e.g., Docker, Unraid, Manual)
|
||||
- **Storage Type:** (e.g., Local HDD/SSD, Synology NAS, SMB Share, NFS Mount, S3 Bucket)
|
||||
|
||||
## 📌 Additional Context
|
||||
<!-- Add any other relevant information: recent changes, specific books, configuration details, etc. -->
|
||||
|
||||
|
||||
## ✨ Possible Solution _(Optional)_
|
||||
## 💡 Possible Solution _(Optional)_
|
||||
<!-- Share any ideas on how to fix this issue -->
|
||||
|
||||
|
||||
## 📌 Additional Context _(Optional)_
|
||||
<!-- Add any other relevant information: recent changes, specific books, configuration details, etc. -->
|
||||
|
||||
9
.github/ISSUE_TEMPLATE/feature_request.md
vendored
9
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -4,14 +4,9 @@
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
# ✨ Feature Request for Booklore
|
||||
|
||||
Thank you for contributing to Booklore's development. Your suggestions help shape the future of this project.
|
||||
|
||||
---
|
||||
# ✨ Feature Request
|
||||
|
||||
## 📝 Description
|
||||
<!-- Describe the problem you're facing and the solution you're proposing -->
|
||||
@@ -36,5 +31,5 @@ ## 🎨 Technical Details _(Optional)_
|
||||
<!-- Share implementation ideas, alternative solutions, or related features -->
|
||||
|
||||
|
||||
## 📌 Additional Context
|
||||
## 📌 Additional Context _(Optional)_
|
||||
<!-- Any other information, research, or context that would be helpful -->
|
||||
|
||||
50
.github/dependabot.yml
vendored
Normal file
50
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
version: 2
|
||||
updates:
|
||||
# Backend – Gradle (Java / Spring)
|
||||
- package-ecosystem: "gradle"
|
||||
directory: "/booklore-api"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "friday"
|
||||
time: "03:00"
|
||||
timezone: "UTC"
|
||||
open-pull-requests-limit: 5
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "backend"
|
||||
commit-message:
|
||||
prefix: "chore(deps)"
|
||||
groups:
|
||||
gradle-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
# Frontend – npm (Angular)
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/booklore-ui"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "friday"
|
||||
time: "03:00"
|
||||
timezone: "UTC"
|
||||
open-pull-requests-limit: 5
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "frontend"
|
||||
commit-message:
|
||||
prefix: "chore(deps)"
|
||||
groups:
|
||||
npm-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
# GitHub Actions
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "ci"
|
||||
commit-message:
|
||||
prefix: "chore(deps)"
|
||||
35
.github/pull_request_template.md
vendored
Normal file
35
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
# 🚀 Pull Request
|
||||
|
||||
## 📝 Description
|
||||
<!-- Provide a clear and concise summary of the changes introduced in this pull request -->
|
||||
<!-- Reference related issues using "Fixes #123", "Closes #456", or "Relates to #789" -->
|
||||
|
||||
|
||||
## 🛠️ Changes Implemented
|
||||
<!-- Detail the specific modifications, additions, or removals made in this pull request -->
|
||||
-
|
||||
|
||||
|
||||
## 🧪 Testing Strategy
|
||||
<!-- Describe the testing methodology used to verify the correctness of these changes -->
|
||||
<!-- Include testing approach, scenarios covered, and edge cases considered -->
|
||||
|
||||
|
||||
## 📸 Visual Changes _(if applicable)_
|
||||
<!-- Attach screenshots or videos demonstrating UI/UX modifications -->
|
||||
|
||||
|
||||
## ⚠️ Required Pre-Submission Checklist
|
||||
<!-- ⛔ Pull requests will NOT be considered for review unless ALL required items are completed -->
|
||||
<!-- All items below are MANDATORY prerequisites for submission -->
|
||||
- [ ] Code adheres to project style guidelines and conventions
|
||||
- [ ] Branch synchronized with latest `develop` branch
|
||||
- [ ] Automated unit/integration tests added/updated to cover changes
|
||||
- [ ] All tests pass locally (`./gradlew test` for backend)
|
||||
- [ ] Manual testing completed in local development environment
|
||||
- [ ] Flyway migration versioning follows correct sequence _(if database schema modified)_
|
||||
- [ ] Documentation pull request submitted to [booklore-docs](https://github.com/booklore-app/booklore-docs) _(required for features or enhancements that introduce user-facing or visual changes)_
|
||||
|
||||
|
||||
## 💬 Additional Context _(optional)_
|
||||
<!-- Provide any supplementary information, implementation considerations, or discussion points for reviewers -->
|
||||
70
.github/scripts/analyze-changes.sh
vendored
Normal file
70
.github/scripts/analyze-changes.sh
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/bin/bash
|
||||
# Exit immediately if a command exits with a non-zero status.
|
||||
set -e
|
||||
# Exit if any command in a pipeline fails, not just the last one.
|
||||
set -o pipefail
|
||||
|
||||
# Define the path where Flyway migration files are stored.
|
||||
MIGRATION_PATH="booklore-api/src/main/resources/db/migration"
|
||||
|
||||
# Get ALL changes: Added (A), Modified (M), Renamed (R), Copied (C), Deleted (D)
|
||||
# for SQL files in the migration path between the comparison ref and the current HEAD.
|
||||
# The output is saved to a temporary file for further processing.
|
||||
git diff --name-status --diff-filter=AMRCD $COMPARE_REF...HEAD -- "$MIGRATION_PATH/V*.sql" > /tmp/all_changes.txt
|
||||
|
||||
# The check for no changes is now handled in the workflow.
|
||||
# If this script runs, it's because changes were detected.
|
||||
|
||||
echo "📝 Migration changes detected:"
|
||||
# Display the detected changes, indented for readability.
|
||||
cat /tmp/all_changes.txt | sed 's/^/ /'
|
||||
echo ""
|
||||
|
||||
# Check for deleted files
|
||||
# Grep for lines starting with 'D' (Deleted). The '|| true' prevents the script from exiting if no matches are found.
|
||||
DELETED=$(grep "^D" /tmp/all_changes.txt || true)
|
||||
if [ -n "$DELETED" ]; then
|
||||
echo "❌ ERROR: Deleted migration files detected!"
|
||||
echo "$DELETED" | sed 's/^/ /'
|
||||
echo ""
|
||||
echo "Flyway migrations should NEVER be deleted after being applied."
|
||||
echo "If you need to revert changes, create a new migration."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for renamed files
|
||||
# Grep for lines starting with 'R' (Renamed).
|
||||
RENAMED=$(grep "^R" /tmp/all_changes.txt || true)
|
||||
if [ -n "$RENAMED" ]; then
|
||||
echo "❌ ERROR: Renamed migration files detected!"
|
||||
echo "$RENAMED" | sed 's/^/ /'
|
||||
echo ""
|
||||
echo "Flyway migrations should NEVER be renamed after being applied."
|
||||
echo "This will cause issues with migration history tracking."
|
||||
echo ""
|
||||
echo "💡 To fix: Revert the rename and create a new migration file instead."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for modified files
|
||||
# Grep for lines starting with 'M' (Modified).
|
||||
MODIFIED=$(grep "^M" /tmp/all_changes.txt || true)
|
||||
if [ -n "$MODIFIED" ]; then
|
||||
echo "❌ ERROR: Modified migration files detected!"
|
||||
echo "$MODIFIED" | sed 's/^/ /'
|
||||
echo ""
|
||||
echo "Flyway migrations should NEVER be modified after being applied."
|
||||
echo "This will cause checksum validation failures in environments where it has already been applied."
|
||||
echo ""
|
||||
echo "💡 To fix: Revert the changes and create a new migration file instead."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract ADDED files for conflict checking in a later step.
|
||||
# We grep for lines starting with 'A' (Added), then use 'cut' to get just the file path.
|
||||
# 'touch' ensures the file exists even if there are no added files.
|
||||
grep "^A" /tmp/all_changes.txt | cut -f2- > /tmp/pr_files.txt || touch /tmp/pr_files.txt
|
||||
|
||||
# Set a GitHub Actions output variable to indicate that migration changes were found.
|
||||
# This is used by the workflow to decide whether to run subsequent steps.
|
||||
echo "has_changes=true" >> $GITHUB_OUTPUT
|
||||
96
.github/scripts/check-conflicts.sh
vendored
Normal file
96
.github/scripts/check-conflicts.sh
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
#!/bin/bash
|
||||
# Exit immediately if a command exits with a non-zero status.
|
||||
set -e
|
||||
# Exit if any command in a pipeline fails, not just the last one.
|
||||
set -o pipefail
|
||||
|
||||
# If there are no new versions to check, exit gracefully.
|
||||
# This file is created by 'validate-versions.sh'.
|
||||
# This can happen if a PR has changes, but none are new migration files.
|
||||
if [ ! -s /tmp/versions_pr_unique.txt ]; then
|
||||
echo "ℹ️ No new migration versions to check for conflicts."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Define the path where Flyway migration files are stored.
|
||||
MIGRATION_PATH="booklore-api/src/main/resources/db/migration"
|
||||
|
||||
echo "🔍 Fetching migration files from $COMPARE_REF..."
|
||||
|
||||
# Get ALL existing migration files from the comparison ref (e.g., 'develop' or a tag).
|
||||
# 'git ls-tree' lists the contents of a tree object.
|
||||
# The output is piped to grep to filter for only Flyway SQL files.
|
||||
# '|| touch' ensures the temp file exists even if no files are found.
|
||||
git ls-tree -r --name-only $COMPARE_REF -- "$MIGRATION_PATH/" 2>/dev/null | \
|
||||
grep "V.*\.sql$" > /tmp/base_files.txt || touch /tmp/base_files.txt
|
||||
|
||||
# Handle the case where no migration files exist in the base branch.
|
||||
if [ ! -s /tmp/base_files.txt ]; then
|
||||
echo "⚠️ No migration files found in $COMPARE_REF"
|
||||
echo "This might be the first migration or the path has changed."
|
||||
echo ""
|
||||
echo "✅ Skipping version conflict check."
|
||||
|
||||
PR_COUNT=$(wc -l < /tmp/versions_pr_unique.txt)
|
||||
echo ""
|
||||
echo "📊 Migration Summary:"
|
||||
echo " - Existing migrations in $COMPARE_REF: 0"
|
||||
echo " - New migrations in this PR: $PR_COUNT"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📋 Found $(wc -l < /tmp/base_files.txt) migration files in $COMPARE_REF"
|
||||
|
||||
# Extract versions from the base files.
|
||||
# The loop reads each file path, extracts the version number from the filename,
|
||||
# and appends it to a temporary file.
|
||||
> /tmp/versions_base.txt
|
||||
while IFS= read -r file; do
|
||||
filename=$(basename "$file")
|
||||
# sed extracts the version number (e.g., 1.0.0) from a filename like 'V1.0.0__description.sql'.
|
||||
version=$(echo "$filename" | sed -n 's/^V\([0-9.]*\)__.*/\1/p')
|
||||
[ -n "$version" ] && echo "$version" >> /tmp/versions_base.txt
|
||||
done < /tmp/base_files.txt
|
||||
|
||||
# Create a file with only unique, sorted version numbers from the base.
|
||||
sort -u /tmp/versions_base.txt > /tmp/versions_base_unique.txt
|
||||
|
||||
BASE_COUNT=$(wc -l < /tmp/versions_base_unique.txt)
|
||||
echo "📊 Found $BASE_COUNT unique versions in $COMPARE_REF"
|
||||
|
||||
# Find conflicts between base versions and versions from NEW PR files.
|
||||
# 'comm -12' finds lines common to both sorted files.
|
||||
CONFLICTS=$(comm -12 /tmp/versions_base_unique.txt /tmp/versions_pr_unique.txt)
|
||||
|
||||
# If conflicts are found, report them and exit with an error.
|
||||
if [ -n "$CONFLICTS" ]; then
|
||||
echo "❌ Version conflicts detected!"
|
||||
echo ""
|
||||
echo "The following versions from your new migration files already exist in $COMPARE_REF:"
|
||||
echo "$CONFLICTS" | sed 's/^/ V/'
|
||||
echo ""
|
||||
|
||||
# Show which files have conflicting versions for easier debugging.
|
||||
echo "Conflicting files:"
|
||||
while IFS= read -r version; do
|
||||
echo " Version V$version exists in:"
|
||||
grep "V${version}__" /tmp/base_files.txt | xargs -n1 basename | sed 's/^/ BASE: /'
|
||||
# /tmp/pr_files.txt contains only added files from the PR (from analyze-changes.sh).
|
||||
grep "V${version}__" /tmp/pr_files.txt | xargs -n1 basename | sed 's/^/ PR: /'
|
||||
done <<< "$CONFLICTS"
|
||||
|
||||
echo ""
|
||||
echo "💡 To fix: Use a version number that doesn't exist in $COMPARE_REF"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ No version conflicts detected."
|
||||
|
||||
# Get the count of new migrations in the PR.
|
||||
PR_COUNT=$(wc -l < /tmp/versions_pr_unique.txt)
|
||||
|
||||
# Print a final summary.
|
||||
echo ""
|
||||
echo "📊 Migration Summary:"
|
||||
echo " - Existing migrations in $COMPARE_REF: $BASE_COUNT"
|
||||
echo " - New migrations in this PR: $PR_COUNT"
|
||||
41
.github/scripts/determine-compare-ref.sh
vendored
Normal file
41
.github/scripts/determine-compare-ref.sh
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
#!/bin/bash
|
||||
# Exit immediately if a command exits with a non-zero status.
|
||||
set -e
|
||||
# Exit if any command in a pipeline fails, not just the last one.
|
||||
set -o pipefail
|
||||
|
||||
# The target branch of the pull request (e.g., 'develop', 'master') is passed as the first argument.
|
||||
TARGET_BRANCH="$1"
|
||||
echo "🎯 Target branch: $TARGET_BRANCH"
|
||||
|
||||
# Handle cases where the target branch is not specified, such as a direct push to a branch.
|
||||
if [ -z "$TARGET_BRANCH" ]; then
|
||||
echo "⚠️ No target branch specified (e.g., a direct push event). Defaulting to compare with 'develop'."
|
||||
TARGET_BRANCH="develop"
|
||||
fi
|
||||
|
||||
# Logic to determine the comparison reference based on the target branch.
|
||||
if [ "$TARGET_BRANCH" = "master" ]; then
|
||||
# For PRs to 'master', we compare against the latest git tag.
|
||||
# This is common for release workflows where 'master' only contains tagged releases.
|
||||
if ! LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null); then
|
||||
echo "⚠️ No tags found in repository. Skipping conflict check."
|
||||
# Set output to signal the workflow to stop.
|
||||
echo "has_ref=false" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
echo "📌 Comparing against last tag: $LAST_TAG"
|
||||
# Set the COMPARE_REF environment variable for subsequent steps in the job.
|
||||
echo "COMPARE_REF=$LAST_TAG" >> $GITHUB_ENV
|
||||
else
|
||||
# For all other cases (PRs to 'develop', other feature branches, or direct pushes),
|
||||
# we compare against the 'develop' branch.
|
||||
echo "🔄 Comparing against head of develop branch"
|
||||
# Ensure the local 'develop' branch is up-to-date with the remote.
|
||||
git fetch origin develop:develop
|
||||
# Set the COMPARE_REF to the remote develop branch.
|
||||
echo "COMPARE_REF=origin/develop" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
# Set a GitHub Actions output variable to indicate that a valid comparison ref was found.
|
||||
echo "has_ref=true" >> $GITHUB_OUTPUT
|
||||
79
.github/scripts/validate-versions.sh
vendored
Normal file
79
.github/scripts/validate-versions.sh
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
# Exit immediately if a command exits with a non-zero status.
|
||||
set -e
|
||||
# Exit if any command in a pipeline fails, not just the last one.
|
||||
set -o pipefail
|
||||
|
||||
# Define the path where Flyway migration files are stored.
|
||||
MIGRATION_PATH="booklore-api/src/main/resources/db/migration"
|
||||
|
||||
# --- Part 1: Check for duplicate versions within the PR branch itself ---
|
||||
|
||||
# Get ALL migration files in the current HEAD of the PR branch for an internal duplicate check.
|
||||
find "$MIGRATION_PATH" -type f -name "V*.sql" > /tmp/all_pr_files.txt
|
||||
|
||||
# Check for duplicate versions within the PR branch. This prevents merging a branch
|
||||
# that contains multiple files with the same version number.
|
||||
echo "🔎 Checking for duplicate versions in the branch..."
|
||||
> /tmp/versions_all_pr.txt
|
||||
# Loop through all found migration files and extract their version numbers.
|
||||
while IFS= read -r file; do
|
||||
filename=$(basename "$file")
|
||||
# sed extracts the version number (e.g., 1.0.0) from a filename like 'V1.0.0__description.sql'.
|
||||
version=$(echo "$filename" | sed -n 's/^V\([0-9.]*\)__.*/\1/p')
|
||||
[ -n "$version" ] && echo "$version" >> /tmp/versions_all_pr.txt
|
||||
done < /tmp/all_pr_files.txt
|
||||
|
||||
# 'uniq -d' filters for lines that appear more than once in the sorted list.
|
||||
sort /tmp/versions_all_pr.txt | uniq -d > /tmp/duplicates_in_pr.txt
|
||||
|
||||
# If the duplicates file is not empty, report the error and exit.
|
||||
if [ -s /tmp/duplicates_in_pr.txt ]; then
|
||||
echo "❌ Duplicate migration versions found within the branch!"
|
||||
echo ""
|
||||
echo "The following versions are duplicated:"
|
||||
while IFS= read -r version; do
|
||||
echo " - Version V$version is used by:"
|
||||
# Show the conflicting files for easy debugging.
|
||||
grep "V${version}__" /tmp/all_pr_files.txt | xargs -n1 basename | sed 's/^/ /'
|
||||
done < /tmp/duplicates_in_pr.txt
|
||||
echo ""
|
||||
echo "💡 To fix: Ensure all migration files have a unique version number."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ No duplicate versions found within the branch."
|
||||
|
||||
# --- Part 2: Extract versions from NEWLY ADDED files for conflict checking against the base branch ---
|
||||
|
||||
# /tmp/pr_files.txt is created by analyze-changes.sh and contains only ADDED files.
|
||||
# If the file doesn't exist or is empty, there's nothing to check.
|
||||
if [ ! -f /tmp/pr_files.txt ] || [ ! -s /tmp/pr_files.txt ]; then
|
||||
echo "ℹ️ No new migration files to check for conflicts."
|
||||
# Set output to signal the workflow to skip the conflict check step.
|
||||
echo "has_versions=false" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "🔎 Extracting versions from new files..."
|
||||
> /tmp/versions_pr.txt
|
||||
# Loop through only the NEWLY ADDED files and extract their versions.
|
||||
while IFS= read -r file; do
|
||||
filename=$(basename "$file")
|
||||
version=$(echo "$filename" | sed -n 's/^V\([0-9.]*\)__.*/\1/p')
|
||||
[ -n "$version" ] && echo "$version" >> /tmp/versions_pr.txt
|
||||
done < /tmp/pr_files.txt
|
||||
|
||||
# If no valid versions were extracted from the new files, exit.
|
||||
if [ ! -s /tmp/versions_pr.txt ]; then
|
||||
echo "ℹ️ No versions found in new migration files."
|
||||
echo "has_versions=false" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Create a sorted, unique list of versions from the new files.
|
||||
# This file will be used by 'check-conflicts.sh'.
|
||||
sort -u /tmp/versions_pr.txt > /tmp/versions_pr_unique.txt
|
||||
|
||||
# Set output to signal that there are new versions to check for conflicts.
|
||||
echo "has_versions=true" >> $GITHUB_OUTPUT
|
||||
224
.github/workflows/docker-build-publish.yml
vendored
224
.github/workflows/docker-build-publish.yml
vendored
@@ -2,11 +2,94 @@ name: Build, Tag, Push, and Release to GitHub Container Registry
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'develop'
|
||||
pull_request:
|
||||
branches:
|
||||
- '**'
|
||||
|
||||
jobs:
|
||||
check-for-migrations:
|
||||
name: Check for DB Migrations
|
||||
if: github.event_name == 'pull_request' && ((github.base_ref == 'master' && github.head_ref == 'develop') || github.base_ref == 'develop')
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
has_migrations: ${{ steps.check_migrations.outputs.has_migrations }}
|
||||
steps:
|
||||
- name: Checkout Repository for Diff
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Detect Flyway Migration Changes
|
||||
id: check_migrations
|
||||
run: |
|
||||
# Compare PR head with the target base branch
|
||||
if git diff --name-only origin/${{ github.base_ref }}...HEAD | grep -q "booklore-api/src/main/resources/db/migration/V.*.sql"; then
|
||||
echo "Migration file changes detected. Proceeding with migration preview."
|
||||
echo "has_migrations=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "No migration file changes detected. Skipping migration preview."
|
||||
echo "has_migrations=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
flyway-migration-preview:
|
||||
name: Flyway DB Migration Preview
|
||||
needs: [check-for-migrations]
|
||||
if: needs.check-for-migrations.outputs.has_migrations == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
mariadb:
|
||||
image: mariadb:10.6
|
||||
env:
|
||||
MYSQL_ROOT_PASSWORD: root
|
||||
MYSQL_DATABASE: booklore_test
|
||||
ports:
|
||||
- 3306:3306
|
||||
options: >-
|
||||
--health-cmd="mysqladmin ping --silent"
|
||||
--health-interval=5s
|
||||
--health-timeout=5s
|
||||
--health-retries=10
|
||||
|
||||
steps:
|
||||
- name: Checkout Base Branch
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: '${{ github.base_ref }}'
|
||||
|
||||
- name: Apply Migrations from Base Branch
|
||||
run: |
|
||||
echo "Applying migrations from '${{ github.base_ref }}' branch..."
|
||||
docker run --network host \
|
||||
-v ${{ github.workspace }}:/flyway/sql \
|
||||
flyway/flyway:11.19.0-alpine \
|
||||
-url=jdbc:mariadb://127.0.0.1:3306/booklore_test \
|
||||
-user=root -password=root \
|
||||
-locations=filesystem:/flyway/sql/booklore-api/src/main/resources/db/migration \
|
||||
migrate
|
||||
|
||||
- name: Checkout Pull Request Branch
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Apply Migrations from PR Branch
|
||||
run: |
|
||||
echo "Applying new migrations from PR branch..."
|
||||
docker run --network host \
|
||||
-v ${{ github.workspace }}:/flyway/sql \
|
||||
flyway/flyway:11.19.0-alpine \
|
||||
-url=jdbc:mariadb://127.0.0.1:3306/booklore_test \
|
||||
-user=root -password=root \
|
||||
-locations=filesystem:/flyway/sql/booklore-api/src/main/resources/db/migration \
|
||||
migrate
|
||||
|
||||
- name: Confirm Flyway Dry Run Success
|
||||
run: echo "✅ Flyway migration preview successful. Migrations can be applied cleanly."
|
||||
|
||||
build-and-push:
|
||||
needs: [check-for-migrations, flyway-migration-preview]
|
||||
if: always() && (needs.flyway-migration-preview.result == 'success' || needs.flyway-migration-preview.result == 'skipped')
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
@@ -18,44 +101,47 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
- name: Authenticate to Docker Hub
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
- name: Authenticate to GitHub Container Registry
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ github.token }}
|
||||
|
||||
- name: Set up QEMU for multi-arch builds
|
||||
- name: Set Up QEMU for Multi-Architecture Builds
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
- name: Set Up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Set up JDK 21
|
||||
uses: actions/setup-java@v4
|
||||
- name: Set Up JDK 21
|
||||
uses: actions/setup-java@v5
|
||||
with:
|
||||
java-version: '21'
|
||||
distribution: 'temurin'
|
||||
cache: 'gradle'
|
||||
|
||||
- name: Run Backend Tests
|
||||
- name: Execute Backend Tests
|
||||
id: backend_tests
|
||||
working-directory: ./booklore-api
|
||||
run: |
|
||||
echo "Running backend tests with testcontainers..."
|
||||
./gradlew test
|
||||
./gradlew test --no-daemon --parallel --build-cache
|
||||
continue-on-error: true
|
||||
|
||||
- name: Publish Test Results
|
||||
- name: Publish Backend Test Results
|
||||
uses: EnricoMi/publish-unit-test-result-action@v2
|
||||
if: always()
|
||||
with:
|
||||
@@ -65,8 +151,8 @@ jobs:
|
||||
report_individual_runs: true
|
||||
report_suite_logs: 'any'
|
||||
|
||||
- name: Upload Test Reports
|
||||
uses: actions/upload-artifact@v4
|
||||
- name: Upload Backend Test Reports
|
||||
uses: actions/upload-artifact@v6
|
||||
if: always()
|
||||
with:
|
||||
name: test-reports
|
||||
@@ -75,13 +161,13 @@ jobs:
|
||||
booklore-api/build/test-results/
|
||||
retention-days: 30
|
||||
|
||||
- name: Check Test Results
|
||||
- name: Validate Backend Test Results
|
||||
if: steps.backend_tests.outcome == 'failure'
|
||||
run: |
|
||||
echo "❌ Backend tests failed! Check the test results above."
|
||||
exit 1
|
||||
|
||||
- name: Get Latest Master Version
|
||||
- name: Retrieve Latest Master Version Tag
|
||||
id: get_version
|
||||
run: |
|
||||
latest_tag=$(git tag --list "v*" --sort=-v:refname | head -n 1)
|
||||
@@ -89,7 +175,7 @@ jobs:
|
||||
echo "latest_tag=$latest_tag" >> $GITHUB_ENV
|
||||
echo "Latest master tag: $latest_tag"
|
||||
|
||||
- name: Determine Version Bump (Only for Master)
|
||||
- name: Determine Version Bump (Master Only)
|
||||
if: github.ref == 'refs/heads/master'
|
||||
id: determine_bump
|
||||
env:
|
||||
@@ -107,11 +193,11 @@ jobs:
|
||||
labels=$(gh pr view "$pr_number" --json labels --jq '.labels[].name' || echo "")
|
||||
echo "PR labels: $labels"
|
||||
|
||||
if echo "$labels" | grep -q 'major'; then
|
||||
if echo "$labels" | grep -q 'bump:major'; then
|
||||
bump="major"
|
||||
elif echo "$labels" | grep -q 'minor'; then
|
||||
elif echo "$labels" | grep -q 'bump:minor'; then
|
||||
bump="minor"
|
||||
elif echo "$labels" | grep -q 'patch'; then
|
||||
elif echo "$labels" | grep -q 'bump:patch'; then
|
||||
bump="patch"
|
||||
else
|
||||
last_commit_msg=$(git log -1 --pretty=%B)
|
||||
@@ -161,28 +247,44 @@ jobs:
|
||||
echo "image_tag=$image_tag" >> $GITHUB_ENV
|
||||
echo "Image tag: $image_tag"
|
||||
|
||||
- name: Build and Push Docker Image
|
||||
run: |
|
||||
docker buildx create --use
|
||||
docker buildx build \
|
||||
--platform linux/amd64,linux/arm64 \
|
||||
--build-arg APP_VERSION=${{ env.image_tag }} \
|
||||
--build-arg APP_REVISION=${{ github.sha }} \
|
||||
--tag booklore/booklore:${{ env.image_tag }} \
|
||||
--tag ghcr.io/booklore-app/booklore:${{ env.image_tag }} \
|
||||
--push .
|
||||
- name: Build and push Docker image
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
booklore/booklore:${{ env.image_tag }}
|
||||
ghcr.io/booklore-app/booklore:${{ env.image_tag }}
|
||||
build-args: |
|
||||
APP_VERSION=${{ env.image_tag }}
|
||||
APP_REVISION=${{ github.sha }}
|
||||
cache-from: |
|
||||
type=gha
|
||||
type=registry,ref=ghcr.io/booklore-app/booklore:buildcache
|
||||
cache-to: |
|
||||
type=gha,mode=max
|
||||
type=registry,ref=ghcr.io/booklore-app/booklore:buildcache,mode=max
|
||||
|
||||
- name: Push Latest Tag (Only for Master)
|
||||
if: github.ref == 'refs/heads/master'
|
||||
run: |
|
||||
docker buildx build \
|
||||
--platform linux/amd64,linux/arm64 \
|
||||
--build-arg APP_VERSION=${{ env.new_tag }} \
|
||||
--tag booklore/booklore:latest \
|
||||
--tag ghcr.io/booklore-app/booklore:latest \
|
||||
--push .
|
||||
- name: Push Latest Tag (Master Only)
|
||||
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
booklore/booklore:latest
|
||||
booklore/booklore:${{ env.new_tag }}
|
||||
ghcr.io/booklore-app/booklore:latest
|
||||
ghcr.io/booklore-app/booklore:${{ env.new_tag }}
|
||||
build-args: |
|
||||
APP_VERSION=${{ env.new_tag }}
|
||||
APP_REVISION=${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
|
||||
- name: Update Release Draft (Only for Master)
|
||||
- name: Update GitHub Release Draft (Master Only)
|
||||
if: github.ref == 'refs/heads/master'
|
||||
uses: release-drafter/release-drafter@v6
|
||||
with:
|
||||
@@ -191,47 +293,9 @@ jobs:
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
|
||||
- name: Publish Draft Release (Only for Master)
|
||||
- name: Publish GitHub Draft Release (Master Only)
|
||||
if: github.ref == 'refs/heads/master'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
gh release edit ${{ env.new_tag }} --draft=false
|
||||
|
||||
- name: Notify Discord of New Release
|
||||
if: false
|
||||
continue-on-error: true
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
NEW_TAG: ${{ env.new_tag }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
if [ -z "${DISCORD_WEBHOOK_URL:-}" ]; then
|
||||
echo "DISCORD_WEBHOOK_URL not set, skipping Discord notification."
|
||||
exit 0
|
||||
fi
|
||||
release_json=$(gh release view "$NEW_TAG" --json name,body,url)
|
||||
release_name=$(jq -r '.name' <<< "$release_json")
|
||||
release_body=$(jq -r '.body' <<< "$release_json")
|
||||
release_url=$(jq -r '.url' <<< "$release_json")
|
||||
clean_body=$(echo "$release_body" | tr -d '\r')
|
||||
max_length=1800
|
||||
if [ ${#clean_body} -gt $max_length ]; then
|
||||
clean_body="${clean_body:0:$((max_length-12))}… [truncated]"
|
||||
fi
|
||||
payload=$(jq -n \
|
||||
--arg title "New Release: $release_name" \
|
||||
--arg url "$release_url" \
|
||||
--arg desc "$clean_body" \
|
||||
'{
|
||||
content: null,
|
||||
embeds: [{
|
||||
title: $title,
|
||||
url: $url,
|
||||
description: $desc,
|
||||
color: 3066993
|
||||
}]
|
||||
}')
|
||||
curl -H "Content-Type: application/json" -d "$payload" "$DISCORD_WEBHOOK_URL"
|
||||
gh release edit ${{ env.new_tag }} --draft=true
|
||||
|
||||
396
CONTRIBUTING.md
396
CONTRIBUTING.md
@@ -1,16 +1,17 @@
|
||||
# Contributing to Booklore
|
||||
|
||||
🎉 Thanks for your interest in contributing to **Booklore**, a modern, self-hostable digital library system for books and comics. Whether you're fixing bugs, adding features, improving documentation, or asking questions - your contribution matters!
|
||||
🎉 **Thank you for your interest in contributing to Booklore!** Whether you're fixing bugs, adding features, improving documentation, or simply asking questions, every contribution helps make Booklore better for everyone.
|
||||
|
||||
---
|
||||
|
||||
## 📚 Overview
|
||||
## 📚 What is Booklore?
|
||||
|
||||
**Booklore** is a self-hostable platform designed to manage and read books and comics. It includes:
|
||||
**Booklore** is a modern, self-hostable digital library platform for managing and reading books and comics. It's designed with privacy, flexibility, and ease of use in mind.
|
||||
|
||||
- **Frontend**: Angular 20, TypeScript, PrimeNG 19, Tailwind CSS
|
||||
**Tech Stack:**
|
||||
- **Frontend**: Angular 20, TypeScript, PrimeNG 19
|
||||
- **Backend**: Java 21, Spring Boot 3.5
|
||||
- **Authentication**: Local JWT + optional OIDC (e.g. Authentik)
|
||||
- **Authentication**: Local JWT + optional OIDC (e.g., Authentik)
|
||||
- **Database**: MariaDB
|
||||
- **Deployment**: Docker-compatible, reverse proxy-ready
|
||||
|
||||
@@ -20,116 +21,202 @@ ## 📦 Project Structure
|
||||
|
||||
```
|
||||
booklore/
|
||||
├── booklore-ui/ # Angular frontend
|
||||
├── booklore-api/ # Spring Boot backend
|
||||
├── assets/ # Shared assets
|
||||
├── booklore-ui/ # Angular frontend application
|
||||
├── booklore-api/ # Spring Boot backend API
|
||||
├── assets/ # Shared assets (logos, icons, etc.)
|
||||
├── docker-compose.yml # Production Docker setup
|
||||
└── dev.docker-compose.yml # Development Docker setup
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Getting Started
|
||||
|
||||
1. **Fork the repository** on GitHub
|
||||
2. **Clone your fork** locally:
|
||||
### 1. Fork and Clone
|
||||
|
||||
First, fork the repository to your GitHub account, then clone it locally:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/adityachandelgit/BookLore.git
|
||||
# Clone your fork
|
||||
git clone https://github.com/<your-username>/booklore.git
|
||||
cd booklore
|
||||
|
||||
# Add upstream remote to keep your fork in sync
|
||||
git remote add upstream https://github.com/booklore-app/booklore.git
|
||||
```
|
||||
|
||||
### 2. Keep Your Fork Updated
|
||||
|
||||
Before starting work on a new feature or fix:
|
||||
|
||||
```bash
|
||||
# Fetch latest changes from upstream
|
||||
git fetch upstream
|
||||
|
||||
# Merge upstream changes into your local main branch
|
||||
git checkout main
|
||||
git merge upstream/main
|
||||
|
||||
# Push updates to your fork
|
||||
git push origin main
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🧱 Local Development Setup
|
||||
|
||||
Booklore has a simple all-in-one Docker development stack, or you can install & run everything manually.
|
||||
Booklore offers two development approaches: an all-in-one Docker stack for quick setup, or manual installation for more control.
|
||||
|
||||
### Option 1: Docker Development Stack (Recommended for Quick Start)
|
||||
|
||||
### Development using Docker stack
|
||||
|
||||
Run `docker compose -f dev.docker-compose.yml up`
|
||||
|
||||
- Dev web server is accessible at `http://localhost:4200/`
|
||||
- Dev database is accessible at `http://localhost:3366/`
|
||||
- Remote java debugging is accessible at `http://localhost:5005/`
|
||||
|
||||
All ports are configurable using environment variables - see dev.docker-compose.yml
|
||||
|
||||
---
|
||||
|
||||
### Development on local machine
|
||||
|
||||
#### 1. Prerequisites
|
||||
|
||||
- **Java 21+**
|
||||
- **Node.js 18+**
|
||||
- **MariaDB**
|
||||
- **Docker and Docker Compose**
|
||||
|
||||
---
|
||||
|
||||
#### 2. Frontend Setup
|
||||
|
||||
To set up the Angular frontend:
|
||||
This option sets up everything with a single command:
|
||||
|
||||
```bash
|
||||
cd booklore-ui
|
||||
npm install
|
||||
ng serve
|
||||
docker compose -f dev.docker-compose.yml up
|
||||
```
|
||||
|
||||
The dev server runs at `http://localhost:4200/`.
|
||||
**What you get:**
|
||||
- ✅ Frontend dev server at `http://localhost:4200/`
|
||||
- ✅ Backend API at `http://localhost:8080/`
|
||||
- ✅ MariaDB at `localhost:3366`
|
||||
- ✅ Remote Java debugging at `localhost:5005`
|
||||
|
||||
> ⚠️ Use `--force` with `npm install` only as a last resort for dependency conflicts.
|
||||
**Note:** All ports are configurable via environment variables in `dev.docker-compose.yml`:
|
||||
- `FRONTEND_PORT` (default: 4200)
|
||||
- `BACKEND_PORT` (default: 8080)
|
||||
- `DB_PORT` (default: 3366)
|
||||
- `REMOTE_DEBUG_PORT` (default: 5005)
|
||||
|
||||
**Stopping the stack:**
|
||||
```bash
|
||||
docker compose -f dev.docker-compose.yml down
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### 3. Backend Setup
|
||||
### Option 2: Manual Local Development
|
||||
|
||||
##### a. Configure `application-dev.yml`
|
||||
For more control over your development environment, you can run each component separately.
|
||||
|
||||
Create or edit `booklore-api/src/main/resources/application-dev.yml`:
|
||||
#### Prerequisites
|
||||
|
||||
Ensure you have the following installed:
|
||||
- **Java 21+** ([Download](https://adoptium.net/))
|
||||
- **Node.js 18+** and **npm** ([Download](https://nodejs.org/))
|
||||
- **MariaDB 10.6+** ([Download](https://mariadb.org/download/))
|
||||
- **Git** ([Download](https://git-scm.com/))
|
||||
|
||||
#### Frontend Setup
|
||||
|
||||
```bash
|
||||
# Navigate to the frontend directory
|
||||
cd booklore-ui
|
||||
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Start the development server
|
||||
ng serve
|
||||
|
||||
# Or use npm script
|
||||
npm start
|
||||
```
|
||||
|
||||
The frontend will be available at `http://localhost:4200/` with hot-reload enabled.
|
||||
|
||||
**Common Issues:**
|
||||
- If you encounter dependency conflicts, try `npm install --legacy-peer-deps`
|
||||
- Use `--force` only as a last resort
|
||||
|
||||
---
|
||||
|
||||
#### Backend Setup
|
||||
|
||||
##### Step 1: Configure Application Properties
|
||||
|
||||
Create a development configuration file at `booklore-api/src/main/resources/application-dev.yml`:
|
||||
|
||||
```yaml
|
||||
app:
|
||||
path-book: '/path/to/booklore/books' # Directory for book/comic files
|
||||
path-config: '/path/to/booklore/config' # Directory for thumbnails, metadata, etc.
|
||||
# Path where books and comics are stored
|
||||
path-book: '/Users/yourname/booklore-data/books'
|
||||
|
||||
# Path for thumbnails, metadata cache, and other config files
|
||||
path-config: '/Users/yourname/booklore-data/config'
|
||||
|
||||
spring:
|
||||
datasource:
|
||||
driver-class-name: org.mariadb.jdbc.Driver
|
||||
url: jdbc:mariadb://localhost:3333/booklore?createDatabaseIfNotExist=true
|
||||
url: jdbc:mariadb://localhost:3306/booklore?createDatabaseIfNotExist=true
|
||||
username: root
|
||||
password: Password123
|
||||
password: your_secure_password
|
||||
```
|
||||
|
||||
> 🔧 Replace `/path/to/...` with actual local paths
|
||||
**Important:**
|
||||
- Replace `/Users/yourname/...` with actual paths on your system
|
||||
- Create these directories if they don't exist
|
||||
- Ensure proper read/write permissions
|
||||
|
||||
##### b. Run the Backend
|
||||
**Example paths:**
|
||||
- **macOS/Linux**: `/Users/yourname/booklore-data/books`
|
||||
- **Windows**: `C:\Users\yourname\booklore-data\books`
|
||||
|
||||
##### Step 2: Set Up the Database
|
||||
|
||||
Ensure MariaDB is running and create the database:
|
||||
|
||||
```bash
|
||||
# Connect to MariaDB
|
||||
mysql -u root -p
|
||||
|
||||
# Create database and user (optional)
|
||||
CREATE DATABASE IF NOT EXISTS booklore;
|
||||
CREATE USER 'booklore_user'@'localhost' IDENTIFIED BY 'your_secure_password';
|
||||
GRANT ALL PRIVILEGES ON booklore.* TO 'booklore_user'@'localhost';
|
||||
FLUSH PRIVILEGES;
|
||||
EXIT;
|
||||
```
|
||||
|
||||
##### Step 3: Run the Backend
|
||||
|
||||
```bash
|
||||
cd booklore-api
|
||||
./gradlew bootRun
|
||||
./gradlew bootRun --args='--spring.profiles.active=dev'
|
||||
```
|
||||
|
||||
The backend API will be available at `http://localhost:8080/`
|
||||
|
||||
**Verify it's running:**
|
||||
```bash
|
||||
curl http://localhost:8080/actuator/health
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🧪 Testing
|
||||
|
||||
### Frontend
|
||||
Always run tests before submitting a pull request to ensure your changes don't break existing functionality.
|
||||
|
||||
Run unit tests using:
|
||||
|
||||
```bash
|
||||
cd booklore-ui
|
||||
ng test
|
||||
```
|
||||
|
||||
### Backend
|
||||
|
||||
Run backend tests using:
|
||||
### Backend Tests
|
||||
|
||||
```bash
|
||||
cd booklore-api
|
||||
|
||||
# Run all tests
|
||||
./gradlew test
|
||||
|
||||
# Run tests with detailed output
|
||||
./gradlew test --info
|
||||
|
||||
# Run a specific test class
|
||||
./gradlew test --tests "com.booklore.api.service.BookServiceTest"
|
||||
|
||||
# Generate coverage report
|
||||
./gradlew test jacocoTestReport
|
||||
```
|
||||
|
||||
**Before creating a PR, always run:**
|
||||
```bash
|
||||
./gradlew test
|
||||
```
|
||||
|
||||
@@ -137,74 +224,199 @@ ### Backend
|
||||
|
||||
## 🛠️ Contributing Guidelines
|
||||
|
||||
### 💡 Bug Reports
|
||||
### 💡 Reporting Bugs
|
||||
|
||||
- Check [existing issues](https://github.com/adityachandelgit/BookLore/issues)
|
||||
- Include reproduction steps, expected vs. actual behavior, and logs if possible
|
||||
Found a bug? Help us fix it by providing detailed information:
|
||||
|
||||
### 🌟 Feature Requests
|
||||
1. **Search existing issues** to avoid duplicates
|
||||
2. **Create a new issue** with the `bug` label
|
||||
3. **Include the following:**
|
||||
- Clear, descriptive title (e.g., "Book import fails with PDF files over 100MB")
|
||||
- Steps to reproduce the issue
|
||||
- Expected behavior vs. actual behavior
|
||||
- Screenshots or error logs if applicable
|
||||
- Your environment (OS, browser, Docker version, etc.)
|
||||
|
||||
- Clearly explain the use case and benefit
|
||||
- Label the issue with `feature`
|
||||
**Example Bug Report:**
|
||||
```markdown
|
||||
**Title:** Book metadata not updating after manual edit
|
||||
|
||||
### 🔃 Code Contributions
|
||||
**Description:**
|
||||
When I manually edit a book's metadata through the UI and click Save,
|
||||
the changes appear to save but revert after page refresh.
|
||||
|
||||
- Create a feature branch:
|
||||
**Steps to Reproduce:**
|
||||
1. Navigate to any book detail page
|
||||
2. Click "Edit Metadata"
|
||||
3. Change the title from "Old Title" to "New Title"
|
||||
4. Click "Save"
|
||||
5. Refresh the page
|
||||
|
||||
```bash
|
||||
git checkout -b feat/my-feature
|
||||
**Expected:** Title should remain "New Title"
|
||||
**Actual:** Title reverts to "Old Title"
|
||||
|
||||
**Environment:**
|
||||
- Browser: Chrome 120
|
||||
- OS: macOS 14.2
|
||||
- Booklore Version: 1.2.0
|
||||
```
|
||||
|
||||
- For bug fixes:
|
||||
---
|
||||
|
||||
### 🔃 Submitting Code Changes
|
||||
|
||||
#### Branch Naming Convention
|
||||
|
||||
Create descriptive branches that clearly indicate the purpose of your changes:
|
||||
|
||||
```bash
|
||||
git checkout -b fix/my-fix
|
||||
# For new features
|
||||
git checkout -b feat/add-dark-mode-theme
|
||||
git checkout -b feat/epub-reader-support
|
||||
|
||||
# For bug fixes
|
||||
git checkout -b fix/book-import-validation
|
||||
git checkout -b fix/memory-leak-in-scanner
|
||||
|
||||
# For documentation
|
||||
git checkout -b docs/update-installation-guide
|
||||
|
||||
# For refactoring
|
||||
git checkout -b refactor/improve-authentication-flow
|
||||
```
|
||||
|
||||
- Follow code conventions, keep PRs focused and scoped
|
||||
- Link the relevant issue in your PR
|
||||
- Test your changes
|
||||
- Target the `develop` branch when opening PRs
|
||||
#### Development Workflow
|
||||
|
||||
1. **Create a branch** from `develop` (not `main`)
|
||||
2. **Make your changes** in small, logical commits
|
||||
3. **Test thoroughly** - run both frontend and backend tests
|
||||
4. **Update documentation** if your changes affect usage
|
||||
5. **Run the linter** and fix any issues
|
||||
6. **Commit with clear messages** following Conventional Commits
|
||||
7. **Push to your fork**
|
||||
8. **Open a pull request** targeting the `develop` branch
|
||||
|
||||
#### Pull Request Checklist
|
||||
|
||||
Before submitting, ensure:
|
||||
- [ ] Code follows project conventions
|
||||
- [ ] All tests pass (`./gradlew test` for backend)
|
||||
- [ ] IntelliJ linter shows no errors
|
||||
- [ ] Changes are documented (README, inline comments)
|
||||
- [ ] PR description clearly explains what and why
|
||||
- [ ] PR is linked to a related issue (if applicable)
|
||||
- [ ] Branch is up-to-date with `develop`
|
||||
- [ ] **For big features:** Create a documentation PR at [booklore-docs](https://github.com/booklore-app/booklore-docs) with styling similar to other documentation pages
|
||||
|
||||
---
|
||||
|
||||
## 🧼 Code Style & Conventions
|
||||
|
||||
- **Angular**: Follow the [official style guide](https://angular.io/guide/styleguide)
|
||||
- **Java**: Use modern features (Java 17+), clean structure
|
||||
- **Format**: Use linters and Prettier where applicable
|
||||
- **UI**: Use Tailwind CSS and PrimeNG components consistently
|
||||
- **Java**: Use modern features (Java 21), clean structure
|
||||
- **Linter**: Use IntelliJ IDEA's built-in linter for code formatting and style checks
|
||||
- **UI**: Use SCSS and PrimeNG components consistently
|
||||
|
||||
---
|
||||
|
||||
## 📝 Commit Message Format
|
||||
|
||||
Follow [Conventional Commits](https://www.conventionalcommits.org/):
|
||||
We follow [Conventional Commits](https://www.conventionalcommits.org/) for clear, standardized commit messages.
|
||||
|
||||
Examples:
|
||||
### Format
|
||||
|
||||
- `feat: add column visibility setting to book table`
|
||||
- `fix: correct metadata locking behavior`
|
||||
- `docs: improve contributing instructions`
|
||||
```
|
||||
<type>(<scope>): <subject>
|
||||
|
||||
[optional body]
|
||||
|
||||
[optional footer]
|
||||
```
|
||||
|
||||
### Types
|
||||
|
||||
- `feat`: New feature
|
||||
- `fix`: Bug fix
|
||||
- `docs`: Documentation changes
|
||||
- `style`: Code style changes (formatting, no logic change)
|
||||
- `refactor`: Code refactoring
|
||||
- `test`: Adding or updating tests
|
||||
- `chore`: Maintenance tasks
|
||||
- `perf`: Performance improvements
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# Feature addition
|
||||
feat(reader): add keyboard navigation for page turning
|
||||
|
||||
# Bug fix
|
||||
fix(api): resolve memory leak in book scanning service
|
||||
|
||||
# Documentation
|
||||
docs(readme): add troubleshooting section for Docker setup
|
||||
|
||||
# Multiple scopes
|
||||
feat(api,ui): implement book collection management
|
||||
|
||||
# Breaking change
|
||||
feat(auth)!: migrate to OAuth 2.1
|
||||
|
||||
BREAKING CHANGE: OAuth 2.0 is no longer supported
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🙏 Code of Conduct
|
||||
|
||||
Please be respectful, inclusive, and collaborative. Harassment, abuse, or discrimination of any kind will not be tolerated.
|
||||
We're committed to providing a welcoming and inclusive environment for everyone.
|
||||
|
||||
**Our Standards:**
|
||||
- ✅ Be respectful and considerate
|
||||
- ✅ Welcome newcomers and help them learn
|
||||
- ✅ Accept constructive criticism gracefully
|
||||
- ✅ Focus on what's best for the community
|
||||
|
||||
**Unacceptable Behavior:**
|
||||
- ❌ Harassment, trolling, or discrimination
|
||||
- ❌ Personal attacks or insults
|
||||
- ❌ Publishing others' private information
|
||||
- ❌ Any conduct that would be inappropriate in a professional setting
|
||||
|
||||
**Enforcement:**
|
||||
Instances of unacceptable behavior may result in temporary or permanent ban from the project.
|
||||
|
||||
---
|
||||
|
||||
## 💬 Community & Support
|
||||
|
||||
- Discord server: https://discord.gg/Ee5hd458Uz
|
||||
**Need help or want to discuss ideas?**
|
||||
|
||||
- 💬 **Discord**: [Join our server](https://discord.gg/Ee5hd458Uz)
|
||||
- 🐛 **Issues**: [GitHub Issues](https://github.com/adityachandelgit/BookLore/issues)
|
||||
|
||||
---
|
||||
|
||||
## 📄 License
|
||||
|
||||
Booklore is open-source and licensed under the GPL-3.0 License. See [`LICENSE`](./LICENSE) for details.
|
||||
Booklore is open-source software licensed under the **GPL-3.0 License**.
|
||||
|
||||
By contributing, you agree that your contributions will be licensed under the same license. See the [`LICENSE`](./LICENSE) file for full details.
|
||||
|
||||
---
|
||||
|
||||
Happy contributing!
|
||||
## 🎯 What to Work On?
|
||||
|
||||
Not sure where to start? Check out:
|
||||
|
||||
- Issues labeled [`good first issue`](https://github.com/adityachandelgit/BookLore/labels/good%20first%20issue)
|
||||
- Issues labeled [`help wanted`](https://github.com/adityachandelgit/BookLore/labels/help%20wanted)
|
||||
- Our [project roadmap](https://github.com/adityachandelgit/BookLore/projects)
|
||||
|
||||
---
|
||||
|
||||
## 🎉 Thank You!
|
||||
|
||||
Every contribution, no matter how small, makes Booklore better. Thank you for being part of our community!
|
||||
|
||||
**Happy Contributing! 📚✨**
|
||||
|
||||
18
Dockerfile
18
Dockerfile
@@ -4,11 +4,10 @@ FROM node:22-alpine AS angular-build
|
||||
WORKDIR /angular-app
|
||||
|
||||
COPY ./booklore-ui/package.json ./booklore-ui/package-lock.json ./
|
||||
RUN npm config set registry http://registry.npmjs.org/ \
|
||||
&& npm config set fetch-retries 5 \
|
||||
&& npm config set fetch-retry-mintimeout 20000 \
|
||||
&& npm config set fetch-retry-maxtimeout 120000 \
|
||||
&& npm install --force
|
||||
RUN --mount=type=cache,target=/root/.npm \
|
||||
npm config set registry http://registry.npmjs.org/ \
|
||||
&& npm ci --force
|
||||
|
||||
COPY ./booklore-ui /angular-app/
|
||||
|
||||
RUN npm run build --configuration=production
|
||||
@@ -18,7 +17,13 @@ FROM gradle:8.14.3-jdk21-alpine AS springboot-build
|
||||
|
||||
WORKDIR /springboot-app
|
||||
|
||||
# Copy only build files first to cache dependencies
|
||||
COPY ./booklore-api/build.gradle ./booklore-api/settings.gradle /springboot-app/
|
||||
|
||||
# Download dependencies (cached layer)
|
||||
RUN --mount=type=cache,target=/home/gradle/.gradle \
|
||||
gradle dependencies --no-daemon
|
||||
|
||||
COPY ./booklore-api/src /springboot-app/src
|
||||
|
||||
# Inject version into application.yaml using yq
|
||||
@@ -26,7 +31,8 @@ ARG APP_VERSION
|
||||
RUN apk add --no-cache yq && \
|
||||
yq eval '.app.version = strenv(APP_VERSION)' -i /springboot-app/src/main/resources/application.yaml
|
||||
|
||||
RUN gradle clean build -x test
|
||||
RUN --mount=type=cache,target=/home/gradle/.gradle \
|
||||
gradle clean build -x test --no-daemon --parallel
|
||||
|
||||
# Stage 3: Final image
|
||||
FROM eclipse-temurin:21.0.9_10-jre-alpine
|
||||
|
||||
@@ -39,7 +39,7 @@ dependencies {
|
||||
|
||||
// --- Database & Migration ---
|
||||
implementation 'org.mariadb.jdbc:mariadb-java-client:3.5.6'
|
||||
implementation 'org.flywaydb:flyway-mysql:11.18.0'
|
||||
implementation 'org.flywaydb:flyway-mysql:11.19.0'
|
||||
|
||||
// --- Security & Authentication ---
|
||||
implementation 'io.jsonwebtoken:jjwt-api:0.13.0'
|
||||
@@ -73,14 +73,14 @@ dependencies {
|
||||
implementation 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.8.14'
|
||||
implementation 'org.apache.commons:commons-compress:1.28.0'
|
||||
implementation 'org.tukaani:xz:1.11' // Required by commons-compress for 7z support
|
||||
implementation 'org.apache.commons:commons-text:1.14.0'
|
||||
implementation 'org.apache.commons:commons-text:1.15.0'
|
||||
|
||||
// --- Template Engine ---
|
||||
implementation 'org.freemarker:freemarker:2.3.33'
|
||||
implementation 'org.freemarker:freemarker:2.3.34'
|
||||
|
||||
// --- Test Dependencies ---
|
||||
testImplementation 'org.springframework.boot:spring-boot-starter-test'
|
||||
testImplementation 'org.assertj:assertj-core:3.27.3'
|
||||
testImplementation 'org.assertj:assertj-core:3.27.6'
|
||||
testImplementation "org.mockito:mockito-inline:5.2.0"
|
||||
}
|
||||
|
||||
|
||||
BIN
booklore-api/gradle/wrapper/gradle-wrapper.jar
vendored
BIN
booklore-api/gradle/wrapper/gradle-wrapper.jar
vendored
Binary file not shown.
@@ -1,6 +1,6 @@
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.1-bin.zip
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
|
||||
networkTimeout=10000
|
||||
validateDistributionUrl=true
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
|
||||
5
booklore-api/gradlew
vendored
5
booklore-api/gradlew
vendored
@@ -1,7 +1,7 @@
|
||||
#!/bin/sh
|
||||
|
||||
#
|
||||
# Copyright © 2015 the original authors.
|
||||
# Copyright © 2015-2021 the original authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
@@ -114,6 +114,7 @@ case "$( uname )" in #(
|
||||
NONSTOP* ) nonstop=true ;;
|
||||
esac
|
||||
|
||||
CLASSPATH="\\\"\\\""
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
@@ -171,6 +172,7 @@ fi
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if "$cygwin" || "$msys" ; then
|
||||
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||
|
||||
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||
|
||||
@@ -210,6 +212,7 @@ DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
set -- \
|
||||
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||
-classpath "$CLASSPATH" \
|
||||
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
|
||||
"$@"
|
||||
|
||||
|
||||
3
booklore-api/gradlew.bat
vendored
3
booklore-api/gradlew.bat
vendored
@@ -70,10 +70,11 @@ goto fail
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
|
||||
@@ -2,16 +2,23 @@ package com.adityachandel.booklore.controller;

import com.adityachandel.booklore.model.dto.BookdropFile;
import com.adityachandel.booklore.model.dto.BookdropFileNotification;
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
import com.adityachandel.booklore.model.dto.request.BookdropFinalizeRequest;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.request.BookdropSelectionRequest;
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
import com.adityachandel.booklore.model.dto.response.BookdropFinalizeResult;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.service.bookdrop.BookDropService;
import com.adityachandel.booklore.service.bookdrop.BookdropBulkEditService;
import com.adityachandel.booklore.service.bookdrop.BookdropMonitoringService;
import com.adityachandel.booklore.service.monitoring.MonitoringService;
import com.adityachandel.booklore.service.bookdrop.FilenamePatternExtractor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import jakarta.validation.Valid;
import lombok.AllArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -26,6 +33,8 @@ public class BookdropFileController {

    private final BookDropService bookDropService;
    private final BookdropMonitoringService monitoringService;
    private final FilenamePatternExtractor filenamePatternExtractor;
    private final BookdropBulkEditService bookdropBulkEditService;

    @Operation(summary = "Get bookdrop notification summary", description = "Retrieve a summary of bookdrop file notifications.")
    @ApiResponse(responseCode = "200", description = "Notification summary returned successfully")
@@ -68,4 +77,22 @@ public class BookdropFileController {
        monitoringService.rescanBookdropFolder();
        return ResponseEntity.ok().build();
    }

    @Operation(summary = "Extract metadata from filenames using pattern", description = "Parse filenames of selected files using a pattern to extract metadata fields.")
    @ApiResponse(responseCode = "200", description = "Pattern extraction completed")
    @PostMapping("/files/extract-pattern")
    public ResponseEntity<BookdropPatternExtractResult> extractFromPattern(
            @Parameter(description = "Pattern extraction request") @Valid @RequestBody BookdropPatternExtractRequest request) {
        BookdropPatternExtractResult result = filenamePatternExtractor.bulkExtract(request);
        return ResponseEntity.ok(result);
    }

    @Operation(summary = "Bulk edit metadata for selected files", description = "Apply metadata changes to multiple selected files at once.")
    @ApiResponse(responseCode = "200", description = "Bulk edit completed")
    @PostMapping("/files/bulk-edit")
    public ResponseEntity<BookdropBulkEditResult> bulkEditMetadata(
            @Parameter(description = "Bulk edit request") @Valid @RequestBody BookdropBulkEditRequest request) {
        BookdropBulkEditResult result = bookdropBulkEditService.bulkEdit(request);
        return ResponseEntity.ok(result);
    }
}

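Note: for orientation, a minimal client-side sketch of the new extract-pattern endpoint. The /files/extract-pattern path and the request fields come straight from this diff; the host, the /api/v1/bookdrop prefix, and the absence of auth headers are assumptions, not part of the change.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class ExtractPatternExample {
        public static void main(String[] args) throws Exception {
            // pattern, selectedIds, and preview mirror the fields of BookdropPatternExtractRequest below
            String body = """
                    {"pattern": "{Authors} - {Title} ({Published:yyyy})",
                     "selectedIds": [101, 102],
                     "preview": true}
                    """;
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:8080/api/v1/bookdrop/files/extract-pattern")) // prefix assumed
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(body))
                    .build();
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body()); // BookdropPatternExtractResult as JSON
        }
    }
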
@@ -156,6 +156,27 @@ public class MetadataController {
        bookMetadataService.regenerateCover(bookId);
    }

    @Operation(summary = "Regenerate covers for selected books", description = "Regenerate covers for a list of books. Requires metadata edit permission or admin.")
    @ApiResponse(responseCode = "204", description = "Cover regeneration started successfully")
    @PostMapping("/bulk-regenerate-covers")
    @PreAuthorize("@securityUtil.canEditMetadata() or @securityUtil.isAdmin()")
    public ResponseEntity<Void> regenerateCoversForBooks(
            @Parameter(description = "List of book IDs") @Validated @RequestBody BulkBookIdsRequest request) {
        bookMetadataService.regenerateCoversForBooks(request.getBookIds());
        return ResponseEntity.noContent().build();
    }

    @Operation(summary = "Upload cover image for multiple books", description = "Upload a cover image to apply to multiple books. Requires metadata edit permission or admin.")
    @ApiResponse(responseCode = "204", description = "Cover upload started successfully")
    @PostMapping("/bulk-upload-cover")
    @PreAuthorize("@securityUtil.canEditMetadata() or @securityUtil.isAdmin()")
    public ResponseEntity<Void> bulkUploadCover(
            @Parameter(description = "Cover image file") @RequestParam("file") MultipartFile file,
            @Parameter(description = "Comma-separated book IDs") @RequestParam("bookIds") @jakarta.validation.constraints.NotEmpty java.util.Set<Long> bookIds) {
        bookMetadataService.updateCoverImageFromFileForBooks(bookIds, file);
        return ResponseEntity.noContent().build();
    }

    @Operation(summary = "Recalculate metadata match scores", description = "Recalculate match scores for all metadata. Requires admin.")
    @ApiResponse(responseCode = "204", description = "Match scores recalculated successfully")
    @PostMapping("/metadata/recalculate-match-scores")

@@ -107,6 +107,16 @@ public class OpdsController {
                .body(feed);
    }

    @Operation(summary = "Get OPDS series navigation", description = "Retrieve the OPDS series navigation feed.")
    @ApiResponse(responseCode = "200", description = "Series navigation feed returned successfully")
    @GetMapping(value = "/series", produces = OPDS_CATALOG_MEDIA_TYPE)
    public ResponseEntity<String> getSeriesNavigation(@Parameter(hidden = true) HttpServletRequest request) {
        String feed = opdsFeedService.generateSeriesNavigation(request);
        return ResponseEntity.ok()
                .contentType(MediaType.parseMediaType(OPDS_CATALOG_MEDIA_TYPE))
                .body(feed);
    }

    @Operation(summary = "Get OPDS catalog feed", description = "Retrieve the OPDS acquisition catalog feed.")
    @ApiResponse(responseCode = "200", description = "Catalog feed returned successfully")
    @GetMapping(value = "/catalog", produces = OPDS_ACQUISITION_MEDIA_TYPE)

@@ -2,6 +2,7 @@ package com.adityachandel.booklore.controller;

import com.adityachandel.booklore.model.dto.OpdsUserV2;
import com.adityachandel.booklore.model.dto.request.OpdsUserV2CreateRequest;
import com.adityachandel.booklore.model.dto.request.OpdsUserV2UpdateRequest;
import com.adityachandel.booklore.service.opds.OpdsUserV2Service;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
@@ -46,4 +47,13 @@ public class OpdsUserV2Controller {
            @Parameter(description = "ID of the OPDS user to delete") @PathVariable Long id) {
        service.deleteOpdsUser(id);
    }
}

    @Operation(summary = "Update OPDS user", description = "Update an OPDS user's settings by ID.")
    @ApiResponse(responseCode = "200", description = "OPDS user updated successfully")
    @PatchMapping("/{id}")
    @PreAuthorize("@securityUtil.isAdmin() or @securityUtil.canAccessOpds()")
    public OpdsUserV2 updateUser(
            @Parameter(description = "ID of the OPDS user to update") @PathVariable Long id,
            @Parameter(description = "OPDS user update request") @RequestBody OpdsUserV2UpdateRequest updateRequest) {
        return service.updateOpdsUser(id, updateRequest);
    }
}

@@ -55,7 +55,7 @@ public enum ApiError {
    SHELF_CANNOT_BE_DELETED(HttpStatus.FORBIDDEN, "'%s' shelf can't be deleted"),
    TASK_NOT_FOUND(HttpStatus.NOT_FOUND, "Scheduled task not found: %s"),
    TASK_ALREADY_RUNNING(HttpStatus.CONFLICT, "Task is already running: %s"),
    ICON_ALREADY_EXISTS(HttpStatus.CONFLICT, "SVG icon with name '%s' already exists"),;
    ICON_ALREADY_EXISTS(HttpStatus.CONFLICT, "SVG icon with name '%s' already exists");

    private final HttpStatus status;
    private final String message;

@@ -19,6 +19,7 @@ public class MetadataClearFlags {
    private boolean goodreadsId;
    private boolean comicvineId;
    private boolean hardcoverId;
    private boolean hardcoverBookId;
    private boolean googleId;
    private boolean pageCount;
    private boolean language;

@@ -37,6 +37,7 @@ public class BookMetadata {
    private Double goodreadsRating;
    private Integer goodreadsReviewCount;
    private String hardcoverId;
    private Integer hardcoverBookId;
    private Double hardcoverRating;
    private Integer hardcoverReviewCount;
    private String doubanId;
@@ -66,6 +67,7 @@ public class BookMetadata {
    private Boolean goodreadsIdLocked;
    private Boolean comicvineIdLocked;
    private Boolean hardcoverIdLocked;
    private Boolean hardcoverBookIdLocked;
    private Boolean doubanIdLocked;
    private Boolean googleIdLocked;
    private Boolean pageCountLocked;

@@ -1,5 +1,6 @@
package com.adityachandel.booklore.model.dto;

import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import com.fasterxml.jackson.annotation.JsonIgnore;
import lombok.*;

@@ -14,4 +15,5 @@ public class OpdsUserV2 {
    private String username;
    @JsonIgnore
    private String passwordHash;
    private OpdsSortOrder sortOrder;
}

@@ -0,0 +1,20 @@
package com.adityachandel.booklore.model.dto.request;

import com.adityachandel.booklore.model.dto.BookMetadata;
import lombok.Data;

import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Set;

@Data
public class BookdropBulkEditRequest {
    @NotNull
    private BookMetadata fields;
    @NotNull
    private Set<String> enabledFields;
    private boolean mergeArrays;
    private boolean selectAll;
    private List<Long> excludedIds;
    private List<Long> selectedIds;
}
@@ -0,0 +1,16 @@
package com.adityachandel.booklore.model.dto.request;

import lombok.Data;

import jakarta.validation.constraints.NotBlank;
import java.util.List;

@Data
public class BookdropPatternExtractRequest {
    @NotBlank
    private String pattern;
    private Boolean selectAll;
    private List<Long> excludedIds;
    private List<Long> selectedIds;
    private Boolean preview;
}
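Note: a short sketch of how the selection fields above are interpreted. The semantics are taken from BookdropMetadataHelper#resolveFileIds further down in this diff; the IDs here are hypothetical.

    // selectAll = TRUE: operate on every bookdrop file except excludedIds;
    // otherwise only selectedIds are processed. preview = TRUE limits work to a handful of files.
    BookdropPatternExtractRequest request = new BookdropPatternExtractRequest();
    request.setPattern("{Authors} - {Title}");
    request.setSelectAll(Boolean.TRUE);
    request.setExcludedIds(List.of(42L)); // hypothetical ID to skip
    request.setPreview(Boolean.TRUE);
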
@@ -0,0 +1,12 @@
package com.adityachandel.booklore.model.dto.request;

import jakarta.validation.constraints.NotEmpty;
import lombok.Data;

import java.util.Set;

@Data
public class BulkBookIdsRequest {
    @NotEmpty(message = "At least one book ID is required")
    private Set<Long> bookIds;
}
@@ -1,9 +1,11 @@
package com.adityachandel.booklore.model.dto.request;

import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import lombok.Data;

@Data
public class OpdsUserV2CreateRequest {
    private String username;
    private String password;
    private OpdsSortOrder sortOrder;
}

@@ -0,0 +1,10 @@
package com.adityachandel.booklore.model.dto.request;

import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import jakarta.validation.constraints.NotNull;

public record OpdsUserV2UpdateRequest(
        @NotNull(message = "Sort order is required")
        OpdsSortOrder sortOrder
) {
}
@@ -0,0 +1,12 @@
package com.adityachandel.booklore.model.dto.response;

import lombok.Builder;
import lombok.Data;

@Data
@Builder
public class BookdropBulkEditResult {
    private int totalFiles;
    private int successfullyUpdated;
    private int failed;
}
@@ -0,0 +1,26 @@
package com.adityachandel.booklore.model.dto.response;

import com.adityachandel.booklore.model.dto.BookMetadata;
import lombok.Builder;
import lombok.Data;

import java.util.List;

@Data
@Builder
public class BookdropPatternExtractResult {
    private int totalFiles;
    private int successfullyExtracted;
    private int failed;
    private List<FileExtractionResult> results;

    @Data
    @Builder
    public static class FileExtractionResult {
        private Long fileId;
        private String fileName;
        private boolean success;
        private BookMetadata extractedMetadata;
        private String errorMessage;
    }
}
@@ -16,6 +16,7 @@ public enum AppSettingKey {
    METADATA_PERSISTENCE_SETTINGS("metadata_persistence_settings", true, false),
    METADATA_PUBLIC_REVIEWS_SETTINGS("metadata_public_reviews_settings", true, false),
    KOBO_SETTINGS("kobo_settings", true, false),
    COVER_CROPPING_SETTINGS("cover_cropping_settings", true, false),

    AUTO_BOOK_SEARCH("auto_book_search", false, false),
    COVER_IMAGE_RESOLUTION("cover_image_resolution", false, false),

@@ -33,4 +33,5 @@ public class AppSettings {
    private MetadataPersistenceSettings metadataPersistenceSettings;
    private MetadataPublicReviewsSettings metadataPublicReviewsSettings;
    private KoboSettings koboSettings;
    private CoverCroppingSettings coverCroppingSettings;
}
@@ -0,0 +1,13 @@
package com.adityachandel.booklore.model.dto.settings;

import lombok.Builder;
import lombok.Data;

@Builder
@Data
public class CoverCroppingSettings {
    private boolean verticalCroppingEnabled;
    private boolean horizontalCroppingEnabled;
    private double aspectRatioThreshold;
    private boolean smartCroppingEnabled;
}
@@ -11,4 +11,5 @@ public class KoboSettings {
    private boolean convertCbxToEpub;
    private int conversionLimitInMbForCbx;
    private boolean forceEnableHyphenation;
    private int conversionImageCompressionPercentage;
}

@@ -97,6 +97,9 @@ public class BookMetadataEntity {
    @Column(name = "hardcover_id", length = 100)
    private String hardcoverId;

    @Column(name = "hardcover_book_id")
    private Integer hardcoverBookId;

    @Column(name = "google_id", length = 100)
    private String googleId;

@@ -208,6 +211,10 @@ public class BookMetadataEntity {
    @Builder.Default
    private Boolean hardcoverIdLocked = Boolean.FALSE;

    @Column(name = "hardcover_book_id_locked")
    @Builder.Default
    private Boolean hardcoverBookIdLocked = Boolean.FALSE;

    @Column(name = "google_id_locked")
    @Builder.Default
    private Boolean googleIdLocked = Boolean.FALSE;
@@ -309,6 +316,7 @@ public class BookMetadataEntity {
        this.comicvineIdLocked = lock;
        this.goodreadsIdLocked = lock;
        this.hardcoverIdLocked = lock;
        this.hardcoverBookIdLocked = lock;
        this.googleIdLocked = lock;
        this.reviewsLocked = lock;
    }
@@ -341,6 +349,7 @@ public class BookMetadataEntity {
                && Boolean.TRUE.equals(this.goodreadsIdLocked)
                && Boolean.TRUE.equals(this.comicvineIdLocked)
                && Boolean.TRUE.equals(this.hardcoverIdLocked)
                && Boolean.TRUE.equals(this.hardcoverBookIdLocked)
                && Boolean.TRUE.equals(this.googleIdLocked)
                && Boolean.TRUE.equals(this.reviewsLocked)
                ;

@@ -1,5 +1,6 @@
package com.adityachandel.booklore.model.entity;

import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import jakarta.persistence.*;
import lombok.*;

@@ -28,6 +29,11 @@ public class OpdsUserV2Entity {
    @Column(name = "password_hash", nullable = false)
    private String passwordHash;

    @Enumerated(EnumType.STRING)
    @Column(name = "sort_order", length = 20)
    @Builder.Default
    private OpdsSortOrder sortOrder = OpdsSortOrder.RECENT;

    @Column(name = "created_at", nullable = false, updatable = false)
    private Instant createdAt;

@@ -13,7 +13,8 @@ public enum BookFileExtension {
    EPUB("epub", BookFileType.EPUB),
    CBZ("cbz", BookFileType.CBX),
    CBR("cbr", BookFileType.CBX),
    CB7("cb7", BookFileType.CBX);
    CB7("cb7", BookFileType.CBX),
    FB2("fb2", BookFileType.FB2);

    private final String extension;
    private final BookFileType type;

@@ -1,5 +1,5 @@
package com.adityachandel.booklore.model.enums;

public enum BookFileType {
    PDF, EPUB, CBX
    PDF, EPUB, CBX, FB2
}

@@ -0,0 +1,13 @@
package com.adityachandel.booklore.model.enums;

public enum OpdsSortOrder {
    RECENT,
    TITLE_ASC,
    TITLE_DESC,
    AUTHOR_ASC,
    AUTHOR_DESC,
    SERIES_ASC,
    SERIES_DESC,
    RATING_ASC,
    RATING_DESC
}
@@ -2,9 +2,12 @@ package com.adityachandel.booklore.repository;

import com.adityachandel.booklore.model.entity.*;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional;

import java.time.Instant;
import java.util.List;

public interface BookMetadataRepository extends JpaRepository<BookMetadataEntity, Long> {
@@ -12,6 +15,11 @@ public interface BookMetadataRepository extends JpaRepository<BookMetadataEntity
    @Query("SELECT m FROM BookMetadataEntity m WHERE m.bookId IN :bookIds")
    List<BookMetadataEntity> getMetadataForBookIds(@Param("bookIds") List<Long> bookIds);

    @Modifying
    @Transactional
    @Query("UPDATE BookMetadataEntity m SET m.coverUpdatedOn = :timestamp WHERE m.bookId = :bookId")
    void updateCoverTimestamp(@Param("bookId") Long bookId, @Param("timestamp") Instant timestamp);

    List<BookMetadataEntity> findAllByAuthorsContaining(AuthorEntity author);

    List<BookMetadataEntity> findAllByCategoriesContaining(CategoryEntity category);

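Note: an illustrative fragment, not part of this diff. Because updateCoverTimestamp is a @Modifying JPQL update, it writes straight to the database and bypasses the persistence context, so entities already loaded in the same session will not reflect the new value.

    // Fragment: assumes a class where a BookMetadataRepository bean is injected.
    bookMetadataRepository.updateCoverTimestamp(bookId, Instant.now());
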
@@ -162,4 +162,52 @@ public interface BookOpdsRepository extends JpaRepository<BookEntity, Long>, Jpa
            ORDER BY b.addedOn DESC
            """)
    Page<Long> findBookIdsByAuthorNameAndLibraryIds(@Param("authorName") String authorName, @Param("libraryIds") Collection<Long> libraryIds, Pageable pageable);

    // ============================================
    // SERIES - Distinct Series List
    // ============================================

    @Query("""
            SELECT DISTINCT m.seriesName FROM BookMetadataEntity m
            JOIN m.book b
            WHERE (b.deleted IS NULL OR b.deleted = false)
            AND m.seriesName IS NOT NULL
            AND m.seriesName != ''
            ORDER BY m.seriesName
            """)
    List<String> findDistinctSeries();

    @Query("""
            SELECT DISTINCT m.seriesName FROM BookMetadataEntity m
            JOIN m.book b
            WHERE (b.deleted IS NULL OR b.deleted = false)
            AND b.library.id IN :libraryIds
            AND m.seriesName IS NOT NULL
            AND m.seriesName != ''
            ORDER BY m.seriesName
            """)
    List<String> findDistinctSeriesByLibraryIds(@Param("libraryIds") Collection<Long> libraryIds);

    // ============================================
    // BOOKS BY SERIES - Two Query Pattern (sorted by series number)
    // ============================================

    @Query("""
            SELECT DISTINCT b.id FROM BookEntity b
            JOIN b.metadata m
            WHERE m.seriesName = :seriesName
            AND (b.deleted IS NULL OR b.deleted = false)
            ORDER BY COALESCE(m.seriesNumber, 999999), b.addedOn DESC
            """)
    Page<Long> findBookIdsBySeriesName(@Param("seriesName") String seriesName, Pageable pageable);

    @Query("""
            SELECT DISTINCT b.id FROM BookEntity b
            JOIN b.metadata m
            WHERE m.seriesName = :seriesName
            AND b.library.id IN :libraryIds
            AND (b.deleted IS NULL OR b.deleted = false)
            ORDER BY COALESCE(m.seriesNumber, 999999), b.addedOn DESC
            """)
    Page<Long> findBookIdsBySeriesNameAndLibraryIds(@Param("seriesName") String seriesName, @Param("libraryIds") Collection<Long> libraryIds, Pageable pageable);
}
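Note: a sketch of the "two query pattern" the comments above refer to: page over matching IDs first, then load the full rows for just that page. The service-side wiring shown here is an assumption, not part of the diff.

    // Fragment: repositories are assumed to be injected Spring Data beans.
    Page<Long> idPage = bookOpdsRepository.findBookIdsBySeriesName("Dune", PageRequest.of(0, 20)); // "Dune" is a hypothetical series
    List<BookEntity> books = bookRepository.findAllById(idPage.getContent());
    // COALESCE(m.seriesNumber, 999999) in the queries sorts books without a series number last.
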
@@ -95,6 +95,7 @@ public class AppSettingService {
        builder.metadataPersistenceSettings(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.METADATA_PERSISTENCE_SETTINGS, MetadataPersistenceSettings.class, settingPersistenceHelper.getDefaultMetadataPersistenceSettings(), true));
        builder.metadataPublicReviewsSettings(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.METADATA_PUBLIC_REVIEWS_SETTINGS, MetadataPublicReviewsSettings.class, settingPersistenceHelper.getDefaultMetadataPublicReviewsSettings(), true));
        builder.koboSettings(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.KOBO_SETTINGS, KoboSettings.class, settingPersistenceHelper.getDefaultKoboSettings(), true));
        builder.coverCroppingSettings(settingPersistenceHelper.getJsonSetting(settingsMap, AppSettingKey.COVER_CROPPING_SETTINGS, CoverCroppingSettings.class, settingPersistenceHelper.getDefaultCoverCroppingSettings(), true));

        builder.autoBookSearch(Boolean.parseBoolean(settingPersistenceHelper.getOrCreateSetting(AppSettingKey.AUTO_BOOK_SEARCH, "true")));
        builder.uploadPattern(settingPersistenceHelper.getOrCreateSetting(AppSettingKey.UPLOAD_FILE_PATTERN, "{authors}/<{series}/><{seriesIndex}. >{title}< - {authors}>< ({year})>"));

@@ -255,7 +255,17 @@ public class SettingPersistenceHelper {
                .conversionLimitInMb(100)
                .convertCbxToEpub(false)
                .conversionLimitInMbForCbx(100)
                .conversionImageCompressionPercentage(85)
                .forceEnableHyphenation(false)
                .build();
    }

    public CoverCroppingSettings getDefaultCoverCroppingSettings() {
        return CoverCroppingSettings.builder()
                .verticalCroppingEnabled(false)
                .horizontalCroppingEnabled(false)
                .aspectRatioThreshold(2.5)
                .smartCroppingEnabled(false)
                .build();
    }
}

@@ -96,6 +96,7 @@ public class BookDownloadService {
        boolean convertEpubToKepub = isEpub && koboSettings.isConvertToKepub() && bookEntity.getFileSizeKb() <= (long) koboSettings.getConversionLimitInMb() * 1024;
        boolean convertCbxToEpub = isCbx && koboSettings.isConvertCbxToEpub() && bookEntity.getFileSizeKb() <= (long) koboSettings.getConversionLimitInMbForCbx() * 1024;

        int compressionPercentage = koboSettings.getConversionImageCompressionPercentage();
        Path tempDir = null;
        try {
            File inputFile = new File(FileUtils.getBookFullPath(bookEntity));
@@ -106,7 +107,7 @@ public class BookDownloadService {
        }

        if (convertCbxToEpub) {
            fileToSend = cbxConversionService.convertCbxToEpub(inputFile, tempDir.toFile(), bookEntity);
            fileToSend = cbxConversionService.convertCbxToEpub(inputFile, tempDir.toFile(), bookEntity, compressionPercentage);
        }

        if (convertEpubToKepub) {

@@ -0,0 +1,138 @@
package com.adityachandel.booklore.service.bookdrop;

import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;

@Slf4j
@Service
@RequiredArgsConstructor
public class BookdropBulkEditService {

    private static final int BATCH_SIZE = 500;

    private final BookdropFileRepository bookdropFileRepository;
    private final BookdropMetadataHelper metadataHelper;

    @Transactional
    public BookdropBulkEditResult bulkEdit(BookdropBulkEditRequest request) {
        List<Long> fileIds = metadataHelper.resolveFileIds(
                request.isSelectAll(),
                request.getExcludedIds(),
                request.getSelectedIds()
        );

        return processBulkEditInBatches(fileIds, request);
    }

    private BookdropBulkEditResult processBulkEditInBatches(List<Long> fileIds, BookdropBulkEditRequest request) {
        int totalSuccessCount = 0;
        int totalFailedCount = 0;
        int totalFiles = fileIds.size();

        for (int batchStart = 0; batchStart < fileIds.size(); batchStart += BATCH_SIZE) {
            int batchEnd = Math.min(batchStart + BATCH_SIZE, fileIds.size());

            BatchEditResult batchResult = processSingleBatch(fileIds, batchStart, batchEnd, request);

            totalSuccessCount += batchResult.successCount();
            totalFailedCount += batchResult.failureCount();

            log.debug("Processed batch {}-{} of {}: {} successful, {} failed",
                    batchStart, batchEnd, totalFiles, batchResult.successCount(), batchResult.failureCount());
        }

        return BookdropBulkEditResult.builder()
                .totalFiles(totalFiles)
                .successfullyUpdated(totalSuccessCount)
                .failed(totalFailedCount)
                .build();
    }

    private BatchEditResult processSingleBatch(List<Long> allFileIds, int batchStart, int batchEnd,
                                               BookdropBulkEditRequest request) {
        List<Long> batchIds = allFileIds.subList(batchStart, batchEnd);
        List<BookdropFileEntity> batchFiles = bookdropFileRepository.findAllById(batchIds);

        int successCount = 0;
        int failureCount = 0;
        Set<Long> failedFileIds = new HashSet<>();

        for (BookdropFileEntity file : batchFiles) {
            try {
                updateFileMetadata(file, request);
                successCount++;
            } catch (RuntimeException e) {
                log.error("Failed to update metadata for file {} ({}): {}",
                        file.getId(), file.getFileName(), e.getMessage(), e);
                failureCount++;
                failedFileIds.add(file.getId());
            }
        }

        List<BookdropFileEntity> filesToSave = batchFiles.stream()
                .filter(file -> !failedFileIds.contains(file.getId()))
                .toList();

        if (!filesToSave.isEmpty()) {
            bookdropFileRepository.saveAll(filesToSave);
        }

        return new BatchEditResult(successCount, failureCount);
    }

    private void updateFileMetadata(BookdropFileEntity file, BookdropBulkEditRequest request) {
        BookMetadata currentMetadata = metadataHelper.getCurrentMetadata(file);
        BookMetadata updates = request.getFields();
        Set<String> enabledFields = request.getEnabledFields();
        boolean mergeArrays = request.isMergeArrays();

        if (enabledFields.contains("seriesName") && updates.getSeriesName() != null) {
            currentMetadata.setSeriesName(updates.getSeriesName());
        }
        if (enabledFields.contains("seriesTotal") && updates.getSeriesTotal() != null) {
            currentMetadata.setSeriesTotal(updates.getSeriesTotal());
        }
        if (enabledFields.contains("publisher") && updates.getPublisher() != null) {
            currentMetadata.setPublisher(updates.getPublisher());
        }
        if (enabledFields.contains("language") && updates.getLanguage() != null) {
            currentMetadata.setLanguage(updates.getLanguage());
        }

        updateArrayField("authors", enabledFields, currentMetadata.getAuthors(), updates.getAuthors(),
                currentMetadata::setAuthors, mergeArrays);
        updateArrayField("categories", enabledFields, currentMetadata.getCategories(), updates.getCategories(),
                currentMetadata::setCategories, mergeArrays);
        updateArrayField("moods", enabledFields, currentMetadata.getMoods(), updates.getMoods(),
                currentMetadata::setMoods, mergeArrays);
        updateArrayField("tags", enabledFields, currentMetadata.getTags(), updates.getTags(),
                currentMetadata::setTags, mergeArrays);

        metadataHelper.updateFetchedMetadata(file, currentMetadata);
    }

    private void updateArrayField(String fieldName, Set<String> enabledFields,
                                  Set<String> currentValue, Set<String> newValue,
                                  java.util.function.Consumer<Set<String>> setter, boolean mergeArrays) {
        if (enabledFields.contains(fieldName) && newValue != null) {
            if (mergeArrays && currentValue != null) {
                Set<String> merged = new LinkedHashSet<>(currentValue);
                merged.addAll(newValue);
                setter.accept(merged);
            } else {
                setter.accept(newValue);
            }
        }
    }

    private record BatchEditResult(int successCount, int failureCount) {}
}
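Note: to make the mergeArrays switch concrete, a small self-contained illustration of the LinkedHashSet merge performed in updateArrayField above (the values are hypothetical):

    import java.util.LinkedHashSet;
    import java.util.Set;

    public class MergeDemo {
        public static void main(String[] args) {
            Set<String> current = new LinkedHashSet<>(Set.of("Arthur C. Clarke"));
            Set<String> incoming = Set.of("Stephen Baxter");

            Set<String> merged = new LinkedHashSet<>(current); // mergeArrays = true: keep existing values
            merged.addAll(incoming);
            System.out.println(merged);   // [Arthur C. Clarke, Stephen Baxter]
            System.out.println(incoming); // mergeArrays = false would simply replace with this set
        }
    }
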
@@ -0,0 +1,70 @@
package com.adityachandel.booklore.service.bookdrop;

import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;

import java.util.Collections;
import java.util.List;

@Slf4j
@Component
@RequiredArgsConstructor
public class BookdropMetadataHelper {

    private final BookdropFileRepository bookdropFileRepository;
    private final ObjectMapper objectMapper;

    public List<Long> resolveFileIds(boolean selectAll, List<Long> excludedIds, List<Long> selectedIds) {
        if (selectAll) {
            List<Long> excluded = excludedIds != null ? excludedIds : Collections.emptyList();
            if (excluded.isEmpty()) {
                return bookdropFileRepository.findAllIds();
            } else {
                return bookdropFileRepository.findAllExcludingIdsFlat(excluded);
            }
        }
        return selectedIds != null ? selectedIds : Collections.emptyList();
    }

    public BookMetadata getCurrentMetadata(BookdropFileEntity file) {
        try {
            String fetchedMetadataJson = file.getFetchedMetadata();
            if (fetchedMetadataJson != null && !fetchedMetadataJson.isBlank()) {
                return objectMapper.readValue(fetchedMetadataJson, BookMetadata.class);
            }
        } catch (Exception e) {
            log.error("Error parsing existing metadata for file {}: {}", file.getId(), e.getMessage());
        }
        return new BookMetadata();
    }

    public void updateFetchedMetadata(BookdropFileEntity file, BookMetadata metadata) {
        try {
            String updatedMetadataJson = objectMapper.writeValueAsString(metadata);
            file.setFetchedMetadata(updatedMetadataJson);
        } catch (Exception e) {
            log.error("Error serializing metadata for file {}: {}", file.getId(), e.getMessage());
            throw new RuntimeException("Failed to update metadata", e);
        }
    }

    public void mergeMetadata(BookMetadata target, BookMetadata source) {
        if (source.getSeriesName() != null) target.setSeriesName(source.getSeriesName());
        if (source.getTitle() != null) target.setTitle(source.getTitle());
        if (source.getSubtitle() != null) target.setSubtitle(source.getSubtitle());
        if (source.getAuthors() != null && !source.getAuthors().isEmpty()) target.setAuthors(source.getAuthors());
        if (source.getSeriesNumber() != null) target.setSeriesNumber(source.getSeriesNumber());
        if (source.getPublishedDate() != null) target.setPublishedDate(source.getPublishedDate());
        if (source.getPublisher() != null) target.setPublisher(source.getPublisher());
        if (source.getLanguage() != null) target.setLanguage(source.getLanguage());
        if (source.getSeriesTotal() != null) target.setSeriesTotal(source.getSeriesTotal());
        if (source.getIsbn10() != null) target.setIsbn10(source.getIsbn10());
        if (source.getIsbn13() != null) target.setIsbn13(source.getIsbn13());
        if (source.getAsin() != null) target.setAsin(source.getAsin());
    }
}
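Note: the helper round-trips metadata through the fetchedMetadata JSON column. A compact sketch of that round trip, assuming the application's ObjectMapper is configured to handle BookMetadata's field types (for example the JavaTimeModule for LocalDate):

    // Fragment: mapper would normally be the injected, preconfigured ObjectMapper bean.
    ObjectMapper mapper = new ObjectMapper();
    BookMetadata meta = new BookMetadata();
    meta.setTitle("Example Title");
    String json = mapper.writeValueAsString(meta);                  // what updateFetchedMetadata stores
    BookMetadata back = mapper.readValue(json, BookMetadata.class); // what getCurrentMetadata reads back
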
@@ -0,0 +1,630 @@
package com.adityachandel.booklore.service.bookdrop;

import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import jakarta.annotation.PreDestroy;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

@Slf4j
@Service
@RequiredArgsConstructor
public class FilenamePatternExtractor {

    private final BookdropFileRepository bookdropFileRepository;
    private final BookdropMetadataHelper metadataHelper;
    private final ExecutorService regexExecutor = Executors.newCachedThreadPool(runnable -> {
        Thread thread = new Thread(runnable);
        thread.setDaemon(true);
        return thread;
    });

    private static final int PREVIEW_FILE_LIMIT = 5;
    private static final long REGEX_TIMEOUT_SECONDS = 5;
    private static final int TWO_DIGIT_YEAR_CUTOFF = 50;
    private static final int TWO_DIGIT_YEAR_CENTURY_BASE = 1900;
    private static final int FOUR_DIGIT_YEAR_LENGTH = 4;
    private static final int TWO_DIGIT_YEAR_LENGTH = 2;
    private static final int COMPACT_DATE_LENGTH = 8;

    private static final Map<String, PlaceholderConfig> PLACEHOLDER_CONFIGS = Map.ofEntries(
            Map.entry("SeriesName", new PlaceholderConfig("(.+?)", "seriesName")),
            Map.entry("Title", new PlaceholderConfig("(.+?)", "title")),
            Map.entry("Subtitle", new PlaceholderConfig("(.+?)", "subtitle")),
            Map.entry("Authors", new PlaceholderConfig("(.+?)", "authors")),
            Map.entry("SeriesNumber", new PlaceholderConfig("(\\d+(?:\\.\\d+)?)", "seriesNumber")),
            Map.entry("Published", new PlaceholderConfig("(.+?)", "publishedDate")),
            Map.entry("Publisher", new PlaceholderConfig("(.+?)", "publisher")),
            Map.entry("Language", new PlaceholderConfig("([a-zA-Z]+)", "language")),
            Map.entry("SeriesTotal", new PlaceholderConfig("(\\d+)", "seriesTotal")),
            Map.entry("ISBN10", new PlaceholderConfig("(\\d{9}[0-9Xx])", "isbn10")),
            Map.entry("ISBN13", new PlaceholderConfig("([0-9]{13})", "isbn13")),
            Map.entry("ASIN", new PlaceholderConfig("(B[A-Za-z0-9]{9}|\\d{9}[0-9Xx])", "asin"))
    );

    private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\{(\\w+)(?::(.*?))?}|\\*");

    private static final Pattern FOUR_DIGIT_YEAR_PATTERN = Pattern.compile("\\d{4}");
    private static final Pattern TWO_DIGIT_YEAR_PATTERN = Pattern.compile("\\d{2}");
    private static final Pattern COMPACT_DATE_PATTERN = Pattern.compile("\\d{8}");
    private static final Pattern FLEXIBLE_DATE_PATTERN = Pattern.compile("(\\d{1,4})([^\\d])(\\d{1,2})\\2(\\d{1,4})");

    @Transactional
    public BookdropPatternExtractResult bulkExtract(BookdropPatternExtractRequest request) {
        List<Long> fileIds = metadataHelper.resolveFileIds(
                Boolean.TRUE.equals(request.getSelectAll()),
                request.getExcludedIds(),
                request.getSelectedIds()
        );

        boolean isPreview = Boolean.TRUE.equals(request.getPreview());
        ParsedPattern cachedPattern = parsePattern(request.getPattern());

        if (cachedPattern == null) {
            log.error("Failed to parse pattern: '{}'", request.getPattern());
            return buildEmptyResult(fileIds.size());
        }

        return isPreview
                ? processPreviewExtraction(fileIds, cachedPattern)
                : processFullExtractionInBatches(fileIds, cachedPattern);
    }

    private BookdropPatternExtractResult processPreviewExtraction(List<Long> fileIds, ParsedPattern pattern) {
        List<Long> limitedFileIds = fileIds.size() > PREVIEW_FILE_LIMIT
                ? fileIds.subList(0, PREVIEW_FILE_LIMIT)
                : fileIds;

        List<BookdropFileEntity> previewFiles = bookdropFileRepository.findAllById(limitedFileIds);
        List<BookdropPatternExtractResult.FileExtractionResult> results = new ArrayList<>();
        int successCount = 0;

        for (BookdropFileEntity file : previewFiles) {
            BookdropPatternExtractResult.FileExtractionResult result = extractFromFile(file, pattern);
            results.add(result);
            if (result.isSuccess()) {
                successCount++;
            }
        }

        int failureCount = previewFiles.size() - successCount;

        return BookdropPatternExtractResult.builder()
                .totalFiles(fileIds.size())
                .successfullyExtracted(successCount)
                .failed(failureCount)
                .results(results)
                .build();
    }

    private BookdropPatternExtractResult processFullExtractionInBatches(List<Long> fileIds, ParsedPattern pattern) {
        final int BATCH_SIZE = 500;
        List<BookdropPatternExtractResult.FileExtractionResult> allResults = new ArrayList<>();
        int totalSuccessCount = 0;
        int totalFailureCount = 0;
        int totalFiles = fileIds.size();

        for (int batchStart = 0; batchStart < fileIds.size(); batchStart += BATCH_SIZE) {
            int batchEnd = Math.min(batchStart + BATCH_SIZE, fileIds.size());

            BatchExtractionResult batchResult = processSingleExtractionBatch(fileIds, batchStart, batchEnd, pattern);

            allResults.addAll(batchResult.results());
            totalSuccessCount += batchResult.successCount();
            totalFailureCount += batchResult.failureCount();

            log.debug("Processed pattern extraction batch {}-{} of {}: {} successful, {} failed",
                    batchStart, batchEnd, totalFiles, batchResult.successCount(), batchResult.failureCount());
        }

        return BookdropPatternExtractResult.builder()
                .totalFiles(totalFiles)
                .successfullyExtracted(totalSuccessCount)
                .failed(totalFailureCount)
                .results(allResults)
                .build();
    }

    private BatchExtractionResult processSingleExtractionBatch(List<Long> allFileIds, int batchStart,
                                                               int batchEnd, ParsedPattern pattern) {
        List<Long> batchIds = allFileIds.subList(batchStart, batchEnd);
        List<BookdropFileEntity> batchFiles = bookdropFileRepository.findAllById(batchIds);
        List<BookdropPatternExtractResult.FileExtractionResult> batchResults = new ArrayList<>();

        for (BookdropFileEntity file : batchFiles) {
            BookdropPatternExtractResult.FileExtractionResult result = extractFromFile(file, pattern);
            batchResults.add(result);
        }

        persistExtractedMetadata(batchResults, batchFiles);

        int successCount = (int) batchResults.stream().filter(BookdropPatternExtractResult.FileExtractionResult::isSuccess).count();
        int failureCount = batchFiles.size() - successCount;
        return new BatchExtractionResult(batchResults, successCount, failureCount);
    }

    private BookdropPatternExtractResult buildEmptyResult(int totalFiles) {
        return BookdropPatternExtractResult.builder()
                .totalFiles(totalFiles)
                .successfullyExtracted(0)
                .failed(totalFiles)
                .results(Collections.emptyList())
                .build();
    }

    public BookMetadata extractFromFilename(String filename, String pattern) {
        ParsedPattern parsedPattern = parsePattern(pattern);
        if (parsedPattern == null) {
            return null;
        }

        return extractFromFilenameWithParsedPattern(filename, parsedPattern);
    }

    private BookMetadata extractFromFilenameWithParsedPattern(String filename, ParsedPattern parsedPattern) {
        String nameOnly = FilenameUtils.getBaseName(filename);

        Optional<Matcher> matcherResult = executeRegexMatchingWithTimeout(parsedPattern.compiledPattern(), nameOnly);

        if (matcherResult.isEmpty()) {
            return null;
        }

        Matcher matcher = matcherResult.get();
        return buildMetadataFromMatch(matcher, parsedPattern.placeholderOrder());
    }

    private Optional<Matcher> executeRegexMatchingWithTimeout(Pattern pattern, String input) {
        Future<Optional<Matcher>> future = regexExecutor.submit(() -> {
            Matcher matcher = pattern.matcher(input);
            return matcher.find() ? Optional.of(matcher) : Optional.empty();
        });

        try {
            return future.get(REGEX_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        } catch (TimeoutException e) {
            future.cancel(true);
            log.warn("Pattern matching exceeded {} second timeout for: {}",
                    REGEX_TIMEOUT_SECONDS, input.substring(0, Math.min(50, input.length())));
            return Optional.empty();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return Optional.empty();
        } catch (ExecutionException e) {
            log.error("Pattern matching failed: {}", e.getCause() != null ? e.getCause().getMessage() : "Unknown");
            return Optional.empty();
        }
    }

    @PreDestroy
    public void shutdownRegexExecutor() {
        regexExecutor.shutdown();
        try {
            if (!regexExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
                regexExecutor.shutdownNow();
            }
        } catch (InterruptedException e) {
            regexExecutor.shutdownNow();
            Thread.currentThread().interrupt();
        }
    }

    private BookdropPatternExtractResult.FileExtractionResult extractFromFile(
            BookdropFileEntity file,
            ParsedPattern parsedPattern) {
        try {
            BookMetadata extracted = extractFromFilenameWithParsedPattern(file.getFileName(), parsedPattern);

            if (extracted == null) {
                String errorMsg = "Pattern did not match filename structure. Check if the pattern aligns with the filename format.";
                log.debug("Pattern mismatch for file '{}'", file.getFileName());
                return BookdropPatternExtractResult.FileExtractionResult.builder()
                        .fileId(file.getId())
                        .fileName(file.getFileName())
                        .success(false)
                        .errorMessage(errorMsg)
                        .build();
            }

            return BookdropPatternExtractResult.FileExtractionResult.builder()
                    .fileId(file.getId())
                    .fileName(file.getFileName())
                    .success(true)
                    .extractedMetadata(extracted)
                    .build();

        } catch (RuntimeException e) {
            String errorMsg = "Extraction failed: " + e.getMessage();
            log.debug("Pattern extraction failed for file '{}': {}", file.getFileName(), e.getMessage());
            return BookdropPatternExtractResult.FileExtractionResult.builder()
                    .fileId(file.getId())
                    .fileName(file.getFileName())
                    .success(false)
                    .errorMessage(errorMsg)
                    .build();
        }
    }

    private ParsedPattern parsePattern(String pattern) {
        if (pattern == null || pattern.isBlank()) {
            return null;
        }

        List<PlaceholderMatch> placeholderMatches = findAllPlaceholders(pattern);
        StringBuilder regexBuilder = new StringBuilder();
        List<String> placeholderOrder = new ArrayList<>();
        int lastEnd = 0;

        for (int i = 0; i < placeholderMatches.size(); i++) {
            PlaceholderMatch placeholderMatch = placeholderMatches.get(i);

            String literalTextBeforePlaceholder = pattern.substring(lastEnd, placeholderMatch.start);
            regexBuilder.append(Pattern.quote(literalTextBeforePlaceholder));

            String placeholderName = placeholderMatch.name;
            String formatParameter = placeholderMatch.formatParameter;

            boolean isLastPlaceholder = (i == placeholderMatches.size() - 1);
            boolean hasTextAfterPlaceholder = (placeholderMatch.end < pattern.length());
            boolean shouldUseGreedyMatching = isLastPlaceholder && !hasTextAfterPlaceholder;

            String regexForPlaceholder;
            if ("*".equals(placeholderName)) {
                regexForPlaceholder = shouldUseGreedyMatching ? "(.+)" : "(.+?)";
            } else if ("Published".equals(placeholderName) && formatParameter != null) {
                regexForPlaceholder = buildRegexForDateFormat(formatParameter);
            } else {
                PlaceholderConfig config = PLACEHOLDER_CONFIGS.get(placeholderName);
                regexForPlaceholder = determineRegexForPlaceholder(config, shouldUseGreedyMatching);
            }

            regexBuilder.append(regexForPlaceholder);

            String placeholderWithFormat = formatParameter != null ? placeholderName + ":" + formatParameter : placeholderName;
            placeholderOrder.add(placeholderWithFormat);
            lastEnd = placeholderMatch.end;
        }

        String literalTextAfterLastPlaceholder = pattern.substring(lastEnd);
        regexBuilder.append(Pattern.quote(literalTextAfterLastPlaceholder));

        try {
            Pattern compiledPattern = Pattern.compile(regexBuilder.toString());
            return new ParsedPattern(compiledPattern, placeholderOrder);
        } catch (PatternSyntaxException e) {
            log.error("Invalid regex syntax from user input '{}': {}", pattern, e.getMessage());
            return null;
        }
    }

    private List<PlaceholderMatch> findAllPlaceholders(String pattern) {
        List<PlaceholderMatch> placeholderMatches = new ArrayList<>();
        Matcher matcher = PLACEHOLDER_PATTERN.matcher(pattern);

        while (matcher.find()) {
            String placeholderName;
            String formatParameter = null;

            if (matcher.group(0).equals("*")) {
                placeholderName = "*";
            } else {
                placeholderName = matcher.group(1);
                formatParameter = matcher.group(2);
            }

            placeholderMatches.add(new PlaceholderMatch(
                    matcher.start(),
                    matcher.end(),
                    placeholderName,
                    formatParameter
            ));
        }

        return placeholderMatches;
    }

    private String buildRegexForDateFormat(String dateFormat) {
        StringBuilder result = new StringBuilder();
        int i = 0;

        while (i < dateFormat.length()) {
            if (dateFormat.startsWith("yyyy", i)) {
                result.append("\\d{4}");
                i += 4;
            } else if (dateFormat.startsWith("yy", i)) {
                result.append("\\d{2}");
                i += 2;
            } else if (dateFormat.startsWith("MM", i)) {
                result.append("\\d{2}");
                i += 2;
            } else if (i < dateFormat.length() && dateFormat.charAt(i) == 'M') {
                result.append("\\d{1,2}");
                i += 1;
            } else if (dateFormat.startsWith("dd", i)) {
                result.append("\\d{2}");
                i += 2;
            } else if (i < dateFormat.length() && dateFormat.charAt(i) == 'd') {
                result.append("\\d{1,2}");
                i += 1;
            } else {
                result.append(Pattern.quote(String.valueOf(dateFormat.charAt(i))));
                i++;
            }
        }

        return "(" + result.toString() + ")";
    }

    private String determineRegexForPlaceholder(PlaceholderConfig config, boolean shouldUseGreedyMatching) {
        if (config != null) {
            String configuredRegex = config.regex();
            boolean isNonGreedyTextPattern = configuredRegex.equals("(.+?)");

            if (shouldUseGreedyMatching && isNonGreedyTextPattern) {
                return "(.+)";
            }
            return configuredRegex;
        }

        return shouldUseGreedyMatching ? "(.+)" : "(.+?)";
    }

    private BookMetadata buildMetadataFromMatch(Matcher matcher, List<String> placeholderOrder) {
        BookMetadata metadata = new BookMetadata();

        for (int i = 0; i < placeholderOrder.size(); i++) {
            String placeholderWithFormat = placeholderOrder.get(i);
            String[] parts = placeholderWithFormat.split(":", 2);
            String placeholderName = parts[0];
            String formatParameter = parts.length > 1 ? parts[1] : null;

            if ("*".equals(placeholderName)) {
                continue;
            }

            String value = matcher.group(i + 1).trim();
            applyValueToMetadata(metadata, placeholderName, value, formatParameter);
        }

        return metadata;
    }

    private void applyValueToMetadata(BookMetadata metadata, String placeholderName, String value, String formatParameter) {
        if (value == null || value.isBlank()) {
            return;
        }

        switch (placeholderName) {
            case "SeriesName" -> metadata.setSeriesName(value);
            case "Title" -> metadata.setTitle(value);
            case "Subtitle" -> metadata.setSubtitle(value);
            case "Authors" -> metadata.setAuthors(parseAuthors(value));
            case "SeriesNumber" -> setSeriesNumber(metadata, value);
            case "Published" -> setPublishedDate(metadata, value, formatParameter);
            case "Publisher" -> metadata.setPublisher(value);
            case "Language" -> metadata.setLanguage(value);
            case "SeriesTotal" -> setSeriesTotal(metadata, value);
            case "ISBN10" -> metadata.setIsbn10(value);
            case "ISBN13" -> metadata.setIsbn13(value);
            case "ASIN" -> metadata.setAsin(value);
        }
    }

    private Set<String> parseAuthors(String value) {
        String[] parts = value.split("[,;&]");
        Set<String> authors = new LinkedHashSet<>();
        for (String part : parts) {
            String trimmed = part.trim();
            if (!trimmed.isEmpty()) {
                authors.add(trimmed);
            }
        }
        return authors;
    }

    private void setSeriesNumber(BookMetadata metadata, String value) {
        try {
            metadata.setSeriesNumber(Float.parseFloat(value));
        } catch (NumberFormatException ignored) {
        }
    }

    private void setPublishedDate(BookMetadata metadata, String value, String dateFormat) {
        String detectedFormat = (dateFormat == null || dateFormat.isBlank())
                ? detectDateFormat(value)
                : dateFormat;

        if (detectedFormat == null) {
            log.warn("Could not detect date format for value: '{}'", value);
            return;
        }

        try {
            if ("yyyy".equals(detectedFormat) || "yy".equals(detectedFormat)) {
                int year = Integer.parseInt(value);
                if ("yy".equals(detectedFormat) && year < 100) {
                    year += (year < TWO_DIGIT_YEAR_CUTOFF) ? 2000 : TWO_DIGIT_YEAR_CENTURY_BASE;
                }
                metadata.setPublishedDate(LocalDate.of(year, 1, 1));
                return;
            }

            DateTimeFormatter formatter = DateTimeFormatter.ofPattern(detectedFormat);
            LocalDate date = LocalDate.parse(value, formatter);
            metadata.setPublishedDate(date);
        } catch (NumberFormatException e) {
            log.warn("Failed to parse year value '{}': {}", value, e.getMessage());
        } catch (DateTimeParseException e) {
            log.warn("Failed to parse date '{}' with format '{}': {}", value, detectedFormat, e.getMessage());
        } catch (IllegalArgumentException e) {
            log.warn("Invalid date format '{}' for value '{}': {}", detectedFormat, value, e.getMessage());
        }
    }

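    // Editor's note: worked examples of the detection below, traced by hand from the code (inputs hypothetical):
    //   "2021-07-04" -> "yyyy-MM-dd" (first part is a four-digit year)
    //   "04/07/2021" -> "dd/MM/yyyy" (year last; 04 and 07 are both <= 12, so day-first is assumed)
    //   "13.5.21"    -> "dd.M.yy"    (no four-digit part, so day-month-year is assumed)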
    private String detectDateFormat(String value) {
        if (value == null || value.isBlank()) {
            return null;
        }

        String trimmed = value.trim();
        int length = trimmed.length();

        if (length == FOUR_DIGIT_YEAR_LENGTH && FOUR_DIGIT_YEAR_PATTERN.matcher(trimmed).matches()) {
            return "yyyy";
        }

        if (length == TWO_DIGIT_YEAR_LENGTH && TWO_DIGIT_YEAR_PATTERN.matcher(trimmed).matches()) {
            return "yy";
        }

        if (length == COMPACT_DATE_LENGTH && COMPACT_DATE_PATTERN.matcher(trimmed).matches()) {
            return "yyyyMMdd";
        }

        Matcher flexibleMatcher = FLEXIBLE_DATE_PATTERN.matcher(trimmed);
        if (flexibleMatcher.matches()) {
            String separator = flexibleMatcher.group(2);
            return determineFlexibleDateFormat(flexibleMatcher, separator);
        }

        return null;
    }

    private String determineFlexibleDateFormat(Matcher matcher, String separator) {
        String part1 = matcher.group(1);
        String part2 = matcher.group(3);
        String part3 = matcher.group(4);

        int val1, val2, val3;
        try {
            val1 = Integer.parseInt(part1);
            val2 = Integer.parseInt(part2);
            val3 = Integer.parseInt(part3);
        } catch (NumberFormatException e) {
            return null;
        }

        String format1, format2, format3;

        if (isYearValue(part1, val1)) {
            format1 = buildYearFormat(part1);
            if (val2 <= 12 && val3 > 12) {
                format2 = buildMonthFormat(part2);
                format3 = buildDayFormat(part3);
            } else if (val3 <= 12 && val2 > 12) {
                format2 = buildDayFormat(part2);
                format3 = buildMonthFormat(part3);
            } else {
                format2 = buildMonthFormat(part2);
                format3 = buildDayFormat(part3);
            }
        } else if (isYearValue(part3, val3)) {
            format3 = buildYearFormat(part3);
            if (val1 <= 12 && val2 > 12) {
                format1 = buildMonthFormat(part1);
                format2 = buildDayFormat(part2);
            } else if (val2 <= 12 && val1 > 12) {
                format1 = buildDayFormat(part1);
                format2 = buildMonthFormat(part2);
            } else {
                format1 = buildDayFormat(part1);
                format2 = buildMonthFormat(part2);
            }
        } else {
            format1 = buildDayFormat(part1);
            format2 = buildMonthFormat(part2);
            format3 = part3.length() == 2 ? "yy" : "y";
        }

        return format1 + separator + format2 + separator + format3;
    }

    private boolean isYearValue(String part, int value) {
        return part.length() == 4 || value > 31;
    }

    private String buildYearFormat(String part) {
        return part.length() == 4 ? "yyyy" : "yy";
    }

    private String buildMonthFormat(String part) {
        return part.length() == 2 ? "MM" : "M";
    }

    private String buildDayFormat(String part) {
        return part.length() == 2 ? "dd" : "d";
    }

    private void setSeriesTotal(BookMetadata metadata, String value) {
        try {
            metadata.setSeriesTotal(Integer.parseInt(value));
        } catch (NumberFormatException ignored) {
        }
    }

    private void persistExtractedMetadata(List<BookdropPatternExtractResult.FileExtractionResult> results, List<BookdropFileEntity> files) {
        Map<Long, BookdropFileEntity> fileMap = new HashMap<>();
        for (BookdropFileEntity file : files) {
            fileMap.put(file.getId(), file);
        }

        Set<Long> failedFileIds = new HashSet<>();

        for (BookdropPatternExtractResult.FileExtractionResult result : results) {
            if (!result.isSuccess() || result.getExtractedMetadata() == null) {
                continue;
            }

            BookdropFileEntity file = fileMap.get(result.getFileId());
            if (file == null) {
                continue;
            }

            try {
                BookMetadata currentMetadata = metadataHelper.getCurrentMetadata(file);
                BookMetadata extractedMetadata = result.getExtractedMetadata();
                metadataHelper.mergeMetadata(currentMetadata, extractedMetadata);
                metadataHelper.updateFetchedMetadata(file, currentMetadata);

            } catch (RuntimeException e) {
                log.error("Error persisting extracted metadata for file {} ({}): {}",
                        file.getId(), file.getFileName(), e.getMessage(), e);
                failedFileIds.add(file.getId());
                result.setSuccess(false);
                result.setErrorMessage("Failed to save metadata: " + e.getMessage());
            }
        }

        List<BookdropFileEntity> filesToSave = files.stream()
                .filter(file -> !failedFileIds.contains(file.getId()))
                .toList();

        if (!filesToSave.isEmpty()) {
            bookdropFileRepository.saveAll(filesToSave);
        }
    }

    private record PlaceholderConfig(String regex, String metadataField) {}

    private record ParsedPattern(Pattern compiledPattern, List<String> placeholderOrder) {}

    private record PlaceholderMatch(int start, int end, String name, String formatParameter) {}

    private record BatchExtractionResult(List<BookdropPatternExtractResult.FileExtractionResult> results,
                                         int successCount, int failureCount) {}
}
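Note: to see the whole pipeline end to end, a small self-contained demo of the kind of regex parsePattern derives from "{Authors} - {Title} ({Published:yyyy})". The regex is hand-simplified here; the real code wraps literals with Pattern.quote and strips the extension with FilenameUtils.getBaseName first.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PatternDemo {
        public static void main(String[] args) {
            // Authors and Title map to non-greedy (.+?); Published:yyyy maps to (\d{4}).
            Pattern p = Pattern.compile("(.+?) - (.+?) \\((\\d{4})\\)");
            Matcher m = p.matcher("Arthur C. Clarke - Rendezvous with Rama (1973)");
            if (m.find()) {
                System.out.println("authors = " + m.group(1)); // Arthur C. Clarke
                System.out.println("title   = " + m.group(2)); // Rendezvous with Rama
                System.out.println("year    = " + m.group(3)); // 1973 -> stored as LocalDate 1973-01-01
            }
        }
    }
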
@@ -78,8 +78,6 @@ public class CbxProcessor extends AbstractFileProcessor implements BookFileProce
        try {
            boolean saved = fileService.saveCoverImages(image, bookEntity.getId());
            if (saved) {
                bookEntity.getMetadata().setCoverUpdatedOn(Instant.now());
                bookMetadataRepository.save(bookEntity.getMetadata());
                return true;
            }
        } finally {

@@ -80,10 +80,6 @@ public class EpubProcessor extends AbstractFileProcessor implements BookFileProc
            originalImage.flush();
        }

        if (saved) {
            bookEntity.getMetadata().setCoverUpdatedOn(Instant.now());
            bookMetadataRepository.save(bookEntity.getMetadata());
        }
        return saved;

    } catch (Exception e) {

@@ -0,0 +1,140 @@
package com.adityachandel.booklore.service.fileprocessor;

import com.adityachandel.booklore.mapper.BookMapper;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.settings.LibraryFile;
import com.adityachandel.booklore.model.entity.BookEntity;
import com.adityachandel.booklore.model.entity.BookMetadataEntity;
import com.adityachandel.booklore.model.enums.BookFileType;
import com.adityachandel.booklore.repository.BookAdditionalFileRepository;
import com.adityachandel.booklore.repository.BookMetadataRepository;
import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.service.book.BookCreatorService;
import com.adityachandel.booklore.service.metadata.MetadataMatchService;
import com.adityachandel.booklore.service.metadata.extractor.Fb2MetadataExtractor;
import com.adityachandel.booklore.util.FileService;
import com.adityachandel.booklore.util.FileUtils;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.time.Instant;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static com.adityachandel.booklore.util.FileService.truncate;

@Slf4j
@Service
public class Fb2Processor extends AbstractFileProcessor implements BookFileProcessor {

    private final Fb2MetadataExtractor fb2MetadataExtractor;
    private final BookMetadataRepository bookMetadataRepository;

    public Fb2Processor(BookRepository bookRepository,
                        BookAdditionalFileRepository bookAdditionalFileRepository,
                        BookCreatorService bookCreatorService,
                        BookMapper bookMapper,
                        FileService fileService,
                        BookMetadataRepository bookMetadataRepository,
                        MetadataMatchService metadataMatchService,
                        Fb2MetadataExtractor fb2MetadataExtractor) {
        super(bookRepository, bookAdditionalFileRepository, bookCreatorService, bookMapper, fileService, metadataMatchService);
        this.fb2MetadataExtractor = fb2MetadataExtractor;
        this.bookMetadataRepository = bookMetadataRepository;
    }

    @Override
    public BookEntity processNewFile(LibraryFile libraryFile) {
        BookEntity bookEntity = bookCreatorService.createShellBook(libraryFile, BookFileType.FB2);
        setBookMetadata(bookEntity);
        if (generateCover(bookEntity)) {
            FileService.setBookCoverPath(bookEntity.getMetadata());
        }
        return bookEntity;
    }

    @Override
    public boolean generateCover(BookEntity bookEntity) {
        try {
            File fb2File = new File(FileUtils.getBookFullPath(bookEntity));
            byte[] coverData = fb2MetadataExtractor.extractCover(fb2File);

            if (coverData == null || coverData.length == 0) {
                log.warn("No cover image found in FB2 '{}'", bookEntity.getFileName());
                return false;
            }

            boolean saved = saveCoverImage(coverData, bookEntity.getId());
            return saved;

        } catch (Exception e) {
            log.error("Error generating cover for FB2 '{}': {}", bookEntity.getFileName(), e.getMessage(), e);
            return false;
        }
    }

    @Override
    public List<BookFileType> getSupportedTypes() {
        return List.of(BookFileType.FB2);
    }

    private void setBookMetadata(BookEntity bookEntity) {
        File bookFile = new File(bookEntity.getFullFilePath().toUri());
        BookMetadata fb2Metadata = fb2MetadataExtractor.extractMetadata(bookFile);
        if (fb2Metadata == null) return;

        BookMetadataEntity metadata = bookEntity.getMetadata();

        metadata.setTitle(truncate(fb2Metadata.getTitle(), 1000));
        metadata.setSubtitle(truncate(fb2Metadata.getSubtitle(), 1000));
        metadata.setDescription(truncate(fb2Metadata.getDescription(), 2000));
        metadata.setPublisher(truncate(fb2Metadata.getPublisher(), 1000));
        metadata.setPublishedDate(fb2Metadata.getPublishedDate());
        metadata.setSeriesName(truncate(fb2Metadata.getSeriesName(), 1000));
        metadata.setSeriesNumber(fb2Metadata.getSeriesNumber());
        metadata.setSeriesTotal(fb2Metadata.getSeriesTotal());
        metadata.setIsbn13(truncate(fb2Metadata.getIsbn13(), 64));
        metadata.setIsbn10(truncate(fb2Metadata.getIsbn10(), 64));
        metadata.setPageCount(fb2Metadata.getPageCount());

        String lang = fb2Metadata.getLanguage();
        metadata.setLanguage(truncate((lang == null || "UND".equalsIgnoreCase(lang)) ? "en" : lang, 1000));

        metadata.setAsin(truncate(fb2Metadata.getAsin(), 20));
        metadata.setAmazonRating(fb2Metadata.getAmazonRating());
        metadata.setAmazonReviewCount(fb2Metadata.getAmazonReviewCount());
        metadata.setGoodreadsId(truncate(fb2Metadata.getGoodreadsId(), 100));
        metadata.setGoodreadsRating(fb2Metadata.getGoodreadsRating());
        metadata.setGoodreadsReviewCount(fb2Metadata.getGoodreadsReviewCount());
        metadata.setHardcoverId(truncate(fb2Metadata.getHardcoverId(), 100));
        metadata.setHardcoverRating(fb2Metadata.getHardcoverRating());
        metadata.setHardcoverReviewCount(fb2Metadata.getHardcoverReviewCount());
        metadata.setGoogleId(truncate(fb2Metadata.getGoogleId(), 100));
        metadata.setComicvineId(truncate(fb2Metadata.getComicvineId(), 100));

        bookCreatorService.addAuthorsToBook(fb2Metadata.getAuthors(), bookEntity);

        if (fb2Metadata.getCategories() != null) {
            Set<String> validSubjects = fb2Metadata.getCategories().stream()
                    .filter(s -> s != null && !s.isBlank() && s.length() <= 100 && !s.contains("\n") && !s.contains("\r") && !s.contains("  "))
                    .collect(Collectors.toSet());
            bookCreatorService.addCategoriesToBook(validSubjects, bookEntity);
        }
    }

    private boolean saveCoverImage(byte[] coverData, long bookId) throws Exception {
        BufferedImage originalImage = ImageIO.read(new ByteArrayInputStream(coverData));
        try {
            return fileService.saveCoverImages(originalImage, bookId);
        } finally {
            if (originalImage != null) {
                originalImage.flush(); // Release resources after processing
            }
        }
    }
}
@@ -62,10 +62,7 @@ public class PdfProcessor extends AbstractFileProcessor implements BookFileProcessor {
    @Override
    public boolean generateCover(BookEntity bookEntity) {
        try (PDDocument pdf = Loader.loadPDF(new File(FileUtils.getBookFullPath(bookEntity)))) {
            boolean saved = generateCoverImageAndSave(bookEntity.getId(), pdf);
            bookEntity.getMetadata().setCoverUpdatedOn(Instant.now());
            bookMetadataRepository.save(bookEntity.getMetadata());
            return saved;
            return generateCoverImageAndSave(bookEntity.getId(), pdf);
        } catch (OutOfMemoryError e) {
            // Note: Catching OOM is generally discouraged, but for batch processing
            // of potentially large/corrupted PDFs, we prefer graceful degradation
@@ -0,0 +1,596 @@
package com.adityachandel.booklore.service.hardcover;

import com.adityachandel.booklore.model.dto.settings.MetadataProviderSettings;
import com.adityachandel.booklore.model.entity.BookEntity;
import com.adityachandel.booklore.model.entity.BookMetadataEntity;
import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.service.appsettings.AppSettingService;
import com.adityachandel.booklore.service.metadata.parser.hardcover.GraphQLRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.client.RestClient;
import org.springframework.web.client.RestClientException;

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;

/**
 * Service to sync reading progress to Hardcover.
 * Uses the global Hardcover API token from Metadata Provider Settings.
 * Sync only activates if the token is configured and Hardcover is enabled.
 */
@Slf4j
@Service
public class HardcoverSyncService {

    private static final String HARDCOVER_API_URL = "https://api.hardcover.app/v1/graphql";
    private static final int STATUS_CURRENTLY_READING = 2;
    private static final int STATUS_READ = 3;

    private final RestClient restClient;
    private final AppSettingService appSettingService;
    private final BookRepository bookRepository;

    @Autowired
    public HardcoverSyncService(AppSettingService appSettingService, BookRepository bookRepository) {
        this.appSettingService = appSettingService;
        this.bookRepository = bookRepository;
        this.restClient = RestClient.builder()
                .baseUrl(HARDCOVER_API_URL)
                .build();
    }

    /**
     * Asynchronously sync Kobo reading progress to Hardcover.
     * This method is non-blocking and will not fail the calling process if sync fails.
     *
     * @param bookId The book ID to sync progress for
     * @param progressPercent The reading progress as a percentage (0-100)
     */
    @Async
    @Transactional(readOnly = true)
    public void syncProgressToHardcover(Long bookId, Float progressPercent) {
        try {
            if (!isHardcoverSyncEnabled()) {
                log.trace("Hardcover sync skipped: not enabled or no API token configured");
                return;
            }

            if (progressPercent == null) {
                log.debug("Hardcover sync skipped: no progress to sync");
                return;
            }

            // Fetch book fresh within the async context to avoid lazy loading issues
            BookEntity book = bookRepository.findById(bookId).orElse(null);
            if (book == null) {
                log.debug("Hardcover sync skipped: book {} not found", bookId);
                return;
            }

            BookMetadataEntity metadata = book.getMetadata();
            if (metadata == null) {
                log.debug("Hardcover sync skipped: book {} has no metadata", bookId);
                return;
            }

            // Find the book on Hardcover - use stored ID if available
            HardcoverBookInfo hardcoverBook;
            if (metadata.getHardcoverBookId() != null) {
                // Use the stored numeric book ID directly
                hardcoverBook = new HardcoverBookInfo();
                hardcoverBook.bookId = metadata.getHardcoverBookId();
                hardcoverBook.pages = metadata.getPageCount();
                log.debug("Using stored Hardcover book ID: {}", hardcoverBook.bookId);
            } else {
                // Search by ISBN
                hardcoverBook = findHardcoverBook(metadata);
                if (hardcoverBook == null) {
                    log.debug("Hardcover sync skipped: book {} not found on Hardcover", bookId);
                    return;
                }
            }

            // Determine the status based on progress
            int statusId = progressPercent >= 99.0f ? STATUS_READ : STATUS_CURRENTLY_READING;

            // Calculate progress in pages
            int progressPages = 0;
            if (hardcoverBook.pages != null && hardcoverBook.pages > 0) {
                progressPages = Math.round((progressPercent / 100.0f) * hardcoverBook.pages);
                progressPages = Math.max(0, Math.min(hardcoverBook.pages, progressPages));
            }
            log.info("Progress calculation: progressPercent={}%, totalPages={}, progressPages={}",
                    progressPercent, hardcoverBook.pages, progressPages);

            // Step 1: Add/update the book in user's library
            Integer userBookId = insertOrGetUserBook(hardcoverBook.bookId, hardcoverBook.editionId, statusId);
            if (userBookId == null) {
                log.warn("Hardcover sync failed: could not get user_book_id for book {}", bookId);
                return;
            }

            // Step 2: Create or update the reading progress
            boolean success = upsertReadingProgress(userBookId, hardcoverBook.editionId, progressPages);

            if (success) {
                log.info("Synced progress to Hardcover: book={}, hardcoverBookId={}, progress={}% ({}pages)",
                        bookId, hardcoverBook.bookId, Math.round(progressPercent), progressPages);
            }

        } catch (Exception e) {
            log.error("Failed to sync progress to Hardcover for book {}: {}",
                    bookId, e.getMessage());
        }
    }
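To make the page math above concrete: for a 320-page edition at 42% (figures chosen purely for illustration), the rounding and clamping work out as follows.

// Illustrative walk-through of the calculation in syncProgressToHardcover:
float progressPercent = 42.0f;
int totalPages = 320;
int progressPages = Math.round((progressPercent / 100.0f) * totalPages); // round(134.4) = 134
progressPages = Math.max(0, Math.min(totalPages, progressPages));        // clamped to [0, 320] -> 134
int statusId = progressPercent >= 99.0f ? 3 : 2;                         // STATUS_CURRENTLY_READING (2)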

    private boolean isHardcoverSyncEnabled() {
        MetadataProviderSettings.Hardcover hardcoverSettings =
                appSettingService.getAppSettings().getMetadataProviderSettings().getHardcover();

        if (hardcoverSettings == null) {
            return false;
        }

        return hardcoverSettings.isEnabled()
                && hardcoverSettings.getApiKey() != null
                && !hardcoverSettings.getApiKey().isBlank();
    }

    private String getApiToken() {
        return appSettingService.getAppSettings()
                .getMetadataProviderSettings()
                .getHardcover()
                .getApiKey();
    }

    /**
     * Find a book on Hardcover by ISBN or hardcoverId.
     * Returns the numeric book_id, edition_id, and page count.
     */
    private HardcoverBookInfo findHardcoverBook(BookMetadataEntity metadata) {
        // Try ISBN first
        String isbn = metadata.getIsbn13();
        if (isbn == null || isbn.isBlank()) {
            isbn = metadata.getIsbn10();
        }

        if (isbn == null || isbn.isBlank()) {
            log.debug("No ISBN available for Hardcover lookup");
            return null;
        }

        try {
            String searchQuery = """
                    query SearchBooks($query: String!) {
                        search(query: $query, query_type: "Book", per_page: 1, page: 1) {
                            results
                        }
                    }
                    """;

            GraphQLRequest request = new GraphQLRequest();
            request.setQuery(searchQuery);
            request.setVariables(Map.of("query", isbn));

            Map<String, Object> response = executeGraphQL(request);
            log.debug("Hardcover search response for ISBN {}: {}", isbn, response);
            if (response == null) {
                return null;
            }

            // Navigate the response to get book info
            Map<String, Object> data = (Map<String, Object>) response.get("data");
            if (data == null) return null;

            Map<String, Object> search = (Map<String, Object>) data.get("search");
            if (search == null) return null;

            Map<String, Object> results = (Map<String, Object>) search.get("results");
            if (results == null) return null;

            List<Map<String, Object>> hits = (List<Map<String, Object>>) results.get("hits");
            if (hits == null || hits.isEmpty()) return null;

            Map<String, Object> document = (Map<String, Object>) hits.get(0).get("document");
            if (document == null) return null;

            // Extract book info
            HardcoverBookInfo info = new HardcoverBookInfo();

            // The 'id' field contains the numeric book ID
            Object idObj = document.get("id");
            if (idObj instanceof String) {
                info.bookId = Integer.parseInt((String) idObj);
            } else if (idObj instanceof Number) {
                info.bookId = ((Number) idObj).intValue();
            }

            // Get page count
            Object pagesObj = document.get("pages");
            if (pagesObj instanceof Number) {
                info.pages = ((Number) pagesObj).intValue();
            }

            // Try to get default_edition_id from the search results
            Object defaultEditionObj = document.get("default_edition_id");
            if (defaultEditionObj instanceof Number) {
                info.editionId = ((Number) defaultEditionObj).intValue();
            } else if (defaultEditionObj instanceof String) {
                try {
                    info.editionId = Integer.parseInt((String) defaultEditionObj);
                } catch (NumberFormatException e) {
                    // Ignore
                }
            }

            // If no default edition, try to look up edition by ISBN
            // This also gets the page count from the specific edition
            if (info.bookId != null) {
                EditionInfo edition = findEditionByIsbn(info.bookId, isbn);
                if (edition != null) {
                    info.editionId = edition.id;
                    // Prefer edition page count over book page count
                    if (edition.pages != null && edition.pages > 0) {
                        info.pages = edition.pages;
                    }
                }
            }

            log.info("Found Hardcover book: bookId={}, editionId={}, pages={}",
                    info.bookId, info.editionId, info.pages);

            return info.bookId != null ? info : null;

        } catch (Exception e) {
            log.warn("Failed to search Hardcover by ISBN {}: {}", isbn, e.getMessage());
            return null;
        }
    }
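The navigation in findHardcoverBook assumes a payload shaped like the sketch below (data -> search -> results -> hits[0] -> document); the field values here are invented for illustration, and the real Hardcover response may carry additional fields:

// Assumed response shape (illustrative values only):
String assumedSearchResponse = """
        {
          "data": {
            "search": {
              "results": {
                "hits": [
                  { "document": { "id": "433567", "pages": 320, "default_edition_id": 120934 } }
                ]
              }
            }
          }
        }
        """;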

    /**
     * Find an edition by ISBN for a given book.
     * This queries Hardcover's editions table to match by ISBN.
     */
    private EditionInfo findEditionByIsbn(Integer bookId, String isbn) {
        String query = """
                query FindEditionByIsbn($bookId: Int!, $isbn: String!) {
                    editions(where: {
                        book_id: {_eq: $bookId},
                        _or: [
                            {isbn_10: {_eq: $isbn}},
                            {isbn_13: {_eq: $isbn}}
                        ]
                    }, limit: 1) {
                        id
                        pages
                    }
                }
                """;

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(query);
        request.setVariables(Map.of("bookId", bookId, "isbn", isbn));

        try {
            Map<String, Object> response = executeGraphQL(request);
            log.debug("Edition lookup response: {}", response);
            if (response == null) return null;

            Map<String, Object> data = (Map<String, Object>) response.get("data");
            if (data == null) return null;

            List<Map<String, Object>> editions = (List<Map<String, Object>>) data.get("editions");
            if (editions == null || editions.isEmpty()) return null;

            Map<String, Object> edition = editions.get(0);
            EditionInfo info = new EditionInfo();

            Object idObj = edition.get("id");
            if (idObj instanceof Number) {
                info.id = ((Number) idObj).intValue();
            }

            Object pagesObj = edition.get("pages");
            if (pagesObj instanceof Number) {
                info.pages = ((Number) pagesObj).intValue();
            }

            return info.id != null ? info : null;

        } catch (Exception e) {
            log.debug("Failed to find edition by ISBN: {}", e.getMessage());
            return null;
        }
    }

    /**
     * Insert a book into the user's library or get existing user_book_id.
     */
    private Integer insertOrGetUserBook(Integer bookId, Integer editionId, int statusId) {
        String mutation = """
                mutation InsertUserBook($object: UserBookCreateInput!) {
                    insert_user_book(object: $object) {
                        user_book {
                            id
                        }
                        error
                    }
                }
                """;

        Map<String, Object> bookInput = new java.util.HashMap<>();
        bookInput.put("book_id", bookId);
        bookInput.put("status_id", statusId);
        bookInput.put("date_added", LocalDate.now().format(DateTimeFormatter.ISO_LOCAL_DATE));
        if (editionId != null) {
            bookInput.put("edition_id", editionId);
        }

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(mutation);
        request.setVariables(Map.of("object", bookInput));

        try {
            Map<String, Object> response = executeGraphQL(request);
            log.debug("insert_user_book response: {}", response);
            if (response == null) return null;

            Map<String, Object> data = (Map<String, Object>) response.get("data");
            if (data == null) return null;

            Map<String, Object> insertResult = (Map<String, Object>) data.get("insert_user_book");
            if (insertResult == null) return null;

            // Check for error (might mean book already exists)
            String error = (String) insertResult.get("error");
            if (error != null && !error.isBlank()) {
                log.debug("insert_user_book returned error: {} - book may already exist, trying to find it", error);
                return findExistingUserBook(bookId);
            }

            Map<String, Object> userBook = (Map<String, Object>) insertResult.get("user_book");
            if (userBook == null) return null;

            Object idObj = userBook.get("id");
            if (idObj instanceof Number) {
                return ((Number) idObj).intValue();
            }

            return null;

        } catch (RestClientException e) {
            log.warn("Failed to insert user_book: {}", e.getMessage());
            // Try to find existing
            return findExistingUserBook(bookId);
        }
    }

    /**
     * Find an existing user_book entry for a book.
     */
    private Integer findExistingUserBook(Integer bookId) {
        String query = """
                query FindUserBook($bookId: Int!) {
                    me {
                        user_books(where: {book_id: {_eq: $bookId}}, limit: 1) {
                            id
                        }
                    }
                }
                """;

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(query);
        request.setVariables(Map.of("bookId", bookId));

        try {
            Map<String, Object> response = executeGraphQL(request);
            if (response == null) return null;

            Map<String, Object> data = (Map<String, Object>) response.get("data");
            if (data == null) return null;

            Map<String, Object> me = (Map<String, Object>) data.get("me");
            if (me == null) return null;

            List<Map<String, Object>> userBooks = (List<Map<String, Object>>) me.get("user_books");
            if (userBooks == null || userBooks.isEmpty()) return null;

            Object idObj = userBooks.get(0).get("id");
            if (idObj instanceof Number) {
                return ((Number) idObj).intValue();
            }

            return null;

        } catch (RestClientException e) {
            log.warn("Failed to find existing user_book: {}", e.getMessage());
            return null;
        }
    }

    /**
     * Create or update reading progress for a user_book.
     */
    private boolean upsertReadingProgress(Integer userBookId, Integer editionId, int progressPages) {
        log.info("upsertReadingProgress: userBookId={}, editionId={}, progressPages={}",
                userBookId, editionId, progressPages);

        // First, try to find existing user_book_read
        Integer existingReadId = findExistingUserBookRead(userBookId);

        if (existingReadId != null) {
            // Update existing
            log.info("Updating existing user_book_read: id={}", existingReadId);
            return updateUserBookRead(existingReadId, editionId, progressPages);
        } else {
            // Create new
            log.info("Creating new user_book_read for userBookId={}", userBookId);
            return insertUserBookRead(userBookId, editionId, progressPages);
        }
    }

    private Integer findExistingUserBookRead(Integer userBookId) {
        String query = """
                query FindUserBookRead($userBookId: Int!) {
                    user_book_reads(where: {user_book_id: {_eq: $userBookId}}, limit: 1) {
                        id
                    }
                }
                """;

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(query);
        request.setVariables(Map.of("userBookId", userBookId));

        try {
            Map<String, Object> response = executeGraphQL(request);
            if (response == null) return null;

            Map<String, Object> data = (Map<String, Object>) response.get("data");
            if (data == null) return null;

            List<Map<String, Object>> reads = (List<Map<String, Object>>) data.get("user_book_reads");
            if (reads == null || reads.isEmpty()) return null;

            Object idObj = reads.get(0).get("id");
            if (idObj instanceof Number) {
                return ((Number) idObj).intValue();
            }

            return null;

        } catch (RestClientException e) {
            log.warn("Failed to find existing user_book_read: {}", e.getMessage());
            return null;
        }
    }

    private boolean insertUserBookRead(Integer userBookId, Integer editionId, int progressPages) {
        String mutation = """
                mutation InsertUserBookRead($userBookId: Int!, $object: DatesReadInput!) {
                    insert_user_book_read(user_book_id: $userBookId, user_book_read: $object) {
                        user_book_read {
                            id
                        }
                        error
                    }
                }
                """;

        Map<String, Object> readInput = new java.util.HashMap<>();
        readInput.put("started_at", LocalDate.now().format(DateTimeFormatter.ISO_LOCAL_DATE));
        readInput.put("progress_pages", progressPages);
        if (editionId != null) {
            readInput.put("edition_id", editionId);
        }

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(mutation);
        request.setVariables(Map.of(
                "userBookId", userBookId,
                "object", readInput
        ));

        try {
            Map<String, Object> response = executeGraphQL(request);
            log.info("insert_user_book_read response: {}", response);
            if (response == null) return false;

            if (response.containsKey("errors")) {
                log.warn("insert_user_book_read returned errors: {}", response.get("errors"));
                return false;
            }

            return true;

        } catch (RestClientException e) {
            log.error("Failed to insert user_book_read: {}", e.getMessage());
            return false;
        }
    }

    private boolean updateUserBookRead(Integer readId, Integer editionId, int progressPages) {
        String mutation = """
                mutation UpdateUserBookRead($id: Int!, $object: DatesReadInput!) {
                    update_user_book_read(id: $id, object: $object) {
                        user_book_read {
                            id
                            progress
                        }
                        error
                    }
                }
                """;

        Map<String, Object> readInput = new java.util.HashMap<>();
        readInput.put("progress_pages", progressPages);
        if (editionId != null) {
            readInput.put("edition_id", editionId);
        }

        GraphQLRequest request = new GraphQLRequest();
        request.setQuery(mutation);
        request.setVariables(Map.of(
                "id", readId,
                "object", readInput
        ));

        try {
            Map<String, Object> response = executeGraphQL(request);
            log.debug("update_user_book_read response: {}", response);
            if (response == null) return false;

            if (response.containsKey("errors")) {
                log.warn("update_user_book_read returned errors: {}", response.get("errors"));
                return false;
            }

            return true;

        } catch (RestClientException e) {
            log.error("Failed to update user_book_read: {}", e.getMessage());
            return false;
        }
    }

    private Map<String, Object> executeGraphQL(GraphQLRequest request) {
        try {
            return restClient.post()
                    .uri("")
                    .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
                    .header(HttpHeaders.AUTHORIZATION, "Bearer " + getApiToken())
                    .body(request)
                    .retrieve()
                    .body(Map.class);
        } catch (RestClientException e) {
            log.error("GraphQL request failed: {}", e.getMessage());
            return null;
        }
    }

    /**
     * Helper class to hold Hardcover book information.
     */
    private static class HardcoverBookInfo {
        Integer bookId;
        Integer editionId;
        Integer pages;
    }

    /**
     * Helper class to hold edition information.
     */
    private static class EditionInfo {
        Integer id;
        Integer pages;
    }
}
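As wired up later in this changeset, KoboReadingStateService calls the sync fire-and-forget after saving local progress. Because the method is @Async and catches its own exceptions, the caller needs no error handling (assuming async execution is enabled in the application configuration):

// Calling side (see the KoboReadingStateService hunk below): a single line, no try/catch required.
hardcoverSyncService.syncProgressToHardcover(book.getId(), progress.getKoboProgressPercent());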
@@ -102,20 +102,20 @@ public class CbxConversionService {
     * @throws IllegalArgumentException if the file format is not supported
     * @throws IllegalStateException if no valid images are found in the archive
     */
    public File convertCbxToEpub(File cbxFile, File tempDir, BookEntity bookEntity)
    public File convertCbxToEpub(File cbxFile, File tempDir, BookEntity bookEntity, int compressionPercentage)
            throws IOException, TemplateException, RarException {
        validateInputs(cbxFile, tempDir);

        log.info("Starting CBX to EPUB conversion for: {}", cbxFile.getName());

        File outputFile = executeCbxConversion(cbxFile, tempDir, bookEntity);
        File outputFile = executeCbxConversion(cbxFile, tempDir, bookEntity, compressionPercentage);

        log.info("Successfully converted {} to {} (size: {} bytes)",
                cbxFile.getName(), outputFile.getName(), outputFile.length());
        return outputFile;
    }

    private File executeCbxConversion(File cbxFile, File tempDir, BookEntity bookEntity)
    private File executeCbxConversion(File cbxFile, File tempDir, BookEntity bookEntity, int compressionPercentage)
            throws IOException, TemplateException, RarException {

        Path epubFilePath = Paths.get(tempDir.getAbsolutePath(), cbxFile.getName() + ".epub");
@@ -136,7 +136,7 @@ public class CbxConversionService {
        addMetaInfContainer(zipOut);
        addStylesheet(zipOut);

        List<EpubContentFileGroup> contentGroups = addImagesAndPages(zipOut, imagePaths);
        List<EpubContentFileGroup> contentGroups = addImagesAndPages(zipOut, imagePaths, compressionPercentage);

        addContentOpf(zipOut, bookEntity, contentGroups);
        addTocNcx(zipOut, bookEntity, contentGroups);
@@ -340,13 +340,13 @@ public class CbxConversionService {
        zipOut.closeArchiveEntry();
    }

    private List<EpubContentFileGroup> addImagesAndPages(ZipArchiveOutputStream zipOut, List<Path> imagePaths)
    private List<EpubContentFileGroup> addImagesAndPages(ZipArchiveOutputStream zipOut, List<Path> imagePaths, int compressionPercentage)
            throws IOException, TemplateException {

        List<EpubContentFileGroup> contentGroups = new ArrayList<>();

        if (!imagePaths.isEmpty()) {
            addImageToZipFromPath(zipOut, COVER_IMAGE_PATH, imagePaths.getFirst());
            addImageToZipFromPath(zipOut, COVER_IMAGE_PATH, imagePaths.getFirst(), compressionPercentage);
        }

        for (int i = 0; i < imagePaths.size(); i++) {
@@ -358,7 +358,7 @@ public class CbxConversionService {
            String imagePath = IMAGE_ROOT_PATH + imageFileName;
            String htmlPath = HTML_ROOT_PATH + htmlFileName;

            addImageToZipFromPath(zipOut, imagePath, imageSourcePath);
            addImageToZipFromPath(zipOut, imagePath, imageSourcePath, compressionPercentage);

            String htmlContent = generatePageHtml(imageFileName, i + 1);
            ZipArchiveEntry htmlEntry = new ZipArchiveEntry(htmlPath);
@@ -372,7 +372,7 @@ public class CbxConversionService {
        return contentGroups;
    }

    private void addImageToZipFromPath(ZipArchiveOutputStream zipOut, String epubImagePath, Path sourceImagePath)
    private void addImageToZipFromPath(ZipArchiveOutputStream zipOut, String epubImagePath, Path sourceImagePath, int compressionPercentage)
            throws IOException {
        ZipArchiveEntry imageEntry = new ZipArchiveEntry(epubImagePath);
        zipOut.putArchiveEntry(imageEntry);
@@ -385,7 +385,7 @@ public class CbxConversionService {
        try (InputStream fis = Files.newInputStream(sourceImagePath)) {
            BufferedImage image = ImageIO.read(fis);
            if (image != null) {
                writeJpegImage(image, zipOut, 0.85f);
                writeJpegImage(image, zipOut, compressionPercentage / 100f);
            } else {
                log.warn("Could not decode image {}, copying raw bytes", sourceImagePath.getFileName());
                try (InputStream rawStream = Files.newInputStream(sourceImagePath)) {
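The new compressionPercentage parameter threads a 0-100 integer down to addImageToZipFromPath, which divides it by 100 to get the JPEG quality factor that was previously hard-coded as 0.85f. A quick sketch of the mapping (the value 85 is an assumed caller input, not a documented default):

int compressionPercentage = 85;                   // hypothetical caller-supplied value
float jpegQuality = compressionPercentage / 100f; // 0.85f, equivalent to the old constant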
@@ -41,6 +41,7 @@ public class KoboInitializationService {
            objectNode.put("image_host", baseBuilder.build().toUriString());
            objectNode.put("image_url_template", koboUrlBuilder.imageUrlTemplate(token));
            objectNode.put("image_url_quality_template", koboUrlBuilder.imageUrlQualityTemplate(token));
            objectNode.put("library_sync", koboUrlBuilder.librarySyncUrl(token));
        }

        return ResponseEntity.ok()
@@ -16,6 +16,7 @@ import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.repository.KoboReadingStateRepository;
import com.adityachandel.booklore.repository.UserBookProgressRepository;
import com.adityachandel.booklore.repository.UserRepository;
import com.adityachandel.booklore.service.hardcover.HardcoverSyncService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
@@ -41,6 +42,7 @@ public class KoboReadingStateService {
    private final AuthenticationService authenticationService;
    private final KoboSettingsService koboSettingsService;
    private final KoboReadingStateBuilder readingStateBuilder;
    private final HardcoverSyncService hardcoverSyncService;

    @Transactional
    public KoboReadingStateResponse saveReadingState(List<KoboReadingState> readingStates) {
@@ -168,6 +170,9 @@ public class KoboReadingStateService {

            progressRepository.save(progress);
            log.debug("Synced Kobo progress: bookId={}, progress={}%", bookId, progress.getKoboProgressPercent());

            // Sync progress to Hardcover asynchronously (if enabled)
            hardcoverSyncService.syncProgressToHardcover(book.getId(), progress.getKoboProgressPercent());
        } catch (NumberFormatException e) {
            log.warn("Invalid entitlement ID format: {}", readingState.getEntitlementId());
        }
@@ -48,6 +48,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
@@ -57,6 +58,8 @@ import java.util.stream.Collectors;
@AllArgsConstructor
public class BookMetadataService {

    private static final int BATCH_SIZE = 100;

    private final BookRepository bookRepository;
    private final BookMapper bookMapper;
    private final BookMetadataMapper bookMetadataMapper;
@@ -157,6 +160,76 @@ public class BookMetadataService {
        return updateCover(bookId, (writer, book) -> writer.replaceCoverImageFromUpload(book, file));
    }

    public void updateCoverImageFromFileForBooks(Set<Long> bookIds, MultipartFile file) {
        validateCoverFile(file);
        byte[] coverImageBytes = extractBytesFromMultipartFile(file);
        List<BookCoverInfo> unlockedBooks = getUnlockedBookCoverInfos(bookIds);

        SecurityContextVirtualThread.runWithSecurityContext(() ->
                processBulkCoverUpdate(unlockedBooks, coverImageBytes));
    }

    private void validateCoverFile(MultipartFile file) {
        if (file.isEmpty()) {
            throw ApiError.INVALID_INPUT.createException("Uploaded file is empty");
        }
        String contentType = file.getContentType();
        if (contentType == null || (!contentType.toLowerCase().startsWith("image/jpeg") && !contentType.toLowerCase().startsWith("image/png"))) {
            throw ApiError.INVALID_INPUT.createException("Only JPEG and PNG files are allowed");
        }
        long maxFileSize = 5L * 1024 * 1024;
        if (file.getSize() > maxFileSize) {
            throw ApiError.FILE_TOO_LARGE.createException(5);
        }
    }

    private byte[] extractBytesFromMultipartFile(MultipartFile file) {
        try {
            return file.getBytes();
        } catch (Exception e) {
            log.error("Failed to read cover file: {}", e.getMessage());
            throw new RuntimeException("Failed to read cover file", e);
        }
    }

    private record BookCoverInfo(Long id, String title) {}

    private List<BookCoverInfo> getUnlockedBookCoverInfos(Set<Long> bookIds) {
        return bookQueryService.findAllWithMetadataByIds(bookIds).stream()
                .filter(book -> !isCoverLocked(book))
                .map(book -> new BookCoverInfo(book.getId(), book.getMetadata().getTitle()))
                .toList();
    }

    private boolean isCoverLocked(BookEntity book) {
        return book.getMetadata().getCoverLocked() != null && book.getMetadata().getCoverLocked();
    }

    private void processBulkCoverUpdate(List<BookCoverInfo> books, byte[] coverImageBytes) {
        try {
            int total = books.size();
            notificationService.sendMessage(Topic.LOG, LogNotification.info("Started updating covers for " + total + " selected book(s)"));

            int current = 1;
            for (BookCoverInfo bookInfo : books) {
                try {
                    String progress = "(" + current + "/" + total + ") ";
                    notificationService.sendMessage(Topic.LOG, LogNotification.info(progress + "Updating cover for: " + bookInfo.title()));
                    fileService.createThumbnailFromBytes(bookInfo.id(), coverImageBytes);
                    log.info("{}Successfully updated cover for book ID {} ({})", progress, bookInfo.id(), bookInfo.title());
                } catch (Exception e) {
                    log.error("Failed to update cover for book ID {}: {}", bookInfo.id(), e.getMessage(), e);
                }
                pauseAfterBatchIfNeeded(current, total);
                current++;
            }
            notificationService.sendMessage(Topic.LOG, LogNotification.info("Finished updating covers for selected books"));
        } catch (Exception e) {
            log.error("Error during cover update: {}", e.getMessage(), e);
            notificationService.sendMessage(Topic.LOG, LogNotification.error("Error occurred during cover update"));
        }
    }

    @Transactional
    public BookMetadata updateCoverImageFromUrl(Long bookId, String url) {
        fileService.createThumbnailFromUrl(bookId, url);
@@ -190,24 +263,83 @@ public class BookMetadataService {
        }
    }

    private record BookRegenerationInfo(Long id, String title, BookFileType bookType) {}

    public void regenerateCoversForBooks(Set<Long> bookIds) {
        List<BookRegenerationInfo> unlockedBooks = getUnlockedBookRegenerationInfos(bookIds);
        SecurityContextVirtualThread.runWithSecurityContext(() ->
                processBulkCoverRegeneration(unlockedBooks));
    }

    private List<BookRegenerationInfo> getUnlockedBookRegenerationInfos(Set<Long> bookIds) {
        return bookQueryService.findAllWithMetadataByIds(bookIds).stream()
                .filter(book -> !isCoverLocked(book))
                .map(book -> new BookRegenerationInfo(book.getId(), book.getMetadata().getTitle(), book.getBookType()))
                .toList();
    }

    private void processBulkCoverRegeneration(List<BookRegenerationInfo> books) {
        try {
            int total = books.size();
            notificationService.sendMessage(Topic.LOG, LogNotification.info("Started regenerating covers for " + total + " selected book(s)"));

            int current = 1;
            for (BookRegenerationInfo bookInfo : books) {
                try {
                    String progress = "(" + current + "/" + total + ") ";
                    notificationService.sendMessage(Topic.LOG, LogNotification.info(progress + "Regenerating cover for: " + bookInfo.title()));
                    regenerateCoverForBookId(bookInfo);
                    log.info("{}Successfully regenerated cover for book ID {} ({})", progress, bookInfo.id(), bookInfo.title());
                } catch (Exception e) {
                    log.error("Failed to regenerate cover for book ID {}: {}", bookInfo.id(), e.getMessage(), e);
                }
                pauseAfterBatchIfNeeded(current, total);
                current++;
            }
            notificationService.sendMessage(Topic.LOG, LogNotification.info("Finished regenerating covers for selected books"));
        } catch (Exception e) {
            log.error("Error during cover regeneration: {}", e.getMessage(), e);
            notificationService.sendMessage(Topic.LOG, LogNotification.error("Error occurred during cover regeneration"));
        }
    }

    private void pauseAfterBatchIfNeeded(int current, int total) {
        if (current % BATCH_SIZE == 0 && current < total) {
            try {
                log.info("Processed {} items, pausing briefly before next batch...", current);
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                log.warn("Batch pause interrupted");
            }
        }
    }
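With BATCH_SIZE = 100, pauseAfterBatchIfNeeded sleeps for one second after every hundredth item unless it is the last one. A small sketch of the boundary behavior, using a hypothetical predicate that mirrors the condition above:

// pauses(current, total) mirrors `current % BATCH_SIZE == 0 && current < total` with BATCH_SIZE = 100.
static boolean pauses(int current, int total) {
    return current % 100 == 0 && current < total;
}
// pauses(100, 250) == true   -> pause after the first hundred of 250 books
// pauses(200, 250) == true   -> pause after the second hundred
// pauses(200, 200) == false  -> no needless pause after the final batch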
    private void regenerateCoverForBookId(BookRegenerationInfo bookInfo) {
        bookRepository.findById(bookInfo.id()).ifPresent(book -> {
            BookFileProcessor processor = processorRegistry.getProcessorOrThrow(bookInfo.bookType());
            processor.generateCover(book);
        });
    }

    public void regenerateCovers() {
        SecurityContextVirtualThread.runWithSecurityContext(() -> {
            try {
                List<BookEntity> books = bookQueryService.getAllFullBookEntities().stream()
                        .filter(book -> book.getMetadata().getCoverLocked() == null || !book.getMetadata().getCoverLocked())
                        .filter(book -> !isCoverLocked(book))
                        .toList();
                int total = books.size();
                notificationService.sendMessage(Topic.LOG, LogNotification.info("Started regenerating covers for " + total + " books"));

                int[] current = {1};
                int current = 1;
                for (BookEntity book : books) {
                    try {
                        String progress = "(" + current[0] + "/" + total + ") ";
                        String progress = "(" + current + "/" + total + ") ";
                        regenerateCoverForBook(book, progress);
                    } catch (Exception e) {
                        log.error("Failed to regenerate cover for book ID {}: {}", book.getId(), e.getMessage());
                        log.error("Failed to regenerate cover for book ID {}: {}", book.getId(), e.getMessage(), e);
                    }
                    current[0]++;
                    current++;
                }
                notificationService.sendMessage(Topic.LOG, LogNotification.info("Finished regenerating covers"));
            } catch (Exception e) {
@@ -219,8 +351,7 @@ public class BookMetadataService {

    private void regenerateCoverForBook(BookEntity book, String progress) {
        String title = book.getMetadata().getTitle();
        String message = progress + "Regenerating cover for: " + title;
        notificationService.sendMessage(Topic.LOG, LogNotification.info(message));
        notificationService.sendMessage(Topic.LOG, LogNotification.info(progress + "Regenerating cover for: " + title));

        BookFileProcessor processor = processorRegistry.getProcessorOrThrow(book.getBookType());
        processor.generateCover(book);
@@ -160,6 +160,7 @@ public class BookMetadataUpdater {
        handleFieldUpdate(e.getGoodreadsIdLocked(), clear.isGoodreadsId(), m.getGoodreadsId(), v -> e.setGoodreadsId(nullIfBlank(v)), e::getGoodreadsId, replaceMode);
        handleFieldUpdate(e.getComicvineIdLocked(), clear.isComicvineId(), m.getComicvineId(), v -> e.setComicvineId(nullIfBlank(v)), e::getComicvineId, replaceMode);
        handleFieldUpdate(e.getHardcoverIdLocked(), clear.isHardcoverId(), m.getHardcoverId(), v -> e.setHardcoverId(nullIfBlank(v)), e::getHardcoverId, replaceMode);
        handleFieldUpdate(e.getHardcoverBookIdLocked(), clear.isHardcoverBookId(), m.getHardcoverBookId(), e::setHardcoverBookId, e::getHardcoverBookId, replaceMode);
        handleFieldUpdate(e.getGoogleIdLocked(), clear.isGoogleId(), m.getGoogleId(), v -> e.setGoogleId(nullIfBlank(v)), e::getGoogleId, replaceMode);
        handleFieldUpdate(e.getPageCountLocked(), clear.isPageCount(), m.getPageCount(), e::setPageCount, e::getPageCount, replaceMode);
        handleFieldUpdate(e.getLanguageLocked(), clear.isLanguage(), m.getLanguage(), v -> e.setLanguage(nullIfBlank(v)), e::getLanguage, replaceMode);
@@ -356,7 +357,6 @@ public class BookMetadataUpdater {
        if (!set) return;
        if (!StringUtils.hasText(m.getThumbnailUrl()) || isLocalOrPrivateUrl(m.getThumbnailUrl())) return;
        fileService.createThumbnailFromUrl(bookId, m.getThumbnailUrl());
        e.setCoverUpdatedOn(Instant.now());
    }

    private void updateLocks(BookMetadata m, BookMetadataEntity e) {
@@ -375,6 +375,7 @@ public class BookMetadataUpdater {
        Pair.of(m.getGoodreadsIdLocked(), e::setGoodreadsIdLocked),
        Pair.of(m.getComicvineIdLocked(), e::setComicvineIdLocked),
        Pair.of(m.getHardcoverIdLocked(), e::setHardcoverIdLocked),
        Pair.of(m.getHardcoverBookIdLocked(), e::setHardcoverBookIdLocked),
        Pair.of(m.getGoogleIdLocked(), e::setGoogleIdLocked),
        Pair.of(m.getPageCountLocked(), e::setPageCountLocked),
        Pair.of(m.getLanguageLocked(), e::setLanguageLocked),

@@ -482,6 +482,7 @@ public class MetadataRefreshService {
        if (enabledFields.isHardcoverId()) {
            if (metadataMap.containsKey(Hardcover)) {
                metadata.setHardcoverId(metadataMap.get(Hardcover).getHardcoverId());
                metadata.setHardcoverBookId(metadataMap.get(Hardcover).getHardcoverBookId());
            }
        }
        if (enabledFields.isGoogleId()) {
@@ -0,0 +1,367 @@
package com.adityachandel.booklore.service.metadata.extractor;

import com.adityachandel.booklore.model.dto.BookMetadata;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.time.LocalDate;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;

@Slf4j
@Component
public class Fb2MetadataExtractor implements FileMetadataExtractor {

    private static final String FB2_NAMESPACE = "http://www.gribuser.ru/xml/fictionbook/2.0";
    private static final Pattern YEAR_PATTERN = Pattern.compile("\\d{4}");
    private static final Pattern ISBN_PATTERN = Pattern.compile("\\d{9}[\\dXx]");
    private static final Pattern KEYWORD_SEPARATOR_PATTERN = Pattern.compile("[,;]");
    private static final Pattern ISBN_CLEANER_PATTERN = Pattern.compile("[^0-9Xx]");
    private static final Pattern ISO_DATE_PATTERN = Pattern.compile("\\d{4}-\\d{2}-\\d{2}");

    @Override
    public byte[] extractCover(File file) {
        try (InputStream inputStream = getInputStream(file)) {
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setNamespaceAware(true);
            dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            DocumentBuilder builder = dbf.newDocumentBuilder();
            Document doc = builder.parse(inputStream);

            // Look for cover image in binary elements
            NodeList binaries = doc.getElementsByTagNameNS(FB2_NAMESPACE, "binary");
            for (int i = 0; i < binaries.getLength(); i++) {
                Element binary = (Element) binaries.item(i);
                String id = binary.getAttribute("id");

                if (id != null && id.toLowerCase().contains("cover")) {
                    String contentType = binary.getAttribute("content-type");
                    if (contentType != null && contentType.startsWith("image/")) {
                        String base64Data = binary.getTextContent().trim();
                        return Base64.getDecoder().decode(base64Data);
                    }
                }
            }

            // If no cover found by name, try to find the first referenced image in title-info
            Element titleInfo = getFirstElementByTagNameNS(doc, FB2_NAMESPACE, "title-info");
            if (titleInfo != null) {
                NodeList coverPages = titleInfo.getElementsByTagNameNS(FB2_NAMESPACE, "coverpage");
                if (coverPages.getLength() > 0) {
                    Element coverPage = (Element) coverPages.item(0);
                    NodeList images = coverPage.getElementsByTagNameNS(FB2_NAMESPACE, "image");
                    if (images.getLength() > 0) {
                        Element image = (Element) images.item(0);
                        String href = image.getAttributeNS("http://www.w3.org/1999/xlink", "href");
                        if (href != null && href.startsWith("#")) {
                            String imageId = href.substring(1);
                            // Find the binary with this ID
                            for (int i = 0; i < binaries.getLength(); i++) {
                                Element binary = (Element) binaries.item(i);
                                if (imageId.equals(binary.getAttribute("id"))) {
                                    String base64Data = binary.getTextContent().trim();
                                    return Base64.getDecoder().decode(base64Data);
                                }
                            }
                        }
                    }
                }
            }

            return null;
        } catch (Exception e) {
            log.warn("Failed to extract cover from FB2: {}", file.getName(), e);
            return null;
        }
    }

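For reference, the structures extractCover walks look roughly like the fragment below: a <coverpage> image reference inside title-info that resolves, via an xlink href, to a base64-encoded <binary> element. Identifiers and payload are shortened for illustration:

// Minimal FB2 skeleton matching the lookups above (base64 payload truncated):
String sampleFb2 = """
        <FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0"
                     xmlns:l="http://www.w3.org/1999/xlink">
          <description>
            <title-info>
              <coverpage><image l:href="#cover.jpg"/></coverpage>
            </title-info>
          </description>
          <binary id="cover.jpg" content-type="image/jpeg">/9j/4AAQSkZJRg...</binary>
        </FictionBook>
        """;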
@Override
|
||||
public BookMetadata extractMetadata(File file) {
|
||||
try (InputStream inputStream = getInputStream(file)) {
|
||||
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||
dbf.setNamespaceAware(true);
|
||||
dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
|
||||
DocumentBuilder builder = dbf.newDocumentBuilder();
|
||||
Document doc = builder.parse(inputStream);
|
||||
|
||||
BookMetadata.BookMetadataBuilder metadataBuilder = BookMetadata.builder();
|
||||
Set<String> authors = new HashSet<>();
|
||||
Set<String> categories = new HashSet<>();
|
||||
|
||||
// Extract title-info (main metadata section)
|
||||
Element titleInfo = getFirstElementByTagNameNS(doc, FB2_NAMESPACE, "title-info");
|
||||
if (titleInfo != null) {
|
||||
extractTitleInfo(titleInfo, metadataBuilder, authors, categories);
|
||||
}
|
||||
|
||||
// Extract publish-info (publisher, year, ISBN)
|
||||
Element publishInfo = getFirstElementByTagNameNS(doc, FB2_NAMESPACE, "publish-info");
|
||||
if (publishInfo != null) {
|
||||
extractPublishInfo(publishInfo, metadataBuilder);
|
||||
}
|
||||
|
||||
// Extract document-info (optional metadata)
|
||||
Element documentInfo = getFirstElementByTagNameNS(doc, FB2_NAMESPACE, "document-info");
|
||||
if (documentInfo != null) {
|
||||
extractDocumentInfo(documentInfo, metadataBuilder);
|
||||
}
|
||||
|
||||
metadataBuilder.authors(authors);
|
||||
metadataBuilder.categories(categories);
|
||||
|
||||
return metadataBuilder.build();
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to extract metadata from FB2: {}", file.getName(), e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private void extractTitleInfo(Element titleInfo, BookMetadata.BookMetadataBuilder builder,
|
||||
Set<String> authors, Set<String> categories) {
|
||||
// Extract genres (categories)
|
||||
NodeList genres = titleInfo.getElementsByTagNameNS(FB2_NAMESPACE, "genre");
|
||||
for (int i = 0; i < genres.getLength(); i++) {
|
||||
String genre = genres.item(i).getTextContent().trim();
|
||||
if (StringUtils.isNotBlank(genre)) {
|
||||
categories.add(genre);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract authors
|
||||
NodeList authorNodes = titleInfo.getElementsByTagNameNS(FB2_NAMESPACE, "author");
|
||||
for (int i = 0; i < authorNodes.getLength(); i++) {
|
||||
Element author = (Element) authorNodes.item(i);
|
||||
String authorName = extractPersonName(author);
|
||||
if (StringUtils.isNotBlank(authorName)) {
|
||||
authors.add(authorName);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract book title
|
||||
Element bookTitle = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "book-title");
|
||||
if (bookTitle != null) {
|
||||
builder.title(bookTitle.getTextContent().trim());
|
||||
}
|
||||
|
||||
// Extract annotation (description)
|
||||
Element annotation = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "annotation");
|
||||
if (annotation != null) {
|
||||
String description = extractTextFromElement(annotation);
|
||||
if (StringUtils.isNotBlank(description)) {
|
||||
builder.description(description);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract keywords (additional categories/tags)
|
||||
Element keywords = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "keywords");
|
||||
if (keywords != null) {
|
||||
String keywordsText = keywords.getTextContent().trim();
|
||||
if (StringUtils.isNotBlank(keywordsText)) {
|
||||
for (String keyword : KEYWORD_SEPARATOR_PATTERN.split(keywordsText)) {
|
||||
String trimmed = keyword.trim();
|
||||
if (StringUtils.isNotBlank(trimmed)) {
|
||||
categories.add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract date
|
||||
        Element date = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "date");
        if (date != null) {
            String dateValue = date.getAttribute("value");
            if (StringUtils.isBlank(dateValue)) {
                dateValue = date.getTextContent().trim();
            }
            LocalDate publishedDate = parseDate(dateValue);
            if (publishedDate != null) {
                builder.publishedDate(publishedDate);
            }
        }

        // Extract language
        Element lang = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "lang");
        if (lang != null) {
            builder.language(lang.getTextContent().trim());
        }

        // Extract sequence (series information)
        Element sequence = getFirstElementByTagNameNS(titleInfo, FB2_NAMESPACE, "sequence");
        if (sequence != null) {
            String seriesName = sequence.getAttribute("name");
            if (StringUtils.isNotBlank(seriesName)) {
                builder.seriesName(seriesName.trim());
            }
            String seriesNumber = sequence.getAttribute("number");
            if (StringUtils.isNotBlank(seriesNumber)) {
                try {
                    builder.seriesNumber(Float.parseFloat(seriesNumber));
                } catch (NumberFormatException e) {
                    log.debug("Failed to parse series number: {}", seriesNumber);
                }
            }
        }
    }

    private void extractPublishInfo(Element publishInfo, BookMetadata.BookMetadataBuilder builder) {
        // Extract publisher
        Element publisher = getFirstElementByTagNameNS(publishInfo, FB2_NAMESPACE, "publisher");
        if (publisher != null) {
            builder.publisher(publisher.getTextContent().trim());
        }

        // Extract publication year
        Element year = getFirstElementByTagNameNS(publishInfo, FB2_NAMESPACE, "year");
        if (year != null) {
            String yearText = year.getTextContent().trim();
            Matcher matcher = YEAR_PATTERN.matcher(yearText);
            if (matcher.find()) {
                try {
                    int yearValue = Integer.parseInt(matcher.group());
                    builder.publishedDate(LocalDate.of(yearValue, 1, 1));
                } catch (NumberFormatException e) {
                    log.debug("Failed to parse year: {}", yearText);
                }
            }
        }

        // Extract ISBN
        Element isbn = getFirstElementByTagNameNS(publishInfo, FB2_NAMESPACE, "isbn");
        if (isbn != null) {
            String isbnText = ISBN_CLEANER_PATTERN.matcher(isbn.getTextContent().trim()).replaceAll("");
            if (isbnText.length() == 13) {
                builder.isbn13(isbnText);
            } else if (isbnText.length() == 10) {
                builder.isbn10(isbnText);
            } else if (ISBN_PATTERN.matcher(isbnText).find()) {
                // Extract the first valid ISBN pattern found
                Matcher matcher = ISBN_PATTERN.matcher(isbnText);
                if (matcher.find()) {
                    builder.isbn10(matcher.group());
                }
            }
        }
    }

    private void extractDocumentInfo(Element documentInfo, BookMetadata.BookMetadataBuilder builder) {
        // Extract document ID (can be used as an identifier)
        Element id = getFirstElementByTagNameNS(documentInfo, FB2_NAMESPACE, "id");
        if (id != null) {
            // Could potentially map this to a custom identifier field if needed
            log.debug("FB2 document ID: {}", id.getTextContent().trim());
        }
    }

    private String extractPersonName(Element personElement) {
        Element firstName = getFirstElementByTagNameNS(personElement, FB2_NAMESPACE, "first-name");
        Element middleName = getFirstElementByTagNameNS(personElement, FB2_NAMESPACE, "middle-name");
        Element lastName = getFirstElementByTagNameNS(personElement, FB2_NAMESPACE, "last-name");
        Element nickname = getFirstElementByTagNameNS(personElement, FB2_NAMESPACE, "nickname");

        StringBuilder name = new StringBuilder(64);

        if (firstName != null) {
            name.append(firstName.getTextContent().trim());
        }
        if (middleName != null) {
            if (!name.isEmpty()) name.append(" ");
            name.append(middleName.getTextContent().trim());
        }
        if (lastName != null) {
            if (!name.isEmpty()) name.append(" ");
            name.append(lastName.getTextContent().trim());
        }

        // If no name parts found, try nickname
        if (name.isEmpty() && nickname != null) {
            name.append(nickname.getTextContent().trim());
        }

        return name.toString();
    }

    private String extractTextFromElement(Element element) {
        StringBuilder text = new StringBuilder();
        NodeList children = element.getChildNodes();

        for (int i = 0; i < children.getLength(); i++) {
            Node child = children.item(i);
            if (child.getNodeType() == Node.TEXT_NODE) {
                text.append(child.getTextContent().trim()).append(" ");
            } else if (child.getNodeType() == Node.ELEMENT_NODE) {
                Element childElement = (Element) child;
                if ("p".equals(childElement.getLocalName())) {
                    text.append(childElement.getTextContent().trim()).append("\n\n");
                } else {
                    text.append(extractTextFromElement(childElement));
                }
            }
        }

        return text.toString().trim();
    }

    private LocalDate parseDate(String dateString) {
        if (StringUtils.isBlank(dateString)) {
            return null;
        }

        try {
            // Try parsing ISO date format (YYYY-MM-DD)
            if (ISO_DATE_PATTERN.matcher(dateString).matches()) {
                return LocalDate.parse(dateString);
            }

            // Try extracting year only
            Matcher matcher = YEAR_PATTERN.matcher(dateString);
            if (matcher.find()) {
                int year = Integer.parseInt(matcher.group());
                return LocalDate.of(year, 1, 1);
            }
        } catch (Exception e) {
            log.debug("Failed to parse date: {}", dateString, e);
        }

        return null;
    }

    private Element getFirstElementByTagNameNS(Node parent, String namespace, String localName) {
        NodeList nodes;
        if (parent instanceof Document document) {
            nodes = document.getElementsByTagNameNS(namespace, localName);
        } else if (parent instanceof Element element) {
            nodes = element.getElementsByTagNameNS(namespace, localName);
        } else {
            return null;
        }
        return nodes.getLength() > 0 ? (Element) nodes.item(0) : null;
    }

    private InputStream getInputStream(File file) throws Exception {
        FileInputStream fis = new FileInputStream(file);
        try {
            if (file.getName().toLowerCase().endsWith(".gz")) {
                return new GZIPInputStream(fis);
            }
            return fis;
        } catch (Exception e) {
            fis.close();
            throw e;
        }
    }
}
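For reference, a minimal sketch of the two-step fallback in parseDate above: full ISO dates parse directly, otherwise a bare year is extracted and pinned to January 1st. The two patterns here are assumed stand-ins for the extractor's ISO_DATE_PATTERN and YEAR_PATTERN constants, which are defined earlier in the class and not visible in this hunk.

import java.time.LocalDate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class ParseDateSketch {
    // Assumed equivalents of the class's ISO_DATE_PATTERN and YEAR_PATTERN.
    static final Pattern ISO = Pattern.compile("\\d{4}-\\d{2}-\\d{2}");
    static final Pattern YEAR = Pattern.compile("\\d{4}");

    static LocalDate parse(String s) {
        if (ISO.matcher(s).matches()) return LocalDate.parse(s);      // full ISO date
        Matcher m = YEAR.matcher(s);
        if (m.find()) return LocalDate.of(Integer.parseInt(m.group()), 1, 1); // year only -> Jan 1
        return null;
    }

    public static void main(String[] args) {
        System.out.println(parse("2003-05-17")); // 2003-05-17
        System.out.println(parse("© 2003"));     // 2003-01-01
        System.out.println(parse("unknown"));    // null
    }
}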
@@ -15,12 +15,14 @@ public class MetadataExtractorFactory {
    private final EpubMetadataExtractor epubMetadataExtractor;
    private final PdfMetadataExtractor pdfMetadataExtractor;
    private final CbxMetadataExtractor cbxMetadataExtractor;
    private final Fb2MetadataExtractor fb2MetadataExtractor;

    public BookMetadata extractMetadata(BookFileType bookFileType, File file) {
        return switch (bookFileType) {
            case PDF -> pdfMetadataExtractor.extractMetadata(file);
            case EPUB -> epubMetadataExtractor.extractMetadata(file);
            case CBX -> cbxMetadataExtractor.extractMetadata(file);
            case FB2 -> fb2MetadataExtractor.extractMetadata(file);
        };
    }

@@ -29,6 +31,7 @@ public class MetadataExtractorFactory {
            case PDF -> pdfMetadataExtractor.extractMetadata(file);
            case EPUB -> epubMetadataExtractor.extractMetadata(file);
            case CBZ, CBR, CB7 -> cbxMetadataExtractor.extractMetadata(file);
            case FB2 -> fb2MetadataExtractor.extractMetadata(file);
        };
    }

@@ -37,6 +40,7 @@ public class MetadataExtractorFactory {
            case EPUB -> epubMetadataExtractor.extractCover(file);
            case PDF -> pdfMetadataExtractor.extractCover(file);
            case CBZ, CBR, CB7 -> cbxMetadataExtractor.extractCover(file);
            case FB2 -> fb2MetadataExtractor.extractCover(file);
        };
    }
}
@@ -247,7 +247,7 @@ public class AmazonBookParser implements BookParser {
            return url;
        }

        StringBuilder searchTerm = new StringBuilder();
        StringBuilder searchTerm = new StringBuilder(256);

        String title = fetchMetadataRequest.getTitle();
        if (title != null && !title.isEmpty()) {

@@ -77,7 +77,7 @@ public class ComicvineBookParser implements BookParser {

        HttpRequest request = HttpRequest.newBuilder()
                .uri(uri)
                .header("User-Agent", "Booklore/1.0")
                .header("User-Agent", "BookLore/1.0 (Book and Comic Metadata Fetcher; +https://github.com/booklore-app/booklore)")
                .GET()
                .build();

@@ -300,7 +300,7 @@ public class DoubanBookParser implements BookParser {
    }

    private String buildQueryUrl(FetchMetadataRequest fetchMetadataRequest, Book book) {
        StringBuilder searchTerm = new StringBuilder();
        StringBuilder searchTerm = new StringBuilder(256);

        String title = fetchMetadataRequest.getTitle();
        if (title != null && !title.isEmpty()) {

@@ -78,6 +78,14 @@ public class HardcoverParser implements BookParser {
                .map(doc -> {
                    BookMetadata metadata = new BookMetadata();
                    metadata.setHardcoverId(doc.getSlug());
                    // Set numeric book ID for API operations
                    if (doc.getId() != null) {
                        try {
                            metadata.setHardcoverBookId(Integer.parseInt(doc.getId()));
                        } catch (NumberFormatException e) {
                            log.debug("Could not parse Hardcover book ID: {}", doc.getId());
                        }
                    }
                    metadata.setTitle(doc.getTitle());
                    metadata.setSubtitle(doc.getSubtitle());
                    metadata.setDescription(doc.getDescription());
@@ -7,11 +7,12 @@ import com.adityachandel.booklore.model.dto.*;
import com.adityachandel.booklore.model.entity.BookEntity;
import com.adityachandel.booklore.model.entity.BookLoreUserEntity;
import com.adityachandel.booklore.model.entity.ShelfEntity;
import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import com.adityachandel.booklore.repository.BookOpdsRepository;
import com.adityachandel.booklore.repository.ShelfRepository;
import com.adityachandel.booklore.repository.UserRepository;
import com.adityachandel.booklore.service.library.LibraryService;
import com.adityachandel.booklore.util.BookUtils;
import com.adityachandel.booklore.service.library.LibraryService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;

@@ -202,7 +203,7 @@ public class OpdsBookService {
        if (idPage.isEmpty()) {
            return new PageImpl<>(List.of(), pageable, 0);
        }
        List<BookEntity> books = bookOpdsRepository.findAllWithMetadataByIds(idPage.getContent());
        List<BookEntity> books = bookOpdsRepository.findAllWithFullMetadataByIds(idPage.getContent());
        return createPageFromEntities(books, idPage, pageable);
    }

@@ -215,7 +216,61 @@ public class OpdsBookService {
            return new PageImpl<>(List.of(), pageable, 0);
        }

        List<BookEntity> books = bookOpdsRepository.findAllWithMetadataByIdsAndLibraryIds(idPage.getContent(), libraryIds);
        List<BookEntity> books = bookOpdsRepository.findAllWithFullMetadataByIdsAndLibraryIds(idPage.getContent(), libraryIds);
        Page<Book> booksPage = createPageFromEntities(books, idPage, pageable);
        return applyBookFilters(booksPage, userId);
    }

    public List<String> getDistinctSeries(Long userId) {
        if (userId == null) {
            return List.of();
        }

        BookLoreUserEntity entity = userRepository.findById(userId)
                .orElseThrow(() -> ApiError.USER_NOT_FOUND.createException(userId));
        BookLoreUser user = bookLoreUserTransformer.toDTO(entity);

        if (user.getPermissions().isAdmin()) {
            return bookOpdsRepository.findDistinctSeries();
        }

        Set<Long> libraryIds = user.getAssignedLibraries().stream()
                .map(Library::getId)
                .collect(Collectors.toSet());

        return bookOpdsRepository.findDistinctSeriesByLibraryIds(libraryIds);
    }

    public Page<Book> getBooksBySeriesName(Long userId, String seriesName, int page, int size) {
        if (userId == null) {
            throw ApiError.FORBIDDEN.createException("Authentication required");
        }

        BookLoreUserEntity entity = userRepository.findById(userId)
                .orElseThrow(() -> ApiError.USER_NOT_FOUND.createException(userId));
        BookLoreUser user = bookLoreUserTransformer.toDTO(entity);

        Pageable pageable = PageRequest.of(Math.max(page, 0), size);

        if (user.getPermissions().isAdmin()) {
            Page<Long> idPage = bookOpdsRepository.findBookIdsBySeriesName(seriesName, pageable);
            if (idPage.isEmpty()) {
                return new PageImpl<>(List.of(), pageable, 0);
            }
            List<BookEntity> books = bookOpdsRepository.findAllWithFullMetadataByIds(idPage.getContent());
            return createPageFromEntities(books, idPage, pageable);
        }

        Set<Long> libraryIds = user.getAssignedLibraries().stream()
                .map(Library::getId)
                .collect(Collectors.toSet());

        Page<Long> idPage = bookOpdsRepository.findBookIdsBySeriesNameAndLibraryIds(seriesName, libraryIds, pageable);
        if (idPage.isEmpty()) {
            return new PageImpl<>(List.of(), pageable, 0);
        }

        List<BookEntity> books = bookOpdsRepository.findAllWithFullMetadataByIdsAndLibraryIds(idPage.getContent(), libraryIds);
        Page<Book> booksPage = createPageFromEntities(books, idPage, pageable);
        return applyBookFilters(booksPage, userId);
    }

@@ -346,4 +401,170 @@ public class OpdsBookService {
        }
        return dto;
    }

    public Page<Book> applySortOrder(Page<Book> booksPage, OpdsSortOrder sortOrder) {
        if (sortOrder == null || sortOrder == OpdsSortOrder.RECENT) {
            return booksPage; // Already sorted by addedOn DESC from repository
        }

        List<Book> sortedBooks = new ArrayList<>(booksPage.getContent());

        switch (sortOrder) {
            case TITLE_ASC -> sortedBooks.sort((b1, b2) -> {
                String title1 = b1.getMetadata() != null && b1.getMetadata().getTitle() != null
                        ? b1.getMetadata().getTitle() : "";
                String title2 = b2.getMetadata() != null && b2.getMetadata().getTitle() != null
                        ? b2.getMetadata().getTitle() : "";
                return title1.compareToIgnoreCase(title2);
            });
            case TITLE_DESC -> sortedBooks.sort((b1, b2) -> {
                String title1 = b1.getMetadata() != null && b1.getMetadata().getTitle() != null
                        ? b1.getMetadata().getTitle() : "";
                String title2 = b2.getMetadata() != null && b2.getMetadata().getTitle() != null
                        ? b2.getMetadata().getTitle() : "";
                return title2.compareToIgnoreCase(title1);
            });
            case AUTHOR_ASC -> sortedBooks.sort((b1, b2) -> {
                String author1 = getFirstAuthor(b1);
                String author2 = getFirstAuthor(b2);
                return author1.compareToIgnoreCase(author2);
            });
            case AUTHOR_DESC -> sortedBooks.sort((b1, b2) -> {
                String author1 = getFirstAuthor(b1);
                String author2 = getFirstAuthor(b2);
                return author2.compareToIgnoreCase(author1);
            });
            case SERIES_ASC -> sortedBooks.sort((b1, b2) -> {
                String series1 = getSeriesName(b1);
                String series2 = getSeriesName(b2);
                boolean hasSeries1 = !series1.isEmpty();
                boolean hasSeries2 = !series2.isEmpty();

                // Books without series come after books with series
                if (!hasSeries1 && !hasSeries2) {
                    // Both have no series, sort by addedOn descending
                    return compareByAddedOn(b2, b1);
                }
                if (!hasSeries1) return 1;
                if (!hasSeries2) return -1;

                // Both have series, sort by series name then number
                int seriesComp = series1.compareToIgnoreCase(series2);
                if (seriesComp != 0) return seriesComp;
                return Float.compare(getSeriesNumber(b1), getSeriesNumber(b2));
            });
            case SERIES_DESC -> sortedBooks.sort((b1, b2) -> {
                String series1 = getSeriesName(b1);
                String series2 = getSeriesName(b2);
                boolean hasSeries1 = !series1.isEmpty();
                boolean hasSeries2 = !series2.isEmpty();

                // Books without series come after books with series
                if (!hasSeries1 && !hasSeries2) {
                    // Both have no series, sort by addedOn descending
                    return compareByAddedOn(b2, b1);
                }
                if (!hasSeries1) return 1;
                if (!hasSeries2) return -1;

                // Both have series, sort by series name then number
                int seriesComp = series2.compareToIgnoreCase(series1);
                if (seriesComp != 0) return seriesComp;
                return Float.compare(getSeriesNumber(b2), getSeriesNumber(b1));
            });
            case RATING_ASC -> sortedBooks.sort((b1, b2) -> {
                Float rating1 = calculateRating(b1);
                Float rating2 = calculateRating(b2);
                // Books with no rating go to the end
                if (rating1 == null && rating2 == null) {
                    // Both have no rating, fall back to addedOn descending
                    return compareByAddedOn(b2, b1);
                }
                if (rating1 == null) return 1;
                if (rating2 == null) return -1;
                int ratingComp = Float.compare(rating1, rating2); // Ascending order (lowest first)
                if (ratingComp != 0) return ratingComp;
                // Same rating, fall back to addedOn descending
                return compareByAddedOn(b2, b1);
            });
            case RATING_DESC -> sortedBooks.sort((b1, b2) -> {
                Float rating1 = calculateRating(b1);
                Float rating2 = calculateRating(b2);
                // Books with no rating go to the end
                if (rating1 == null && rating2 == null) {
                    // Both have no rating, fall back to addedOn descending
                    return compareByAddedOn(b2, b1);
                }
                if (rating1 == null) return 1;
                if (rating2 == null) return -1;
                int ratingComp = Float.compare(rating2, rating1); // Descending order (highest first)
                if (ratingComp != 0) return ratingComp;
                // Same rating, fall back to addedOn descending
                return compareByAddedOn(b2, b1);
            });
        }

        return new PageImpl<>(sortedBooks, booksPage.getPageable(), booksPage.getTotalElements());
    }

    private String getFirstAuthor(Book book) {
        if (book.getMetadata() != null && book.getMetadata().getAuthors() != null
                && !book.getMetadata().getAuthors().isEmpty()) {
            return book.getMetadata().getAuthors().iterator().next();
        }
        return "";
    }

    private String getSeriesName(Book book) {
        if (book.getMetadata() != null && book.getMetadata().getSeriesName() != null) {
            return book.getMetadata().getSeriesName();
        }
        return "";
    }

    private Float getSeriesNumber(Book book) {
        if (book.getMetadata() != null && book.getMetadata().getSeriesNumber() != null) {
            return book.getMetadata().getSeriesNumber();
        }
        return Float.MAX_VALUE;
    }

    private int compareByAddedOn(Book b1, Book b2) {
        if (b1.getAddedOn() == null && b2.getAddedOn() == null) return 0;
        if (b1.getAddedOn() == null) return 1;
        if (b2.getAddedOn() == null) return -1;
        return b1.getAddedOn().compareTo(b2.getAddedOn());
    }

    private Float calculateRating(Book book) {
        if (book.getMetadata() == null) {
            return null;
        }

        Double hardcoverRating = book.getMetadata().getHardcoverRating();
        Double amazonRating = book.getMetadata().getAmazonRating();
        Double goodreadsRating = book.getMetadata().getGoodreadsRating();

        double sum = 0;
        int count = 0;

        if (hardcoverRating != null && hardcoverRating > 0) {
            sum += hardcoverRating;
            count++;
        }
        if (amazonRating != null && amazonRating > 0) {
            sum += amazonRating;
            count++;
        }
        if (goodreadsRating != null && goodreadsRating > 0) {
            sum += goodreadsRating;
            count++;
        }

        if (count == 0) {
            return null;
        }

        return (float) (sum / count);
    }
}
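A compact sketch of the averaging rule in calculateRating above: only ratings that are present and greater than zero contribute, and books with no usable rating return null so the sort comparators push them to the end. The sample values are illustrative, not taken from the diff.

public class RatingSketch {
    // Mirrors calculateRating: skip null/zero sources, average the rest.
    static Float average(Double... ratings) {
        double sum = 0;
        int count = 0;
        for (Double r : ratings) {
            if (r != null && r > 0) {
                sum += r;
                count++;
            }
        }
        return count == 0 ? null : (float) (sum / count);
    }

    public static void main(String[] args) {
        System.out.println(average(4.2, 4.6, 0.0));    // 4.4 (the 0.0 rating is ignored)
        System.out.println(average(null, null, null)); // null -> such books sort last
    }
}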
@@ -5,6 +5,7 @@ import com.adityachandel.booklore.config.security.userdetails.OpdsUserDetails;
import com.adityachandel.booklore.model.dto.Book;
import com.adityachandel.booklore.model.dto.Library;
import com.adityachandel.booklore.model.dto.MagicShelf;
import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import com.adityachandel.booklore.service.MagicShelfService;
import jakarta.servlet.http.HttpServletRequest;
import lombok.RequiredArgsConstructor;

@@ -100,6 +101,16 @@ public class OpdsFeedService {
                </entry>
                """.formatted(now()));

        feed.append("""
                <entry>
                  <title>Series</title>
                  <id>urn:booklore:navigation:series</id>
                  <updated>%s</updated>
                  <link rel="subsection" href="/api/v1/opds/series" type="application/atom+xml;profile=opds-catalog;kind=navigation"/>
                  <content type="text">Browse books by series</content>
                </entry>
                """.formatted(now()));

        feed.append("""
                <entry>
                  <title>Surprise Me</title>

@@ -270,28 +281,72 @@ public class OpdsFeedService {
        return feed.toString();
    }

    public String generateSeriesNavigation(HttpServletRequest request) {
        Long userId = getUserId();
        List<String> seriesList = opdsBookService.getDistinctSeries(userId);

        var feed = new StringBuilder("""
                <?xml version="1.0" encoding="UTF-8"?>
                <feed xmlns="http://www.w3.org/2005/Atom" xmlns:opds="http://opds-spec.org/2010/catalog">
                  <id>urn:booklore:navigation:series</id>
                  <title>Series</title>
                  <updated>%s</updated>
                  <link rel="self" href="/api/v1/opds/series" type="application/atom+xml;profile=opds-catalog;kind=navigation"/>
                  <link rel="start" href="/api/v1/opds" type="application/atom+xml;profile=opds-catalog;kind=navigation"/>
                  <link rel="search" type="application/opensearchdescription+xml" title="Search" href="/api/v1/opds/search.opds"/>
                """.formatted(now()));

        for (String series : seriesList) {
            feed.append("""
                    <entry>
                      <title>%s</title>
                      <id>urn:booklore:series:%s</id>
                      <updated>%s</updated>
                      <link rel="subsection" href="%s" type="application/atom+xml;profile=opds-catalog;kind=acquisition"/>
                      <content type="text">Books in the %s series</content>
                    </entry>
                    """.formatted(
                    escapeXml(series),
                    escapeXml(series),
                    now(),
                    escapeXml("/api/v1/opds/catalog?series=" + java.net.URLEncoder.encode(series, java.nio.charset.StandardCharsets.UTF_8)),
                    escapeXml(series)
            ));
        }

        feed.append("</feed>");
        return feed.toString();
    }

    public String generateCatalogFeed(HttpServletRequest request) {
        Long libraryId = parseLongParam(request, "libraryId", null);
        Long shelfId = parseLongParam(request, "shelfId", null);
        Long magicShelfId = parseLongParam(request, "magicShelfId", null);
        String query = request.getParameter("q");
        String author = request.getParameter("author");
        String series = request.getParameter("series");
        int page = Math.max(1, parseLongParam(request, "page", 1L).intValue());
        int size = Math.min(parseLongParam(request, "size", (long) DEFAULT_PAGE_SIZE).intValue(), MAX_PAGE_SIZE);

        Long userId = getUserId();
        OpdsSortOrder sortOrder = getSortOrder();
        Page<Book> booksPage;

        if (magicShelfId != null) {
            booksPage = magicShelfBookService.getBooksByMagicShelfId(userId, magicShelfId, page - 1, size);
        } else if (author != null && !author.isBlank()) {
            booksPage = opdsBookService.getBooksByAuthorName(userId, author, page - 1, size);
        } else if (series != null && !series.isBlank()) {
            booksPage = opdsBookService.getBooksBySeriesName(userId, series, page - 1, size);
        } else {
            booksPage = opdsBookService.getBooksPage(userId, query, libraryId, shelfId, page - 1, size);
        }

        String feedTitle = determineFeedTitle(libraryId, shelfId, magicShelfId, author);
        String feedId = determineFeedId(libraryId, shelfId, magicShelfId, author);
        // Apply user's preferred sort order
        booksPage = opdsBookService.applySortOrder(booksPage, sortOrder);

        String feedTitle = determineFeedTitle(libraryId, shelfId, magicShelfId, author, series);
        String feedId = determineFeedId(libraryId, shelfId, magicShelfId, author, series);

        var feed = new StringBuilder("""
                <?xml version="1.0" encoding="UTF-8"?>

@@ -325,11 +380,15 @@ public class OpdsFeedService {

    public String generateRecentFeed(HttpServletRequest request) {
        Long userId = getUserId();
        OpdsSortOrder sortOrder = getSortOrder();
        int page = Math.max(1, parseLongParam(request, "page", 1L).intValue());
        int size = Math.min(parseLongParam(request, "size", (long) DEFAULT_PAGE_SIZE).intValue(), MAX_PAGE_SIZE);

        Page<Book> booksPage = opdsBookService.getRecentBooksPage(userId, page - 1, size);

        // Apply user's preferred sort order
        booksPage = opdsBookService.applySortOrder(booksPage, sortOrder);

        var feed = new StringBuilder("""
                <?xml version="1.0" encoding="UTF-8"?>
                <feed xmlns="http://www.w3.org/2005/Atom" xmlns:dc="http://purl.org/dc/terms/" xmlns:opds="http://opds-spec.org/2010/catalog" xmlns:opensearch="http://a9.com/-/spec/opensearch/1.1/">

@@ -502,7 +561,7 @@ public class OpdsFeedService {
        }
    }

    private String determineFeedTitle(Long libraryId, Long shelfId, Long magicShelfId, String author) {
    private String determineFeedTitle(Long libraryId, Long shelfId, Long magicShelfId, String author, String series) {
        if (magicShelfId != null) {
            return magicShelfBookService.getMagicShelfName(magicShelfId);
        }

@@ -515,10 +574,13 @@ public class OpdsFeedService {
        if (author != null && !author.isBlank()) {
            return "Books by " + author;
        }
        if (series != null && !series.isBlank()) {
            return series + " series";
        }
        return "Booklore Catalog";
    }

    private String determineFeedId(Long libraryId, Long shelfId, Long magicShelfId, String author) {
    private String determineFeedId(Long libraryId, Long shelfId, Long magicShelfId, String author, String series) {
        if (magicShelfId != null) {
            return "urn:booklore:magic-shelf:" + magicShelfId;
        }

@@ -531,6 +593,9 @@ public class OpdsFeedService {
        if (author != null && !author.isBlank()) {
            return "urn:booklore:author:" + author;
        }
        if (series != null && !series.isBlank()) {
            return "urn:booklore:series:" + series;
        }
        return "urn:booklore:catalog";
    }

@@ -574,4 +639,11 @@ public class OpdsFeedService {
                ? details.getOpdsUserV2().getUserId()
                : null;
    }

    private OpdsSortOrder getSortOrder() {
        OpdsUserDetails details = authenticationService.getOpdsUser();
        return details != null && details.getOpdsUserV2() != null && details.getOpdsUserV2().getSortOrder() != null
                ? details.getOpdsUserV2().getSortOrder()
                : OpdsSortOrder.RECENT;
    }
}
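A sketch of the double encoding used for the series entries above: the query value is URL-encoded first, then the whole href is XML-escaped before being interpolated into the Atom entry. The escapeXml helper below is an assumed stand-in for the service's own helper (standard entity escaping); the series name is illustrative.

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class SeriesLinkSketch {
    // Assumed equivalent of the service's escapeXml helper.
    static String escapeXml(String s) {
        return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
                .replace("\"", "&quot;").replace("'", "&apos;");
    }

    public static void main(String[] args) {
        String series = "Fafhrd & Gray Mouser";
        String href = "/api/v1/opds/catalog?series=" + URLEncoder.encode(series, StandardCharsets.UTF_8);
        System.out.println(escapeXml(series)); // Fafhrd &amp; Gray Mouser  (safe for <title>)
        System.out.println(escapeXml(href));   // /api/v1/opds/catalog?series=Fafhrd+%26+Gray+Mouser
    }
}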
@@ -5,6 +5,7 @@ import com.adityachandel.booklore.mapper.OpdsUserV2Mapper;
import com.adityachandel.booklore.model.dto.BookLoreUser;
import com.adityachandel.booklore.model.dto.OpdsUserV2;
import com.adityachandel.booklore.model.dto.request.OpdsUserV2CreateRequest;
import com.adityachandel.booklore.model.dto.request.OpdsUserV2UpdateRequest;
import com.adityachandel.booklore.model.entity.BookLoreUserEntity;
import com.adityachandel.booklore.model.entity.OpdsUserV2Entity;
import com.adityachandel.booklore.repository.OpdsUserV2Repository;

@@ -45,6 +46,7 @@ public class OpdsUserV2Service {
                .user(userEntity)
                .username(request.getUsername())
                .passwordHash(passwordEncoder.encode(request.getPassword()))
                .sortOrder(request.getSortOrder() != null ? request.getSortOrder() : com.adityachandel.booklore.model.enums.OpdsSortOrder.RECENT)
                .build();

        return mapper.toDto(opdsUserV2Repository.save(opdsUserV2));

@@ -64,4 +66,17 @@ public class OpdsUserV2Service {
        }
        opdsUserV2Repository.delete(user);
    }

    public OpdsUserV2 updateOpdsUser(Long userId, OpdsUserV2UpdateRequest request) {
        BookLoreUser bookLoreUser = authenticationService.getAuthenticatedUser();
        OpdsUserV2Entity user = opdsUserV2Repository.findById(userId)
                .orElseThrow(() -> new RuntimeException("User not found with ID: " + userId));

        if (!user.getUser().getId().equals(bookLoreUser.getId())) {
            throw new AccessDeniedException("You are not allowed to update this user");
        }

        user.setSortOrder(request.sortOrder());
        return mapper.toDto(opdsUserV2Repository.save(user));
    }
}
@@ -76,6 +76,9 @@ public class BookFilePersistenceService {

    @Transactional
    public int markAllBooksUnderPathAsDeleted(long libraryPathId, String relativeFolderPath) {
        if (relativeFolderPath == null) {
            throw new IllegalArgumentException("relativeFolderPath cannot be null");
        }
        String normalizedPrefix = relativeFolderPath.endsWith("/") ? relativeFolderPath : (relativeFolderPath + "/");

        List<BookEntity> books = bookRepository.findAllByLibraryPathIdAndFileSubPathStartingWith(libraryPathId, normalizedPrefix);
@@ -12,11 +12,12 @@ public class BookUtils {
    private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+");
    private static final Pattern SPECIAL_CHARACTERS_PATTERN = Pattern.compile("[!@$%^&*_=|~`<>?/\"]");
    private static final Pattern PARENTHESES_WITH_OPTIONAL_SPACE_PATTERN = Pattern.compile("\\s?\\(.*?\\)");
    private static final Pattern DIACRITICAL_MARKS_PATTERN = Pattern.compile("\\p{InCombiningDiacriticalMarks}+");

    public static String buildSearchText(BookMetadataEntity e) {
        if (e == null) return null;

        StringBuilder sb = new StringBuilder();
        StringBuilder sb = new StringBuilder(256);
        if (e.getTitle() != null) sb.append(e.getTitle()).append(" ");
        if (e.getSubtitle() != null) sb.append(e.getSubtitle()).append(" ");
        if (e.getSeriesName() != null) sb.append(e.getSeriesName()).append(" ");

@@ -41,7 +42,7 @@ public class BookUtils {
            return null;
        }
        String s = java.text.Normalizer.normalize(term, java.text.Normalizer.Form.NFD);
        s = s.replaceAll("\\p{InCombiningDiacriticalMarks}+", "");
        s = DIACRITICAL_MARKS_PATTERN.matcher(s).replaceAll("");
        s = s.replace("ø", "o").replace("Ø", "O")
                .replace("ł", "l").replace("Ł", "L")
                .replace("æ", "ae").replace("Æ", "AE")

@@ -82,7 +83,7 @@ public class BookUtils {
        if (s.length() > 60) {
            String[] words = WHITESPACE_PATTERN.split(s);
            if (words.length > 1) {
                StringBuilder truncated = new StringBuilder();
                StringBuilder truncated = new StringBuilder(64);
                for (String word : words) {
                    if (truncated.length() + word.length() + 1 > 60) break;
                    if (!truncated.isEmpty()) truncated.append(" ");
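A minimal sketch of the NFD-plus-strip step in BookUtils above, using a pattern that mirrors the new DIACRITICAL_MARKS_PATTERN constant; the sample strings are illustrative.

import java.text.Normalizer;
import java.util.regex.Pattern;

public class NormalizeSketch {
    // Mirrors DIACRITICAL_MARKS_PATTERN from the hunk above.
    static final Pattern MARKS = Pattern.compile("\\p{InCombiningDiacriticalMarks}+");

    public static void main(String[] args) {
        // NFD splits accented letters into base letter + combining mark,
        // so removing the marks leaves plain ASCII-ish letters behind.
        String s = Normalizer.normalize("Éowyn Brontë", Normalizer.Form.NFD);
        System.out.println(MARKS.matcher(s).replaceAll("")); // Eowyn Bronte
        // Letters like ø and ł carry no combining mark, which is why the
        // original code follows up with an explicit replace("ø", "o") chain.
    }
}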
@@ -2,7 +2,10 @@ package com.adityachandel.booklore.util;

import com.adityachandel.booklore.config.AppProperties;
import com.adityachandel.booklore.exception.ApiError;
import com.adityachandel.booklore.model.dto.settings.CoverCroppingSettings;
import com.adityachandel.booklore.model.entity.BookMetadataEntity;
import com.adityachandel.booklore.repository.BookMetadataRepository;
import com.adityachandel.booklore.service.appsettings.AppSettingService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.io.ClassPathResource;

@@ -38,6 +41,12 @@ public class FileService {

    private final AppProperties appProperties;
    private final RestTemplate restTemplate;
    private final AppSettingService appSettingService;
    private final BookMetadataRepository bookMetadataRepository;

    private static final double TARGET_COVER_ASPECT_RATIO = 1.5;
    private static final int SMART_CROP_COLOR_TOLERANCE = 30;
    private static final double SMART_CROP_MARGIN_PERCENT = 0.02;

    // @formatter:off
    private static final String IMAGES_DIR = "images";

@@ -168,7 +177,7 @@ public class FileService {
    public BufferedImage downloadImageFromUrl(String imageUrl) throws IOException {
        try {
            HttpHeaders headers = new HttpHeaders();
            headers.set(HttpHeaders.USER_AGENT, "BookLore/1.0 (Metadata Fetcher)");
            headers.set(HttpHeaders.USER_AGENT, "BookLore/1.0 (Book and Comic Metadata Fetcher; +https://github.com/booklore-app/booklore)");
            headers.set(HttpHeaders.ACCEPT, "image/*");

            HttpEntity<String> entity = new HttpEntity<>(headers);

@@ -224,6 +233,27 @@ public class FileService {
        }
    }

    public void createThumbnailFromBytes(long bookId, byte[] imageBytes) {
        try {
            BufferedImage originalImage;
            try (InputStream inputStream = new java.io.ByteArrayInputStream(imageBytes)) {
                originalImage = ImageIO.read(inputStream);
            }
            if (originalImage == null) {
                throw ApiError.IMAGE_NOT_FOUND.createException();
            }
            boolean success = saveCoverImages(originalImage, bookId);
            if (!success) {
                throw ApiError.FILE_READ_ERROR.createException("Failed to save cover images");
            }
            originalImage.flush();
            log.info("Cover images created and saved from bytes for book ID: {}", bookId);
        } catch (Exception e) {
            log.error("An error occurred while creating thumbnail from bytes: {}", e.getMessage(), e);
            throw ApiError.FILE_READ_ERROR.createException(e.getMessage());
        }
    }

    public void createThumbnailFromUrl(long bookId, String imageUrl) {
        try {
            BufferedImage originalImage = downloadImageFromUrl(imageUrl);

@@ -241,6 +271,7 @@ public class FileService {

    public boolean saveCoverImages(BufferedImage coverImage, long bookId) throws IOException {
        BufferedImage rgbImage = null;
        BufferedImage cropped = null;
        BufferedImage resized = null;
        BufferedImage thumb = null;
        try {

@@ -260,6 +291,12 @@ public class FileService {
            g.dispose();
            // Note: coverImage is not flushed here - caller is responsible for its lifecycle

            cropped = applyCoverCropping(rgbImage);
            if (cropped != rgbImage) {
                rgbImage.flush();
                rgbImage = cropped;
            }

            // Resize original image if too large to prevent OOM
            double scale = Math.min(
                    (double) MAX_ORIGINAL_WIDTH / rgbImage.getWidth(),

@@ -278,13 +315,19 @@ public class FileService {
            File thumbnailFile = new File(folder, THUMBNAIL_FILENAME);
            boolean thumbnailSaved = ImageIO.write(thumb, IMAGE_FORMAT, thumbnailFile);

            if (originalSaved && thumbnailSaved) {
                bookMetadataRepository.updateCoverTimestamp(bookId, Instant.now());
            }
            return originalSaved && thumbnailSaved;
        } finally {
            // Cleanup resources created within this method
            // Note: resized may equal rgbImage after reassignment, avoid double-flush
            // Note: cropped/resized may equal rgbImage after reassignment, avoid double-flush
            if (rgbImage != null) {
                rgbImage.flush();
            }
            if (cropped != null && cropped != rgbImage) {
                cropped.flush();
            }
            if (resized != null && resized != rgbImage) {
                resized.flush();
            }

@@ -294,6 +337,110 @@ public class FileService {
        }
    }

    private BufferedImage applyCoverCropping(BufferedImage image) {
        CoverCroppingSettings settings = appSettingService.getAppSettings().getCoverCroppingSettings();
        if (settings == null) {
            return image;
        }

        int width = image.getWidth();
        int height = image.getHeight();
        double heightToWidthRatio = (double) height / width;
        double widthToHeightRatio = (double) width / height;
        double threshold = settings.getAspectRatioThreshold();
        boolean smartCrop = settings.isSmartCroppingEnabled();

        boolean isExtremelyTall = settings.isVerticalCroppingEnabled() && heightToWidthRatio > threshold;
        if (isExtremelyTall) {
            int croppedHeight = (int) (width * TARGET_COVER_ASPECT_RATIO);
            log.debug("Cropping tall image: {}x{} (ratio {}) -> {}x{}, smartCrop={}",
                    width, height, String.format("%.2f", heightToWidthRatio), width, croppedHeight, smartCrop);
            return cropFromTop(image, width, croppedHeight, smartCrop);
        }

        boolean isExtremelyWide = settings.isHorizontalCroppingEnabled() && widthToHeightRatio > threshold;
        if (isExtremelyWide) {
            int croppedWidth = (int) (height / TARGET_COVER_ASPECT_RATIO);
            log.debug("Cropping wide image: {}x{} (ratio {}) -> {}x{}, smartCrop={}",
                    width, height, String.format("%.2f", widthToHeightRatio), croppedWidth, height, smartCrop);
            return cropFromLeft(image, croppedWidth, height, smartCrop);
        }

        return image;
    }

    private BufferedImage cropFromTop(BufferedImage image, int targetWidth, int targetHeight, boolean smartCrop) {
        int startY = 0;
        if (smartCrop) {
            int contentStartY = findContentStartY(image);
            int margin = (int) (targetHeight * SMART_CROP_MARGIN_PERCENT);
            startY = Math.max(0, contentStartY - margin);

            int maxStartY = image.getHeight() - targetHeight;
            startY = Math.min(startY, maxStartY);
        }
        return image.getSubimage(0, startY, targetWidth, targetHeight);
    }

    private BufferedImage cropFromLeft(BufferedImage image, int targetWidth, int targetHeight, boolean smartCrop) {
        int startX = 0;
        if (smartCrop) {
            int contentStartX = findContentStartX(image);
            int margin = (int) (targetWidth * SMART_CROP_MARGIN_PERCENT);
            startX = Math.max(0, contentStartX - margin);

            int maxStartX = image.getWidth() - targetWidth;
            startX = Math.min(startX, maxStartX);
        }
        return image.getSubimage(startX, 0, targetWidth, targetHeight);
    }

    private int findContentStartY(BufferedImage image) {
        for (int y = 0; y < image.getHeight(); y++) {
            if (!isRowUniformColor(image, y)) {
                return y;
            }
        }
        return 0;
    }

    private int findContentStartX(BufferedImage image) {
        for (int x = 0; x < image.getWidth(); x++) {
            if (!isColumnUniformColor(image, x)) {
                return x;
            }
        }
        return 0;
    }

    private boolean isRowUniformColor(BufferedImage image, int y) {
        int firstPixel = image.getRGB(0, y);
        for (int x = 1; x < image.getWidth(); x++) {
            if (!colorsAreSimilar(firstPixel, image.getRGB(x, y))) {
                return false;
            }
        }
        return true;
    }

    private boolean isColumnUniformColor(BufferedImage image, int x) {
        int firstPixel = image.getRGB(x, 0);
        for (int y = 1; y < image.getHeight(); y++) {
            if (!colorsAreSimilar(firstPixel, image.getRGB(x, y))) {
                return false;
            }
        }
        return true;
    }

    private boolean colorsAreSimilar(int rgb1, int rgb2) {
        int r1 = (rgb1 >> 16) & 0xFF, g1 = (rgb1 >> 8) & 0xFF, b1 = rgb1 & 0xFF;
        int r2 = (rgb2 >> 16) & 0xFF, g2 = (rgb2 >> 8) & 0xFF, b2 = rgb2 & 0xFF;
        return Math.abs(r1 - r2) <= SMART_CROP_COLOR_TOLERANCE
                && Math.abs(g1 - g2) <= SMART_CROP_COLOR_TOLERANCE
                && Math.abs(b1 - b2) <= SMART_CROP_COLOR_TOLERANCE;
    }

    public static void setBookCoverPath(BookMetadataEntity bookMetadataEntity) {
        bookMetadataEntity.setCoverUpdatedOn(Instant.now());
    }
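A worked example of the aspect-ratio decision in applyCoverCropping above. The threshold 2.0 is an assumed settings value (the real one comes from CoverCroppingSettings); 1.5 is TARGET_COVER_ASPECT_RATIO from the hunk.

public class CropMathSketch {
    public static void main(String[] args) {
        double threshold = 2.0; // assumed getAspectRatioThreshold() value
        int w = 1000, h = 3000;
        if ((double) h / w > threshold) {
            // extremely tall: keep full width, crop height to width * 1.5
            System.out.println(w + "x" + (int) (w * 1.5)); // 1000x1500
        }
        int w2 = 3000, h2 = 1000;
        if ((double) w2 / h2 > threshold) {
            // extremely wide: keep full height, crop width to height / 1.5
            System.out.println((int) (h2 / 1.5) + "x" + h2); // 666x1000
        }
    }
}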
@@ -35,6 +35,7 @@ public class MetadataChangeDetector {
        compare(changes, "goodreadsId", clear.isGoodreadsId(), newMeta.getGoodreadsId(), existingMeta.getGoodreadsId(), () -> !isTrue(existingMeta.getGoodreadsIdLocked()), newMeta.getGoodreadsIdLocked(), existingMeta.getGoodreadsIdLocked());
        compare(changes, "comicvineId", clear.isComicvineId(), newMeta.getComicvineId(), existingMeta.getComicvineId(), () -> !isTrue(existingMeta.getComicvineIdLocked()), newMeta.getComicvineIdLocked(), existingMeta.getComicvineIdLocked());
        compare(changes, "hardcoverId", clear.isHardcoverId(), newMeta.getHardcoverId(), existingMeta.getHardcoverId(), () -> !isTrue(existingMeta.getHardcoverIdLocked()), newMeta.getHardcoverIdLocked(), existingMeta.getHardcoverIdLocked());
        compare(changes, "hardcoverBookId", clear.isHardcoverBookId(), newMeta.getHardcoverBookId(), existingMeta.getHardcoverBookId(), () -> !isTrue(existingMeta.getHardcoverBookIdLocked()), newMeta.getHardcoverBookIdLocked(), existingMeta.getHardcoverBookIdLocked());
        compare(changes, "googleId", clear.isGoogleId(), newMeta.getGoogleId(), existingMeta.getGoogleId(), () -> !isTrue(existingMeta.getGoogleIdLocked()), newMeta.getGoogleIdLocked(), existingMeta.getGoogleIdLocked());
        compare(changes, "pageCount", clear.isPageCount(), newMeta.getPageCount(), existingMeta.getPageCount(), () -> !isTrue(existingMeta.getPageCountLocked()), newMeta.getPageCountLocked(), existingMeta.getPageCountLocked());
        compare(changes, "language", clear.isLanguage(), newMeta.getLanguage(), existingMeta.getLanguage(), () -> !isTrue(existingMeta.getLanguageLocked()), newMeta.getLanguageLocked(), existingMeta.getLanguageLocked());

@@ -75,6 +76,7 @@ public class MetadataChangeDetector {
        compareValue(diffs, "goodreadsId", clear.isGoodreadsId(), newMeta.getGoodreadsId(), existingMeta.getGoodreadsId(), () -> !isTrue(existingMeta.getGoodreadsIdLocked()));
        compareValue(diffs, "comicvineId", clear.isComicvineId(), newMeta.getComicvineId(), existingMeta.getComicvineId(), () -> !isTrue(existingMeta.getComicvineIdLocked()));
        compareValue(diffs, "hardcoverId", clear.isHardcoverId(), newMeta.getHardcoverId(), existingMeta.getHardcoverId(), () -> !isTrue(existingMeta.getHardcoverIdLocked()));
        compareValue(diffs, "hardcoverBookId", clear.isHardcoverBookId(), newMeta.getHardcoverBookId(), existingMeta.getHardcoverBookId(), () -> !isTrue(existingMeta.getHardcoverBookIdLocked()));
        compareValue(diffs, "googleId", clear.isGoogleId(), newMeta.getGoogleId(), existingMeta.getGoogleId(), () -> !isTrue(existingMeta.getGoogleIdLocked()));
        compareValue(diffs, "pageCount", clear.isPageCount(), newMeta.getPageCount(), existingMeta.getPageCount(), () -> !isTrue(existingMeta.getPageCountLocked()));
        compareValue(diffs, "language", clear.isLanguage(), newMeta.getLanguage(), existingMeta.getLanguage(), () -> !isTrue(existingMeta.getLanguageLocked()));

@@ -107,6 +109,7 @@ public class MetadataChangeDetector {
        compareValue(diffs, "goodreadsId", clear.isGoodreadsId(), newMeta.getGoodreadsId(), existingMeta.getGoodreadsId(), () -> !isTrue(existingMeta.getGoodreadsIdLocked()));
        compareValue(diffs, "comicvineId", clear.isComicvineId(), newMeta.getComicvineId(), existingMeta.getComicvineId(), () -> !isTrue(existingMeta.getComicvineIdLocked()));
        compareValue(diffs, "hardcoverId", clear.isHardcoverId(), newMeta.getHardcoverId(), existingMeta.getHardcoverId(), () -> !isTrue(existingMeta.getHardcoverIdLocked()));
        compareValue(diffs, "hardcoverBookId", clear.isHardcoverBookId(), newMeta.getHardcoverBookId(), existingMeta.getHardcoverBookId(), () -> !isTrue(existingMeta.getHardcoverBookIdLocked()));
        compareValue(diffs, "googleId", clear.isGoogleId(), newMeta.getGoogleId(), existingMeta.getGoogleId(), () -> !isTrue(existingMeta.getGoogleIdLocked()));
        compareValue(diffs, "language", clear.isLanguage(), newMeta.getLanguage(), existingMeta.getLanguage(), () -> !isTrue(existingMeta.getLanguageLocked()));
        compareValue(diffs, "authors", clear.isAuthors(), newMeta.getAuthors(), toNameSet(existingMeta.getAuthors()), () -> !isTrue(existingMeta.getAuthorsLocked()));
@@ -28,7 +28,6 @@ public class PathPatternResolver {
    private final int SUFFIX_BYTES = TRUNCATION_SUFFIX.getBytes(StandardCharsets.UTF_8).length;

    private final Pattern WHITESPACE_PATTERN = Pattern.compile("\\s+");
    private final Pattern FILE_EXTENSION_PATTERN = Pattern.compile(".*\\.[a-zA-Z0-9]+$");
    private final Pattern CONTROL_CHARACTER_PATTERN = Pattern.compile("\\p{Cntrl}");
    private final Pattern INVALID_CHARS_PATTERN = Pattern.compile("[\\\\/:*?\"<>|]");
    private final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\{(.*?)}");

@@ -129,7 +128,7 @@ public class PathPatternResolver {
        // Handle optional blocks enclosed in <...>
        Pattern optionalBlockPattern = Pattern.compile("<([^<>]*)>");
        Matcher matcher = optionalBlockPattern.matcher(pattern);
        StringBuilder resolved = new StringBuilder();
        StringBuilder resolved = new StringBuilder(1024);

        while (matcher.find()) {
            String block = matcher.group(1);

@@ -162,7 +161,7 @@ public class PathPatternResolver {

        // Replace known placeholders with values, preserve unknown ones
        Matcher placeholderMatcher = PLACEHOLDER_PATTERN.matcher(result);
        StringBuilder finalResult = new StringBuilder();
        StringBuilder finalResult = new StringBuilder(1024);

        while (placeholderMatcher.find()) {
            String key = placeholderMatcher.group(1);

@@ -178,14 +177,16 @@ public class PathPatternResolver {

        result = finalResult.toString();

        boolean usedFallbackFilename = false;
        if (result.isBlank()) {
            result = values.getOrDefault("currentFilename", "untitled");
            usedFallbackFilename = true;
        }

        boolean hasExtension = FILE_EXTENSION_PATTERN.matcher(result).matches();
        boolean explicitlySetExtension = pattern.contains("{extension}");
        boolean patternIncludesExtension = pattern.contains("{extension}");
        boolean patternIncludesFullFilename = pattern.contains("{currentFilename}");

        if (!explicitlySetExtension && !hasExtension && !extension.isBlank()) {
        if (!usedFallbackFilename && !patternIncludesExtension && !patternIncludesFullFilename && !extension.isBlank()) {
            result += "." + extension;
        }

@@ -209,7 +210,7 @@ public class PathPatternResolver {
        }

        String[] authorArray = COMMA_SPACE_PATTERN.split(authors);
        StringBuilder result = new StringBuilder();
        StringBuilder result = new StringBuilder(256);
        int currentBytes = 0;
        int truncationLimit = MAX_AUTHOR_BYTES - SUFFIX_BYTES;

@@ -264,7 +265,7 @@ public class PathPatternResolver {

    private String validateFinalPath(String path) {
        String[] components = SLASH_PATTERN.split(path);
        StringBuilder result = new StringBuilder();
        StringBuilder result = new StringBuilder(512);

        for (int i = 0; i < components.length; i++) {
            String component = components[i];

@@ -276,7 +277,7 @@ public class PathPatternResolver {
            if (component.getBytes(StandardCharsets.UTF_8).length > MAX_FILESYSTEM_COMPONENT_BYTES) {
                component = truncatePathComponent(component, MAX_FILESYSTEM_COMPONENT_BYTES);
            }
            while (component.endsWith(".")) {
            while (component != null && !component.isEmpty() && component.endsWith(".")) {
                component = component.substring(0, component.length() - 1);
            }
        }
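A sketch of the revised extension rule above: the extension is appended only when the pattern itself supplies neither {extension} nor {currentFilename} and the resolver did not fall back to the original filename. The logic is copied from the new diff lines; the inputs in main are illustrative.

public class ExtensionRuleSketch {
    static String finish(String result, String pattern, boolean usedFallbackFilename, String extension) {
        boolean patternIncludesExtension = pattern.contains("{extension}");
        boolean patternIncludesFullFilename = pattern.contains("{currentFilename}");
        if (!usedFallbackFilename && !patternIncludesExtension && !patternIncludesFullFilename && !extension.isBlank()) {
            result += "." + extension;
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(finish("Dune", "{title}", false, "epub"));                 // Dune.epub
        System.out.println(finish("Dune.epub", "{currentFilename}", false, "epub"));  // Dune.epub (unchanged)
        System.out.println(finish("old-name.epub", "{title}", true, "epub"));         // old-name.epub (fallback, unchanged)
    }
}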
@@ -70,4 +70,11 @@ public class KoboUrlBuilder {
                .build()
                .toUriString();
    }

    public String librarySyncUrl(String token) {
        return baseBuilder()
                .pathSegment("api", "kobo", token, "v1", "library", "sync")
                .build()
                .toUriString();
    }
}
@@ -0,0 +1,2 @@
-- Add sort_order column to opds_user_v2 table
ALTER TABLE opds_user_v2 ADD COLUMN sort_order VARCHAR(20) NOT NULL DEFAULT 'RECENT';

@@ -0,0 +1,6 @@
-- Add numeric hardcover_book_id column to book_metadata table
-- This stores the numeric Hardcover book ID for API operations,
-- while the existing hardcover_id column stores the slug for URL linking.

ALTER TABLE book_metadata ADD COLUMN hardcover_book_id INTEGER;

@@ -0,0 +1,3 @@
-- Add hardcover_book_id_locked column to book_metadata table

ALTER TABLE book_metadata ADD COLUMN hardcover_book_id_locked BOOLEAN DEFAULT FALSE;
@@ -0,0 +1,13 @@
package com.adityachandel.booklore.model.enums;

public enum OpdsSortOrder {
    RECENT,
    TITLE_ASC,
    TITLE_DESC,
    AUTHOR_ASC,
    AUTHOR_DESC,
    SERIES_ASC,
    SERIES_DESC,
    RATING_ASC,
    RATING_DESC
}
@@ -15,6 +15,7 @@ import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.repository.KoboReadingStateRepository;
import com.adityachandel.booklore.repository.UserBookProgressRepository;
import com.adityachandel.booklore.repository.UserRepository;
import com.adityachandel.booklore.service.hardcover.HardcoverSyncService;
import com.adityachandel.booklore.service.kobo.KoboReadingStateBuilder;
import com.adityachandel.booklore.service.kobo.KoboReadingStateService;
import com.adityachandel.booklore.service.kobo.KoboSettingsService;

@@ -64,6 +65,9 @@ class KoboReadingStateServiceTest {
    @Mock
    private KoboReadingStateBuilder readingStateBuilder;

    @Mock
    private HardcoverSyncService hardcoverSyncService;

    @InjectMocks
    private KoboReadingStateService service;

@@ -14,6 +14,7 @@ import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.repository.KoboReadingStateRepository;
import com.adityachandel.booklore.repository.UserBookProgressRepository;
import com.adityachandel.booklore.repository.UserRepository;
import com.adityachandel.booklore.service.hardcover.HardcoverSyncService;
import com.adityachandel.booklore.service.kobo.KoboReadingStateBuilder;
import com.adityachandel.booklore.service.kobo.KoboReadingStateService;
import com.adityachandel.booklore.service.kobo.KoboSettingsService;

@@ -58,6 +59,9 @@ class KoboStatusSyncProtectionTest {
    @Mock
    private KoboReadingStateBuilder readingStateBuilder;

    @Mock
    private HardcoverSyncService hardcoverSyncService;

    @InjectMocks
    private KoboReadingStateService service;
@@ -0,0 +1,341 @@
|
||||
package com.adityachandel.booklore.service.bookdrop;
|
||||
|
||||
import com.adityachandel.booklore.model.dto.BookMetadata;
|
||||
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
|
||||
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
|
||||
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
|
||||
import com.adityachandel.booklore.repository.BookdropFileRepository;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.mockito.Captor;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class BookdropBulkEditServiceTest {
|
||||
|
||||
@Mock
|
||||
private BookdropFileRepository bookdropFileRepository;
|
||||
|
||||
@Mock
|
||||
private BookdropMetadataHelper metadataHelper;
|
||||
|
||||
@InjectMocks
|
||||
private BookdropBulkEditService bulkEditService;
|
||||
|
||||
@Captor
|
||||
private ArgumentCaptor<List<BookdropFileEntity>> filesCaptor;
|
||||
|
||||
private BookdropFileEntity createFileEntity(Long id, String fileName, BookMetadata metadata) {
|
||||
BookdropFileEntity entity = new BookdropFileEntity();
|
||||
entity.setId(id);
|
||||
entity.setFileName(fileName);
|
||||
entity.setFilePath("/bookdrop/" + fileName);
|
||||
return entity;
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
when(metadataHelper.getCurrentMetadata(any())).thenReturn(new BookMetadata());
|
||||
doNothing().when(metadataHelper).updateFetchedMetadata(any(), any());
|
||||
}
|
||||
|
||||
@Test
|
||||
void bulkEdit_WithSingleValueFields_ShouldUpdateTextAndNumericFields() {
|
||||
BookMetadata existingMetadata = new BookMetadata();
|
||||
existingMetadata.setSeriesName("Old Series");
|
||||
|
||||
BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", existingMetadata);
|
||||
BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", existingMetadata);
|
||||
|
||||
when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L)))
|
||||
.thenReturn(List.of(1L, 2L));
|
||||
when(bookdropFileRepository.findAllById(anyList()))
|
||||
.thenReturn(List.of(file1, file2));
|
||||
|
||||
BookMetadata updates = new BookMetadata();
|
||||
updates.setSeriesName("New Series");
|
||||
updates.setPublisher("Test Publisher");
|
||||
updates.setLanguage("en");
|
||||
updates.setSeriesTotal(100);
|
||||
|
||||
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
|
||||
request.setFields(updates);
|
||||
request.setEnabledFields(Set.of("seriesName", "publisher", "language", "seriesTotal"));
|
||||
request.setMergeArrays(false);
|
||||
request.setSelectAll(false);
|
||||
request.setSelectedIds(List.of(1L, 2L));
|
||||
|
||||
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
|
||||
|
||||
assertEquals(2, result.getTotalFiles());
|
||||
assertEquals(2, result.getSuccessfullyUpdated());
|
||||
assertEquals(0, result.getFailed());
|
||||
|
||||
verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
|
||||
verify(bookdropFileRepository, times(1)).saveAll(anyList());
|
||||
}
|
||||
|
||||
@Test
|
||||
void bulkEdit_WithArrayFieldsMergeMode_ShouldMergeArrays() {
|
||||
BookMetadata existingMetadata = new BookMetadata();
|
||||
existingMetadata.setAuthors(new LinkedHashSet<>(List.of("Author 1")));
|
||||
existingMetadata.setCategories(new LinkedHashSet<>(List.of("Category 1")));
|
||||
|
||||
when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);
|
||||
|
||||
BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);
|
||||
|
||||
when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
|
||||
.thenReturn(List.of(1L));
|
||||
when(bookdropFileRepository.findAllById(anyList()))
|
||||
.thenReturn(List.of(file));
|
||||
|
||||
BookMetadata updates = new BookMetadata();
|
||||
updates.setAuthors(new LinkedHashSet<>(List.of("Author 2")));
|
||||
updates.setCategories(new LinkedHashSet<>(List.of("Category 2")));
|
||||
|
||||
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
|
||||
request.setFields(updates);
|
||||
request.setEnabledFields(Set.of("authors", "categories"));
|
||||
request.setMergeArrays(true);
|
||||
request.setSelectAll(false);
|
||||
request.setSelectedIds(List.of(1L));
|
||||
|
||||
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
|
||||
|
||||
assertEquals(1, result.getTotalFiles());
|
||||
assertEquals(1, result.getSuccessfullyUpdated());
|
||||
assertEquals(0, result.getFailed());
|
||||
|
||||
ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
|
||||
verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());
|
||||
|
||||
BookMetadata captured = metadataCaptor.getValue();
|
||||
        assertTrue(captured.getAuthors().contains("Author 1"));
        assertTrue(captured.getAuthors().contains("Author 2"));
        assertTrue(captured.getCategories().contains("Category 1"));
        assertTrue(captured.getCategories().contains("Category 2"));
    }

    @Test
    void bulkEdit_WithArrayFieldsReplaceMode_ShouldReplaceArrays() {
        BookMetadata existingMetadata = new BookMetadata();
        existingMetadata.setAuthors(new LinkedHashSet<>(List.of("Author 1")));

        when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);

        BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);

        when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
                .thenReturn(List.of(1L));
        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(List.of(file));

        BookMetadata updates = new BookMetadata();
        updates.setAuthors(new LinkedHashSet<>(List.of("Author 2")));

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Set.of("authors"));
        request.setMergeArrays(false);
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L));

        bulkEditService.bulkEdit(request);

        ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
        verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());

        BookMetadata captured = metadataCaptor.getValue();
        assertFalse(captured.getAuthors().contains("Author 1"));
        assertTrue(captured.getAuthors().contains("Author 2"));
        assertEquals(1, captured.getAuthors().size());
    }

    @Test
    void bulkEdit_WithDisabledFields_ShouldNotUpdateThoseFields() {
        BookMetadata existingMetadata = new BookMetadata();
        existingMetadata.setSeriesName("Original Series");
        existingMetadata.setPublisher("Original Publisher");

        when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);

        BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);

        when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
                .thenReturn(List.of(1L));
        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(List.of(file));

        BookMetadata updates = new BookMetadata();
        updates.setSeriesName("New Series");
        updates.setPublisher("New Publisher");

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Set.of("seriesName"));
        request.setMergeArrays(false);
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L));

        bulkEditService.bulkEdit(request);

        ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
        verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());

        BookMetadata captured = metadataCaptor.getValue();
        assertEquals("New Series", captured.getSeriesName());
        assertEquals("Original Publisher", captured.getPublisher());
    }

    @Test
    void bulkEdit_WithSelectAll_ShouldProcessAllFiles() {
        BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", new BookMetadata());
        BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", new BookMetadata());
        BookdropFileEntity file3 = createFileEntity(3L, "file3.cbz", new BookMetadata());

        when(metadataHelper.resolveFileIds(true, List.of(2L), null))
                .thenReturn(List.of(1L, 3L));
        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(List.of(file1, file3));

        BookMetadata updates = new BookMetadata();
        updates.setLanguage("en");

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Set.of("language"));
        request.setMergeArrays(false);
        request.setSelectAll(true);
        request.setExcludedIds(List.of(2L));

        BookdropBulkEditResult result = bulkEditService.bulkEdit(request);

        assertEquals(2, result.getTotalFiles());
        assertEquals(2, result.getSuccessfullyUpdated());
        verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
    }

    @Test
    void bulkEdit_WithOneFileError_ShouldContinueWithOthers() {
        BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", new BookMetadata());
        BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", new BookMetadata());
        BookdropFileEntity file3 = createFileEntity(3L, "file3.cbz", new BookMetadata());

        when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
                .thenReturn(List.of(1L, 2L, 3L));
        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(List.of(file1, file2, file3));

        doThrow(new RuntimeException("JSON serialization error"))
                .when(metadataHelper).updateFetchedMetadata(eq(file2), any());

        BookMetadata updates = new BookMetadata();
        updates.setLanguage("en");

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Set.of("language"));
        request.setMergeArrays(false);
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L, 2L, 3L));

        BookdropBulkEditResult result = bulkEditService.bulkEdit(request);

        assertEquals(3, result.getTotalFiles());
        assertEquals(2, result.getSuccessfullyUpdated());
        assertEquals(1, result.getFailed());

        verify(bookdropFileRepository).saveAll(filesCaptor.capture());
        List<BookdropFileEntity> savedFiles = filesCaptor.getValue();
        assertEquals(2, savedFiles.size());
        assertTrue(savedFiles.stream().anyMatch(f -> f.getId().equals(1L)));
        assertTrue(savedFiles.stream().anyMatch(f -> f.getId().equals(3L)));
        assertFalse(savedFiles.stream().anyMatch(f -> f.getId().equals(2L)));
    }

    @Test
    void bulkEdit_WithEmptyEnabledFields_ShouldNotUpdateAnything() {
        BookdropFileEntity file = createFileEntity(1L, "file.cbz", new BookMetadata());

        when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
                .thenReturn(List.of(1L));
        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(List.of(file));

        BookMetadata updates = new BookMetadata();
        updates.setSeriesName("New Series");

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Collections.emptySet());
        request.setMergeArrays(false);
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L));

        BookdropBulkEditResult result = bulkEditService.bulkEdit(request);

        assertEquals(1, result.getSuccessfullyUpdated());

        ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
        verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());

        assertNull(metadataCaptor.getValue().getSeriesName());
    }

    @Test
    void bulkEdit_WithLargeSelection_ShouldProcessInBatches() {
        List<BookdropFileEntity> batch1 = new ArrayList<>();
        List<BookdropFileEntity> batch2 = new ArrayList<>();
        List<BookdropFileEntity> batch3 = new ArrayList<>();
        List<Long> manyIds = new ArrayList<>();

        for (long i = 1; i <= 1500; i++) {
            manyIds.add(i);
            BookdropFileEntity file = createFileEntity(i, "file" + i + ".cbz", new BookMetadata());
            if (i <= 500) {
                batch1.add(file);
            } else if (i <= 1000) {
                batch2.add(file);
            } else {
                batch3.add(file);
            }
        }

        when(metadataHelper.resolveFileIds(false, null, manyIds))
                .thenReturn(manyIds);

        when(bookdropFileRepository.findAllById(anyList()))
                .thenReturn(batch1, batch2, batch3);

        BookMetadata updates = new BookMetadata();
        updates.setLanguage("en");

        BookdropBulkEditRequest request = new BookdropBulkEditRequest();
        request.setFields(updates);
        request.setEnabledFields(Set.of("language"));
        request.setMergeArrays(false);
        request.setSelectAll(false);
        request.setSelectedIds(manyIds);

        BookdropBulkEditResult result = bulkEditService.bulkEdit(request);

        assertEquals(1500, result.getTotalFiles());
        assertEquals(1500, result.getSuccessfullyUpdated());
        assertEquals(0, result.getFailed());

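        // Three batches of 500 files are stubbed above, so an assumed batch size of 500 implies three repository round trips.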
        verify(bookdropFileRepository, times(3)).findAllById(anyList());
        verify(bookdropFileRepository, times(3)).saveAll(anyList());
    }
}

@@ -0,0 +1,644 @@
package com.adityachandel.booklore.service.bookdrop;

import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import java.util.List;
import java.util.Set;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.*;

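/**
 * Tests for FilenamePatternExtractor: placeholders such as {SeriesName}, {SeriesNumber},
 * {Published:yyyy} and the wildcard * are matched against bookdrop file names to pre-fill metadata.
 */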
@ExtendWith(MockitoExtension.class)
class FilenamePatternExtractorTest {

    @Mock
    private BookdropFileRepository bookdropFileRepository;

    @Mock
    private BookdropMetadataHelper metadataHelper;

    @InjectMocks
    private FilenamePatternExtractor extractor;

    private BookdropFileEntity createFileEntity(Long id, String fileName) {
        BookdropFileEntity entity = new BookdropFileEntity();
        entity.setId(id);
        entity.setFileName(fileName);
        entity.setFilePath("/bookdrop/" + fileName);
        return entity;
    }

    @Test
    void extractFromFilename_WithSeriesAndChapter_ShouldExtractBoth() {
        String filename = "Chronicles of Earth - Ch 25.cbz";
        String pattern = "{SeriesName} - Ch {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(25.0f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WithVolumeAndIssuePattern_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth Vol.3 (of 150).cbz";
        String pattern = "{SeriesName} Vol.{SeriesNumber} (of {SeriesTotal})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(3.0f, result.getSeriesNumber());
        assertEquals(150, result.getSeriesTotal());
    }

    @Test
    void extractFromFilename_WithPublishedYearPattern_ShouldExtractYear() {
        String filename = "Chronicles of Earth (2016) 001.cbz";
        String pattern = "{SeriesName} ({Published:yyyy}) {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(2016, result.getPublishedDate().getYear());
        assertEquals(1.0f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WithAuthorAndTitle_ShouldExtractBoth() {
        String filename = "John Smith - The Lost City.epub";
        String pattern = "{Authors} - {Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals(Set.of("John Smith"), result.getAuthors());
        assertEquals("The Lost City", result.getTitle());
    }

    @Test
    void extractFromFilename_WithMultipleAuthors_ShouldParseAll() {
        String filename = "John Smith, Jane Doe - The Lost City.epub";
        String pattern = "{Authors} - {Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertTrue(result.getAuthors().contains("John Smith"));
        assertTrue(result.getAuthors().contains("Jane Doe"));
        assertEquals("The Lost City", result.getTitle());
    }

    @Test
    void extractFromFilename_WithDecimalSeriesNumber_ShouldParseCorrectly() {
        String filename = "Chronicles of Earth - Ch 10.5.cbz";
        String pattern = "{SeriesName} - Ch {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(10.5f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WithNonMatchingPattern_ShouldReturnNull() {
        String filename = "Random File Name.pdf";
        String pattern = "{SeriesName} - Ch {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNull(result);
    }

    @Test
    void extractFromFilename_WithNullPattern_ShouldReturnNull() {
        String filename = "Test File.pdf";

        BookMetadata result = extractor.extractFromFilename(filename, null);

        assertNull(result);
    }

    @Test
    void extractFromFilename_WithEmptyPattern_ShouldReturnNull() {
        String filename = "Test File.pdf";

        BookMetadata result = extractor.extractFromFilename(filename, "");

        assertNull(result);
    }

    @Test
    void extractFromFilename_WithPublisherYearAndIssue_ShouldExtractAll() {
        String filename = "Epic Press - Chronicles of Earth #001 (2011).cbz";
        String pattern = "{Publisher} - {SeriesName} #{SeriesNumber} ({Published:yyyy})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Epic Press", result.getPublisher());
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(1.0f, result.getSeriesNumber());
        assertEquals(2011, result.getPublishedDate().getYear());
    }

    @Test
    void extractFromFilename_WithLanguageTag_ShouldExtractLanguage() {
        String filename = "Chronicles of Earth - Ch 500 [EN].cbz";
        String pattern = "{SeriesName} - Ch {SeriesNumber} [{Language}]";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(500.0f, result.getSeriesNumber());
        assertEquals("EN", result.getLanguage());
    }

    @Test
    void bulkExtract_WithPreviewMode_ShouldReturnExtractionResults() {
        BookdropFileEntity file1 = createFileEntity(1L, "Chronicles A - Ch 1.cbz");
        BookdropFileEntity file2 = createFileEntity(2L, "Chronicles B - Ch 2.cbz");
        BookdropFileEntity file3 = createFileEntity(3L, "Random Name.cbz");

        BookdropPatternExtractRequest request = new BookdropPatternExtractRequest();
        request.setPattern("{SeriesName} - Ch {SeriesNumber}");
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L, 2L, 3L));
        request.setPreview(true);

        when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
                .thenReturn(List.of(1L, 2L, 3L));
        when(bookdropFileRepository.findAllById(anyList())).thenReturn(List.of(file1, file2, file3));

        BookdropPatternExtractResult result = extractor.bulkExtract(request);

        assertNotNull(result);
        assertEquals(3, result.getTotalFiles());
        assertEquals(2, result.getSuccessfullyExtracted());
        assertEquals(1, result.getFailed());

        var successResults = result.getResults().stream()
                .filter(BookdropPatternExtractResult.FileExtractionResult::isSuccess)
                .toList();
        assertEquals(2, successResults.size());
    }

    @Test
    void bulkExtract_WithFullExtraction_ShouldProcessAndPersistAll() {
        BookdropFileEntity file1 = createFileEntity(1L, "Chronicles A - Ch 1.cbz");
        BookdropFileEntity file2 = createFileEntity(2L, "Chronicles B - Ch 2.cbz");
        BookdropFileEntity file3 = createFileEntity(3L, "Random Name.cbz");

        BookdropPatternExtractRequest request = new BookdropPatternExtractRequest();
        request.setPattern("{SeriesName} - Ch {SeriesNumber}");
        request.setSelectAll(false);
        request.setSelectedIds(List.of(1L, 2L, 3L));
        request.setPreview(false);

        when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
                .thenReturn(List.of(1L, 2L, 3L));
        when(bookdropFileRepository.findAllById(anyList())).thenReturn(List.of(file1, file2, file3));
        when(metadataHelper.getCurrentMetadata(any())).thenReturn(new BookMetadata());

        BookdropPatternExtractResult result = extractor.bulkExtract(request);

        assertNotNull(result);
        assertEquals(3, result.getTotalFiles());
        assertEquals(2, result.getSuccessfullyExtracted());
        assertEquals(1, result.getFailed());

        // Verify metadata was updated for successful extractions (2 files matched pattern)
        verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
        // Verify all files were saved (even the one that failed extraction keeps original metadata)
        verify(bookdropFileRepository, times(1)).saveAll(anyList());
    }

    @Test
    void extractFromFilename_WithSpecialCharacters_ShouldHandleCorrectly() {
        String filename = "Chronicles (Special Edition) - Ch 5.cbz";
        String pattern = "{SeriesName} - Ch {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles (Special Edition)", result.getSeriesName());
        assertEquals(5.0f, result.getSeriesNumber());
    }

    // ===== Greedy Matching Tests =====

    @Test
    void extractFromFilename_SeriesNameOnly_ShouldCaptureFullName() {
        String filename = "Chronicles of Earth.cbz";
        String pattern = "{SeriesName}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
    }

    @Test
    void extractFromFilename_TitleOnly_ShouldCaptureFullTitle() {
        String filename = "The Last Kingdom.epub";
        String pattern = "{Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Last Kingdom", result.getTitle());
    }

    // ===== Complex Pattern Tests =====

    @Test
    void extractFromFilename_SeriesNumberAndTitle_ShouldExtractBoth() {
        String filename = "Chronicles of Earth 01 - The Beginning.epub";
        String pattern = "{SeriesName} {SeriesNumber} - {Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(1.0f, result.getSeriesNumber());
        assertEquals("The Beginning", result.getTitle());
    }

    @Test
    void extractFromFilename_AuthorSeriesTitleFormat_ShouldExtractAll() {
        String filename = "Chronicles of Earth 07 - The Final Battle - John Smith.epub";
        String pattern = "{SeriesName} {SeriesNumber} - {Title} - {Authors}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(7.0f, result.getSeriesNumber());
        assertEquals("The Final Battle", result.getTitle());
        assertEquals(Set.of("John Smith"), result.getAuthors());
    }

    @Test
    void extractFromFilename_AuthorTitleYear_ShouldExtractAll() {
        String filename = "John Smith - The Lost City (1949).epub";
        String pattern = "{Authors} - {Title} ({Published:yyyy})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals(Set.of("John Smith"), result.getAuthors());
        assertEquals("The Lost City", result.getTitle());
        assertEquals(1949, result.getPublishedDate().getYear());
    }

    @Test
    void extractFromFilename_AuthorWithCommas_ShouldParseProperly() {
        String filename = "Smith, John R. - The Lost City.epub";
        String pattern = "{Authors} - {Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals(Set.of("Smith", "John R."), result.getAuthors());
        assertEquals("The Lost City", result.getTitle());
    }

    @Test
    void extractFromFilename_PartNumberFormat_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth - Part 2 - Rising Darkness.epub";
        String pattern = "{SeriesName} - Part {SeriesNumber} - {Title}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(2.0f, result.getSeriesNumber());
        assertEquals("Rising Darkness", result.getTitle());
    }

    @Test
    void extractFromFilename_PublisherBracketFormat_ShouldExtractCorrectly() {
        String filename = "[Epic Press] Chronicles of Earth Vol.5 [5 of 20].epub";
        String pattern = "[{Publisher}] {SeriesName} Vol.{SeriesNumber} [* of {SeriesTotal}]";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Epic Press", result.getPublisher());
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(5.0f, result.getSeriesNumber());
        assertEquals(20, result.getSeriesTotal());
    }

    @Test
    void extractFromFilename_CalibreStyleFormat_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth 01 The Beginning - John Smith.epub";
        String pattern = "{SeriesName} {SeriesNumber} {Title} - {Authors}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(1.0f, result.getSeriesNumber());
        assertEquals("The Beginning", result.getTitle());
        assertEquals(Set.of("John Smith"), result.getAuthors());
    }

    // ===== New Placeholder Tests =====

    @Test
    void extractFromFilename_WithSubtitle_ShouldExtractBoth() {
        String filename = "The Lost City - A Tale of Adventure.epub";
        String pattern = "{Title} - {Subtitle}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals("A Tale of Adventure", result.getSubtitle());
    }

    @Test
    void extractFromFilename_WithISBN13_ShouldExtractISBN13() {
        String filename = "The Lost City [1234567890123].epub";
        String pattern = "{Title} [{ISBN13}]";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals("1234567890123", result.getIsbn13());
    }

    @Test
    void extractFromFilename_WithISBN10_ShouldExtractCorrectly() {
        String filename = "Chronicles of Tomorrow - 0553293354.epub";
        String pattern = "{Title} - {ISBN10}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Tomorrow", result.getTitle());
        assertEquals("0553293354", result.getIsbn10());
    }

    @Test
    void extractFromFilename_WithISBN10EndingInX_ShouldExtractCorrectly() {
        String filename = "Test Book - 043942089X.epub";
        String pattern = "{Title} - {ISBN10}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Test Book", result.getTitle());
        assertEquals("043942089X", result.getIsbn10());
    }

    @Test
    void extractFromFilename_WithASIN_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth - B001234567.epub";
        String pattern = "{Title} - {ASIN}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getTitle());
        assertEquals("B001234567", result.getAsin());
    }

    // ===== Published Date Format Tests =====

    @Test
    void extractFromFilename_WithPublishedDateYYYYMMDD_ShouldExtractCorrectly() {
        String filename = "The Lost City - 1925-04-10.epub";
        String pattern = "{Title} - {Published:yyyy-MM-dd}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals(1925, result.getPublishedDate().getYear());
        assertEquals(4, result.getPublishedDate().getMonthValue());
        assertEquals(10, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_WithPublishedDateCompact_ShouldExtractCorrectly() {
        String filename = "Chronicles of Tomorrow_19650801.epub";
        String pattern = "{Title}_{Published:yyyyMMdd}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Tomorrow", result.getTitle());
        assertEquals(1965, result.getPublishedDate().getYear());
        assertEquals(8, result.getPublishedDate().getMonthValue());
        assertEquals(1, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_WithPublishedDateDots_ShouldExtractCorrectly() {
        String filename = "Chronicles of Tomorrow (1951.05.01).epub";
        String pattern = "{Title} ({Published:yyyy.MM.dd})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Tomorrow", result.getTitle());
        assertEquals(1951, result.getPublishedDate().getYear());
        assertEquals(5, result.getPublishedDate().getMonthValue());
        assertEquals(1, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_WithPublishedDateDashes_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth [05-15-2020].epub";
        String pattern = "{Title} [{Published:MM-dd-yyyy}]";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getTitle());
        assertEquals(2020, result.getPublishedDate().getYear());
        assertEquals(5, result.getPublishedDate().getMonthValue());
        assertEquals(15, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_WithPublishedDateSingleDigits_ShouldExtractCorrectly() {
        String filename = "Chronicles of Earth - 2023-1-5.epub";
        String pattern = "{Title} - {Published:yyyy-M-d}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getTitle());
        assertEquals(2023, result.getPublishedDate().getYear());
        assertEquals(1, result.getPublishedDate().getMonthValue());
        assertEquals(5, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_ComplexPatternWithMultiplePlaceholders_ShouldExtractAll() {
        String filename = "Chronicles of Earth - The Beginning [1234567890123] - 2020-05-15.epub";
        String pattern = "{SeriesName} - {Title} [{ISBN13}] - {Published:yyyy-MM-dd}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals("The Beginning", result.getTitle());
        assertEquals("1234567890123", result.getIsbn13());
        assertEquals(2020, result.getPublishedDate().getYear());
        assertEquals(5, result.getPublishedDate().getMonthValue());
        assertEquals(15, result.getPublishedDate().getDayOfMonth());
    }

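    // ===== Published Date Auto-Detection Tests =====
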
    @Test
    void extractFromFilename_PublishedWithoutFormat_AutoDetectsISODate() {
        String filename = "The Lost City (2023-05-15).epub";
        String pattern = "{Title} ({Published})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals(2023, result.getPublishedDate().getYear());
        assertEquals(5, result.getPublishedDate().getMonthValue());
        assertEquals(15, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_PublishedWithoutFormat_AutoDetectsCompactDate() {
        String filename = "The Beginning [20231225].epub";
        String pattern = "{Title} [{Published}]";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Beginning", result.getTitle());
        assertEquals(2023, result.getPublishedDate().getYear());
        assertEquals(12, result.getPublishedDate().getMonthValue());
        assertEquals(25, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_PublishedWithoutFormat_AutoDetectsYear() {
        String filename = "The Lost City (2023).epub";
        String pattern = "{Title} ({Published})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals(2023, result.getPublishedDate().getYear());
        assertEquals(1, result.getPublishedDate().getMonthValue());
        assertEquals(1, result.getPublishedDate().getDayOfMonth());
    }

    @Test
    void extractFromFilename_PublishedWithoutFormat_AutoDetectsTwoDigitYear() {
        String filename = "Chronicles of Tomorrow (99).epub";
        String pattern = "{Title} ({Published})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Tomorrow", result.getTitle());
        assertEquals(1999, result.getPublishedDate().getYear());
    }

    @Test
    void extractFromFilename_PublishedWithoutFormat_AutoDetectsFlexibleFormat() {
        String filename = "Tomorrow (15|05|2023).epub";
        String pattern = "{Title} ({Published})";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Tomorrow", result.getTitle());
        assertEquals(2023, result.getPublishedDate().getYear());
        assertEquals(5, result.getPublishedDate().getMonthValue());
        assertEquals(15, result.getPublishedDate().getDayOfMonth());
    }

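    // ===== Wildcard Tests =====
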
    @Test
    void extractFromFilename_WildcardBeforePlaceholder_SkipsUnwantedText() {
        String filename = "[Extra] Chronicles of Earth - Ch 42.cbz";
        String pattern = "[*] {SeriesName} - Ch {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(42.0f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WildcardBetweenPlaceholders_SkipsMiddleText() {
        String filename = "The Lost City (extra) John Smith.epub";
        String pattern = "{Title} (*) {Authors}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("The Lost City", result.getTitle());
        assertEquals(Set.of("John Smith"), result.getAuthors());
    }

    @Test
    void extractFromFilename_WildcardAtEnd_SkipsTrailingText() {
        String filename = "Chronicles of Earth v1 - extra.cbz";
        String pattern = "{SeriesName} v{SeriesNumber} - *";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(1.0f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WildcardAtEnd_AllowsPartialMatch() {
        String filename = "Chronicles of Earth - Chapter 20.cbz";
        String pattern = "{SeriesName} - * {SeriesNumber}";

        BookMetadata result = extractor.extractFromFilename(filename, pattern);

        assertNotNull(result);
        assertEquals("Chronicles of Earth", result.getSeriesName());
        assertEquals(20.0f, result.getSeriesNumber());
    }

    @Test
    void extractFromFilename_WildcardWithVariousPlacements_HandlesCorrectly() {
        String filename1 = "Chronicles of Tomorrow - Chapter 8.1 (2025).cbz";
        String pattern1 = "{SeriesName} - * {SeriesNumber}";
        BookMetadata result1 = extractor.extractFromFilename(filename1, pattern1);
        assertNotNull(result1);
        assertEquals("Chronicles of Tomorrow", result1.getSeriesName());
        assertEquals(8.1f, result1.getSeriesNumber());

        String filename2 = "Junk - Chapter 20.cbz";
        String pattern2 = "* - Chapter {SeriesNumber}";
        BookMetadata result2 = extractor.extractFromFilename(filename2, pattern2);
        assertNotNull(result2);
        assertEquals(20.0f, result2.getSeriesNumber());
    }
}

@@ -0,0 +1,439 @@
package com.adityachandel.booklore.service.hardcover;

import com.adityachandel.booklore.model.dto.settings.AppSettings;
import com.adityachandel.booklore.model.dto.settings.MetadataProviderSettings;
import com.adityachandel.booklore.model.entity.BookEntity;
import com.adityachandel.booklore.model.entity.BookMetadataEntity;
import com.adityachandel.booklore.repository.BookRepository;
import com.adityachandel.booklore.service.appsettings.AppSettingService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.springframework.web.client.RestClient;

import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;

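/**
 * Tests for HardcoverSyncService: reading progress is pushed to the Hardcover
 * GraphQL API, and sync is skipped whenever the integration is disabled or
 * required data (API key, book, metadata, ISBN) is missing.
 */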
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
class HardcoverSyncServiceTest {

    @Mock
    private AppSettingService appSettingService;

    @Mock
    private BookRepository bookRepository;

    @Mock
    private RestClient restClient;

    @Mock
    private RestClient.RequestBodyUriSpec requestBodyUriSpec;

    @Mock
    private RestClient.RequestBodySpec requestBodySpec;

    @Mock
    private RestClient.ResponseSpec responseSpec;

    private HardcoverSyncService service;

    private BookEntity testBook;
    private BookMetadataEntity testMetadata;
    private AppSettings appSettings;
    private MetadataProviderSettings.Hardcover hardcoverSettings;

    private static final Long TEST_BOOK_ID = 100L;

    @BeforeEach
    void setUp() throws Exception {
        // Create service with mocked dependencies
        service = new HardcoverSyncService(appSettingService, bookRepository);

        // Inject our mocked restClient using reflection
        Field restClientField = HardcoverSyncService.class.getDeclaredField("restClient");
        restClientField.setAccessible(true);
        restClientField.set(service, restClient);

        testBook = new BookEntity();
        testBook.setId(TEST_BOOK_ID);

        testMetadata = new BookMetadataEntity();
        testMetadata.setIsbn13("9781234567890");
        testMetadata.setPageCount(300);
        testBook.setMetadata(testMetadata);

        appSettings = new AppSettings();
        MetadataProviderSettings metadataSettings = new MetadataProviderSettings();
        hardcoverSettings = new MetadataProviderSettings.Hardcover();
        hardcoverSettings.setEnabled(true);
        hardcoverSettings.setApiKey("test-api-key");
        metadataSettings.setHardcover(hardcoverSettings);
        appSettings.setMetadataProviderSettings(metadataSettings);

        when(appSettingService.getAppSettings()).thenReturn(appSettings);
        when(bookRepository.findById(TEST_BOOK_ID)).thenReturn(Optional.of(testBook));

        // Setup RestClient mock chain - handles multiple calls
        when(restClient.post()).thenReturn(requestBodyUriSpec);
        when(requestBodyUriSpec.uri(anyString())).thenReturn(requestBodySpec);
        when(requestBodySpec.header(anyString(), anyString())).thenReturn(requestBodySpec);
        when(requestBodySpec.body(any())).thenReturn(requestBodySpec);
        when(requestBodySpec.retrieve()).thenReturn(responseSpec);
    }

    // === Tests for skipping sync (no API calls should be made) ===

    @Test
    @DisplayName("Should skip sync when Hardcover is not enabled")
    void syncProgressToHardcover_whenHardcoverDisabled_shouldSkip() {
        hardcoverSettings.setEnabled(false);

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when API key is missing")
    void syncProgressToHardcover_whenApiKeyMissing_shouldSkip() {
        hardcoverSettings.setApiKey(null);

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when API key is blank")
    void syncProgressToHardcover_whenApiKeyBlank_shouldSkip() {
        hardcoverSettings.setApiKey(" ");

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when progress is null")
    void syncProgressToHardcover_whenProgressNull_shouldSkip() {
        service.syncProgressToHardcover(TEST_BOOK_ID, null);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when book not found")
    void syncProgressToHardcover_whenBookNotFound_shouldSkip() {
        when(bookRepository.findById(TEST_BOOK_ID)).thenReturn(Optional.empty());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when book has no metadata")
    void syncProgressToHardcover_whenNoMetadata_shouldSkip() {
        testBook.setMetadata(null);

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    @Test
    @DisplayName("Should skip sync when no ISBN available")
    void syncProgressToHardcover_whenNoIsbn_shouldSkip() {
        testMetadata.setIsbn13(null);
        testMetadata.setIsbn10(null);

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, never()).post();
    }

    // === Tests for successful sync (API calls should be made) ===

    @Test
    @DisplayName("Should use stored hardcoverBookId when available")
    void syncProgressToHardcover_withStoredBookId_shouldUseStoredId() {
        testMetadata.setHardcoverBookId(12345);
        testMetadata.setPageCount(300);

        // Mock successful responses for the chain
        when(responseSpec.body(Map.class))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        // Verify API was called at least once (using stored ID, no search needed)
        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should search by ISBN when hardcoverBookId is not stored")
    void syncProgressToHardcover_withoutStoredBookId_shouldSearchByIsbn() {
        // Mock successful responses for the chain
        when(responseSpec.body(Map.class))
                .thenReturn(createSearchResponse(12345, 300))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        // Verify API was called at least once
        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should skip further processing when book not found on Hardcover")
    void syncProgressToHardcover_whenBookNotFoundOnHardcover_shouldSkipAfterSearch() {
        // Mock: search returns empty results
        when(responseSpec.body(Map.class)).thenReturn(createEmptySearchResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        // Should call search only
        verify(restClient, times(1)).post();
    }

    @Test
    @DisplayName("Should set status to READ when progress >= 99%")
    void syncProgressToHardcover_whenProgress99Percent_shouldMakeApiCalls() {
        testMetadata.setHardcoverBookId(12345);
        testMetadata.setPageCount(300);

        when(responseSpec.body(Map.class))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 99.0f);

        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should set status to CURRENTLY_READING when progress < 99%")
    void syncProgressToHardcover_whenProgressLessThan99_shouldMakeApiCalls() {
        testMetadata.setHardcoverBookId(12345);
        testMetadata.setPageCount(300);

        when(responseSpec.body(Map.class))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should handle existing user_book gracefully")
    void syncProgressToHardcover_whenUserBookExists_shouldFindExisting() {
        testMetadata.setHardcoverBookId(12345);

        // Mock: insert_user_book returns error, then find existing, then create progress
        when(responseSpec.body(Map.class))
                .thenReturn(createInsertUserBookResponse(null, "Book already exists"))
                .thenReturn(createFindUserBookResponse(5001))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should update existing reading progress")
    void syncProgressToHardcover_whenProgressExists_shouldUpdate() {
        testMetadata.setHardcoverBookId(12345);

        // Mock: insert_user_book -> find existing read -> update read
        when(responseSpec.body(Map.class))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createFindUserBookReadResponse(6001))
                .thenReturn(createUpdateUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, atLeastOnce()).post();
    }

    @Test
    @DisplayName("Should use ISBN10 when ISBN13 is missing")
    void syncProgressToHardcover_whenIsbn13Missing_shouldUseIsbn10() {
        testMetadata.setIsbn13(null);
        testMetadata.setIsbn10("1234567890");

        when(responseSpec.body(Map.class))
                .thenReturn(createSearchResponse(12345, 300))
                .thenReturn(createInsertUserBookResponse(5001, null))
                .thenReturn(createEmptyUserBookReadsResponse())
                .thenReturn(createInsertUserBookReadResponse());

        service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f);

        verify(restClient, atLeastOnce()).post();
    }

    // === Tests for error handling ===

    @Test
    @DisplayName("Should handle API errors gracefully")
    void syncProgressToHardcover_whenApiError_shouldNotThrow() {
        testMetadata.setHardcoverBookId(12345);

        when(responseSpec.body(Map.class)).thenReturn(Map.of("errors", List.of(Map.of("message", "Unauthorized"))));

        assertDoesNotThrow(() -> service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f));
    }

    @Test
    @DisplayName("Should handle null response gracefully")
    void syncProgressToHardcover_whenResponseNull_shouldNotThrow() {
        testMetadata.setHardcoverBookId(12345);

        when(responseSpec.body(Map.class)).thenReturn(null);

        assertDoesNotThrow(() -> service.syncProgressToHardcover(TEST_BOOK_ID, 50.0f));
    }

    // === Helper methods to create mock responses ===

    private Map<String, Object> createSearchResponse(Integer bookId, Integer pages) {
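        // Mirrors the nested GraphQL response shape: data -> search -> results -> hits[] -> document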
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> search = new HashMap<>();
        Map<String, Object> results = new HashMap<>();
        Map<String, Object> hit = new HashMap<>();
        Map<String, Object> document = new HashMap<>();

        document.put("id", bookId.toString());
        document.put("pages", pages);
        hit.put("document", document);
        results.put("hits", List.of(hit));
        search.put("results", results);
        data.put("search", search);
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createEmptySearchResponse() {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> search = new HashMap<>();
        Map<String, Object> results = new HashMap<>();

        results.put("hits", List.of());
        search.put("results", results);
        data.put("search", search);
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createInsertUserBookResponse(Integer userBookId, String error) {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> insertResult = new HashMap<>();

        if (userBookId != null) {
            Map<String, Object> userBook = new HashMap<>();
            userBook.put("id", userBookId);
            insertResult.put("user_book", userBook);
        }
        if (error != null) {
            insertResult.put("error", error);
        }

        data.put("insert_user_book", insertResult);
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createFindUserBookResponse(Integer userBookId) {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> me = new HashMap<>();
        Map<String, Object> userBook = new HashMap<>();

        userBook.put("id", userBookId);
        me.put("user_books", List.of(userBook));
        data.put("me", me);
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createInsertUserBookReadResponse() {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> insertResult = new HashMap<>();
        Map<String, Object> userBookRead = new HashMap<>();

        userBookRead.put("id", 6001);
        insertResult.put("user_book_read", userBookRead);
        data.put("insert_user_book_read", insertResult);
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createFindUserBookReadResponse(Integer readId) {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> read = new HashMap<>();

        read.put("id", readId);
        data.put("user_book_reads", List.of(read));
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createEmptyUserBookReadsResponse() {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();

        data.put("user_book_reads", List.of());
        response.put("data", data);

        return response;
    }

    private Map<String, Object> createUpdateUserBookReadResponse() {
        Map<String, Object> response = new HashMap<>();
        Map<String, Object> data = new HashMap<>();
        Map<String, Object> updateResult = new HashMap<>();
        Map<String, Object> userBookRead = new HashMap<>();

        userBookRead.put("id", 6001);
        userBookRead.put("progress", 50);
        updateResult.put("user_book_read", userBookRead);
        data.put("update_user_book_read", updateResult);
        response.put("data", data);

        return response;
    }
}

@@ -42,7 +42,7 @@ class CbxConversionIntegrationTest {
        File testCbzFile = createTestComicCbzFile();
        BookEntity bookMetadata = createTestBookMetadata();

        File epubFile = conversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), bookMetadata);
        File epubFile = conversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), bookMetadata,85);
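        // The new trailing int argument (85) is presumably an image-quality setting; the diff itself does not name it.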

        assertThat(epubFile)
                .exists()

@@ -42,7 +42,7 @@ class CbxConversionServiceTest {

    @Test
    void convertCbxToEpub_WithValidCbzFile_ShouldGenerateValidEpub() throws IOException, TemplateException, RarException {
        File epubFile = cbxConversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), testBookEntity);
        File epubFile = cbxConversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), testBookEntity,85);

        assertThat(epubFile).exists();
        assertThat(epubFile.getName()).endsWith(".epub");
@@ -53,7 +53,7 @@ class CbxConversionServiceTest {

    @Test
    void convertCbxToEpub_WithNullCbxFile_ShouldThrowException() {
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(null, tempDir.toFile(), testBookEntity))
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(null, tempDir.toFile(), testBookEntity,85))
                .isInstanceOf(IllegalArgumentException.class)
                .hasMessageContaining("Invalid CBX file");
    }
@@ -62,7 +62,7 @@ class CbxConversionServiceTest {
    void convertCbxToEpub_WithNonExistentFile_ShouldThrowException() {
        File nonExistentFile = new File(tempDir.toFile(), "non-existent.cbz");

        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(nonExistentFile, tempDir.toFile(), testBookEntity))
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(nonExistentFile, tempDir.toFile(), testBookEntity,85))
                .isInstanceOf(IllegalArgumentException.class)
                .hasMessageContaining("Invalid CBX file");
    }
@@ -71,14 +71,14 @@ class CbxConversionServiceTest {
    void convertCbxToEpub_WithUnsupportedFileFormat_ShouldThrowException() throws IOException {
        File unsupportedFile = Files.createFile(tempDir.resolve("test.txt")).toFile();

        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(unsupportedFile, tempDir.toFile(), testBookEntity))
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(unsupportedFile, tempDir.toFile(), testBookEntity,85))
                .isInstanceOf(IllegalArgumentException.class)
                .hasMessageContaining("Unsupported file format");
    }

    @Test
    void convertCbxToEpub_WithNullTempDir_ShouldThrowException() {
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(testCbzFile, null, testBookEntity))
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(testCbzFile, null, testBookEntity,85))
                .isInstanceOf(IllegalArgumentException.class)
                .hasMessageContaining("Invalid temp directory");
    }
@@ -87,7 +87,7 @@ class CbxConversionServiceTest {
    void convertCbxToEpub_WithEmptyCbzFile_ShouldThrowException() throws IOException {
        File emptyCbzFile = createEmptyCbzFile();

        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(emptyCbzFile, tempDir.toFile(), testBookEntity))
        assertThatThrownBy(() -> cbxConversionService.convertCbxToEpub(emptyCbzFile, tempDir.toFile(), testBookEntity,85))
                .isInstanceOf(IllegalStateException.class)
                .hasMessageContaining("No valid images found");
    }
@@ -118,7 +118,7 @@ class CbxConversionServiceTest {

    @Test
    void convertCbxToEpub_WithNullBookEntity_ShouldUseDefaultMetadata() throws IOException, TemplateException, RarException {
        File epubFile = cbxConversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), null);
        File epubFile = cbxConversionService.convertCbxToEpub(testCbzFile, tempDir.toFile(), null,85);

        assertThat(epubFile).exists();
        verifyEpubStructure(epubFile);
@@ -128,7 +128,7 @@ class CbxConversionServiceTest {
    void convertCbxToEpub_WithMultipleImages_ShouldPreservePageOrder() throws IOException, TemplateException, RarException {
        File multiPageCbzFile = createMultiPageCbzFile();

        File epubFile = cbxConversionService.convertCbxToEpub(multiPageCbzFile, tempDir.toFile(), testBookEntity);
        File epubFile = cbxConversionService.convertCbxToEpub(multiPageCbzFile, tempDir.toFile(), testBookEntity,85);

        assertThat(epubFile).exists();
        verifyPageOrderInEpub(epubFile, 5);

@@ -0,0 +1,885 @@
package com.adityachandel.booklore.service.metadata.extractor;

import com.adityachandel.booklore.model.dto.BookMetadata;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.time.LocalDate;
import java.util.Base64;

import static org.junit.jupiter.api.Assertions.*;

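/**
 * Tests for Fb2MetadataExtractor: title, authors, genres, language, annotation
 * and dates are read from an FB2 file's description/title-info block.
 */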
class Fb2MetadataExtractorTest {

    private static final String DEFAULT_TITLE = "The Seven Poor Travellers";
    private static final String DEFAULT_AUTHOR_FIRST = "Charles";
    private static final String DEFAULT_AUTHOR_LAST = "Dickens";
    private static final String DEFAULT_AUTHOR_FULL = "Charles Dickens";
    private static final String DEFAULT_GENRE = "antique";
    private static final String DEFAULT_LANGUAGE = "ru";
    private static final String DEFAULT_PUBLISHER = "Test Publisher";
    private static final String DEFAULT_ISBN = "9781234567890";
    private static final String DEFAULT_SERIES = "Great Works";

    private Fb2MetadataExtractor extractor;

    @TempDir
    Path tempDir;

    @BeforeEach
    void setUp() {
        extractor = new Fb2MetadataExtractor();
    }

    @Nested
    @DisplayName("Basic Metadata Extraction Tests")
    class BasicMetadataTests {

        @Test
        @DisplayName("Should extract title from title-info")
        void extractMetadata_withTitle_returnsTitle() throws IOException {
            String fb2Content = createFb2WithTitleInfo(
                    DEFAULT_TITLE,
                    DEFAULT_AUTHOR_FIRST,
                    DEFAULT_AUTHOR_LAST,
                    DEFAULT_GENRE,
                    DEFAULT_LANGUAGE
            );
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertEquals(DEFAULT_TITLE, result.getTitle());
        }

        @Test
        @DisplayName("Should extract author name from title-info")
        void extractMetadata_withAuthor_returnsAuthor() throws IOException {
            String fb2Content = createFb2WithTitleInfo(
                    DEFAULT_TITLE,
                    DEFAULT_AUTHOR_FIRST,
                    DEFAULT_AUTHOR_LAST,
                    DEFAULT_GENRE,
                    DEFAULT_LANGUAGE
            );
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertNotNull(result.getAuthors());
            assertEquals(1, result.getAuthors().size());
            assertTrue(result.getAuthors().contains(DEFAULT_AUTHOR_FULL));
        }

        @Test
        @DisplayName("Should extract multiple authors")
        void extractMetadata_withMultipleAuthors_returnsAllAuthors() throws IOException {
            String fb2Content = createFb2WithMultipleAuthors();
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertNotNull(result.getAuthors());
            assertEquals(2, result.getAuthors().size());
            assertTrue(result.getAuthors().contains("Charles Dickens"));
            assertTrue(result.getAuthors().contains("Jane Austen"));
        }

        @Test
        @DisplayName("Should extract genre as category")
        void extractMetadata_withGenre_returnsCategory() throws IOException {
            String fb2Content = createFb2WithTitleInfo(
                    DEFAULT_TITLE,
                    DEFAULT_AUTHOR_FIRST,
                    DEFAULT_AUTHOR_LAST,
                    DEFAULT_GENRE,
                    DEFAULT_LANGUAGE
            );
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertNotNull(result.getCategories());
            assertTrue(result.getCategories().contains(DEFAULT_GENRE));
        }

        @Test
        @DisplayName("Should extract multiple genres as categories")
        void extractMetadata_withMultipleGenres_returnsAllCategories() throws IOException {
            String fb2Content = createFb2WithMultipleGenres();
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertNotNull(result.getCategories());
            assertTrue(result.getCategories().contains("fiction"));
            assertTrue(result.getCategories().contains("drama"));
        }

        @Test
        @DisplayName("Should extract language")
        void extractMetadata_withLanguage_returnsLanguage() throws IOException {
            String fb2Content = createFb2WithTitleInfo(
                    DEFAULT_TITLE,
                    DEFAULT_AUTHOR_FIRST,
                    DEFAULT_AUTHOR_LAST,
                    DEFAULT_GENRE,
                    DEFAULT_LANGUAGE
            );
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertEquals(DEFAULT_LANGUAGE, result.getLanguage());
        }

        @Test
        @DisplayName("Should extract annotation as description")
        void extractMetadata_withAnnotation_returnsDescription() throws IOException {
            String annotation = "This is a test book description";
            String fb2Content = createFb2WithAnnotation(annotation);
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);

            assertNotNull(result);
            assertNotNull(result.getDescription());
            assertTrue(result.getDescription().contains(annotation));
        }
    }

    @Nested
    @DisplayName("Date Extraction Tests")
    class DateExtractionTests {

        @Test
        @DisplayName("Should extract date from title-info")
        void extractMetadata_withDate_returnsDate() throws IOException {
            String fb2Content = createFb2WithDate("2024-06-15");
            File fb2File = createFb2File(fb2Content);

            BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(LocalDate.of(2024, 6, 15), result.getPublishedDate());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract year-only date")
|
||||
void extractMetadata_withYearOnly_returnsDateWithJanuary1st() throws IOException {
|
||||
String fb2Content = createFb2WithDate("2024");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(LocalDate.of(2024, 1, 1), result.getPublishedDate());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle date with value attribute")
|
||||
void extractMetadata_withDateValue_returnsDate() throws IOException {
|
||||
String fb2Content = createFb2WithDateValue("2024-06-15", "June 15, 2024");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(LocalDate.of(2024, 6, 15), result.getPublishedDate());
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Series Metadata Tests")
|
||||
class SeriesMetadataTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract series name from sequence")
|
||||
void extractMetadata_withSequence_returnsSeriesName() throws IOException {
|
||||
String fb2Content = createFb2WithSequence(DEFAULT_SERIES, "3");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(DEFAULT_SERIES, result.getSeriesName());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract series number from sequence")
|
||||
void extractMetadata_withSequence_returnsSeriesNumber() throws IOException {
|
||||
String fb2Content = createFb2WithSequence(DEFAULT_SERIES, "3");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(3.0f, result.getSeriesNumber(), 0.001);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle decimal series numbers")
|
||||
void extractMetadata_withDecimalSequence_returnsDecimalSeriesNumber() throws IOException {
|
||||
String fb2Content = createFb2WithSequence(DEFAULT_SERIES, "2.5");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(2.5f, result.getSeriesNumber(), 0.001);
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Publisher Info Extraction Tests")
|
||||
class PublisherInfoTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract publisher from publish-info")
|
||||
void extractMetadata_withPublisher_returnsPublisher() throws IOException {
|
||||
String fb2Content = createFb2WithPublishInfo(DEFAULT_PUBLISHER, "2024", DEFAULT_ISBN);
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(DEFAULT_PUBLISHER, result.getPublisher());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract year from publish-info")
|
||||
void extractMetadata_withPublishYear_returnsDate() throws IOException {
|
||||
String fb2Content = createFb2WithPublishInfo(DEFAULT_PUBLISHER, "2024", null);
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals(LocalDate.of(2024, 1, 1), result.getPublishedDate());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract ISBN-13 from publish-info")
|
||||
void extractMetadata_withIsbn13_returnsIsbn13() throws IOException {
|
||||
String fb2Content = createFb2WithPublishInfo(null, null, "9781234567890");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals("9781234567890", result.getIsbn13());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract ISBN-10 from publish-info")
|
||||
void extractMetadata_withIsbn10_returnsIsbn10() throws IOException {
|
||||
String fb2Content = createFb2WithPublishInfo(null, null, "1234567890");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals("1234567890", result.getIsbn10());
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Keywords Extraction Tests")
|
||||
class KeywordsTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract keywords as categories")
|
||||
void extractMetadata_withKeywords_returnsCategories() throws IOException {
|
||||
String fb2Content = createFb2WithKeywords("adventure, mystery, thriller");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertNotNull(result.getCategories());
|
||||
assertTrue(result.getCategories().contains("adventure"));
|
||||
assertTrue(result.getCategories().contains("mystery"));
|
||||
assertTrue(result.getCategories().contains("thriller"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle keywords with semicolon separator")
|
||||
void extractMetadata_withSemicolonKeywords_returnsCategories() throws IOException {
|
||||
String fb2Content = createFb2WithKeywords("adventure; mystery; thriller");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertNotNull(result.getCategories());
|
||||
assertTrue(result.getCategories().contains("adventure"));
|
||||
assertTrue(result.getCategories().contains("mystery"));
|
||||
assertTrue(result.getCategories().contains("thriller"));
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Author Name Extraction Tests")
|
||||
class AuthorNameTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract author with first and last name")
|
||||
void extractMetadata_withFirstAndLastName_returnsFullName() throws IOException {
|
||||
String fb2Content = createFb2WithAuthorNames("John", null, "Doe", null);
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertTrue(result.getAuthors().contains("John Doe"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract author with first, middle and last name")
|
||||
void extractMetadata_withMiddleName_returnsFullNameWithMiddle() throws IOException {
|
||||
String fb2Content = createFb2WithAuthorNames("John", "Robert", "Doe", null);
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertTrue(result.getAuthors().contains("John Robert Doe"));
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should use nickname when name parts are missing")
|
||||
void extractMetadata_withNicknameOnly_returnsNickname() throws IOException {
|
||||
String fb2Content = createFb2WithAuthorNames(null, null, null, "WriterPro");
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertTrue(result.getAuthors().contains("WriterPro"));
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Cover Extraction Tests")
|
||||
class CoverExtractionTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract cover image from binary section")
|
||||
void extractCover_withCoverImage_returnsCoverBytes() throws IOException {
|
||||
byte[] imageData = createMinimalPngImage();
|
||||
String fb2Content = createFb2WithCover(imageData);
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
byte[] result = extractor.extractCover(fb2File);
|
||||
|
||||
assertNotNull(result);
|
||||
assertTrue(result.length > 0);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should return null when no cover present")
|
||||
void extractCover_noCover_returnsNull() throws IOException {
|
||||
String fb2Content = createMinimalFb2();
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
byte[] result = extractor.extractCover(fb2File);
|
||||
|
||||
assertNull(result);
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Complete Metadata Extraction Test")
|
||||
class CompleteMetadataTest {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should extract all metadata fields from complete FB2 with title-info")
|
||||
void extractMetadata_completeFile_extractsAllFields() throws IOException {
|
||||
String fb2Content = createCompleteFb2();
|
||||
File fb2File = createFb2File(fb2Content);
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(fb2File);
|
||||
|
||||
assertAll(
|
||||
() -> assertNotNull(result, "Metadata should not be null"),
|
||||
() -> assertEquals("Pride and Prejudice", result.getTitle(), "Title should be extracted"),
|
||||
() -> assertNotNull(result.getAuthors(), "Authors should not be null"),
|
||||
() -> assertEquals(1, result.getAuthors().size(), "Should have one author"),
|
||||
() -> assertTrue(result.getAuthors().contains("Jane Austen"), "Should contain full author name"),
|
||||
() -> assertNotNull(result.getCategories(), "Categories should not be null"),
|
||||
() -> assertTrue(result.getCategories().contains("romance"), "Should contain genre"),
|
||||
() -> assertEquals("en", result.getLanguage(), "Language should be extracted"),
|
||||
() -> assertNotNull(result.getDescription(), "Description should not be null"),
|
||||
() -> assertTrue(result.getDescription().contains("classic novel"), "Description should contain annotation text"),
|
||||
() -> assertEquals(LocalDate.of(1813, 1, 1), result.getPublishedDate(), "Published date should be extracted"),
|
||||
() -> assertEquals("T. Egerton", result.getPublisher(), "Publisher should be extracted"),
|
||||
() -> assertEquals("Classic Literature Series", result.getSeriesName(), "Series name should be extracted"),
|
||||
() -> assertEquals(2.0f, result.getSeriesNumber(), 0.001, "Series number should be extracted")
|
||||
);
|
||||
}
|
||||
|
||||
private String createCompleteFb2() {
|
||||
return """
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>romance</genre>
|
||||
<author>
|
||||
<first-name>Jane</first-name>
|
||||
<last-name>Austen</last-name>
|
||||
</author>
|
||||
<book-title>Pride and Prejudice</book-title>
|
||||
<annotation>
|
||||
<p>Pride and Prejudice is a classic novel by Jane Austen, first published in 1813. It is a romantic novel of manners that follows the character development of Elizabeth Bennet.</p>
|
||||
<p>The novel deals with issues of morality, education, and marriage in the society of the landed gentry of the British Regency. Elizabeth must learn the error of making hasty judgments and come to appreciate the difference between superficial goodness and actual goodness.</p>
|
||||
</annotation>
|
||||
<keywords>romance, regency, england, bennet, darcy, marriage</keywords>
|
||||
<date value="1813-01-01">1813</date>
|
||||
<lang>en</lang>
|
||||
<sequence name="Classic Literature Series" number="2"/>
|
||||
</title-info>
|
||||
<document-info>
|
||||
<author>
|
||||
<nickname>TestUser</nickname>
|
||||
</author>
|
||||
<date value="2024-01-01">January 1, 2024</date>
|
||||
<id>TestUser_PrideAndPrejudice_12345</id>
|
||||
<version>2.0</version>
|
||||
</document-info>
|
||||
<publish-info>
|
||||
<book-name>Pride and Prejudice</book-name>
|
||||
<publisher>T. Egerton</publisher>
|
||||
<city>London</city>
|
||||
<year>1813</year>
|
||||
</publish-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<title>
|
||||
<p>Chapter 1</p>
|
||||
</title>
|
||||
<p>It is a truth universally acknowledged, that a single man in possession of a good fortune, must be in want of a wife.</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""";
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
@DisplayName("Edge Cases and Error Handling")
|
||||
class EdgeCaseTests {
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle empty FB2 file gracefully")
|
||||
void extractMetadata_emptyFile_returnsNull() throws IOException {
|
||||
File emptyFile = tempDir.resolve("empty.fb2").toFile();
|
||||
try (FileOutputStream fos = new FileOutputStream(emptyFile)) {
|
||||
fos.write("".getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(emptyFile);
|
||||
|
||||
assertNull(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle invalid XML gracefully")
|
||||
void extractMetadata_invalidXml_returnsNull() throws IOException {
|
||||
File invalidFile = tempDir.resolve("invalid.fb2").toFile();
|
||||
try (FileOutputStream fos = new FileOutputStream(invalidFile)) {
|
||||
fos.write("this is not valid XML".getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(invalidFile);
|
||||
|
||||
assertNull(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle non-existent file gracefully")
|
||||
void extractMetadata_nonExistentFile_returnsNull() {
|
||||
File nonExistent = new File(tempDir.toFile(), "does-not-exist.fb2");
|
||||
|
||||
BookMetadata result = extractor.extractMetadata(nonExistent);
|
||||
|
||||
assertNull(result);
|
||||
}
|
||||
}
|
||||
|
||||
// Helper methods to create FB2 test files
|
||||
|
||||
private String createMinimalFb2() {
|
||||
return """
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Test Book</book-title>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Test content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""";
|
||||
}
|
||||
|
||||
private String createFb2WithTitleInfo(String title, String firstName, String lastName, String genre, String lang) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>%s</genre>
|
||||
<author>
|
||||
<first-name>%s</first-name>
|
||||
<last-name>%s</last-name>
|
||||
</author>
|
||||
<book-title>%s</book-title>
|
||||
<lang>%s</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", genre, firstName, lastName, title, lang);
|
||||
}
|
||||
|
||||
private String createFb2WithMultipleAuthors() {
|
||||
return """
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Charles</first-name>
|
||||
<last-name>Dickens</last-name>
|
||||
</author>
|
||||
<author>
|
||||
<first-name>Jane</first-name>
|
||||
<last-name>Austen</last-name>
|
||||
</author>
|
||||
<book-title>Collaborative Work</book-title>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""";
|
||||
}
|
||||
|
||||
private String createFb2WithMultipleGenres() {
|
||||
return """
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<genre>drama</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Multi-Genre Book</book-title>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""";
|
||||
}
|
||||
|
||||
private String createFb2WithAnnotation(String annotation) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Annotation</book-title>
|
||||
<annotation>
|
||||
<p>%s</p>
|
||||
</annotation>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", annotation);
|
||||
}
|
||||
|
||||
private String createFb2WithDate(String date) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Date</book-title>
|
||||
<date>%s</date>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", date);
|
||||
}
|
||||
|
||||
private String createFb2WithDateValue(String dateValue, String dateText) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Date Value</book-title>
|
||||
<date value="%s">%s</date>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", dateValue, dateText);
|
||||
}
|
||||
|
||||
private String createFb2WithSequence(String seriesName, String seriesNumber) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book in Series</book-title>
|
||||
<sequence name="%s" number="%s"/>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", seriesName, seriesNumber);
|
||||
}
|
||||
|
||||
private String createFb2WithPublishInfo(String publisher, String year, String isbn) {
|
||||
StringBuilder publishInfo = new StringBuilder();
|
||||
if (publisher != null) {
|
||||
publishInfo.append(String.format(" <publisher>%s</publisher>\n", publisher));
|
||||
}
|
||||
if (year != null) {
|
||||
publishInfo.append(String.format(" <year>%s</year>\n", year));
|
||||
}
|
||||
if (isbn != null) {
|
||||
publishInfo.append(String.format(" <isbn>%s</isbn>\n", isbn));
|
||||
}
|
||||
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Publish Info</book-title>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
<publish-info>
|
||||
%s </publish-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", publishInfo);
|
||||
}
|
||||
|
||||
private String createFb2WithKeywords(String keywords) {
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Keywords</book-title>
|
||||
<keywords>%s</keywords>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", keywords);
|
||||
}
|
||||
|
||||
private String createFb2WithAuthorNames(String firstName, String middleName, String lastName, String nickname) {
|
||||
StringBuilder authorInfo = new StringBuilder();
|
||||
if (firstName != null) {
|
||||
authorInfo.append(String.format(" <first-name>%s</first-name>\n", firstName));
|
||||
}
|
||||
if (middleName != null) {
|
||||
authorInfo.append(String.format(" <middle-name>%s</middle-name>\n", middleName));
|
||||
}
|
||||
if (lastName != null) {
|
||||
authorInfo.append(String.format(" <last-name>%s</last-name>\n", lastName));
|
||||
}
|
||||
if (nickname != null) {
|
||||
authorInfo.append(String.format(" <nickname>%s</nickname>\n", nickname));
|
||||
}
|
||||
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
%s </author>
|
||||
<book-title>Book with Complex Author</book-title>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
</FictionBook>
|
||||
""", authorInfo);
|
||||
}
|
||||
|
||||
private String createFb2WithCover(byte[] imageData) {
|
||||
String base64Image = Base64.getEncoder().encodeToString(imageData);
|
||||
return String.format("""
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<FictionBook xmlns="http://www.gribuser.ru/xml/fictionbook/2.0" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<description>
|
||||
<title-info>
|
||||
<genre>fiction</genre>
|
||||
<author>
|
||||
<first-name>Test</first-name>
|
||||
<last-name>Author</last-name>
|
||||
</author>
|
||||
<book-title>Book with Cover</book-title>
|
||||
<coverpage>
|
||||
<image xlink:href="#cover.jpg"/>
|
||||
</coverpage>
|
||||
<lang>en</lang>
|
||||
</title-info>
|
||||
</description>
|
||||
<body>
|
||||
<section>
|
||||
<p>Content</p>
|
||||
</section>
|
||||
</body>
|
||||
<binary id="cover.jpg" content-type="image/jpeg">%s</binary>
|
||||
</FictionBook>
|
||||
""", base64Image);
|
||||
}
|
||||
|
||||
private byte[] createMinimalPngImage() {
|
||||
return new byte[]{
|
||||
(byte) 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A,
|
||||
0x00, 0x00, 0x00, 0x0D,
|
||||
0x49, 0x48, 0x44, 0x52,
|
||||
0x00, 0x00, 0x00, 0x01,
|
||||
0x00, 0x00, 0x00, 0x01,
|
||||
0x08, 0x06,
|
||||
0x00, 0x00, 0x00,
|
||||
(byte) 0x90, (byte) 0x77, (byte) 0x53, (byte) 0xDE,
|
||||
0x00, 0x00, 0x00, 0x0A,
|
||||
0x49, 0x44, 0x41, 0x54,
|
||||
0x78, (byte) 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05,
|
||||
0x00, 0x01,
|
||||
0x0D, (byte) 0x0A, 0x2D, (byte) 0xB4,
|
||||
0x00, 0x00, 0x00, 0x00,
|
||||
0x49, 0x45, 0x4E, 0x44,
|
||||
(byte) 0xAE, 0x42, 0x60, (byte) 0x82
|
||||
};
|
||||
}
|
||||
|
||||
private File createFb2File(String content) throws IOException {
|
||||
File fb2File = tempDir.resolve("test-" + System.nanoTime() + ".fb2").toFile();
|
||||
try (FileOutputStream fos = new FileOutputStream(fb2File)) {
|
||||
fos.write(content.getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
return fb2File;
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import com.adityachandel.booklore.model.dto.Library;
import com.adityachandel.booklore.model.dto.OpdsUserV2;
import com.adityachandel.booklore.model.entity.ShelfEntity;
import com.adityachandel.booklore.model.enums.BookFileType;
import com.adityachandel.booklore.model.enums.OpdsSortOrder;
import com.adityachandel.booklore.service.MagicShelfService;
import jakarta.servlet.http.HttpServletRequest;
import org.junit.jupiter.api.BeforeEach;
@@ -51,6 +52,7 @@ class OpdsFeedServiceTest {
        OpdsUserV2 v2 = mock(OpdsUserV2.class);
        when(userDetails.getOpdsUserV2()).thenReturn(v2);
        when(v2.getUserId()).thenReturn(TEST_USER_ID);
        when(v2.getSortOrder()).thenReturn(OpdsSortOrder.RECENT);
        when(authenticationService.getOpdsUser()).thenReturn(userDetails);
        return userDetails;
    }
@@ -152,6 +154,7 @@ class OpdsFeedServiceTest {

        Page<Book> page = new PageImpl<>(List.of(book), PageRequest.of(0, 50), 1);
        when(opdsBookService.getBooksPage(eq(TEST_USER_ID), any(), any(), any(), eq(0), eq(50))).thenReturn(page);
        when(opdsBookService.applySortOrder(any(), any())).thenReturn(page);

        String xml = opdsFeedService.generateCatalogFeed(request);
        assertThat(xml).contains("Book Title");
@@ -173,6 +176,7 @@ class OpdsFeedServiceTest {

        Page<Book> page = new PageImpl<>(Collections.emptyList(), PageRequest.of(0, 50), 0);
        when(opdsBookService.getBooksPage(any(), any(), any(), any(), anyInt(), anyInt())).thenReturn(page);
        when(opdsBookService.applySortOrder(any(), any())).thenReturn(page);

        String xml = opdsFeedService.generateCatalogFeed(request);
        assertThat(xml).contains("</feed>");
@@ -196,6 +200,7 @@ class OpdsFeedServiceTest {

        Page<Book> page = new PageImpl<>(List.of(book), PageRequest.of(0, 50), 1);
        when(opdsBookService.getRecentBooksPage(eq(TEST_USER_ID), eq(0), eq(50))).thenReturn(page);
        when(opdsBookService.applySortOrder(any(), any())).thenReturn(page);

        String xml = opdsFeedService.generateRecentFeed(request);
        assertThat(xml).contains("Recent Book");
@@ -214,6 +219,7 @@ class OpdsFeedServiceTest {

        Page<Book> page = new PageImpl<>(Collections.emptyList(), PageRequest.of(0, 50), 0);
        when(opdsBookService.getRecentBooksPage(any(), anyInt(), anyInt())).thenReturn(page);
        when(opdsBookService.applySortOrder(any(), any())).thenReturn(page);

        String xml = opdsFeedService.generateRecentFeed(request);
        assertThat(xml).contains("</feed>");

@@ -1,7 +1,11 @@
package com.adityachandel.booklore.util;

import com.adityachandel.booklore.config.AppProperties;
import com.adityachandel.booklore.model.dto.settings.AppSettings;
import com.adityachandel.booklore.model.dto.settings.CoverCroppingSettings;
import com.adityachandel.booklore.model.entity.BookMetadataEntity;
import com.adityachandel.booklore.repository.BookMetadataRepository;
import com.adityachandel.booklore.service.appsettings.AppSettingService;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
@@ -41,6 +45,9 @@ class FileServiceTest {
    @Mock
    private AppProperties appProperties;

    @Mock
    private AppSettingService appSettingService;

    private FileService fileService;

    @TempDir
@@ -48,7 +55,17 @@ class FileServiceTest {

    @BeforeEach
    void setup() {
        fileService = new FileService(appProperties, mock(RestTemplate.class)); // mock RestTemplate for most tests
        CoverCroppingSettings coverCroppingSettings = CoverCroppingSettings.builder()
                .verticalCroppingEnabled(true)
                .horizontalCroppingEnabled(true)
                .aspectRatioThreshold(2.5)
                .build();
        AppSettings appSettings = AppSettings.builder()
                .coverCroppingSettings(coverCroppingSettings)
                .build();
        lenient().when(appSettingService.getAppSettings()).thenReturn(appSettings);

        fileService = new FileService(appProperties, mock(RestTemplate.class), appSettingService, mock(BookMetadataRepository.class));
    }

    @Nested
@@ -614,6 +631,116 @@ class FileServiceTest {
        }
    }

    @Nested
    @DisplayName("Cover Cropping for Extreme Aspect Ratios")
    class CoverCroppingTests {

        @Test
        @DisplayName("extremely tall image is cropped when vertical cropping enabled")
        void extremelyTallImage_isCropped() throws IOException {
            // Create an extremely tall image like a web comic page (ratio > 2.5)
            int width = 940;
            int height = 11280; // ratio = 12:1

            BufferedImage tallImage = createTestImage(width, height);
            boolean result = fileService.saveCoverImages(tallImage, 100L);

            assertTrue(result);

            BufferedImage savedCover = ImageIO.read(
                    new File(fileService.getCoverFile(100L)));

            assertNotNull(savedCover);

            // The image should be cropped to approximately 1.5:1 ratio from the top
            double savedRatio = (double) savedCover.getHeight() / savedCover.getWidth();
            assertTrue(savedRatio < 3.0,
                    "Cropped image should have reasonable aspect ratio, was: " + savedRatio);
        }

        @Test
        @DisplayName("extremely wide image is cropped when horizontal cropping enabled")
        void extremelyWideImage_isCropped() throws IOException {
            // Create an extremely wide image (ratio > 2.5)
            int width = 3000;
            int height = 400; // width/height ratio = 7.5:1

            BufferedImage wideImage = createTestImage(width, height);
            boolean result = fileService.saveCoverImages(wideImage, 101L);

            assertTrue(result);

            BufferedImage savedCover = ImageIO.read(
                    new File(fileService.getCoverFile(101L)));

            assertNotNull(savedCover);

            // The image should be cropped to a more reasonable aspect ratio
            double savedRatio = (double) savedCover.getWidth() / savedCover.getHeight();
            assertTrue(savedRatio < 3.0,
                    "Cropped image should have reasonable aspect ratio, was: " + savedRatio);
        }

        @Test
        @DisplayName("normal aspect ratio image is not cropped")
        void normalAspectRatioImage_isNotCropped() throws IOException {
            // Create a normal book cover sized image (ratio ~1.5:1)
            int width = 600;
            int height = 900; // ratio = 1.5:1

            BufferedImage normalImage = createTestImage(width, height);
            boolean result = fileService.saveCoverImages(normalImage, 102L);

            assertTrue(result);

            BufferedImage savedCover = ImageIO.read(
                    new File(fileService.getCoverFile(102L)));

            assertNotNull(savedCover);

            // The image should maintain its original aspect ratio
            double originalRatio = (double) height / width;
            double savedRatio = (double) savedCover.getHeight() / savedCover.getWidth();
            assertEquals(originalRatio, savedRatio, 0.01,
                    "Normal aspect ratio image should not be cropped");
        }

        @Test
        @DisplayName("cropping is disabled when settings are off")
        void croppingDisabled_imageNotCropped() throws IOException {
            // Reconfigure with cropping disabled
            CoverCroppingSettings disabledSettings = CoverCroppingSettings.builder()
                    .verticalCroppingEnabled(false)
                    .horizontalCroppingEnabled(false)
                    .aspectRatioThreshold(2.5)
                    .build();
            AppSettings appSettings = AppSettings.builder()
                    .coverCroppingSettings(disabledSettings)
                    .build();
            when(appSettingService.getAppSettings()).thenReturn(appSettings);

            // Create an extremely tall image
            int width = 400;
            int height = 4000; // ratio = 10:1

            BufferedImage tallImage = createTestImage(width, height);
            boolean result = fileService.saveCoverImages(tallImage, 103L);

            assertTrue(result);

            BufferedImage savedCover = ImageIO.read(
                    new File(fileService.getCoverFile(103L)));

            assertNotNull(savedCover);

            // Since the image exceeds max dimensions, it will be scaled, but aspect ratio preserved
            double originalRatio = (double) height / width;
            double savedRatio = (double) savedCover.getHeight() / savedCover.getWidth();
            assertEquals(originalRatio, savedRatio, 0.01,
                    "Image should not be cropped when cropping is disabled");
        }
    }

    @Nested
    @DisplayName("createThumbnailFromFile")
    class CreateThumbnailFromFileTests {
@@ -823,12 +950,26 @@ class FileServiceTest {
    @Mock
    private RestTemplate restTemplate;

    @Mock
    private AppSettingService appSettingServiceForNetwork;

    private FileService fileService;

    @BeforeEach
    void setup() {
        lenient().when(appProperties.getPathConfig()).thenReturn(tempDir.toString());
        fileService = new FileService(appProperties, restTemplate);

        CoverCroppingSettings coverCroppingSettings = CoverCroppingSettings.builder()
                .verticalCroppingEnabled(true)
                .horizontalCroppingEnabled(true)
                .aspectRatioThreshold(2.5)
                .build();
        AppSettings appSettings = AppSettings.builder()
                .coverCroppingSettings(coverCroppingSettings)
                .build();
        lenient().when(appSettingServiceForNetwork.getAppSettings()).thenReturn(appSettings);

        fileService = new FileService(appProperties, restTemplate, appSettingServiceForNetwork, mock(BookMetadataRepository.class));
    }

    @Nested
@@ -844,7 +985,8 @@ class FileServiceTest {
        byte[] imageBytes = imageToBytes(testImage);

        RestTemplate mockRestTemplate = mock(RestTemplate.class);
        FileService testFileService = new FileService(appProperties, mockRestTemplate);
        AppSettingService mockAppSettingService = mock(AppSettingService.class);
        FileService testFileService = new FileService(appProperties, mockRestTemplate, mockAppSettingService, mock(BookMetadataRepository.class));

        ResponseEntity<byte[]> responseEntity = ResponseEntity.ok(imageBytes);
        when(mockRestTemplate.exchange(

@@ -510,4 +510,104 @@ class PathPatternResolverTest {
        assertTrue(components[0].equals("Author"));
        assertTrue(components[1].equals("Series"));
    }

    @Test
    @DisplayName("Should preserve extension for files with numeric patterns in name (e.g., Chapter 8.1.cbz)")
    void testResolvePattern_filenameWithNumericPattern() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Comic Title")
                .seriesName("Series Name")
                .seriesNumber(8.1f)
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{series} - Chapter {seriesIndex}", "original.cbz");

        assertEquals("Series Name - Chapter 8.1.cbz", result, "Extension should be preserved for files with numeric patterns");
    }

    @Test
    @DisplayName("Should preserve extension for files with multiple dots in name")
    void testResolvePattern_filenameWithMultipleDots() {
        BookMetadata metadata = BookMetadata.builder()
                .title("My.Awesome.Book")
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{title}", "My.Awesome.Book.epub");

        assertEquals("My.Awesome.Book.epub", result, "Extension should be preserved for files with dots in title");
    }

    @Test
    @DisplayName("Should add extension when pattern doesn't include it")
    void testResolvePattern_extensionNotInPattern() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Book Title")
                .authors(Set.of("Author Name"))
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{authors} - {title}", "original.pdf");

        assertEquals("Author Name - Book Title.pdf", result, "Extension should be added automatically");
    }

    @Test
    @DisplayName("Should not add extension when using {currentFilename} in subdirectory")
    void testResolvePattern_currentFilenameWithPath() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Book Title")
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "books/{currentFilename}", "My.File.With.Dots.epub");

        assertEquals("books/My.File.With.Dots.epub", result, "Extension should not be added when {currentFilename} is used, even with dots in name");
    }

    @Test
    @DisplayName("Should handle title with dots and numeric suffix without duplicating extension")
    void testResolvePattern_titleWithDotsAndNumericSuffix() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Chapter.8.1")
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{title}", "Chapter.8.1.cbz");

        assertEquals("Chapter.8.1.cbz", result, "Should not treat .1 as extension");
    }

    @Test
    @DisplayName("Should preserve CBZ extension for comic files with chapter numbers")
    void testResolvePattern_comicWithChapterNumber() {
        BookMetadata metadata = BookMetadata.builder()
                .seriesName("One Punch Man")
                .seriesNumber(8.1f)
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{series} - Chapter {seriesIndex}", "One Punch Man - Chapter 8.1.cbz");

        assertEquals("One Punch Man - Chapter 8.1.cbz", result, "CBZ extension should be preserved for comics");
    }

    @Test
    @DisplayName("Should handle files with only numeric extension-like pattern correctly")
    void testResolvePattern_numericExtensionLikePattern() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Book Version 2")
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{title}.1", "original.epub");

        assertEquals("Book Version 2.1.epub", result, "Should add real extension even when pattern ends with .1");
    }

    @Test
    @DisplayName("Should handle empty extension gracefully")
    void testResolvePattern_noExtension() {
        BookMetadata metadata = BookMetadata.builder()
                .title("Book Title")
                .build();

        String result = PathPatternResolver.resolvePattern(metadata, "{title}", "fileWithoutExtension");

        assertEquals("Book Title", result, "Should not add extension when original file has none");
    }
}
@@ -22,7 +22,7 @@
      "prefix": "app",
      "architect": {
        "build": {
          "builder": "@angular-devkit/build-angular:application",
          "builder": "@angular/build:application",
          "options": {
            "outputPath": "dist/booklore",
            "index": "src/index.html",
@@ -91,7 +91,7 @@
          "defaultConfiguration": "production"
        },
        "serve": {
          "builder": "@angular-devkit/build-angular:dev-server",
          "builder": "@angular/build:dev-server",
          "configurations": {
            "production": {
              "buildTarget": "booklore:build:production"
@@ -103,10 +103,10 @@
          "defaultConfiguration": "development"
        },
        "extract-i18n": {
          "builder": "@angular-devkit/build-angular:extract-i18n"
          "builder": "@angular/build:extract-i18n"
        },
        "test": {
          "builder": "@angular-devkit/build-angular:karma",
          "builder": "@angular/build:karma",
          "options": {
            "polyfills": [
              "zone.js",

booklore-ui/package-lock.json (generated, 9005 lines changed)
File diff suppressed because it is too large
@@ -12,17 +12,17 @@
  },
  "private": true,
  "dependencies": {
    "@angular/animations": "^20.3.5",
    "@angular/cdk": "^20.2.9",
    "@angular/common": "^20.3.5",
    "@angular/compiler": "^20.3.5",
    "@angular/core": "^20.3.5",
    "@angular/forms": "^20.3.5",
    "@angular/platform-browser": "^20.3.5",
    "@angular/platform-browser-dynamic": "^20.3.5",
    "@angular/router": "^20.3.5",
    "@angular/animations": "^21.0.5",
    "@angular/cdk": "^21.0.3",
    "@angular/common": "^21.0.5",
    "@angular/compiler": "^21.0.5",
    "@angular/core": "^21.0.5",
    "@angular/forms": "^21.0.5",
    "@angular/platform-browser": "^21.0.5",
    "@angular/platform-browser-dynamic": "^21.0.5",
    "@angular/router": "^21.0.5",
    "@iharbeck/ngx-virtual-scroller": "^19.0.1",
    "@primeng/themes": "^20.4.0",
    "@primeng/themes": "^21.0.2",
    "@stomp/rx-stomp": "^2.3.0",
    "@stomp/stompjs": "^7.2.1",
    "@tweenjs/tween.js": "^25.0.0",
@@ -34,10 +34,10 @@
    "jwt-decode": "^4.0.0",
    "ng-lazyload-image": "^9.1.3",
    "ng2-charts": "^8.0.0",
    "ngx-extended-pdf-viewer": "^25.6.1",
    "ngx-infinite-scroll": "^20.0.0",
    "ngx-extended-pdf-viewer": "^25.6.4",
    "ngx-infinite-scroll": "^21.0.0",
    "primeicons": "^7.0.0",
    "primeng": "^20.4.0",
    "primeng": "^21.0.2",
    "quill": "^2.0.3",
    "rxjs": "^7.8.2",
    "showdown": "^2.1.0",
@@ -47,16 +47,16 @@
    "zone.js": "^0.16.0"
  },
  "devDependencies": {
    "@angular-devkit/build-angular": "^20.3.5",
    "@angular/cli": "^20.3.5",
    "@angular/compiler-cli": "^20.3.5",
    "@angular/build": "^21.0.3",
    "@angular/cli": "^21.0.3",
    "@angular/compiler-cli": "^21.0.5",
    "@tailwindcss/typography": "^0.5.19",
    "@types/jasmine": "^5.1.13",
    "@types/showdown": "^2.0.6",
    "angular-eslint": "^20.3.5",
    "autoprefixer": "^10.4.22",
    "eslint": "^9.39.1",
    "jasmine-core": "^5.12.1",
    "angular-eslint": "^21.1.0",
    "autoprefixer": "^10.4.23",
    "eslint": "^9.39.2",
    "jasmine-core": "^5.13.0",
    "karma": "^6.4.4",
    "karma-chrome-launcher": "^3.2.0",
    "karma-coverage": "^2.2.1",
@@ -64,6 +64,6 @@
    "karma-jasmine-html-reporter": "^2.1.0",
    "tailwindcss": "^3.4.17",
    "typescript": "~5.9.3",
    "typescript-eslint": "^8.48.0"
    "typescript-eslint": "^8.50.0"
  }
}
@@ -73,7 +73,6 @@ function handle401Error(authService: AuthService, request: HttpRequest<any>, nex
}

function forceLogout(authService: AuthService, router: Router, message?: string): void {
  authService.logout();
  router.navigate(['/login']);
  if (message) console.warn(message);
  authService.logout();
}

@@ -1,5 +1,5 @@
import { Component, OnInit, OnDestroy, ChangeDetectorRef } from '@angular/core';
import { CommonModule } from '@angular/common';

import { FormsModule } from '@angular/forms';
import { DynamicDialogRef, DynamicDialogConfig } from 'primeng/dynamicdialog';
import { Select } from 'primeng/select';
@@ -29,14 +29,13 @@ interface UploadingFile {
  selector: 'app-additional-file-uploader',
  standalone: true,
  imports: [
    CommonModule,
    FormsModule,
    Select,
    Button,
    FileUpload,
    Badge,
    Tooltip
  ],
  ],
  templateUrl: './additional-file-uploader.component.html',
  styleUrls: ['./additional-file-uploader.component.scss']
})

@@ -1,5 +1,6 @@
import {inject, Injectable} from '@angular/core';
import {DialogService, DynamicDialogRef} from 'primeng/dynamicdialog';
import {DynamicDialogRef} from 'primeng/dynamicdialog';
import {DialogLauncherService} from '../../../../shared/services/dialog-launcher.service';
import {ShelfAssignerComponent} from '../shelf-assigner/shelf-assigner.component';
import {LockUnlockMetadataDialogComponent} from './lock-unlock-metadata-dialog/lock-unlock-metadata-dialog.component';
import {MetadataRefreshType} from '../../../metadata/model/request/metadata-refresh-type.enum';
@@ -8,50 +9,61 @@ import {MultiBookMetadataEditorComponent} from '../../../metadata/component/mult
import {MultiBookMetadataFetchComponent} from '../../../metadata/component/multi-book-metadata-fetch/multi-book-metadata-fetch-component';
import {FileMoverComponent} from '../../../../shared/components/file-mover/file-mover-component';
import {ShelfCreatorComponent} from '../shelf-creator/shelf-creator.component';
import {BookSenderComponent} from '../book-sender/book-sender.component';
import {MetadataFetchOptionsComponent} from '../../../metadata/component/metadata-options-dialog/metadata-fetch-options/metadata-fetch-options.component';
import {BookMetadataCenterComponent} from '../../../metadata/component/book-metadata-center/book-metadata-center.component';
import {CoverSearchComponent} from '../../../metadata/component/cover-search/cover-search.component';
import {Book} from '../../model/book.model';
import {AdditionalFileUploaderComponent} from '../additional-file-uploader/additional-file-uploader.component';

@Injectable({providedIn: 'root'})
export class BookDialogHelperService {

  private dialogService = inject(DialogService);
  private dialogLauncherService = inject(DialogLauncherService);

  openShelfAssigner(bookIds: Set<number>): DynamicDialogRef | null {
    return this.dialogService.open(ShelfAssignerComponent, {
      showHeader: false,
      modal: true,
      closable: true,
      contentStyle: {overflow: 'hidden'},
      styleClass: 'dynamic-dialog-minimal',
      baseZIndex: 10,
  private openDialog(component: any, options: {}): DynamicDialogRef | null {
    return this.dialogLauncherService.openDialog(component, options);
  }

  openBookDetailsDialog(bookId: number): DynamicDialogRef | null {
    return this.openDialog(BookMetadataCenterComponent, {
      header: 'Book Details',
      styleClass: 'book-details-dialog dialog-maximal',
      data: {
        isMultiBooks: true,
        bookIds,
        bookId: bookId,
      },
    });
  }

  openShelfCreator(): DynamicDialogRef {
    return this.dialogService.open(ShelfCreatorComponent, {
  openShelfAssignerDialog(book: Book | null, bookIds: Set<number> | null): DynamicDialogRef | null {
    const data: any = {};
    if (book !== null) {
      data.isMultiBooks = false;
      data.book = book;
    } else if (bookIds !== null) {
      data.isMultiBooks = true;
      data.bookIds = bookIds;
    } else {
      return null;
    }
    return this.openDialog(ShelfAssignerComponent, {
      showHeader: false,
      data: data,
      styleClass: 'dynamic-dialog-minimal',
    });
  }

  openShelfCreatorDialog(): DynamicDialogRef {
    return this.openDialog(ShelfCreatorComponent, {
      showHeader: false,
      modal: true,
      draggable: false,
      dismissableMask: true,
      closable: true,
      contentStyle: {overflow: 'auto'},
      styleClass: 'dynamic-dialog-minimal',
      baseZIndex: 10,
      style: {
        position: 'absolute',
        top: '15%',
      },
    })!;
  }

  openLockUnlockMetadataDialog(bookIds: Set<number>): DynamicDialogRef | null {
    const count = bookIds.size;
    return this.dialogService.open(LockUnlockMetadataDialogComponent, {
    return this.openDialog(LockUnlockMetadataDialogComponent, {
      header: `Lock or Unlock Metadata for ${count} Selected Book${count > 1 ? 's' : ''}`,
      modal: true,
      closable: true,
      data: {
        bookIds: Array.from(bookIds),
      },
@@ -59,70 +71,83 @@ export class BookDialogHelperService {
  }

  openMetadataRefreshDialog(bookIds: Set<number>): DynamicDialogRef | null {
    return this.dialogService.open(MultiBookMetadataFetchComponent, {
    return this.openDialog(MultiBookMetadataFetchComponent, {
      header: 'Metadata Refresh Options',
      modal: true,
      closable: true,
      data: {
        bookIds: Array.from(bookIds),
        metadataRefreshType: MetadataRefreshType.BOOKS,
      },
      styleClass: 'dialog-maximal',
    });
  }

  openBulkMetadataEditDialog(bookIds: Set<number>): DynamicDialogRef | null {
    return this.dialogService.open(BulkMetadataUpdateComponent, {
    return this.openDialog(BulkMetadataUpdateComponent, {
      header: 'Bulk Edit Metadata',
      modal: true,
      closable: true,
      style: {
        width: '90vw',
        maxWidth: '1200px',
        position: 'absolute'
      },
      data: {
        bookIds: Array.from(bookIds)
        bookIds: Array.from(bookIds),
      },
      styleClass: 'dialog-maximal'
    });
  }

  openMultibookMetadataEditorDialog(bookIds: Set<number>): DynamicDialogRef | null {
    return this.dialogService.open(MultiBookMetadataEditorComponent, {
      header: 'Bulk Edit Metadata',
      showHeader: false,
      modal: true,
      closable: true,
      closeOnEscape: true,
      dismissableMask: true,
      style: {
        width: '95vw',
        overflow: 'none',
      },
    return this.openDialog(MultiBookMetadataEditorComponent, {
      header: 'Multi-Book Metadata Editor',
      data: {
        bookIds: Array.from(bookIds)
        bookIds: Array.from(bookIds),
      },
      styleClass: 'dialog-full'
    });
  }

  openFileMoverDialog(selectedBooks: Set<number>) {
    const count = selectedBooks.size;
    return this.dialogService.open(FileMoverComponent, {
  openFileMoverDialog(bookIds: Set<number>): DynamicDialogRef | null {
    const count = bookIds.size;
    return this.openDialog(FileMoverComponent, {
      header: `Organize Book Files (${count} book${count !== 1 ? 's' : ''})`,
      showHeader: true,
      maximizable: true,
      modal: true,
      closable: true,
      closeOnEscape: false,
      dismissableMask: false,
      style: {
        width: '95vw',
        maxWidth: '97.5vw',
        height: '90vh',
        maxHeight: '95vh'
      },
      data: {
        bookIds: selectedBooks
        bookIds: Array.from(bookIds),
      },
      styleClass: 'dialog-full',
      maximizable: true,
    });
  }

  openCustomSendDialog(bookId: number): DynamicDialogRef | null {
    return this.openDialog(BookSenderComponent, {
      header: 'Send Book to Email',
      data: {
        bookId: bookId,
      }
    });
  }

  openCoverSearchDialog(bookId: number): DynamicDialogRef | null {
    return this.openDialog(CoverSearchComponent, {
      header: "Search Cover",
      data: {
        bookId: bookId,
      },
      styleClass: 'dialog-maximal',
    });
  }

  openMetadataFetchOptionsDialog(bookId: number): DynamicDialogRef | null {
    return this.openDialog(MetadataFetchOptionsComponent, {
      header: 'Metadata Refresh Options',
      data: {
        bookIds: [bookId],
        metadataRefreshType: MetadataRefreshType.BOOKS,
      }
    });
  }

  openAdditionalFileUploaderDialog(book: Book): DynamicDialogRef | null {
    return this.openDialog(AdditionalFileUploaderComponent, {
      header: 'Upload Additional File',
      data: {
        book: book,
      }
    });
  }
}

@@ -50,6 +50,7 @@ import {GroupRule} from '../../../magic-shelf/component/magic-shelf-component';
import {TaskHelperService} from '../../../settings/task-management/task-helper.service';
import {FilterLabelHelper} from './filter-label.helper';
import {LoadingService} from '../../../../core/services/loading.service';
import {BookNavigationService} from '../../service/book-navigation.service';

export enum EntityType {
  LIBRARY = 'Library',
@@ -118,10 +119,10 @@ export class BookBrowserComponent implements OnInit, AfterViewInit {
  protected confirmationService = inject(ConfirmationService);
  protected magicShelfService = inject(MagicShelfService);
  protected bookRuleEvaluatorService = inject(BookRuleEvaluatorService);
  protected taskHelperService = inject(TaskHelperService);
  private pageTitle = inject(PageTitleService);
  private loadingService = inject(LoadingService);

  protected taskHelperService = inject(TaskHelperService);
  private bookNavigationService = inject(BookNavigationService);

  bookState$: Observable<BookState> | undefined;
  entity$: Observable<Library | Shelf | MagicShelf | null> | undefined;
@@ -256,6 +257,7 @@ export class BookBrowserComponent implements OnInit, AfterViewInit {
      () => this.fetchMetadata(),
      () => this.bulkEditMetadata(),
      () => this.multiBookEditMetadata(),
      () => this.regenerateCoversForSelected(),
    );
    this.tieredMenuItems = this.bookMenuService.getTieredMenuItems(this.selectedBooks);

@@ -584,6 +586,7 @@ export class BookBrowserComponent implements OnInit, AfterViewInit {
      )
      .subscribe(books => {
        this.currentBooks = books;
        this.bookNavigationService.setAvailableBookIds(books.map(book => book.id));
      });
  }

@@ -639,7 +642,7 @@ export class BookBrowserComponent implements OnInit, AfterViewInit {
  }

  openShelfAssigner(): void {
    this.dynamicDialogRef = this.dialogHelperService.openShelfAssigner(this.selectedBooks);
    this.dynamicDialogRef = this.dialogHelperService.openShelfAssignerDialog(null, this.selectedBooks);
  }

  lockUnlockMetadata(): void {
@@ -666,6 +669,38 @@ export class BookBrowserComponent implements OnInit, AfterViewInit {
    this.dialogHelperService.openMultibookMetadataEditorDialog(this.selectedBooks);
  }

  regenerateCoversForSelected(): void {
    if (!this.selectedBooks || this.selectedBooks.size === 0) return;
    const count = this.selectedBooks.size;
    this.confirmationService.confirm({
      message: `Are you sure you want to regenerate covers for ${count} book(s)?`,
      header: 'Confirm Cover Regeneration',
      icon: 'pi pi-image',
      acceptLabel: 'Yes',
      rejectLabel: 'No',
      accept: () => {
        this.bookService.regenerateCoversForBooks(Array.from(this.selectedBooks)).subscribe({
          next: () => {
            this.messageService.add({
              severity: 'success',
              summary: 'Cover Regeneration Started',
              detail: `Regenerating covers for ${count} book(s). Refresh the page when complete.`,
              life: 3000
            });
          },
          error: () => {
            this.messageService.add({
              severity: 'error',
              summary: 'Failed',
              detail: 'Could not start cover regeneration.',
              life: 3000
            });
          }
        });
      }
    });
  }

  moveFiles() {
    this.dialogHelperService.openFileMoverDialog(this.selectedBooks);
  }
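
regenerateCoversForBooks on BookService is called above but defined outside the shown hunks. A sketch of the shape the call site implies — an Observable-returning bulk request; the endpoint path, payload, and apiUrl field below are illustrative assumptions, not the project's actual API:

import {inject, Injectable} from '@angular/core';
import {HttpClient} from '@angular/common/http';
import {Observable} from 'rxjs';

@Injectable({providedIn: 'root'})
export class BookService {
  private http = inject(HttpClient);
  private apiUrl = '/api/v1'; // assumed base path, for illustration only

  // Sketch only: fire-and-forget bulk request; the component above
  // only needs the next/error notifications it subscribes to.
  regenerateCoversForBooks(bookIds: number[]): Observable<void> {
    return this.http.post<void>(`${this.apiUrl}/books/regenerate-covers`, {bookIds});
  }
}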
@@ -38,7 +38,7 @@
      <p-button [rounded]="true" icon="pi pi-info" class="info-btn" (click)="openBookInfo(book)"></p-button>
    }

    <p-button [hidden]="isSeriesViewActive()" [rounded]="true" icon="pi pi-book" class="read-btn" (click)="readBook(book)"></p-button>
    <p-button [hidden]="isSeriesViewActive() || !canReadBook()" [rounded]="true" icon="pi pi-book" class="read-btn" (click)="readBook(book)"></p-button>

    @if (isCheckboxEnabled) {
      <p-checkbox
@@ -4,8 +4,6 @@ import {AdditionalFile, Book, ReadStatus} from '../../../model/book.model';
import {Button} from 'primeng/button';
import {MenuModule} from 'primeng/menu';
import {ConfirmationService, MenuItem, MessageService} from 'primeng/api';
import {DialogService} from 'primeng/dynamicdialog';
import {ShelfAssignerComponent} from '../../shelf-assigner/shelf-assigner.component';
import {BookService} from '../../../service/book.service';
import {CheckboxChangeEvent, CheckboxModule} from 'primeng/checkbox';
import {FormsModule} from '@angular/forms';
@@ -16,17 +14,15 @@ import {UserService} from '../../../../settings/user-management/user.service';
import {filter, Subject} from 'rxjs';
import {EmailService} from '../../../../settings/email-v2/email.service';
import {TieredMenu} from 'primeng/tieredmenu';
import {BookSenderComponent} from '../../book-sender/book-sender.component';
import {Router} from '@angular/router';
import {ProgressBar} from 'primeng/progressbar';
import {BookMetadataCenterComponent} from '../../../../metadata/component/book-metadata-center/book-metadata-center.component';
import {take, takeUntil} from 'rxjs/operators';
import {readStatusLabels} from '../book-filter/book-filter.component';
import {ResetProgressTypes} from '../../../../../shared/constants/reset-progress-type';
import {ReadStatusHelper} from '../../../helpers/read-status.helper';
import {BookDialogHelperService} from '../BookDialogHelperService';
import {MetadataFetchOptionsComponent} from '../../../../metadata/component/metadata-options-dialog/metadata-fetch-options/metadata-fetch-options.component';
import {TaskHelperService} from '../../../../settings/task-management/task-helper.service';
import {BookNavigationService} from '../../../service/book-navigation.service';

@Component({
  selector: 'app-book-card',
@@ -59,7 +55,6 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {

  private bookService = inject(BookService);
  private taskHelperService = inject(TaskHelperService);
  private dialogService = inject(DialogService);
  private userService = inject(UserService);
  private emailService = inject(EmailService);
  private messageService = inject(MessageService);
@@ -67,6 +62,7 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
  protected urlHelper = inject(UrlHelperService);
  private confirmationService = inject(ConfirmationService);
  private bookDialogHelperService = inject(BookDialogHelperService);
  private bookNavigationService = inject(BookNavigationService);

  private userPermissions: any;
  private metadataCenterViewMode: 'route' | 'dialog' = 'route';
@@ -116,7 +112,6 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {

  get displayTitle(): string | undefined {
    return (this.isSeriesCollapsed && this.book.metadata?.seriesName) ? this.book.metadata?.seriesName : this.book.metadata?.title;
    // return (this.isSeriesCollapsed && this.book.metadata?.seriesName) ? this.book.metadata.seriesName : this.book.metadata?.title;
  }

  onImageLoad(): void {
@@ -138,7 +133,6 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
  onMenuToggle(event: Event, menu: TieredMenu): void {
    menu.toggle(event);

    // Load additional files if not already loaded and needed
    if (!this.additionalFilesLoaded && !this.isSubMenuLoading && this.needsAdditionalFilesData()) {
      this.isSubMenuLoading = true;
      this.bookService.getBookByIdFromAPI(this.book.id, true).subscribe({
@@ -293,18 +287,7 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
          label: 'Custom Send',
          icon: 'pi pi-envelope',
          command: () => {
            this.dialogService.open(BookSenderComponent, {
              header: 'Send Book to Email',
              modal: true,
              closable: true,
              style: {
                position: 'absolute',
                top: '15%',
              },
              data: {
                bookId: this.book.id,
              }
            });
            this.bookDialogHelperService.openCustomSendDialog(this.book.id);
          }
        }
      ]
@@ -341,15 +324,7 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
          label: 'Custom Fetch',
          icon: 'pi pi-sync',
          command: () => {
            this.dialogService.open(MetadataFetchOptionsComponent, {
              header: 'Metadata Refresh Options',
              modal: true,
              closable: true,
              data: {
                bookIds: [this.book!.id],
                metadataRefreshType: MetadataRefreshType.BOOKS,
              },
            });
            this.bookDialogHelperService.openMetadataRefreshDialog(new Set([this.book!.id]))
          },
        }
      ]
@@ -457,19 +432,7 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
  }

  private openShelfDialog(): void {
    this.dialogService.open(ShelfAssignerComponent, {
      header: `Update Book's Shelves`,
      showHeader: false,
      modal: true,
      dismissableMask: true,
      closable: true,
      contentStyle: {overflow: 'hidden'},
      styleClass: 'dynamic-dialog-minimal',
      baseZIndex: 10,
      data: {
        book: this.book,
      },
    });
    this.bookDialogHelperService.openShelfAssignerDialog(this.book, null);
  }

  openSeriesInfo(): void {
@@ -483,26 +446,17 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
  }

  openBookInfo(book: Book): void {
    const allBookIds = this.bookNavigationService.getAvailableBookIds();
    if (allBookIds.length > 0) {
      this.bookNavigationService.setNavigationContext(allBookIds, book.id);
    }

    if (this.metadataCenterViewMode === 'route') {
      this.router.navigate(['/book', book.id], {
        queryParams: {tab: 'view'}
      });
    } else {
      this.dialogService.open(BookMetadataCenterComponent, {
        width: '90%',
        height: '90%',
        data: {bookId: book.id},
        modal: true,
        dismissableMask: true,
        showHeader: true,
        closable: true,
        closeOnEscape: true,
        draggable: false,
        maximizable: false,
        resizable: false,
        header: 'Book Details',
        styleClass: 'book-details-dialog'
      });
      this.bookDialogHelperService.openBookDetailsDialog(book.id);
    }
  }

@@ -669,6 +623,7 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
      case 'epub':
      case 'mobi':
      case 'azw3':
      case 'fb2':
        return 'pi pi-book';
      case 'cbz':
      case 'cbr':
@@ -692,6 +647,10 @@ export class BookCardComponent implements OnInit, OnChanges, OnDestroy {
    return this.isAdmin() || (this.userPermissions?.canEditMetadata ?? false);
  }

  canReadBook(): boolean {
    return this.book?.bookType !== 'FB2';
  }

  private hasDownloadPermission(): boolean {
    return this.isAdmin() || (this.userPermissions?.canDownload ?? false);
  }
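
BookNavigationService is new in this PR and its implementation is not included in the shown hunks. A minimal sketch covering only the surface used above (setAvailableBookIds, getAvailableBookIds, setNavigationContext); the real service presumably also exposes next/previous traversal for the metadata center:

import {Injectable} from '@angular/core';

@Injectable({providedIn: 'root'})
export class BookNavigationService {
  private availableBookIds: number[] = [];
  private currentBookId: number | null = null;

  // Called by BookBrowserComponent whenever the visible book list changes.
  setAvailableBookIds(ids: number[]): void {
    this.availableBookIds = ids;
  }

  getAvailableBookIds(): number[] {
    return this.availableBookIds;
  }

  // Called by BookCardComponent before opening book details, so the
  // metadata center knows where the opened book sits in the list.
  setNavigationContext(ids: number[], currentBookId: number): void {
    this.availableBookIds = ids;
    this.currentBookId = currentBookId;
  }
}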
@@ -13,8 +13,6 @@ import {MessageService} from 'primeng/api';
import {Router, RouterLink} from '@angular/router';
import {filter, Subject} from 'rxjs';
import {UserService} from '../../../../settings/user-management/user.service';
import {BookMetadataCenterComponent} from '../../../../metadata/component/book-metadata-center/book-metadata-center.component';
import {DialogService} from 'primeng/dynamicdialog';
import {take, takeUntil} from 'rxjs/operators';
import {ReadStatusHelper} from '../../../helpers/read-status.helper';

@@ -35,7 +35,7 @@ export class LockUnlockMetadataDialogComponent implements OnInit {
    'isbn13Locked', 'isbn10Locked', 'asinLocked', 'pageCountLocked', 'thumbnailLocked', 'languageLocked', 'coverLocked',
    'seriesNameLocked', 'seriesNumberLocked', 'seriesTotalLocked', 'authorsLocked', 'categoriesLocked', 'moodsLocked', 'tagsLocked',
    'amazonRatingLocked', 'amazonReviewCountLocked', 'goodreadsRatingLocked', 'goodreadsReviewCountLocked',
    'hardcoverRatingLocked', 'hardcoverReviewCountLocked', 'goodreadsIdLocked', 'hardcoverIdLocked', 'googleIdLocked', 'comicvineIdLocked'
    'hardcoverRatingLocked', 'hardcoverReviewCountLocked', 'goodreadsIdLocked', 'hardcoverIdLocked', 'hardcoverBookIdLocked', 'googleIdLocked', 'comicvineIdLocked'
  ];

  fieldLabels: Record<string, string> = {
@@ -66,6 +66,7 @@ export class LockUnlockMetadataDialogComponent implements OnInit {
    hardcoverReviewCountLocked: 'Hardcover Reviews',
    goodreadsIdLocked: 'Goodreads ID',
    hardcoverIdLocked: 'Hardcover ID',
    hardcoverBookIdLocked: 'Hardcover Book ID',
    googleIdLocked: 'Google ID',
    comicvineIdLocked: 'Comicvine ID',
  };
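
Why hardcoverBookIdLocked shows up in both hunks: lockedFields decides which lock toggles the dialog renders, and fieldLabels supplies their captions, so a key present in only one of the two would yield a missing or unlabeled toggle. A sketch of how the two structures typically pair up when rendering (the component's real template logic is not in this diff):

// Illustrative only — assumes the dialog builds its toggle list like this.
const toggles = this.lockedFields.map(key => ({
  key,
  label: this.fieldLabels[key] ?? key, // fall back to the raw key if no label exists
}));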
Some files were not shown because too many files have changed in this diff.