Compare commits

..

56 Commits

Author SHA1 Message Date
Flaminel 6b94e05092 fixed some comments 2026-02-16 19:07:50 +02:00
Flaminel d4ac8c8ddf added some tests 2026-02-16 18:14:40 +02:00
Flaminel 9c6560b159 fixed context variables 2026-02-16 17:49:29 +02:00
Flaminel 8fdc49f65a decreased number of concurrent deletes 2026-02-16 17:49:08 +02:00
Flaminel f906e6ed14 fixed frontend inputs 2026-02-16 16:46:58 +02:00
Flaminel 69b50499b5 fixed files call 2026-02-16 16:46:49 +02:00
Flaminel cc735bd4e2 fixed with main 2026-02-15 18:03:25 +02:00
Flaminel 76767adb1f added rTorrent support 2026-02-15 17:44:53 +02:00
Flaminel 94acd9afa4 Fix download client inputs (#442) 2026-02-15 04:06:28 +02:00
Flaminel 65d25a72a9 Fix failed import reason display for events (#443) 2026-02-15 03:59:40 +02:00
Flaminel 97eb2fce44 Add strikes page (#438) 2026-02-15 03:57:14 +02:00
Flaminel 701829001c Fix speed and size inputs for queue rules (#440) 2026-02-15 01:19:32 +02:00
Flaminel 8aeeca111c Add strike persistency (#437) 2026-02-14 04:00:05 +02:00
Flaminel c43936ce81 Remove testing download path (#436) 2026-02-13 11:52:53 +02:00
Flaminel f35eb0c922 Add full message for logs on the UI (#435) 2026-02-13 02:05:39 +02:00
Flaminel b2b0626b44 Add external url for notifications (#434) 2026-02-13 02:04:27 +02:00
Flaminel 40f108d7ca Add app status setting to general settings (#433) 2026-02-12 23:12:26 +02:00
Flaminel 6570f74b7e Fix Deluge failing on empty password (#432) 2026-02-12 22:29:55 +02:00
Flaminel 16f216cf84 Add Gotify notification provider (#420) 2026-02-12 18:17:09 +02:00
Flaminel 69551edeff Revamp UI (#429) 2026-02-12 17:51:30 +02:00
Flaminel 7192796e89 Update frontend packages (#422) 2026-01-14 14:17:04 +02:00
Flaminel 1d1ee7972f Use CDN to deliver logos (#421) 2026-01-14 13:51:25 +02:00
Flaminel 8bd6b86018 Add Discord notification provider (#417) 2026-01-13 18:53:40 +02:00
Flaminel 6abb542271 Fix Servarr version dropdown (#414) 2026-01-11 02:33:16 +02:00
Flaminel 2aceae3078 Fix package lock file being out of sync (#411) 2026-01-09 00:05:34 +02:00
Flaminel 65b200a68e Fix MacOS build (#409) 2026-01-07 02:57:52 +02:00
Flaminel de0c881944 Fix inode count duplication when looking for hardlinks (#407) 2026-01-07 02:27:27 +02:00
Flaminel d0ef01d79b Add images for Apprise CLI notifications (#408) 2026-01-07 02:11:59 +02:00
Flaminel 9457236e99 Update packages and dotnet version (#398) 2026-01-07 00:27:20 +02:00
Flaminel d43b4fc1c4 Add Whisparr v3 support (#405) 2026-01-03 23:34:21 +02:00
Flaminel e9750429eb Add Telegram notification provider (#400) 2026-01-03 23:34:05 +02:00
Flaminel b71b268b08 Add configurable bind address (#404) 2025-12-31 04:40:43 +02:00
Flaminel a708d22b27 Add GitAds to README (#403) 2025-12-31 03:35:35 +02:00
Flaminel a9a3b08ad6 Add item name for dashboard events (#402) 2025-12-31 02:35:55 +02:00
Flaminel 1d1e8679e4 Add supported apps version disclaimer (#399) 2025-12-30 00:26:16 +02:00
Flaminel 142d445ed0 Add Apprise CLI notification provider (#387) 2025-12-25 22:05:23 +02:00
Flaminel 375094862c Add test button for arrs and download clients (#391) 2025-12-20 17:06:03 +02:00
Flaminel 58a72cef0f Add option for multiple ignored root directories (#390) 2025-12-20 17:04:36 +02:00
Flaminel 4ceff127a7 Add option to keep source files when cleaning downloads (#388) 2025-12-19 23:52:59 +02:00
Flaminel c07b811cf8 Fix Transmission torrent fetch (#389) 2025-12-19 23:35:23 +02:00
Flaminel b16fa70774 Add Pushover notification provider (#385) 2025-12-13 21:24:34 +02:00
Flaminel b343165644 Fix Download Cleaner making too many requests (#368) 2025-12-10 09:22:51 +02:00
Flaminel 02dff0bb9b Fix manual release workflows (#380) 2025-12-01 23:42:22 +02:00
Flaminel ac3be75082 Fix workflow dispatch defaulting to dev version (#379) 2025-11-30 22:53:23 +02:00
Flaminel a1663b865a Improve workflow dispatch (#378) 2025-11-30 22:27:40 +02:00
Flaminel c97a416d1e Fix windows workflow (#377) 2025-11-30 16:22:21 +02:00
Flaminel d28ab42303 Fix frontend workflow using assets instead of cache (#376) 2025-11-30 15:48:53 +02:00
Flaminel fbb2bba3b6 Update packages (#375) 2025-11-30 13:14:29 +02:00
Flaminel 08eda22587 Add test workflow and improve workflow parallelization (#369) 2025-11-25 23:05:28 +02:00
Flaminel a4045eebd3 Add downloads volume to detailed installation docs (#365) 2025-11-22 22:15:37 +02:00
Flaminel a57cbccbb4 Improve UI validations (#366) 2025-11-22 22:14:50 +02:00
Flaminel 2221f118bb Fix qBittorrent tracker check (#363) 2025-11-09 19:03:48 +02:00
Flaminel 2cc3eb4ebb Fix ignored downloads not checking for certain fields (#362) 2025-11-09 18:24:26 +02:00
Flaminel 3a064a22bd Remove hardcoded app status timeout (#356) 2025-11-03 18:38:09 +02:00
Flaminel ee764ff215 Fix Transmission stalled check (#354) 2025-11-02 17:48:30 +02:00
Flaminel 402677b69b Fix ignored downloads not being saved for Queue Cleaner (#353) 2025-10-31 17:39:52 +02:00
    fixed ignored downloads not being saved for Queue Cleaner
759 changed files with 77163 additions and 41698 deletions

View File

@@ -0,0 +1,30 @@
name: 'Get Vault Secrets'
description: 'Retrieves secrets from HashiCorp Vault using AppRole authentication'
inputs:
vault_host:
description: 'Vault server URL'
required: true
vault_role_id:
description: 'Vault AppRole Role ID'
required: true
vault_secret_id:
description: 'Vault AppRole Secret ID'
required: true
secrets:
description: 'Secrets to retrieve (multiline string, one per line in format: path | output_name)'
required: true
default: |
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
secrets/data/github packages_pat | PACKAGES_PAT
runs:
using: "composite"
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ inputs.vault_host }}
method: approle
roleId: ${{ inputs.vault_role_id }}
secretId: ${{ inputs.vault_secret_id }}
secrets: ${{ inputs.secrets }}
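
For reference, a minimal sketch of how a workflow step could call this composite action, assuming it is checked in under `.github/actions/get-vault-secrets` (the actual path is not shown in this view); the `secrets.VAULT_*` values match the ones used by the inlined vault steps elsewhere in these workflows:

```yaml
# Hypothetical caller step; the action path below is assumed, not taken from the diff.
- name: Get vault secrets
  uses: ./.github/actions/get-vault-secrets
  with:
    vault_host: ${{ secrets.VAULT_HOST }}
    vault_role_id: ${{ secrets.VAULT_ROLE_ID }}
    vault_secret_id: ${{ secrets.VAULT_SECRET_ID }}
    secrets: |
      secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
      secrets/data/github packages_pat | PACKAGES_PAT
```

The retrieved values are then available to later steps as environment variables (for example `env.REPO_READONLY_PAT`), which is how the workflows below consume them.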

View File

@@ -1,14 +1,26 @@
name: Build Docker Images
on:
push:
tags:
- "v*.*.*"
pull_request:
paths:
- 'code/**'
workflow_dispatch:
workflow_call:
inputs:
push_docker:
description: 'Push Docker image to registry'
type: boolean
required: false
default: true
app_version:
description: 'Application version'
type: string
required: false
default: ''
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build_app:
@@ -27,18 +39,32 @@ jobs:
timeout-minutes: 1
run: |
githubHeadRef=${{ env.githubHeadRef }}
inputVersion="${{ inputs.app_version }}"
latestDockerTag=""
versionDockerTag=""
majorVersionDockerTag=""
minorVersionDockerTag=""
version="0.0.1"
if [[ "$githubRef" =~ ^"refs/tags/" ]]; then
if [[ -n "$inputVersion" ]]; then
# Version provided via input (manual release)
branch="main"
latestDockerTag="latest"
versionDockerTag="$inputVersion"
version="$inputVersion"
# Extract major and minor versions for additional tags
if [[ "$versionDockerTag" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
majorVersionDockerTag="${BASH_REMATCH[1]}"
minorVersionDockerTag="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"
fi
elif [[ "$githubRef" =~ ^"refs/tags/" ]]; then
# Tag push
branch=${githubRef##*/}
latestDockerTag="latest"
versionDockerTag=${branch#v}
version=${branch#v}
# Extract major and minor versions for additional tags
if [[ "$versionDockerTag" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
majorVersionDockerTag="${BASH_REMATCH[1]}"
@@ -115,6 +141,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push docker image
id: docker-build
timeout-minutes: 15
uses: docker/build-push-action@v6
with:
@@ -128,11 +155,12 @@ jobs:
VERSION=${{ env.version }}
PACKAGES_USERNAME=${{ secrets.PACKAGES_USERNAME }}
PACKAGES_PAT=${{ env.PACKAGES_PAT }}
outputs: |
type=image
platforms: |
linux/amd64
linux/arm64
push: true
push: ${{ github.event_name == 'pull_request' || inputs.push_docker == true }}
tags: |
${{ env.githubTags }}
${{ env.githubTags }}
# Enable BuildKit cache for faster builds
cache-from: type=gha
cache-to: type=gha,mode=max
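
To make the tagging logic above concrete: for a semantic version such as 1.2.3, the variables resolve to `latest`, `1.2.3`, `1`, and `1.2`. A self-contained illustration of the same `BASH_REMATCH` extraction follows (illustrative only, not part of the workflow diff):

```yaml
# Illustrative step with a hard-coded version; mirrors the extraction in the diff above.
- name: Show derived Docker tags (example)
  run: |
    versionDockerTag="1.2.3"
    if [[ "$versionDockerTag" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
      majorVersionDockerTag="${BASH_REMATCH[1]}"                      # 1
      minorVersionDockerTag="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"   # 1.2
    fi
    echo "Tags: latest, $versionDockerTag, $majorVersionDockerTag, $minorVersionDockerTag"
```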

View File

@@ -1,40 +1,55 @@
name: Build Executables
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build:
# Build for each platform in parallel using matrix strategy
build-platform:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
include:
- runtime: win-x64
platform: win-amd64
- runtime: linux-x64
platform: linux-amd64
- runtime: linux-arm64
platform: linux-arm64
- runtime: osx-x64
platform: osx-amd64
- runtime: osx-arm64
platform: osx-arm64
steps:
- name: Gate
if: ${{ !startsWith(github.ref, 'refs/tags/') && github.event_name != 'workflow_dispatch' }}
run: |
echo "This workflow only runs on tag events or manual dispatch. Pipeline finished."
exit 0
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
# Use input version if provided, otherwise determine from ref
if [[ -n "${{ inputs.app_version }}" ]]; then
appVersion="${{ inputs.app_version }}"
releaseVersion="v$appVersion"
elif [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repoFullName=${{ github.repository }}
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=${repoFullName#*/}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
@@ -58,27 +73,28 @@ jobs:
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup dotnet
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 10.0.x
- name: Cache NuGet packages
uses: actions/cache@v4
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json', '**/*.csproj') }}
restore-keys: |
${{ runner.os }}-nuget-
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Install dependencies and restore
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ secrets.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
@@ -86,52 +102,25 @@ jobs:
mkdir -p code/backend/${{ env.executableName }}/wwwroot
cp -r code/frontend/dist/ui/browser/* code/backend/${{ env.executableName }}/wwwroot/
- name: Build win-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build ${{ matrix.platform }}
run: |
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime ${{ matrix.runtime }} \
--self-contained \
-o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }} \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugSymbols=false
- name: Build linux-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Zip win-x64
- name: Zip artifact
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }}.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }}/
- name: Zip linux-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/
- name: Zip linux-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/
- name: Zip osx-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/
- name: Zip osx-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/
- name: Upload artifacts
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: cleanuparr-executables
path: |
./artifacts/*.zip
name: executable-${{ matrix.platform }}
path: ./artifacts/*.zip
retention-days: 30
# Removed individual release step - handled by main release workflow

46 .github/workflows/build-frontend.yml vendored Normal file
View File

@@ -0,0 +1,46 @@
name: Build Frontend
on:
workflow_call:
jobs:
build-frontend:
runs-on: ubuntu-latest
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
- name: Checkout repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ github.repository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '24'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-dist
path: code/frontend/dist/ui/browser
retention-days: 1

View File

@@ -1,28 +1,47 @@
name: Build macOS ARM Installer
name: Build macOS Installers
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build-macos-arm-installer:
name: Build macOS ARM Installer
runs-on: macos-14 # ARM runner for Apple Silicon
build-macos-installer:
name: Build macOS ${{ matrix.arch }} Installer
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
include:
- arch: Intel
runner: macos-15-intel
runtime: osx-x64
min_os_version: "10.15"
artifact_suffix: intel
- arch: ARM
runner: macos-15
runtime: osx-arm64
min_os_version: "11.0"
artifact_suffix: arm64
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
# Use input version if provided, otherwise determine from ref
if [[ -n "${{ inputs.app_version }}" ]]; then
appVersion="${{ inputs.app_version }}"
releaseVersion="v$appVersion"
elif [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
@@ -30,9 +49,9 @@ jobs:
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
@@ -58,39 +77,32 @@ jobs:
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 10.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS ARM executable
- name: Build macOS ${{ matrix.arch }} executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-arm64 \
--runtime ${{ matrix.runtime }} \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
@@ -103,17 +115,17 @@ jobs:
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
@@ -124,16 +136,16 @@ jobs:
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
@@ -141,12 +153,12 @@ jobs:
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
@@ -158,14 +170,14 @@ jobs:
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
@@ -196,7 +208,7 @@ jobs:
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
@@ -228,7 +240,7 @@ jobs:
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>11.0</string>
<string>${{ matrix.min_os_version }}</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
@@ -245,7 +257,7 @@ jobs:
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
@@ -255,96 +267,96 @@ jobs:
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64.pkg"
# Determine package name - if app_version input was provided, it's a release build
if [[ -n "${{ inputs.app_version }}" ]] || [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-${{ matrix.artifact_suffix }}.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64-dev.pkg"
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-${{ matrix.artifact_suffix }}-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
@@ -353,14 +365,12 @@ jobs:
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-arm64-installer
name: Cleanuparr-macos-${{ matrix.artifact_suffix }}-installer
path: '${{ env.pkgName }}'
retention-days: 30
# Removed individual release step - handled by main release workflow

View File

@@ -1,366 +0,0 @@
name: Build macOS Intel Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-intel-installer:
name: Build macOS Intel Installer
runs-on: macos-13 # Intel runner
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS Intel executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-x64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>10.15</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-intel-installer
path: '${{ env.pkgName }}'
retention-days: 30
# Removed individual release step - handled by main release workflow

View File

@@ -1,11 +1,13 @@
name: Build Windows Installer
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build-windows-installer:
@@ -17,9 +19,13 @@ jobs:
run: |
$repoFullName = "${{ github.repository }}"
$ref = "${{ github.ref }}"
# Handle both tag events and manual dispatch
if ($ref -match "^refs/tags/") {
$inputVersion = "${{ inputs.app_version }}"
# Use input version if provided, otherwise determine from ref
if ($inputVersion -ne "") {
$appVersion = $inputVersion
$releaseVersion = "v$appVersion"
} elseif ($ref -match "^refs/tags/") {
$releaseVersion = $ref -replace "refs/tags/", ""
$appVersion = $releaseVersion -replace "^v", ""
} else {
@@ -27,15 +33,15 @@ jobs:
$releaseVersion = "dev-$(Get-Date -Format 'yyyyMMdd-HHmmss')"
$appVersion = "0.0.1-dev"
}
$repositoryName = $repoFullName.Split("/")[1]
echo "githubRepository=${{ github.repository }}" >> $env:GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $env:GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $env:GITHUB_ENV
echo "appVersion=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
echo "APP_VERSION=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
@@ -55,23 +61,16 @@ jobs:
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
dotnet-version: 10.0.x
- name: Restore .NET dependencies
run: |

45 .github/workflows/dependency-review.yml vendored Normal file
View File

@@ -0,0 +1,45 @@
name: Dependency Review
on:
pull_request:
branches:
- main
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions:
contents: read
pull-requests: write
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Dependency Review
uses: actions/dependency-review-action@v4
with:
# Fail on critical and high severity vulnerabilities
fail-on-severity: high
# Warn on moderate vulnerabilities
warn-on-severity: moderate
# Allow licenses
# allow-licenses: MIT, Apache-2.0, BSD-2-Clause, BSD-3-Clause, ISC, 0BSD
# Comment summarizes the vulnerabilities found
comment-summary-in-pr: on-failure
# Show dependency changes in PR
show-openssf-scorecard: true
vulnerability-check: true
- name: Upload dependency review results
uses: actions/upload-artifact@v4
with:
name: dependency-review-results
path: dependency-review-*.json
if-no-files-found: ignore
retention-days: 30

View File

@@ -27,7 +27,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
node-version: 24.x
cache: yarn
cache-dependency-path: docs/yarn.lock

View File

@@ -8,8 +8,32 @@ on:
inputs:
version:
description: 'Version to release (e.g., 1.0.0)'
required: true
runTests:
description: 'Run test suite'
type: boolean
required: false
default: ''
default: true
buildDocker:
description: 'Build Docker image'
type: boolean
required: false
default: true
pushDocker:
description: 'Push Docker image to registry'
type: boolean
required: false
default: false
buildBinaries:
description: 'Build executables and installers'
type: boolean
required: false
default: true
createRelease:
description: 'Create GitHub release'
type: boolean
required: false
default: false
jobs:
# Validate release
@@ -19,7 +43,7 @@ jobs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -32,55 +56,127 @@ jobs:
release_version=${GITHUB_REF##refs/tags/}
app_version=${release_version#v}
is_tag=true
elif [[ -n "${{ github.event.inputs.version }}" ]]; then
else
# Manual workflow with version
app_version="${{ github.event.inputs.version }}"
release_version="v$app_version"
is_tag=false
else
# Manual workflow without version
app_version="0.0.1-dev-$(date +%Y%m%d-%H%M%S)"
# Validate version format (x.x.x)
if ! [[ "$app_version" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo "Error: Version must be in format x.x.x (e.g., 1.0.0)"
echo "Provided version: $app_version"
exit 1
fi
release_version="v$app_version"
is_tag=false
fi
echo "app_version=$app_version" >> $GITHUB_OUTPUT
echo "release_version=$release_version" >> $GITHUB_OUTPUT
echo "is_tag=$is_tag" >> $GITHUB_OUTPUT
echo "🏷️ Release Version: $release_version"
echo "📱 App Version: $app_version"
echo "🔖 Is Tag: $is_tag"
echo "Release Version: $release_version"
echo "App Version: $app_version"
echo "Is Tag: $is_tag"
- name: Check if release already exists
run: |
if gh release view "${{ steps.version.outputs.release_version }}" &>/dev/null; then
echo "❌ Release ${{ steps.version.outputs.release_version }} already exists. Stopping workflow."
exit 1
fi
echo "✅ Release ${{ steps.version.outputs.release_version }} does not exist. Proceeding."
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Run tests
test:
needs: validate
if: ${{ needs.validate.outputs.is_tag == 'true' || github.event.inputs.runTests == 'true' }}
uses: ./.github/workflows/test.yml
secrets: inherit
# Build frontend once for all build jobs and cache it
build-frontend:
needs: [validate, test]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-frontend.yml
secrets: inherit
# Build portable executables
build-executables:
needs: validate
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-executable.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build Windows installer
build-windows-installer:
needs: validate
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-windows-installer.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build macOS Intel installer
build-macos-intel:
needs: validate
uses: ./.github/workflows/build-macos-intel-installer.yml
# Build macOS installers (Intel and ARM)
build-macos:
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-macos-installer.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build macOS ARM installer
build-macos-arm:
needs: validate
uses: ./.github/workflows/build-macos-arm-installer.yml
# Build and push Docker image(s)
build-docker:
needs: [validate, test]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildDocker == 'true')
uses: ./.github/workflows/build-docker.yml
with:
push_docker: ${{ needs.validate.outputs.is_tag == 'true' || github.event.inputs.pushDocker == 'true' }}
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Create GitHub release
create-release:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
needs: [validate, build-executables, build-windows-installer, build-macos]
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
if: |
always() &&
needs.validate.result == 'success' &&
needs.build-executables.result == 'success' &&
needs.build-windows-installer.result == 'success' &&
needs.build-macos.result == 'success' &&
(
needs.validate.outputs.is_tag == 'true' ||
(github.event.inputs.createRelease == 'true' && github.event.inputs.buildBinaries == 'true')
)
steps:
- name: Get vault secrets
@@ -93,15 +189,32 @@ jobs:
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
- name: Download all artifacts
- name: Download executable artifacts
uses: actions/download-artifact@v4
with:
pattern: executable-*
path: ./artifacts
merge-multiple: true
- name: Download Windows installer
uses: actions/download-artifact@v4
with:
name: Cleanuparr-windows-installer
path: ./artifacts
- name: Download macOS installers
uses: actions/download-artifact@v4
with:
pattern: Cleanuparr-macos-*-installer
path: ./artifacts
merge-multiple: true
- name: List downloaded artifacts
run: |
echo "📦 Downloaded artifacts:"
find ./artifacts -type f -name "*.zip" -o -name "*.pkg" -o -name "*.exe" | sort
echo "Downloaded artifacts:"
find ./artifacts -type f \( -name "*.zip" -o -name "*.pkg" -o -name "*.exe" \) | sort
echo ""
echo "Total files: $(find ./artifacts -type f \( -name "*.zip" -o -name "*.pkg" -o -name "*.exe" \) | wc -l)"
- name: Create release
uses: softprops/action-gh-release@v2
@@ -113,52 +226,62 @@ jobs:
target_commitish: main
generate_release_notes: true
files: |
./artifacts/**/*.zip
./artifacts/**/*.pkg
./artifacts/**/*.exe
./artifacts/*.zip
./artifacts/*.pkg
./artifacts/*.exe
# Summary job
summary:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
needs: [validate, test, build-frontend, build-executables, build-windows-installer, build-macos, build-docker]
runs-on: ubuntu-latest
if: always()
steps:
- name: Record workflow start time
id: workflow-start
run: |
# Get workflow start time from GitHub API
workflow_start=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id }} --jq '.run_started_at')
start_epoch=$(date -d "$workflow_start" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%SZ" "$workflow_start" +%s)
echo "start=$start_epoch" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build Summary
run: |
# Calculate total workflow duration
start_time=${{ steps.workflow-start.outputs.start }}
end_time=$(date +%s)
duration=$((end_time - start_time))
minutes=$((duration / 60))
seconds=$((duration % 60))
echo "## 🏗️ Cleanuparr Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ needs.validate.outputs.release_version }}" >> $GITHUB_STEP_SUMMARY
echo "**App Version**: ${{ needs.validate.outputs.app_version }}" >> $GITHUB_STEP_SUMMARY
echo "**Is Tag**: ${{ needs.validate.outputs.is_tag }}" >> $GITHUB_STEP_SUMMARY
echo "**Total Duration**: ${minutes}m ${seconds}s" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Build Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check job results
if [[ "${{ needs.build-executables.result }}" == "success" ]]; then
echo "✅ **Portable Executables**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Portable Executables**: ${{ needs.build-executables.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-windows-installer.result }}" == "success" ]]; then
echo "✅ **Windows Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Windows Installer**: ${{ needs.build-windows-installer.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-intel.result }}" == "success" ]]; then
echo "✅ **macOS Intel Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS Intel Installer**: ${{ needs.build-macos-intel.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-arm.result }}" == "success" ]]; then
echo "✅ **macOS ARM Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS ARM Installer**: ${{ needs.build-macos-arm.result }}" >> $GITHUB_STEP_SUMMARY
fi
# Helper function to print job result
print_result() {
local name="$1"
local result="$2"
case "$result" in
success) echo "✅ **$name**: Success" >> $GITHUB_STEP_SUMMARY ;;
skipped) echo "⏭️ **$name**: Skipped" >> $GITHUB_STEP_SUMMARY ;;
*) echo "❌ **$name**: $result" >> $GITHUB_STEP_SUMMARY ;;
esac
}
print_result "Tests" "${{ needs.test.result }}"
print_result "Frontend Build" "${{ needs.build-frontend.result }}"
print_result "Portable Executables" "${{ needs.build-executables.result }}"
print_result "Windows Installer" "${{ needs.build-windows-installer.result }}"
print_result "macOS Installers (Intel & ARM)" "${{ needs.build-macos.result }}"
print_result "Docker Image Build" "${{ needs.build-docker.result }}"
echo "" >> $GITHUB_STEP_SUMMARY
echo "🎉 **Build completed!**" >> $GITHUB_STEP_SUMMARY

99 .github/workflows/test.yml vendored Normal file
View File

@@ -0,0 +1,99 @@
name: Tests
on:
push:
branches:
- main
paths:
- 'code/backend/**'
- '.github/workflows/test.yml'
pull_request:
paths:
- 'code/backend/**'
- '.github/workflows/test.yml'
workflow_call:
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@v4
timeout-minutes: 1
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 10.0.x
- name: Cache NuGet packages
uses: actions/cache@v4
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json', '**/*.csproj') }}
restore-keys: |
${{ runner.os }}-nuget-
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github packages_pat | PACKAGES_PAT
- name: Restore dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/cleanuparr.sln
- name: Build solution
run: dotnet build code/backend/cleanuparr.sln --configuration Release --no-restore
- name: Run tests
id: run-tests
run: dotnet test code/backend/cleanuparr.sln --configuration Release --no-build --verbosity normal --logger trx --collect:"XPlat Code Coverage" --settings code/backend/coverage.runsettings --results-directory ./coverage
- name: Upload test results
uses: actions/upload-artifact@v4
with:
name: test-results
path: ./coverage/*.trx
retention-days: 30
- name: Upload coverage reports
uses: actions/upload-artifact@v4
with:
name: coverage-report
path: ./coverage/**/coverage.cobertura.xml
retention-days: 30
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
files: ./coverage/**/coverage.cobertura.xml
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
flags: backend
name: backend-coverage
- name: Test Summary
run: |
echo "## Test Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ "${{ steps.run-tests.outcome }}" == "success" ]; then
echo "✅ All tests passed!" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Tests failed or were cancelled. Status: ${{ steps.run-tests.outcome }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Test artifacts have been uploaded for detailed analysis." >> $GITHUB_STEP_SUMMARY

66 .github/workflows/version-info.yml vendored Normal file
View File

@@ -0,0 +1,66 @@
name: Get Version Info
on:
workflow_call:
inputs:
manual_version:
description: 'Manual version override (e.g., 1.0.0)'
required: false
type: string
default: ''
outputs:
app_version:
description: 'Application version (without v prefix)'
value: ${{ jobs.version.outputs.app_version }}
release_version:
description: 'Release version (with v prefix)'
value: ${{ jobs.version.outputs.release_version }}
is_tag:
description: 'Whether this is a tag event'
value: ${{ jobs.version.outputs.is_tag }}
repository_name:
description: 'Repository name without owner'
value: ${{ jobs.version.outputs.repository_name }}
jobs:
version:
runs-on: ubuntu-latest
outputs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
repository_name: ${{ steps.version.outputs.repository_name }}
steps:
- name: Calculate version info
id: version
run: |
repoFullName="${{ github.repository }}"
repositoryName="${repoFullName#*/}"
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag event
release_version="${GITHUB_REF##refs/tags/}"
app_version="${release_version#v}"
is_tag="true"
elif [[ -n "${{ inputs.manual_version }}" ]]; then
# Manual workflow with version
app_version="${{ inputs.manual_version }}"
release_version="v${app_version}"
is_tag="false"
else
# Development build
app_version="0.0.1-dev-$(date +%Y%m%d-%H%M%S)"
release_version="v${app_version}"
is_tag="false"
fi
echo "app_version=${app_version}" >> $GITHUB_OUTPUT
echo "release_version=${release_version}" >> $GITHUB_OUTPUT
echo "is_tag=${is_tag}" >> $GITHUB_OUTPUT
echo "repository_name=${repositoryName}" >> $GITHUB_OUTPUT
echo "📦 Repository: ${repositoryName}"
echo "🏷️ Release Version: ${release_version}"
echo "📱 App Version: ${app_version}"
echo "🔖 Is Tag: ${is_tag}"

350 CLAUDE.md Normal file
View File

@@ -0,0 +1,350 @@
# Cleanuparr - Claude AI Rules
## 🚨 Critical Guidelines
**READ THIS FIRST:**
1. ⚠️ **DO NOT break existing functionality** - All features are critical and must continue to work
2. **When in doubt, ASK** - Always clarify before implementing uncertain changes
3. 📋 **Follow existing patterns** - Study the codebase style before making changes
4. 🆕 **Ask before introducing new patterns** - Use current coding standards or get approval first
## Project Overview
Cleanuparr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, Lidarr, Readarr, Whisparr and supported download clients like qBittorrent, Transmission, Deluge, and µTorrent. It provides malware protection, automated cleanup, and queue management for *arr applications.
**Key Features:**
- Strike system for bad downloads
- Malware detection and blocking
- Automatic search triggering after removal
- Orphaned download cleanup with cross-seed support
- Support for multiple notification providers (Discord, etc.)
## Architecture & Tech Stack
### Backend
- **.NET 10.0** (C#) with ASP.NET Core
- **Architecture**: Clean Architecture pattern
- `Cleanuparr.Domain` - Domain models and business logic
- `Cleanuparr.Application` - Application services and use cases
- `Cleanuparr.Infrastructure` - External integrations (*arr apps, download clients)
- `Cleanuparr.Persistence` - Data access with EF Core (SQLite)
- `Cleanuparr.Api` - REST API and web host
- `Cleanuparr.Shared` - Shared utilities
- **Database**: SQLite with Entity Framework Core 10.0
- Two separate contexts: `DataContext` and `EventsContext`
- **Key Libraries**:
- MassTransit (messaging)
- Quartz.NET (scheduling)
- Serilog (logging)
- SignalR (real-time communication)
### Frontend
- **Angular 21** with TypeScript 5.9 (standalone components, zoneless, OnPush)
- **UI**: Custom glassmorphism design system (no external UI frameworks)
- **Icons**: @ng-icons/core + @ng-icons/tabler-icons
- **Design System**: 3-layer SCSS (`_variables` → `_tokens` → `_themes`), dark/light themes
- **State Management**: @ngrx/signals (Angular signals-based)
- **Real-time Updates**: SignalR (@microsoft/signalr)
- **PWA**: Service Worker support enabled
### Documentation
- **Docusaurus** (TypeScript-based static site)
- Hosted at https://cleanuparr.github.io/Cleanuparr/
### Deployment
- **Docker** (primary distribution method)
- Standalone executables for Windows, macOS, and Linux
- Platform installers for Windows (.exe) and macOS (.pkg)
## Development Setup
### Prerequisites
- .NET 10.0 SDK
- Node.js 18+
- Git
- (Optional) Make for database migrations
- (Optional) JetBrains Rider or Visual Studio
### GitHub Packages Authentication
Cleanuparr uses GitHub Packages for NuGet dependencies. Configure access:
```bash
dotnet nuget add source \
--username YOUR_GITHUB_USERNAME \
--password YOUR_GITHUB_PAT \
--store-password-in-clear-text \
--name Cleanuparr \
https://nuget.pkg.github.com/Cleanuparr/index.json
```
You need a GitHub PAT with `read:packages` permission.
### Running the Backend
```bash
cd code/backend
dotnet build Cleanuparr.Api/Cleanuparr.Api.csproj
dotnet run --project Cleanuparr.Api/Cleanuparr.Api.csproj
```
API runs at http://localhost:5000
### Running the Frontend
```bash
cd code/frontend
npm install
npm start
```
UI runs at http://localhost:4200
### Running Tests
```bash
cd code/backend
dotnet test
```
### Running Documentation
```bash
cd docs
npm install
npm start
```
Docs run at http://localhost:3000
## Project Structure
```
Cleanuparr/
├── code/
│ ├── backend/
│ │ ├── Cleanuparr.Api/ # API entry point
│ │ ├── Cleanuparr.Application/ # Business logic layer
│ │ ├── Cleanuparr.Domain/ # Domain models
│ │ ├── Cleanuparr.Infrastructure/ # External integrations
│ │ ├── Cleanuparr.Persistence/ # Database & EF Core
│ │ ├── Cleanuparr.Shared/ # Shared utilities
│ │ └── *.Tests/ # Unit tests
│ ├── frontend/ # Angular 21 application
│ ├── ui/ # Built frontend assets
│ ├── Dockerfile # Multi-stage Docker build
│ ├── entrypoint.sh # Docker entrypoint
│ └── Makefile # Build & migration helpers
├── docs/ # Docusaurus documentation
├── Logo/ # Branding assets
├── .github/workflows/ # CI/CD pipelines
├── blacklist # Default malware patterns
├── blacklist_permissive # Alternative blacklist
├── whitelist # Safe file patterns
└── CONTRIBUTING.md # Contribution guidelines
```
## Code Standards & Conventions
**IMPORTANT:** Always study existing code in the relevant area before making changes. Match the existing style exactly.
### Backend (C#)
- Follow [Microsoft C# Coding Conventions](https://docs.microsoft.com/dotnet/csharp/fundamentals/coding-style/coding-conventions)
- Use nullable reference types (`<Nullable>enable</Nullable>`)
- Add XML documentation comments for public APIs
- Write unit tests for business logic
- Use meaningful names - avoid abbreviations unless widely understood
- Keep services focused - single responsibility principle
- **Study existing service implementations before creating new ones** (a minimal illustration of these conventions follows)
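A minimal illustration of the conventions above; the `DownloadTitleSanitizer` service and its namespace are made up for this example and are not part of the codebase.
```csharp
using System;
using System.IO;
using System.Linq;

namespace Cleanuparr.Application.Examples;

/// <summary>
/// Hypothetical service used only to illustrate the conventions above: nullable reference
/// types, XML documentation on public members, and a single focused responsibility.
/// </summary>
public sealed class DownloadTitleSanitizer
{
    /// <summary>
    /// Removes characters that are invalid in file names from a download title.
    /// </summary>
    /// <param name="title">The raw title, or <c>null</c> when the download has no title yet.</param>
    /// <returns>The sanitized title, or an empty string when <paramref name="title"/> is missing.</returns>
    public string Sanitize(string? title)
    {
        if (string.IsNullOrWhiteSpace(title))
        {
            return string.Empty;
        }

        char[] invalid = Path.GetInvalidFileNameChars();
        return new string(title.Where(c => !invalid.Contains(c)).ToArray());
    }
}
```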
### Frontend (TypeScript/Angular)
- Follow [Angular Style Guide](https://angular.io/guide/styleguide)
- Use TypeScript strict mode
- All components must be **standalone** (no NgModules) with **ChangeDetectionStrategy.OnPush**
- Use `input()` / `output()` function APIs (not `@Input()` / `@Output()` decorators)
- Use Angular **signals** for reactive state (`signal()`, `computed()`, `effect()`)
- Follow the 3-layer SCSS design system (`_variables` → `_tokens` → `_themes`) for styling
- Component naming: `{feature}.component.ts`
- Service naming: `{feature}.service.ts`
- **Look at similar existing components before creating new ones**
### Testing
- Write unit tests for new features and bug fixes
- Use descriptive test names that explain what is being tested
- Backend: xUnit or NUnit conventions
- Frontend: Jasmine/Karma
- **Test that existing functionality still works after changes**
### Git Commit Messages
- Use clear, descriptive messages in imperative mood
- Examples: "Add Discord notification support", "Fix memory leak in download client polling"
- Reference issue numbers when applicable: "Fix #123: Handle null response from Radarr API"
### Discovering Issues
If you encounter potential gotchas, common mistakes, or areas that need special attention during development:
- **Flag them to the maintainer immediately**
- Document them if confirmed
- Consider if they should be added to this guide
## Database Migrations
Cleanuparr uses two separate database contexts:
- **DataContext**: Main application data
- **EventsContext**: Event logging and audit trail
### Creating Migrations
From the `code` directory:
```bash
# Data migrations
make migrate-data name=YourMigrationName
# Events migrations
make migrate-events name=YourMigrationName
```
Example:
```bash
make migrate-data name=AddDownloadClientConfig
make migrate-events name=AddStrikeEvents
```
## Common Development Workflows
### Adding a New *arr Application Integration
1. Add integration in `Cleanuparr.Infrastructure/Arr/` (see the sketch after these steps)
2. Update domain models in `Cleanuparr.Domain/`
3. Create/update services in `Cleanuparr.Application/`
4. Add API endpoints in `Cleanuparr.Api/`
5. Update frontend in `code/frontend/src/app/`
6. Document in `docs/docs/`
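A hedged sketch of step 1 above. The `ExampleArrClient` type is hypothetical; `ArrInstance` and the `X-Api-Key` header are real, and the `/api/v3` path applies to Sonarr/Radarr only. Real clients implement a richer interface and are resolved through `IArrClientFactory`.
```csharp
using System;
using System.Net.Http;
using System.Threading.Tasks;
using Cleanuparr.Persistence.Models.Configuration.Arr;

// Hypothetical client used only to illustrate step 1.
public sealed class ExampleArrClient
{
    private readonly HttpClient _httpClient;

    public ExampleArrClient(HttpClient httpClient) => _httpClient = httpClient;

    public async Task HealthCheckAsync(ArrInstance instance)
    {
        // *arr apps accept the API key via the X-Api-Key header. URL bases and other
        // API versions are not handled in this simplified sketch.
        using var request = new HttpRequestMessage(HttpMethod.Get, new Uri(instance.Url, "api/v3/system/status"));
        request.Headers.Add("X-Api-Key", instance.ApiKey);

        using HttpResponseMessage response = await _httpClient.SendAsync(request);
        response.EnsureSuccessStatusCode();
    }
}
```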
### Adding a New Download Client
1. Add client implementation in `Cleanuparr.Infrastructure/DownloadClients/`
2. Follow existing patterns (qBittorrent, Transmission, etc.)
3. Add configuration models to `Cleanuparr.Domain/`
4. Update API and frontend as above
### Adding a New Notification Provider
1. Add provider in `Cleanuparr.Infrastructure/Notifications/` (see the sketch after these steps)
2. Update configuration models
3. Add UI configuration in frontend
4. Test with actual service
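A hedged sketch of steps 1–2 above, mirroring the proxy-plus-registration pattern used by the existing providers (e.g. `IGotifyProxy`/`GotifyProxy`); the `IExampleNotificationProxy` interface and the endpoint URL are illustrative only.
```csharp
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical proxy; the real proxies read the endpoint and credentials from the
// provider's configuration entity.
public interface IExampleNotificationProxy
{
    Task SendAsync(string title, string message, CancellationToken cancellationToken = default);
}

public sealed class ExampleNotificationProxy : IExampleNotificationProxy
{
    private readonly HttpClient _httpClient;

    public ExampleNotificationProxy(HttpClient httpClient) => _httpClient = httpClient;

    public async Task SendAsync(string title, string message, CancellationToken cancellationToken = default)
    {
        // POST the notification payload to the provider's API endpoint.
        using var response = await _httpClient.PostAsJsonAsync(
            "https://example.invalid/notify", new { title, message }, cancellationToken);
        response.EnsureSuccessStatusCode();
    }
}

// Registered next to the other proxies in NotificationsDI:
// services.AddScoped<IExampleNotificationProxy, ExampleNotificationProxy>();
```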
## Important Files
### Configuration Files
- `code/backend/Cleanuparr.Api/appsettings.json` - Backend configuration
- `code/frontend/angular.json` - Angular build configuration
- `code/Dockerfile` - Docker multi-stage build
- `docs/docusaurus.config.ts` - Documentation site config
### CI/CD Workflows
- `.github/workflows/test.yml` - Run tests
- `.github/workflows/build-docker.yml` - Build Docker images
- `.github/workflows/build-executable.yml` - Build standalone executables
- `.github/workflows/release.yml` - Create releases
- `.github/workflows/docs.yml` - Deploy documentation
### Malware Protection
- `blacklist` - Default malware file patterns (strict)
- `blacklist_permissive` - Less strict patterns
- `whitelist` - Known safe file extensions
- `whitelist_with_subtitles` - Includes subtitle formats
## Contributing Guidelines
### Before Starting Work
1. **Announce your intent** - Comment on an issue or create a new one
2. **Wait for approval** from maintainers
3. Fork the repository and create a feature branch
4. Make your changes following code standards
5. Test thoroughly (both manual and automated tests)
6. Submit a PR with clear description and testing notes
### Pull Request Requirements
- Link to related issue
- Clear description of changes
- Evidence of testing
- Updated documentation if needed
- No breaking changes without discussion
## Docker Development
### Build Local Docker Image
```bash
cd code
docker build \
--build-arg PACKAGES_USERNAME=YOUR_GITHUB_USERNAME \
--build-arg PACKAGES_PAT=YOUR_GITHUB_PAT \
-t cleanuparr:local \
-f Dockerfile .
```
### Multi-Architecture Build
```bash
docker buildx build \
--platform linux/amd64,linux/arm64 \
--build-arg PACKAGES_USERNAME=YOUR_GITHUB_USERNAME \
--build-arg PACKAGES_PAT=YOUR_GITHUB_PAT \
-t cleanuparr:local \
-f Dockerfile .
```
## Environment Variables
When running via Docker:
- `PORT` - API port (default: 11011)
- `PUID` - User ID for file permissions
- `PGID` - Group ID for file permissions
- `TZ` - Timezone (e.g., `America/New_York`)
## Security & Safety
- Never commit sensitive data (API keys, tokens, passwords)
- All *arr and download client credentials are stored encrypted
- The malware detection system uses pattern matching on file extensions and names (see the sketch below)
- Always validate user input on both frontend and backend
- Follow OWASP guidelines for web application security
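A simplified sketch of that pattern matching; the helper below is illustrative, and the real logic lives in the filename evaluator and blocklist provider, which support more pattern forms.
```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Simplified illustration of blocklist pattern matching on file names.
public static class BlocklistMatchExample
{
    public static bool IsBlocked(string fileName, IReadOnlyCollection<string> patterns) =>
        patterns.Any(pattern => Matches(fileName, pattern));

    private static bool Matches(string fileName, string pattern)
    {
        if (pattern.StartsWith("*.", StringComparison.Ordinal))
        {
            // Extension pattern such as "*.lnk": compare the file's extension case-insensitively.
            return fileName.EndsWith(pattern[1..], StringComparison.OrdinalIgnoreCase);
        }

        // Otherwise treat the pattern as an exact file name.
        return string.Equals(fileName, pattern, StringComparison.OrdinalIgnoreCase);
    }
}
```
For example, `BlocklistMatchExample.IsBlocked("setup.lnk", new[] { "*.lnk" })` returns `true`.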
## Additional Resources
- **Documentation**: https://cleanuparr.github.io/Cleanuparr/
- **Discord**: https://discord.gg/SCtMCgtsc4
- **GitHub Issues**: https://github.com/Cleanuparr/Cleanuparr/issues
- **Releases**: https://github.com/Cleanuparr/Cleanuparr/releases
## Working with Claude - IMPORTANT
### Core Principles
1. **When in doubt, ASK** - Don't assume, clarify with the maintainer first
2. **Don't break existing functionality** - Everything is important and needs to work
3. **Follow existing coding style** - Study the codebase patterns before making changes
4. **Use current coding standards** - If you want to introduce something new, ask first
### When Modifying Code
- **ALWAYS read existing files before suggesting changes**
- Understand the current architecture and patterns
- Prefer editing existing files over creating new ones
- Follow the established conventions in the codebase exactly
- Test changes locally when possible
- **If you're unsure about an approach, ask before implementing**
### When Adding Features
- Review similar existing features first to understand patterns
- Maintain consistency with existing UI/UX patterns
- Update both backend and frontend together
- Add/update documentation
- Consider backwards compatibility
- **Ask about architectural decisions before implementing new patterns**
### When Fixing Bugs
- Understand the root cause before proposing a fix
- **Be careful not to break other functionality** - test related areas
- Add tests to prevent regression
- Update relevant documentation if behavior changes
- Consider if other parts of the codebase might have similar issues
- **Flag any potential gotchas or issues you discover**
## Notes
- The project uses **Clean Architecture** - respect layer boundaries
- Database migrations require both contexts - don't forget EventsContext
- Frontend uses a **custom glassmorphism design system** - don't introduce external UI frameworks (no PrimeNG, Material, etc.)
- All frontend components are **standalone** with **OnPush** change detection
- All downloads from *arr apps are processed through a **strike system**
- The malware blocker is a critical security feature - changes require careful testing
- Cross-seed integration allows keeping torrents that are actively seeding
- Real-time updates use **SignalR** - maintain websocket patterns when adding features

View File

@@ -19,7 +19,7 @@ This helps us avoid redundant work, git conflicts, and contributions that may no
### Prerequisites
- [.NET 9.0 SDK](https://dotnet.microsoft.com/download/dotnet/9.0)
- [.NET 10.0 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
- [Node.js 18+](https://nodejs.org/)
- [Git](https://git-scm.com/)
- (Optional) [Make](https://www.gnu.org/software/make/) for database migrations

View File

@@ -2,6 +2,11 @@ _Love this project? Give it a ⭐️ and let others know!_
# <img width="24px" src="./Logo/256.png" alt="Cleanuparr"></img> Cleanuparr
![Version](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fcleanuparr-status.pages.dev%2Fstatus.json&query=%24.version&logo=git&label=version&color=blue)
![Total Downloads](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fghcr-badge.elias.eu.org%2Fapi%2FCleanuparr%2FCleanuparr%2Fcleanuparr&query=%24.downloadCount&style=flat&logo=docker&label=Total%20Downloads&color=blue)
[![Tests](https://github.com/Cleanuparr/Cleanuparr/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/Cleanuparr/Cleanuparr/actions/workflows/test.yml)
[![Discord](https://img.shields.io/discord/1306721212587573389?color=7289DA&label=Discord&style=for-the-badge&logo=discord)](https://discord.gg/SCtMCgtsc4)
Cleanuparr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuparr can also trigger a search to replace the deleted shows/movies.
@@ -23,20 +28,23 @@ Cleanuparr was created primarily to address malicious files, such as `*.lnk` or
> - Notify on strike or download removal.
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuparr.
## Sponsored by GitAds
[![Sponsored by GitAds](https://gitads.dev/v1/ad-serve?source=cleanuparr/cleanuparr@github)](https://gitads.dev/v1/ad-track?source=cleanuparr/cleanuparr@github)
## Screenshots
https://cleanuparr.github.io/Cleanuparr/docs/screenshots
## 🎯 Supported Applications
### *Arr Applications
### *Arr Applications (latest version)
- **Sonarr**
- **Radarr**
- **Lidarr**
- **Readarr**
- **Whisparr**
- **Whisparr v2**
### Download Clients
### Download Clients (latest version)
- **qBittorrent**
- **Transmission**
- **Deluge**
@@ -111,4 +119,4 @@ Special thanks for inspiration go to:
# Buy me a coffee
If I made your life just a tiny bit easier, consider buying me a coffee!
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>

View File

@@ -38,4 +38,50 @@ backend/**/Tests/
# Development files
docker-compose*.yml
test/
test/
# ================================
# Node and build output
# ================================
node_modules
dist
out-tsc
.angular
.cache
.tmp
# ================================
# Testing & Coverage
# ================================
coverage
jest
cypress
cypress/screenshots
cypress/videos
reports
playwright-report
.vite
.vitepress
# ================================
# Environment & log files
# ================================
*.env*
!*.env.production
*.log
*.tsbuildinfo
# ================================
# Docker & local orchestration
# ================================
Dockerfile
Dockerfile.*
.dockerignore
docker-compose.yml
docker-compose*.yml
# ================================
# Miscellaneous
# ================================
*.bak
*.old

View File

@@ -1,10 +1,12 @@
# Build Angular frontend
FROM --platform=$BUILDPLATFORM node:18-alpine AS frontend-build
FROM --platform=$BUILDPLATFORM node:25-alpine AS frontend-build
WORKDIR /app
# Copy package files first for better layer caching
COPY frontend/package*.json ./
RUN npm ci && npm install -g @angular/cli
# Use cache mount for npm to speed up builds
RUN --mount=type=cache,target=/root/.npm \
npm ci && npm install -g @angular/cli
# Copy source code
COPY frontend/ .
@@ -13,7 +15,7 @@ COPY frontend/ .
RUN npm run build
# Build .NET backend
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:9.0-bookworm-slim AS build
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:10.0 AS build
ARG TARGETARCH
ARG VERSION=0.0.1
ARG PACKAGES_USERNAME
@@ -21,35 +23,42 @@ ARG PACKAGES_PAT
WORKDIR /app
EXPOSE 11011
# Copy solution and project files first for better layer caching
# COPY backend/*.sln ./backend/
# COPY backend/*/*.csproj ./backend/*/
# Copy source code
COPY backend/ ./backend/
# Restore dependencies
# Add NuGet source
RUN dotnet nuget add source --username ${PACKAGES_USERNAME} --password ${PACKAGES_PAT} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
# Build and publish
RUN dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
# Restore and publish with cache mount
RUN --mount=type=cache,target=/root/.nuget/packages,sharing=locked \
dotnet restore ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj -a $TARGETARCH && \
dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
-a $TARGETARCH \
-c Release \
-o /app/publish \
--no-restore \
/p:Version=${VERSION} \
/p:PublishSingleFile=true \
/p:DebugSymbols=false
# Runtime stage
FROM mcr.microsoft.com/dotnet/aspnet:9.0-bookworm-slim
FROM mcr.microsoft.com/dotnet/aspnet:10.0
# Install required packages for user management and timezone support
RUN apt-get update && apt-get install -y \
# Install required packages for user management, timezone support, and Python for Apprise CLI
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
tzdata \
gosu \
python3 \
python3-venv \
&& rm -rf /var/lib/apt/lists/*
# Create virtual environment and install Apprise CLI
ENV VIRTUAL_ENV=/opt/apprise-venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
RUN pip install --no-cache-dir apprise==1.9.6
ENV PUID=1000 \
PGID=1000 \
UMASK=022 \

View File

@@ -13,4 +13,24 @@ migrate-events:
ifndef name
$(error name is required. Usage: make migrate-events name=YourMigrationName)
endif
dotnet ef migrations add $(name) --context EventsContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Events
dotnet ef migrations add $(name) --context EventsContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Events
docker-build:
ifndef tag
$(error tag is required. Usage: make docker-build tag=latest version=1.0.0 user=... pat=...)
endif
ifndef version
$(error version is required. Usage: make docker-build tag=latest version=1.0.0 user=... pat=...)
endif
ifndef user
$(error user is required. Usage: make docker-build tag=latest version=1.0.0 user=... pat=...)
endif
ifndef pat
$(error pat is required. Usage: make docker-build tag=latest version=1.0.0 user=... pat=...)
endif
DOCKER_BUILDKIT=1 docker build \
--build-arg VERSION=$(version) \
--build-arg PACKAGES_USERNAME=$(user) \
--build-arg PACKAGES_PAT=$(pat) \
-t cleanuparr:$(tag) \
.

View File

@@ -3,7 +3,7 @@
<PropertyGroup>
<AssemblyName>Cleanuparr</AssemblyName>
<Version Condition="'$(Version)' == ''">0.0.1</Version>
<TargetFramework>net9.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<PublishReadyToRun>true</PublishReadyToRun>
@@ -23,27 +23,24 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="MassTransit" Version="8.4.1" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.6">
<PackageReference Include="MassTransit" Version="8.5.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="10.0.1">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.6" />
<PackageReference Include="Quartz" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="10.0.1" />
<PackageReference Include="Quartz" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="10.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<!-- API-related packages -->
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

View File

@@ -29,12 +29,24 @@ public class EventsController : ControllerBase
[FromQuery] string? eventType = null,
[FromQuery] DateTime? fromDate = null,
[FromQuery] DateTime? toDate = null,
[FromQuery] string? search = null)
[FromQuery] string? search = null,
[FromQuery] string? jobRunId = null)
{
// Validate pagination parameters
if (page < 1) page = 1;
if (pageSize < 1) pageSize = 100;
if (pageSize > 1000) pageSize = 1000; // Cap at 1000 for performance
if (page < 1)
{
page = 1;
}
if (pageSize < 1)
{
pageSize = 100;
}
if (pageSize > 1000)
{
pageSize = 1000; // Cap at 1000 for performance
}
var query = _context.Events.AsQueryable();
@@ -62,6 +74,12 @@ public class EventsController : ControllerBase
query = query.Where(e => e.Timestamp <= toDate.Value);
}
// Apply job run ID exact-match filter
if (!string.IsNullOrWhiteSpace(jobRunId) && Guid.TryParse(jobRunId, out var jobRunGuid))
{
query = query.Where(e => e.JobRunId == jobRunGuid);
}
// Apply search filter if provided
if (!string.IsNullOrWhiteSpace(search))
{
@@ -69,7 +87,10 @@ public class EventsController : ControllerBase
query = query.Where(e =>
EF.Functions.Like(e.Message, pattern) ||
EF.Functions.Like(e.Data, pattern) ||
EF.Functions.Like(e.TrackingId.ToString(), pattern)
EF.Functions.Like(e.TrackingId.ToString(), pattern) ||
EF.Functions.Like(e.InstanceUrl, pattern) ||
EF.Functions.Like(e.DownloadClientName, pattern) ||
EF.Functions.Like(e.JobRunId.ToString(), pattern)
);
}

View File

@@ -1,4 +1,5 @@
using Cleanuparr.Api.Models;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Microsoft.AspNetCore.Mvc;

View File

@@ -66,7 +66,9 @@ public class ManualEventsController : ControllerBase
string pattern = EventsContext.GetLikePattern(search);
query = query.Where(e =>
EF.Functions.Like(e.Message, pattern) ||
EF.Functions.Like(e.Data, pattern)
EF.Functions.Like(e.Data, pattern) ||
EF.Functions.Like(e.InstanceUrl, pattern) ||
EF.Functions.Like(e.DownloadClientName, pattern)
);
}

View File

@@ -1,7 +1,6 @@
using System.Diagnostics;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Persistence;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
@@ -14,18 +13,15 @@ public class StatusController : ControllerBase
{
private readonly ILogger<StatusController> _logger;
private readonly DataContext _dataContext;
private readonly DownloadServiceFactory _downloadServiceFactory;
private readonly ArrClientFactory _arrClientFactory;
private readonly IArrClientFactory _arrClientFactory;
public StatusController(
ILogger<StatusController> logger,
DataContext dataContext,
DownloadServiceFactory downloadServiceFactory,
ArrClientFactory arrClientFactory)
IArrClientFactory arrClientFactory)
{
_logger = logger;
_dataContext = dataContext;
_downloadServiceFactory = downloadServiceFactory;
_arrClientFactory = arrClientFactory;
}
@@ -178,8 +174,8 @@ public class StatusController : ControllerBase
{
try
{
var sonarrClient = _arrClientFactory.GetClient(InstanceType.Sonarr);
await sonarrClient.TestConnectionAsync(instance);
var sonarrClient = _arrClientFactory.GetClient(InstanceType.Sonarr, instance.Version);
await sonarrClient.HealthCheckAsync(instance);
sonarrStatus.Add(new
{
@@ -210,8 +206,8 @@ public class StatusController : ControllerBase
{
try
{
var radarrClient = _arrClientFactory.GetClient(InstanceType.Radarr);
await radarrClient.TestConnectionAsync(instance);
var radarrClient = _arrClientFactory.GetClient(InstanceType.Radarr, instance.Version);
await radarrClient.HealthCheckAsync(instance);
radarrStatus.Add(new
{
@@ -242,8 +238,8 @@ public class StatusController : ControllerBase
{
try
{
var lidarrClient = _arrClientFactory.GetClient(InstanceType.Lidarr);
await lidarrClient.TestConnectionAsync(instance);
var lidarrClient = _arrClientFactory.GetClient(InstanceType.Lidarr, instance.Version);
await lidarrClient.HealthCheckAsync(instance);
lidarrStatus.Add(new
{

View File

@@ -0,0 +1,189 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.State;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class StrikesController : ControllerBase
{
private readonly EventsContext _context;
public StrikesController(EventsContext context)
{
_context = context;
}
/// <summary>
/// Gets download items with their strikes (grouped), with pagination and filtering
/// </summary>
[HttpGet]
public async Task<ActionResult<PaginatedResult<DownloadItemStrikesDto>>> GetStrikes(
[FromQuery] int page = 1,
[FromQuery] int pageSize = 50,
[FromQuery] string? search = null,
[FromQuery] string? type = null)
{
if (page < 1) page = 1;
if (pageSize < 1) pageSize = 50;
if (pageSize > 100) pageSize = 100;
var query = _context.DownloadItems
.Include(d => d.Strikes)
.Where(d => d.Strikes.Any());
// Filter by strike type: only show items that have strikes of this type
if (!string.IsNullOrWhiteSpace(type))
{
if (Enum.TryParse<StrikeType>(type, true, out var strikeType))
query = query.Where(d => d.Strikes.Any(s => s.Type == strikeType));
}
// Apply search filter on title or download hash
if (!string.IsNullOrWhiteSpace(search))
{
string pattern = EventsContext.GetLikePattern(search);
query = query.Where(d =>
EF.Functions.Like(d.Title, pattern) ||
EF.Functions.Like(d.DownloadId, pattern));
}
var totalCount = await query.CountAsync();
var totalPages = (int)Math.Ceiling(totalCount / (double)pageSize);
var skip = (page - 1) * pageSize;
var items = await query
.OrderByDescending(d => d.Strikes.Max(s => s.CreatedAt))
.Skip(skip)
.Take(pageSize)
.ToListAsync();
var dtos = items.Select(d => new DownloadItemStrikesDto
{
DownloadItemId = d.Id,
DownloadId = d.DownloadId,
Title = d.Title,
TotalStrikes = d.Strikes.Count,
StrikesByType = d.Strikes
.GroupBy(s => s.Type)
.ToDictionary(g => g.Key.ToString(), g => g.Count()),
LatestStrikeAt = d.Strikes.Max(s => s.CreatedAt),
FirstStrikeAt = d.Strikes.Min(s => s.CreatedAt),
IsMarkedForRemoval = d.IsMarkedForRemoval,
IsRemoved = d.IsRemoved,
IsReturning = d.IsReturning,
Strikes = d.Strikes
.OrderByDescending(s => s.CreatedAt)
.Select(s => new StrikeDetailDto
{
Id = s.Id,
Type = s.Type.ToString(),
CreatedAt = s.CreatedAt,
LastDownloadedBytes = s.LastDownloadedBytes,
JobRunId = s.JobRunId,
}).ToList(),
}).ToList();
return Ok(new PaginatedResult<DownloadItemStrikesDto>
{
Items = dtos,
Page = page,
PageSize = pageSize,
TotalCount = totalCount,
TotalPages = totalPages,
});
}
/// <summary>
/// Gets the most recent individual strikes with download item info (for dashboard)
/// </summary>
[HttpGet("recent")]
public async Task<ActionResult<List<RecentStrikeDto>>> GetRecentStrikes(
[FromQuery] int count = 5)
{
if (count < 1) count = 1;
if (count > 50) count = 50;
var strikes = await _context.Strikes
.Include(s => s.DownloadItem)
.OrderByDescending(s => s.CreatedAt)
.Take(count)
.Select(s => new RecentStrikeDto
{
Id = s.Id,
Type = s.Type.ToString(),
CreatedAt = s.CreatedAt,
DownloadId = s.DownloadItem.DownloadId,
Title = s.DownloadItem.Title,
})
.ToListAsync();
return Ok(strikes);
}
/// <summary>
/// Gets all available strike types
/// </summary>
[HttpGet("types")]
public ActionResult<List<string>> GetStrikeTypes()
{
var types = Enum.GetNames(typeof(StrikeType)).ToList();
return Ok(types);
}
/// <summary>
/// Deletes all strikes for a specific download item
/// </summary>
[HttpDelete("{downloadItemId:guid}")]
public async Task<IActionResult> DeleteStrikesForItem(Guid downloadItemId)
{
var item = await _context.DownloadItems
.Include(d => d.Strikes)
.FirstOrDefaultAsync(d => d.Id == downloadItemId);
if (item == null)
return NotFound();
_context.Strikes.RemoveRange(item.Strikes);
_context.DownloadItems.Remove(item);
await _context.SaveChangesAsync();
return NoContent();
}
}
public class DownloadItemStrikesDto
{
public Guid DownloadItemId { get; set; }
public string DownloadId { get; set; } = string.Empty;
public string Title { get; set; } = string.Empty;
public int TotalStrikes { get; set; }
public Dictionary<string, int> StrikesByType { get; set; } = new();
public DateTime LatestStrikeAt { get; set; }
public DateTime FirstStrikeAt { get; set; }
public bool IsMarkedForRemoval { get; set; }
public bool IsRemoved { get; set; }
public bool IsReturning { get; set; }
public List<StrikeDetailDto> Strikes { get; set; } = [];
}
public class StrikeDetailDto
{
public Guid Id { get; set; }
public string Type { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
public long? LastDownloadedBytes { get; set; }
public Guid JobRunId { get; set; }
}
public class RecentStrikeDto
{
public Guid Id { get; set; }
public string Type { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
public string DownloadId { get; set; } = string.Empty;
public string Title { get; set; } = string.Empty;
}

View File

@@ -2,7 +2,6 @@ using System.Text.Json.Serialization;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Hubs;
using Microsoft.AspNetCore.Http.Json;
using Microsoft.OpenApi.Models;
using System.Text;
using Cleanuparr.Api.Middleware;
using Microsoft.Extensions.Options;
@@ -46,20 +45,6 @@ public static class ApiDI
// Add health status broadcaster
services.AddHostedService<HealthStatusBroadcaster>();
services.AddSwaggerGen(options =>
{
options.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Cleanuparr API",
Version = "v1",
Description = "API for managing media downloads and cleanups",
Contact = new OpenApiContact
{
Name = "Cleanuparr Team"
}
});
});
return services;
}
@@ -83,17 +68,6 @@ public static class ApiDI
app.UseCors("Any");
app.UseRouting();
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint("v1/swagger.json", "Cleanuparr API v1");
options.RoutePrefix = "swagger";
options.DocumentTitle = "Cleanuparr API Documentation";
});
}
app.UseAuthorization();
app.MapControllers();

View File

@@ -56,8 +56,8 @@ public static class MainDI
{
e.ConfigureConsumer<DownloadRemoverConsumer<SearchItem>>(context);
e.ConfigureConsumer<DownloadRemoverConsumer<SeriesSearchItem>>(context);
e.ConcurrentMessageLimit = 2;
e.PrefetchCount = 2;
e.ConcurrentMessageLimit = 1;
e.PrefetchCount = 1;
});
cfg.ReceiveEndpoint("download-hunter-queue", e =>

View File

@@ -1,7 +1,11 @@
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Apprise;
using Cleanuparr.Infrastructure.Features.Notifications.Discord;
using Cleanuparr.Infrastructure.Features.Notifications.Notifiarr;
using Cleanuparr.Infrastructure.Features.Notifications.Ntfy;
using Cleanuparr.Infrastructure.Features.Notifications.Pushover;
using Cleanuparr.Infrastructure.Features.Notifications.Telegram;
using Cleanuparr.Infrastructure.Features.Notifications.Gotify;
namespace Cleanuparr.Api.DependencyInjection;
@@ -11,7 +15,13 @@ public static class NotificationsDI
services
.AddScoped<INotifiarrProxy, NotifiarrProxy>()
.AddScoped<IAppriseProxy, AppriseProxy>()
.AddScoped<IAppriseCliProxy, AppriseCliProxy>()
.AddSingleton<IAppriseCliDetector, AppriseCliDetector>()
.AddScoped<INtfyProxy, NtfyProxy>()
.AddScoped<IPushoverProxy, PushoverProxy>()
.AddScoped<ITelegramProxy, TelegramProxy>()
.AddScoped<IDiscordProxy, DiscordProxy>()
.AddScoped<IGotifyProxy, GotifyProxy>()
.AddScoped<INotificationConfigurationService, NotificationConfigurationService>()
.AddScoped<INotificationProviderFactory, NotificationProviderFactory>()
.AddScoped<NotificationProviderFactory>()

View File

@@ -1,5 +1,7 @@
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Events.Interfaces;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Infrastructure.Features.BlacklistSync;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadHunter;
@@ -10,7 +12,6 @@ using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Features.Security;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services;
@@ -23,20 +24,19 @@ public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddScoped<IEncryptionService, AesEncryptionService>()
.AddScoped<SensitiveDataJsonConverter>()
.AddScoped<EventsContext>()
.AddScoped<DataContext>()
.AddScoped<EventPublisher>()
.AddScoped<IEventPublisher, EventPublisher>()
.AddHostedService<EventCleanupService>()
.AddScoped<IDryRunInterceptor, DryRunInterceptor>()
.AddScoped<CertificateValidationService>()
.AddScoped<SonarrClient>()
.AddScoped<RadarrClient>()
.AddScoped<LidarrClient>()
.AddScoped<ReadarrClient>()
.AddScoped<WhisparrClient>()
.AddScoped<ArrClientFactory>()
.AddScoped<ISonarrClient, SonarrClient>()
.AddScoped<IRadarrClient, RadarrClient>()
.AddScoped<ILidarrClient, LidarrClient>()
.AddScoped<IReadarrClient, ReadarrClient>()
.AddScoped<IWhisparrV2Client, WhisparrV2Client>()
.AddScoped<IWhisparrV3Client, WhisparrV3Client>()
.AddScoped<IArrClientFactory, ArrClientFactory>()
.AddScoped<QueueCleaner>()
.AddScoped<BlacklistSynchronizer>()
.AddScoped<MalwareBlocker>()
@@ -45,17 +45,18 @@ public static class ServicesDI
.AddScoped<IDownloadHunter, DownloadHunter>()
.AddScoped<IFilenameEvaluator, FilenameEvaluator>()
.AddScoped<IHardLinkFileService, HardLinkFileService>()
.AddScoped<UnixHardLinkFileService>()
.AddScoped<WindowsHardLinkFileService>()
.AddScoped<ArrQueueIterator>()
.AddScoped<DownloadServiceFactory>()
.AddScoped<IUnixHardLinkFileService, UnixHardLinkFileService>()
.AddScoped<IWindowsHardLinkFileService, WindowsHardLinkFileService>()
.AddScoped<IArrQueueIterator, ArrQueueIterator>()
.AddScoped<IDownloadServiceFactory, DownloadServiceFactory>()
.AddScoped<IStriker, Striker>()
.AddScoped<FileReader>()
.AddScoped<IRuleManager, RuleManager>()
.AddScoped<IRuleEvaluator, RuleEvaluator>()
.AddScoped<IRuleIntervalValidator, RuleIntervalValidator>()
.AddSingleton<IJobManagementService, JobManagementService>()
.AddSingleton<BlocklistProvider>()
.AddSingleton<IBlocklistProvider, BlocklistProvider>()
.AddSingleton(TimeProvider.System)
.AddSingleton<AppStatusSnapshot>()
.AddHostedService<AppStatusRefreshService>();
}

View File

@@ -18,13 +18,20 @@ public sealed record ArrInstanceRequest
[Required]
public required string ApiKey { get; init; }
[Required]
public required float Version { get; init; }
public string? ExternalUrl { get; init; }
public ArrInstance ToEntity(Guid configId) => new()
{
Enabled = Enabled,
Name = Name,
Url = new Uri(Url),
ExternalUrl = ExternalUrl is not null ? new Uri(ExternalUrl) : null,
ApiKey = ApiKey,
ArrConfigId = configId,
Version = Version,
};
public void ApplyTo(ArrInstance instance)
@@ -32,6 +39,8 @@ public sealed record ArrInstanceRequest
instance.Enabled = Enabled;
instance.Name = Name;
instance.Url = new Uri(Url);
instance.ExternalUrl = ExternalUrl is not null ? new Uri(ExternalUrl) : null;
instance.ApiKey = ApiKey;
instance.Version = Version;
}
}

View File

@@ -0,0 +1,28 @@
using System;
using System.ComponentModel.DataAnnotations;
using Cleanuparr.Persistence.Models.Configuration.Arr;
namespace Cleanuparr.Api.Features.Arr.Contracts.Requests;
public sealed record TestArrInstanceRequest
{
[Required]
public required string Url { get; init; }
[Required]
public required string ApiKey { get; init; }
[Required]
public required float Version { get; init; }
public ArrInstance ToTestInstance() => new()
{
Enabled = true,
Name = "Test Instance",
Url = new Uri(Url),
ApiKey = ApiKey,
ArrConfigId = Guid.Empty,
Version = Version,
};
}

View File

@@ -1,16 +1,11 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.Arr.Contracts.Requests;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr.Dtos;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Mapster;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.Arr.Controllers;
@@ -20,13 +15,16 @@ public sealed class ArrConfigController : ControllerBase
{
private readonly ILogger<ArrConfigController> _logger;
private readonly DataContext _dataContext;
private readonly IArrClientFactory _arrClientFactory;
public ArrConfigController(
ILogger<ArrConfigController> logger,
DataContext dataContext)
DataContext dataContext,
IArrClientFactory arrClientFactory)
{
_logger = logger;
_dataContext = dataContext;
_arrClientFactory = arrClientFactory;
}
[HttpGet("sonarr")]
@@ -124,6 +122,26 @@ public sealed class ArrConfigController : ControllerBase
public Task<IActionResult> DeleteWhisparrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Whisparr, id);
[HttpPost("sonarr/instances/test")]
public Task<IActionResult> TestSonarrInstance([FromBody] TestArrInstanceRequest request)
=> TestArrInstance(InstanceType.Sonarr, request);
[HttpPost("radarr/instances/test")]
public Task<IActionResult> TestRadarrInstance([FromBody] TestArrInstanceRequest request)
=> TestArrInstance(InstanceType.Radarr, request);
[HttpPost("lidarr/instances/test")]
public Task<IActionResult> TestLidarrInstance([FromBody] TestArrInstanceRequest request)
=> TestArrInstance(InstanceType.Lidarr, request);
[HttpPost("readarr/instances/test")]
public Task<IActionResult> TestReadarrInstance([FromBody] TestArrInstanceRequest request)
=> TestArrInstance(InstanceType.Readarr, request);
[HttpPost("whisparr/instances/test")]
public Task<IActionResult> TestWhisparrInstance([FromBody] TestArrInstanceRequest request)
=> TestArrInstance(InstanceType.Whisparr, request);
private async Task<IActionResult> GetArrConfig(InstanceType type)
{
await DataContext.Lock.WaitAsync();
@@ -260,6 +278,23 @@ public sealed class ArrConfigController : ControllerBase
}
}
private async Task<IActionResult> TestArrInstance(InstanceType type, TestArrInstanceRequest request)
{
try
{
var testInstance = request.ToTestInstance();
var client = _arrClientFactory.GetClient(type, request.Version);
await client.HealthCheckAsync(testInstance);
return Ok(new { Message = $"Connection to {type} instance successful" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test {Type} instance connection", type);
return BadRequest(new { Message = $"Connection failed: {ex.Message}" });
}
}
private static string GetConfigActionName(InstanceType type) => type switch
{
InstanceType.Sonarr => nameof(GetSonarrConfig),

View File

@@ -2,7 +2,7 @@ using System;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.BlacklistSync.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;

View File

@@ -0,0 +1,29 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
public record SeedingRuleRequest
{
[Required]
public string Name { get; init; } = string.Empty;
/// <summary>
/// Max ratio before removing a download.
/// </summary>
public double MaxRatio { get; init; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
public double MinSeedTime { get; init; }
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
public double MaxSeedTime { get; init; } = -1;
/// <summary>
/// Whether to delete the source files when cleaning the download.
/// </summary>
public bool DeleteSourceFiles { get; init; } = true;
}

View File

@@ -1,8 +1,6 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
public record UpdateDownloadCleanerConfigRequest
public sealed record UpdateDownloadCleanerConfigRequest
{
public bool Enabled { get; init; }
@@ -13,7 +11,7 @@ public record UpdateDownloadCleanerConfigRequest
/// </summary>
public bool UseAdvancedScheduling { get; init; }
public List<CleanCategoryRequest> Categories { get; init; } = [];
public List<SeedingRuleRequest> Categories { get; init; } = [];
public bool DeletePrivate { get; init; }
@@ -26,30 +24,9 @@ public record UpdateDownloadCleanerConfigRequest
public bool UnlinkedUseTag { get; init; }
public string UnlinkedIgnoredRootDir { get; init; } = string.Empty;
public List<string> UnlinkedIgnoredRootDirs { get; init; } = [];
public List<string> UnlinkedCategories { get; init; } = [];
public List<string> IgnoredDownloads { get; init; } = [];
}
public record CleanCategoryRequest
{
[Required]
public string Name { get; init; } = string.Empty;
/// <summary>
/// Max ratio before removing a download.
/// </summary>
public double MaxRatio { get; init; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
public double MinSeedTime { get; init; }
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
public double MaxSeedTime { get; init; } = -1;
}

View File

@@ -3,7 +3,7 @@ using System.IO;
using System.Linq;
using Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;
@@ -62,65 +62,13 @@ public sealed class DownloadCleanerConfigController : ControllerBase
throw new ValidationException("Request body cannot be null");
}
// Validate cron expression format
if (!string.IsNullOrEmpty(newConfigDto.CronExpression))
{
CronValidationHelper.ValidateCronExpression(newConfigDto.CronExpression);
}
if (newConfigDto.Enabled && newConfigDto.Categories.Any())
{
if (newConfigDto.Categories.GroupBy(x => x.Name).Any(x => x.Count() > 1))
{
throw new ValidationException("Duplicate category names found");
}
foreach (var categoryDto in newConfigDto.Categories)
{
if (string.IsNullOrWhiteSpace(categoryDto.Name))
{
throw new ValidationException("Category name cannot be empty");
}
if (categoryDto is { MaxRatio: < 0, MaxSeedTime: < 0 })
{
throw new ValidationException("Either max ratio or max seed time must be enabled");
}
if (categoryDto.MinSeedTime < 0)
{
throw new ValidationException("Min seed time cannot be negative");
}
}
}
if (newConfigDto.UnlinkedEnabled)
{
if (string.IsNullOrWhiteSpace(newConfigDto.UnlinkedTargetCategory))
{
throw new ValidationException("Unlinked target category cannot be empty");
}
if (newConfigDto.UnlinkedCategories?.Count is null or 0)
{
throw new ValidationException("Unlinked categories cannot be empty");
}
if (newConfigDto.UnlinkedCategories.Contains(newConfigDto.UnlinkedTargetCategory))
{
throw new ValidationException("The unlinked target category should not be present in unlinked categories");
}
if (newConfigDto.UnlinkedCategories.Any(string.IsNullOrWhiteSpace))
{
throw new ValidationException("Empty unlinked category filter found");
}
if (!string.IsNullOrEmpty(newConfigDto.UnlinkedIgnoredRootDir) && !Directory.Exists(newConfigDto.UnlinkedIgnoredRootDir))
{
throw new ValidationException($"{newConfigDto.UnlinkedIgnoredRootDir} root directory does not exist");
}
}
// Get existing configuration
var oldConfig = await _dataContext.DownloadCleanerConfigs
.Include(x => x.Categories)
.FirstAsync();
@@ -132,25 +80,29 @@ public sealed class DownloadCleanerConfigController : ControllerBase
oldConfig.UnlinkedEnabled = newConfigDto.UnlinkedEnabled;
oldConfig.UnlinkedTargetCategory = newConfigDto.UnlinkedTargetCategory;
oldConfig.UnlinkedUseTag = newConfigDto.UnlinkedUseTag;
oldConfig.UnlinkedIgnoredRootDir = newConfigDto.UnlinkedIgnoredRootDir;
oldConfig.UnlinkedIgnoredRootDirs = newConfigDto.UnlinkedIgnoredRootDirs;
oldConfig.UnlinkedCategories = newConfigDto.UnlinkedCategories;
oldConfig.IgnoredDownloads = newConfigDto.IgnoredDownloads;
oldConfig.Categories.Clear();
_dataContext.CleanCategories.RemoveRange(oldConfig.Categories);
_dataContext.SeedingRules.RemoveRange(oldConfig.Categories);
_dataContext.DownloadCleanerConfigs.Update(oldConfig);
foreach (var categoryDto in newConfigDto.Categories)
{
_dataContext.CleanCategories.Add(new CleanCategory
_dataContext.SeedingRules.Add(new SeedingRule
{
Name = categoryDto.Name,
MaxRatio = categoryDto.MaxRatio,
MinSeedTime = categoryDto.MinSeedTime,
MaxSeedTime = categoryDto.MaxSeedTime,
DeleteSourceFiles = categoryDto.DeleteSourceFiles,
DownloadCleanerConfigId = oldConfig.Id
});
}
oldConfig.Validate();
await _dataContext.SaveChangesAsync();
await UpdateJobSchedule(oldConfig, JobType.DownloadCleaner);

View File

@@ -16,7 +16,7 @@ public sealed record CreateDownloadClientRequest
public DownloadClientType Type { get; init; }
public Uri? Host { get; init; }
public string? Host { get; init; }
public string? Username { get; init; }
@@ -24,6 +24,8 @@ public sealed record CreateDownloadClientRequest
public string? UrlBase { get; init; }
public string? ExternalUrl { get; init; }
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
@@ -31,10 +33,20 @@ public sealed record CreateDownloadClientRequest
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
if (string.IsNullOrWhiteSpace(Host))
{
throw new ValidationException("Host cannot be empty");
}
if (!Uri.TryCreate(Host, UriKind.RelativeOrAbsolute, out _))
{
throw new ValidationException("Host is not a valid URL");
}
if (!string.IsNullOrWhiteSpace(ExternalUrl) && !Uri.TryCreate(ExternalUrl, UriKind.RelativeOrAbsolute, out _))
{
throw new ValidationException("External URL is not a valid URL");
}
}
public DownloadClientConfig ToEntity() => new()
@@ -43,9 +55,10 @@ public sealed record CreateDownloadClientRequest
Name = Name,
TypeName = TypeName,
Type = Type,
Host = Host,
Host = new Uri(Host!, UriKind.RelativeOrAbsolute),
Username = Username,
Password = Password,
UrlBase = UrlBase,
ExternalUrl = !string.IsNullOrWhiteSpace(ExternalUrl) ? new Uri(ExternalUrl, UriKind.RelativeOrAbsolute) : null,
};
}

View File

@@ -0,0 +1,48 @@
using System;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Persistence.Models.Configuration;
namespace Cleanuparr.Api.Features.DownloadClient.Contracts.Requests;
public sealed record TestDownloadClientRequest
{
public DownloadClientTypeName TypeName { get; init; }
public DownloadClientType Type { get; init; }
public string? Host { get; init; }
public string? Username { get; init; }
public string? Password { get; init; }
public string? UrlBase { get; init; }
public void Validate()
{
if (string.IsNullOrWhiteSpace(Host))
{
throw new ValidationException("Host cannot be empty");
}
if (!Uri.TryCreate(Host, UriKind.RelativeOrAbsolute, out _))
{
throw new ValidationException("Host is not a valid URL");
}
}
public DownloadClientConfig ToTestConfig() => new()
{
Id = Guid.NewGuid(),
Enabled = true,
Name = "Test Client",
TypeName = TypeName,
Type = Type,
Host = new Uri(Host!, UriKind.RelativeOrAbsolute),
Username = Username,
Password = Password,
UrlBase = UrlBase,
};
}

View File

@@ -16,7 +16,7 @@ public sealed record UpdateDownloadClientRequest
public DownloadClientType Type { get; init; }
public Uri? Host { get; init; }
public string? Host { get; init; }
public string? Username { get; init; }
@@ -24,6 +24,8 @@ public sealed record UpdateDownloadClientRequest
public string? UrlBase { get; init; }
public string? ExternalUrl { get; init; }
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
@@ -31,10 +33,20 @@ public sealed record UpdateDownloadClientRequest
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
if (string.IsNullOrWhiteSpace(Host))
{
throw new ValidationException("Host cannot be empty");
}
if (!Uri.TryCreate(Host, UriKind.RelativeOrAbsolute, out _))
{
throw new ValidationException("Host is not a valid URL");
}
if (!string.IsNullOrWhiteSpace(ExternalUrl) && !Uri.TryCreate(ExternalUrl, UriKind.RelativeOrAbsolute, out _))
{
throw new ValidationException("External URL is not a valid URL");
}
}
public DownloadClientConfig ApplyTo(DownloadClientConfig existing) => existing with
@@ -43,9 +55,10 @@ public sealed record UpdateDownloadClientRequest
Name = Name,
TypeName = TypeName,
Type = Type,
Host = Host,
Host = new Uri(Host!, UriKind.RelativeOrAbsolute),
Username = Username,
Password = Password,
UrlBase = UrlBase,
ExternalUrl = !string.IsNullOrWhiteSpace(ExternalUrl) ? new Uri(ExternalUrl, UriKind.RelativeOrAbsolute) : null,
};
}

View File

@@ -2,12 +2,11 @@ using System;
using System.Linq;
using Cleanuparr.Api.Features.DownloadClient.Contracts.Requests;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.DownloadClient.Controllers;
@@ -18,15 +17,18 @@ public sealed class DownloadClientController : ControllerBase
private readonly ILogger<DownloadClientController> _logger;
private readonly DataContext _dataContext;
private readonly IDynamicHttpClientFactory _dynamicHttpClientFactory;
private readonly IDownloadServiceFactory _downloadServiceFactory;
public DownloadClientController(
ILogger<DownloadClientController> logger,
DataContext dataContext,
IDynamicHttpClientFactory dynamicHttpClientFactory)
IDynamicHttpClientFactory dynamicHttpClientFactory,
IDownloadServiceFactory downloadServiceFactory)
{
_logger = logger;
_dataContext = dataContext;
_dynamicHttpClientFactory = dynamicHttpClientFactory;
_downloadServiceFactory = downloadServiceFactory;
}
[HttpGet("download_client")]
@@ -146,4 +148,33 @@ public sealed class DownloadClientController : ControllerBase
DataContext.Lock.Release();
}
}
[HttpPost("download_client/test")]
public async Task<IActionResult> TestDownloadClient([FromBody] TestDownloadClientRequest request)
{
try
{
request.Validate();
var testConfig = request.ToTestConfig();
using var downloadService = _downloadServiceFactory.GetDownloadService(testConfig);
var healthResult = await downloadService.HealthCheckAsync();
if (healthResult.IsHealthy)
{
return Ok(new
{
Message = $"Connection to {request.TypeName} successful",
ResponseTime = healthResult.ResponseTime.TotalMilliseconds
});
}
return BadRequest(new { Message = healthResult.ErrorMessage ?? "Connection failed" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test {TypeName} client connection", request.TypeName);
return BadRequest(new { Message = $"Connection failed: {ex.Message}" });
}
}
}

View File

@@ -24,10 +24,14 @@ public sealed record UpdateGeneralConfigRequest
public ushort SearchDelay { get; init; } = Constants.DefaultSearchDelaySeconds;
public bool StatusCheckEnabled { get; init; } = true;
public string EncryptionKey { get; init; } = Guid.NewGuid().ToString();
public List<string> IgnoredDownloads { get; init; } = [];
public ushort StrikeInactivityWindowHours { get; init; } = 24;
public UpdateLoggingConfigRequest Log { get; init; } = new();
public GeneralConfig ApplyTo(GeneralConfig existingConfig, IServiceProvider services, ILogger logger)
@@ -39,8 +43,10 @@ public sealed record UpdateGeneralConfigRequest
existingConfig.HttpCertificateValidation = HttpCertificateValidation;
existingConfig.SearchEnabled = SearchEnabled;
existingConfig.SearchDelay = SearchDelay;
existingConfig.StatusCheckEnabled = StatusCheckEnabled;
existingConfig.EncryptionKey = EncryptionKey;
existingConfig.IgnoredDownloads = IgnoredDownloads;
existingConfig.StrikeInactivityWindowHours = StrikeInactivityWindowHours;
bool loggingChanged = Log.ApplyTo(existingConfig.Log);
@@ -58,6 +64,16 @@ public sealed record UpdateGeneralConfigRequest
throw new ValidationException("HTTP_TIMEOUT must be greater than 0");
}
if (config.StrikeInactivityWindowHours is 0)
{
throw new ValidationException("STRIKE_INACTIVITY_WINDOW_HOURS must be greater than 0");
}
if (config.StrikeInactivityWindowHours > 168)
{
throw new ValidationException("STRIKE_INACTIVITY_WINDOW_HOURS must be less than or equal to 168");
}
config.Log.Validate();
}

View File

@@ -78,6 +78,21 @@ public sealed class GeneralConfigController : ControllerBase
}
}
[HttpPost("strikes/purge")]
public async Task<IActionResult> PurgeAllStrikes(
[FromServices] EventsContext eventsContext)
{
var deletedStrikes = await eventsContext.Strikes.ExecuteDeleteAsync();
var deletedItems = await eventsContext.DownloadItems
.Where(d => !d.Strikes.Any())
.ExecuteDeleteAsync();
_logger.LogWarning("Purged all strikes: {strikes} strikes, {items} download items removed",
deletedStrikes, deletedItems);
return Ok(new { DeletedStrikes = deletedStrikes, DeletedItems = deletedItems });
}
private void ClearStrikesCacheIfNeeded(bool wasDryRun, bool isDryRun)
{
if (!wasDryRun || isDryRun)

View File

@@ -2,7 +2,7 @@ using System.ComponentModel.DataAnnotations;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.MalwareBlocker.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;

View File

@@ -1,10 +1,18 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateAppriseProviderRequest : CreateNotificationProviderRequestBase
{
public AppriseMode Mode { get; init; } = AppriseMode.Api;
// API mode fields
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
// CLI mode fields
public string? ServiceUrls { get; init; }
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateDiscordProviderRequest : CreateNotificationProviderRequestBase
{
public string WebhookUrl { get; init; } = string.Empty;
public string Username { get; init; } = string.Empty;
public string AvatarUrl { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateGotifyProviderRequest : CreateNotificationProviderRequestBase
{
public string ServerUrl { get; init; } = string.Empty;
public string ApplicationToken { get; init; } = string.Empty;
public int Priority { get; init; } = 5;
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreatePushoverProviderRequest : CreateNotificationProviderRequestBase
{
public string ApiToken { get; init; } = string.Empty;
public string UserKey { get; init; } = string.Empty;
public List<string> Devices { get; init; } = [];
public PushoverPriority Priority { get; init; } = PushoverPriority.Normal;
public string? Sound { get; init; }
public int? Retry { get; init; }
public int? Expire { get; init; }
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,12 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public sealed record CreateTelegramProviderRequest : CreateNotificationProviderRequestBase
{
public string BotToken { get; init; } = string.Empty;
public string ChatId { get; init; } = string.Empty;
public string? TopicId { get; init; }
public bool SendSilently { get; init; }
}

View File

@@ -1,10 +1,18 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestAppriseProviderRequest
{
public AppriseMode Mode { get; init; } = AppriseMode.Api;
// API mode fields
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
// CLI mode fields
public string? ServiceUrls { get; init; }
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestDiscordProviderRequest
{
public string WebhookUrl { get; init; } = string.Empty;
public string Username { get; init; } = string.Empty;
public string AvatarUrl { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestGotifyProviderRequest
{
public string ServerUrl { get; init; } = string.Empty;
public string ApplicationToken { get; init; } = string.Empty;
public int Priority { get; init; } = 5;
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestPushoverProviderRequest
{
public string ApiToken { get; init; } = string.Empty;
public string UserKey { get; init; } = string.Empty;
public List<string> Devices { get; init; } = [];
public PushoverPriority Priority { get; init; } = PushoverPriority.Normal;
public string? Sound { get; init; }
public int? Retry { get; init; }
public int? Expire { get; init; }
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,12 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public sealed record TestTelegramProviderRequest
{
public string BotToken { get; init; } = string.Empty;
public string ChatId { get; init; } = string.Empty;
public string? TopicId { get; init; }
public bool SendSilently { get; init; }
}

View File

@@ -1,10 +1,18 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateAppriseProviderRequest : UpdateNotificationProviderRequestBase
{
public AppriseMode Mode { get; init; } = AppriseMode.Api;
// API mode fields
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
// CLI mode fields
public string? ServiceUrls { get; init; }
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateDiscordProviderRequest : UpdateNotificationProviderRequestBase
{
public string WebhookUrl { get; init; } = string.Empty;
public string Username { get; init; } = string.Empty;
public string AvatarUrl { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateGotifyProviderRequest : UpdateNotificationProviderRequestBase
{
public string ServerUrl { get; init; } = string.Empty;
public string ApplicationToken { get; init; } = string.Empty;
public int Priority { get; init; } = 5;
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdatePushoverProviderRequest : UpdateNotificationProviderRequestBase
{
public string ApiToken { get; init; } = string.Empty;
public string UserKey { get; init; } = string.Empty;
public List<string> Devices { get; init; } = [];
public PushoverPriority Priority { get; init; } = PushoverPriority.Normal;
public string? Sound { get; init; }
public int? Retry { get; init; }
public int? Expire { get; init; }
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,12 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public sealed record UpdateTelegramProviderRequest : UpdateNotificationProviderRequestBase
{
public string BotToken { get; init; } = string.Empty;
public string ChatId { get; init; } = string.Empty;
public string? TopicId { get; init; }
public bool SendSilently { get; init; }
}

View File

@@ -1,15 +1,18 @@
using System.Net;
using Cleanuparr.Api.Features.Notifications.Contracts.Requests;
using Cleanuparr.Api.Features.Notifications.Contracts.Responses;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Apprise;
using Cleanuparr.Infrastructure.Features.Notifications.Discord;
using Cleanuparr.Infrastructure.Features.Notifications.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Features.Notifications.Telegram;
using Cleanuparr.Infrastructure.Features.Notifications.Gotify;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Notification;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.Notifications.Controllers;
@@ -21,17 +24,20 @@ public sealed class NotificationProvidersController : ControllerBase
private readonly DataContext _dataContext;
private readonly INotificationConfigurationService _notificationConfigurationService;
private readonly NotificationService _notificationService;
private readonly IAppriseCliDetector _appriseCliDetector;
public NotificationProvidersController(
ILogger<NotificationProvidersController> logger,
DataContext dataContext,
INotificationConfigurationService notificationConfigurationService,
NotificationService notificationService)
NotificationService notificationService,
IAppriseCliDetector appriseCliDetector)
{
_logger = logger;
_dataContext = dataContext;
_notificationConfigurationService = notificationConfigurationService;
_notificationService = notificationService;
_appriseCliDetector = appriseCliDetector;
}
[HttpGet]
@@ -44,6 +50,10 @@ public sealed class NotificationProvidersController : ControllerBase
.Include(p => p.NotifiarrConfiguration)
.Include(p => p.AppriseConfiguration)
.Include(p => p.NtfyConfiguration)
.Include(p => p.PushoverConfiguration)
.Include(p => p.TelegramConfiguration)
.Include(p => p.DiscordConfiguration)
.Include(p => p.GotifyConfiguration)
.AsNoTracking()
.ToListAsync();
@@ -68,6 +78,10 @@ public sealed class NotificationProvidersController : ControllerBase
NotificationProviderType.Notifiarr => p.NotifiarrConfiguration ?? new object(),
NotificationProviderType.Apprise => p.AppriseConfiguration ?? new object(),
NotificationProviderType.Ntfy => p.NtfyConfiguration ?? new object(),
NotificationProviderType.Pushover => p.PushoverConfiguration ?? new object(),
NotificationProviderType.Telegram => p.TelegramConfiguration ?? new object(),
NotificationProviderType.Discord => p.DiscordConfiguration ?? new object(),
NotificationProviderType.Gotify => p.GotifyConfiguration ?? new object(),
_ => new object()
}
})
@@ -84,6 +98,18 @@ public sealed class NotificationProvidersController : ControllerBase
}
}
[HttpGet("apprise/cli-status")]
public async Task<IActionResult> GetAppriseCliStatus()
{
string? version = await _appriseCliDetector.GetAppriseVersionAsync();
return Ok(new
{
Available = version is not null,
Version = version
});
}
[HttpPost("notifiarr")]
public async Task<IActionResult> CreateNotifiarrProvider([FromBody] CreateNotifiarrProviderRequest newProvider)
{
@@ -160,9 +186,11 @@ public sealed class NotificationProvidersController : ControllerBase
var appriseConfig = new AppriseConfig
{
Mode = newProvider.Mode,
Url = newProvider.Url,
Key = newProvider.Key,
Tags = newProvider.Tags
Tags = newProvider.Tags,
ServiceUrls = newProvider.ServiceUrls
};
appriseConfig.Validate();
@@ -270,6 +298,69 @@ public sealed class NotificationProvidersController : ControllerBase
}
}
[HttpPost("telegram")]
public async Task<IActionResult> CreateTelegramProvider([FromBody] CreateTelegramProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var telegramConfig = new TelegramConfig
{
BotToken = newProvider.BotToken,
ChatId = newProvider.ChatId,
TopicId = newProvider.TopicId,
SendSilently = newProvider.SendSilently
};
telegramConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Telegram,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
TelegramConfiguration = telegramConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Telegram provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("notifiarr/{id:guid}")]
public async Task<IActionResult> UpdateNotifiarrProvider(Guid id, [FromBody] UpdateNotifiarrProviderRequest updatedProvider)
{
@@ -380,9 +471,11 @@ public sealed class NotificationProvidersController : ControllerBase
var appriseConfig = new AppriseConfig
{
Mode = updatedProvider.Mode,
Url = updatedProvider.Url,
Key = updatedProvider.Key,
Tags = updatedProvider.Tags
Tags = updatedProvider.Tags,
ServiceUrls = updatedProvider.ServiceUrls
};
if (existingProvider.AppriseConfiguration != null)
@@ -514,6 +607,87 @@ public sealed class NotificationProvidersController : ControllerBase
}
}
[HttpPut("telegram/{id:guid}")]
public async Task<IActionResult> UpdateTelegramProvider(Guid id, [FromBody] UpdateTelegramProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.TelegramConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Telegram);
if (existingProvider == null)
{
return NotFound($"Telegram provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var telegramConfig = new TelegramConfig
{
BotToken = updatedProvider.BotToken,
ChatId = updatedProvider.ChatId,
TopicId = updatedProvider.TopicId,
SendSilently = updatedProvider.SendSilently
};
if (existingProvider.TelegramConfiguration != null)
{
telegramConfig = telegramConfig with { Id = existingProvider.TelegramConfiguration.Id };
}
telegramConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
TelegramConfiguration = telegramConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Telegram provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpDelete("{id:guid}")]
public async Task<IActionResult> DeleteNotificationProvider(Guid id)
{
@@ -524,6 +698,10 @@ public sealed class NotificationProvidersController : ControllerBase
.Include(p => p.NotifiarrConfiguration)
.Include(p => p.AppriseConfiguration)
.Include(p => p.NtfyConfiguration)
.Include(p => p.PushoverConfiguration)
.Include(p => p.TelegramConfiguration)
.Include(p => p.DiscordConfiguration)
.Include(p => p.GotifyConfiguration)
.FirstOrDefaultAsync(p => p.Id == id);
if (existingProvider == null)
@@ -583,12 +761,12 @@ public sealed class NotificationProvidersController : ControllerBase
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
return Ok(new { Message = "Test notification sent successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Notifiarr provider");
throw;
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
@@ -599,9 +777,11 @@ public sealed class NotificationProvidersController : ControllerBase
{
var appriseConfig = new AppriseConfig
{
Mode = testRequest.Mode,
Url = testRequest.Url,
Key = testRequest.Key,
Tags = testRequest.Tags
Tags = testRequest.Tags,
ServiceUrls = testRequest.ServiceUrls
};
appriseConfig.Validate();
@@ -624,12 +804,16 @@ public sealed class NotificationProvidersController : ControllerBase
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
return Ok(new { Message = "Test notification sent successfully" });
}
catch (AppriseException exception)
{
return StatusCode((int)HttpStatusCode.InternalServerError, exception.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Apprise provider");
throw;
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
@@ -670,12 +854,59 @@ public sealed class NotificationProvidersController : ControllerBase
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
return Ok(new { Message = "Test notification sent successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Ntfy provider");
throw;
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
[HttpPost("telegram/test")]
public async Task<IActionResult> TestTelegramProvider([FromBody] TestTelegramProviderRequest testRequest)
{
try
{
var telegramConfig = new TelegramConfig
{
BotToken = testRequest.BotToken,
ChatId = testRequest.ChatId,
TopicId = testRequest.TopicId,
SendSilently = testRequest.SendSilently
};
telegramConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Telegram,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = telegramConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully" });
}
catch (TelegramException ex)
{
_logger.LogWarning(ex, "Failed to test Telegram provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Telegram provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
@@ -701,8 +932,586 @@ public sealed class NotificationProvidersController : ControllerBase
NotificationProviderType.Notifiarr => provider.NotifiarrConfiguration ?? new object(),
NotificationProviderType.Apprise => provider.AppriseConfiguration ?? new object(),
NotificationProviderType.Ntfy => provider.NtfyConfiguration ?? new object(),
NotificationProviderType.Pushover => provider.PushoverConfiguration ?? new object(),
NotificationProviderType.Telegram => provider.TelegramConfiguration ?? new object(),
NotificationProviderType.Discord => provider.DiscordConfiguration ?? new object(),
NotificationProviderType.Gotify => provider.GotifyConfiguration ?? new object(),
_ => new object()
}
};
}
[HttpPost("discord")]
public async Task<IActionResult> CreateDiscordProvider([FromBody] CreateDiscordProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var discordConfig = new DiscordConfig
{
WebhookUrl = newProvider.WebhookUrl,
Username = newProvider.Username,
AvatarUrl = newProvider.AvatarUrl
};
discordConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Discord,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
DiscordConfiguration = discordConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Discord provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("discord/{id:guid}")]
public async Task<IActionResult> UpdateDiscordProvider(Guid id, [FromBody] UpdateDiscordProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.DiscordConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Discord);
if (existingProvider == null)
{
return NotFound($"Discord provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var discordConfig = new DiscordConfig
{
WebhookUrl = updatedProvider.WebhookUrl,
Username = updatedProvider.Username,
AvatarUrl = updatedProvider.AvatarUrl
};
if (existingProvider.DiscordConfiguration != null)
{
discordConfig = discordConfig with { Id = existingProvider.DiscordConfiguration.Id };
}
discordConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
DiscordConfiguration = discordConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Discord provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("discord/test")]
public async Task<IActionResult> TestDiscordProvider([FromBody] TestDiscordProviderRequest testRequest)
{
try
{
var discordConfig = new DiscordConfig
{
WebhookUrl = testRequest.WebhookUrl,
Username = testRequest.Username,
AvatarUrl = testRequest.AvatarUrl
};
discordConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Discord,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = discordConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully" });
}
catch (DiscordException ex)
{
_logger.LogWarning(ex, "Failed to test Discord provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Discord provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
[HttpPost("pushover")]
public async Task<IActionResult> CreatePushoverProvider([FromBody] CreatePushoverProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var pushoverConfig = new PushoverConfig
{
ApiToken = newProvider.ApiToken,
UserKey = newProvider.UserKey,
Devices = newProvider.Devices,
Priority = newProvider.Priority,
Sound = newProvider.Sound,
Retry = newProvider.Retry,
Expire = newProvider.Expire,
Tags = newProvider.Tags
};
pushoverConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Pushover,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
PushoverConfiguration = pushoverConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Pushover provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("pushover/{id:guid}")]
public async Task<IActionResult> UpdatePushoverProvider(Guid id, [FromBody] UpdatePushoverProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.PushoverConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Pushover);
if (existingProvider == null)
{
return NotFound($"Pushover provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var pushoverConfig = new PushoverConfig
{
ApiToken = updatedProvider.ApiToken,
UserKey = updatedProvider.UserKey,
Devices = updatedProvider.Devices,
Priority = updatedProvider.Priority,
Sound = updatedProvider.Sound,
Retry = updatedProvider.Retry,
Expire = updatedProvider.Expire,
Tags = updatedProvider.Tags
};
if (existingProvider.PushoverConfiguration != null)
{
pushoverConfig = pushoverConfig with { Id = existingProvider.PushoverConfiguration.Id };
}
pushoverConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
PushoverConfiguration = pushoverConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Pushover provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("pushover/test")]
public async Task<IActionResult> TestPushoverProvider([FromBody] TestPushoverProviderRequest testRequest)
{
try
{
var pushoverConfig = new PushoverConfig
{
ApiToken = testRequest.ApiToken,
UserKey = testRequest.UserKey,
Devices = testRequest.Devices,
Priority = testRequest.Priority,
Sound = testRequest.Sound,
Retry = testRequest.Retry,
Expire = testRequest.Expire,
Tags = testRequest.Tags
};
pushoverConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Pushover,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = pushoverConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Pushover provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
[HttpPost("gotify")]
public async Task<IActionResult> CreateGotifyProvider([FromBody] CreateGotifyProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var gotifyConfig = new GotifyConfig
{
ServerUrl = newProvider.ServerUrl,
ApplicationToken = newProvider.ApplicationToken,
Priority = newProvider.Priority
};
gotifyConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Gotify,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
GotifyConfiguration = gotifyConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Gotify provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("gotify/{id:guid}")]
public async Task<IActionResult> UpdateGotifyProvider(Guid id, [FromBody] UpdateGotifyProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.GotifyConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Gotify);
if (existingProvider == null)
{
return NotFound($"Gotify provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var gotifyConfig = new GotifyConfig
{
ServerUrl = updatedProvider.ServerUrl,
ApplicationToken = updatedProvider.ApplicationToken,
Priority = updatedProvider.Priority
};
if (existingProvider.GotifyConfiguration != null)
{
gotifyConfig = gotifyConfig with { Id = existingProvider.GotifyConfiguration.Id };
}
gotifyConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
GotifyConfiguration = gotifyConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Gotify provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("gotify/test")]
public async Task<IActionResult> TestGotifyProvider([FromBody] TestGotifyProviderRequest testRequest)
{
try
{
var gotifyConfig = new GotifyConfig
{
ServerUrl = testRequest.ServerUrl,
ApplicationToken = testRequest.ApplicationToken,
Priority = testRequest.Priority
};
gotifyConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Gotify,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = gotifyConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully" });
}
catch (GotifyException ex)
{
_logger.LogWarning(ex, "Failed to test Gotify provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Gotify provider");
return BadRequest(new { Message = $"Test failed: {ex.Message}" });
}
}
}
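
For reference, the test endpoints above now return 200 with a message on success and 400 with the failure reason instead of throwing. A hedged usage sketch against the Telegram test route, assuming the controller is mounted at /api/notificationproviders (the class-level route attribute is not visible in this hunk) and the default port 11011:

using System;
using System.Net.Http;
using System.Net.Http.Json;

// Assumed route prefix; adjust to the controller's actual [Route] attribute.
using var http = new HttpClient { BaseAddress = new Uri("http://localhost:11011") };

var response = await http.PostAsJsonAsync("/api/notificationproviders/telegram/test", new
{
    BotToken = "<bot token>",
    ChatId = "<chat id>",
    TopicId = (string?)null,
    SendSilently = false
});

// 200 -> { "message": "Test notification sent successfully" }
// 400 -> { "message": "Test failed: ..." }
Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");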

View File

@@ -13,4 +13,6 @@ public sealed record UpdateQueueCleanerConfigRequest
public FailedImportConfig FailedImport { get; init; } = new();
public ushort DownloadingMetadataMaxStrikes { get; init; }
public List<string> IgnoredDownloads { get; set; } = [];
}

View File

@@ -1,7 +1,7 @@
using System.ComponentModel.DataAnnotations;
using Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;
@@ -59,8 +59,6 @@ public sealed class QueueCleanerConfigController : ControllerBase
CronValidationHelper.ValidateCronExpression(newConfigDto.CronExpression);
}
newConfigDto.FailedImport.Validate();
var oldConfig = await _dataContext.QueueCleanerConfigs
.FirstAsync();
@@ -69,6 +67,9 @@ public sealed class QueueCleanerConfigController : ControllerBase
oldConfig.UseAdvancedScheduling = newConfigDto.UseAdvancedScheduling;
oldConfig.FailedImport = newConfigDto.FailedImport;
oldConfig.DownloadingMetadataMaxStrikes = newConfigDto.DownloadingMetadataMaxStrikes;
oldConfig.IgnoredDownloads = newConfigDto.IgnoredDownloads;
oldConfig.Validate();
await _dataContext.SaveChangesAsync();

View File

@@ -1,7 +1,12 @@
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.State;
using Microsoft.AspNetCore.SignalR;
using Quartz;
using Serilog.Context;
@@ -14,48 +19,73 @@ public sealed class GenericJob<T> : IJob
{
private readonly ILogger<GenericJob<T>> _logger;
private readonly IServiceScopeFactory _scopeFactory;
public GenericJob(ILogger<GenericJob<T>> logger, IServiceScopeFactory scopeFactory)
{
_logger = logger;
_scopeFactory = scopeFactory;
}
public async Task Execute(IJobExecutionContext context)
{
using var _ = LogContext.PushProperty("JobName", typeof(T).Name);
Guid jobRunId = Guid.CreateVersion7();
JobType jobType = Enum.Parse<JobType>(typeof(T).Name);
JobRunStatus? status = null;
try
{
await using var scope = _scopeFactory.CreateAsyncScope();
var eventsContext = scope.ServiceProvider.GetRequiredService<EventsContext>();
var hubContext = scope.ServiceProvider.GetRequiredService<IHubContext<AppHub>>();
var jobManagementService = scope.ServiceProvider.GetRequiredService<IJobManagementService>();
await BroadcastJobStatus(hubContext, jobManagementService, false);
var jobRun = new JobRun { Id = jobRunId, Type = jobType };
eventsContext.JobRuns.Add(jobRun);
await eventsContext.SaveChangesAsync();
ContextProvider.SetJobRunId(jobRunId);
using var __ = LogContext.PushProperty(LogProperties.JobRunId, jobRunId.ToString());
await BroadcastJobStatus(hubContext, jobManagementService, jobType, false);
var handler = scope.ServiceProvider.GetRequiredService<T>();
await handler.ExecuteAsync();
await BroadcastJobStatus(hubContext, jobManagementService, true);
status = JobRunStatus.Completed;
await BroadcastJobStatus(hubContext, jobManagementService, jobType, true);
}
catch (Exception ex)
{
_logger.LogError(ex, "{name} failed", typeof(T).Name);
status = JobRunStatus.Failed;
}
finally
{
await using var finalScope = _scopeFactory.CreateAsyncScope();
var eventsContext = finalScope.ServiceProvider.GetRequiredService<EventsContext>();
var jobRun = await eventsContext.JobRuns.FindAsync(jobRunId);
if (jobRun is not null)
{
jobRun.CompletedAt = DateTime.UtcNow;
jobRun.Status = status;
await eventsContext.SaveChangesAsync();
}
}
}
private async Task BroadcastJobStatus(IHubContext<AppHub> hubContext, IJobManagementService jobManagementService, bool isFinished)
private async Task BroadcastJobStatus(IHubContext<AppHub> hubContext, IJobManagementService jobManagementService, JobType jobType, bool isFinished)
{
try
{
JobType jobType = Enum.Parse<JobType>(typeof(T).Name);
JobInfo jobInfo = await jobManagementService.GetJob(jobType);
if (isFinished)
{
jobInfo.Status = "Scheduled";
}
await hubContext.Clients.All.SendAsync("JobStatusUpdate", jobInfo);
}
catch (Exception ex)

View File

@@ -1,23 +0,0 @@
using System.Diagnostics.CodeAnalysis;
using Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
namespace Cleanuparr.Api.Models;
/// <summary>
/// Legacy namespace shim; prefer <see cref="UpdateDownloadCleanerConfigRequest"/> from
/// <c>Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests</c>.
/// </summary>
[Obsolete("Use Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests.UpdateDownloadCleanerConfigRequest instead")]
[SuppressMessage("Design", "CA1000", Justification = "Temporary alias during refactor")]
[SuppressMessage("Usage", "CA2225", Justification = "Alias type")]
public record UpdateDownloadCleanerConfigDto : UpdateDownloadCleanerConfigRequest;
/// <summary>
/// Legacy namespace shim; prefer <see cref="CleanCategoryRequest"/> from
/// <c>Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests</c>.
/// </summary>
[Obsolete("Use Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests.CleanCategoryRequest instead")]
[SuppressMessage("Design", "CA1000", Justification = "Temporary alias during refactor")]
[SuppressMessage("Usage", "CA2225", Justification = "Alias type")]
public record CleanCategoryDto : CleanCategoryRequest;

View File

@@ -1,3 +1,4 @@
using System.Net;
using System.Runtime.InteropServices;
using System.Text.Json.Serialization;
using Cleanuparr.Api;
@@ -33,12 +34,24 @@ builder.Configuration
int.TryParse(builder.Configuration.GetValue<string>("PORT"), out int port);
port = port is 0 ? 11011 : port;
string? bindAddress = builder.Configuration.GetValue<string>("BIND_ADDRESS");
if (!builder.Environment.IsDevelopment())
{
// Bind Kestrel to the configured address and port (port already defaults to 11011 above)
builder.WebHost.ConfigureKestrel(options =>
{
options.ListenAnyIP(port);
if (string.IsNullOrEmpty(bindAddress) || bindAddress is "0.0.0.0" || bindAddress is "*")
{
options.ListenAnyIP(port);
}
else if (IPAddress.TryParse(bindAddress, out var ipAddress))
{
options.Listen(ipAddress, port);
}
else
{
throw new ArgumentException($"Invalid BIND_ADDRESS: '{bindAddress}'");
}
});
}
@@ -124,7 +137,7 @@ if (basePath is not null)
}
}
logger.LogInformation("Server configuration: PORT={port}, BASE_PATH={basePath}", port, basePath ?? "/");
logger.LogInformation("Server configuration: BIND_ADDRESS={bindAddress}, PORT={port}, BASE_PATH={basePath}", bindAddress ?? "0.0.0.0", port, basePath ?? "/");
// Initialize the host
app.Init();
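
The net effect of the Kestrel block above: an unset BIND_ADDRESS, 0.0.0.0, or * binds all interfaces, a literal IPv4/IPv6 address binds only that address, and anything else (such as a hostname) fails fast at startup. A small stand-alone illustration mirroring the same checks:

using System;
using System.Net;

// Mirrors the startup logic: unset/0.0.0.0/* -> any interface, literal IP -> that IP, otherwise invalid.
static string Describe(string? bindAddress) =>
    string.IsNullOrEmpty(bindAddress) || bindAddress is "0.0.0.0" or "*"
        ? "listen on all interfaces"
        : IPAddress.TryParse(bindAddress, out _)
            ? $"listen on {bindAddress} only"
            : "invalid: startup throws ArgumentException";

foreach (var value in new string?[] { null, "0.0.0.0", "*", "127.0.0.1", "::1", "localhost" })
{
    Console.WriteLine($"BIND_ADDRESS={value ?? "(unset)"} -> {Describe(value)}");
}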

View File

@@ -1,13 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
</ItemGroup>
</Project>

View File

@@ -2,17 +2,17 @@ namespace Cleanuparr.Domain.Entities.Arr.Queue;
public sealed record QueueRecord
{
// Sonarr and Whisparr
// Sonarr and Whisparr v2
public long SeriesId { get; init; }
public long EpisodeId { get; init; }
public long SeasonNumber { get; init; }
public QueueSeries? Series { get; init; }
// Radarr
// Radarr and Whisparr v3
public long MovieId { get; init; }
public QueueSeries? Movie { get; init; }
public QueueMovie? Movie { get; init; }
// Lidarr
public long ArtistId { get; init; }

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Domain.Entities.HealthCheck;
public sealed record HealthCheckResult
{
public bool IsHealthy { get; set; }
public string? ErrorMessage { get; set; }
public TimeSpan ResponseTime { get; set; }
}

View File

@@ -4,49 +4,36 @@ namespace Cleanuparr.Domain.Entities;
/// Universal abstraction for a torrent item across all download clients.
/// Provides a unified interface for accessing torrent properties and state.
/// </summary>
public interface ITorrentItem
public interface ITorrentItemWrapper
{
// Basic identification
string Hash { get; }
string Name { get; }
// Privacy and tracking
bool IsPrivate { get; }
IReadOnlyList<string> Trackers { get; }
// Size and progress
long Size { get; }
double CompletionPercentage { get; }
long DownloadedBytes { get; }
long TotalUploaded { get; }
// Speed and transfer rates
long DownloadSpeed { get; }
long UploadSpeed { get; }
double Ratio { get; }
// Time tracking
long Eta { get; }
DateTime? DateAdded { get; }
DateTime? DateCompleted { get; }
long SeedingTimeSeconds { get; }
// Categories and tags
string? Category { get; }
IReadOnlyList<string> Tags { get; }
string? Category { get; set; }
string SavePath { get; }
// State checking methods
bool IsDownloading();
bool IsStalled();
bool IsSeeding();
bool IsCompleted();
bool IsPaused();
bool IsQueued();
bool IsChecking();
bool IsAllocating();
bool IsMetadataDownloading();
// Filtering methods
/// <summary>
/// Determines if this torrent should be ignored based on the provided patterns.
/// Checks if any pattern matches the torrent name, hash, or tracker.

View File

@@ -0,0 +1,37 @@
namespace Cleanuparr.Domain.Entities.RTorrent.Response;
/// <summary>
/// Represents a file within a torrent from rTorrent's XML-RPC f.multicall response
/// </summary>
public sealed record RTorrentFile
{
/// <summary>
/// File index within the torrent (0-based)
/// </summary>
public int Index { get; init; }
/// <summary>
/// File path relative to the torrent base directory
/// </summary>
public required string Path { get; init; }
/// <summary>
/// File size in bytes
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Download priority: 0 = skip/don't download, 1 = normal, 2 = high
/// </summary>
public int Priority { get; init; }
/// <summary>
/// Number of completed chunks for this file
/// </summary>
public long CompletedChunks { get; init; }
/// <summary>
/// Total number of chunks for this file
/// </summary>
public long SizeChunks { get; init; }
}
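
Because per-file progress is reported as chunk counts and priority 0 marks a skipped file, the following extension methods (hypothetical helpers, not part of this change set) show how these fields are typically interpreted:

namespace Cleanuparr.Domain.Entities.RTorrent.Response;

// Hypothetical helpers illustrating how the chunk and priority fields above are usually read.
public static class RTorrentFileExtensions
{
    /// <summary>Completion of this file as a percentage (0-100), derived from chunk counts.</summary>
    public static double CompletionPercentage(this RTorrentFile file) =>
        file.SizeChunks <= 0 ? 0 : file.CompletedChunks * 100.0 / file.SizeChunks;

    /// <summary>True when the file is marked "do not download" (priority 0).</summary>
    public static bool IsSkipped(this RTorrentFile file) => file.Priority == 0;
}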

View File

@@ -0,0 +1,72 @@
namespace Cleanuparr.Domain.Entities.RTorrent.Response;
/// <summary>
/// Represents a torrent from rTorrent's XML-RPC multicall response
/// </summary>
public sealed record RTorrentTorrent
{
/// <summary>
/// Torrent info hash (40-character hex string, uppercase)
/// </summary>
public required string Hash { get; init; }
/// <summary>
/// Torrent name
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Whether the torrent is from a private tracker (0 or 1)
/// </summary>
public int IsPrivate { get; init; }
/// <summary>
/// Total size of the torrent in bytes
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Number of bytes completed/downloaded
/// </summary>
public long CompletedBytes { get; init; }
/// <summary>
/// Current download rate in bytes per second
/// </summary>
public long DownRate { get; init; }
/// <summary>
/// Upload/download ratio multiplied by 1000 (e.g., 1500 = 1.5 ratio)
/// </summary>
public long Ratio { get; init; }
/// <summary>
/// Torrent state: 0 = stopped, 1 = started
/// </summary>
public int State { get; init; }
/// <summary>
/// Completion status: 0 = incomplete, 1 = complete
/// </summary>
public int Complete { get; init; }
/// <summary>
/// Unix timestamp when the torrent finished downloading (0 if not finished)
/// </summary>
public long TimestampFinished { get; init; }
/// <summary>
/// Label/category from d.custom1 (commonly used by ruTorrent for labels)
/// </summary>
public string? Label { get; init; }
/// <summary>
/// Base path where the torrent data is stored
/// </summary>
public string? BasePath { get; init; }
/// <summary>
/// List of tracker URLs for this torrent
/// </summary>
public IReadOnlyList<string>? Trackers { get; init; }
}
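
The multicall fields keep rTorrent's raw conventions: the ratio is scaled by 1000, flags are 0/1 integers, and the finish time is a Unix timestamp with 0 meaning "never finished". A hedged normalization sketch (the names are illustrative; the actual mapping in Cleanuparr's rTorrent client is not shown in this diff):

namespace Cleanuparr.Domain.Entities.RTorrent.Response;

// Hypothetical normalization helpers for the raw rTorrent values documented above.
public static class RTorrentTorrentExtensions
{
    /// <summary>d.ratio is scaled by 1000, so 1500 means a 1.5 upload/download ratio.</summary>
    public static double NormalizedRatio(this RTorrentTorrent torrent) => torrent.Ratio / 1000.0;

    /// <summary>d.is_private is reported as 0 or 1.</summary>
    public static bool IsPrivateTracker(this RTorrentTorrent torrent) => torrent.IsPrivate == 1;

    /// <summary>TimestampFinished is a Unix timestamp; 0 means the torrent never completed.</summary>
    public static DateTime? FinishedAtUtc(this RTorrentTorrent torrent) =>
        torrent.TimestampFinished > 0
            ? DateTimeOffset.FromUnixTimeSeconds(torrent.TimestampFinished).UtcDateTime
            : null;
}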

View File

@@ -2,7 +2,7 @@ using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Domain.Entities.Whisparr;
public sealed record WhisparrCommand
public sealed record WhisparrV2Command
{
public string Name { get; set; }

View File

@@ -0,0 +1,8 @@
namespace Cleanuparr.Domain.Entities.Whisparr;
public sealed record WhisparrV3Command
{
public required string Name { get; init; }
public required List<long> MovieIds { get; init; }
}

View File

@@ -0,0 +1,7 @@
namespace Cleanuparr.Domain.Enums;
public enum AppriseMode
{
Api,
Cli
}

View File

@@ -6,4 +6,5 @@ public enum DownloadClientTypeName
Deluge,
Transmission,
uTorrent,
rTorrent,
}

View File

@@ -0,0 +1,7 @@
namespace Cleanuparr.Domain.Enums;
public enum JobRunStatus
{
Completed,
Failed
}

View File

@@ -0,0 +1,9 @@
namespace Cleanuparr.Domain.Enums;
public enum JobType
{
QueueCleaner,
MalwareBlocker,
DownloadCleaner,
BlacklistSynchronizer,
}
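
GenericJob<T> resolves its type with Enum.Parse<JobType>(typeof(T).Name) (see the job diff above), so each member here must match a handler class name exactly; a handler without a matching member only fails at run time. A small hypothetical guard that surfaces such a mismatch with a clearer error:

using System;
using Cleanuparr.Domain.Enums;

// Hypothetical guard mirroring the Enum.Parse<JobType>(typeof(T).Name) call in GenericJob<T>.
public static class JobTypeGuard
{
    public static JobType Resolve<T>()
    {
        if (!Enum.TryParse(typeof(T).Name, out JobType jobType))
        {
            throw new InvalidOperationException(
                $"No JobType member named '{typeof(T).Name}'; add it to the enum before scheduling this job.");
        }

        return jobType;
    }
}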

View File

@@ -4,5 +4,9 @@ public enum NotificationProviderType
{
Notifiarr,
Apprise,
Ntfy
Ntfy,
Pushover,
Telegram,
Discord,
Gotify,
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Domain.Enums;
public enum PushoverPriority
{
Lowest = -2,
Low = -1,
Normal = 0,
High = 1,
Emergency = 2
}
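
Pushover only uses retry and expire when the priority is Emergency (2), and in that case the API requires both: retry of at least 30 seconds and expire of at most 10800 seconds. A hedged validation sketch for the nullable Retry/Expire fields on the Pushover requests earlier in this diff (hypothetical helper; PushoverConfig.Validate() is not shown):

using System.ComponentModel.DataAnnotations;
using Cleanuparr.Domain.Enums;

// Hypothetical check mirroring Pushover's rules for emergency-priority messages.
public static class PushoverPriorityChecks
{
    public static void EnsureEmergencySettings(PushoverPriority priority, int? retry, int? expire)
    {
        if (priority != PushoverPriority.Emergency)
        {
            return; // retry and expire are ignored for non-emergency priorities
        }

        if (retry is null or < 30)
        {
            throw new ValidationException("Emergency priority requires Retry of at least 30 seconds.");
        }

        if (expire is null or < 1 or > 10800)
        {
            throw new ValidationException("Emergency priority requires Expire between 1 and 10800 seconds.");
        }
    }
}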

View File

@@ -0,0 +1,36 @@
namespace Cleanuparr.Domain.Enums;
public static class PushoverSounds
{
public const string Pushover = "pushover";
public const string Bike = "bike";
public const string Bugle = "bugle";
public const string Cashregister = "cashregister";
public const string Classical = "classical";
public const string Cosmic = "cosmic";
public const string Falling = "falling";
public const string Gamelan = "gamelan";
public const string Incoming = "incoming";
public const string Intermission = "intermission";
public const string Magic = "magic";
public const string Mechanical = "mechanical";
public const string Pianobar = "pianobar";
public const string Siren = "siren";
public const string Spacealarm = "spacealarm";
public const string Tugboat = "tugboat";
public const string Alien = "alien";
public const string Climb = "climb";
public const string Persistent = "persistent";
public const string Echo = "echo";
public const string Updown = "updown";
public const string Vibrate = "vibrate";
public const string None = "none";
public static readonly string[] All =
[
Pushover, Bike, Bugle, Cashregister, Classical, Cosmic, Falling,
Gamelan, Incoming, Intermission, Magic, Mechanical, Pianobar,
Siren, Spacealarm, Tugboat, Alien, Climb, Persistent, Echo,
Updown, Vibrate, None
];
}

View File

@@ -0,0 +1,12 @@
namespace Cleanuparr.Domain.Exceptions;
public class RTorrentClientException : Exception
{
public RTorrentClientException(string message) : base(message)
{
}
public RTorrentClientException(string message, Exception innerException) : base(message, innerException)
{
}
}

View File

@@ -1,11 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
</ItemGroup>
@@ -15,20 +19,21 @@
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.6" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageReference Include="Microsoft.EntityFrameworkCore.InMemory" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="10.0.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<PackageReference Include="Shouldly" Version="4.3.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.2">
<PrivateAssets>all</PrivateAssets>
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.5">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>

View File

@@ -0,0 +1,130 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Events;
/// <summary>
/// Integration tests for the cleanup logic that actually deletes events
/// </summary>
public class EventCleanupServiceIntegrationTests : IDisposable
{
private readonly EventsContext _context;
private readonly Mock<ILogger<EventCleanupService>> _loggerMock;
private readonly IServiceProvider _serviceProvider;
private readonly string _dbName;
public EventCleanupServiceIntegrationTests()
{
_dbName = Guid.NewGuid().ToString();
var services = new ServiceCollection();
// Setup in-memory database
services.AddDbContext<EventsContext>(options =>
options.UseInMemoryDatabase(databaseName: _dbName));
_serviceProvider = services.BuildServiceProvider();
_loggerMock = new Mock<ILogger<EventCleanupService>>();
using var scope = _serviceProvider.CreateScope();
_context = scope.ServiceProvider.GetRequiredService<EventsContext>();
}
public void Dispose()
{
using var scope = _serviceProvider.CreateScope();
var context = scope.ServiceProvider.GetRequiredService<EventsContext>();
context.Database.EnsureDeleted();
}
[Fact]
public async Task CleanupService_PreservesRecentEvents()
{
// Arrange - Add recent events (within retention period)
using (var scope = _serviceProvider.CreateScope())
{
var context = scope.ServiceProvider.GetRequiredService<EventsContext>();
context.Events.Add(new AppEvent
{
Id = Guid.NewGuid(),
EventType = EventType.QueueItemDeleted,
Message = "Recent event 1",
Severity = EventSeverity.Information,
Timestamp = DateTime.UtcNow.AddDays(-5)
});
context.Events.Add(new AppEvent
{
Id = Guid.NewGuid(),
EventType = EventType.DownloadCleaned,
Message = "Recent event 2",
Severity = EventSeverity.Important,
Timestamp = DateTime.UtcNow.AddDays(-10)
});
await context.SaveChangesAsync();
}
// Verify events exist
using (var scope = _serviceProvider.CreateScope())
{
var context = scope.ServiceProvider.GetRequiredService<EventsContext>();
var count = await context.Events.CountAsync();
Assert.Equal(2, count);
}
}
[Fact]
public async Task EventCleanupService_CanStartAndStop()
{
// Arrange
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
var cts = new CancellationTokenSource();
// Act
cts.CancelAfter(100);
await service.StartAsync(cts.Token);
// Give some time for the service to process
await Task.Delay(150);
await service.StopAsync(CancellationToken.None);
// Assert - the service should complete without throwing
Assert.True(true);
}
[Fact]
public async Task EventCleanupService_HandlesExceptionsGracefully()
{
// Arrange
// Note: In-memory provider doesn't support ExecuteDeleteAsync,
// so the cleanup will fail. This test verifies the service handles errors gracefully.
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
var cts = new CancellationTokenSource();
// Act
cts.CancelAfter(100);
await service.StartAsync(cts.Token);
await Task.Delay(150);
await service.StopAsync(CancellationToken.None);
// Assert - the service should handle the error and continue (log it but not crash)
_loggerMock.Verify(
x => x.Log(
LogLevel.Error,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("Failed to perform event cleanup")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.AtLeastOnce);
}
}
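
As the note in the last test says, the EF Core in-memory provider has no ExecuteDeleteAsync support, so the deletion path itself is never exercised here. If that path ever needs direct coverage, one option (an assumption, not something this PR adds; it would require referencing Microsoft.EntityFrameworkCore.Sqlite in the test project) is SQLite in-memory, which does support bulk deletes:

using Cleanuparr.Persistence;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;

// Sketch: register EventsContext on SQLite in-memory so ExecuteDeleteAsync works in tests.
// The connection must stay open for the lifetime of the in-memory database.
var connection = new SqliteConnection("DataSource=:memory:");
connection.Open();

var services = new ServiceCollection();
services.AddDbContext<EventsContext>(options => options.UseSqlite(connection));

using var provider = services.BuildServiceProvider();
using (var scope = provider.CreateScope())
{
    var context = scope.ServiceProvider.GetRequiredService<EventsContext>();
    context.Database.EnsureCreated(); // create the schema before the cleanup service runs
}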

View File

@@ -0,0 +1,130 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Events;
public class EventCleanupServiceTests : IDisposable
{
private readonly Mock<ILogger<EventCleanupService>> _loggerMock;
private readonly ServiceCollection _services;
private readonly IServiceProvider _serviceProvider;
private readonly string _dbName;
public EventCleanupServiceTests()
{
_loggerMock = new Mock<ILogger<EventCleanupService>>();
_services = new ServiceCollection();
_dbName = Guid.NewGuid().ToString();
// Setup in-memory database for testing
_services.AddDbContext<EventsContext>(options =>
options.UseInMemoryDatabase(databaseName: _dbName));
_serviceProvider = _services.BuildServiceProvider();
}
public void Dispose()
{
// Cleanup the in-memory database
using var scope = _serviceProvider.CreateScope();
var context = scope.ServiceProvider.GetRequiredService<EventsContext>();
context.Database.EnsureDeleted();
}
[Fact]
public async Task ExecuteAsync_LogsStartMessage()
{
// Arrange
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
var cts = new CancellationTokenSource();
// Act - start and immediately cancel
cts.CancelAfter(100);
await service.StartAsync(cts.Token);
await Task.Delay(200); // Give it time to process
await service.StopAsync(CancellationToken.None);
// Assert
_loggerMock.Verify(
x => x.Log(
LogLevel.Information,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("started")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public async Task StopAsync_LogsStopMessage()
{
// Arrange
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
var cts = new CancellationTokenSource();
// Act
cts.CancelAfter(50);
await service.StartAsync(cts.Token);
await Task.Delay(100);
await service.StopAsync(CancellationToken.None);
// Assert
_loggerMock.Verify(
x => x.Log(
LogLevel.Information,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("stopping")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public void Constructor_InitializesWithCorrectParameters()
{
// Arrange
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
// Act
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
// Assert - service should be created without exception
Assert.NotNull(service);
}
[Fact]
public async Task ExecuteAsync_GracefullyHandlesCancellation()
{
// Arrange
var scopeFactory = _serviceProvider.GetRequiredService<IServiceScopeFactory>();
var service = new EventCleanupService(_loggerMock.Object, scopeFactory);
var cts = new CancellationTokenSource();
// Act - cancel immediately
cts.Cancel();
// Start should not throw
await service.StartAsync(cts.Token);
await Task.Delay(50);
await service.StopAsync(CancellationToken.None);
// Assert - should have logged stopped message
_loggerMock.Verify(
x => x.Log(
LogLevel.Information,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("stopped")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
}

View File

@@ -0,0 +1,526 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.AspNetCore.SignalR;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Events;
public class EventPublisherTests : IDisposable
{
private readonly EventsContext _context;
private readonly Mock<IHubContext<AppHub>> _hubContextMock;
private readonly Mock<ILogger<EventPublisher>> _loggerMock;
private readonly Mock<INotificationPublisher> _notificationPublisherMock;
private readonly Mock<IDryRunInterceptor> _dryRunInterceptorMock;
private readonly Mock<IClientProxy> _clientProxyMock;
private readonly EventPublisher _publisher;
public EventPublisherTests()
{
// Setup in-memory database
var options = new DbContextOptionsBuilder<EventsContext>()
.UseInMemoryDatabase(databaseName: Guid.NewGuid().ToString())
.Options;
_context = new EventsContext(options);
// Setup mocks
_hubContextMock = new Mock<IHubContext<AppHub>>();
_loggerMock = new Mock<ILogger<EventPublisher>>();
_notificationPublisherMock = new Mock<INotificationPublisher>();
_dryRunInterceptorMock = new Mock<IDryRunInterceptor>();
_clientProxyMock = new Mock<IClientProxy>();
// Setup HubContext to return client proxy
var clientsMock = new Mock<IHubClients>();
clientsMock.Setup(c => c.All).Returns(_clientProxyMock.Object);
_hubContextMock.Setup(h => h.Clients).Returns(clientsMock.Object);
// Setup dry run interceptor to execute the delegate
_dryRunInterceptorMock.Setup(d => d.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns<Delegate, object[]>(async (del, args) =>
{
if (del is Func<AppEvent, Task> func && args.Length > 0 && args[0] is AppEvent appEvent)
{
await func(appEvent);
}
else if (del is Func<ManualEvent, Task> manualFunc && args.Length > 0 && args[0] is ManualEvent manualEvent)
{
await manualFunc(manualEvent);
}
});
_publisher = new EventPublisher(
_context,
_hubContextMock.Object,
_loggerMock.Object,
_notificationPublisherMock.Object,
_dryRunInterceptorMock.Object);
// Setup JobRunId in context for tests
ContextProvider.SetJobRunId(Guid.NewGuid());
}
public void Dispose()
{
_context.Database.EnsureDeleted();
_context.Dispose();
}
#region PublishAsync Tests
[Fact]
public async Task PublishAsync_SavesEventToDatabase()
{
// Arrange
var eventType = EventType.QueueItemDeleted;
var message = "Test message";
var severity = EventSeverity.Important;
// Act
await _publisher.PublishAsync(eventType, message, severity);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(eventType, savedEvent.EventType);
Assert.Equal(message, savedEvent.Message);
Assert.Equal(severity, savedEvent.Severity);
}
[Fact]
public async Task PublishAsync_WithData_SerializesDataToJson()
{
// Arrange
var eventType = EventType.DownloadCleaned;
var message = "Download cleaned";
var severity = EventSeverity.Information;
var data = new { Name = "TestDownload", Hash = "abc123" };
// Act
await _publisher.PublishAsync(eventType, message, severity, data);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.NotNull(savedEvent.Data);
Assert.Contains("TestDownload", savedEvent.Data);
Assert.Contains("abc123", savedEvent.Data);
}
[Fact]
public async Task PublishAsync_WithTrackingId_SavesTrackingId()
{
// Arrange
var eventType = EventType.StalledStrike;
var message = "Strike received";
var severity = EventSeverity.Warning;
var trackingId = Guid.NewGuid();
// Act
await _publisher.PublishAsync(eventType, message, severity, trackingId: trackingId);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(trackingId, savedEvent.TrackingId);
}
[Fact]
public async Task PublishAsync_NotifiesSignalRClients()
{
// Arrange
var eventType = EventType.CategoryChanged;
var message = "Category changed";
var severity = EventSeverity.Information;
// Act
await _publisher.PublishAsync(eventType, message, severity);
// Assert
_clientProxyMock.Verify(c => c.SendCoreAsync(
"EventReceived",
It.Is<object[]>(args => args.Length == 1 && args[0] is AppEvent),
It.IsAny<CancellationToken>()), Times.Once);
}
[Fact]
public async Task PublishAsync_WhenSignalRFails_LogsError()
{
// Arrange
var eventType = EventType.QueueItemDeleted;
var message = "Test message";
var severity = EventSeverity.Important;
_clientProxyMock.Setup(c => c.SendCoreAsync(
It.IsAny<string>(),
It.IsAny<object[]>(),
It.IsAny<CancellationToken>()))
.ThrowsAsync(new Exception("SignalR connection failed"));
// Act - should not throw
await _publisher.PublishAsync(eventType, message, severity);
// Assert - verify event was still saved
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
}
[Fact]
public async Task PublishAsync_NullData_DoesNotSerialize()
{
// Arrange
var eventType = EventType.DownloadCleaned;
var message = "Test";
var severity = EventSeverity.Information;
// Act
await _publisher.PublishAsync(eventType, message, severity, data: null);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Null(savedEvent.Data);
}
#endregion
#region PublishManualAsync Tests
[Fact]
public async Task PublishManualAsync_SavesManualEventToDatabase()
{
// Arrange
var message = "Manual event message";
var severity = EventSeverity.Warning;
// Act
await _publisher.PublishManualAsync(message, severity);
// Assert
var savedEvent = await _context.ManualEvents.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(message, savedEvent.Message);
Assert.Equal(severity, savedEvent.Severity);
}
[Fact]
public async Task PublishManualAsync_WithData_SerializesDataToJson()
{
// Arrange
var message = "Manual event";
var severity = EventSeverity.Important;
var data = new { ItemName = "TestItem", Count = 5 };
// Act
await _publisher.PublishManualAsync(message, severity, data);
// Assert
var savedEvent = await _context.ManualEvents.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.NotNull(savedEvent.Data);
Assert.Contains("TestItem", savedEvent.Data);
Assert.Contains("5", savedEvent.Data);
}
[Fact]
public async Task PublishManualAsync_NotifiesSignalRClients()
{
// Arrange
var message = "Manual event";
var severity = EventSeverity.Information;
// Act
await _publisher.PublishManualAsync(message, severity);
// Assert
_clientProxyMock.Verify(c => c.SendCoreAsync(
"ManualEventReceived",
It.Is<object[]>(args => args.Length == 1 && args[0] is ManualEvent),
It.IsAny<CancellationToken>()), Times.Once);
}
#endregion
#region DryRun Interceptor Tests
[Fact]
public async Task PublishAsync_UsesDryRunInterceptor()
{
// Arrange
var eventType = EventType.StalledStrike;
var message = "Test";
var severity = EventSeverity.Warning;
// Act
await _publisher.PublishAsync(eventType, message, severity);
// Assert
_dryRunInterceptorMock.Verify(d => d.InterceptAsync(
It.IsAny<Delegate>(),
It.IsAny<object[]>()), Times.Once);
}
[Fact]
public async Task PublishManualAsync_UsesDryRunInterceptor()
{
// Arrange
var message = "Manual test";
var severity = EventSeverity.Important;
// Act
await _publisher.PublishManualAsync(message, severity);
// Assert
_dryRunInterceptorMock.Verify(d => d.InterceptAsync(
It.IsAny<Delegate>(),
It.IsAny<object[]>()), Times.Once);
}
#endregion
#region Data Serialization Tests
[Fact]
public async Task PublishAsync_SerializesEnumsAsStrings()
{
// Arrange
var eventType = EventType.QueueItemDeleted;
var message = "Test";
var severity = EventSeverity.Important;
var data = new { Reason = DeleteReason.Stalled };
// Act
await _publisher.PublishAsync(eventType, message, severity, data);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.NotNull(savedEvent.Data);
Assert.Contains("Stalled", savedEvent.Data);
}
[Fact]
public async Task PublishAsync_HandlesComplexData()
{
// Arrange
var eventType = EventType.DownloadCleaned;
var message = "Test";
var severity = EventSeverity.Information;
var data = new
{
Items = new[] { "item1", "item2" },
Nested = new { Value = 123 },
NullableValue = (string?)null
};
// Act
await _publisher.PublishAsync(eventType, message, severity, data);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.NotNull(savedEvent.Data);
Assert.Contains("item1", savedEvent.Data);
Assert.Contains("123", savedEvent.Data);
}
#endregion
#region PublishQueueItemDeleted Tests
[Fact]
public async Task PublishQueueItemDeleted_SavesEventWithContextData()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Test Download");
ContextProvider.Set(ContextProvider.Keys.Hash, "abc123");
// Act
await _publisher.PublishQueueItemDeleted(removeFromClient: true, DeleteReason.Stalled);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(EventType.QueueItemDeleted, savedEvent.EventType);
Assert.Equal(EventSeverity.Important, savedEvent.Severity);
Assert.NotNull(savedEvent.Data);
Assert.Contains("Test Download", savedEvent.Data);
Assert.Contains("abc123", savedEvent.Data);
Assert.Contains("Stalled", savedEvent.Data);
}
[Fact]
public async Task PublishQueueItemDeleted_SendsNotification()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Test Download");
ContextProvider.Set(ContextProvider.Keys.Hash, "abc123");
// Act
await _publisher.PublishQueueItemDeleted(removeFromClient: false, DeleteReason.FailedImport);
// Assert
_notificationPublisherMock.Verify(n => n.NotifyQueueItemDeleted(false, DeleteReason.FailedImport), Times.Once);
}
#endregion
#region PublishDownloadCleaned Tests
[Fact]
public async Task PublishDownloadCleaned_SavesEventWithContextData()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Cleaned Download");
ContextProvider.Set(ContextProvider.Keys.Hash, "def456");
// Act
await _publisher.PublishDownloadCleaned(
ratio: 2.5,
seedingTime: TimeSpan.FromHours(48),
categoryName: "movies",
reason: CleanReason.MaxSeedTimeReached);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(EventType.DownloadCleaned, savedEvent.EventType);
Assert.Equal(EventSeverity.Important, savedEvent.Severity);
Assert.NotNull(savedEvent.Data);
Assert.Contains("Cleaned Download", savedEvent.Data);
Assert.Contains("def456", savedEvent.Data);
Assert.Contains("movies", savedEvent.Data);
Assert.Contains("MaxSeedTimeReached", savedEvent.Data);
}
[Fact]
public async Task PublishDownloadCleaned_SendsNotification()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Test");
ContextProvider.Set(ContextProvider.Keys.Hash, "xyz");
var ratio = 1.5;
var seedingTime = TimeSpan.FromHours(24);
var categoryName = "tv";
var reason = CleanReason.MaxRatioReached;
// Act
await _publisher.PublishDownloadCleaned(ratio, seedingTime, categoryName, reason);
// Assert
_notificationPublisherMock.Verify(n => n.NotifyDownloadCleaned(ratio, seedingTime, categoryName, reason), Times.Once);
}
#endregion
#region PublishSearchNotTriggered Tests
[Fact]
public async Task PublishSearchNotTriggered_SavesManualEvent()
{
// Arrange
ContextProvider.Set(nameof(InstanceType), (object)InstanceType.Sonarr);
ContextProvider.Set(ContextProvider.Keys.ArrInstanceUrl, new Uri("http://localhost:8989"));
// Act
await _publisher.PublishSearchNotTriggered("abc123", "Test Item");
// Assert
var savedEvent = await _context.ManualEvents.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(EventSeverity.Warning, savedEvent.Severity);
Assert.Contains("Replacement search was not triggered", savedEvent.Message);
Assert.NotNull(savedEvent.Data);
Assert.Contains("Test Item", savedEvent.Data);
Assert.Contains("abc123", savedEvent.Data);
}
#endregion
#region PublishRecurringItem Tests
[Fact]
public async Task PublishRecurringItem_SavesManualEvent()
{
// Arrange
ContextProvider.Set(nameof(InstanceType), (object)InstanceType.Radarr);
ContextProvider.Set(ContextProvider.Keys.ArrInstanceUrl, new Uri("http://localhost:7878"));
// Act
await _publisher.PublishRecurringItem("hash123", "Recurring Item", 5);
// Assert
var savedEvent = await _context.ManualEvents.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(EventSeverity.Important, savedEvent.Severity);
Assert.Contains("keeps coming back", savedEvent.Message);
Assert.NotNull(savedEvent.Data);
Assert.Contains("Recurring Item", savedEvent.Data);
Assert.Contains("hash123", savedEvent.Data);
}
#endregion
#region PublishCategoryChanged Tests
[Fact]
public async Task PublishCategoryChanged_SavesEventWithContextData()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Category Test");
ContextProvider.Set(ContextProvider.Keys.Hash, "cat123");
// Act
await _publisher.PublishCategoryChanged("oldCat", "newCat", isTag: false);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Equal(EventType.CategoryChanged, savedEvent.EventType);
Assert.Equal(EventSeverity.Information, savedEvent.Severity);
Assert.Contains("Category changed from 'oldCat' to 'newCat'", savedEvent.Message);
}
[Fact]
public async Task PublishCategoryChanged_WithTag_SavesCorrectMessage()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Tag Test");
ContextProvider.Set(ContextProvider.Keys.Hash, "tag123");
// Act
await _publisher.PublishCategoryChanged("", "cleanuperr-done", isTag: true);
// Assert
var savedEvent = await _context.Events.FirstOrDefaultAsync();
Assert.NotNull(savedEvent);
Assert.Contains("Tag 'cleanuperr-done' added", savedEvent.Message);
}
[Fact]
public async Task PublishCategoryChanged_SendsNotification()
{
// Arrange
ContextProvider.Set(ContextProvider.Keys.ItemName, "Test");
ContextProvider.Set(ContextProvider.Keys.Hash, "xyz");
// Act
await _publisher.PublishCategoryChanged("old", "new", isTag: true);
// Assert
_notificationPublisherMock.Verify(n => n.NotifyCategoryChanged("old", "new", true), Times.Once);
}
#endregion
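// Illustrative sketch only (not part of this change set): how the publisher is assumed to
// route its persistence work through the dry-run interceptor, which is why the stub in the
// constructor re-invokes the delegate with its arguments. The method names below and the
// params shape of InterceptAsync are hypothetical, inferred from these tests; the real
// EventPublisher may differ.
private async Task PublishViaInterceptorSketchAsync(AppEvent appEvent)
{
    // Assumed contract: in dry-run mode the interceptor skips the delegate, otherwise it
    // invokes it with the supplied arguments (exactly what the constructor stub emulates).
    await _dryRunInterceptorMock.Object.InterceptAsync(
        (Func<AppEvent, Task>)PersistAndBroadcastSketchAsync,
        new object[] { appEvent });
}
private async Task PersistAndBroadcastSketchAsync(AppEvent appEvent)
{
    _context.Events.Add(appEvent);
    await _context.SaveChangesAsync();
    await _hubContextMock.Object.Clients.All.SendAsync("EventReceived", appEvent);
}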
}

View File

@@ -0,0 +1,147 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.Arr;
public class ArrClientFactoryTests
{
private readonly Mock<ISonarrClient> _sonarrClientMock;
private readonly Mock<IRadarrClient> _radarrClientMock;
private readonly Mock<ILidarrClient> _lidarrClientMock;
private readonly Mock<IReadarrClient> _readarrClientMock;
private readonly Mock<IWhisparrV2Client> _whisparrClientMock;
private readonly Mock<IWhisparrV3Client> _whisparrV3ClientMock;
private readonly ArrClientFactory _factory;
public ArrClientFactoryTests()
{
_sonarrClientMock = new Mock<ISonarrClient>();
_radarrClientMock = new Mock<IRadarrClient>();
_lidarrClientMock = new Mock<ILidarrClient>();
_readarrClientMock = new Mock<IReadarrClient>();
_whisparrClientMock = new Mock<IWhisparrV2Client>();
_whisparrV3ClientMock = new Mock<IWhisparrV3Client>();
_factory = new ArrClientFactory(
_sonarrClientMock.Object,
_radarrClientMock.Object,
_lidarrClientMock.Object,
_readarrClientMock.Object,
_whisparrClientMock.Object,
_whisparrV3ClientMock.Object
);
}
#region GetClient Tests
[Fact]
public void GetClient_Sonarr_ReturnsSonarrClient()
{
// Act
var result = _factory.GetClient(InstanceType.Sonarr, 0);
// Assert
Assert.Same(_sonarrClientMock.Object, result);
}
[Fact]
public void GetClient_Radarr_ReturnsRadarrClient()
{
// Act
var result = _factory.GetClient(InstanceType.Radarr, 0);
// Assert
Assert.Same(_radarrClientMock.Object, result);
}
[Fact]
public void GetClient_Lidarr_ReturnsLidarrClient()
{
// Act
var result = _factory.GetClient(InstanceType.Lidarr, 0);
// Assert
Assert.Same(_lidarrClientMock.Object, result);
}
[Fact]
public void GetClient_Readarr_ReturnsReadarrClient()
{
// Act
var result = _factory.GetClient(InstanceType.Readarr, 0);
// Assert
Assert.Same(_readarrClientMock.Object, result);
}
[Fact]
public void GetClient_Whisparr_ReturnsWhisparrClient()
{
// Act
var result = _factory.GetClient(InstanceType.Whisparr, 2);
// Assert
Assert.Same(_whisparrClientMock.Object, result);
}
[Fact]
public void GetClient_WhisparrV3_ReturnsWhisparrV3Client()
{
// Act
var result = _factory.GetClient(InstanceType.Whisparr, 3);
// Assert
Assert.Same(_whisparrV3ClientMock.Object, result);
}
[Fact]
public void GetClient_UnsupportedType_ThrowsNotImplementedException()
{
// Arrange
var unsupportedType = (InstanceType)999;
// Act & Assert
var exception = Assert.Throws<NotImplementedException>(() => _factory.GetClient(unsupportedType, 0f));
Assert.Contains("not yet supported", exception.Message);
Assert.Contains("999", exception.Message);
}
[Theory]
[MemberData(nameof(InstancesData))]
public void GetClient_AllSupportedTypes_ReturnsNonNullClient(InstanceType instanceType, float? version)
{
// Act
var result = _factory.GetClient(instanceType, version ?? 0f);
// Assert
Assert.NotNull(result);
Assert.IsAssignableFrom<IArrClient>(result);
}
[Theory]
[MemberData(nameof(InstancesData))]
public void GetClient_CalledMultipleTimes_ReturnsSameInstance(InstanceType instanceType, float? version)
{
// Act
var result1 = _factory.GetClient(instanceType, version ?? 0f);
var result2 = _factory.GetClient(instanceType, version ?? 0f);
// Assert
Assert.Same(result1, result2);
}
public static IEnumerable<object?[]> InstancesData =>
[
[InstanceType.Sonarr, null],
[InstanceType.Radarr, null],
[InstanceType.Lidarr, null],
[InstanceType.Readarr, null],
[InstanceType.Whisparr, 2f],
[InstanceType.Whisparr, 3f]
];
#endregion
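// Illustrative sketch only (not part of this change set): a selection shape consistent with
// the expectations above -- the version argument only matters for Whisparr, and unknown
// types throw. The real ArrClientFactory may implement this differently.
private IArrClient GetClientSketch(InstanceType type, float version) => type switch
{
    InstanceType.Sonarr => _sonarrClientMock.Object,
    InstanceType.Radarr => _radarrClientMock.Object,
    InstanceType.Lidarr => _lidarrClientMock.Object,
    InstanceType.Readarr => _readarrClientMock.Object,
    InstanceType.Whisparr when version >= 3 => _whisparrV3ClientMock.Object,
    InstanceType.Whisparr => _whisparrClientMock.Object,
    _ => throw new NotImplementedException($"Instance type {type} is not yet supported")
};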
}

View File

@@ -0,0 +1,178 @@
using Cleanuparr.Domain.Entities.Arr.Queue;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Interceptors;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.Arr;
public class WhisparrV2ClientTests
{
private readonly Mock<ILogger<WhisparrV2Client>> _loggerMock;
private readonly Mock<IHttpClientFactory> _httpClientFactoryMock;
private readonly Mock<IStriker> _strikerMock;
private readonly Mock<IDryRunInterceptor> _dryRunInterceptorMock;
private readonly Mock<HttpMessageHandler> _httpMessageHandlerMock;
private readonly WhisparrV2Client _client;
public WhisparrV2ClientTests()
{
_loggerMock = new Mock<ILogger<WhisparrV2Client>>();
_httpClientFactoryMock = new Mock<IHttpClientFactory>();
_strikerMock = new Mock<IStriker>();
_dryRunInterceptorMock = new Mock<IDryRunInterceptor>();
_httpMessageHandlerMock = new Mock<HttpMessageHandler>();
var httpClient = new HttpClient(_httpMessageHandlerMock.Object);
_httpClientFactoryMock.Setup(x => x.CreateClient(It.IsAny<string>())).Returns(httpClient);
_client = new WhisparrV2Client(
_loggerMock.Object,
_httpClientFactoryMock.Object,
_strikerMock.Object,
_dryRunInterceptorMock.Object
);
}
#region IsRecordValid Tests
[Fact]
public void IsRecordValid_WhenEpisodeIdIsZero_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = "abc123",
Protocol = "torrent",
EpisodeId = 0,
SeriesId = 1
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("episode id and/or series id missing")),
It.IsAny<Exception?>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()
),
Times.Once
);
}
[Fact]
public void IsRecordValid_WhenSeriesIdIsZero_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = "abc123",
Protocol = "torrent",
EpisodeId = 1,
SeriesId = 0
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
[Fact]
public void IsRecordValid_WhenBothIdsAreZero_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = "abc123",
Protocol = "torrent",
EpisodeId = 0,
SeriesId = 0
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
[Fact]
public void IsRecordValid_WhenBothIdsAreSet_ReturnsTrue()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = "abc123",
Protocol = "torrent",
EpisodeId = 42,
SeriesId = 10
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.True(result);
}
[Fact]
public void IsRecordValid_WhenDownloadIdIsNull_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = null!,
Protocol = "torrent",
EpisodeId = 42,
SeriesId = 10
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
[Fact]
public void IsRecordValid_WhenDownloadIdIsEmpty_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Episode",
DownloadId = "",
Protocol = "torrent",
EpisodeId = 42,
SeriesId = 10
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
#endregion
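// Illustrative sketch only (not part of this change set): the validity rule these tests
// imply -- a queue record needs a non-empty DownloadId plus both an EpisodeId and a
// SeriesId. The real WhisparrV2Client may apply additional checks.
private static bool IsRecordValidSketch(QueueRecord record) =>
    !string.IsNullOrEmpty(record.DownloadId) &&
    record.EpisodeId > 0 &&
    record.SeriesId > 0;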
}

View File

@@ -0,0 +1,132 @@
using Cleanuparr.Domain.Entities.Arr.Queue;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Interceptors;
using Microsoft.Extensions.Logging;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.Arr;
public class WhisparrV3ClientTests
{
private readonly Mock<ILogger<WhisparrV3Client>> _loggerMock;
private readonly Mock<IHttpClientFactory> _httpClientFactoryMock;
private readonly Mock<IStriker> _strikerMock;
private readonly Mock<IDryRunInterceptor> _dryRunInterceptorMock;
private readonly Mock<HttpMessageHandler> _httpMessageHandlerMock;
private readonly WhisparrV3Client _client;
public WhisparrV3ClientTests()
{
_loggerMock = new Mock<ILogger<WhisparrV3Client>>();
_httpClientFactoryMock = new Mock<IHttpClientFactory>();
_strikerMock = new Mock<IStriker>();
_dryRunInterceptorMock = new Mock<IDryRunInterceptor>();
_httpMessageHandlerMock = new Mock<HttpMessageHandler>();
var httpClient = new HttpClient(_httpMessageHandlerMock.Object);
_httpClientFactoryMock.Setup(x => x.CreateClient(It.IsAny<string>())).Returns(httpClient);
_client = new WhisparrV3Client(
_loggerMock.Object,
_httpClientFactoryMock.Object,
_strikerMock.Object,
_dryRunInterceptorMock.Object
);
}
#region IsRecordValid Tests
[Fact]
public void IsRecordValid_WhenMovieIdIsZero_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Movie",
DownloadId = "abc123",
Protocol = "torrent",
MovieId = 0
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("movie id missing")),
It.IsAny<Exception?>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()
),
Times.Once
);
}
[Fact]
public void IsRecordValid_WhenMovieIdIsSet_ReturnsTrue()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Movie",
DownloadId = "abc123",
Protocol = "torrent",
MovieId = 42
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.True(result);
}
[Fact]
public void IsRecordValid_WhenDownloadIdIsNull_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Movie",
DownloadId = null!,
Protocol = "torrent",
MovieId = 42
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
[Fact]
public void IsRecordValid_WhenDownloadIdIsEmpty_ReturnsFalse()
{
// Arrange
var record = new QueueRecord
{
Id = 1,
Title = "Test Movie",
DownloadId = "",
Protocol = "torrent",
MovieId = 42
};
// Act
var result = _client.IsRecordValid(record);
// Assert
Assert.False(result);
}
#endregion
}

View File

@@ -0,0 +1,365 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.BlacklistSync;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadClient.QBittorrent;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;
using Cleanuparr.Persistence.Models.State;
using Microsoft.Data.Sqlite;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Moq;
using Moq.Protected;
using System.Net;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.BlacklistSync;
public class BlacklistSynchronizerTests : IDisposable
{
private readonly Mock<ILogger<BlacklistSynchronizer>> _loggerMock;
private readonly DataContext _dataContext;
private readonly Mock<IDownloadServiceFactory> _downloadServiceFactoryMock;
private readonly Mock<IDryRunInterceptor> _dryRunInterceptorMock;
private readonly FileReader _fileReader;
private readonly BlacklistSynchronizer _synchronizer;
private readonly Mock<HttpMessageHandler> _httpMessageHandlerMock;
private readonly SqliteConnection _connection;
public BlacklistSynchronizerTests()
{
_loggerMock = new Mock<ILogger<BlacklistSynchronizer>>();
// Use SQLite in-memory with shared connection to support complex types
_connection = new SqliteConnection("DataSource=:memory:");
_connection.Open();
var options = new DbContextOptionsBuilder<DataContext>()
.UseSqlite(_connection)
.Options;
_dataContext = new DataContext(options);
_dataContext.Database.EnsureCreated();
_downloadServiceFactoryMock = new Mock<IDownloadServiceFactory>();
_dryRunInterceptorMock = new Mock<IDryRunInterceptor>();
// Setup interceptor to execute the action with params using DynamicInvoke
_dryRunInterceptorMock
.Setup(d => d.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
var result = action.DynamicInvoke(parameters);
if (result is Task task)
{
return task;
}
return Task.CompletedTask;
});
// Setup mock HTTP handler for FileReader
_httpMessageHandlerMock = new Mock<HttpMessageHandler>();
var httpClient = new HttpClient(_httpMessageHandlerMock.Object);
var httpClientFactoryMock = new Mock<IHttpClientFactory>();
httpClientFactoryMock
.Setup(f => f.CreateClient(It.IsAny<string>()))
.Returns(httpClient);
_fileReader = new FileReader(httpClientFactoryMock.Object);
_synchronizer = new BlacklistSynchronizer(
_loggerMock.Object,
_dataContext,
_downloadServiceFactoryMock.Object,
_fileReader,
_dryRunInterceptorMock.Object
);
}
public void Dispose()
{
_dataContext.Dispose();
_connection.Dispose();
}
#region ExecuteAsync - Disabled Tests
[Fact]
public async Task ExecuteAsync_WhenDisabled_ReturnsEarlyWithoutProcessing()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: false);
// Act
await _synchronizer.ExecuteAsync();
// Assert
_downloadServiceFactoryMock.Verify(
f => f.GetDownloadService(It.IsAny<DownloadClientConfig>()),
Times.Never);
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("disabled")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
#endregion
#region ExecuteAsync - Path Not Configured Tests
[Fact]
public async Task ExecuteAsync_WhenPathNotConfigured_LogsWarningAndReturns()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: null);
// Act
await _synchronizer.ExecuteAsync();
// Assert
_downloadServiceFactoryMock.Verify(
f => f.GetDownloadService(It.IsAny<DownloadClientConfig>()),
Times.Never);
_loggerMock.Verify(
x => x.Log(
LogLevel.Warning,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("path is not configured")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public async Task ExecuteAsync_WhenPathIsWhitespace_LogsWarningAndReturns()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: " ");
// Act
await _synchronizer.ExecuteAsync();
// Assert
_downloadServiceFactoryMock.Verify(
f => f.GetDownloadService(It.IsAny<DownloadClientConfig>()),
Times.Never);
_loggerMock.Verify(
x => x.Log(
LogLevel.Warning,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("path is not configured")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
#endregion
#region ExecuteAsync - No Clients Tests
[Fact]
public async Task ExecuteAsync_WhenNoQBittorrentClients_LogsDebugAndReturns()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: "https://example.com/blocklist.txt");
SetupHttpResponse("pattern1\npattern2");
// Don't add any download clients
// Act
await _synchronizer.ExecuteAsync();
// Assert
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("No enabled qBittorrent clients")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public async Task ExecuteAsync_WhenOnlyDelugeClients_LogsDebugAndReturns()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: "https://example.com/blocklist.txt");
SetupHttpResponse("pattern1\npattern2");
// Add only a Deluge client
await AddDownloadClient(DownloadClientTypeName.Deluge, enabled: true);
// Act
await _synchronizer.ExecuteAsync();
// Assert
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("No enabled qBittorrent clients")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public async Task ExecuteAsync_WhenDisabledQBittorrentClient_DoesNotProcess()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: "https://example.com/blocklist.txt");
SetupHttpResponse("pattern1\npattern2");
// Add a disabled qBittorrent client
await AddDownloadClient(DownloadClientTypeName.qBittorrent, enabled: false);
// Act
await _synchronizer.ExecuteAsync();
// Assert
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("No enabled qBittorrent clients")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
#endregion
#region ExecuteAsync - Already Synced Tests
[Fact]
public async Task ExecuteAsync_WhenClientAlreadySynced_SkipsClient()
{
// Arrange
var patterns = "pattern1\npattern2";
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: "https://example.com/blocklist.txt");
SetupHttpResponse(patterns);
var clientId = await AddDownloadClient(DownloadClientTypeName.qBittorrent, enabled: true);
// Calculate the expected hash (same as ComputeHash in BlacklistSynchronizer)
var cleanPatterns = string.Join('\n', patterns.Split(['\r', '\n'], StringSplitOptions.RemoveEmptyEntries)
.Where(p => !string.IsNullOrWhiteSpace(p)));
var hash = ComputeHash(cleanPatterns);
// Add sync history for this client with the same hash
_dataContext.BlacklistSyncHistory.Add(new BlacklistSyncHistory
{
Hash = hash,
DownloadClientId = clientId
});
await _dataContext.SaveChangesAsync();
// Act
await _synchronizer.ExecuteAsync();
// Assert
_downloadServiceFactoryMock.Verify(
f => f.GetDownloadService(It.IsAny<DownloadClientConfig>()),
Times.Never);
_loggerMock.Verify(
x => x.Log(
LogLevel.Debug,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("already synced")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
#endregion
#region ExecuteAsync - Dry Run Tests
[Fact]
public async Task ExecuteAsync_UsesDryRunInterceptor()
{
// Arrange
await SetupBlacklistSyncConfig(enabled: true, blacklistPath: "https://example.com/blocklist.txt");
SetupHttpResponse("pattern1\npattern2");
// Act
await _synchronizer.ExecuteAsync();
// Assert - Verify interceptor was called (with Delegate, not Func<object, object, Task>)
_dryRunInterceptorMock.Verify(
d => d.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()),
Times.AtLeastOnce);
}
#endregion
#region Helper Methods
private async Task SetupBlacklistSyncConfig(bool enabled, string? blacklistPath = null)
{
var config = new BlacklistSyncConfig
{
Enabled = enabled,
BlacklistPath = blacklistPath
};
_dataContext.BlacklistSyncConfigs.Add(config);
await _dataContext.SaveChangesAsync();
}
private async Task<Guid> AddDownloadClient(DownloadClientTypeName typeName, bool enabled)
{
var client = new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = $"Test {typeName} Client",
TypeName = typeName,
Type = DownloadClientType.Torrent,
Host = new Uri("http://test.example.com"),
Enabled = enabled
};
_dataContext.DownloadClients.Add(client);
await _dataContext.SaveChangesAsync();
return client.Id;
}
private void SetupHttpResponse(string content)
{
_httpMessageHandlerMock
.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.ReturnsAsync(new HttpResponseMessage
{
StatusCode = HttpStatusCode.OK,
Content = new StringContent(content)
});
}
private static string ComputeHash(string content)
{
using var sha = System.Security.Cryptography.SHA256.Create();
byte[] bytes = System.Text.Encoding.UTF8.GetBytes(content);
byte[] hash = sha.ComputeHash(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
#endregion
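// Illustrative sketch only (not part of this change set): the skip condition exercised by
// the "already synced" test above -- normalize the blocklist, hash it, and compare against
// the last hash stored for the client. The method name is hypothetical; the real
// BlacklistSynchronizer may differ.
private bool IsAlreadySyncedSketch(Guid clientId, string rawPatterns)
{
    var cleanPatterns = string.Join('\n', rawPatterns
        .Split(['\r', '\n'], StringSplitOptions.RemoveEmptyEntries)
        .Where(p => !string.IsNullOrWhiteSpace(p)));
    var hash = ComputeHash(cleanPatterns);
    return _dataContext.BlacklistSyncHistory.Any(h => h.DownloadClientId == clientId && h.Hash == hash);
}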
}

View File

@@ -1,269 +0,0 @@
using Cleanuparr.Domain.Entities.Deluge.Response;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Shouldly;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DelugeItemTests
{
[Fact]
public void Constructor_WithNullDownloadStatus_ThrowsArgumentNullException()
{
// Act & Assert
Should.Throw<ArgumentNullException>(() => new DelugeItem(null!));
}
[Fact]
public void Hash_ReturnsCorrectValue()
{
// Arrange
var expectedHash = "test-hash-123";
var downloadStatus = new DownloadStatus
{
Hash = expectedHash,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Hash;
// Assert
result.ShouldBe(expectedHash);
}
[Fact]
public void Hash_WithNullValue_ReturnsEmptyString()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = null,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Hash;
// Assert
result.ShouldBe(string.Empty);
}
[Fact]
public void Name_ReturnsCorrectValue()
{
// Arrange
var expectedName = "Test Torrent";
var downloadStatus = new DownloadStatus
{
Name = expectedName,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Name;
// Assert
result.ShouldBe(expectedName);
}
[Fact]
public void Name_WithNullValue_ReturnsEmptyString()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Name = null,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Name;
// Assert
result.ShouldBe(string.Empty);
}
[Fact]
public void IsPrivate_ReturnsCorrectValue()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Private = true,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.IsPrivate;
// Assert
result.ShouldBeTrue();
}
[Fact]
public void Size_ReturnsCorrectValue()
{
// Arrange
var expectedSize = 1024L * 1024 * 1024; // 1GB
var downloadStatus = new DownloadStatus
{
Size = expectedSize,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Size;
// Assert
result.ShouldBe(expectedSize);
}
[Theory]
[InlineData(0, 1024, 0.0)]
[InlineData(512, 1024, 50.0)]
[InlineData(768, 1024, 75.0)]
[InlineData(1024, 1024, 100.0)]
[InlineData(0, 0, 0.0)] // Edge case: zero size
public void CompletionPercentage_ReturnsCorrectValue(long totalDone, long size, double expectedPercentage)
{
// Arrange
var downloadStatus = new DownloadStatus
{
TotalDone = totalDone,
Size = size,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.CompletionPercentage;
// Assert
result.ShouldBe(expectedPercentage);
}
[Fact]
public void Trackers_WithValidUrls_ReturnsHostNames()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Trackers = new List<Tracker>
{
new() { Url = "http://tracker1.example.com:8080/announce" },
new() { Url = "https://tracker2.example.com/announce" },
new() { Url = "udp://tracker3.example.com:1337/announce" }
},
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(3);
result.ShouldContain("tracker1.example.com");
result.ShouldContain("tracker2.example.com");
result.ShouldContain("tracker3.example.com");
}
[Fact]
public void Trackers_WithDuplicateHosts_ReturnsDistinctHosts()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Trackers = new List<Tracker>
{
new() { Url = "http://tracker1.example.com:8080/announce" },
new() { Url = "https://tracker1.example.com/announce" },
new() { Url = "udp://tracker1.example.com:1337/announce" }
},
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(1);
result.ShouldContain("tracker1.example.com");
}
[Fact]
public void Trackers_WithInvalidUrls_SkipsInvalidEntries()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Trackers = new List<Tracker>
{
new() { Url = "http://valid.example.com/announce" },
new() { Url = "invalid-url" },
new() { Url = "" },
new() { Url = null! }
},
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(1);
result.ShouldContain("valid.example.com");
}
[Fact]
public void Trackers_WithEmptyList_ReturnsEmptyList()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Trackers;
// Assert
result.ShouldBeEmpty();
}
[Fact]
public void Trackers_WithNullTrackers_ReturnsEmptyList()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Trackers = null!,
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItem(downloadStatus);
// Act
var result = wrapper.Trackers;
// Assert
result.ShouldBeEmpty();
}
}

View File

@@ -0,0 +1,453 @@
using Cleanuparr.Domain.Entities.Deluge.Response;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Shouldly;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DelugeItemWrapperTests
{
[Fact]
public void Constructor_WithNullDownloadStatus_ThrowsArgumentNullException()
{
// Act & Assert
Should.Throw<ArgumentNullException>(() => new DelugeItemWrapper(null!));
}
[Fact]
public void Hash_ReturnsCorrectValue()
{
// Arrange
var expectedHash = "test-hash-123";
var downloadStatus = new DownloadStatus
{
Hash = expectedHash,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Hash;
// Assert
result.ShouldBe(expectedHash);
}
[Fact]
public void Hash_WithNullValue_ReturnsEmptyString()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = null,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Hash;
// Assert
result.ShouldBe(string.Empty);
}
[Fact]
public void Name_ReturnsCorrectValue()
{
// Arrange
var expectedName = "Test Torrent";
var downloadStatus = new DownloadStatus
{
Name = expectedName,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Name;
// Assert
result.ShouldBe(expectedName);
}
[Fact]
public void Name_WithNullValue_ReturnsEmptyString()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Name = null,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Name;
// Assert
result.ShouldBe(string.Empty);
}
[Fact]
public void IsPrivate_ReturnsCorrectValue()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Private = true,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.IsPrivate;
// Assert
result.ShouldBeTrue();
}
[Fact]
public void Size_ReturnsCorrectValue()
{
// Arrange
var expectedSize = 1024L * 1024 * 1024; // 1GB
var downloadStatus = new DownloadStatus
{
Size = expectedSize,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Size;
// Assert
result.ShouldBe(expectedSize);
}
[Theory]
[InlineData(0, 1024, 0.0)]
[InlineData(512, 1024, 50.0)]
[InlineData(768, 1024, 75.0)]
[InlineData(1024, 1024, 100.0)]
[InlineData(0, 0, 0.0)] // Edge case: zero size
public void CompletionPercentage_ReturnsCorrectValue(long totalDone, long size, double expectedPercentage)
{
// Arrange
var downloadStatus = new DownloadStatus
{
TotalDone = totalDone,
Size = size,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.CompletionPercentage;
// Assert
result.ShouldBe(expectedPercentage);
}
[Theory]
[InlineData(1024L * 1024 * 100, 1024L * 1024 * 100)] // 100MB
[InlineData(0L, 0L)]
public void DownloadedBytes_ReturnsCorrectValue(long totalDone, long expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
TotalDone = totalDone,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.DownloadedBytes;
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(2.0f, 2.0)]
[InlineData(0.5f, 0.5)]
[InlineData(1.0f, 1.0)]
[InlineData(0.0f, 0.0)]
public void Ratio_ReturnsCorrectValue(float ratio, double expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
Ratio = ratio,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Ratio;
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(3600UL, 3600L)] // 1 hour
[InlineData(0UL, 0L)]
[InlineData(86400UL, 86400L)] // 1 day
public void Eta_ReturnsCorrectValue(ulong eta, long expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
Eta = eta,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.Eta;
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(86400L, 86400L)] // 1 day
[InlineData(0L, 0L)]
[InlineData(3600L, 3600L)] // 1 hour
public void SeedingTimeSeconds_ReturnsCorrectValue(long seedingTime, long expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
SeedingTime = seedingTime,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.SeedingTimeSeconds;
// Assert
result.ShouldBe(expected);
}
[Fact]
public void IsIgnored_WithEmptyList_ReturnsFalse()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = "abc123",
Name = "Test Torrent",
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.IsIgnored(Array.Empty<string>());
// Assert
result.ShouldBeFalse();
}
[Fact]
public void IsIgnored_MatchingHash_ReturnsTrue()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = "abc123",
Name = "Test Torrent",
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
var ignoredDownloads = new[] { "abc123" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
// Assert
result.ShouldBeTrue();
}
[Fact]
public void IsIgnored_MatchingCategory_ReturnsTrue()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = "abc123",
Name = "Test Torrent",
Label = "test-category",
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
var ignoredDownloads = new[] { "test-category" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
// Assert
result.ShouldBeTrue();
}
[Fact]
public void IsIgnored_MatchingTracker_ReturnsTrue()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = "abc123",
Name = "Test Torrent",
Trackers = new List<Tracker>
{
new() { Url = "http://tracker.example.com/announce" }
},
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
var ignoredDownloads = new[] { "tracker.example.com" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
// Assert
result.ShouldBeTrue();
}
[Fact]
public void IsIgnored_NotMatching_ReturnsFalse()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Hash = "abc123",
Name = "Test Torrent",
Label = "some-category",
Trackers = new List<Tracker>
{
new() { Url = "http://tracker.example.com/announce" }
},
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
var ignoredDownloads = new[] { "notmatching" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
// Assert
result.ShouldBeFalse();
}
[Theory]
[InlineData(1024L * 1024, 1024L * 1024)] // 1MB/s
[InlineData(0L, 0L)]
[InlineData(500L, 500L)]
public void DownloadSpeed_ReturnsCorrectValue(long downloadSpeed, long expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
DownloadSpeed = downloadSpeed,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.DownloadSpeed;
// Assert
result.ShouldBe(expected);
}
[Fact]
public void Category_Setter_SetsLabel()
{
// Arrange
var downloadStatus = new DownloadStatus
{
Label = "original-category",
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
wrapper.Category = "new-category";
// Assert
wrapper.Category.ShouldBe("new-category");
downloadStatus.Label.ShouldBe("new-category");
}
[Theory]
[InlineData("Downloading", true)]
[InlineData("downloading", true)]
[InlineData("DOWNLOADING", true)]
[InlineData("Seeding", false)]
[InlineData("Paused", false)]
[InlineData(null, false)]
public void IsDownloading_ReturnsCorrectValue(string? state, bool expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
State = state,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.IsDownloading();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData("Downloading", 0, 0UL, true)] // Downloading with no speed and no ETA = stalled
[InlineData("Downloading", 1000, 0UL, false)] // Has download speed = not stalled
[InlineData("Downloading", 0, 100UL, false)] // Has ETA = not stalled
[InlineData("Downloading", 1000, 100UL, false)] // Has both = not stalled
[InlineData("Seeding", 0, 0UL, false)] // Not downloading state = not stalled
[InlineData("Paused", 0, 0UL, false)] // Not downloading state = not stalled
[InlineData(null, 0, 0UL, false)] // Null state = not stalled
public void IsStalled_ReturnsCorrectValue(string? state, long downloadSpeed, ulong eta, bool expected)
{
// Arrange
var downloadStatus = new DownloadStatus
{
State = state,
DownloadSpeed = downloadSpeed,
Eta = eta,
Trackers = new List<Tracker>(),
DownloadLocation = "/test/path"
};
var wrapper = new DelugeItemWrapper(downloadStatus);
// Act
var result = wrapper.IsStalled();
// Assert
result.ShouldBe(expected);
}
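// Illustrative sketch only (not part of this change set): the stall rule the IsStalled
// cases above encode -- a torrent counts as stalled only while in a downloading state with
// no download speed and no ETA. The real wrapper may implement this differently.
private static bool IsStalledSketch(DownloadStatus status) =>
    string.Equals(status.State, "Downloading", StringComparison.InvariantCultureIgnoreCase) &&
    status.DownloadSpeed == 0 &&
    status.Eta == 0;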
}

View File

@@ -0,0 +1,757 @@
using Cleanuparr.Domain.Entities;
using Cleanuparr.Domain.Entities.Deluge.Response;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DelugeServiceDCTests : IClassFixture<DelugeServiceFixture>
{
private readonly DelugeServiceFixture _fixture;
public DelugeServiceDCTests(DelugeServiceFixture fixture)
{
_fixture = fixture;
_fixture.ResetMocks();
}
public class GetSeedingDownloads_Tests : DelugeServiceDCTests
{
public GetSeedingDownloads_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task FiltersSeedingState()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<DownloadStatus>
{
new DownloadStatus { Hash = "hash1", Name = "Torrent 1", State = "Seeding", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" },
new DownloadStatus { Hash = "hash2", Name = "Torrent 2", State = "Downloading", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" },
new DownloadStatus { Hash = "hash3", Name = "Torrent 3", State = "Seeding", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }
};
_fixture.ClientWrapper
.Setup(x => x.GetStatusForAllTorrents())
.ReturnsAsync(downloads);
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Equal(2, result.Count);
Assert.All(result, item => Assert.NotNull(item.Hash));
}
[Fact]
public async Task IsCaseInsensitive()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<DownloadStatus>
{
new DownloadStatus { Hash = "hash1", Name = "Torrent 1", State = "SEEDING", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" },
new DownloadStatus { Hash = "hash2", Name = "Torrent 2", State = "seeding", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }
};
_fixture.ClientWrapper
.Setup(x => x.GetStatusForAllTorrents())
.ReturnsAsync(downloads);
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Equal(2, result.Count);
}
[Fact]
public async Task ReturnsEmptyList_WhenNull()
{
// Arrange
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetStatusForAllTorrents())
.ReturnsAsync((List<DownloadStatus>?)null);
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Empty(result);
}
[Fact]
public async Task SkipsTorrentsWithEmptyHash()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<DownloadStatus>
{
new DownloadStatus { Hash = "", Name = "No Hash", State = "Seeding", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" },
new DownloadStatus { Hash = "hash1", Name = "Valid Hash", State = "Seeding", Private = false, Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }
};
_fixture.ClientWrapper
.Setup(x => x.GetStatusForAllTorrents())
.ReturnsAsync(downloads);
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Single(result);
Assert.Equal("hash1", result[0].Hash);
}
}
public class FilterDownloadsToBeCleanedAsync_Tests : DelugeServiceDCTests
{
public FilterDownloadsToBeCleanedAsync_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void MatchesCategories()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }),
new DelugeItemWrapper(new DownloadStatus { Hash = "hash2", Label = "tv", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }),
new DelugeItemWrapper(new DownloadStatus { Hash = "hash3", Label = "music", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true },
new SeedingRule { Name = "tv", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Equal(2, result.Count);
Assert.Contains(result, x => x.Category == "movies");
Assert.Contains(result, x => x.Category == "tv");
}
[Fact]
public void IsCaseInsensitive()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "Movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Single(result);
}
[Fact]
public void ReturnsEmptyList_WhenNoMatches()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "music", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Empty(result);
}
}
public class FilterDownloadsToChangeCategoryAsync_Tests : DelugeServiceDCTests
{
public FilterDownloadsToChangeCategoryAsync_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void FiltersCorrectly()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }),
new DelugeItemWrapper(new DownloadStatus { Hash = "hash2", Label = "tv", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
var result = sut.FilterDownloadsToChangeCategoryAsync(downloads, new List<string> { "movies" });
// Assert
Assert.NotNull(result);
Assert.Single(result);
Assert.Equal("hash1", result[0].Hash);
}
[Fact]
public void IsCaseInsensitive()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "Movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
var result = sut.FilterDownloadsToChangeCategoryAsync(downloads, new List<string> { "movies" });
// Assert
Assert.NotNull(result);
Assert.Single(result);
}
[Fact]
public void SkipsDownloadsWithEmptyHash()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" }),
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
var result = sut.FilterDownloadsToChangeCategoryAsync(downloads, new List<string> { "movies" });
// Assert
Assert.NotNull(result);
Assert.Single(result);
Assert.Equal("hash1", result[0].Hash);
}
}
public class CreateCategoryAsync_Tests : DelugeServiceDCTests
{
public CreateCategoryAsync_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task CreatesLabel_WhenMissing()
{
// Arrange
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetLabels())
.ReturnsAsync(new List<string>());
_fixture.ClientWrapper
.Setup(x => x.CreateLabel("new-label"))
.Returns(Task.CompletedTask);
// Act
await sut.CreateCategoryAsync("new-label");
// Assert
_fixture.ClientWrapper.Verify(x => x.CreateLabel("new-label"), Times.Once);
}
[Fact]
public async Task SkipsCreation_WhenLabelExists()
{
// Arrange
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetLabels())
.ReturnsAsync(new List<string> { "existing" });
// Act
await sut.CreateCategoryAsync("existing");
// Assert
_fixture.ClientWrapper.Verify(x => x.CreateLabel(It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task IsCaseInsensitive()
{
// Arrange
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetLabels())
.ReturnsAsync(new List<string> { "Existing" });
// Act
await sut.CreateCategoryAsync("existing");
// Assert
_fixture.ClientWrapper.Verify(x => x.CreateLabel(It.IsAny<string>()), Times.Never);
}
}
public class DeleteDownload_Tests : DelugeServiceDCTests
{
public DeleteDownload_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task CallsClientDelete()
{
// Arrange
var sut = _fixture.CreateSut();
const string hash = "TEST-HASH";
var mockTorrent = new Mock<ITorrentItemWrapper>();
mockTorrent.Setup(x => x.Hash).Returns(hash);
_fixture.ClientWrapper
.Setup(x => x.DeleteTorrents(It.Is<List<string>>(h => h.Contains("test-hash")), true))
.Returns(Task.CompletedTask);
// Act
await sut.DeleteDownload(mockTorrent.Object, true);
// Assert
_fixture.ClientWrapper.Verify(
x => x.DeleteTorrents(It.Is<List<string>>(h => h.Contains("test-hash")), true),
Times.Once);
}
[Fact]
public async Task NormalizesHashToLowercase()
{
// Arrange
var sut = _fixture.CreateSut();
const string hash = "UPPERCASE-HASH";
var mockTorrent = new Mock<ITorrentItemWrapper>();
mockTorrent.Setup(x => x.Hash).Returns(hash);
_fixture.ClientWrapper
.Setup(x => x.DeleteTorrents(It.IsAny<List<string>>(), true))
.Returns(Task.CompletedTask);
// Act
await sut.DeleteDownload(mockTorrent.Object, true);
// Assert
_fixture.ClientWrapper.Verify(
x => x.DeleteTorrents(It.Is<List<string>>(h => h.Contains("uppercase-hash")), true),
Times.Once);
}
[Fact]
public async Task CallsClientDeleteWithoutSourceFiles()
{
// Arrange
var sut = _fixture.CreateSut();
const string hash = "TEST-HASH";
var mockTorrent = new Mock<ITorrentItemWrapper>();
mockTorrent.Setup(x => x.Hash).Returns(hash);
_fixture.ClientWrapper
.Setup(x => x.DeleteTorrents(It.Is<List<string>>(h => h.Contains("test-hash")), false))
.Returns(Task.CompletedTask);
// Act
await sut.DeleteDownload(mockTorrent.Object, false);
// Assert
_fixture.ClientWrapper.Verify(
x => x.DeleteTorrents(It.Is<List<string>>(h => h.Contains("test-hash")), false),
Times.Once);
}
}
public class ChangeCategoryForNoHardLinksAsync_Tests : DelugeServiceDCTests
{
public ChangeCategoryForNoHardLinksAsync_Tests(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task NullDownloads_DoesNothing()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(null);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task EmptyDownloads_DoesNothing()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(new List<Domain.Entities.ITorrentItemWrapper>());
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task MissingHash_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task MissingName_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task MissingCategory_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task ExceptionGettingFiles_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ThrowsAsync(new InvalidOperationException("Failed to get files"));
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task NoHardlinks_ChangesLabel()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0, Path = "file1.mkv" } }
}
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetTorrentLabel("hash1", "unlinked"),
Times.Once);
}
[Fact]
public async Task HasHardlinks_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0, Path = "file1.mkv" } }
}
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(2);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task FileNotFound_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0, Path = "file1.mkv" } }
}
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(-1);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(x => x.SetTorrentLabel(It.IsAny<string>(), It.IsAny<string>()), Times.Never);
}
[Fact]
public async Task SkippedFiles_IgnoredInCheck()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 0, Index = 0, Path = "file1.mkv" } },
{ "file2.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 1, Path = "file2.mkv" } }
}
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.HardLinkFileService.Verify(
x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()),
Times.Once);
}
[Fact]
public async Task PublishesCategoryChangedEvent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<Domain.Entities.ITorrentItemWrapper>
{
new DelugeItemWrapper(new DownloadStatus { Hash = "hash1", Name = "Test", Label = "movies", Trackers = new List<Tracker>(), DownloadLocation = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles("hash1"))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0, Path = "file1.mkv" } }
}
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert - the published event itself is not verified here; verifying the label change confirms the flow completed
_fixture.ClientWrapper.Verify(
x => x.SetTorrentLabel("hash1", "unlinked"),
Times.Once);
}
}
}
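
The ChangeCategoryForNoHardLinksAsync tests above pin down a single decision: skipped files (priority 0) are ignored, a hardlink count of 0 on every wanted file moves the torrent to the unlinked category, and a positive count or -1 (file not found) leaves it alone. A minimal standalone sketch of that decision, using a simplified file model rather than the service's real types:

using System;
using System.Collections.Generic;

// Hypothetical file model, used only for this sketch.
public sealed record SketchTorrentFile(string Path, int Priority);

public static class UnlinkedCheckSketch
{
    // getHardLinkCount mirrors IHardLinkFileService.GetHardLinkCount as exercised above:
    // 0 = no other links, > 0 = hardlinked elsewhere, -1 = file not found.
    public static bool ShouldRelabel(IEnumerable<SketchTorrentFile> files, Func<string, int> getHardLinkCount)
    {
        var sawWantedFile = false;

        foreach (var file in files)
        {
            if (file.Priority == 0)
            {
                continue; // skipped files are ignored, as in SkippedFiles_IgnoredInCheck
            }

            sawWantedFile = true;

            if (getHardLinkCount(file.Path) != 0)
            {
                return false; // hardlinked elsewhere (> 0) or missing (-1): keep the current label
            }
        }

        return sawWantedFile;
    }
}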

View File

@@ -0,0 +1,112 @@
using Cleanuparr.Infrastructure.Events.Interfaces;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.Extensions.Logging;
using Moq;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DelugeServiceFixture : IDisposable
{
public Mock<ILogger<DelugeService>> Logger { get; }
public Mock<IFilenameEvaluator> FilenameEvaluator { get; }
public Mock<IStriker> Striker { get; }
public Mock<IDryRunInterceptor> DryRunInterceptor { get; }
public Mock<IHardLinkFileService> HardLinkFileService { get; }
public Mock<IDynamicHttpClientProvider> HttpClientProvider { get; }
public Mock<IEventPublisher> EventPublisher { get; }
public Mock<IBlocklistProvider> BlocklistProvider { get; }
public Mock<IRuleEvaluator> RuleEvaluator { get; }
public Mock<IRuleManager> RuleManager { get; }
public Mock<IDelugeClientWrapper> ClientWrapper { get; }
public DelugeServiceFixture()
{
Logger = new Mock<ILogger<DelugeService>>();
FilenameEvaluator = new Mock<IFilenameEvaluator>();
Striker = new Mock<IStriker>();
DryRunInterceptor = new Mock<IDryRunInterceptor>();
HardLinkFileService = new Mock<IHardLinkFileService>();
HttpClientProvider = new Mock<IDynamicHttpClientProvider>();
EventPublisher = new Mock<IEventPublisher>();
BlocklistProvider = new Mock<IBlocklistProvider>();
RuleEvaluator = new Mock<IRuleEvaluator>();
RuleManager = new Mock<IRuleManager>();
ClientWrapper = new Mock<IDelugeClientWrapper>();
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public DelugeService CreateSut(DownloadClientConfig? config = null)
{
config ??= new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = "Test Client",
TypeName = Domain.Enums.DownloadClientTypeName.Deluge,
Type = Domain.Enums.DownloadClientType.Torrent,
Enabled = true,
Host = new Uri("http://localhost:8112"),
Username = "admin",
Password = "admin",
UrlBase = ""
};
var httpClient = new HttpClient();
HttpClientProvider
.Setup(x => x.CreateClient(It.IsAny<DownloadClientConfig>()))
.Returns(httpClient);
return new DelugeService(
Logger.Object,
FilenameEvaluator.Object,
Striker.Object,
DryRunInterceptor.Object,
HardLinkFileService.Object,
HttpClientProvider.Object,
EventPublisher.Object,
BlocklistProvider.Object,
config,
RuleEvaluator.Object,
RuleManager.Object,
ClientWrapper.Object
);
}
public void ResetMocks()
{
Logger.Reset();
FilenameEvaluator.Reset();
Striker.Reset();
DryRunInterceptor.Reset();
HardLinkFileService.Reset();
HttpClientProvider.Reset();
EventPublisher.Reset();
RuleEvaluator.Reset();
RuleManager.Reset();
ClientWrapper.Reset();
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public void Dispose()
{
GC.SuppressFinalize(this);
}
}
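
Both fixtures configure IDryRunInterceptor as a pass-through so that intercepted delegates actually execute during tests instead of being swallowed by dry-run handling. A standalone illustration of that Moq pattern, using a hypothetical interface that only mirrors the InterceptAsync(Delegate, object[]) shape seen above:

using System;
using System.Threading.Tasks;
using Moq;

// Hypothetical stand-in for IDryRunInterceptor, reduced to the member the fixtures stub.
public interface ISketchDryRunInterceptor
{
    Task InterceptAsync(Delegate action, params object[] parameters);
}

public static class PassThroughInterceptorSketch
{
    public static Mock<ISketchDryRunInterceptor> Create()
    {
        var interceptor = new Mock<ISketchDryRunInterceptor>();

        // Invoke the wrapped delegate and hand back its Task, exactly as the fixtures do.
        interceptor
            .Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
            .Returns((Delegate action, object[] parameters) =>
                (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask));

        return interceptor;
    }
}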

View File

@@ -0,0 +1,499 @@
using Cleanuparr.Domain.Entities.Deluge.Response;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DelugeServiceTests : IClassFixture<DelugeServiceFixture>
{
private readonly DelugeServiceFixture _fixture;
public DelugeServiceTests(DelugeServiceFixture fixture)
{
_fixture = fixture;
_fixture.ResetMocks();
}
public class ShouldRemoveFromArrQueueAsync_BasicScenarios : DelugeServiceTests
{
public ShouldRemoveFromArrQueueAsync_BasicScenarios(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task TorrentNotFound_ReturnsEmptyResult()
{
const string hash = "nonexistent";
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync((DownloadStatus?)null);
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.False(result.Found);
Assert.False(result.ShouldRemove);
Assert.Equal(DeleteReason.None, result.DeleteReason);
}
[Fact]
public async Task TorrentFound_SetsIsPrivateCorrectly_WhenPrivate()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = true,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.True(result.Found);
Assert.True(result.IsPrivate);
}
[Fact]
public async Task TorrentFound_SetsIsPrivateCorrectly_WhenPublic()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.True(result.Found);
Assert.False(result.IsPrivate);
}
}
public class ShouldRemoveFromArrQueueAsync_AllFilesSkippedScenarios : DelugeServiceTests
{
public ShouldRemoveFromArrQueueAsync_AllFilesSkippedScenarios(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task AllFilesUnwanted_DeletesFromClient()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 0, Index = 0 } },
{ "file2.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 0, Index = 1 } }
}
});
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.True(result.ShouldRemove);
Assert.Equal(DeleteReason.AllFilesSkipped, result.DeleteReason);
Assert.True(result.DeleteFromClient);
}
[Fact]
public async Task SomeFilesWanted_DoesNotRemove()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 0, Index = 0 } },
{ "file2.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 1 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.False(result.ShouldRemove);
}
}
public class ShouldRemoveFromArrQueueAsync_IgnoredDownloadScenarios : DelugeServiceTests
{
public ShouldRemoveFromArrQueueAsync_IgnoredDownloadScenarios(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task TorrentIgnoredByHash_ReturnsEmptyResult()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, new[] { hash });
Assert.True(result.Found);
Assert.False(result.ShouldRemove);
}
[Fact]
public async Task TorrentIgnoredByCategory_ReturnsEmptyResult()
{
const string hash = "test-hash";
const string category = "test-category";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Label = category,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, new[] { category });
Assert.True(result.Found);
Assert.False(result.ShouldRemove);
}
[Fact]
public async Task TorrentIgnoredByTrackerDomain_ReturnsEmptyResult()
{
const string hash = "test-hash";
const string trackerDomain = "tracker.example.com";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>
{
new Tracker { Url = $"https://{trackerDomain}/announce" }
},
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, new[] { trackerDomain });
Assert.True(result.Found);
Assert.False(result.ShouldRemove);
}
}
public class ShouldRemoveFromArrQueueAsync_StateCheckScenarios : DelugeServiceTests
{
public ShouldRemoveFromArrQueueAsync_StateCheckScenarios(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task NotDownloadingState_SkipsSlowCheck()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Seeding",
Private = false,
DownloadSpeed = 0,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.False(result.ShouldRemove);
_fixture.RuleEvaluator.Verify(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()), Times.Never);
}
[Fact]
public async Task ZeroDownloadSpeed_SkipsSlowCheck()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 0,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((false, DeleteReason.None, false));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.False(result.ShouldRemove);
_fixture.RuleEvaluator.Verify(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()), Times.Never);
}
}
public class ShouldRemoveFromArrQueueAsync_SlowAndStalledScenarios : DelugeServiceTests
{
public ShouldRemoveFromArrQueueAsync_SlowAndStalledScenarios(DelugeServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task SlowDownload_MatchesRule_RemovesFromQueue()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
Private = false,
DownloadSpeed = 1000,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateSlowRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((true, DeleteReason.SlowSpeed, true));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.True(result.ShouldRemove);
Assert.Equal(DeleteReason.SlowSpeed, result.DeleteReason);
Assert.True(result.DeleteFromClient);
}
[Fact]
public async Task StalledDownload_MatchesRule_RemovesFromQueue()
{
const string hash = "test-hash";
var sut = _fixture.CreateSut();
var downloadStatus = new DownloadStatus
{
Hash = hash,
Name = "Test Torrent",
State = "Downloading",
DownloadSpeed = 0,
Eta = 0,
Private = false,
Trackers = new List<Tracker>(),
DownloadLocation = "/downloads"
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentStatus(hash))
.ReturnsAsync(downloadStatus);
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFiles(hash))
.ReturnsAsync(new DelugeContents
{
Contents = new Dictionary<string, DelugeFileOrDirectory>
{
{ "file1.mkv", new DelugeFileOrDirectory { Type = "file", Priority = 1, Index = 0 } }
}
});
_fixture.RuleEvaluator
.Setup(x => x.EvaluateStallRulesAsync(It.IsAny<DelugeItemWrapper>()))
.ReturnsAsync((true, DeleteReason.Stalled, true));
var result = await sut.ShouldRemoveFromArrQueueAsync(hash, Array.Empty<string>());
Assert.True(result.ShouldRemove);
Assert.Equal(DeleteReason.Stalled, result.DeleteReason);
Assert.True(result.DeleteFromClient);
}
}
}
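
The state-check scenarios above encode when the slow-speed evaluation runs at all: a torrent that is not in the Downloading state, or that reports zero download speed, never reaches EvaluateSlowRulesAsync, while the stall rules are still consulted. A one-line sketch of that gate, stated as an assumption inferred from the tests rather than the service's actual code:

public static class SlowCheckGateSketch
{
    // Assumed gate: slow-speed rules only apply to actively downloading torrents with measurable speed.
    public static bool ShouldEvaluateSlowRules(string state, long downloadSpeed)
        => state == "Downloading" && downloadSpeed > 0;
}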

View File

@@ -0,0 +1,281 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Events.Interfaces;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadClient.Deluge;
using Cleanuparr.Infrastructure.Features.DownloadClient.QBittorrent;
using Cleanuparr.Infrastructure.Features.DownloadClient.Transmission;
using Cleanuparr.Infrastructure.Features.DownloadClient.UTorrent;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.AspNetCore.SignalR;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class DownloadServiceFactoryTests : IDisposable
{
private readonly Mock<ILogger<DownloadServiceFactory>> _loggerMock;
private readonly IServiceProvider _serviceProvider;
private readonly DownloadServiceFactory _factory;
private readonly MemoryCache _memoryCache;
public DownloadServiceFactoryTests()
{
_loggerMock = new Mock<ILogger<DownloadServiceFactory>>();
var services = new ServiceCollection();
// Use a real MemoryCache; a mocked IMemoryCache does not behave correctly for cache operations
_memoryCache = new MemoryCache(Options.Create(new MemoryCacheOptions()));
services.AddSingleton<IMemoryCache>(_memoryCache);
// Register loggers
services.AddSingleton(Mock.Of<ILogger<QBitService>>());
services.AddSingleton(Mock.Of<ILogger<DelugeService>>());
services.AddSingleton(Mock.Of<ILogger<TransmissionService>>());
services.AddSingleton(Mock.Of<ILogger<UTorrentService>>());
services.AddSingleton(Mock.Of<IFilenameEvaluator>());
services.AddSingleton(Mock.Of<IStriker>());
services.AddSingleton(Mock.Of<IDryRunInterceptor>());
services.AddSingleton(Mock.Of<IHardLinkFileService>());
// IDynamicHttpClientProvider must return a real HttpClient for download services
var httpClientProviderMock = new Mock<IDynamicHttpClientProvider>();
httpClientProviderMock.Setup(p => p.CreateClient(It.IsAny<DownloadClientConfig>())).Returns(new HttpClient());
services.AddSingleton(httpClientProviderMock.Object);
services.AddSingleton(Mock.Of<IRuleEvaluator>());
services.AddSingleton(Mock.Of<IRuleManager>());
// UTorrentService needs ILoggerFactory
services.AddLogging();
// EventPublisher requires specific constructor arguments
var eventsContextOptions = new DbContextOptionsBuilder<EventsContext>()
.UseInMemoryDatabase(databaseName: Guid.NewGuid().ToString())
.Options;
var eventsContext = new EventsContext(eventsContextOptions);
var hubContextMock = new Mock<IHubContext<AppHub>>();
var clientsMock = new Mock<IHubClients>();
clientsMock.Setup(c => c.All).Returns(Mock.Of<IClientProxy>());
hubContextMock.Setup(h => h.Clients).Returns(clientsMock.Object);
services.AddSingleton<IEventPublisher>(new EventPublisher(
eventsContext,
hubContextMock.Object,
Mock.Of<ILogger<EventPublisher>>(),
Mock.Of<INotificationPublisher>(),
Mock.Of<IDryRunInterceptor>()));
// BlocklistProvider requires specific constructor arguments
var scopeFactoryMock = new Mock<IServiceScopeFactory>();
services.AddSingleton<IBlocklistProvider>(new BlocklistProvider(
Mock.Of<ILogger<BlocklistProvider>>(),
scopeFactoryMock.Object,
_memoryCache));
_serviceProvider = services.BuildServiceProvider();
_factory = new DownloadServiceFactory(_loggerMock.Object, _serviceProvider);
}
public void Dispose()
{
_memoryCache.Dispose();
}
#region GetDownloadService Tests
[Fact]
public void GetDownloadService_QBittorrent_ReturnsQBitService()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.qBittorrent);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
Assert.IsType<QBitService>(service);
}
[Fact]
public void GetDownloadService_Deluge_ReturnsDelugeService()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.Deluge);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
Assert.IsType<DelugeService>(service);
}
[Fact]
public void GetDownloadService_Transmission_ReturnsTransmissionService()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.Transmission);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
Assert.IsType<TransmissionService>(service);
}
[Fact]
public void GetDownloadService_UTorrent_ReturnsUTorrentService()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.uTorrent);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
Assert.IsType<UTorrentService>(service);
}
[Fact]
public void GetDownloadService_UnsupportedType_ThrowsNotSupportedException()
{
// Arrange
var config = new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = "Unsupported Client",
TypeName = (DownloadClientTypeName)999, // Invalid type
Type = DownloadClientType.Torrent,
Host = new Uri("http://test.example.com"),
Enabled = true
};
// Act & Assert
var exception = Assert.Throws<NotSupportedException>(() => _factory.GetDownloadService(config));
Assert.Contains("not supported", exception.Message);
}
[Fact]
public void GetDownloadService_DisabledClient_LogsWarningButReturnsService()
{
// Arrange
var config = new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = "Disabled qBittorrent",
TypeName = DownloadClientTypeName.qBittorrent,
Type = DownloadClientType.Torrent,
Host = new Uri("http://test.example.com"),
Enabled = false
};
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
_loggerMock.Verify(
x => x.Log(
LogLevel.Warning,
It.IsAny<EventId>(),
It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains("disabled")),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Once);
}
[Fact]
public void GetDownloadService_EnabledClient_DoesNotLogWarning()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.qBittorrent);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
_loggerMock.Verify(
x => x.Log(
LogLevel.Warning,
It.IsAny<EventId>(),
It.IsAny<It.IsAnyType>(),
It.IsAny<Exception>(),
It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
Times.Never);
}
[Theory]
[InlineData(DownloadClientTypeName.qBittorrent, typeof(QBitService))]
[InlineData(DownloadClientTypeName.Deluge, typeof(DelugeService))]
[InlineData(DownloadClientTypeName.Transmission, typeof(TransmissionService))]
[InlineData(DownloadClientTypeName.uTorrent, typeof(UTorrentService))]
public void GetDownloadService_AllSupportedTypes_ReturnCorrectServiceType(
DownloadClientTypeName typeName, Type expectedServiceType)
{
// Arrange
var config = CreateClientConfig(typeName);
// Act
var service = _factory.GetDownloadService(config);
// Assert
Assert.NotNull(service);
Assert.IsType(expectedServiceType, service);
}
[Fact]
public void GetDownloadService_ReturnsNewInstanceEachTime()
{
// Arrange
var config = CreateClientConfig(DownloadClientTypeName.qBittorrent);
// Act
var service1 = _factory.GetDownloadService(config);
var service2 = _factory.GetDownloadService(config);
// Assert
Assert.NotSame(service1, service2);
}
#endregion
#region Helper Methods
private static DownloadClientConfig CreateClientConfig(DownloadClientTypeName typeName)
{
return new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = $"Test {typeName} Client",
TypeName = typeName,
Type = DownloadClientType.Torrent,
Host = new Uri("http://test.example.com"),
Enabled = true
};
}
#endregion
}
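
Verifying ILogger output with Moq, as the disabled-client test above does, needs the It.IsAnyType pattern because ILogger.Log<TState> is generic. A small reusable helper that captures that pattern (the helper itself is a sketch, not part of the repository):

using System;
using Microsoft.Extensions.Logging;
using Moq;

public static class LoggerMockVerifySketch
{
    // Verifies that a log entry at the given level whose rendered state contains the fragment was written.
    public static void VerifyLogContains<T>(this Mock<ILogger<T>> logger, LogLevel level, string fragment, Times times)
    {
        logger.Verify(
            x => x.Log(
                level,
                It.IsAny<EventId>(),
                It.Is<It.IsAnyType>((v, t) => v.ToString()!.Contains(fragment)),
                It.IsAny<Exception>(),
                It.IsAny<Func<It.IsAnyType, Exception?, string>>()),
            times);
    }
}

With it, the disabled-client assertion reduces to _loggerMock.VerifyLogContains(LogLevel.Warning, "disabled", Times.Once());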

View File

@@ -5,7 +5,7 @@ using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class QBitItemTests
public class QBitItemWrapperTests
{
[Fact]
public void Constructor_WithNullTorrentInfo_ThrowsArgumentNullException()
@@ -14,7 +14,7 @@ public class QBitItemTests
var trackers = new List<TorrentTracker>();
// Act & Assert
Should.Throw<ArgumentNullException>(() => new QBitItem(null!, trackers, false));
Should.Throw<ArgumentNullException>(() => new QBitItemWrapper(null!, trackers, false));
}
[Fact]
@@ -24,7 +24,7 @@ public class QBitItemTests
var torrentInfo = new TorrentInfo();
// Act & Assert
Should.Throw<ArgumentNullException>(() => new QBitItem(torrentInfo, null!, false));
Should.Throw<ArgumentNullException>(() => new QBitItemWrapper(torrentInfo, null!, false));
}
[Fact]
@@ -34,7 +34,7 @@ public class QBitItemTests
var expectedHash = "test-hash-123";
var torrentInfo = new TorrentInfo { Hash = expectedHash };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Hash;
@@ -49,7 +49,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { Hash = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Hash;
@@ -65,7 +65,7 @@ public class QBitItemTests
var expectedName = "Test Torrent";
var torrentInfo = new TorrentInfo { Name = expectedName };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Name;
@@ -80,7 +80,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { Name = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Name;
@@ -95,7 +95,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo();
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, true);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, true);
// Act
var result = wrapper.IsPrivate;
@@ -111,7 +111,7 @@ public class QBitItemTests
var expectedSize = 1024L * 1024 * 1024; // 1GB
var torrentInfo = new TorrentInfo { Size = expectedSize };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Size;
@@ -126,7 +126,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { Size = 0 };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Size;
@@ -145,7 +145,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { Progress = progress };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.CompletionPercentage;
@@ -155,86 +155,210 @@ public class QBitItemTests
}
[Fact]
public void Trackers_WithValidUrls_ReturnsHostNames()
public void DownloadedBytes_ReturnsCorrectValue()
{
// Arrange
var torrentInfo = new TorrentInfo();
var trackers = new List<TorrentTracker>
{
new() { Url = "http://tracker1.example.com:8080/announce" },
new() { Url = "https://tracker2.example.com/announce" },
new() { Url = "udp://tracker3.example.com:1337/announce" }
};
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(3);
result.ShouldContain("tracker1.example.com");
result.ShouldContain("tracker2.example.com");
result.ShouldContain("tracker3.example.com");
}
[Fact]
public void Trackers_WithDuplicateHosts_ReturnsDistinctHosts()
{
// Arrange
var torrentInfo = new TorrentInfo();
var trackers = new List<TorrentTracker>
{
new() { Url = "http://tracker1.example.com:8080/announce" },
new() { Url = "https://tracker1.example.com/announce" },
new() { Url = "udp://tracker1.example.com:1337/announce" }
};
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(1);
result.ShouldContain("tracker1.example.com");
}
[Fact]
public void Trackers_WithInvalidUrls_SkipsInvalidEntries()
{
// Arrange
var torrentInfo = new TorrentInfo();
var trackers = new List<TorrentTracker>
{
new() { Url = "http://valid.example.com/announce" },
new() { Url = "invalid-url" },
new() { Url = "" },
new() { Url = null }
};
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.Trackers;
// Assert
result.Count.ShouldBe(1);
result.ShouldContain("valid.example.com");
}
[Fact]
public void Trackers_WithEmptyList_ReturnsEmptyList()
{
// Arrange
var torrentInfo = new TorrentInfo();
var expectedDownloaded = 1024L * 1024 * 500; // 500MB
var torrentInfo = new TorrentInfo { Downloaded = expectedDownloaded };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Trackers;
var result = wrapper.DownloadedBytes;
// Assert
result.ShouldBe(expectedDownloaded);
}
[Fact]
public void DownloadedBytes_WithNullValue_ReturnsZero()
{
// Arrange
var torrentInfo = new TorrentInfo { Downloaded = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.DownloadedBytes;
// Assert
result.ShouldBe(0);
}
[Fact]
public void DownloadSpeed_ReturnsCorrectValue()
{
// Arrange
var expectedSpeed = 1024 * 512; // 512 KB/s
var torrentInfo = new TorrentInfo { DownloadSpeed = expectedSpeed };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.DownloadSpeed;
// Assert
result.ShouldBe(expectedSpeed);
}
[Theory]
[InlineData(0.0)]
[InlineData(0.5)]
[InlineData(1.0)]
[InlineData(2.5)]
public void Ratio_ReturnsCorrectValue(double expectedRatio)
{
// Arrange
var torrentInfo = new TorrentInfo { Ratio = expectedRatio };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Ratio;
// Assert
result.ShouldBe(expectedRatio);
}
[Fact]
public void Eta_ReturnsCorrectValue()
{
// Arrange
var expectedEta = TimeSpan.FromMinutes(30);
var torrentInfo = new TorrentInfo { EstimatedTime = expectedEta };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Eta;
// Assert
result.ShouldBe((long)expectedEta.TotalSeconds);
}
[Fact]
public void Eta_WithNullValue_ReturnsZero()
{
// Arrange
var torrentInfo = new TorrentInfo { EstimatedTime = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Eta;
// Assert
result.ShouldBe(0);
}
[Fact]
public void SeedingTimeSeconds_ReturnsCorrectValue()
{
// Arrange
var expectedTime = TimeSpan.FromHours(5);
var torrentInfo = new TorrentInfo { SeedingTime = expectedTime };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.SeedingTimeSeconds;
// Assert
result.ShouldBe((long)expectedTime.TotalSeconds);
}
[Fact]
public void SeedingTimeSeconds_WithNullValue_ReturnsZero()
{
// Arrange
var torrentInfo = new TorrentInfo { SeedingTime = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.SeedingTimeSeconds;
// Assert
result.ShouldBe(0);
}
[Fact]
public void Tags_ReturnsCorrectValue()
{
// Arrange
var expectedTags = new List<string> { "tag1", "tag2", "tag3" };
var torrentInfo = new TorrentInfo { Tags = expectedTags.AsReadOnly() };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Tags;
// Assert
result.ShouldBe(expectedTags);
}
[Fact]
public void Tags_WithNullValue_ReturnsEmptyList()
{
// Arrange
var torrentInfo = new TorrentInfo { Tags = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Tags;
// Assert
result.ShouldBeEmpty();
}
[Fact]
public void Tags_WithEmptyList_ReturnsEmptyList()
{
// Arrange
var torrentInfo = new TorrentInfo { Tags = new List<string>().AsReadOnly() };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Tags;
// Assert
result.ShouldBeEmpty();
}
[Fact]
public void Category_ReturnsCorrectValue()
{
// Arrange
var expectedCategory = "movies";
var torrentInfo = new TorrentInfo { Category = expectedCategory };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Category;
// Assert
result.ShouldBe(expectedCategory);
}
[Fact]
public void Category_WithNullValue_ReturnsNull()
{
// Arrange
var torrentInfo = new TorrentInfo { Category = null };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.Category;
// Assert
result.ShouldBeNull();
}
// State checking method tests
[Theory]
[InlineData(TorrentState.Downloading, true)]
@@ -247,7 +371,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.IsDownloading();
@@ -266,7 +390,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.IsStalled();
@@ -286,7 +410,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.IsSeeding();
@@ -295,101 +419,6 @@ public class QBitItemTests
result.ShouldBe(expected);
}
[Theory]
[InlineData(0.0, false)]
[InlineData(0.5, false)]
[InlineData(0.99, false)]
[InlineData(1.0, true)]
public void IsCompleted_ReturnsCorrectValue(double progress, bool expected)
{
// Arrange
var torrentInfo = new TorrentInfo { Progress = progress };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.IsCompleted();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(TorrentState.PausedDownload, true)]
[InlineData(TorrentState.PausedUpload, true)]
[InlineData(TorrentState.Downloading, false)]
[InlineData(TorrentState.Uploading, false)]
public void IsPaused_ReturnsCorrectValue(TorrentState state, bool expected)
{
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.IsPaused();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(TorrentState.QueuedDownload, true)]
[InlineData(TorrentState.QueuedUpload, true)]
[InlineData(TorrentState.Downloading, false)]
[InlineData(TorrentState.Uploading, false)]
public void IsQueued_ReturnsCorrectValue(TorrentState state, bool expected)
{
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.IsQueued();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(TorrentState.CheckingDownload, true)]
[InlineData(TorrentState.CheckingUpload, true)]
[InlineData(TorrentState.CheckingResumeData, true)]
[InlineData(TorrentState.Downloading, false)]
[InlineData(TorrentState.Uploading, false)]
public void IsChecking_ReturnsCorrectValue(TorrentState state, bool expected)
{
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.IsChecking();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(TorrentState.Allocating, true)]
[InlineData(TorrentState.Downloading, false)]
[InlineData(TorrentState.Uploading, false)]
public void IsAllocating_ReturnsCorrectValue(TorrentState state, bool expected)
{
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
// Act
var result = wrapper.IsAllocating();
// Assert
result.ShouldBe(expected);
}
[Theory]
[InlineData(TorrentState.FetchingMetadata, true)]
[InlineData(TorrentState.ForcedFetchingMetadata, true)]
@@ -400,7 +429,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { State = state };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.IsMetadataDownloading();
@@ -415,7 +444,7 @@ public class QBitItemTests
// Arrange
var torrentInfo = new TorrentInfo { Name = "Test Torrent", Hash = "abc123" };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
// Act
var result = wrapper.IsIgnored(Array.Empty<string>());
@@ -425,13 +454,13 @@ public class QBitItemTests
}
[Fact]
public void IsIgnored_MatchingName_ReturnsTrue()
public void IsIgnored_MatchingHash_ReturnsTrue()
{
// Arrange
var torrentInfo = new TorrentInfo { Name = "Test Torrent", Hash = "abc123" };
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "test" };
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "abc123" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
@@ -441,13 +470,39 @@ public class QBitItemTests
}
[Fact]
public void IsIgnored_MatchingHash_ReturnsTrue()
public void IsIgnored_MatchingTag_ReturnsTrue()
{
// Arrange
var torrentInfo = new TorrentInfo { Name = "Test Torrent", Hash = "abc123" };
var torrentInfo = new TorrentInfo
{
Name = "Test Torrent",
Hash = "abc123",
Tags = new List<string> { "test-tag" }.AsReadOnly()
};
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItem(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "abc123" };
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "test-tag" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
// Assert
result.ShouldBeTrue();
}
[Fact]
public void IsIgnored_MatchingCategory_ReturnsTrue()
{
// Arrange
var torrentInfo = new TorrentInfo
{
Name = "Test Torrent",
Hash = "abc123",
Category = "test-category"
};
var trackers = new List<TorrentTracker>();
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "test-category" };
// Act
var result = wrapper.IsIgnored(ignoredDownloads);
@@ -460,12 +515,16 @@ public class QBitItemTests
public void IsIgnored_MatchingTracker_ReturnsTrue()
{
// Arrange
var torrentInfo = new TorrentInfo { Name = "Test Torrent", Hash = "abc123" };
var torrentInfo = new TorrentInfo
{
Name = "Test Torrent",
Hash = "abc123"
};
var trackers = new List<TorrentTracker>
{
new() { Url = "http://tracker.example.com/announce" }
};
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "tracker.example.com" };
// Act
@@ -479,12 +538,18 @@ public class QBitItemTests
public void IsIgnored_NotMatching_ReturnsFalse()
{
// Arrange
var torrentInfo = new TorrentInfo { Name = "Test Torrent", Hash = "abc123" };
var torrentInfo = new TorrentInfo
{
Name = "Test Torrent",
Hash = "abc123",
Category = "some-category",
Tags = new List<string> { "some-tag" }.AsReadOnly()
};
var trackers = new List<TorrentTracker>
{
new() { Url = "http://tracker.example.com/announce" }
};
var wrapper = new QBitItem(torrentInfo, trackers, false);
var wrapper = new QBitItemWrapper(torrentInfo, trackers, false);
var ignoredDownloads = new[] { "notmatching" };
// Act

View File

File diff suppressed because it is too large.

View File

@@ -0,0 +1,115 @@
using Cleanuparr.Infrastructure.Events.Interfaces;
using Cleanuparr.Infrastructure.Features.DownloadClient.QBittorrent;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.Extensions.Logging;
using Moq;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class QBitServiceFixture : IDisposable
{
public Mock<ILogger<QBitService>> Logger { get; }
public Mock<IFilenameEvaluator> FilenameEvaluator { get; }
public Mock<IStriker> Striker { get; }
public Mock<IDryRunInterceptor> DryRunInterceptor { get; }
public Mock<IHardLinkFileService> HardLinkFileService { get; }
public Mock<IDynamicHttpClientProvider> HttpClientProvider { get; }
public Mock<IEventPublisher> EventPublisher { get; }
public Mock<IBlocklistProvider> BlocklistProvider { get; }
public Mock<IRuleEvaluator> RuleEvaluator { get; }
public Mock<IRuleManager> RuleManager { get; }
public Mock<IQBittorrentClientWrapper> ClientWrapper { get; }
public QBitServiceFixture()
{
Logger = new Mock<ILogger<QBitService>>();
FilenameEvaluator = new Mock<IFilenameEvaluator>();
Striker = new Mock<IStriker>();
DryRunInterceptor = new Mock<IDryRunInterceptor>();
HardLinkFileService = new Mock<IHardLinkFileService>();
HttpClientProvider = new Mock<IDynamicHttpClientProvider>();
EventPublisher = new Mock<IEventPublisher>();
BlocklistProvider = new Mock<IBlocklistProvider>();
RuleEvaluator = new Mock<IRuleEvaluator>();
RuleManager = new Mock<IRuleManager>();
ClientWrapper = new Mock<IQBittorrentClientWrapper>();
// Set up default behavior for DryRunInterceptor to execute intercepted actions directly
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public QBitService CreateSut(DownloadClientConfig? config = null)
{
config ??= new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = "Test Client",
TypeName = Domain.Enums.DownloadClientTypeName.qBittorrent,
Type = Domain.Enums.DownloadClientType.Torrent,
Enabled = true,
Host = new Uri("http://localhost:8080"),
Username = "admin",
Password = "admin",
UrlBase = ""
};
// Setup HTTP client provider
var httpClient = new HttpClient();
HttpClientProvider
.Setup(x => x.CreateClient(It.IsAny<DownloadClientConfig>()))
.Returns(httpClient);
return new QBitService(
Logger.Object,
FilenameEvaluator.Object,
Striker.Object,
DryRunInterceptor.Object,
HardLinkFileService.Object,
HttpClientProvider.Object,
EventPublisher.Object,
BlocklistProvider.Object,
config,
RuleEvaluator.Object,
RuleManager.Object,
ClientWrapper.Object
);
}
public void ResetMocks()
{
Logger.Reset();
FilenameEvaluator.Reset();
Striker.Reset();
DryRunInterceptor.Reset();
HardLinkFileService.Reset();
HttpClientProvider.Reset();
EventPublisher.Reset();
RuleEvaluator.Reset();
RuleManager.Reset();
ClientWrapper.Reset();
// Re-apply the default DryRunInterceptor pass-through behavior
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public void Dispose()
{
GC.SuppressFinalize(this);
}
}
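
The qBittorrent fixture is consumed the same way as the Deluge one: the test class takes it through IClassFixture and calls ResetMocks in its constructor so setups do not leak between tests. A minimal consumer sketch, assuming it lives in the same test namespace:

using Xunit;

namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;

public class QBitServiceFixtureUsageSketch : IClassFixture<QBitServiceFixture>
{
    private readonly QBitServiceFixture _fixture;

    public QBitServiceFixtureUsageSketch(QBitServiceFixture fixture)
    {
        _fixture = fixture;
        _fixture.ResetMocks(); // the fixture instance is shared, so clear previous setups first
    }

    [Fact]
    public void CreateSut_WithDefaults_BuildsService()
    {
        var sut = _fixture.CreateSut();

        Assert.NotNull(sut);
    }
}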

View File

File diff suppressed because it is too large.

View File

@@ -0,0 +1,582 @@
using Cleanuparr.Domain.Entities.RTorrent.Response;
using Cleanuparr.Infrastructure.Features.DownloadClient.RTorrent;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class RTorrentItemWrapperTests
{
public class PropertyMapping_Tests
{
[Fact]
public void MapsHash()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "ABC123DEF456", Name = "Test" };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal("ABC123DEF456", wrapper.Hash);
}
[Fact]
public void MapsName()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test Torrent Name" };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal("Test Torrent Name", wrapper.Name);
}
[Fact]
public void MapsIsPrivate_True()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", IsPrivate = 1 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.True(wrapper.IsPrivate);
}
[Fact]
public void MapsIsPrivate_False()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", IsPrivate = 0 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.False(wrapper.IsPrivate);
}
[Fact]
public void MapsSize()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", SizeBytes = 1024000 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(1024000, wrapper.Size);
}
[Fact]
public void MapsDownloadSpeed()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", DownRate = 500000 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(500000, wrapper.DownloadSpeed);
}
[Fact]
public void MapsDownloadedBytes()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", CompletedBytes = 750000 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(750000, wrapper.DownloadedBytes);
}
[Fact]
public void MapsCategory()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies" };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal("movies", wrapper.Category);
}
[Fact]
public void CategoryIsSettable()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies" };
var wrapper = new RTorrentItemWrapper(torrent);
// Act
wrapper.Category = "tv";
// Assert
Assert.Equal("tv", wrapper.Category);
}
}
public class Ratio_Tests
{
[Fact]
public void ConvertsRatioFromRTorrentFormat()
{
// rTorrent returns ratio * 1000, so 1500 = 1.5 ratio
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Ratio = 1500 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(1.5, wrapper.Ratio);
}
[Fact]
public void HandlesZeroRatio()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Ratio = 0 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0, wrapper.Ratio);
}
[Fact]
public void HandlesHighRatio()
{
// Arrange - 10.0 ratio = 10000 in rTorrent
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Ratio = 10000 };
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(10.0, wrapper.Ratio);
}
}
public class CompletionPercentage_Tests
{
[Fact]
public void CalculatesCorrectPercentage()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 1000,
CompletedBytes = 500
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(50.0, wrapper.CompletionPercentage);
}
[Fact]
public void ReturnsZero_WhenSizeIsZero()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 0,
CompletedBytes = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0.0, wrapper.CompletionPercentage);
}
[Fact]
public void ReturnsHundred_WhenComplete()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 1000,
CompletedBytes = 1000
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(100.0, wrapper.CompletionPercentage);
}
}
public class IsDownloading_Tests
{
[Fact]
public void ReturnsTrue_WhenStateIsStartedAndNotComplete()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 1, // Started
Complete = 0 // Not complete
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.True(wrapper.IsDownloading());
}
[Fact]
public void ReturnsFalse_WhenStopped()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 0, // Stopped
Complete = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.False(wrapper.IsDownloading());
}
[Fact]
public void ReturnsFalse_WhenComplete()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 1, // Started
Complete = 1 // Complete (seeding)
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.False(wrapper.IsDownloading());
}
}
public class IsStalled_Tests
{
[Fact]
public void ReturnsTrue_WhenDownloadingWithNoSpeed()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 1,
Complete = 0,
DownRate = 0,
SizeBytes = 1000,
CompletedBytes = 500
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.True(wrapper.IsStalled());
}
[Fact]
public void ReturnsFalse_WhenDownloadingWithSpeed()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 1,
Complete = 0,
DownRate = 100000,
SizeBytes = 1000,
CompletedBytes = 500
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.False(wrapper.IsStalled());
}
[Fact]
public void ReturnsFalse_WhenNotDownloading()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
State = 0, // Stopped
Complete = 0,
DownRate = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.False(wrapper.IsStalled());
}
}
public class SeedingTime_Tests
{
[Fact]
public void ReturnsZero_WhenNotComplete()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
Complete = 0,
TimestampFinished = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0, wrapper.SeedingTimeSeconds);
}
[Fact]
public void ReturnsZero_WhenNoFinishTimestamp()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
Complete = 1,
TimestampFinished = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0, wrapper.SeedingTimeSeconds);
}
[Fact]
public void CalculatesSeedingTime_WhenComplete()
{
// Arrange
var finishedTime = DateTimeOffset.UtcNow.AddHours(-2).ToUnixTimeSeconds();
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
Complete = 1,
TimestampFinished = finishedTime
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert - should be approximately 2 hours (7200 seconds)
Assert.True(wrapper.SeedingTimeSeconds >= 7190 && wrapper.SeedingTimeSeconds <= 7210);
}
}
public class Eta_Tests
{
[Fact]
public void ReturnsZero_WhenNoDownloadSpeed()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 1000,
CompletedBytes = 500,
DownRate = 0
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0, wrapper.Eta);
}
[Fact]
public void CalculatesEta_WhenDownloading()
{
// Arrange - 500 bytes remaining at 100 bytes/sec = 5 seconds ETA
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 1000,
CompletedBytes = 500,
DownRate = 100
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(5, wrapper.Eta);
}
[Fact]
public void ReturnsZero_WhenComplete()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
SizeBytes = 1000,
CompletedBytes = 1000,
DownRate = 100
};
// Act
var wrapper = new RTorrentItemWrapper(torrent);
// Assert
Assert.Equal(0, wrapper.Eta);
}
}
public class IsIgnored_Tests
{
[Fact]
public void ReturnsFalse_WhenEmptyIgnoreList()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies" };
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string>());
// Assert
Assert.False(result);
}
[Fact]
public void ReturnsTrue_WhenHashMatches()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "ABC123", Name = "Test", Label = "movies" };
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string> { "ABC123" });
// Assert
Assert.True(result);
}
[Fact]
public void ReturnsTrue_WhenHashMatchesCaseInsensitive()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "ABC123", Name = "Test", Label = "movies" };
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string> { "abc123" });
// Assert
Assert.True(result);
}
[Fact]
public void ReturnsTrue_WhenCategoryMatches()
{
// Arrange
var torrent = new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies" };
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string> { "movies" });
// Assert
Assert.True(result);
}
[Fact]
public void ReturnsTrue_WhenTrackerDomainMatches()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
Label = "movies",
Trackers = new List<string> { "https://tracker.example.com/announce" }
};
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string> { "example.com" });
// Assert
Assert.True(result);
}
[Fact]
public void ReturnsFalse_WhenNoMatch()
{
// Arrange
var torrent = new RTorrentTorrent
{
Hash = "HASH1",
Name = "Test",
Label = "movies",
Trackers = new List<string> { "https://tracker.example.com/announce" }
};
var wrapper = new RTorrentItemWrapper(torrent);
// Act
var result = wrapper.IsIgnored(new List<string> { "other.com", "tv", "HASH2" });
// Assert
Assert.False(result);
}
}
}
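
Taken together, the mapping, ratio, completion, ETA, and seeding-time tests above pin down the conversions the wrapper is expected to perform. The following is a minimal sketch of that logic, written against the RTorrentTorrent fields used in this file and reusing the usings already present here; it is an illustration of what the tests imply, not the actual RTorrentItemWrapper source.

// Illustrative sketch only: the conversions exercised by the tests above.
public static class RTorrentConversionSketch
{
    // rTorrent reports ratio multiplied by 1000, so 1500 means 1.5.
    public static double Ratio(RTorrentTorrent t) => t.Ratio / 1000.0;

    // Completion as a percentage, guarding against zero-size torrents.
    public static double CompletionPercentage(RTorrentTorrent t) =>
        t.SizeBytes == 0 ? 0.0 : t.CompletedBytes * 100.0 / t.SizeBytes;

    // Remaining bytes divided by the current download rate; 0 when complete or idle.
    public static long Eta(RTorrentTorrent t)
    {
        long remaining = t.SizeBytes - t.CompletedBytes;
        return remaining <= 0 || t.DownRate <= 0 ? 0 : remaining / t.DownRate;
    }

    // Seeding time counts from the finish timestamp, only for completed torrents.
    public static long SeedingTimeSeconds(RTorrentTorrent t) =>
        t.Complete == 1 && t.TimestampFinished > 0
            ? DateTimeOffset.UtcNow.ToUnixTimeSeconds() - t.TimestampFinished
            : 0;
}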

View File

@@ -0,0 +1,689 @@
using Cleanuparr.Domain.Entities;
using Cleanuparr.Domain.Entities.RTorrent.Response;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.DownloadClient.RTorrent;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Moq;
using Xunit;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class RTorrentServiceDCTests : IClassFixture<RTorrentServiceFixture>
{
private readonly RTorrentServiceFixture _fixture;
public RTorrentServiceDCTests(RTorrentServiceFixture fixture)
{
_fixture = fixture;
_fixture.ResetMocks();
}
public class GetSeedingDownloads_Tests : RTorrentServiceDCTests
{
public GetSeedingDownloads_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task FiltersSeedingState()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<RTorrentTorrent>
{
new RTorrentTorrent { Hash = "HASH1", Name = "Torrent 1", State = 1, Complete = 1, IsPrivate = 0, Label = "" },
new RTorrentTorrent { Hash = "HASH2", Name = "Torrent 2", State = 1, Complete = 0, IsPrivate = 0, Label = "" }, // Downloading, not seeding
new RTorrentTorrent { Hash = "HASH3", Name = "Torrent 3", State = 1, Complete = 1, IsPrivate = 0, Label = "" },
new RTorrentTorrent { Hash = "HASH4", Name = "Torrent 4", State = 0, Complete = 1, IsPrivate = 0, Label = "" } // Stopped, not seeding
};
_fixture.ClientWrapper
.Setup(x => x.GetAllTorrentsAsync())
.ReturnsAsync(downloads);
// Act
var result = await sut.GetSeedingDownloads();
// Assert - only torrents with State=1 AND Complete=1 should be returned
Assert.Equal(2, result.Count);
Assert.All(result, item => Assert.NotNull(item.Hash));
}
[Fact]
public async Task ReturnsEmptyList_WhenNoTorrents()
{
// Arrange
var sut = _fixture.CreateSut();
_fixture.ClientWrapper
.Setup(x => x.GetAllTorrentsAsync())
.ReturnsAsync(new List<RTorrentTorrent>());
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Empty(result);
}
[Fact]
public async Task SkipsTorrentsWithEmptyHash()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<RTorrentTorrent>
{
new RTorrentTorrent { Hash = "", Name = "No Hash", State = 1, Complete = 1, IsPrivate = 0, Label = "" },
new RTorrentTorrent { Hash = "HASH1", Name = "Valid Hash", State = 1, Complete = 1, IsPrivate = 0, Label = "" }
};
_fixture.ClientWrapper
.Setup(x => x.GetAllTorrentsAsync())
.ReturnsAsync(downloads);
// Act
var result = await sut.GetSeedingDownloads();
// Assert
Assert.Single(result);
Assert.Equal("HASH1", result[0].Hash);
}
}
public class FilterDownloadsToBeCleanedAsync_Tests : RTorrentServiceDCTests
{
public FilterDownloadsToBeCleanedAsync_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void MatchesCategories()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Torrent 1", Label = "movies" }),
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH2", Name = "Torrent 2", Label = "tv" }),
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH3", Name = "Torrent 3", Label = "music" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true },
new SeedingRule { Name = "tv", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Equal(2, result.Count);
Assert.Contains(result, x => x.Category == "movies");
Assert.Contains(result, x => x.Category == "tv");
}
[Fact]
public void IsCaseInsensitive()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Torrent 1", Label = "Movies" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Single(result);
}
[Fact]
public void ReturnsEmptyList_WhenNoMatches()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Torrent 1", Label = "music" })
};
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Empty(result);
}
[Fact]
public void ReturnsNull_WhenDownloadsNull()
{
// Arrange
var sut = _fixture.CreateSut();
var categories = new List<SeedingRule>
{
new SeedingRule { Name = "movies", MaxRatio = -1, MinSeedTime = 0, MaxSeedTime = -1, DeleteSourceFiles = true }
};
// Act
var result = sut.FilterDownloadsToBeCleanedAsync(null, categories);
// Assert
Assert.Null(result);
}
}
public class FilterDownloadsToChangeCategoryAsync_Tests : RTorrentServiceDCTests
{
public FilterDownloadsToChangeCategoryAsync_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void MatchesCategories()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Torrent 1", Label = "movies" }),
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH2", Name = "Torrent 2", Label = "tv" }),
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH3", Name = "Torrent 3", Label = "music" })
};
var categories = new List<string> { "movies", "tv" };
// Act
var result = sut.FilterDownloadsToChangeCategoryAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Equal(2, result.Count);
}
[Fact]
public void SkipsEmptyHashes()
{
// Arrange
var sut = _fixture.CreateSut();
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "", Name = "No Hash", Label = "movies" }),
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Valid Hash", Label = "movies" })
};
var categories = new List<string> { "movies" };
// Act
var result = sut.FilterDownloadsToChangeCategoryAsync(downloads, categories);
// Assert
Assert.NotNull(result);
Assert.Single(result);
Assert.Equal("HASH1", result[0].Hash);
}
}
public class DeleteDownload_Tests : RTorrentServiceDCTests
{
public DeleteDownload_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task NormalizesHashToUppercase()
{
// Arrange
var sut = _fixture.CreateSut();
var hash = "lowercase";
var mockTorrent = new Mock<ITorrentItemWrapper>();
mockTorrent.Setup(x => x.Hash).Returns(hash);
mockTorrent.Setup(x => x.SavePath).Returns("/test/path");
_fixture.ClientWrapper
.Setup(x => x.DeleteTorrentAsync("LOWERCASE"))
.Returns(Task.CompletedTask);
// Act
await sut.DeleteDownload(mockTorrent.Object, deleteSourceFiles: false);
// Assert
_fixture.ClientWrapper.Verify(
x => x.DeleteTorrentAsync("LOWERCASE"),
Times.Once);
}
}
public class CreateCategoryAsync_Tests : RTorrentServiceDCTests
{
public CreateCategoryAsync_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task IsNoOp_BecauseRTorrentDoesNotSupportCategories()
{
// Arrange
var sut = _fixture.CreateSut();
// Act
await sut.CreateCategoryAsync("test-category");
// Assert - no client calls should be made
_fixture.ClientWrapper.VerifyNoOtherCalls();
}
}
public class ChangeCategoryForNoHardLinksAsync_Tests : RTorrentServiceDCTests
{
public ChangeCategoryForNoHardLinksAsync_Tests(RTorrentServiceFixture fixture) : base(fixture)
{
}
[Fact]
public async Task NullDownloads_DoesNothing()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(null);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task EmptyDownloads_DoesNothing()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(new List<ITorrentItemWrapper>());
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task MissingHash_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task MissingName_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "", Label = "movies", BasePath = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task MissingCategory_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "", BasePath = "/downloads" })
};
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task GetFilesThrows_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ThrowsAsync(new Exception("XML-RPC error"));
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task SkippedFiles_IgnoredInCheck()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 0 }, // Skipped
new RTorrentFile { Index = 1, Path = "file2.mkv", Priority = 1 } // Active
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert - only called for file2.mkv (the active file)
_fixture.HardLinkFileService.Verify(
x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()),
Times.Once);
}
[Fact]
public async Task NoHardlinks_ChangesLabel()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 1 }
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert - rTorrent uses SetLabelAsync (not SetTorrentCategoryAsync)
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync("HASH1", "unlinked"),
Times.Once);
}
[Fact]
public async Task HasHardlinks_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 1 }
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(2); // Has hardlinks
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task FileNotFound_SkipsTorrent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 1 }
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(-1); // Error / file not found
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.ClientWrapper.Verify(
x => x.SetLabelAsync(It.IsAny<string>(), It.IsAny<string>()),
Times.Never);
}
[Fact]
public async Task PublishesCategoryChangedEvent()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var downloads = new List<ITorrentItemWrapper>
{
new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" })
};
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 1 }
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
_fixture.EventPublisher.Verify(
x => x.PublishCategoryChanged("movies", "unlinked", false),
Times.Once);
}
[Fact]
public async Task UpdatesCategoryOnWrapper()
{
// Arrange
var sut = _fixture.CreateSut();
var config = new DownloadCleanerConfig
{
Id = Guid.NewGuid(),
UnlinkedTargetCategory = "unlinked"
};
ContextProvider.Set(nameof(DownloadCleanerConfig), config);
var wrapper = new RTorrentItemWrapper(new RTorrentTorrent { Hash = "HASH1", Name = "Test", Label = "movies", BasePath = "/downloads" });
var downloads = new List<ITorrentItemWrapper> { wrapper };
_fixture.ClientWrapper
.Setup(x => x.GetTorrentFilesAsync("HASH1"))
.ReturnsAsync(new List<RTorrentFile>
{
new RTorrentFile { Index = 0, Path = "file1.mkv", Priority = 1 }
});
_fixture.HardLinkFileService
.Setup(x => x.GetHardLinkCount(It.IsAny<string>(), It.IsAny<bool>()))
.Returns(0);
// Act
await sut.ChangeCategoryForNoHardLinksAsync(downloads);
// Assert
Assert.Equal("unlinked", wrapper.Category);
}
}
}
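
The seeding and cleaning tests above encode two pieces of behaviour: "seeding" means started and complete with a usable hash, and category matching against seeding rules is case-insensitive, with a null input yielding null. Below is a hedged sketch of filtering along those lines, using the types already referenced in this diff and the test file's existing usings; the real RTorrentService may structure this differently.

// Illustration only: the filtering rules the tests above assert, not the service's actual code.
public static class RTorrentFilteringSketch
{
    // Seeding: started (State == 1), finished (Complete == 1), and a non-empty hash.
    public static List<RTorrentItemWrapper> SeedingOnly(IEnumerable<RTorrentTorrent> torrents) =>
        torrents
            .Where(t => t.State == 1 && t.Complete == 1 && !string.IsNullOrEmpty(t.Hash))
            .Select(t => new RTorrentItemWrapper(t))
            .ToList();

    // Case-insensitive match of each download's category against the rule names;
    // a null input returns null, mirroring ReturnsNull_WhenDownloadsNull.
    public static List<ITorrentItemWrapper>? ToBeCleaned(
        IReadOnlyList<ITorrentItemWrapper>? downloads,
        IReadOnlyList<SeedingRule> rules)
    {
        if (downloads is null)
        {
            return null;
        }

        return downloads
            .Where(d => rules.Any(r =>
                string.Equals(r.Name, d.Category, StringComparison.OrdinalIgnoreCase)))
            .ToList();
    }
}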

View File

@@ -0,0 +1,112 @@
using Cleanuparr.Infrastructure.Events.Interfaces;
using Cleanuparr.Infrastructure.Features.DownloadClient.RTorrent;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.Extensions.Logging;
using Moq;
namespace Cleanuparr.Infrastructure.Tests.Features.DownloadClient;
public class RTorrentServiceFixture : IDisposable
{
public Mock<ILogger<RTorrentService>> Logger { get; }
public Mock<IFilenameEvaluator> FilenameEvaluator { get; }
public Mock<IStriker> Striker { get; }
public Mock<IDryRunInterceptor> DryRunInterceptor { get; }
public Mock<IHardLinkFileService> HardLinkFileService { get; }
public Mock<IDynamicHttpClientProvider> HttpClientProvider { get; }
public Mock<IEventPublisher> EventPublisher { get; }
public Mock<IBlocklistProvider> BlocklistProvider { get; }
public Mock<IRuleEvaluator> RuleEvaluator { get; }
public Mock<IRuleManager> RuleManager { get; }
public Mock<IRTorrentClientWrapper> ClientWrapper { get; }
public RTorrentServiceFixture()
{
Logger = new Mock<ILogger<RTorrentService>>();
FilenameEvaluator = new Mock<IFilenameEvaluator>();
Striker = new Mock<IStriker>();
DryRunInterceptor = new Mock<IDryRunInterceptor>();
HardLinkFileService = new Mock<IHardLinkFileService>();
HttpClientProvider = new Mock<IDynamicHttpClientProvider>();
EventPublisher = new Mock<IEventPublisher>();
BlocklistProvider = new Mock<IBlocklistProvider>();
RuleEvaluator = new Mock<IRuleEvaluator>();
RuleManager = new Mock<IRuleManager>();
ClientWrapper = new Mock<IRTorrentClientWrapper>();
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public RTorrentService CreateSut(DownloadClientConfig? config = null)
{
config ??= new DownloadClientConfig
{
Id = Guid.NewGuid(),
Name = "Test rTorrent Client",
TypeName = Domain.Enums.DownloadClientTypeName.rTorrent,
Type = Domain.Enums.DownloadClientType.Torrent,
Enabled = true,
Host = new Uri("http://localhost/RPC2"),
Username = "admin",
Password = "admin",
UrlBase = ""
};
var httpClient = new HttpClient();
HttpClientProvider
.Setup(x => x.CreateClient(It.IsAny<DownloadClientConfig>()))
.Returns(httpClient);
return new RTorrentService(
Logger.Object,
FilenameEvaluator.Object,
Striker.Object,
DryRunInterceptor.Object,
HardLinkFileService.Object,
HttpClientProvider.Object,
EventPublisher.Object,
BlocklistProvider.Object,
config,
RuleEvaluator.Object,
RuleManager.Object,
ClientWrapper.Object
);
}
public void ResetMocks()
{
Logger.Reset();
FilenameEvaluator.Reset();
Striker.Reset();
DryRunInterceptor.Reset();
HardLinkFileService.Reset();
HttpClientProvider.Reset();
EventPublisher.Reset();
RuleEvaluator.Reset();
RuleManager.Reset();
ClientWrapper.Reset();
DryRunInterceptor
.Setup(x => x.InterceptAsync(It.IsAny<Delegate>(), It.IsAny<object[]>()))
.Returns((Delegate action, object[] parameters) =>
{
return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
});
}
public void Dispose()
{
GC.SuppressFinalize(this);
}
}
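
The IDryRunInterceptor mock above is a deliberate pass-through: it invokes whatever delegate the service hands it, so the tests can verify the resulting calls on ClientWrapper. In production such an interceptor would presumably short-circuit destructive actions when dry-run mode is enabled. The sketch below shows that idea against the same InterceptAsync shape the fixture mocks; the flag and behaviour are assumptions, and it does not claim to cover the full IDryRunInterceptor surface.

// Hypothetical dry-run gate matching the InterceptAsync shape mocked in the fixture.
public sealed class PassThroughOrDryRun
{
    private readonly bool _dryRunEnabled; // assumed flag, not a real config property

    public PassThroughOrDryRun(bool dryRunEnabled) => _dryRunEnabled = dryRunEnabled;

    public Task InterceptAsync(Delegate action, params object[] parameters)
    {
        if (_dryRunEnabled)
        {
            // Dry run: skip the destructive call (e.g. a delete) and report success.
            return Task.CompletedTask;
        }

        // Normal mode: execute the delegate, exactly like the fixture's mock does.
        return (Task)(action.DynamicInvoke(parameters) ?? Task.CompletedTask);
    }
}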

Some files were not shown because too many files have changed in this diff.