Compare commits

...

103 Commits

Author SHA1 Message Date
Flaminel
ac3be75082 Fix workflow dispatch defaulting to dev version (#379) 2025-11-30 22:53:23 +02:00
Flaminel
a1663b865a Improve workflow dispatch (#378) 2025-11-30 22:27:40 +02:00
Flaminel
c97a416d1e Fix windows workflow (#377) 2025-11-30 16:22:21 +02:00
Flaminel
d28ab42303 Fix frontend workflow using assets instead of cache (#376) 2025-11-30 15:48:53 +02:00
Flaminel
fbb2bba3b6 Update packages (#375) 2025-11-30 13:14:29 +02:00
Flaminel
08eda22587 Add test workflow and improve workflow parallelization (#369) 2025-11-25 23:05:28 +02:00
Flaminel
a4045eebd3 Add downloads volume to detailed installation docs (#365) 2025-11-22 22:15:37 +02:00
Flaminel
a57cbccbb4 Improve UI validations (#366) 2025-11-22 22:14:50 +02:00
Flaminel
2221f118bb Fix qBittorrent tracker check (#363) 2025-11-09 19:03:48 +02:00
Flaminel
2cc3eb4ebb Fix ignored downloads not checking for certain fields (#362) 2025-11-09 18:24:26 +02:00
Flaminel
3a064a22bd Remove hardcoded app status timeout (#356) 2025-11-03 18:38:09 +02:00
Flaminel
ee764ff215 Fix Transmission stalled check (#354) 2025-11-02 17:48:30 +02:00
Flaminel
402677b69b Fix ignored downloads not being saved for Queue Cleaner (#353)
fixed ignored downloads not being saved for Queue Cleaner
2025-10-31 17:39:52 +02:00
Flaminel
97f63434fd Recreate faulty migration (#349) 2025-10-23 23:59:11 +03:00
Flaminel
07d0cf07e3 Add more screenshots to the docs (#350) 2025-10-23 23:53:01 +03:00
Flaminel
4be107439a Fix ntfy notification priority being overwritten (#348) 2025-10-23 23:36:22 +03:00
Flaminel
89ef03a859 Add failed import safeguard for private torrents when download client is unavailable (#347) 2025-10-23 18:27:28 +03:00
Flaminel
905384034d Improve docs (#342) 2025-10-23 18:17:43 +03:00
Flaminel
bf826da1ae Add handling for items that are not being blocked (#346) 2025-10-23 18:12:42 +03:00
Flaminel
6aac35181b Change minimum and default value for search delay (#345) 2025-10-23 11:50:13 +03:00
Flaminel
efbf60dcdd Add option to remove only specified failed import message patterns (#297) 2025-10-22 23:45:05 +03:00
Flaminel
ebb166a7b9 Add queue rules (#332) 2025-10-22 13:46:43 +03:00
Flaminel
7aced28262 Add contributing guide (#340) 2025-10-17 23:12:35 +03:00
Flaminel
ae3e793498 Update GitHub issue templates (#339)
updated issue templates
2025-10-17 22:30:35 +03:00
Flaminel
4eb98b18a1 Update the feature request template (#329) 2025-10-06 16:11:31 +03:00
Copilot
128e7e5f11 Add on-demand job triggers (#310) 2025-10-01 10:42:02 +03:00
Flaminel
d224b2dea0 Add app version to the UI (#318) 2025-10-01 10:36:13 +03:00
Flaminel
16e823b8d3 Add status json to Cloudflare pages (#323) 2025-09-29 21:43:15 +03:00
Flaminel
f2f11e3472 Fix UI caching for Download Cleaner page (#316) 2025-09-29 21:37:25 +03:00
Flaminel
a3549c80a9 Fix icons paths (#322) 2025-09-29 21:36:59 +03:00
Flaminel
2b9c347ed6 Fix docs build workflow (#315) 2025-09-25 12:23:20 +03:00
Flaminel
98ccee866d Fix sidebar styling (#313) 2025-09-25 12:06:46 +03:00
Flaminel
911849c6dd Fix blacklist synchronizer docs (#307) 2025-09-16 23:30:38 +03:00
Flaminel
cce3bb2c4a Add ntfy support (#300) 2025-09-15 22:08:48 +03:00
Flaminel
bcc117cd0d Fix slow strikes not being reset (#305) 2025-09-15 22:03:18 +03:00
Flaminel
8e20a68ae2 Improve frontend layout (#299) 2025-09-15 22:03:03 +03:00
Flaminel
736c146f25 Add ignored downloads setting per job (#301) 2025-09-15 22:02:03 +03:00
Flaminel
6398ef1cc6 Add option to inject blacklist into qBittorrent (#304) 2025-09-15 21:59:49 +03:00
Flaminel
83e6a289be Change docs build triggers (#303) 2025-09-15 20:55:35 +03:00
Julien Virey
5662118b01 Add documentation about archlinux package (#296) 2025-09-06 01:30:06 +03:00
Flaminel
22dfc7b40d Fix blocklist provider reporting wrong number of loaded blocklists (#293) 2025-09-04 22:14:46 +03:00
Flaminel
a51e387453 Fix log level change not taking effect (#292) 2025-09-04 22:12:57 +03:00
Flaminel
c7d2ec7311 Fix notification provider update (#291) 2025-09-03 23:48:02 +03:00
Flaminel
bb9ac5b67b Fix notifications migration when no event type is enabled (#290) 2025-09-03 21:12:55 +03:00
Flaminel
f93494adb2 Rework notifications system (#284) 2025-09-02 23:18:22 +03:00
Flaminel
7201520411 Add configurable log retention (#279) 2025-09-02 00:17:16 +03:00
Flaminel
2a1e65e1af Make sidebar scrollable (#285) 2025-09-02 00:16:38 +03:00
Flaminel
da318c3339 Fix HTTPS schema for Cloudflare pages links (#286) 2025-09-02 00:16:27 +03:00
Flaminel
7149b6243f Add .sql to the blacklist (#287) 2025-09-02 00:16:12 +03:00
Flaminel
11f5a28c04 Improve download client health checks (#288) 2025-09-02 00:15:09 +03:00
Flaminel
9cc36c7a50 Add qBittorrent basic auth support (#246) 2025-08-11 10:52:44 +03:00
Flaminel
861c135cc6 fixed Malware Blocker docs path 2025-08-07 11:55:46 +03:00
Flaminel
3b0275c411 Finish rebranding Content Blocker to Malware Blocker (#271) 2025-08-06 22:55:39 +03:00
Flaminel
cad1b51202 Improve logs and events ordering to be descending from the top (#270) 2025-08-06 22:51:20 +03:00
Flaminel
f50acd29f4 Disable MassTransit telemetry (#268) 2025-08-06 22:50:48 +03:00
LucasFA
af11d595d8 Fix detailed installation docs (#260)
https://cleanuparr.github.io/Cleanuparr/docs/installation/detailed
2025-08-06 22:49:14 +03:00
Flaminel
44994d5b21 Fix Notifiarr channel id input (#267) 2025-08-04 22:07:33 +03:00
Flaminel
592fd2d846 Fix Malware Blocker renaming issue (#259) 2025-08-02 15:54:26 +03:00
Flaminel
e96be1fca2 Small general fixes (#257)
* renamed ContentBlocker into MalwareBlocker in the logs

* fixed "Delete Private" input description
2025-08-02 11:36:47 +03:00
Flaminel
ee44e2b5ac Rework sidebar navigation (#255) 2025-08-02 05:31:25 +03:00
Flaminel
323bfc4d2e added major and minor tags for Docker images 2025-08-01 19:51:10 +03:00
Flaminel
dca45585ca General frontend improvements (#252) 2025-08-01 19:45:01 +03:00
Flaminel
8b5918d221 Improve malware detection for known malware (#251) 2025-08-01 19:33:35 +03:00
Flaminel
9c227c1f59 add Cloudflare static assets 2025-08-01 18:37:45 +03:00
Flaminel
2ad4499a6f Fix DownloadCleaner failing when using multiple download clients (#248) 2025-07-31 22:20:01 +03:00
Flaminel
33a5bf9ab3 Add uTorrent support (#240) 2025-07-28 23:09:19 +03:00
Flaminel
de06d1c2d3 Fix download client type being sent as number instead of string (#245) 2025-07-27 14:23:48 +03:00
Flaminel
72855bc030 small fix on how_it_works page of the docs 2025-07-24 18:41:05 +03:00
eatsleepcoderepeat-gl
b185ea6899 Added new whitelist which includes subtitles (#243) 2025-07-24 12:50:03 +03:00
Flaminel
1e0127e97e Add more states to be picked up by Download Cleaner (#242) 2025-07-23 23:54:20 +03:00
Flaminel
5bdbc98d68 fixed Docker image path in docs 2025-07-23 11:39:50 +03:00
Flaminel
e1aeb3da31 Try #1 to fix memory leak (#241) 2025-07-22 12:24:38 +03:00
Flaminel
283b09e8f1 fixed release name 2025-07-22 12:03:23 +03:00
Flaminel
b03c96249b Improve torrent protocol detection (#235) 2025-07-07 20:42:59 +03:00
Flaminel
2971445090 Add handling type of malware when containing thepirateheaven.org file (#232) 2025-07-07 14:29:39 +03:00
Flaminel
55c23419cd Improve download removal to be separate from download search (#233) 2025-07-07 14:28:34 +03:00
Flaminel
c4b9d9503a Add more logs for debug (#201) 2025-07-07 14:28:15 +03:00
Flaminel
823b73d9f0 Fix arr max strikes not being used instead of global setting (#231) 2025-07-04 21:16:02 +03:00
Flaminel
31632d25a4 Add Whisparr support (#215) 2025-07-04 21:15:35 +03:00
Flaminel
c59951a39c Add Progressive Web App (PWA) support (#228) 2025-07-04 21:15:14 +03:00
Flaminel
d9140d7b5b Add support for Apprise tags (#229) 2025-07-04 21:14:40 +03:00
Flaminel
90865a73b5 Add failed import messages to logs (#230) 2025-07-04 21:14:27 +03:00
Flaminel
cc45233223 Add support for basic auth for Apprise (#221) 2025-07-03 12:43:43 +03:00
Flaminel
5d12d601ae fixed repo links in the docs 2025-07-01 22:02:14 +03:00
Flaminel
88f40438af Fix validations and increased strikes limits (#212) 2025-07-01 13:18:50 +03:00
Flaminel
0a9ec06841 removed forgotten release step from MacOS workflow 2025-07-01 11:05:00 +03:00
Flaminel
a0ca6ec4b8 Add curl to the Docker image (#211) 2025-07-01 10:06:22 +03:00
Flaminel
eb6cf96470 Fix cron expression inputs (#203) 2025-07-01 01:00:43 +03:00
Flaminel
2ca0616771 Add date on dashboard logs and events (#205) 2025-07-01 01:00:30 +03:00
Flaminel
bc85144e60 Improve deploy workflows (#206) 2025-07-01 01:00:16 +03:00
Flaminel
236e31c841 Add download client name on debug logs (#207) 2025-07-01 00:59:52 +03:00
Flaminel
7a15139aa6 Fix autocomplete input on mobile phones (#196) 2025-06-30 13:28:14 +03:00
Flaminel
fb6ccfd011 Add Readarr support (#191) 2025-06-29 19:54:15 +03:00
Flaminel
ef85e2b690 Fix docs broken links (#190) 2025-06-29 01:03:24 +03:00
Flaminel
bb734230aa Add health checks (#181) 2025-06-29 00:00:55 +03:00
Flaminel
aa31c31955 Remove right-side icons from settings cards (#183) 2025-06-29 00:00:25 +03:00
Flaminel
1a89822f36 Change icon direction for UI accordions (#182) 2025-06-29 00:00:11 +03:00
Flaminel
fc9e0eca36 Fix some small UI stuff (#185) 2025-06-28 23:59:49 +03:00
Flaminel
0010dcb1c6 Fix jobs not being scheduled according to the cron expression (#187) 2025-06-28 23:55:08 +03:00
Flaminel
0ab8611f29 removed Docker Hub reference 2025-06-28 11:52:34 +03:00
Flaminel
9e02408a7e Fix download cleaner categories not being fetched (#177) 2025-06-28 00:08:58 +03:00
Flaminel
1bd0db05e6 updated readme 2025-06-27 21:32:22 +03:00
Flaminel
fb438f2ca7 Fix base paths being incorrectly configured for download clients (#173) 2025-06-27 19:44:46 +03:00
608 changed files with 56145 additions and 16769 deletions


@@ -7,6 +7,13 @@ body:
attributes:
value: |
Thanks for taking the time to improve Cleanuparr!
- type: checkboxes
id: duplicate-check
attributes:
label: "Duplicate check"
options:
- label: I have searched for existing issues and confirmed this is not a duplicate.
required: true
- type: checkboxes
id: init
attributes:


@@ -7,6 +7,25 @@ body:
attributes:
value: |
Thanks for taking the time to improve Cleanuparr!
- type: checkboxes
id: duplicate-check
attributes:
label: "Duplicate check"
options:
- label: I have searched for existing issues and confirmed this is not a duplicate.
required: true
- type: checkboxes
id: init
attributes:
label: Implementation & testing support
description: The requester should help answer questions, provide support for the implementation and test changes.
options:
- label: I understand I must be available to assist with implementation questions and to test the feature before being released.
required: true
- label: I understand that joining the Discord server may be necessary for better coordination and faster communication.
required: true
- label: I understand that failure to assist in the development process of my request will result in the request being closed.
required: true
- type: textarea
id: description
attributes:


@@ -7,6 +7,13 @@ body:
attributes:
value: |
If you are experiencing unexpected behavior, please consider submitting a bug report instead.
- type: checkboxes
id: duplicate-check
attributes:
label: "Duplicate check"
options:
- label: I have searched for existing issues and confirmed this is not a duplicate.
required: true
- type: checkboxes
id: init
attributes:


@@ -1,2 +1,8 @@
blank_issues_enabled: false
contact_links: []
contact_links:
- name: Discord Community
url: https://discord.gg/SCtMCgtsc4
about: Join our Discord for real-time help and discussions
- name: Documentation
url: https://cleanuparr.github.io/Cleanuparr/
about: Check the documentation for configurations and usage guidelines

.github/PULL_REQUEST_TEMPLATE.md

@@ -0,0 +1,24 @@
## Description
<!-- Brief description of what this PR does -->
## Related Issue
Closes #ISSUE_NUMBER
## Type of Change
- [ ] Bug fix
- [ ] New feature
- [ ] Breaking change
- [ ] Documentation update
## Testing
<!-- Describe how you tested your changes -->
## Screenshots (if applicable)
<!-- Add screenshots here -->
## Checklist
- [ ] I have read the [Contributing Guide](../CONTRIBUTING.md)
- [ ] I have announced my intent to work on this and received approval
- [ ] My code follows the project's code standards
- [ ] I have tested my changes thoroughly
- [ ] I have updated relevant documentation


@@ -0,0 +1,30 @@
name: 'Get Vault Secrets'
description: 'Retrieves secrets from HashiCorp Vault using AppRole authentication'
inputs:
vault_host:
description: 'Vault server URL'
required: true
vault_role_id:
description: 'Vault AppRole Role ID'
required: true
vault_secret_id:
description: 'Vault AppRole Secret ID'
required: true
secrets:
description: 'Secrets to retrieve (multiline string, one per line in format: path | output_name)'
required: true
default: |
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
secrets/data/github packages_pat | PACKAGES_PAT
runs:
using: "composite"
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ inputs.vault_host }}
method: approle
roleId: ${{ inputs.vault_role_id }}
secretId: ${{ inputs.vault_secret_id }}
secrets: ${{ inputs.secrets }}
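For reference, a caller workflow would invoke this composite action roughly as follows. This is a minimal sketch: the checkout path ./.github/actions/vault-secrets is an assumption (the diff does not show where the action file lives), while the input names and secret paths come from the definition above.

  - name: Get vault secrets
    uses: ./.github/actions/vault-secrets   # assumed path, not shown in this diff
    with:
      vault_host: ${{ secrets.VAULT_HOST }}
      vault_role_id: ${{ secrets.VAULT_ROLE_ID }}
      vault_secret_id: ${{ secrets.VAULT_SECRET_ID }}
      secrets: |
        secrets/data/github repo_readonly_pat | REPO_READONLY_PAT

Each retrieved secret is then available to later steps under its output name, e.g. ${{ env.REPO_READONLY_PAT }}.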


@@ -1,14 +1,21 @@
name: Build Docker Images
on:
push:
tags:
- "v*.*.*"
pull_request:
paths:
- 'code/**'
workflow_dispatch:
workflow_call:
inputs:
push_docker:
description: 'Push Docker image to registry'
type: boolean
required: false
default: true
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build_app:
@@ -29,6 +36,8 @@ jobs:
githubHeadRef=${{ env.githubHeadRef }}
latestDockerTag=""
versionDockerTag=""
majorVersionDockerTag=""
minorVersionDockerTag=""
version="0.0.1"
if [[ "$githubRef" =~ ^"refs/tags/" ]]; then
@@ -36,6 +45,12 @@ jobs:
latestDockerTag="latest"
versionDockerTag=${branch#v}
version=${branch#v}
# Extract major and minor versions for additional tags
if [[ "$versionDockerTag" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
majorVersionDockerTag="${BASH_REMATCH[1]}"
minorVersionDockerTag="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"
fi
else
# Determine if this run is for the main branch or another branch
if [[ -z "$githubHeadRef" ]]; then
@@ -58,6 +73,12 @@ jobs:
if [ -n "$versionDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr/cleanuparr:$versionDockerTag"
fi
if [ -n "$minorVersionDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr/cleanuparr:$minorVersionDockerTag"
fi
if [ -n "$majorVersionDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr/cleanuparr:$majorVersionDockerTag"
fi
# set env vars
echo "branch=$branch" >> $GITHUB_ENV
@@ -101,6 +122,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push docker image
id: docker-build
timeout-minutes: 15
uses: docker/build-push-action@v6
with:
@@ -119,6 +141,9 @@ jobs:
platforms: |
linux/amd64
linux/arm64
push: true
push: ${{ inputs.push_docker }}
tags: |
${{ env.githubTags }}
${{ env.githubTags }}
# Enable BuildKit cache for faster builds
cache-from: type=gha
cache-to: type=gha,mode=max
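As a worked example of the tag logic added above (illustrative only, not part of the diff), a push of tag v1.2.3 resolves as follows:

  versionDockerTag="1.2.3"
  if [[ "$versionDockerTag" =~ ^([0-9]+)\.([0-9]+)\.([0-9]+) ]]; then
    majorVersionDockerTag="${BASH_REMATCH[1]}"                      # 1
    minorVersionDockerTag="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}"   # 1.2
  fi
  # Pushed image tags: ghcr.io/cleanuparr/cleanuparr:{latest,1.2.3,1.2,1}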


@@ -1,40 +1,55 @@
name: Build Executables
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build:
# Build for each platform in parallel using matrix strategy
build-platform:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
include:
- runtime: win-x64
platform: win-amd64
- runtime: linux-x64
platform: linux-amd64
- runtime: linux-arm64
platform: linux-arm64
- runtime: osx-x64
platform: osx-amd64
- runtime: osx-arm64
platform: osx-arm64
steps:
- name: Gate
if: ${{ !startsWith(github.ref, 'refs/tags/') && github.event_name != 'workflow_dispatch' }}
run: |
echo "This workflow only runs on tag events or manual dispatch. Pipeline finished."
exit 0
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
# Use input version if provided, otherwise determine from ref
if [[ -n "${{ inputs.app_version }}" ]]; then
appVersion="${{ inputs.app_version }}"
releaseVersion="v$appVersion"
elif [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repoFullName=${{ github.repository }}
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=${repoFullName#*/}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
@@ -58,27 +73,28 @@ jobs:
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup dotnet
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Cache NuGet packages
uses: actions/cache@v4
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json', '**/*.csproj') }}
restore-keys: |
${{ runner.os }}-nuget-
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Install dependencies and restore
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ secrets.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
@@ -86,70 +102,49 @@ jobs:
mkdir -p code/backend/${{ env.executableName }}/wwwroot
cp -r code/frontend/dist/ui/browser/* code/backend/${{ env.executableName }}/wwwroot/
- name: Build win-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build ${{ matrix.platform }}
run: |
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime ${{ matrix.runtime }} \
--self-contained \
-o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }} \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugSymbols=false
- name: Build linux-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Zip win-x64
- name: Zip artifact
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }}.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-${{ matrix.platform }}/
- name: Zip linux-x64
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: executable-${{ matrix.platform }}
path: ./artifacts/*.zip
retention-days: 30
# Consolidate all executable artifacts
consolidate:
needs: build-platform
runs-on: ubuntu-latest
steps:
- name: Download all platform artifacts
uses: actions/download-artifact@v4
with:
pattern: executable-*
path: ./artifacts
merge-multiple: true
- name: List downloaded artifacts
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/
echo "Consolidated executable artifacts:"
find ./artifacts -type f -name "*.zip" | sort
- name: Zip linux-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/
- name: Zip osx-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/
- name: Zip osx-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/
- name: Upload artifacts
- name: Upload consolidated artifacts
uses: actions/upload-artifact@v4
with:
name: cleanuparr-executables
path: |
./artifacts/*.zip
path: ./artifacts/*.zip
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
id: release
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
fail_on_unmatched_files: true
target_commitish: main
generate_release_notes: true
files: |
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip

.github/workflows/build-frontend.yml

@@ -0,0 +1,46 @@
name: Build Frontend
on:
workflow_call:
jobs:
build-frontend:
runs-on: ubuntu-latest
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
- name: Checkout repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ github.repository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '24'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Upload frontend artifact
uses: actions/upload-artifact@v4
with:
name: frontend-dist
path: code/frontend/dist/ui/browser
retention-days: 1


@@ -1,376 +0,0 @@
name: Build macOS ARM Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-arm-installer:
name: Build macOS ARM Installer
runs-on: macos-14 # ARM runner for Apple Silicon
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS ARM executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-arm64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>11.0</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-arm64-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}


@@ -1,28 +1,47 @@
name: Build macOS Intel Installer
name: Build macOS Installers
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build-macos-intel-installer:
name: Build macOS Intel Installer
runs-on: macos-13 # Intel runner
build-macos-installer:
name: Build macOS ${{ matrix.arch }} Installer
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
include:
- arch: Intel
runner: macos-13
runtime: osx-x64
min_os_version: "10.15"
artifact_suffix: intel
- arch: ARM
runner: macos-14
runtime: osx-arm64
min_os_version: "11.0"
artifact_suffix: arm64
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
# Use input version if provided, otherwise determine from ref
if [[ -n "${{ inputs.app_version }}" ]]; then
appVersion="${{ inputs.app_version }}"
releaseVersion="v$appVersion"
elif [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
@@ -30,9 +49,9 @@ jobs:
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
@@ -58,18 +77,11 @@ jobs:
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Setup .NET
uses: actions/setup-dotnet@v4
@@ -81,16 +93,16 @@ jobs:
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS Intel executable
- name: Build macOS ${{ matrix.arch }} executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-x64 \
--runtime ${{ matrix.runtime }} \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
@@ -103,17 +115,17 @@ jobs:
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
@@ -124,16 +136,16 @@ jobs:
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
@@ -141,12 +153,12 @@ jobs:
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
@@ -158,14 +170,14 @@ jobs:
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
@@ -196,7 +208,7 @@ jobs:
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
@@ -228,7 +240,7 @@ jobs:
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>10.15</string>
<string>${{ matrix.min_os_version }}</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
@@ -245,7 +257,7 @@ jobs:
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
@@ -255,96 +267,96 @@ jobs:
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel.pkg"
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-${{ matrix.artifact_suffix }}.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel-dev.pkg"
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-${{ matrix.artifact_suffix }}-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
@@ -353,24 +365,12 @@ jobs:
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-intel-installer
name: Cleanuparr-macos-${{ matrix.artifact_suffix }}-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}


@@ -1,11 +1,13 @@
name: Build Windows Installer
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
inputs:
app_version:
description: 'Application version'
type: string
required: false
default: ''
jobs:
build-windows-installer:
@@ -17,9 +19,13 @@ jobs:
run: |
$repoFullName = "${{ github.repository }}"
$ref = "${{ github.ref }}"
# Handle both tag events and manual dispatch
if ($ref -match "^refs/tags/") {
$inputVersion = "${{ inputs.app_version }}"
# Use input version if provided, otherwise determine from ref
if ($inputVersion -ne "") {
$appVersion = $inputVersion
$releaseVersion = "v$appVersion"
} elseif ($ref -match "^refs/tags/") {
$releaseVersion = $ref -replace "refs/tags/", ""
$appVersion = $releaseVersion -replace "^v", ""
} else {
@@ -27,15 +33,15 @@ jobs:
$releaseVersion = "dev-$(Get-Date -Format 'yyyyMMdd-HHmmss')"
$appVersion = "0.0.1-dev"
}
$repositoryName = $repoFullName.Split("/")[1]
echo "githubRepository=${{ github.repository }}" >> $env:GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $env:GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $env:GITHUB_ENV
echo "appVersion=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
echo "APP_VERSION=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
@@ -55,18 +61,11 @@ jobs:
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
- name: Download frontend artifact
uses: actions/download-artifact@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
name: frontend-dist
path: code/frontend/dist/ui/browser
- name: Setup .NET
uses: actions/setup-dotnet@v4
@@ -88,19 +87,6 @@ jobs:
run: |
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o dist /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugType=None /p:DebugSymbols=false
- name: Create sample configuration
shell: pwsh
run: |
# Create config directory
New-Item -ItemType Directory -Force -Path "config"
$config = @{
"HTTP_PORTS" = 11011
"BASE_PATH" = "/"
}
$config | ConvertTo-Json | Out-File -FilePath "config/cleanuparr.json" -Encoding UTF8
- name: Setup Inno Setup
shell: pwsh
run: |
@@ -158,14 +144,4 @@ jobs:
path: installer/${{ env.installerName }}
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
installer/${{ env.installerName }}
# Removed individual release step - handled by main release workflow


@@ -0,0 +1,36 @@
name: Deploy to Cloudflare Pages
on:
push:
branches:
- main
paths:
- 'Cloudflare/**'
- 'blacklist'
- 'blacklist_permissive'
- 'whitelist'
- 'whitelist_with_subtitles'
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy to Cloudflare Pages
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Copy root static files to Cloudflare static directory
run: |
cp blacklist Cloudflare/static/
cp blacklist_permissive Cloudflare/static/
cp whitelist Cloudflare/static/
cp whitelist_with_subtitles Cloudflare/static/
- name: Deploy to Cloudflare Pages
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_PAGES_TOKEN }}
workingDirectory: "Cloudflare"
command: pages deploy . --project-name=cleanuparr


@@ -0,0 +1,30 @@
name: Deploy to Cloudflare Pages
on:
push:
tags:
- "v*.*.*"
jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy to Cloudflare Pages
steps:
- name: Create status files
run: |
mkdir -p status
echo "{ \"version\": \"${GITHUB_REF_NAME}\" }" > status/status.json
# Cache static files for 10 minutes
cat > status/_headers << 'EOF'
/*
Cache-Control: public, max-age=600, s-maxage=600
EOF
- name: Deploy to Cloudflare Pages
uses: cloudflare/wrangler-action@v3
with:
apiToken: ${{ secrets.CLOUDFLARE_PAGES_TOKEN }}
workingDirectory: "status"
command: pages deploy . --project-name=cleanuparr-status
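For a tag such as v1.2.3, the deployed status/status.json would contain the following, served with the 10-minute Cache-Control header defined above (illustrative example, not part of the diff):

  { "version": "v1.2.3" }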

.github/workflows/dependency-review.yml

@@ -0,0 +1,45 @@
name: Dependency Review
on:
pull_request:
branches:
- main
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions:
contents: read
pull-requests: write
jobs:
dependency-review:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Dependency Review
uses: actions/dependency-review-action@v4
with:
# Fail on critical and high severity vulnerabilities
fail-on-severity: high
# Warn on moderate vulnerabilities
warn-on-severity: moderate
# Allow licenses
# allow-licenses: MIT, Apache-2.0, BSD-2-Clause, BSD-3-Clause, ISC, 0BSD
# Comment summarizes the vulnerabilities found
comment-summary-in-pr: on-failure
# Show dependency changes in PR
show-openssf-scorecard: true
vulnerability-check: true
- name: Upload dependency review results
uses: actions/upload-artifact@v4
with:
name: dependency-review-results
path: dependency-review-*.json
if-no-files-found: ignore
retention-days: 30


@@ -2,9 +2,9 @@ name: Deploy Docusaurus to GitHub Pages
on:
push:
branches: [main]
paths:
- 'docs/**'
tags:
- "v*.*.*"
workflow_dispatch: {}
permissions:
contents: read
@@ -22,11 +22,12 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
ref: main
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
node-version: 24.x
cache: yarn
cache-dependency-path: docs/yarn.lock


@@ -10,6 +10,31 @@ on:
description: 'Version to release (e.g., 1.0.0)'
required: false
default: ''
runTests:
description: 'Run test suite'
type: boolean
required: false
default: true
buildDocker:
description: 'Build Docker image'
type: boolean
required: false
default: true
pushDocker:
description: 'Push Docker image to registry'
type: boolean
required: false
default: false
buildBinaries:
description: 'Build executables and installers'
type: boolean
required: false
default: true
createRelease:
description: 'Create GitHub release'
type: boolean
required: false
default: false
jobs:
# Validate release
@@ -19,7 +44,7 @@ jobs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -47,40 +72,98 @@ jobs:
echo "app_version=$app_version" >> $GITHUB_OUTPUT
echo "release_version=$release_version" >> $GITHUB_OUTPUT
echo "is_tag=$is_tag" >> $GITHUB_OUTPUT
echo "🏷️ Release Version: $release_version"
echo "📱 App Version: $app_version"
echo "🔖 Is Tag: $is_tag"
# Run tests
test:
needs: validate
if: ${{ needs.validate.outputs.is_tag == 'true' || github.event.inputs.runTests == 'true' }}
uses: ./.github/workflows/test.yml
secrets: inherit
# Build frontend once for all build jobs and cache it
build-frontend:
needs: [validate, test]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-frontend.yml
secrets: inherit
# Build portable executables
build-executables:
needs: validate
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-executable.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build Windows installer
build-windows-installer:
needs: validate
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-windows-installer.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build macOS Intel installer
build-macos-intel:
needs: validate
uses: ./.github/workflows/build-macos-intel-installer.yml
# Build macOS installers (Intel and ARM)
build-macos:
needs: [validate, test, build-frontend]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
needs.build-frontend.result == 'success' &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildBinaries == 'true')
uses: ./.github/workflows/build-macos-installer.yml
with:
app_version: ${{ needs.validate.outputs.app_version }}
secrets: inherit
# Build macOS ARM installer
build-macos-arm:
needs: validate
uses: ./.github/workflows/build-macos-arm-installer.yml
# Build and push Docker image(s)
build-docker:
needs: [validate, test]
if: |
always() &&
needs.validate.result == 'success' &&
(needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.validate.outputs.is_tag == 'true' || github.event.inputs.buildDocker == 'true')
uses: ./.github/workflows/build-docker.yml
with:
push_docker: ${{ needs.validate.outputs.is_tag == 'true' || github.event.inputs.pushDocker == 'true' }}
secrets: inherit
# Create GitHub release
create-release:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
needs: [validate, build-executables, build-windows-installer, build-macos]
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
if: |
always() &&
needs.validate.result == 'success' &&
needs.build-executables.result == 'success' &&
needs.build-windows-installer.result == 'success' &&
needs.build-macos.result == 'success' &&
(
needs.validate.outputs.is_tag == 'true' ||
(github.event.inputs.createRelease == 'true' && github.event.inputs.buildBinaries == 'true')
)
steps:
- name: Get vault secrets
@@ -106,7 +189,7 @@ jobs:
- name: Create release
uses: softprops/action-gh-release@v2
with:
name: Cleanuparr ${{ needs.validate.outputs.release_version }}
name: ${{ needs.validate.outputs.release_version }}
tag_name: ${{ needs.validate.outputs.release_version }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
@@ -119,46 +202,56 @@ jobs:
# Summary job
summary:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
needs: [validate, test, build-frontend, build-executables, build-windows-installer, build-macos, build-docker]
runs-on: ubuntu-latest
if: always()
steps:
- name: Record workflow start time
id: workflow-start
run: |
# Get workflow start time from GitHub API
workflow_start=$(gh api repos/${{ github.repository }}/actions/runs/${{ github.run_id }} --jq '.run_started_at')
start_epoch=$(date -d "$workflow_start" +%s 2>/dev/null || date -j -f "%Y-%m-%dT%H:%M:%SZ" "$workflow_start" +%s)
echo "start=$start_epoch" >> $GITHUB_OUTPUT
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build Summary
run: |
# Calculate total workflow duration
start_time=${{ steps.workflow-start.outputs.start }}
end_time=$(date +%s)
duration=$((end_time - start_time))
minutes=$((duration / 60))
seconds=$((duration % 60))
echo "## 🏗️ Cleanuparr Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ needs.validate.outputs.release_version }}" >> $GITHUB_STEP_SUMMARY
echo "**App Version**: ${{ needs.validate.outputs.app_version }}" >> $GITHUB_STEP_SUMMARY
echo "**Is Tag**: ${{ needs.validate.outputs.is_tag }}" >> $GITHUB_STEP_SUMMARY
echo "**Total Duration**: ${minutes}m ${seconds}s" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Build Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check job results
if [[ "${{ needs.build-executables.result }}" == "success" ]]; then
echo "✅ **Portable Executables**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Portable Executables**: ${{ needs.build-executables.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-windows-installer.result }}" == "success" ]]; then
echo "✅ **Windows Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Windows Installer**: ${{ needs.build-windows-installer.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-intel.result }}" == "success" ]]; then
echo "✅ **macOS Intel Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS Intel Installer**: ${{ needs.build-macos-intel.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-arm.result }}" == "success" ]]; then
echo "✅ **macOS ARM Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS ARM Installer**: ${{ needs.build-macos-arm.result }}" >> $GITHUB_STEP_SUMMARY
fi
# Helper function to print job result
print_result() {
local name="$1"
local result="$2"
case "$result" in
success) echo "✅ **$name**: Success" >> $GITHUB_STEP_SUMMARY ;;
skipped) echo "⏭️ **$name**: Skipped" >> $GITHUB_STEP_SUMMARY ;;
*) echo "❌ **$name**: $result" >> $GITHUB_STEP_SUMMARY ;;
esac
}
print_result "Tests" "${{ needs.test.result }}"
print_result "Frontend Build" "${{ needs.build-frontend.result }}"
print_result "Portable Executables" "${{ needs.build-executables.result }}"
print_result "Windows Installer" "${{ needs.build-windows-installer.result }}"
print_result "macOS Installers (Intel & ARM)" "${{ needs.build-macos.result }}"
print_result "Docker Image Build" "${{ needs.build-docker.result }}"
echo "" >> $GITHUB_STEP_SUMMARY
echo "🎉 **Build completed!**" >> $GITHUB_STEP_SUMMARY

.github/workflows/test.yml (new file)

@@ -0,0 +1,99 @@
name: Tests
on:
push:
branches:
- main
paths:
- 'code/backend/**'
- '.github/workflows/test.yml'
pull_request:
paths:
- 'code/backend/**'
- '.github/workflows/test.yml'
workflow_call:
# Cancel in-progress runs for the same PR
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- name: Checkout repository
uses: actions/checkout@v4
timeout-minutes: 1
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Cache NuGet packages
uses: actions/cache@v4
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json', '**/*.csproj') }}
restore-keys: |
${{ runner.os }}-nuget-
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github packages_pat | PACKAGES_PAT
- name: Restore dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/cleanuparr.sln
- name: Build solution
run: dotnet build code/backend/cleanuparr.sln --configuration Release --no-restore
- name: Run tests
id: run-tests
run: dotnet test code/backend/cleanuparr.sln --configuration Release --no-build --verbosity normal --logger trx --collect:"XPlat Code Coverage" --results-directory ./coverage
- name: Upload test results
uses: actions/upload-artifact@v4
with:
name: test-results
path: ./coverage/*.trx
retention-days: 30
- name: Upload coverage reports
uses: actions/upload-artifact@v4
with:
name: coverage-report
path: ./coverage/**/coverage.cobertura.xml
retention-days: 30
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
files: ./coverage/**/coverage.cobertura.xml
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: false
flags: backend
name: backend-coverage
- name: Test Summary
run: |
echo "## Test Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
if [ "${{ steps.run-tests.outcome }}" == "success" ]; then
echo "✅ All tests passed!" >> $GITHUB_STEP_SUMMARY
else
echo "❌ Tests failed or were cancelled. Status: ${{ steps.run-tests.outcome }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "Test artifacts have been uploaded for detailed analysis." >> $GITHUB_STEP_SUMMARY

.github/workflows/version-info.yml (new file)

@@ -0,0 +1,66 @@
name: Get Version Info
on:
workflow_call:
inputs:
manual_version:
description: 'Manual version override (e.g., 1.0.0)'
required: false
type: string
default: ''
outputs:
app_version:
description: 'Application version (without v prefix)'
value: ${{ jobs.version.outputs.app_version }}
release_version:
description: 'Release version (with v prefix)'
value: ${{ jobs.version.outputs.release_version }}
is_tag:
description: 'Whether this is a tag event'
value: ${{ jobs.version.outputs.is_tag }}
repository_name:
description: 'Repository name without owner'
value: ${{ jobs.version.outputs.repository_name }}
jobs:
version:
runs-on: ubuntu-latest
outputs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
repository_name: ${{ steps.version.outputs.repository_name }}
steps:
- name: Calculate version info
id: version
run: |
repoFullName="${{ github.repository }}"
repositoryName="${repoFullName#*/}"
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag event
release_version="${GITHUB_REF##refs/tags/}"
app_version="${release_version#v}"
is_tag="true"
elif [[ -n "${{ inputs.manual_version }}" ]]; then
# Manual workflow with version
app_version="${{ inputs.manual_version }}"
release_version="v${app_version}"
is_tag="false"
else
# Development build
app_version="0.0.1-dev-$(date +%Y%m%d-%H%M%S)"
release_version="v${app_version}"
is_tag="false"
fi
echo "app_version=${app_version}" >> $GITHUB_OUTPUT
echo "release_version=${release_version}" >> $GITHUB_OUTPUT
echo "is_tag=${is_tag}" >> $GITHUB_OUTPUT
echo "repository_name=${repositoryName}" >> $GITHUB_OUTPUT
echo "📦 Repository: ${repositoryName}"
echo "🏷️ Release Version: ${release_version}"
echo "📱 App Version: ${app_version}"
echo "🔖 Is Tag: ${is_tag}"

CONTRIBUTING.md (new file)

@@ -0,0 +1,325 @@
# Contributing to Cleanuparr
Thanks for your interest in contributing to Cleanuparr! This guide will help you get started with development.
## Before You Start
### Announce Your Intent
Before starting any work, please let us know what you want to contribute:
- For existing issues: Comment on the issue stating you'd like to work on it
- For new features/changes: Create a new issue first and mention that you want to work on it
This helps us avoid redundant work, git conflicts, and contributions that may not align with the project's direction.
**Wait for approval from the maintainers before proceeding with your contribution.**
## Development Setup
### Prerequisites
- [.NET 9.0 SDK](https://dotnet.microsoft.com/download/dotnet/9.0)
- [Node.js 18+](https://nodejs.org/)
- [Git](https://git-scm.com/)
- (Optional) [Make](https://www.gnu.org/software/make/) for database migrations
- (Optional) IDE: [JetBrains Rider](https://www.jetbrains.com/rider/) or [Visual Studio](https://visualstudio.microsoft.com/)
### Repository Setup
1. Fork the repository on GitHub
2. Clone your fork locally:
```bash
git clone https://github.com/YOUR_USERNAME/Cleanuparr.git
cd Cleanuparr
```
3. Add the upstream repository:
```bash
git remote add upstream https://github.com/Cleanuparr/Cleanuparr.git
```
## Backend Development
### Initial Setup
#### 1. Create a GitHub Personal Access Token (PAT)
Cleanuparr uses GitHub Packages for NuGet dependencies. You'll need a PAT with `read:packages` permission:
1. Go to [GitHub Settings > Developer Settings > Personal Access Tokens > Tokens (classic)](https://github.com/settings/tokens)
2. Click "Generate new token" → "Generate new token (classic)"
3. Give it a descriptive name (e.g., "Cleanuparr NuGet Access")
4. Set an expiration (90 days or longer is recommended for development)
5. Select only the `read:packages` scope
6. Click "Generate token" and copy it
#### 2. Configure NuGet Source
Add the Cleanuparr NuGet repository:
```bash
dotnet nuget add source \
--username YOUR_GITHUB_USERNAME \
--password YOUR_GITHUB_PAT \
--store-password-in-clear-text \
--name Cleanuparr \
https://nuget.pkg.github.com/Cleanuparr/index.json
```
Replace `YOUR_GITHUB_USERNAME` and `YOUR_GITHUB_PAT` with your GitHub username and the PAT you created.
### Running the Backend
#### Option 1: Using .NET CLI
Navigate to the backend directory:
```bash
cd code/backend
```
Build the application:
```bash
dotnet build Cleanuparr.Api/Cleanuparr.Api.csproj
```
Run the application:
```bash
dotnet run --project Cleanuparr.Api/Cleanuparr.Api.csproj
```
Run tests:
```bash
dotnet test
```
The API will be available at http://localhost:5000
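To confirm the backend is up, you can hit the liveness endpoint exposed by the API (assuming the default port above and the `/health` route added in this changeset):
```bash
# Should return a small JSON payload like {"status":"healthy","timestamp":"..."}
curl -s http://localhost:5000/health
```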
#### Option 2: Using an IDE
For JetBrains Rider or Visual Studio:
1. Open the solution file: `code/backend/cleanuparr.sln`
2. Set `Cleanuparr.Api` as the startup project
3. Press `F5` to start the application
### Database Migrations
Cleanuparr uses two separate database contexts: `DataContext` and `EventsContext`.
#### Prerequisites
Install Make if not already installed:
- Windows: Install via [Chocolatey](https://chocolatey.org/) (`choco install make`) or use [WSL](https://docs.microsoft.com/windows/wsl/)
- macOS: Install via Homebrew (`brew install make`)
- Linux: Usually pre-installed, or install via package manager (`apt install make`, `yum install make`, etc.)
#### Creating Migrations
From the `code` directory:
For data migrations (DataContext):
```bash
make migrate-data name=YourMigrationName
```
For events migrations (EventsContext):
```bash
make migrate-events name=YourMigrationName
```
Example:
```bash
make migrate-data name=AddUserPreferences
make migrate-events name=AddAuditLogEvents
```
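Under the hood these targets presumably wrap the EF Core CLI; a hypothetical equivalent is shown below purely to illustrate the two contexts (the real Makefile may pass additional flags, so prefer the make targets):
```bash
# Hypothetical equivalents of the make targets; the actual Makefile may also pass
# --project/--startup-project, so use the make targets when in doubt
dotnet ef migrations add AddUserPreferences --context DataContext
dotnet ef migrations add AddAuditLogEvents --context EventsContext
```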
## Frontend Development
### Setup
1. Navigate to the frontend directory:
```bash
cd code/frontend
```
2. Install dependencies:
```bash
npm install
```
3. Start the development server:
```bash
npm start
```
The UI will be available at http://localhost:4200
## Documentation Development
### Setup
1. Navigate to the docs directory:
```bash
cd docs
```
2. Install dependencies:
```bash
npm install
```
3. Start the development server:
```bash
npm start
```
The documentation site will be available at http://localhost:3000
## Building with Docker
### Building a Local Docker Image
To build the Docker image locally for testing:
1. Navigate to the `code` directory:
```bash
cd code
```
2. Build the image:
```bash
docker build \
--build-arg PACKAGES_USERNAME=YOUR_GITHUB_USERNAME \
--build-arg PACKAGES_PAT=YOUR_GITHUB_PAT \
-t cleanuparr:local \
-f Dockerfile .
```
Replace `YOUR_GITHUB_USERNAME` and `YOUR_GITHUB_PAT` with your credentials.
3. Run the container:
```bash
docker run -d \
--name cleanuparr-dev \
-p 11011:11011 \
-v /path/to/config:/config \
-e PUID=1000 \
-e PGID=1000 \
-e TZ=Etc/UTC \
cleanuparr:local
```
4. Access the application at http://localhost:11011
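If the UI does not come up, checking the container state and tailing its logs is usually the quickest diagnostic:
```bash
# Confirm the container is running and watch its startup logs
docker ps --filter name=cleanuparr-dev
docker logs -f cleanuparr-dev
```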
### Building for Multiple Architectures
Use Docker Buildx for multi-platform builds:
```bash
docker buildx build \
--platform linux/amd64,linux/arm64 \
--build-arg PACKAGES_USERNAME=YOUR_GITHUB_USERNAME \
--build-arg PACKAGES_PAT=YOUR_GITHUB_PAT \
-t cleanuparr:local \
-f Dockerfile .
```
## Code Standards
### Backend (.NET/C#)
- Follow existing conventions and [Microsoft C# Coding Conventions](https://docs.microsoft.com/dotnet/csharp/fundamentals/coding-style/coding-conventions)
- Use meaningful variable and method names
- Add XML documentation comments for public APIs
- Write unit tests whenever possible
### Frontend (Angular/TypeScript)
- Follow existing conventions and the [Angular Style Guide](https://angular.io/guide/styleguide)
- Use TypeScript strict mode
- Write unit tests whenever possible
### Documentation
- Use clear, concise language
- Include code examples where appropriate
- Update relevant documentation when adding/changing features
- Check for spelling and grammar
## Submitting Your Contribution
### 1. Create a Feature Branch
```bash
git checkout -b feature/your-feature-name
# or
git checkout -b fix/your-bug-fix-name
```
### 2. Make Your Changes
- Write clean, well-documented code
- Follow the code standards outlined above
- **Test your changes thoroughly!**
### 3. Commit Your Changes
Write clear, descriptive commit messages:
```bash
git add .
git commit -m "Add feature: brief description of your changes"
```
### 4. Keep Your Branch Updated
```bash
git fetch upstream
git rebase upstream/main
```
### 5. Push to Your Fork
```bash
git push origin feature/your-feature-name
```
### 6. Create a Pull Request
1. Go to the [Cleanuparr repository](https://github.com/Cleanuparr/Cleanuparr)
2. Click "New Pull Request"
3. Select your fork and branch
4. Fill out the PR template with:
- A descriptive title (e.g., "Add support for Prowlarr integration" or "Fix memory leak in download client polling")
- Description of changes
- Related issue number
- Testing performed
- Screenshots (if applicable)
### 7. Code Review Process
- Maintainers will review your PR
- Address any feedback or requested changes
- Once approved, your PR will be merged
## Other Ways to Contribute
### Help Test New Features
We're always looking for testers to help validate new features before they are released. If you'd like to help test upcoming changes:
1. Join our [Discord community](https://discord.gg/SCtMCgtsc4)
2. Let us know you're interested in testing
3. We'll provide you with pre-release builds and testing instructions
Your feedback helps us catch issues early and deliver better releases.
## Getting Help
- Discord: Join our [Discord community](https://discord.gg/SCtMCgtsc4) for real-time help
- Issues: Check existing [GitHub issues](https://github.com/Cleanuparr/Cleanuparr/issues) or create a new one
- Documentation: Review the [complete documentation](https://cleanuparr.github.io/Cleanuparr/)
## License
By contributing to Cleanuparr, you agree that your contributions will be licensed under the same license as the project.
---
Thanks for contributing to Cleanuparr!

Cloudflare/_headers (new file)

@@ -0,0 +1,3 @@
# Cache static files for 5 minutes
/static/*
Cache-Control: public, max-age=300, s-maxage=300
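Once deployed to the Pages site, the rule can be spot-checked by inspecting response headers for any asset under `/static/` (the asset path below is hypothetical):
```bash
# Expect: cache-control: public, max-age=300, s-maxage=300
curl -sI https://cleanuparr-status.pages.dev/static/example.js | grep -i '^cache-control'
```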


@@ -0,0 +1,2 @@
thepirateheaven.org
RARBG.work


@@ -2,6 +2,11 @@ _Love this project? Give it a ⭐️ and let others know!_
# <img width="24px" src="./Logo/256.png" alt="Cleanuparr"></img> Cleanuparr
![Version](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fcleanuparr-status.pages.dev%2Fstatus.json&query=%24.version&logo=git&label=version&color=blue)
![Total Downloads](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fghcr-badge.elias.eu.org%2Fapi%2FCleanuparr%2FCleanuparr%2Fcleanuparr&query=%24.downloadCount&style=flat&logo=docker&label=Total%20Downloads&color=blue)
[![Tests](https://github.com/Cleanuparr/Cleanuparr/actions/workflows/test.yml/badge.svg?branch=main)](https://github.com/Cleanuparr/Cleanuparr/actions/workflows/test.yml)
[![Discord](https://img.shields.io/discord/1306721212587573389?color=7289DA&label=Discord&style=for-the-badge&logo=discord)](https://discord.gg/SCtMCgtsc4)
Cleanuparr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuparr can also trigger a search to replace the deleted shows/movies.
@@ -12,34 +17,86 @@ Cleanuparr was created primarily to address malicious files, such as `*.lnk` or
> **Features:**
> - Strike system to mark bad downloads.
> - Remove and block downloads that reached a maximum number of strikes.
> - Remove and block downloads that are **failing to be imported** by the arrs. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/queue-cleaner#failed-import-max-strikes)
> - Remove and block downloads that are **stalled** or in **metadata downloading** state. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/queue-cleaner#stalled-max-strikes)
> - Remove and block downloads that have a **low download speed** or **high estimated completion time**. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/queue-cleaner#slow-max-strikes)
> - Remove and block downloads blocked by qBittorrent or by Cleanuparr's **Content Blocker**. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/content-blocker)
> - Remove and block downloads that are **failing to be imported** by the arrs.
> - Remove and block downloads that are **stalled** or in **metadata downloading** state.
> - Remove and block downloads that have a **low download speed** or **high estimated completion time**.
> - Remove and block downloads blocked by qBittorrent or by Cleanuparr's **Malware Blocker**.
> - Remove and block known malware based on patterns found by the community.
> - Automatically trigger a search for downloads removed from the arrs.
> - Clean up downloads that have been **seeding** for a certain amount of time. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/download-cleaner#seeding-settings)
> - Remove downloads that are **orphaned**/have no **hardlinks**/are not referenced by the arrs anymore (with [cross-seed](https://www.cross-seed.org/) support). [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/download-cleaner#enable-unlinked-downloads-management)
> - Notify on strike or download removal. [configuration](https://cleanuparr.github.io/Cleanuparr/docs/configuration/notifications)
> - Clean up downloads that have been **seeding** for a certain amount of time.
> - Remove downloads that are **orphaned**/have no **hardlinks**/are not referenced by the arrs anymore (with [cross-seed](https://www.cross-seed.org/) support).
> - Notify on strike or download removal.
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuparr.
Cleanuparr supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
## Screenshots
## Quick Start
https://cleanuparr.github.io/Cleanuparr/docs/screenshots
> [!NOTE]
>
> 1. **Docker (Recommended)**
> Pull the Docker image from `ghcr.io/cleanuparr/cleanuparr:latest`.
>
> 2. **Unraid (for Unraid users)**
> Use the Unraid Community App.
>
> 3. **Manual Installation (if you're not using Docker)**
> Go to [Windows](#windows), [Linux](#linux) or [macOS](#macos).
## 🎯 Supported Applications
# Docs
### *Arr Applications
- **Sonarr**
- **Radarr**
- **Lidarr**
- **Readarr**
- **Whisparr**
Docs can be found [here](https://cleanuparr.github.io/Cleanuparr/).
### Download Clients
- **qBittorrent**
- **Transmission**
- **Deluge**
- **µTorrent**
### Platforms
- **Docker**
- **Windows**
- **macOS**
- **Linux**
- **Unraid**
## 🚀 Quick Start
```bash
docker run -d --name cleanuparr \
--restart unless-stopped \
-p 11011:11011 \
-v /path/to/config:/config \
-e PORT=11011 \
-e PUID=1000 \
-e PGID=1000 \
-e TZ=Etc/UTC \
ghcr.io/cleanuparr/cleanuparr:latest
```
For Docker Compose, health checks, and other installation methods, see the [Complete Installation Guide](https://cleanuparr.github.io/Cleanuparr/docs/installation/detailed), but not before reading the [Prerequisites](https://cleanuparr.github.io/Cleanuparr/docs/installation/).
### 🌐 Access the Web Interface
After installation, open your browser and navigate to:
```
http://localhost:11011
```
**Next Steps:** Check out the [📖 Complete Documentation](https://cleanuparr.github.io/Cleanuparr/) for detailed configuration guides and setup instructions.
## 📖 Documentation & Support
- **📚 [Complete Documentation](https://cleanuparr.github.io/Cleanuparr/)** - Installation guides, configuration, and troubleshooting
- **⚙️ [Configuration Guide](https://cleanuparr.github.io/Cleanuparr/docs/category/configuration)** - Set up download clients, *arr apps, and features
- **🔧 [Setup Scenarios](https://cleanuparr.github.io/Cleanuparr/docs/category/setup-scenarios)** - Common use cases and examples
- **💬 [Discord Community](https://discord.gg/SCtMCgtsc4)** - Get help and discuss with other users
- **🔗 [GitHub Releases](https://github.com/Cleanuparr/Cleanuparr/releases)** - Download binaries and view changelog
## 🤝 Contributing
We welcome contributions from the community! Whether it's bug fixes, new features, documentation improvements, or testing, your help is appreciated.
**Before contributing:** Please read our [Contributing Guide](CONTRIBUTING.md) and announce your intent to work on an issue before starting.
- **[Contributing Guide](CONTRIBUTING.md)** - Learn how to set up your development environment and submit contributions
- **[Report Issues](https://github.com/Cleanuparr/Cleanuparr/issues/new/choose)** - Found a bug? Let us know!
- **[Feature Requests](https://github.com/Cleanuparr/Cleanuparr/issues/new/choose)** - Share your ideas for new features
- **[Help Test Features](https://discord.gg/SCtMCgtsc4)** - Join Discord to test pre-release features and provide feedback
# <img style="vertical-align: middle;" width="24px" src="./Logo/256.png" alt="Cleanuparr"> <span style="vertical-align: middle;">Cleanuparr</span> <img src="https://raw.githubusercontent.com/FortAwesome/Font-Awesome/6.x/svgs/solid/x.svg" height="24px" width="30px" style="vertical-align: middle;"> <span style="vertical-align: middle;">Huntarr</span> <img style="vertical-align: middle;" width="24px" src="https://github.com/plexguide/Huntarr.io/blob/main/frontend/static/logo/512.png?raw=true" alt="Huntarr"></img>


@@ -640,6 +640,7 @@
*.spm
*.spr
*.spt
*.sql
*.sqf
*.sqx
*.sqz


@@ -1,10 +1,12 @@
# Build Angular frontend
FROM --platform=$BUILDPLATFORM node:18-alpine AS frontend-build
FROM --platform=$BUILDPLATFORM node:24-alpine AS frontend-build
WORKDIR /app
# Copy package files first for better layer caching
COPY frontend/package*.json ./
RUN npm ci && npm install -g @angular/cli
# Use cache mount for npm to speed up builds
RUN --mount=type=cache,target=/root/.npm \
npm ci && npm install -g @angular/cli
# Copy source code
COPY frontend/ .
@@ -28,14 +30,17 @@ EXPOSE 11011
# Copy source code
COPY backend/ ./backend/
# Restore dependencies
# Add NuGet source
RUN dotnet nuget add source --username ${PACKAGES_USERNAME} --password ${PACKAGES_PAT} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
# Build and publish
RUN dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
# Restore and publish with cache mount
RUN --mount=type=cache,target=/root/.nuget/packages,sharing=locked \
dotnet restore ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj -a $TARGETARCH && \
dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
-a $TARGETARCH \
-c Release \
-o /app/publish \
--no-restore \
/p:Version=${VERSION} \
/p:PublishSingleFile=true \
/p:DebugSymbols=false
@@ -45,6 +50,7 @@ FROM mcr.microsoft.com/dotnet/aspnet:9.0-bookworm-slim
# Install required packages for user management and timezone support
RUN apt-get update && apt-get install -y \
curl \
tzdata \
gosu \
&& rm -rf /var/lib/apt/lists/*


@@ -19,31 +19,28 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Application\Cleanuparr.Application.csproj" />
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MassTransit" Version="8.5.7" />
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.6" />
<PackageReference Include="Quartz" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="10.0.0" />
<PackageReference Include="Quartz" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.15.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="10.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.1.1" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<!-- API-related packages -->
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

File diff suppressed because it is too large.


@@ -87,10 +87,6 @@ public class EventsController : ControllerBase
.Take(pageSize)
.ToListAsync();
events = events
.OrderBy(e => e.Timestamp)
.ToList();
// Return paginated result
var result = new PaginatedResult<AppEvent>
{


@@ -0,0 +1,125 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Diagnostics.HealthChecks;
namespace Cleanuparr.Api.Controllers;
/// <summary>
/// Health check endpoints for Docker and Kubernetes
/// </summary>
[ApiController]
[Route("[controller]")]
public class HealthController : ControllerBase
{
private readonly HealthCheckService _healthCheckService;
private readonly ILogger<HealthController> _logger;
public HealthController(HealthCheckService healthCheckService, ILogger<HealthController> logger)
{
_healthCheckService = healthCheckService;
_logger = logger;
}
/// <summary>
/// Basic liveness probe - checks if the application is running
/// Used by Docker HEALTHCHECK and Kubernetes liveness probes
/// </summary>
[HttpGet]
[Route("/health")]
public async Task<IActionResult> GetHealth()
{
try
{
var result = await _healthCheckService.CheckHealthAsync(
registration => registration.Tags.Contains("liveness"));
return result.Status == HealthStatus.Healthy
? Ok(new { status = "healthy", timestamp = DateTime.UtcNow })
: StatusCode(503, new { status = "unhealthy", timestamp = DateTime.UtcNow });
}
catch (Exception ex)
{
_logger.LogError(ex, "Health check failed");
return StatusCode(503, new { status = "unhealthy", error = "Health check failed", timestamp = DateTime.UtcNow });
}
}
/// <summary>
/// Readiness probe - checks if the application is ready to serve traffic
/// Used by Kubernetes readiness probes
/// </summary>
[HttpGet]
[Route("/health/ready")]
public async Task<IActionResult> GetReadiness()
{
try
{
var result = await _healthCheckService.CheckHealthAsync(
registration => registration.Tags.Contains("readiness"));
if (result.Status == HealthStatus.Healthy)
{
return Ok(new { status = "ready", timestamp = DateTime.UtcNow });
}
// For readiness, we consider degraded as not ready
return StatusCode(503, new {
status = "not_ready",
timestamp = DateTime.UtcNow,
details = result.Entries.Where(e => e.Value.Status != HealthStatus.Healthy)
.ToDictionary(e => e.Key, e => new {
status = e.Value.Status.ToString().ToLowerInvariant(),
description = e.Value.Description
})
});
}
catch (Exception ex)
{
_logger.LogError(ex, "Readiness check failed");
return StatusCode(503, new { status = "not_ready", error = "Readiness check failed", timestamp = DateTime.UtcNow });
}
}
/// <summary>
/// Detailed health status - for monitoring and debugging
/// </summary>
[HttpGet]
[Route("/health/detailed")]
public async Task<IActionResult> GetDetailedHealth()
{
try
{
var result = await _healthCheckService.CheckHealthAsync();
var response = new
{
status = result.Status.ToString().ToLowerInvariant(),
timestamp = DateTime.UtcNow,
totalDuration = result.TotalDuration.TotalMilliseconds,
entries = result.Entries.ToDictionary(
e => e.Key,
e => new
{
status = e.Value.Status.ToString().ToLowerInvariant(),
description = e.Value.Description,
duration = e.Value.Duration.TotalMilliseconds,
tags = e.Value.Tags,
data = e.Value.Data,
exception = e.Value.Exception?.Message
})
};
return result.Status == HealthStatus.Healthy
? Ok(response)
: StatusCode(503, response);
}
catch (Exception ex)
{
_logger.LogError(ex, "Detailed health check failed");
return StatusCode(503, new {
status = "unhealthy",
error = "Detailed health check failed",
timestamp = DateTime.UtcNow
});
}
}
}
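A quick manual check of the three endpoints against a running instance (the port is assumed from the Docker examples elsewhere in this changeset):
```bash
# Liveness probe (used by Docker HEALTHCHECK / Kubernetes livenessProbe)
curl -s http://localhost:11011/health
# Readiness probe; prints the HTTP status, 503 when any readiness check is not healthy
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:11011/health/ready
# Detailed report for monitoring and debugging
curl -s http://localhost:11011/health/detailed
```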


@@ -1,6 +1,6 @@
using Cleanuparr.Api.Models;
using Cleanuparr.Infrastructure.Models;
using Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
@@ -76,63 +76,23 @@ public class JobsController : ControllerBase
}
}
[HttpPost("{jobType}/stop")]
public async Task<IActionResult> StopJob(JobType jobType)
[HttpPost("{jobType}/trigger")]
public async Task<IActionResult> TriggerJob(JobType jobType)
{
try
{
var result = await _jobManagementService.StopJob(jobType);
var result = await _jobManagementService.TriggerJobOnce(jobType);
if (!result)
{
return BadRequest($"Failed to stop job '{jobType}'");
return BadRequest($"Failed to trigger job '{jobType}' - job may not exist or be configured");
}
return Ok(new { Message = $"Job '{jobType}' stopped successfully" });
return Ok(new { Message = $"Job '{jobType}' triggered successfully for one-time execution" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error stopping job {jobType}", jobType);
return StatusCode(500, $"An error occurred while stopping job '{jobType}'");
}
}
[HttpPost("{jobType}/pause")]
public async Task<IActionResult> PauseJob(JobType jobType)
{
try
{
var result = await _jobManagementService.PauseJob(jobType);
if (!result)
{
return BadRequest($"Failed to pause job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' paused successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error pausing job {jobType}", jobType);
return StatusCode(500, $"An error occurred while pausing job '{jobType}'");
}
}
[HttpPost("{jobType}/resume")]
public async Task<IActionResult> ResumeJob(JobType jobType)
{
try
{
var result = await _jobManagementService.ResumeJob(jobType);
if (!result)
{
return BadRequest($"Failed to resume job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' resumed successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error resuming job {jobType}", jobType);
return StatusCode(500, $"An error occurred while resuming job '{jobType}'");
_logger.LogError(ex, "Error triggering job {jobType}", jobType);
return StatusCode(500, $"An error occurred while triggering job '{jobType}'");
}
}
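With stop/pause/resume replaced by a single trigger action, a one-off run can be requested roughly like this (the controller's route prefix is not visible in this hunk, so the `/api/jobs` path and port are assumptions):
```bash
# Trigger a one-time run of the Queue Cleaner job (route prefix assumed)
curl -s -X POST http://localhost:11011/api/jobs/QueueCleaner/trigger
```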


@@ -0,0 +1,180 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class ManualEventsController : ControllerBase
{
private readonly EventsContext _context;
public ManualEventsController(EventsContext context)
{
_context = context;
}
/// <summary>
/// Gets manual events with pagination and filtering
/// </summary>
[HttpGet]
public async Task<ActionResult<PaginatedResult<ManualEvent>>> GetManualEvents(
[FromQuery] int page = 1,
[FromQuery] int pageSize = 100,
[FromQuery] bool? isResolved = null,
[FromQuery] string? severity = null,
[FromQuery] DateTime? fromDate = null,
[FromQuery] DateTime? toDate = null,
[FromQuery] string? search = null)
{
// Validate pagination parameters
if (page < 1) page = 1;
if (pageSize < 1) pageSize = 100;
if (pageSize > 1000) pageSize = 1000; // Cap at 1000 for performance
var query = _context.ManualEvents.AsQueryable();
// Apply filters
if (isResolved.HasValue)
{
query = query.Where(e => e.IsResolved == isResolved.Value);
}
if (!string.IsNullOrWhiteSpace(severity))
{
if (Enum.TryParse<EventSeverity>(severity, true, out var severityEnum))
query = query.Where(e => e.Severity == severityEnum);
}
// Apply date range filters
if (fromDate.HasValue)
{
query = query.Where(e => e.Timestamp >= fromDate.Value);
}
if (toDate.HasValue)
{
query = query.Where(e => e.Timestamp <= toDate.Value);
}
// Apply search filter if provided
if (!string.IsNullOrWhiteSpace(search))
{
string pattern = EventsContext.GetLikePattern(search);
query = query.Where(e =>
EF.Functions.Like(e.Message, pattern) ||
EF.Functions.Like(e.Data, pattern)
);
}
// Count total matching records for pagination
var totalCount = await query.CountAsync();
// Calculate pagination
var totalPages = (int)Math.Ceiling(totalCount / (double)pageSize);
var skip = (page - 1) * pageSize;
// Get paginated data
var events = await query
.OrderByDescending(e => e.Timestamp)
.Skip(skip)
.Take(pageSize)
.ToListAsync();
// Return paginated result
var result = new PaginatedResult<ManualEvent>
{
Items = events,
Page = page,
PageSize = pageSize,
TotalCount = totalCount,
TotalPages = totalPages
};
return Ok(result);
}
/// <summary>
/// Gets a specific manual event by ID
/// </summary>
[HttpGet("{id}")]
public async Task<ActionResult<ManualEvent>> GetManualEvent(Guid id)
{
var eventEntity = await _context.ManualEvents.FindAsync(id);
if (eventEntity == null)
return NotFound();
return Ok(eventEntity);
}
/// <summary>
/// Marks a manual event as resolved
/// </summary>
[HttpPost("{id}/resolve")]
public async Task<ActionResult> ResolveManualEvent(Guid id)
{
var eventEntity = await _context.ManualEvents.FindAsync(id);
if (eventEntity == null)
return NotFound();
eventEntity.IsResolved = true;
await _context.SaveChangesAsync();
return Ok();
}
/// <summary>
/// Gets manual event statistics
/// </summary>
[HttpGet("stats")]
public async Task<ActionResult<object>> GetManualEventStats()
{
var stats = new
{
TotalEvents = await _context.ManualEvents.CountAsync(),
UnresolvedEvents = await _context.ManualEvents.CountAsync(e => !e.IsResolved),
ResolvedEvents = await _context.ManualEvents.CountAsync(e => e.IsResolved),
EventsBySeverity = await _context.ManualEvents
.GroupBy(e => e.Severity)
.Select(g => new { Severity = g.Key.ToString(), Count = g.Count() })
.ToListAsync(),
UnresolvedBySeverity = await _context.ManualEvents
.Where(e => !e.IsResolved)
.GroupBy(e => e.Severity)
.Select(g => new { Severity = g.Key.ToString(), Count = g.Count() })
.ToListAsync()
};
return Ok(stats);
}
/// <summary>
/// Gets unique severities for manual events
/// </summary>
[HttpGet("severities")]
public async Task<ActionResult<List<string>>> GetSeverities()
{
var severities = Enum.GetNames(typeof(EventSeverity)).ToList();
return Ok(severities);
}
/// <summary>
/// Manually triggers cleanup of old resolved events
/// </summary>
[HttpPost("cleanup")]
public async Task<ActionResult<object>> CleanupOldResolvedEvents([FromQuery] int retentionDays = 30)
{
var cutoffDate = DateTime.UtcNow.AddDays(-retentionDays);
var deletedCount = await _context.ManualEvents
.Where(e => e.IsResolved && e.Timestamp < cutoffDate)
.ExecuteDeleteAsync();
return Ok(new { DeletedCount = deletedCount });
}
}
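Because the controller is routed at `api/[controller]`, these endpoints resolve under `/api/manualevents` (route matching is case-insensitive); typical calls, with host and port assumed:
```bash
# List unresolved events, newest first
curl -s 'http://localhost:11011/api/manualevents?isResolved=false&pageSize=50'
# Aggregate counts per severity
curl -s http://localhost:11011/api/manualevents/stats
# Mark a specific event as resolved (replace the GUID with a real event id)
curl -s -X POST http://localhost:11011/api/manualevents/00000000-0000-0000-0000-000000000000/resolve
# Purge resolved events older than 30 days
curl -s -X POST 'http://localhost:11011/api/manualevents/cleanup?retentionDays=30'
```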


@@ -0,0 +1 @@
// Queue rules endpoints have moved to Cleanuparr.Api.Features.QueueCleaner.Controllers


@@ -52,6 +52,10 @@ public class StatusController : ControllerBase
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Lidarr);
var readarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Readarr);
var status = new
{
@@ -80,6 +84,10 @@ public class StatusController : ControllerBase
Lidarr = new
{
InstanceCount = lidarrConfig.Instances.Count
},
Readarr = new
{
InstanceCount = readarrConfig.Instances.Count
}
}
};


@@ -1,11 +1,10 @@
using System.Text.Json.Serialization;
using Cleanuparr.Api.Middleware;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Logging;
using Microsoft.AspNetCore.Http.Json;
using Microsoft.OpenApi.Models;
using System.Text;
using Cleanuparr.Api.Middleware;
using Microsoft.Extensions.Options;
namespace Cleanuparr.Api.DependencyInjection;
@@ -15,15 +14,21 @@ public static class ApiDI
{
services.Configure<JsonOptions>(options =>
{
options.SerializerOptions.PropertyNameCaseInsensitive = true;
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.SerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
// Make JsonSerializerOptions available for injection
services.AddSingleton(sp =>
sp.GetRequiredService<IOptions<JsonOptions>>().Value.SerializerOptions);
// Add API-specific services
services
.AddControllers()
.AddJsonOptions(options =>
{
options.JsonSerializerOptions.PropertyNameCaseInsensitive = true;
options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.JsonSerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
@@ -34,28 +39,12 @@ public static class ApiDI
.AddSignalR()
.AddJsonProtocol(options =>
{
options.PayloadSerializerOptions.PropertyNameCaseInsensitive = true;
options.PayloadSerializerOptions.Converters.Add(new JsonStringEnumConverter());
});
// Add health status broadcaster
services.AddHostedService<HealthStatusBroadcaster>();
// Add logging initializer service
services.AddHostedService<LoggingInitializer>();
services.AddSwaggerGen(options =>
{
options.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Cleanuparr API",
Version = "v1",
Description = "API for managing media downloads and cleanups",
Contact = new OpenApiContact
{
Name = "Cleanuparr Team"
}
});
});
return services;
}
@@ -70,33 +59,15 @@ public static class ApiDI
// Serve static files with caching
app.UseStaticFiles(new StaticFileOptions
{
OnPrepareResponse = ctx =>
{
// Cache static assets for 30 days
// if (ctx.File.Name.EndsWith(".js") || ctx.File.Name.EndsWith(".css"))
// {
// ctx.Context.Response.Headers.CacheControl = "public,max-age=2592000";
// }
}
OnPrepareResponse = _ => {}
});
// Add the global exception handling middleware first
app.UseMiddleware<ExceptionMiddleware>();
app.UseCors("Any");
app.UseRouting();
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint("v1/swagger.json", "Cleanuparr API v1");
options.RoutePrefix = "swagger";
options.DocumentTitle = "Cleanuparr API Documentation";
});
}
app.UseAuthorization();
app.MapControllers();
@@ -142,6 +113,38 @@ public static class ApiDI
// Map SignalR hubs
app.MapHub<HealthStatusHub>("/api/hubs/health");
app.MapHub<AppHub>("/api/hubs/app");
app.MapGet("/manifest.webmanifest", (HttpContext context) =>
{
var basePath = context.Request.PathBase.HasValue
? context.Request.PathBase.Value
: "/";
var manifest = new
{
name = "Cleanuparr",
short_name = "Cleanuparr",
start_url = basePath,
display = "standalone",
background_color = "#ffffff",
theme_color = "#ffffff",
icons = new[]
{
new {
src = "icons/icon-192x192.png",
sizes = "192x192",
type = "image/png"
},
new {
src = "icons/icon-512x512.png",
sizes = "512x512",
type = "image/png"
}
}
};
return Results.Json(manifest, contentType: "application/manifest+json");
});
return app;
}
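The generated manifest can be fetched directly to confirm the base-path handling (port assumed from the Docker examples):
```bash
# Returns the PWA manifest with content type application/manifest+json
curl -s http://localhost:11011/manifest.webmanifest
```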


@@ -1,10 +1,5 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Shared.Helpers;
using Cleanuparr.Infrastructure.Logging;
using Serilog;
using Serilog.Events;
using Serilog.Templates;
using Serilog.Templates.Themes;
namespace Cleanuparr.Api.DependencyInjection;
@@ -12,82 +7,10 @@ public static class LoggingDI
{
public static ILoggingBuilder AddLogging(this ILoggingBuilder builder)
{
Log.Logger = GetDefaultLoggerConfiguration().CreateLogger();
Log.Logger = LoggingConfigManager
.CreateLoggerConfiguration()
.CreateLogger();
return builder.ClearProviders().AddSerilog();
}
public static LoggerConfiguration GetDefaultLoggerConfiguration()
{
LoggerConfiguration logConfig = new();
const string categoryTemplate = "{#if Category is not null} {Concat('[',Category,']'),CAT_PAD}{#end}";
const string jobNameTemplate = "{#if JobName is not null} {Concat('[',JobName,']'),JOB_PAD}{#end}";
const string consoleOutputTemplate = $"[{{@t:yyyy-MM-dd HH:mm:ss.fff}} {{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m}}\n{{@x}}";
const string fileOutputTemplate = $"{{@t:yyyy-MM-dd HH:mm:ss.fff zzz}} [{{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m:lj}}\n{{@x}}";
// Determine job name padding
List<string> jobNames = [nameof(JobType.QueueCleaner), nameof(JobType.ContentBlocker), nameof(JobType.DownloadCleaner)];
int jobPadding = jobNames.Max(x => x.Length) + 2;
// Determine instance name padding
List<string> categoryNames = [
InstanceType.Sonarr.ToString(),
InstanceType.Radarr.ToString(),
InstanceType.Lidarr.ToString(),
InstanceType.Readarr.ToString(),
InstanceType.Whisparr.ToString(),
"SYSTEM"
];
int catPadding = categoryNames.Max(x => x.Length) + 2;
// Apply padding values to templates
string consoleTemplate = consoleOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
string fileTemplate = fileOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
// Configure base logger with dynamic level control
logConfig
.MinimumLevel.Is(LogEventLevel.Information)
.Enrich.FromLogContext()
.WriteTo.Console(new ExpressionTemplate(consoleTemplate, theme: TemplateTheme.Literate));
// Create the logs directory
string logsPath = Path.Combine(ConfigurationPathProvider.GetConfigPath(), "logs");
if (!Directory.Exists(logsPath))
{
try
{
Directory.CreateDirectory(logsPath);
}
catch (Exception exception)
{
throw new Exception($"Failed to create log directory | {logsPath}", exception);
}
}
// Add main log file
logConfig.WriteTo.File(
path: Path.Combine(logsPath, "cleanuparr-.txt"),
formatter: new ExpressionTemplate(fileTemplate),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true,
shared: true
);
logConfig
.MinimumLevel.Override("MassTransit", LogEventLevel.Warning)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.Enrich.WithProperty("ApplicationName", "Cleanuparr");
return logConfig;
}
}


@@ -1,11 +1,13 @@
using System.Text.Json.Serialization;
using Cleanuparr.Domain.Entities.Arr;
using Cleanuparr.Infrastructure.Features.DownloadHunter.Consumers;
using Cleanuparr.Infrastructure.Features.DownloadRemover.Consumers;
using Cleanuparr.Infrastructure.Features.Notifications.Consumers;
using Cleanuparr.Infrastructure.Features.Notifications.Models;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Data.Models.Arr;
using Infrastructure.Verticals.Notifications.Models;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
@@ -15,22 +17,26 @@ public static class MainDI
{
public static IServiceCollection AddInfrastructure(this IServiceCollection services, IConfiguration configuration) =>
services
.AddLogging(builder => builder.ClearProviders().AddConsole())
.AddHttpClients(configuration)
.AddSingleton<MemoryCache>()
.AddSingleton<IMemoryCache>(serviceProvider => serviceProvider.GetRequiredService<MemoryCache>())
.AddServices()
.AddHealthServices()
.AddQuartzServices(configuration)
.AddNotifications(configuration)
.AddNotifications()
.AddMassTransit(config =>
{
config.DisableUsageTelemetry();
config.AddConsumer<DownloadRemoverConsumer<SearchItem>>();
config.AddConsumer<DownloadRemoverConsumer<SonarrSearchItem>>();
config.AddConsumer<DownloadRemoverConsumer<SeriesSearchItem>>();
config.AddConsumer<DownloadHunterConsumer<SearchItem>>();
config.AddConsumer<DownloadHunterConsumer<SeriesSearchItem>>();
config.AddConsumer<NotificationConsumer<FailedImportStrikeNotification>>();
config.AddConsumer<NotificationConsumer<StalledStrikeNotification>>();
config.AddConsumer<NotificationConsumer<SlowStrikeNotification>>();
config.AddConsumer<NotificationConsumer<SlowSpeedStrikeNotification>>();
config.AddConsumer<NotificationConsumer<SlowTimeStrikeNotification>>();
config.AddConsumer<NotificationConsumer<QueueItemDeletedNotification>>();
config.AddConsumer<NotificationConsumer<DownloadCleanedNotification>>();
config.AddConsumer<NotificationConsumer<CategoryChangedNotification>>();
@@ -39,6 +45,7 @@ public static class MainDI
{
cfg.ConfigureJsonSerializerOptions(options =>
{
options.PropertyNameCaseInsensitive = true;
options.Converters.Add(new JsonStringEnumConverter());
options.ReferenceHandler = ReferenceHandler.IgnoreCycles;
@@ -48,7 +55,15 @@ public static class MainDI
cfg.ReceiveEndpoint("download-remover-queue", e =>
{
e.ConfigureConsumer<DownloadRemoverConsumer<SearchItem>>(context);
e.ConfigureConsumer<DownloadRemoverConsumer<SonarrSearchItem>>(context);
e.ConfigureConsumer<DownloadRemoverConsumer<SeriesSearchItem>>(context);
e.ConcurrentMessageLimit = 2;
e.PrefetchCount = 2;
});
cfg.ReceiveEndpoint("download-hunter-queue", e =>
{
e.ConfigureConsumer<DownloadHunterConsumer<SearchItem>>(context);
e.ConfigureConsumer<DownloadHunterConsumer<SeriesSearchItem>>(context);
e.ConcurrentMessageLimit = 1;
e.PrefetchCount = 1;
});
@@ -57,7 +72,8 @@ public static class MainDI
{
e.ConfigureConsumer<NotificationConsumer<FailedImportStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<StalledStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<SlowStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<SlowSpeedStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<SlowTimeStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<QueueItemDeletedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<DownloadCleanedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<CategoryChangedNotification>>(context);
@@ -83,9 +99,17 @@ public static class MainDI
/// </summary>
private static IServiceCollection AddHealthServices(this IServiceCollection services) =>
services
// Register the health check service
// Register the existing health check service for download clients
.AddSingleton<IHealthCheckService, HealthCheckService>()
// Register the background service for periodic health checks
.AddHostedService<HealthCheckBackgroundService>();
.AddHostedService<HealthCheckBackgroundService>()
// Add ASP.NET Core health checks
.AddHealthChecks()
.AddCheck<ApplicationHealthCheck>("application", tags: ["liveness"])
.AddCheck<DatabaseHealthCheck>("database", tags: ["readiness"])
.AddCheck<FileSystemHealthCheck>("filesystem", tags: ["readiness"])
.AddCheck<DownloadClientsHealthCheck>("download_clients", tags: ["readiness"])
.Services;
}


@@ -1,20 +1,20 @@
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Apprise;
using Cleanuparr.Infrastructure.Features.Notifications.Notifiarr;
using Infrastructure.Verticals.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Ntfy;
namespace Cleanuparr.Api.DependencyInjection;
public static class NotificationsDI
{
public static IServiceCollection AddNotifications(this IServiceCollection services, IConfiguration configuration) =>
public static IServiceCollection AddNotifications(this IServiceCollection services) =>
services
// Notification configs are now managed through ConfigManager
.AddTransient<INotifiarrProxy, NotifiarrProxy>()
.AddTransient<INotificationProvider, NotifiarrProvider>()
.AddTransient<IAppriseProxy, AppriseProxy>()
.AddTransient<INotificationProvider, AppriseProvider>()
.AddTransient<INotificationPublisher, NotificationPublisher>()
.AddTransient<INotificationFactory, NotificationFactory>()
.AddTransient<NotificationService>();
.AddScoped<INotifiarrProxy, NotifiarrProxy>()
.AddScoped<IAppriseProxy, AppriseProxy>()
.AddScoped<INtfyProxy, NtfyProxy>()
.AddScoped<INotificationConfigurationService, NotificationConfigurationService>()
.AddScoped<INotificationProviderFactory, NotificationProviderFactory>()
.AddScoped<NotificationProviderFactory>()
.AddScoped<INotificationPublisher, NotificationPublisher>()
.AddScoped<NotificationService>();
}


@@ -1,21 +1,21 @@
using Cleanuparr.Application.Features.ContentBlocker;
using Cleanuparr.Application.Features.DownloadCleaner;
using Cleanuparr.Application.Features.QueueCleaner;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.ContentBlocker;
using Cleanuparr.Infrastructure.Features.BlacklistSync;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadHunter;
using Cleanuparr.Infrastructure.Features.DownloadHunter.Interfaces;
using Cleanuparr.Infrastructure.Features.DownloadRemover;
using Cleanuparr.Infrastructure.Features.DownloadRemover.Interfaces;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Features.MalwareBlocker;
using Cleanuparr.Infrastructure.Features.Security;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Infrastructure.Interceptors;
using Infrastructure.Services.Interfaces;
using Infrastructure.Verticals.Files;
namespace Cleanuparr.Api.DependencyInjection;
@@ -23,31 +23,39 @@ public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddSingleton<IEncryptionService, AesEncryptionService>()
.AddTransient<SensitiveDataJsonConverter>()
.AddTransient<EventsContext>()
.AddTransient<DataContext>()
.AddTransient<EventPublisher>()
.AddScoped<IEncryptionService, AesEncryptionService>()
.AddScoped<SensitiveDataJsonConverter>()
.AddScoped<EventsContext>()
.AddScoped<DataContext>()
.AddScoped<EventPublisher>()
.AddHostedService<EventCleanupService>()
// API services
.AddScoped<IDryRunInterceptor, DryRunInterceptor>()
.AddScoped<CertificateValidationService>()
.AddScoped<SonarrClient>()
.AddScoped<RadarrClient>()
.AddScoped<LidarrClient>()
.AddScoped<ReadarrClient>()
.AddScoped<WhisparrClient>()
.AddScoped<ArrClientFactory>()
.AddScoped<QueueCleaner>()
.AddScoped<BlacklistSynchronizer>()
.AddScoped<MalwareBlocker>()
.AddScoped<DownloadCleaner>()
.AddScoped<IQueueItemRemover, QueueItemRemover>()
.AddScoped<IDownloadHunter, DownloadHunter>()
.AddScoped<IFilenameEvaluator, FilenameEvaluator>()
.AddScoped<IHardLinkFileService, HardLinkFileService>()
.AddScoped<UnixHardLinkFileService>()
.AddScoped<WindowsHardLinkFileService>()
.AddScoped<ArrQueueIterator>()
.AddScoped<DownloadServiceFactory>()
.AddScoped<IStriker, Striker>()
.AddScoped<FileReader>()
.AddScoped<IRuleManager, RuleManager>()
.AddScoped<IRuleEvaluator, RuleEvaluator>()
.AddScoped<IRuleIntervalValidator, RuleIntervalValidator>()
.AddSingleton<IJobManagementService, JobManagementService>()
// Core services
.AddTransient<IDryRunInterceptor, DryRunInterceptor>()
.AddTransient<CertificateValidationService>()
.AddTransient<SonarrClient>()
.AddTransient<RadarrClient>()
.AddTransient<LidarrClient>()
.AddTransient<ArrClientFactory>()
.AddTransient<QueueCleaner>()
.AddTransient<ContentBlocker>()
.AddTransient<DownloadCleaner>()
.AddTransient<IQueueItemRemover, QueueItemRemover>()
.AddTransient<IFilenameEvaluator, FilenameEvaluator>()
.AddTransient<IHardLinkFileService, HardLinkFileService>()
.AddTransient<UnixHardLinkFileService>()
.AddTransient<WindowsHardLinkFileService>()
.AddTransient<ArrQueueIterator>()
.AddTransient<DownloadServiceFactory>()
.AddTransient<IStriker, Striker>()
.AddSingleton<BlocklistProvider>();
.AddSingleton<BlocklistProvider>()
.AddSingleton<AppStatusSnapshot>()
.AddHostedService<AppStatusRefreshService>();
}


@@ -0,0 +1,37 @@
using System;
using System.ComponentModel.DataAnnotations;
using Cleanuparr.Persistence.Models.Configuration.Arr;
namespace Cleanuparr.Api.Features.Arr.Contracts.Requests;
public sealed record ArrInstanceRequest
{
public bool Enabled { get; init; } = true;
[Required]
public required string Name { get; init; }
[Required]
public required string Url { get; init; }
[Required]
public required string ApiKey { get; init; }
public ArrInstance ToEntity(Guid configId) => new()
{
Enabled = Enabled,
Name = Name,
Url = new Uri(Url),
ApiKey = ApiKey,
ArrConfigId = configId,
};
public void ApplyTo(ArrInstance instance)
{
instance.Enabled = Enabled;
instance.Name = Name;
instance.Url = new Uri(Url);
instance.ApiKey = ApiKey;
}
}


@@ -0,0 +1,6 @@
namespace Cleanuparr.Api.Features.Arr.Contracts.Requests;
public sealed record UpdateArrConfigRequest
{
public short FailedImportMaxStrikes { get; init; } = -1;
}


@@ -0,0 +1,272 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.Arr.Contracts.Requests;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr.Dtos;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Mapster;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.Arr.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class ArrConfigController : ControllerBase
{
private readonly ILogger<ArrConfigController> _logger;
private readonly DataContext _dataContext;
public ArrConfigController(
ILogger<ArrConfigController> logger,
DataContext dataContext)
{
_logger = logger;
_dataContext = dataContext;
}
[HttpGet("sonarr")]
public Task<IActionResult> GetSonarrConfig() => GetArrConfig(InstanceType.Sonarr);
[HttpGet("radarr")]
public Task<IActionResult> GetRadarrConfig() => GetArrConfig(InstanceType.Radarr);
[HttpGet("lidarr")]
public Task<IActionResult> GetLidarrConfig() => GetArrConfig(InstanceType.Lidarr);
[HttpGet("readarr")]
public Task<IActionResult> GetReadarrConfig() => GetArrConfig(InstanceType.Readarr);
[HttpGet("whisparr")]
public Task<IActionResult> GetWhisparrConfig() => GetArrConfig(InstanceType.Whisparr);
[HttpPut("sonarr")]
public Task<IActionResult> UpdateSonarrConfig([FromBody] UpdateArrConfigRequest request)
=> UpdateArrConfig(InstanceType.Sonarr, request);
[HttpPut("radarr")]
public Task<IActionResult> UpdateRadarrConfig([FromBody] UpdateArrConfigRequest request)
=> UpdateArrConfig(InstanceType.Radarr, request);
[HttpPut("lidarr")]
public Task<IActionResult> UpdateLidarrConfig([FromBody] UpdateArrConfigRequest request)
=> UpdateArrConfig(InstanceType.Lidarr, request);
[HttpPut("readarr")]
public Task<IActionResult> UpdateReadarrConfig([FromBody] UpdateArrConfigRequest request)
=> UpdateArrConfig(InstanceType.Readarr, request);
[HttpPut("whisparr")]
public Task<IActionResult> UpdateWhisparrConfig([FromBody] UpdateArrConfigRequest request)
=> UpdateArrConfig(InstanceType.Whisparr, request);
[HttpPost("sonarr/instances")]
public Task<IActionResult> CreateSonarrInstance([FromBody] ArrInstanceRequest request)
=> CreateArrInstance(InstanceType.Sonarr, request);
[HttpPut("sonarr/instances/{id}")]
public Task<IActionResult> UpdateSonarrInstance(Guid id, [FromBody] ArrInstanceRequest request)
=> UpdateArrInstance(InstanceType.Sonarr, id, request);
[HttpDelete("sonarr/instances/{id}")]
public Task<IActionResult> DeleteSonarrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Sonarr, id);
[HttpPost("radarr/instances")]
public Task<IActionResult> CreateRadarrInstance([FromBody] ArrInstanceRequest request)
=> CreateArrInstance(InstanceType.Radarr, request);
[HttpPut("radarr/instances/{id}")]
public Task<IActionResult> UpdateRadarrInstance(Guid id, [FromBody] ArrInstanceRequest request)
=> UpdateArrInstance(InstanceType.Radarr, id, request);
[HttpDelete("radarr/instances/{id}")]
public Task<IActionResult> DeleteRadarrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Radarr, id);
[HttpPost("lidarr/instances")]
public Task<IActionResult> CreateLidarrInstance([FromBody] ArrInstanceRequest request)
=> CreateArrInstance(InstanceType.Lidarr, request);
[HttpPut("lidarr/instances/{id}")]
public Task<IActionResult> UpdateLidarrInstance(Guid id, [FromBody] ArrInstanceRequest request)
=> UpdateArrInstance(InstanceType.Lidarr, id, request);
[HttpDelete("lidarr/instances/{id}")]
public Task<IActionResult> DeleteLidarrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Lidarr, id);
[HttpPost("readarr/instances")]
public Task<IActionResult> CreateReadarrInstance([FromBody] ArrInstanceRequest request)
=> CreateArrInstance(InstanceType.Readarr, request);
[HttpPut("readarr/instances/{id}")]
public Task<IActionResult> UpdateReadarrInstance(Guid id, [FromBody] ArrInstanceRequest request)
=> UpdateArrInstance(InstanceType.Readarr, id, request);
[HttpDelete("readarr/instances/{id}")]
public Task<IActionResult> DeleteReadarrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Readarr, id);
[HttpPost("whisparr/instances")]
public Task<IActionResult> CreateWhisparrInstance([FromBody] ArrInstanceRequest request)
=> CreateArrInstance(InstanceType.Whisparr, request);
[HttpPut("whisparr/instances/{id}")]
public Task<IActionResult> UpdateWhisparrInstance(Guid id, [FromBody] ArrInstanceRequest request)
=> UpdateArrInstance(InstanceType.Whisparr, id, request);
[HttpDelete("whisparr/instances/{id}")]
public Task<IActionResult> DeleteWhisparrInstance(Guid id)
=> DeleteArrInstance(InstanceType.Whisparr, id);
private async Task<IActionResult> GetArrConfig(InstanceType type)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == type);
config.Instances = config.Instances
.OrderBy(i => i.Name)
.ToList();
return Ok(config.Adapt<ArrConfigDto>());
}
finally
{
DataContext.Lock.Release();
}
}
private async Task<IActionResult> UpdateArrConfig(InstanceType type, UpdateArrConfigRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ArrConfigs
.FirstAsync(x => x.Type == type);
config.FailedImportMaxStrikes = request.FailedImportMaxStrikes;
config.Validate();
await _dataContext.SaveChangesAsync();
return Ok(new { Message = $"{type} configuration updated successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save {Type} configuration", type);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task<IActionResult> CreateArrInstance(InstanceType type, ArrInstanceRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ArrConfigs
.FirstAsync(x => x.Type == type);
var instance = request.ToEntity(config.Id);
await _dataContext.ArrInstances.AddAsync(instance);
await _dataContext.SaveChangesAsync();
return CreatedAtAction(GetConfigActionName(type), new { id = instance.Id }, instance.Adapt<ArrInstanceDto>());
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create {Type} instance", type);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task<IActionResult> UpdateArrInstance(InstanceType type, Guid id, ArrInstanceRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ArrConfigs
.Include(c => c.Instances)
.FirstAsync(x => x.Type == type);
var instance = config.Instances.FirstOrDefault(i => i.Id == id);
if (instance is null)
{
return NotFound($"{type} instance with ID {id} not found");
}
request.ApplyTo(instance);
await _dataContext.SaveChangesAsync();
return Ok(instance.Adapt<ArrInstanceDto>());
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update {Type} instance with ID {Id}", type, id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task<IActionResult> DeleteArrInstance(InstanceType type, Guid id)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ArrConfigs
.Include(c => c.Instances)
.FirstAsync(x => x.Type == type);
var instance = config.Instances.FirstOrDefault(i => i.Id == id);
if (instance is null)
{
return NotFound($"{type} instance with ID {id} not found");
}
config.Instances.Remove(instance);
await _dataContext.SaveChangesAsync();
return NoContent();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to delete {Type} instance with ID {Id}", type, id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private static string GetConfigActionName(InstanceType type) => type switch
{
InstanceType.Sonarr => nameof(GetSonarrConfig),
InstanceType.Radarr => nameof(GetRadarrConfig),
InstanceType.Lidarr => nameof(GetLidarrConfig),
InstanceType.Readarr => nameof(GetReadarrConfig),
InstanceType.Whisparr => nameof(GetWhisparrConfig),
_ => nameof(GetSonarrConfig),
};
}
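
Not part of the diff: a minimal sketch of how a client might exercise the routes defined above, assuming the API is reachable at a placeholder base address and that the JSON property names follow the request contracts (ArrInstanceRequest and UpdateArrConfigRequest). All values below are placeholders.

// Hypothetical caller for the Sonarr endpoints above; base address, URL and API key are placeholders.
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

public static class ArrConfigClientSample
{
    public static async Task RunAsync()
    {
        using var http = new HttpClient { BaseAddress = new Uri("http://localhost:11011") };

        // POST api/configuration/sonarr/instances -> CreateSonarrInstance
        var created = await http.PostAsJsonAsync("api/configuration/sonarr/instances", new
        {
            enabled = true,
            name = "sonarr-main",
            url = "http://sonarr:8989",
            apiKey = "<api-key>"
        });
        created.EnsureSuccessStatusCode();

        // PUT api/configuration/sonarr -> UpdateSonarrConfig
        var updated = await http.PutAsJsonAsync("api/configuration/sonarr", new { failedImportMaxStrikes = 5 });
        updated.EnsureSuccessStatusCode();
    }
}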

View File

@@ -0,0 +1,26 @@
using System;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;
namespace Cleanuparr.Api.Features.BlacklistSync.Contracts.Requests;
public sealed record UpdateBlacklistSyncConfigRequest
{
public bool Enabled { get; init; }
public string? BlacklistPath { get; init; }
/// <summary>
/// Applies the request to the provided configuration instance.
/// </summary>
public BlacklistSyncConfig ApplyTo(BlacklistSyncConfig config)
{
config.Enabled = Enabled;
config.BlacklistPath = BlacklistPath;
return config;
}
public bool HasPathChanged(string? currentPath)
=> !string.Equals(currentPath, BlacklistPath, StringComparison.InvariantCultureIgnoreCase);
}
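
HasPathChanged compares paths case-insensitively, so only a real path change (not a casing change) counts, and the controller below re-triggers the synchronizer when the path changes while the job is enabled. A small illustrative sketch, not part of the diff:

using Cleanuparr.Api.Features.BlacklistSync.Contracts.Requests;

public static class BlacklistPathChangeSample
{
    public static void Run()
    {
        var request = new UpdateBlacklistSyncConfigRequest
        {
            Enabled = true,
            BlacklistPath = "https://example.com/Blacklist.json"
        };

        // Case-only difference: not considered a change (comparison is case-insensitive).
        bool changed = request.HasPathChanged("https://example.com/blacklist.json"); // false

        // Going from no path to a path is a change and re-triggers the job when enabled.
        bool changedFromNull = request.HasPathChanged(null); // true
    }
}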

View File

@@ -0,0 +1,100 @@
using System;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.BlacklistSync.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.BlacklistSync.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class BlacklistSyncConfigController : ControllerBase
{
private readonly ILogger<BlacklistSyncConfigController> _logger;
private readonly DataContext _dataContext;
private readonly IJobManagementService _jobManagementService;
public BlacklistSyncConfigController(
ILogger<BlacklistSyncConfigController> logger,
DataContext dataContext,
IJobManagementService jobManagementService)
{
_logger = logger;
_dataContext = dataContext;
_jobManagementService = jobManagementService;
}
[HttpGet("blacklist_sync")]
public async Task<IActionResult> GetBlacklistSyncConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.BlacklistSyncConfigs
.AsNoTracking()
.FirstAsync();
return Ok(config);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("blacklist_sync")]
public async Task<IActionResult> UpdateBlacklistSyncConfig([FromBody] UpdateBlacklistSyncConfigRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.BlacklistSyncConfigs
.FirstAsync();
bool enabledChanged = config.Enabled != request.Enabled;
bool becameEnabled = !config.Enabled && request.Enabled;
bool pathChanged = request.HasPathChanged(config.BlacklistPath);
request.ApplyTo(config);
config.Validate();
await _dataContext.SaveChangesAsync();
if (enabledChanged)
{
if (becameEnabled)
{
_logger.LogInformation("BlacklistSynchronizer enabled, starting job");
await _jobManagementService.StartJob(JobType.BlacklistSynchronizer, null, config.CronExpression);
await _jobManagementService.TriggerJobOnce(JobType.BlacklistSynchronizer);
}
else
{
_logger.LogInformation("BlacklistSynchronizer disabled, stopping the job");
await _jobManagementService.StopJob(JobType.BlacklistSynchronizer);
}
}
else if (pathChanged && config.Enabled)
{
_logger.LogDebug("BlacklistSynchronizer path changed");
await _jobManagementService.TriggerJobOnce(JobType.BlacklistSynchronizer);
}
return Ok(new { Message = "BlacklistSynchronizer configuration updated successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save BlacklistSync configuration");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
}

View File

@@ -0,0 +1,55 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
public record UpdateDownloadCleanerConfigRequest
{
public bool Enabled { get; init; }
public string CronExpression { get; init; } = "0 0 * * * ?";
/// <summary>
/// Indicates whether to use the CronExpression directly or convert from a user-friendly schedule.
/// </summary>
public bool UseAdvancedScheduling { get; init; }
public List<CleanCategoryRequest> Categories { get; init; } = [];
public bool DeletePrivate { get; init; }
/// <summary>
/// Indicates whether unlinked download handling is enabled.
/// </summary>
public bool UnlinkedEnabled { get; init; }
public string UnlinkedTargetCategory { get; init; } = "cleanuparr-unlinked";
public bool UnlinkedUseTag { get; init; }
public string UnlinkedIgnoredRootDir { get; init; } = string.Empty;
public List<string> UnlinkedCategories { get; init; } = [];
public List<string> IgnoredDownloads { get; init; } = [];
}
public record CleanCategoryRequest
{
[Required]
public string Name { get; init; } = string.Empty;
/// <summary>
/// Max ratio before removing a download.
/// </summary>
public double MaxRatio { get; init; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
public double MinSeedTime { get; init; }
/// <summary>
/// Max number of hours to seed before removing a download.
/// </summary>
public double MaxSeedTime { get; init; } = -1;
}
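
For reference, a request body built against the contracts above might look like the sketch below. The -1 values are read here as "limit not enforced", which is an interpretation of the defaults, not something stated in the diff; category names and times are placeholders.

using Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;

public static class DownloadCleanerRequestSample
{
    public static UpdateDownloadCleanerConfigRequest Build() => new()
    {
        Enabled = true,
        CronExpression = "0 0 * * * ?",
        Categories =
        [
            // -1 is assumed to mean "limit not enforced" (interpretation of the defaults above).
            new CleanCategoryRequest { Name = "tv",     MaxRatio = 2.0, MinSeedTime = 24, MaxSeedTime = -1 },
            new CleanCategoryRequest { Name = "movies", MaxRatio = -1,  MinSeedTime = 0,  MaxSeedTime = 336 }
        ],
        UnlinkedEnabled = true,
        UnlinkedTargetCategory = "cleanuparr-unlinked",
        UnlinkedCategories = ["tv", "movies"]
    };
}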

View File

@@ -0,0 +1,148 @@
using System.ComponentModel.DataAnnotations;
using System.IO;
using System.Linq;
using Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.DownloadCleaner.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class DownloadCleanerConfigController : ControllerBase
{
private readonly ILogger<DownloadCleanerConfigController> _logger;
private readonly DataContext _dataContext;
private readonly IJobManagementService _jobManagementService;
public DownloadCleanerConfigController(
ILogger<DownloadCleanerConfigController> logger,
DataContext dataContext,
IJobManagementService jobManagementService)
{
_logger = logger;
_dataContext = dataContext;
_jobManagementService = jobManagementService;
}
[HttpGet("download_cleaner")]
public async Task<IActionResult> GetDownloadCleanerConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.DownloadCleanerConfigs
.Include(x => x.Categories)
.AsNoTracking()
.FirstAsync();
return Ok(config);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("download_cleaner")]
public async Task<IActionResult> UpdateDownloadCleanerConfig([FromBody] UpdateDownloadCleanerConfigRequest newConfigDto)
{
await DataContext.Lock.WaitAsync();
try
{
if (newConfigDto is null)
{
throw new ValidationException("Request body cannot be null");
}
// Validate cron expression format
if (!string.IsNullOrEmpty(newConfigDto.CronExpression))
{
CronValidationHelper.ValidateCronExpression(newConfigDto.CronExpression);
}
// Get existing configuration
var oldConfig = await _dataContext.DownloadCleanerConfigs
.Include(x => x.Categories)
.FirstAsync();
oldConfig.Enabled = newConfigDto.Enabled;
oldConfig.CronExpression = newConfigDto.CronExpression;
oldConfig.UseAdvancedScheduling = newConfigDto.UseAdvancedScheduling;
oldConfig.DeletePrivate = newConfigDto.DeletePrivate;
oldConfig.UnlinkedEnabled = newConfigDto.UnlinkedEnabled;
oldConfig.UnlinkedTargetCategory = newConfigDto.UnlinkedTargetCategory;
oldConfig.UnlinkedUseTag = newConfigDto.UnlinkedUseTag;
oldConfig.UnlinkedIgnoredRootDir = newConfigDto.UnlinkedIgnoredRootDir;
oldConfig.UnlinkedCategories = newConfigDto.UnlinkedCategories;
oldConfig.IgnoredDownloads = newConfigDto.IgnoredDownloads;
// Replace the categories: remove the existing rows before clearing the navigation
// collection, otherwise RemoveRange would receive an already-emptied list.
_dataContext.CleanCategories.RemoveRange(oldConfig.Categories);
oldConfig.Categories.Clear();
_dataContext.DownloadCleanerConfigs.Update(oldConfig);
foreach (var categoryDto in newConfigDto.Categories)
{
_dataContext.CleanCategories.Add(new CleanCategory
{
Name = categoryDto.Name,
MaxRatio = categoryDto.MaxRatio,
MinSeedTime = categoryDto.MinSeedTime,
MaxSeedTime = categoryDto.MaxSeedTime,
DownloadCleanerConfigId = oldConfig.Id
});
}
oldConfig.Validate();
await _dataContext.SaveChangesAsync();
await UpdateJobSchedule(oldConfig, JobType.DownloadCleaner);
return Ok(new { Message = "DownloadCleaner configuration updated successfully" });
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save DownloadCleaner configuration");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task UpdateJobSchedule(IJobConfig config, JobType jobType)
{
if (config.Enabled)
{
if (!string.IsNullOrEmpty(config.CronExpression))
{
_logger.LogInformation("{name} is enabled, updating job schedule with cron expression: {CronExpression}",
jobType.ToString(), config.CronExpression);
await _jobManagementService.StartJob(jobType, null, config.CronExpression);
}
else
{
_logger.LogWarning("{name} is enabled, but no cron expression was found in the configuration", jobType.ToString());
}
return;
}
_logger.LogInformation("{name} is disabled, stopping the job", jobType.ToString());
await _jobManagementService.StopJob(jobType);
}
}
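
UpdateJobSchedule is repeated verbatim in the MalwareBlocker and QueueCleaner controllers later in this change. One possible consolidation is a shared helper; the sketch below is hypothetical (the class and method names are not part of the diff), and it only uses the members the controllers already rely on (IJobConfig.Enabled/CronExpression, StartJob/StopJob).

using System.Threading.Tasks;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Microsoft.Extensions.Logging;

// Hypothetical shared helper for the three config controllers.
public static class JobScheduleUpdater
{
    public static async Task UpdateAsync(
        IJobManagementService jobManagementService,
        ILogger logger,
        IJobConfig config,
        JobType jobType)
    {
        if (!config.Enabled)
        {
            logger.LogInformation("{name} is disabled, stopping the job", jobType.ToString());
            await jobManagementService.StopJob(jobType);
            return;
        }

        if (string.IsNullOrEmpty(config.CronExpression))
        {
            logger.LogWarning("{name} is enabled, but no cron expression was found in the configuration", jobType.ToString());
            return;
        }

        logger.LogInformation("{name} is enabled, updating job schedule with cron expression: {CronExpression}",
            jobType.ToString(), config.CronExpression);
        await jobManagementService.StartJob(jobType, null, config.CronExpression);
    }
}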

View File

@@ -0,0 +1,51 @@
using System;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Persistence.Models.Configuration;
namespace Cleanuparr.Api.Features.DownloadClient.Contracts.Requests;
public sealed record CreateDownloadClientRequest
{
public bool Enabled { get; init; }
public string Name { get; init; } = string.Empty;
public DownloadClientTypeName TypeName { get; init; }
public DownloadClientType Type { get; init; }
public Uri? Host { get; init; }
public string? Username { get; init; }
public string? Password { get; init; }
public string? UrlBase { get; init; }
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
{
throw new ValidationException("Host cannot be empty");
}
}
public DownloadClientConfig ToEntity() => new()
{
Enabled = Enabled,
Name = Name,
TypeName = TypeName,
Type = Type,
Host = Host,
Username = Username,
Password = Password,
UrlBase = UrlBase,
};
}

View File

@@ -0,0 +1,51 @@
using System;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Persistence.Models.Configuration;
namespace Cleanuparr.Api.Features.DownloadClient.Contracts.Requests;
public sealed record UpdateDownloadClientRequest
{
public bool Enabled { get; init; }
public string Name { get; init; } = string.Empty;
public DownloadClientTypeName TypeName { get; init; }
public DownloadClientType Type { get; init; }
public Uri? Host { get; init; }
public string? Username { get; init; }
public string? Password { get; init; }
public string? UrlBase { get; init; }
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
{
throw new ValidationException("Host cannot be empty");
}
}
public DownloadClientConfig ApplyTo(DownloadClientConfig existing) => existing with
{
Enabled = Enabled,
Name = Name,
TypeName = TypeName,
Type = Type,
Host = Host,
Username = Username,
Password = Password,
UrlBase = UrlBase,
};
}
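
CreateDownloadClientRequest and UpdateDownloadClientRequest declare the same properties and an identical Validate(); a shared abstract base record would remove the duplication. The sketch below is hypothetical and simply mirrors the members already shown above.

using System;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;

// Hypothetical base record both request types could derive from.
public abstract record DownloadClientRequestBase
{
    public bool Enabled { get; init; }
    public string Name { get; init; } = string.Empty;
    public DownloadClientTypeName TypeName { get; init; }
    public DownloadClientType Type { get; init; }
    public Uri? Host { get; init; }
    public string? Username { get; init; }
    public string? Password { get; init; }
    public string? UrlBase { get; init; }

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(Name))
        {
            throw new ValidationException("Client name cannot be empty");
        }

        if (Host is null)
        {
            throw new ValidationException("Host cannot be empty");
        }
    }
}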

View File

@@ -0,0 +1,149 @@
using System;
using System.Linq;
using Cleanuparr.Api.Features.DownloadClient.Contracts.Requests;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.DownloadClient.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class DownloadClientController : ControllerBase
{
private readonly ILogger<DownloadClientController> _logger;
private readonly DataContext _dataContext;
private readonly IDynamicHttpClientFactory _dynamicHttpClientFactory;
public DownloadClientController(
ILogger<DownloadClientController> logger,
DataContext dataContext,
IDynamicHttpClientFactory dynamicHttpClientFactory)
{
_logger = logger;
_dataContext = dataContext;
_dynamicHttpClientFactory = dynamicHttpClientFactory;
}
[HttpGet("download_client")]
public async Task<IActionResult> GetDownloadClientConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var clients = await _dataContext.DownloadClients
.AsNoTracking()
.ToListAsync();
clients = clients
.OrderBy(c => c.TypeName)
.ThenBy(c => c.Name)
.ToList();
return Ok(new { clients });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("download_client")]
public async Task<IActionResult> CreateDownloadClientConfig([FromBody] CreateDownloadClientRequest newClient)
{
await DataContext.Lock.WaitAsync();
try
{
newClient.Validate();
var clientConfig = newClient.ToEntity();
_dataContext.DownloadClients.Add(clientConfig);
await _dataContext.SaveChangesAsync();
return CreatedAtAction(nameof(GetDownloadClientConfig), new { id = clientConfig.Id }, clientConfig);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create download client");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("download_client/{id}")]
public async Task<IActionResult> UpdateDownloadClientConfig(Guid id, [FromBody] UpdateDownloadClientRequest updatedClient)
{
await DataContext.Lock.WaitAsync();
try
{
updatedClient.Validate();
var existingClient = await _dataContext.DownloadClients
.FirstOrDefaultAsync(c => c.Id == id);
if (existingClient is null)
{
return NotFound($"Download client with ID {id} not found");
}
var clientToPersist = updatedClient.ApplyTo(existingClient);
_dataContext.Entry(existingClient).CurrentValues.SetValues(clientToPersist);
await _dataContext.SaveChangesAsync();
return Ok(clientToPersist);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update download client with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpDelete("download_client/{id}")]
public async Task<IActionResult> DeleteDownloadClientConfig(Guid id)
{
await DataContext.Lock.WaitAsync();
try
{
var existingClient = await _dataContext.DownloadClients
.FirstOrDefaultAsync(c => c.Id == id);
if (existingClient is null)
{
return NotFound($"Download client with ID {id} not found");
}
_dataContext.DownloadClients.Remove(existingClient);
await _dataContext.SaveChangesAsync();
var clientName = $"DownloadClient_{id}";
_dynamicHttpClientFactory.UnregisterConfiguration(clientName);
_logger.LogInformation("Removed HTTP client configuration for deleted download client {ClientName}", clientName);
return NoContent();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to delete download client with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
}

View File

@@ -0,0 +1,131 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Cleanuparr.Infrastructure.Logging;
using Cleanuparr.Persistence.Models.Configuration.General;
using Cleanuparr.Shared.Helpers;
using Serilog.Events;
using ValidationException = Cleanuparr.Domain.Exceptions.ValidationException;
namespace Cleanuparr.Api.Features.General.Contracts.Requests;
public sealed record UpdateGeneralConfigRequest
{
public bool DisplaySupportBanner { get; init; } = true;
public bool DryRun { get; init; }
public ushort HttpMaxRetries { get; init; }
public ushort HttpTimeout { get; init; } = 100;
public CertificateValidationType HttpCertificateValidation { get; init; } = CertificateValidationType.Enabled;
public bool SearchEnabled { get; init; } = true;
public ushort SearchDelay { get; init; } = Constants.DefaultSearchDelaySeconds;
public string EncryptionKey { get; init; } = Guid.NewGuid().ToString();
public List<string> IgnoredDownloads { get; init; } = [];
public UpdateLoggingConfigRequest Log { get; init; } = new();
public GeneralConfig ApplyTo(GeneralConfig existingConfig, IServiceProvider services, ILogger logger)
{
existingConfig.DisplaySupportBanner = DisplaySupportBanner;
existingConfig.DryRun = DryRun;
existingConfig.HttpMaxRetries = HttpMaxRetries;
existingConfig.HttpTimeout = HttpTimeout;
existingConfig.HttpCertificateValidation = HttpCertificateValidation;
existingConfig.SearchEnabled = SearchEnabled;
existingConfig.SearchDelay = SearchDelay;
existingConfig.EncryptionKey = EncryptionKey;
existingConfig.IgnoredDownloads = IgnoredDownloads;
bool loggingChanged = Log.ApplyTo(existingConfig.Log);
Validate(existingConfig);
ApplySideEffects(existingConfig, services, logger, loggingChanged);
return existingConfig;
}
private static void Validate(GeneralConfig config)
{
if (config.HttpTimeout is 0)
{
throw new ValidationException("HTTP_TIMEOUT must be greater than 0");
}
config.Log.Validate();
}
private void ApplySideEffects(GeneralConfig config, IServiceProvider services, ILogger logger, bool loggingChanged)
{
var dynamicHttpClientFactory = services.GetRequiredService<IDynamicHttpClientFactory>();
dynamicHttpClientFactory.UpdateAllClientsFromGeneralConfig(config);
logger.LogInformation("Updated all HTTP client configurations with new general settings");
if (!loggingChanged)
{
return;
}
if (Log.LevelOnlyChange)
{
logger.LogCritical("Setting global log level to {level}", config.Log.Level);
LoggingConfigManager.SetLogLevel(config.Log.Level);
return;
}
logger.LogCritical("Reconfiguring logger due to configuration changes");
LoggingConfigManager.ReconfigureLogging(config);
}
}
public sealed record UpdateLoggingConfigRequest
{
public LogEventLevel Level { get; init; } = LogEventLevel.Information;
public ushort RollingSizeMB { get; init; } = 10;
public ushort RetainedFileCount { get; init; } = 5;
public ushort TimeLimitHours { get; init; } = 24;
public bool ArchiveEnabled { get; init; } = true;
public ushort ArchiveRetainedCount { get; init; } = 60;
public ushort ArchiveTimeLimitHours { get; init; } = 24 * 30;
public bool ApplyTo(LoggingConfig existingConfig)
{
bool levelChanged = existingConfig.Level != Level;
bool otherPropertiesChanged =
existingConfig.RollingSizeMB != RollingSizeMB ||
existingConfig.RetainedFileCount != RetainedFileCount ||
existingConfig.TimeLimitHours != TimeLimitHours ||
existingConfig.ArchiveEnabled != ArchiveEnabled ||
existingConfig.ArchiveRetainedCount != ArchiveRetainedCount ||
existingConfig.ArchiveTimeLimitHours != ArchiveTimeLimitHours;
existingConfig.Level = Level;
existingConfig.RollingSizeMB = RollingSizeMB;
existingConfig.RetainedFileCount = RetainedFileCount;
existingConfig.TimeLimitHours = TimeLimitHours;
existingConfig.ArchiveEnabled = ArchiveEnabled;
existingConfig.ArchiveRetainedCount = ArchiveRetainedCount;
existingConfig.ArchiveTimeLimitHours = ArchiveTimeLimitHours;
existingConfig.Validate();
LevelOnlyChange = levelChanged && !otherPropertiesChanged;
return levelChanged || otherPropertiesChanged;
}
public bool LevelOnlyChange { get; private set; }
}
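
ApplyTo reports two things: whether anything changed at all (return value) and whether only the level changed (LevelOnlyChange), which decides between SetLogLevel and a full ReconfigureLogging in the general request above. A small sketch of the two outcomes, assuming LoggingConfig exposes the same settable members that ApplyTo assigns and that these default values pass Validate():

using Cleanuparr.Api.Features.General.Contracts.Requests;
using Cleanuparr.Persistence.Models.Configuration.General;
using Serilog.Events;

public static class LoggingChangeSample
{
    public static void Run()
    {
        // Stored config mirroring the request defaults (assumption for this sketch).
        var existing = new LoggingConfig
        {
            Level = LogEventLevel.Information,
            RollingSizeMB = 10,
            RetainedFileCount = 5,
            TimeLimitHours = 24,
            ArchiveEnabled = true,
            ArchiveRetainedCount = 60,
            ArchiveTimeLimitHours = 720,
        };

        // Only the level differs from the stored config.
        var levelOnly = new UpdateLoggingConfigRequest { Level = LogEventLevel.Debug };
        bool changed = levelOnly.ApplyTo(existing);      // true
        bool quickPath = levelOnly.LevelOnlyChange;      // true -> SetLogLevel path

        // Level and retention both differ -> full reconfiguration path.
        var broader = new UpdateLoggingConfigRequest { Level = LogEventLevel.Warning, RetainedFileCount = 10 };
        changed = broader.ApplyTo(existing);             // true
        bool fullReconfigure = !broader.LevelOnlyChange; // true -> ReconfigureLogging path
    }
}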

View File

@@ -0,0 +1,115 @@
using System;
using System.Linq;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.General.Contracts.Requests;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.General;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.General.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class GeneralConfigController : ControllerBase
{
private readonly ILogger<GeneralConfigController> _logger;
private readonly DataContext _dataContext;
private readonly MemoryCache _cache;
public GeneralConfigController(
ILogger<GeneralConfigController> logger,
DataContext dataContext,
MemoryCache cache)
{
_logger = logger;
_dataContext = dataContext;
_cache = cache;
}
[HttpGet("general")]
public async Task<IActionResult> GetGeneralConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.GeneralConfigs
.AsNoTracking()
.FirstAsync();
return Ok(config);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("general")]
public async Task<IActionResult> UpdateGeneralConfig([FromBody] UpdateGeneralConfigRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.GeneralConfigs
.FirstAsync();
bool wasDryRun = config.DryRun;
request.ApplyTo(config, HttpContext.RequestServices, _logger);
await _dataContext.SaveChangesAsync();
ClearStrikesCacheIfNeeded(wasDryRun, config.DryRun);
return Ok(new { Message = "General configuration updated successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save General configuration");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private void ClearStrikesCacheIfNeeded(bool wasDryRun, bool isDryRun)
{
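// Strikes only need to be wiped when dry run was just switched off (true -> false);
// in every other case the cache is left alone.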
if (!wasDryRun || isDryRun)
{
return;
}
List<object> keys;
// Remove strikes
foreach (string strikeType in Enum.GetNames(typeof(StrikeType)))
{
keys = _cache.Keys
.Where(key => key.ToString()?.StartsWith(strikeType, StringComparison.InvariantCultureIgnoreCase) is true)
.ToList();
foreach (object key in keys)
{
_cache.Remove(key);
}
_logger.LogTrace("Removed all cache entries for strike type: {StrikeType}", strikeType);
}
// Remove strike cache items
keys = _cache.Keys
.Where(key => key.ToString()?.StartsWith("item_", StringComparison.InvariantCultureIgnoreCase) is true)
.ToList();
foreach (object key in keys)
{
_cache.Remove(key);
}
}
}

View File

@@ -0,0 +1,50 @@
using System.Collections.Generic;
using Cleanuparr.Persistence.Models.Configuration.MalwareBlocker;
namespace Cleanuparr.Api.Features.MalwareBlocker.Contracts.Requests;
public sealed record UpdateMalwareBlockerConfigRequest
{
public bool Enabled { get; init; }
public string CronExpression { get; init; } = "0/5 * * * * ?";
public bool UseAdvancedScheduling { get; init; }
public bool IgnorePrivate { get; init; }
public bool DeletePrivate { get; init; }
public bool DeleteKnownMalware { get; init; }
public BlocklistSettings Sonarr { get; init; } = new();
public BlocklistSettings Radarr { get; init; } = new();
public BlocklistSettings Lidarr { get; init; } = new();
public BlocklistSettings Readarr { get; init; } = new();
public BlocklistSettings Whisparr { get; init; } = new();
public List<string> IgnoredDownloads { get; init; } = [];
public ContentBlockerConfig ApplyTo(ContentBlockerConfig config)
{
config.Enabled = Enabled;
config.CronExpression = CronExpression;
config.UseAdvancedScheduling = UseAdvancedScheduling;
config.IgnorePrivate = IgnorePrivate;
config.DeletePrivate = DeletePrivate;
config.DeleteKnownMalware = DeleteKnownMalware;
config.Sonarr = Sonarr;
config.Radarr = Radarr;
config.Lidarr = Lidarr;
config.Readarr = Readarr;
config.Whisparr = Whisparr;
config.IgnoredDownloads = IgnoredDownloads;
return config;
}
}

View File

@@ -0,0 +1,112 @@
using System.ComponentModel.DataAnnotations;
using System.Threading.Tasks;
using Cleanuparr.Api.Features.MalwareBlocker.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.MalwareBlocker;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.MalwareBlocker.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class MalwareBlockerConfigController : ControllerBase
{
private readonly ILogger<MalwareBlockerConfigController> _logger;
private readonly DataContext _dataContext;
private readonly IJobManagementService _jobManagementService;
public MalwareBlockerConfigController(
ILogger<MalwareBlockerConfigController> logger,
DataContext dataContext,
IJobManagementService jobManagementService)
{
_logger = logger;
_dataContext = dataContext;
_jobManagementService = jobManagementService;
}
[HttpGet("malware_blocker")]
public async Task<IActionResult> GetMalwareBlockerConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.ContentBlockerConfigs
.AsNoTracking()
.FirstAsync();
return Ok(config);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("malware_blocker")]
public async Task<IActionResult> UpdateMalwareBlockerConfig([FromBody] UpdateMalwareBlockerConfigRequest request)
{
await DataContext.Lock.WaitAsync();
try
{
if (!string.IsNullOrEmpty(request.CronExpression))
{
CronValidationHelper.ValidateCronExpression(request.CronExpression, JobType.MalwareBlocker);
}
var config = await _dataContext.ContentBlockerConfigs
.FirstAsync();
request.ApplyTo(config);
config.Validate();
await _dataContext.SaveChangesAsync();
await UpdateJobSchedule(config, JobType.MalwareBlocker);
return Ok(new { Message = "MalwareBlocker configuration updated successfully" });
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save MalwareBlocker configuration");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task UpdateJobSchedule(IJobConfig config, JobType jobType)
{
if (config.Enabled)
{
if (!string.IsNullOrEmpty(config.CronExpression))
{
_logger.LogInformation("{name} is enabled, updating job schedule with cron expression: {CronExpression}",
jobType.ToString(), config.CronExpression);
await _jobManagementService.StartJob(jobType, null, config.CronExpression);
}
else
{
_logger.LogWarning("{name} is enabled, but no cron expression was found in the configuration", jobType.ToString());
}
return;
}
_logger.LogInformation("{name} is disabled, stopping the job", jobType.ToString());
await _jobManagementService.StopJob(jobType);
}
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateAppriseProviderRequest : CreateNotificationProviderRequestBase
{
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,8 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateNotifiarrProviderRequest : CreateNotificationProviderRequestBase
{
public string ApiKey { get; init; } = string.Empty;
public string ChannelId { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,20 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public abstract record CreateNotificationProviderRequestBase
{
public string Name { get; init; } = string.Empty;
public bool IsEnabled { get; init; } = true;
public bool OnFailedImportStrike { get; init; }
public bool OnStalledStrike { get; init; }
public bool OnSlowStrike { get; init; }
public bool OnQueueItemDeleted { get; init; }
public bool OnDownloadCleaned { get; init; }
public bool OnCategoryChanged { get; init; }
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record CreateNtfyProviderRequest : CreateNotificationProviderRequestBase
{
public string ServerUrl { get; init; } = string.Empty;
public List<string> Topics { get; init; } = [];
public NtfyAuthenticationType AuthenticationType { get; init; } = NtfyAuthenticationType.None;
public string Username { get; init; } = string.Empty;
public string Password { get; init; } = string.Empty;
public string AccessToken { get; init; } = string.Empty;
public NtfyPriority Priority { get; init; } = NtfyPriority.Default;
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestAppriseProviderRequest
{
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,8 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestNotifiarrProviderRequest
{
public string ApiKey { get; init; } = string.Empty;
public string ChannelId { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record TestNtfyProviderRequest
{
public string ServerUrl { get; init; } = string.Empty;
public List<string> Topics { get; init; } = [];
public NtfyAuthenticationType AuthenticationType { get; init; } = NtfyAuthenticationType.None;
public string Username { get; init; } = string.Empty;
public string Password { get; init; } = string.Empty;
public string AccessToken { get; init; } = string.Empty;
public NtfyPriority Priority { get; init; } = NtfyPriority.Default;
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,10 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateAppriseProviderRequest : UpdateNotificationProviderRequestBase
{
public string Url { get; init; } = string.Empty;
public string Key { get; init; } = string.Empty;
public string Tags { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,8 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateNotifiarrProviderRequest : UpdateNotificationProviderRequestBase
{
public string ApiKey { get; init; } = string.Empty;
public string ChannelId { get; init; } = string.Empty;
}

View File

@@ -0,0 +1,20 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public abstract record UpdateNotificationProviderRequestBase
{
public string Name { get; init; } = string.Empty;
public bool IsEnabled { get; init; }
public bool OnFailedImportStrike { get; init; }
public bool OnStalledStrike { get; init; }
public bool OnSlowStrike { get; init; }
public bool OnQueueItemDeleted { get; init; }
public bool OnDownloadCleaned { get; init; }
public bool OnCategoryChanged { get; init; }
}

View File

@@ -0,0 +1,22 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Requests;
public record UpdateNtfyProviderRequest : UpdateNotificationProviderRequestBase
{
public string ServerUrl { get; init; } = string.Empty;
public List<string> Topics { get; init; } = [];
public NtfyAuthenticationType AuthenticationType { get; init; } = NtfyAuthenticationType.None;
public string Username { get; init; } = string.Empty;
public string Password { get; init; } = string.Empty;
public string AccessToken { get; init; } = string.Empty;
public NtfyPriority Priority { get; init; } = NtfyPriority.Default;
public List<string> Tags { get; init; } = [];
}

View File

@@ -0,0 +1,19 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Notifications.Models;
namespace Cleanuparr.Api.Features.Notifications.Contracts.Responses;
public sealed record NotificationProviderResponse
{
public Guid Id { get; init; }
public string Name { get; init; } = string.Empty;
public NotificationProviderType Type { get; init; }
public bool IsEnabled { get; init; }
public NotificationEventFlags Events { get; init; } = new();
public object Configuration { get; init; } = new();
}

View File

@@ -0,0 +1,6 @@
namespace Cleanuparr.Api.Features.Notifications.Contracts.Responses;
public sealed record NotificationProvidersResponse
{
public List<NotificationProviderResponse> Providers { get; init; } = [];
}

View File

@@ -0,0 +1,708 @@
using Cleanuparr.Api.Features.Notifications.Contracts.Requests;
using Cleanuparr.Api.Features.Notifications.Contracts.Responses;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Notification;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.Notifications.Controllers;
[ApiController]
[Route("api/configuration/notification_providers")]
public sealed class NotificationProvidersController : ControllerBase
{
private readonly ILogger<NotificationProvidersController> _logger;
private readonly DataContext _dataContext;
private readonly INotificationConfigurationService _notificationConfigurationService;
private readonly NotificationService _notificationService;
public NotificationProvidersController(
ILogger<NotificationProvidersController> logger,
DataContext dataContext,
INotificationConfigurationService notificationConfigurationService,
NotificationService notificationService)
{
_logger = logger;
_dataContext = dataContext;
_notificationConfigurationService = notificationConfigurationService;
_notificationService = notificationService;
}
[HttpGet]
public async Task<IActionResult> GetNotificationProviders()
{
await DataContext.Lock.WaitAsync();
try
{
var providers = await _dataContext.NotificationConfigs
.Include(p => p.NotifiarrConfiguration)
.Include(p => p.AppriseConfiguration)
.Include(p => p.NtfyConfiguration)
.AsNoTracking()
.ToListAsync();
var providerDtos = providers
.Select(p => new NotificationProviderResponse
{
Id = p.Id,
Name = p.Name,
Type = p.Type,
IsEnabled = p.IsEnabled,
Events = new NotificationEventFlags
{
OnFailedImportStrike = p.OnFailedImportStrike,
OnStalledStrike = p.OnStalledStrike,
OnSlowStrike = p.OnSlowStrike,
OnQueueItemDeleted = p.OnQueueItemDeleted,
OnDownloadCleaned = p.OnDownloadCleaned,
OnCategoryChanged = p.OnCategoryChanged
},
Configuration = p.Type switch
{
NotificationProviderType.Notifiarr => p.NotifiarrConfiguration ?? new object(),
NotificationProviderType.Apprise => p.AppriseConfiguration ?? new object(),
NotificationProviderType.Ntfy => p.NtfyConfiguration ?? new object(),
_ => new object()
}
})
.OrderBy(x => x.Type.ToString())
.ThenBy(x => x.Name)
.ToList();
var response = new NotificationProvidersResponse { Providers = providerDtos };
return Ok(response);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("notifiarr")]
public async Task<IActionResult> CreateNotifiarrProvider([FromBody] CreateNotifiarrProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var notifiarrConfig = new NotifiarrConfig
{
ApiKey = newProvider.ApiKey,
ChannelId = newProvider.ChannelId
};
notifiarrConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Notifiarr,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
NotifiarrConfiguration = notifiarrConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Notifiarr provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("apprise")]
public async Task<IActionResult> CreateAppriseProvider([FromBody] CreateAppriseProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var appriseConfig = new AppriseConfig
{
Url = newProvider.Url,
Key = newProvider.Key,
Tags = newProvider.Tags
};
appriseConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Apprise,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
AppriseConfiguration = appriseConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Apprise provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("ntfy")]
public async Task<IActionResult> CreateNtfyProvider([FromBody] CreateNtfyProviderRequest newProvider)
{
await DataContext.Lock.WaitAsync();
try
{
if (string.IsNullOrWhiteSpace(newProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs.CountAsync(x => x.Name == newProvider.Name);
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var ntfyConfig = new NtfyConfig
{
ServerUrl = newProvider.ServerUrl,
Topics = newProvider.Topics,
AuthenticationType = newProvider.AuthenticationType,
Username = newProvider.Username,
Password = newProvider.Password,
AccessToken = newProvider.AccessToken,
Priority = newProvider.Priority,
Tags = newProvider.Tags
};
ntfyConfig.Validate();
var provider = new NotificationConfig
{
Name = newProvider.Name,
Type = NotificationProviderType.Ntfy,
IsEnabled = newProvider.IsEnabled,
OnFailedImportStrike = newProvider.OnFailedImportStrike,
OnStalledStrike = newProvider.OnStalledStrike,
OnSlowStrike = newProvider.OnSlowStrike,
OnQueueItemDeleted = newProvider.OnQueueItemDeleted,
OnDownloadCleaned = newProvider.OnDownloadCleaned,
OnCategoryChanged = newProvider.OnCategoryChanged,
NtfyConfiguration = ntfyConfig
};
_dataContext.NotificationConfigs.Add(provider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(provider);
return CreatedAtAction(nameof(GetNotificationProviders), new { id = provider.Id }, providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create Ntfy provider");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("notifiarr/{id:guid}")]
public async Task<IActionResult> UpdateNotifiarrProvider(Guid id, [FromBody] UpdateNotifiarrProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.NotifiarrConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Notifiarr);
if (existingProvider == null)
{
return NotFound($"Notifiarr provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var notifiarrConfig = new NotifiarrConfig
{
ApiKey = updatedProvider.ApiKey,
ChannelId = updatedProvider.ChannelId
};
if (existingProvider.NotifiarrConfiguration != null)
{
notifiarrConfig = notifiarrConfig with { Id = existingProvider.NotifiarrConfiguration.Id };
}
notifiarrConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
NotifiarrConfiguration = notifiarrConfig,
UpdatedAt = DateTime.UtcNow
};
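// The `with` copy keeps the existing Id; the row is replaced by removing the tracked entity and adding the updated copy.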
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Notifiarr provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("apprise/{id:guid}")]
public async Task<IActionResult> UpdateAppriseProvider(Guid id, [FromBody] UpdateAppriseProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.AppriseConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Apprise);
if (existingProvider == null)
{
return NotFound($"Apprise provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var appriseConfig = new AppriseConfig
{
Url = updatedProvider.Url,
Key = updatedProvider.Key,
Tags = updatedProvider.Tags
};
if (existingProvider.AppriseConfiguration != null)
{
appriseConfig = appriseConfig with { Id = existingProvider.AppriseConfiguration.Id };
}
appriseConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
AppriseConfiguration = appriseConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Apprise provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("ntfy/{id:guid}")]
public async Task<IActionResult> UpdateNtfyProvider(Guid id, [FromBody] UpdateNtfyProviderRequest updatedProvider)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.NtfyConfiguration)
.FirstOrDefaultAsync(p => p.Id == id && p.Type == NotificationProviderType.Ntfy);
if (existingProvider == null)
{
return NotFound($"Ntfy provider with ID {id} not found");
}
if (string.IsNullOrWhiteSpace(updatedProvider.Name))
{
return BadRequest("Provider name is required");
}
var duplicateConfig = await _dataContext.NotificationConfigs
.Where(x => x.Id != id)
.Where(x => x.Name == updatedProvider.Name)
.CountAsync();
if (duplicateConfig > 0)
{
return BadRequest("A provider with this name already exists");
}
var ntfyConfig = new NtfyConfig
{
ServerUrl = updatedProvider.ServerUrl,
Topics = updatedProvider.Topics,
AuthenticationType = updatedProvider.AuthenticationType,
Username = updatedProvider.Username,
Password = updatedProvider.Password,
AccessToken = updatedProvider.AccessToken,
Priority = updatedProvider.Priority,
Tags = updatedProvider.Tags
};
if (existingProvider.NtfyConfiguration != null)
{
ntfyConfig = ntfyConfig with { Id = existingProvider.NtfyConfiguration.Id };
}
ntfyConfig.Validate();
var newProvider = existingProvider with
{
Name = updatedProvider.Name,
IsEnabled = updatedProvider.IsEnabled,
OnFailedImportStrike = updatedProvider.OnFailedImportStrike,
OnStalledStrike = updatedProvider.OnStalledStrike,
OnSlowStrike = updatedProvider.OnSlowStrike,
OnQueueItemDeleted = updatedProvider.OnQueueItemDeleted,
OnDownloadCleaned = updatedProvider.OnDownloadCleaned,
OnCategoryChanged = updatedProvider.OnCategoryChanged,
NtfyConfiguration = ntfyConfig,
UpdatedAt = DateTime.UtcNow
};
_dataContext.NotificationConfigs.Remove(existingProvider);
_dataContext.NotificationConfigs.Add(newProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
var providerDto = MapProvider(newProvider);
return Ok(providerDto);
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update Ntfy provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpDelete("{id:guid}")]
public async Task<IActionResult> DeleteNotificationProvider(Guid id)
{
await DataContext.Lock.WaitAsync();
try
{
var existingProvider = await _dataContext.NotificationConfigs
.Include(p => p.NotifiarrConfiguration)
.Include(p => p.AppriseConfiguration)
.Include(p => p.NtfyConfiguration)
.FirstOrDefaultAsync(p => p.Id == id);
if (existingProvider == null)
{
return NotFound($"Notification provider with ID {id} not found");
}
_dataContext.NotificationConfigs.Remove(existingProvider);
await _dataContext.SaveChangesAsync();
await _notificationConfigurationService.InvalidateCacheAsync();
_logger.LogInformation("Removed notification provider {ProviderName} with ID {ProviderId}",
existingProvider.Name, existingProvider.Id);
return NoContent();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to delete notification provider with ID {Id}", id);
throw;
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("notifiarr/test")]
public async Task<IActionResult> TestNotifiarrProvider([FromBody] TestNotifiarrProviderRequest testRequest)
{
try
{
var notifiarrConfig = new NotifiarrConfig
{
ApiKey = testRequest.ApiKey,
ChannelId = testRequest.ChannelId
};
notifiarrConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Notifiarr,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = notifiarrConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Notifiarr provider");
throw;
}
}
[HttpPost("apprise/test")]
public async Task<IActionResult> TestAppriseProvider([FromBody] TestAppriseProviderRequest testRequest)
{
try
{
var appriseConfig = new AppriseConfig
{
Url = testRequest.Url,
Key = testRequest.Key,
Tags = testRequest.Tags
};
appriseConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Apprise,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = appriseConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Apprise provider");
throw;
}
}
[HttpPost("ntfy/test")]
public async Task<IActionResult> TestNtfyProvider([FromBody] TestNtfyProviderRequest testRequest)
{
try
{
var ntfyConfig = new NtfyConfig
{
ServerUrl = testRequest.ServerUrl,
Topics = testRequest.Topics,
AuthenticationType = testRequest.AuthenticationType,
Username = testRequest.Username,
Password = testRequest.Password,
AccessToken = testRequest.AccessToken,
Priority = testRequest.Priority,
Tags = testRequest.Tags
};
ntfyConfig.Validate();
var providerDto = new NotificationProviderDto
{
Id = Guid.NewGuid(),
Name = "Test Provider",
Type = NotificationProviderType.Ntfy,
IsEnabled = true,
Events = new NotificationEventFlags
{
OnFailedImportStrike = true,
OnStalledStrike = false,
OnSlowStrike = false,
OnQueueItemDeleted = false,
OnDownloadCleaned = false,
OnCategoryChanged = false
},
Configuration = ntfyConfig
};
await _notificationService.SendTestNotificationAsync(providerDto);
return Ok(new { Message = "Test notification sent successfully", Success = true });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to test Ntfy provider");
throw;
}
}
private static NotificationProviderResponse MapProvider(NotificationConfig provider)
{
return new NotificationProviderResponse
{
Id = provider.Id,
Name = provider.Name,
Type = provider.Type,
IsEnabled = provider.IsEnabled,
Events = new NotificationEventFlags
{
OnFailedImportStrike = provider.OnFailedImportStrike,
OnStalledStrike = provider.OnStalledStrike,
OnSlowStrike = provider.OnSlowStrike,
OnQueueItemDeleted = provider.OnQueueItemDeleted,
OnDownloadCleaned = provider.OnDownloadCleaned,
OnCategoryChanged = provider.OnCategoryChanged
},
Configuration = provider.Type switch
{
NotificationProviderType.Notifiarr => provider.NotifiarrConfiguration ?? new object(),
NotificationProviderType.Apprise => provider.AppriseConfiguration ?? new object(),
NotificationProviderType.Ntfy => provider.NtfyConfiguration ?? new object(),
_ => new object()
}
};
}
}
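
The three test endpoints validate an ad-hoc configuration and send a test notification without persisting anything. A minimal client sketch for the ntfy test route follows; the base address, server URL and topic are placeholders, and NtfyConfig.Validate may reject them if they are not real values.

using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading.Tasks;

public static class NtfyTestSample
{
    // Hypothetical caller for POST api/configuration/notification_providers/ntfy/test.
    public static async Task RunAsync()
    {
        using var http = new HttpClient { BaseAddress = new Uri("http://localhost:11011") };

        // AuthenticationType and Priority fall back to their request defaults (None / Default).
        var response = await http.PostAsJsonAsync("api/configuration/notification_providers/ntfy/test", new
        {
            serverUrl = "https://ntfy.sh",
            topics = new[] { "cleanuparr" }
        });
        response.EnsureSuccessStatusCode();
    }
}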

View File

@@ -0,0 +1,27 @@
using System.ComponentModel.DataAnnotations;
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
public abstract record QueueRuleDto
{
public Guid? Id { get; set; }
[Required]
public string Name { get; set; } = string.Empty;
public bool Enabled { get; set; } = true;
[Range(3, int.MaxValue, ErrorMessage = "Max strikes must be at least 3")]
public int MaxStrikes { get; set; } = 3;
public TorrentPrivacyType PrivacyType { get; set; } = TorrentPrivacyType.Public;
[Range(0, 100, ErrorMessage = "Minimum completion percentage must be between 0 and 100")]
public ushort MinCompletionPercentage { get; set; }
[Range(0, 100, ErrorMessage = "Maximum completion percentage must be between 0 and 100")]
public ushort MaxCompletionPercentage { get; set; }
public bool DeletePrivateTorrentsFromClient { get; set; } = false;
}

View File

@@ -0,0 +1,15 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
public sealed record SlowRuleDto : QueueRuleDto
{
public bool ResetStrikesOnProgress { get; set; } = true;
public string MinSpeed { get; set; } = string.Empty;
[Range(0, double.MaxValue, ErrorMessage = "Maximum time cannot be negative")]
public double MaxTimeHours { get; set; } = 0;
public string? IgnoreAboveSize { get; set; }
}

View File

@@ -0,0 +1,8 @@
namespace Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
public sealed record StallRuleDto : QueueRuleDto
{
public bool ResetStrikesOnProgress { get; set; } = true;
public string? MinimumProgress { get; set; }
}

View File

@@ -0,0 +1,18 @@
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
namespace Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
public sealed record UpdateQueueCleanerConfigRequest
{
public bool Enabled { get; init; }
public string CronExpression { get; init; } = "0 0/5 * * * ?";
public bool UseAdvancedScheduling { get; init; }
public FailedImportConfig FailedImport { get; init; } = new();
public ushort DownloadingMetadataMaxStrikes { get; init; }
public List<string> IgnoredDownloads { get; set; } = [];
}
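
A sketch of a minimal update payload for this contract; values are placeholders, and FailedImport keeps its defaults here because its members live in the persistence model and are not shown in this diff.

using Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;

public static class QueueCleanerRequestSample
{
    public static UpdateQueueCleanerConfigRequest Build() => new()
    {
        Enabled = true,
        CronExpression = "0 0/5 * * * ?", // every 5 minutes, matching the default above
        UseAdvancedScheduling = false,
        DownloadingMetadataMaxStrikes = 5,
        IgnoredDownloads = ["some-tracker.example", "manual-download"]
        // FailedImport is left at its defaults; its shape is defined in the persistence layer.
    };
}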

View File

@@ -0,0 +1,117 @@
using System.ComponentModel.DataAnnotations;
using Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Infrastructure.Utilities;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.QueueCleaner.Controllers;
[ApiController]
[Route("api/configuration")]
public sealed class QueueCleanerConfigController : ControllerBase
{
private readonly ILogger<QueueCleanerConfigController> _logger;
private readonly DataContext _dataContext;
private readonly IJobManagementService _jobManagementService;
public QueueCleanerConfigController(
ILogger<QueueCleanerConfigController> logger,
DataContext dataContext,
IJobManagementService jobManagementService)
{
_logger = logger;
_dataContext = dataContext;
_jobManagementService = jobManagementService;
}
[HttpGet("queue_cleaner")]
public async Task<IActionResult> GetQueueCleanerConfig()
{
await DataContext.Lock.WaitAsync();
try
{
var config = await _dataContext.QueueCleanerConfigs
.AsNoTracking()
.FirstAsync();
return Ok(config);
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("queue_cleaner")]
public async Task<IActionResult> UpdateQueueCleanerConfig([FromBody] UpdateQueueCleanerConfigRequest newConfigDto)
{
await DataContext.Lock.WaitAsync();
try
{
if (!string.IsNullOrEmpty(newConfigDto.CronExpression))
{
CronValidationHelper.ValidateCronExpression(newConfigDto.CronExpression);
}
var oldConfig = await _dataContext.QueueCleanerConfigs
.FirstAsync();
oldConfig.Enabled = newConfigDto.Enabled;
oldConfig.CronExpression = newConfigDto.CronExpression;
oldConfig.UseAdvancedScheduling = newConfigDto.UseAdvancedScheduling;
oldConfig.FailedImport = newConfigDto.FailedImport;
oldConfig.DownloadingMetadataMaxStrikes = newConfigDto.DownloadingMetadataMaxStrikes;
oldConfig.IgnoredDownloads = newConfigDto.IgnoredDownloads;
oldConfig.Validate();
await _dataContext.SaveChangesAsync();
await UpdateJobSchedule(oldConfig, JobType.QueueCleaner);
return Ok(new { Message = "QueueCleaner configuration updated successfully" });
}
catch (ValidationException ex)
{
return BadRequest(ex.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to save QueueCleaner configuration");
throw;
}
finally
{
DataContext.Lock.Release();
}
}
private async Task UpdateJobSchedule(IJobConfig config, JobType jobType)
{
if (config.Enabled)
{
if (!string.IsNullOrEmpty(config.CronExpression))
{
_logger.LogInformation("{name} is enabled, updating job schedule with cron expression: {CronExpression}",
jobType.ToString(), config.CronExpression);
await _jobManagementService.StartJob(jobType, null, config.CronExpression);
}
else
{
_logger.LogWarning("{name} is enabled, but no cron expression was found in the configuration", jobType.ToString());
}
return;
}
_logger.LogInformation("{name} is disabled, stopping the job", jobType.ToString());
await _jobManagementService.StopJob(jobType);
}
}
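
A hedged client-side sketch of the PUT endpoint above; the route and field names come from the controller and request record in this diff, while the base URL and the ignored-download value are assumptions.

using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("http://localhost:11011") }; // port is an assumption

var payload = new
{
    Enabled = true,
    CronExpression = "0 0/5 * * * ?",            // default from UpdateQueueCleanerConfigRequest
    UseAdvancedScheduling = false,
    DownloadingMetadataMaxStrikes = 3,
    IgnoredDownloads = new[] { "example-hash" }  // hypothetical value
};

HttpResponseMessage response = await client.PutAsJsonAsync("api/configuration/queue_cleaner", payload);

// A ValidationException from CronValidationHelper or config.Validate() comes back as 400 Bad Request.
Console.WriteLine($"{(int)response.StatusCode}: {await response.Content.ReadAsStringAsync()}");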

View File

@@ -0,0 +1,437 @@
using Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Api.Features.QueueCleaner.Controllers;
[ApiController]
[Route("api/queue-rules")]
public class QueueRulesController : ControllerBase
{
private readonly ILogger<QueueRulesController> _logger;
private readonly DataContext _dataContext;
private readonly IRuleIntervalValidator _ruleIntervalValidator;
public QueueRulesController(
ILogger<QueueRulesController> logger,
DataContext dataContext,
IRuleIntervalValidator ruleIntervalValidator)
{
_logger = logger;
_dataContext = dataContext;
_ruleIntervalValidator = ruleIntervalValidator;
}
[HttpGet("stall")]
public async Task<IActionResult> GetStallRules()
{
await DataContext.Lock.WaitAsync();
try
{
var rules = await _dataContext.StallRules
.OrderBy(r => r.MinCompletionPercentage)
.ThenBy(r => r.Name)
.AsNoTracking()
.ToListAsync();
return Ok(rules);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to retrieve stall rules");
return StatusCode(500, new { Message = "Failed to retrieve stall rules", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("stall")]
public async Task<IActionResult> CreateStallRule([FromBody] StallRuleDto ruleDto)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
await DataContext.Lock.WaitAsync();
try
{
var queueCleanerConfig = await _dataContext.QueueCleanerConfigs
.FirstAsync();
var existingRule = await _dataContext.StallRules
.FirstOrDefaultAsync(r => r.Name.ToLower() == ruleDto.Name.ToLower());
if (existingRule != null)
{
return BadRequest(new { Message = "A stall rule with this name already exists" });
}
var rule = new StallRule
{
Id = Guid.NewGuid(),
QueueCleanerConfigId = queueCleanerConfig.Id,
Name = ruleDto.Name.Trim(),
Enabled = ruleDto.Enabled,
MaxStrikes = ruleDto.MaxStrikes,
PrivacyType = ruleDto.PrivacyType,
MinCompletionPercentage = ruleDto.MinCompletionPercentage,
MaxCompletionPercentage = ruleDto.MaxCompletionPercentage,
ResetStrikesOnProgress = ruleDto.ResetStrikesOnProgress,
DeletePrivateTorrentsFromClient = ruleDto.DeletePrivateTorrentsFromClient,
MinimumProgress = ruleDto.MinimumProgress?.Trim(),
};
var existingRules = await _dataContext.StallRules.ToListAsync();
var intervalValidationResult = _ruleIntervalValidator.ValidateStallRuleIntervals(rule, existingRules);
if (!intervalValidationResult.IsValid)
{
return BadRequest(new { Message = intervalValidationResult.ErrorMessage });
}
rule.Validate();
_dataContext.StallRules.Add(rule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Created stall rule: {RuleName} with ID: {RuleId}", rule.Name, rule.Id);
return CreatedAtAction(nameof(GetStallRules), new { id = rule.Id }, rule);
}
catch (ValidationException ex)
{
_logger.LogWarning("Validation failed for stall rule creation: {Message}", ex.Message);
return BadRequest(new { Message = ex.Message });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create stall rule: {RuleName}", ruleDto.Name);
return StatusCode(500, new { Message = "Failed to create stall rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("stall/{id}")]
public async Task<IActionResult> UpdateStallRule(Guid id, [FromBody] StallRuleDto ruleDto)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
await DataContext.Lock.WaitAsync();
try
{
var existingRule = await _dataContext.StallRules
.FirstOrDefaultAsync(r => r.Id == id);
if (existingRule == null)
{
return NotFound(new { Message = $"Stall rule with ID {id} not found" });
}
var duplicateRule = await _dataContext.StallRules
.FirstOrDefaultAsync(r => r.Id != id && r.Name.ToLower() == ruleDto.Name.ToLower());
if (duplicateRule != null)
{
return BadRequest(new { Message = "A stall rule with this name already exists" });
}
var updatedRule = existingRule with
{
Name = ruleDto.Name.Trim(),
Enabled = ruleDto.Enabled,
MaxStrikes = ruleDto.MaxStrikes,
PrivacyType = ruleDto.PrivacyType,
MinCompletionPercentage = ruleDto.MinCompletionPercentage,
MaxCompletionPercentage = ruleDto.MaxCompletionPercentage,
ResetStrikesOnProgress = ruleDto.ResetStrikesOnProgress,
DeletePrivateTorrentsFromClient = ruleDto.DeletePrivateTorrentsFromClient,
MinimumProgress = ruleDto.MinimumProgress?.Trim(),
};
var existingRules = await _dataContext.StallRules
.Where(r => r.Id != id)
.ToListAsync();
var intervalValidationResult = _ruleIntervalValidator.ValidateStallRuleIntervals(updatedRule, existingRules);
if (!intervalValidationResult.IsValid)
{
return BadRequest(new { Message = intervalValidationResult.ErrorMessage });
}
updatedRule.Validate();
_dataContext.Entry(existingRule).CurrentValues.SetValues(updatedRule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Updated stall rule: {RuleName} with ID: {RuleId}", updatedRule.Name, id);
return Ok(updatedRule);
}
catch (ValidationException ex)
{
_logger.LogWarning("Validation failed for stall rule update: {Message}", ex.Message);
return BadRequest(new { Message = ex.Message });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update stall rule with ID: {RuleId}", id);
return StatusCode(500, new { Message = "Failed to update stall rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpDelete("stall/{id}")]
public async Task<IActionResult> DeleteStallRule(Guid id)
{
await DataContext.Lock.WaitAsync();
try
{
var existingRule = await _dataContext.StallRules
.FirstOrDefaultAsync(r => r.Id == id);
if (existingRule == null)
{
return NotFound(new { Message = $"Stall rule with ID {id} not found" });
}
_dataContext.StallRules.Remove(existingRule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Deleted stall rule: {RuleName} with ID: {RuleId}", existingRule.Name, id);
return NoContent();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to delete stall rule with ID: {RuleId}", id);
return StatusCode(500, new { Message = "Failed to delete stall rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpGet("slow")]
public async Task<IActionResult> GetSlowRules()
{
await DataContext.Lock.WaitAsync();
try
{
var rules = await _dataContext.SlowRules
.OrderBy(r => r.MinCompletionPercentage)
.ThenBy(r => r.Name)
.AsNoTracking()
.ToListAsync();
return Ok(rules);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to retrieve slow rules");
return StatusCode(500, new { Message = "Failed to retrieve slow rules", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPost("slow")]
public async Task<IActionResult> CreateSlowRule([FromBody] SlowRuleDto ruleDto)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
await DataContext.Lock.WaitAsync();
try
{
var queueCleanerConfig = await _dataContext.QueueCleanerConfigs
.FirstAsync();
var existingRule = await _dataContext.SlowRules
.FirstOrDefaultAsync(r => r.Name.ToLower() == ruleDto.Name.ToLower());
if (existingRule != null)
{
return BadRequest(new { Message = "A slow rule with this name already exists" });
}
var rule = new SlowRule
{
Id = Guid.NewGuid(),
QueueCleanerConfigId = queueCleanerConfig.Id,
Name = ruleDto.Name.Trim(),
Enabled = ruleDto.Enabled,
MaxStrikes = ruleDto.MaxStrikes,
PrivacyType = ruleDto.PrivacyType,
MinCompletionPercentage = ruleDto.MinCompletionPercentage,
MaxCompletionPercentage = ruleDto.MaxCompletionPercentage,
ResetStrikesOnProgress = ruleDto.ResetStrikesOnProgress,
MinSpeed = ruleDto.MinSpeed?.Trim() ?? string.Empty,
MaxTimeHours = ruleDto.MaxTimeHours,
IgnoreAboveSize = ruleDto.IgnoreAboveSize,
DeletePrivateTorrentsFromClient = ruleDto.DeletePrivateTorrentsFromClient,
};
var existingRules = await _dataContext.SlowRules.ToListAsync();
var intervalValidationResult = _ruleIntervalValidator.ValidateSlowRuleIntervals(rule, existingRules);
if (!intervalValidationResult.IsValid)
{
return BadRequest(new { Message = intervalValidationResult.ErrorMessage });
}
rule.Validate();
_dataContext.SlowRules.Add(rule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Created slow rule: {RuleName} with ID: {RuleId}", rule.Name, rule.Id);
return CreatedAtAction(nameof(GetSlowRules), new { id = rule.Id }, rule);
}
catch (ValidationException ex)
{
_logger.LogWarning("Validation failed for slow rule creation: {Message}", ex.Message);
return BadRequest(new { Message = ex.Message });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create slow rule: {RuleName}", ruleDto.Name);
return StatusCode(500, new { Message = "Failed to create slow rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpPut("slow/{id}")]
public async Task<IActionResult> UpdateSlowRule(Guid id, [FromBody] SlowRuleDto ruleDto)
{
if (!ModelState.IsValid)
{
return BadRequest(ModelState);
}
await DataContext.Lock.WaitAsync();
try
{
var existingRule = await _dataContext.SlowRules
.FirstOrDefaultAsync(r => r.Id == id);
if (existingRule == null)
{
return NotFound(new { Message = $"Slow rule with ID {id} not found" });
}
var duplicateRule = await _dataContext.SlowRules
.FirstOrDefaultAsync(r => r.Id != id && r.Name.ToLower() == ruleDto.Name.ToLower());
if (duplicateRule != null)
{
return BadRequest(new { Message = "A slow rule with this name already exists" });
}
var updatedRule = existingRule with
{
Name = ruleDto.Name.Trim(),
Enabled = ruleDto.Enabled,
MaxStrikes = ruleDto.MaxStrikes,
PrivacyType = ruleDto.PrivacyType,
MinCompletionPercentage = ruleDto.MinCompletionPercentage,
MaxCompletionPercentage = ruleDto.MaxCompletionPercentage,
ResetStrikesOnProgress = ruleDto.ResetStrikesOnProgress,
MinSpeed = ruleDto.MinSpeed?.Trim() ?? string.Empty,
MaxTimeHours = ruleDto.MaxTimeHours,
IgnoreAboveSize = ruleDto.IgnoreAboveSize,
DeletePrivateTorrentsFromClient = ruleDto.DeletePrivateTorrentsFromClient,
};
var existingRules = await _dataContext.SlowRules
.Where(r => r.Id != id)
.ToListAsync();
var intervalValidationResult = _ruleIntervalValidator.ValidateSlowRuleIntervals(updatedRule, existingRules);
if (!intervalValidationResult.IsValid)
{
return BadRequest(new { Message = intervalValidationResult.ErrorMessage });
}
updatedRule.Validate();
_dataContext.Entry(existingRule).CurrentValues.SetValues(updatedRule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Updated slow rule: {RuleName} with ID: {RuleId}", updatedRule.Name, id);
return Ok(updatedRule);
}
catch (ValidationException ex)
{
_logger.LogWarning("Validation failed for slow rule update: {Message}", ex.Message);
return BadRequest(new { Message = ex.Message });
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update slow rule with ID: {RuleId}", id);
return StatusCode(500, new { Message = "Failed to update slow rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
[HttpDelete("slow/{id}")]
public async Task<IActionResult> DeleteSlowRule(Guid id)
{
await DataContext.Lock.WaitAsync();
try
{
var existingRule = await _dataContext.SlowRules
.FirstOrDefaultAsync(r => r.Id == id);
if (existingRule == null)
{
return NotFound(new { Message = $"Slow rule with ID {id} not found" });
}
_dataContext.SlowRules.Remove(existingRule);
await _dataContext.SaveChangesAsync();
_logger.LogInformation("Deleted slow rule: {RuleName} with ID: {RuleId}", existingRule.Name, id);
return NoContent();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to delete slow rule with ID: {RuleId}", id);
return StatusCode(500, new { Message = "Failed to delete slow rule", Error = ex.Message });
}
finally
{
DataContext.Lock.Release();
}
}
}
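
Similarly, a hedged sketch of creating a stall rule through the controller above; the route and property names come from this diff, while the base URL and rule values are illustrative.

using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("http://localhost:11011") }; // port is an assumption

var stallRule = new
{
    Name = "Stalled public torrents",
    Enabled = true,
    MaxStrikes = 5,
    PrivacyType = "Public",            // assumes enum-as-string JSON binding; otherwise send the numeric enum value
    MinCompletionPercentage = 0,
    MaxCompletionPercentage = 100,
    ResetStrikesOnProgress = true,
    DeletePrivateTorrentsFromClient = false
};

HttpResponseMessage response = await client.PostAsJsonAsync("api/queue-rules/stall", stallRule);

// 201 Created on success; 400 when the name is already taken, the intervals overlap,
// or rule.Validate() fails.
Console.WriteLine((int)response.StatusCode);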

View File

@@ -0,0 +1,28 @@
using Microsoft.Extensions.Diagnostics.HealthChecks;
using System.Text;
namespace Cleanuparr.Api;
/// <summary>
/// Custom health check response writers for different formats
/// </summary>
public static class HealthCheckResponseWriter
{
/// <summary>
/// Writes a minimal plain text response suitable for Docker health checks
/// </summary>
public static async Task WriteMinimalPlaintext(HttpContext context, HealthReport report)
{
context.Response.ContentType = "text/plain";
var status = report.Status switch
{
HealthStatus.Healthy => "healthy",
HealthStatus.Degraded => "degraded",
HealthStatus.Unhealthy => "unhealthy",
_ => "unknown"
};
await context.Response.WriteAsync(status, Encoding.UTF8);
}
}
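
A small sketch of what a probe sees from the plaintext writer, assuming the /health mapping shown later in this diff and a local base URL.

using System;
using System.Net.Http;

using var client = new HttpClient { BaseAddress = new Uri("http://localhost:11011") }; // port is an assumption

HttpResponseMessage response = await client.GetAsync("/health");
string status = await response.Content.ReadAsStringAsync();

// By default a healthy or degraded report returns 200; an unhealthy report returns 503.
// The body is one of: healthy, degraded, unhealthy, unknown.
Console.WriteLine($"{(int)response.StatusCode}: {status}");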

View File

@@ -1,4 +1,7 @@
using System.Reflection;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Logging;
using Cleanuparr.Infrastructure.Services;
using Cleanuparr.Persistence;
using Microsoft.EntityFrameworkCore;
@@ -6,33 +9,61 @@ namespace Cleanuparr.Api;
public static class HostExtensions
{
public static async Task<IHost> Init(this WebApplication app)
public static IHost Init(this WebApplication app)
{
ILogger<Program> logger = app.Services.GetRequiredService<ILogger<Program>>();
AppStatusSnapshot statusSnapshot = app.Services.GetRequiredService<AppStatusSnapshot>();
Version? version = Assembly.GetExecutingAssembly().GetName().Version;
string? formattedVersion = FormatVersion(version);
if (statusSnapshot.UpdateCurrentVersion(formattedVersion, out _))
{
logger.LogDebug("App status current version set to {Version}", formattedVersion);
}
logger.LogInformation(
version is null
? "Cleanuparr version not detected"
: $"Cleanuparr v{version.Major}.{version.Minor}.{version.Build}"
: $"Cleanuparr {formattedVersion}"
);
logger.LogInformation("timezone: {tz}", TimeZoneInfo.Local.DisplayName);
// Apply db migrations
var eventsContext = app.Services.GetRequiredService<EventsContext>();
return app;
}
private static string? FormatVersion(Version? version)
{
if (version is null)
{
return null;
}
if (version.Build >= 0)
{
return $"v{version.Major}.{version.Minor}.{version.Build}";
}
return $"v{version.Major}.{version.Minor}";
}
public static async Task<WebApplicationBuilder> InitAsync(this WebApplicationBuilder builder)
{
// Apply events db migrations
await using var eventsContext = EventsContext.CreateStaticInstance();
if ((await eventsContext.Database.GetPendingMigrationsAsync()).Any())
{
await eventsContext.Database.MigrateAsync();
}
var configContext = app.Services.GetRequiredService<DataContext>();
// Apply data db migrations
await using var configContext = DataContext.CreateStaticInstance();
if ((await configContext.Database.GetPendingMigrationsAsync()).Any())
{
await configContext.Database.MigrateAsync();
}
return app;
return builder;
}
}
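
A standalone check that mirrors (rather than calls) the private FormatVersion helper above, showing the two output shapes it produces.

using System;

static string? FormatVersionLocal(Version? version)
{
    if (version is null) return null;
    return version.Build >= 0
        ? $"v{version.Major}.{version.Minor}.{version.Build}"
        : $"v{version.Major}.{version.Minor}";
}

Console.WriteLine(FormatVersionLocal(new Version(2, 3, 1))); // v2.3.1
Console.WriteLine(FormatVersionLocal(new Version(2, 3)));    // v2.3 (Build is -1)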

View File

@@ -1,13 +1,11 @@
using Cleanuparr.Application.Features.ContentBlocker;
using Cleanuparr.Application.Features.DownloadCleaner;
using Cleanuparr.Application.Features.QueueCleaner;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Infrastructure.Features.BlacklistSync;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.ContentBlocker;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Cleanuparr.Persistence.Models.Configuration.MalwareBlocker;
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;
using Cleanuparr.Shared.Helpers;
using Microsoft.EntityFrameworkCore;
using Quartz;
@@ -22,18 +20,18 @@ namespace Cleanuparr.Api.Jobs;
public class BackgroundJobManager : IHostedService
{
private readonly ISchedulerFactory _schedulerFactory;
private readonly DataContext _dataContext;
private readonly IServiceScopeFactory _scopeFactory;
private readonly ILogger<BackgroundJobManager> _logger;
private IScheduler? _scheduler;
public BackgroundJobManager(
ISchedulerFactory schedulerFactory,
DataContext dataContext,
IServiceScopeFactory scopeFactory,
ILogger<BackgroundJobManager> logger
)
{
_schedulerFactory = schedulerFactory;
_dataContext = dataContext;
_scopeFactory = scopeFactory;
_logger = logger;
}
@@ -45,12 +43,12 @@ public class BackgroundJobManager : IHostedService
{
try
{
_logger.LogInformation("Starting BackgroundJobManager");
_logger.LogDebug("Starting BackgroundJobManager");
_scheduler = await _schedulerFactory.GetScheduler(cancellationToken);
await InitializeJobsFromConfiguration(cancellationToken);
_logger.LogInformation("BackgroundJobManager started");
_logger.LogDebug("BackgroundJobManager started");
}
catch (Exception ex)
{
@@ -64,15 +62,15 @@ public class BackgroundJobManager : IHostedService
/// </summary>
public async Task StopAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Stopping BackgroundJobManager");
_logger.LogDebug("Stopping BackgroundJobManager");
if (_scheduler != null)
{
// Don't shutdown the scheduler as it's managed by QuartzHostedService
// Don't shut down the scheduler as it's managed by QuartzHostedService
await _scheduler.Standby(cancellationToken);
}
_logger.LogInformation("BackgroundJobManager stopped");
_logger.LogDebug("BackgroundJobManager stopped");
}
/// <summary>
@@ -86,21 +84,28 @@ public class BackgroundJobManager : IHostedService
throw new InvalidOperationException("Scheduler not initialized");
}
await using var scope = _scopeFactory.CreateAsyncScope();
await using var dataContext = scope.ServiceProvider.GetRequiredService<DataContext>();
// Get configurations from db
QueueCleanerConfig queueCleanerConfig = await _dataContext.QueueCleanerConfigs
QueueCleanerConfig queueCleanerConfig = await dataContext.QueueCleanerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
ContentBlockerConfig contentBlockerConfig = await _dataContext.ContentBlockerConfigs
ContentBlockerConfig malwareBlockerConfig = await dataContext.ContentBlockerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
DownloadCleanerConfig downloadCleanerConfig = await _dataContext.DownloadCleanerConfigs
DownloadCleanerConfig downloadCleanerConfig = await dataContext.DownloadCleanerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
BlacklistSyncConfig blacklistSyncConfig = await dataContext.BlacklistSyncConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
// Always register jobs, regardless of enabled status
await RegisterQueueCleanerJob(queueCleanerConfig, cancellationToken);
await RegisterContentBlockerJob(contentBlockerConfig, cancellationToken);
await RegisterMalwareBlockerJob(malwareBlockerConfig, cancellationToken);
await RegisterDownloadCleanerJob(downloadCleanerConfig, cancellationToken);
await RegisterBlacklistSyncJob(blacklistSyncConfig, cancellationToken);
}
/// <summary>
@@ -116,24 +121,24 @@ public class BackgroundJobManager : IHostedService
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<QueueCleaner>(config, config.CronExpression, cancellationToken);
await AddTriggersForJob<QueueCleaner>(config.CronExpression, cancellationToken);
}
}
/// <summary>
/// Registers the QueueCleaner job and optionally adds triggers based on configuration.
/// </summary>
public async Task RegisterContentBlockerJob(
public async Task RegisterMalwareBlockerJob(
ContentBlockerConfig config,
CancellationToken cancellationToken = default)
{
// Always register the job definition
await AddJobWithoutTrigger<ContentBlocker>(cancellationToken);
await AddJobWithoutTrigger<MalwareBlocker>(cancellationToken);
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<ContentBlocker>(config, config.CronExpression, cancellationToken);
await AddTriggersForJob<MalwareBlocker>(config.CronExpression, cancellationToken);
}
}
@@ -148,7 +153,21 @@ public class BackgroundJobManager : IHostedService
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<DownloadCleaner>(config, config.CronExpression, cancellationToken);
await AddTriggersForJob<DownloadCleaner>(config.CronExpression, cancellationToken);
}
}
/// <summary>
/// Registers the BlacklistSync job and optionally adds triggers based on general configuration.
/// </summary>
public async Task RegisterBlacklistSyncJob(BlacklistSyncConfig config, CancellationToken cancellationToken = default)
{
// Always register the job definition
await AddJobWithoutTrigger<BlacklistSynchronizer>(cancellationToken);
if (config.Enabled)
{
await AddTriggersForJob<BlacklistSynchronizer>(config.CronExpression, cancellationToken);
}
}
@@ -156,10 +175,9 @@ public class BackgroundJobManager : IHostedService
/// Helper method to add triggers for an existing job.
/// </summary>
private async Task AddTriggersForJob<T>(
IJobConfig config,
string cronExpression,
CancellationToken cancellationToken = default)
where T : GenericHandler
where T : IHandler
{
if (_scheduler == null)
{
@@ -175,7 +193,7 @@ public class BackgroundJobManager : IHostedService
IOperableTrigger triggerObj = (IOperableTrigger)TriggerBuilder.Create()
.WithIdentity("ValidationTrigger")
.StartNow()
.WithCronSchedule(cronExpression)
.WithCronSchedule(cronExpression, x => x.WithMisfireHandlingInstructionDoNothing())
.Build();
IReadOnlyList<DateTimeOffset> nextFireTimes = TriggerUtils.ComputeFireTimes(triggerObj, null, 2);
@@ -186,7 +204,7 @@ public class BackgroundJobManager : IHostedService
throw new ValidationException($"{cronExpression} should have a fire time of maximum {Constants.TriggerMaxLimit.TotalHours} hours");
}
if (typeof(T) != typeof(ContentBlocker) && triggerValue < Constants.TriggerMinLimit)
if (typeof(T) != typeof(MalwareBlocker) && triggerValue < Constants.TriggerMinLimit)
{
throw new ValidationException($"{cronExpression} should have a fire time of minimum {Constants.TriggerMinLimit.TotalSeconds} seconds");
}
@@ -197,26 +215,17 @@ public class BackgroundJobManager : IHostedService
}
}
// Create cron trigger
// Create main cron trigger with consistent naming (matches JobManagementService)
var trigger = TriggerBuilder.Create()
.WithIdentity($"{typeName}-trigger")
.ForJob(jobKey)
.WithCronSchedule(cronExpression, x => x.WithMisfireHandlingInstructionDoNothing())
.StartNow()
.Build();
// Create startup trigger to run immediately
var startupTrigger = TriggerBuilder.Create()
.WithIdentity($"{typeName}-startup-trigger")
.ForJob(jobKey)
.StartNow()
.Build();
// Schedule job with both triggers
// Schedule the main trigger
await _scheduler.ScheduleJob(trigger, cancellationToken);
await _scheduler.ScheduleJob(startupTrigger, cancellationToken);
_logger.LogInformation("Added triggers for job {name} with cron expression {CronExpression}",
_logger.LogInformation("Added trigger for job {name} with cron expression {CronExpression}",
typeName, cronExpression);
}
@@ -224,7 +233,7 @@ public class BackgroundJobManager : IHostedService
/// Helper method to add a job without a trigger (for chained jobs).
/// </summary>
private async Task AddJobWithoutTrigger<T>(CancellationToken cancellationToken = default)
where T : GenericHandler
where T : IHandler
{
if (_scheduler == null)
{
@@ -250,6 +259,6 @@ public class BackgroundJobManager : IHostedService
// Add job to scheduler
await _scheduler.AddJob(jobDetail, true, cancellationToken);
_logger.LogInformation("Registered job {name} without trigger", typeName);
_logger.LogDebug("Registered job {name} without trigger", typeName);
}
}
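
A standalone sketch of the fire-time check performed in AddTriggersForJob above, using the QueueCleaner default cron expression from this diff; the Constants limits themselves are not reproduced here.

using System;
using System.Collections.Generic;
using Quartz;
using Quartz.Spi;

var trigger = (IOperableTrigger)TriggerBuilder.Create()
    .WithIdentity("ValidationTrigger")
    .StartNow()
    .WithCronSchedule("0 0/5 * * * ?", x => x.WithMisfireHandlingInstructionDoNothing())
    .Build();

IReadOnlyList<DateTimeOffset> fireTimes = TriggerUtils.ComputeFireTimes(trigger, null, 2);
TimeSpan interval = fireTimes[1] - fireTimes[0];

// The manager rejects expressions whose interval exceeds Constants.TriggerMaxLimit
// or (for anything but MalwareBlocker) falls below Constants.TriggerMinLimit.
Console.WriteLine(interval); // 00:05:00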

View File

@@ -1,4 +1,8 @@
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Infrastructure.Services.Interfaces;
using Microsoft.AspNetCore.SignalR;
using Quartz;
using Serilog.Context;
@@ -9,12 +13,12 @@ public sealed class GenericJob<T> : IJob
where T : IHandler
{
private readonly ILogger<GenericJob<T>> _logger;
private readonly T _handler;
public GenericJob(ILogger<GenericJob<T>> logger, T handler)
private readonly IServiceScopeFactory _scopeFactory;
public GenericJob(ILogger<GenericJob<T>> logger, IServiceScopeFactory scopeFactory)
{
_logger = logger;
_handler = handler;
_scopeFactory = scopeFactory;
}
public async Task Execute(IJobExecutionContext context)
@@ -23,11 +27,40 @@ public sealed class GenericJob<T> : IJob
try
{
await _handler.ExecuteAsync();
await using var scope = _scopeFactory.CreateAsyncScope();
var hubContext = scope.ServiceProvider.GetRequiredService<IHubContext<AppHub>>();
var jobManagementService = scope.ServiceProvider.GetRequiredService<IJobManagementService>();
await BroadcastJobStatus(hubContext, jobManagementService, false);
var handler = scope.ServiceProvider.GetRequiredService<T>();
await handler.ExecuteAsync();
await BroadcastJobStatus(hubContext, jobManagementService, true);
}
catch (Exception ex)
{
_logger.LogError(ex, "{name} failed", typeof(T).Name);
}
}
private async Task BroadcastJobStatus(IHubContext<AppHub> hubContext, IJobManagementService jobManagementService, bool isFinished)
{
try
{
JobType jobType = Enum.Parse<JobType>(typeof(T).Name);
JobInfo jobInfo = await jobManagementService.GetJob(jobType);
if (isFinished)
{
jobInfo.Status = "Scheduled";
}
await hubContext.Clients.All.SendAsync("JobStatusUpdate", jobInfo);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to broadcast job status update");
}
}
}

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using CreateAppriseProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateAppriseProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateAppriseProviderRequest instead.")]
public sealed record CreateAppriseProviderDto : CreateAppriseProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using CreateNotifiarrProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateNotifiarrProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateNotifiarrProviderRequest instead.")]
public sealed record CreateNotifiarrProviderDto : CreateNotifiarrProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using CreateNtfyProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateNtfyProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.CreateNtfyProviderRequest instead.")]
public sealed record CreateNtfyProviderDto : CreateNtfyProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using TestAppriseProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestAppriseProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestAppriseProviderRequest instead.")]
public sealed record TestAppriseProviderDto : TestAppriseProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using TestNotifiarrProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestNotifiarrProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestNotifiarrProviderRequest instead.")]
public sealed record TestNotifiarrProviderDto : TestNotifiarrProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using TestNtfyProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestNtfyProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.TestNtfyProviderRequest instead.")]
public sealed record TestNtfyProviderDto : TestNtfyProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using UpdateAppriseProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateAppriseProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateAppriseProviderRequest instead.")]
public sealed record UpdateAppriseProviderDto : UpdateAppriseProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using UpdateNotifiarrProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateNotifiarrProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateNotifiarrProviderRequest instead.")]
public sealed record UpdateNotifiarrProviderDto : UpdateNotifiarrProviderRequest;

View File

@@ -0,0 +1,8 @@
using System;
namespace Cleanuparr.Api.Models.NotificationProviders;
using UpdateNtfyProviderRequest = Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateNtfyProviderRequest;
[Obsolete("Use Cleanuparr.Api.Features.Notifications.Contracts.Requests.UpdateNtfyProviderRequest instead.")]
public sealed record UpdateNtfyProviderDto : UpdateNtfyProviderRequest;

View File

@@ -0,0 +1 @@
// Moved to Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests

View File

@@ -0,0 +1 @@
// Moved to Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests

View File

@@ -0,0 +1 @@
// Moved to Cleanuparr.Api.Features.QueueCleaner.Contracts.Requests

View File

@@ -1,53 +1,23 @@
using System.ComponentModel.DataAnnotations;
using System.Diagnostics.CodeAnalysis;
using Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests;
namespace Cleanuparr.Api.Models;
public class UpdateDownloadCleanerConfigDto
{
public bool Enabled { get; set; }
/// <summary>
/// Legacy namespace shim; prefer <see cref="UpdateDownloadCleanerConfigRequest"/> from
/// <c>Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests</c>.
/// </summary>
[Obsolete("Use Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests.UpdateDownloadCleanerConfigRequest instead")]
[SuppressMessage("Design", "CA1000", Justification = "Temporary alias during refactor")]
[SuppressMessage("Usage", "CA2225", Justification = "Alias type")]
public record UpdateDownloadCleanerConfigDto : UpdateDownloadCleanerConfigRequest;
public string CronExpression { get; set; } = "0 0 * * * ?";
/// <summary>
/// Indicates whether to use the CronExpression directly or convert from a user-friendly schedule
/// </summary>
public bool UseAdvancedScheduling { get; set; }
public List<CleanCategoryDto> Categories { get; set; } = [];
public bool DeletePrivate { get; set; }
/// <summary>
/// Indicates whether unlinked download handling is enabled
/// </summary>
public bool UnlinkedEnabled { get; set; } = false;
public string UnlinkedTargetCategory { get; set; } = "cleanuparr-unlinked";
public bool UnlinkedUseTag { get; set; }
public string UnlinkedIgnoredRootDir { get; set; } = string.Empty;
public List<string> UnlinkedCategories { get; set; } = [];
}
public class CleanCategoryDto
{
[Required]
public string Name { get; set; } = string.Empty;
/// <summary>
/// Max ratio before removing a download.
/// </summary>
public double MaxRatio { get; set; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
public double MinSeedTime { get; set; }
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
public double MaxSeedTime { get; set; } = -1;
}
/// <summary>
/// Legacy namespace shim; prefer <see cref="CleanCategoryRequest"/> from
/// <c>Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests</c>.
/// </summary>
[Obsolete("Use Cleanuparr.Api.Features.DownloadCleaner.Contracts.Requests.CleanCategoryRequest instead")]
[SuppressMessage("Design", "CA1000", Justification = "Temporary alias during refactor")]
[SuppressMessage("Usage", "CA2225", Justification = "Alias type")]
public record CleanCategoryDto : CleanCategoryRequest;

View File

@@ -2,12 +2,18 @@ using System.Runtime.InteropServices;
using System.Text.Json.Serialization;
using Cleanuparr.Api;
using Cleanuparr.Api.DependencyInjection;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Logging;
using Cleanuparr.Shared.Helpers;
using Microsoft.AspNetCore.Diagnostics.HealthChecks;
using Microsoft.AspNetCore.SignalR;
using Serilog;
var builder = WebApplication.CreateBuilder(args);
await builder.InitAsync();
builder.Logging.AddLogging();
// Fix paths for single-file deployment on macOS
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
@@ -44,6 +50,7 @@ builder.Services.AddResponseCompression(options =>
// Configure JSON options to serialize enums as strings
builder.Services.ConfigureHttpJsonOptions(options =>
{
options.SerializerOptions.PropertyNameCaseInsensitive = true;
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter());
});
@@ -66,14 +73,6 @@ builder.Services.AddCors(options =>
});
});
// Register services needed for logging first
builder.Services
.AddTransient<LoggingConfigManager>()
.AddSingleton<SignalRLogSink>();
// Add logging with proper service provider
builder.Logging.AddLogging();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
builder.Host.UseWindowsService(options =>
@@ -128,25 +127,30 @@ if (basePath is not null)
logger.LogInformation("Server configuration: PORT={port}, BASE_PATH={basePath}", port, basePath ?? "/");
// Initialize the host
await app.Init();
app.Init();
// Get LoggingConfigManager (will be created if not already registered)
var configManager = app.Services.GetRequiredService<LoggingConfigManager>();
// Get the dynamic level switch for controlling log levels
var levelSwitch = configManager.GetLevelSwitch();
// Get the SignalRLogSink instance
var signalRSink = app.Services.GetRequiredService<SignalRLogSink>();
// Configure the app hub for SignalR
var appHub = app.Services.GetRequiredService<IHubContext<AppHub>>();
SignalRLogSink.Instance.SetAppHubContext(appHub);
var logConfig = LoggingDI.GetDefaultLoggerConfiguration();
logConfig.MinimumLevel.ControlledBy(levelSwitch);
// Add to Serilog pipeline
logConfig.WriteTo.Sink(signalRSink);
// Configure health check endpoints before the API configuration
app.MapHealthChecks("/health", new HealthCheckOptions
{
Predicate = registration => registration.Tags.Contains("liveness"),
ResponseWriter = HealthCheckResponseWriter.WriteMinimalPlaintext
});
Log.Logger = logConfig.CreateLogger();
app.MapHealthChecks("/health/ready", new HealthCheckOptions
{
Predicate = registration => registration.Tags.Contains("readiness"),
ResponseWriter = HealthCheckResponseWriter.WriteMinimalPlaintext
});
app.ConfigureApi();
await app.RunAsync();
await app.RunAsync();
await Log.CloseAndFlushAsync();
// Make Program class accessible for testing
public partial class Program { }

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Cleanuparr.Api.Tests")]

View File

@@ -1,20 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Domain\Cleanuparr.Domain.csproj" />
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
<ProjectReference Include="..\Cleanuparr.Persistence\Cleanuparr.Persistence.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MassTransit" Version="8.4.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.6" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,162 @@
using System.Security.Cryptography;
using System.Text;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadClient.QBittorrent;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.BlacklistSync;
using Cleanuparr.Persistence.Models.State;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Cleanuparr.Application.Features.BlacklistSync;
public sealed class BlacklistSynchronizer : IHandler
{
private readonly ILogger<BlacklistSynchronizer> _logger;
private readonly DataContext _dataContext;
private readonly DownloadServiceFactory _downloadServiceFactory;
private readonly FileReader _fileReader;
private readonly IDryRunInterceptor _dryRunInterceptor;
public BlacklistSynchronizer(
ILogger<BlacklistSynchronizer> logger,
DataContext dataContext,
DownloadServiceFactory downloadServiceFactory,
FileReader fileReader,
IDryRunInterceptor dryRunInterceptor
)
{
_logger = logger;
_dataContext = dataContext;
_downloadServiceFactory = downloadServiceFactory;
_fileReader = fileReader;
_dryRunInterceptor = dryRunInterceptor;
}
public async Task ExecuteAsync()
{
BlacklistSyncConfig config = await _dataContext.BlacklistSyncConfigs
.AsNoTracking()
.FirstAsync();
if (!config.Enabled)
{
_logger.LogDebug("Blacklist sync is disabled");
return;
}
if (string.IsNullOrWhiteSpace(config.BlacklistPath))
{
_logger.LogWarning("Blacklist sync path is not configured");
return;
}
string[] patterns = await _fileReader.ReadContentAsync(config.BlacklistPath);
string excludedFileNames = string.Join('\n', patterns.Where(p => !string.IsNullOrWhiteSpace(p)));
string currentHash = ComputeHash(excludedFileNames);
await _dryRunInterceptor.InterceptAsync(SyncBlacklist, currentHash, excludedFileNames);
await _dryRunInterceptor.InterceptAsync(RemoveOldSyncDataAsync, currentHash);
_logger.LogDebug("Blacklist synchronization completed");
}
private async Task SyncBlacklist(string currentHash, string excludedFileNames)
{
List<DownloadClientConfig> qBittorrentClients = await _dataContext.DownloadClients
.AsNoTracking()
.Where(c => c.Enabled && c.TypeName == DownloadClientTypeName.qBittorrent)
.ToListAsync();
if (qBittorrentClients.Count is 0)
{
_logger.LogDebug("No enabled qBittorrent clients found for blacklist sync");
return;
}
_logger.LogDebug("Starting blacklist synchronization for {Count} qBittorrent clients", qBittorrentClients.Count);
// Pull existing sync history for this hash
var alreadySynced = await _dataContext.BlacklistSyncHistory
.AsNoTracking()
.Where(s => s.Hash == currentHash)
.Select(x => x.DownloadClientId)
.ToListAsync();
// Only update clients not present in history for current hash
foreach (var clientConfig in qBittorrentClients)
{
try
{
if (alreadySynced.Contains(clientConfig.Id))
{
_logger.LogDebug("Client {ClientName} already synced for current blacklist, skipping", clientConfig.Name);
continue;
}
var downloadService = _downloadServiceFactory.GetDownloadService(clientConfig);
if (downloadService is not QBitService qBitService)
{
_logger.LogError("Expected QBitService but got {ServiceType} for client {ClientName}", downloadService.GetType().Name, clientConfig.Name);
continue;
}
try
{
await qBitService.LoginAsync();
await qBitService.UpdateBlacklistAsync(excludedFileNames);
_logger.LogDebug("Successfully updated blacklist for qBittorrent client {ClientName}", clientConfig.Name);
// Insert history row marking this client as synced for current hash
_dataContext.BlacklistSyncHistory.Add(new BlacklistSyncHistory
{
Hash = currentHash,
DownloadClientId = clientConfig.Id
});
await _dataContext.SaveChangesAsync();
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update blacklist for qBittorrent client {ClientName}", clientConfig.Name);
}
finally
{
qBitService.Dispose();
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create download service for client {ClientName}", clientConfig.Name);
}
}
}
private static string ComputeHash(string excludedFileNames)
{
using var sha = SHA256.Create();
byte[] bytes = Encoding.UTF8.GetBytes(excludedFileNames);
byte[] hash = sha.ComputeHash(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private async Task RemoveOldSyncDataAsync(string currentHash)
{
try
{
await _dataContext.BlacklistSyncHistory
.Where(s => s.Hash != currentHash)
.ExecuteDeleteAsync();
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to cleanup old blacklist sync history");
}
}
}
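
A standalone sketch equivalent to the ComputeHash path above: blank lines are dropped before hashing, so an unchanged blacklist file produces the same hash on every run and already-synced clients are skipped; the patterns here are illustrative.

using System;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

string[] patterns = { "*.lnk", "", "  ", "*.zipx" }; // example file contents; blank entries are ignored

string joined = string.Join('\n', patterns.Where(p => !string.IsNullOrWhiteSpace(p)));
string hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(joined))).ToLowerInvariant();

Console.WriteLine(hash); // stable as long as the non-blank patterns do not change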

View File

@@ -1,66 +0,0 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
namespace Cleanuparr.Application.Features.DownloadClient.Dtos;
/// <summary>
/// DTO for creating a new download client (without ID)
/// </summary>
public sealed record CreateDownloadClientDto
{
/// <summary>
/// Whether this client is enabled
/// </summary>
public bool Enabled { get; init; } = false;
/// <summary>
/// Friendly name for this client
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Type name of download client
/// </summary>
public required DownloadClientTypeName TypeName { get; init; }
/// <summary>
/// Type of download client
/// </summary>
public required DownloadClientType Type { get; init; }
/// <summary>
/// Host address for the download client
/// </summary>
public Uri? Host { get; init; }
/// <summary>
/// Username for authentication
/// </summary>
public string? Username { get; init; }
/// <summary>
/// Password for authentication
/// </summary>
public string? Password { get; init; }
/// <summary>
/// The base URL path component, used by clients like Transmission and Deluge
/// </summary>
public string? UrlBase { get; init; }
/// <summary>
/// Validates the configuration
/// </summary>
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
{
throw new ValidationException("Host cannot be empty");
}
}
}

View File

@@ -7,7 +7,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,6 @@
namespace Cleanuparr.Domain.Entities.AppStatus;
public sealed record Status
{
public string? Version { get; set; }
}

View File

@@ -1,4 +1,4 @@
namespace Data.Models.Arr.Queue;
namespace Cleanuparr.Domain.Entities.Arr.Queue;
public record Image
{

View File

@@ -1,4 +1,4 @@
namespace Data.Models.Arr.Queue;
namespace Cleanuparr.Domain.Entities.Arr.Queue;
public record LidarrImage
{

View File

@@ -1,4 +1,4 @@
namespace Data.Models.Arr.Queue;
namespace Cleanuparr.Domain.Entities.Arr.Queue;
public sealed record QueueAlbum
{

Some files were not shown because too many files have changed in this diff.