Compare commits

...

75 Commits

Author SHA1 Message Date
Flaminel
5a0ef56074 Remove empty clean categories list validation (#131) 2025-05-07 14:12:36 +03:00
Flaminel
09bd4321fb fixed readme icons 2025-05-06 19:59:00 +03:00
Flaminel
4939e37210 updated discord invite 2025-05-06 15:57:12 +03:00
Flaminel
9463d7587f Add support for unstrusted certificates (#128) 2025-05-06 15:42:41 +03:00
Flaminel
7d2bf41bec updated readme to mention Huntarr 2025-05-06 15:35:37 +03:00
Flaminel
93bb8cc18d updated README 2025-05-05 12:25:22 +03:00
Flaminel
449d9e623f fixed missing config variables 2025-05-05 12:25:09 +03:00
Flaminel
3a50d9be3c updated docs 2025-05-05 00:35:10 +03:00
Flaminel
693f80fe6a Add category change for downloads with no additional hardlinks (#65) 2025-05-04 17:26:51 +03:00
Flaminel
8cfc73213a Add separate strikes for downloading metadata (#104) 2025-05-04 17:11:38 +03:00
Flaminel
6fbae768a4 removed test stuff 2025-05-04 15:24:31 +03:00
Flaminel
8e9d0127e0 removed docs homepage features 2025-05-04 15:23:01 +03:00
Flaminel
b92d70769a Add documentation (#125) 2025-05-04 15:21:41 +03:00
Flaminel
75b001cf6a Add Apprise support (#124) 2025-05-01 21:00:01 +03:00
Flaminel
479ca7884e Fix crashing when tracker url is malformed (#121) 2025-04-28 16:48:54 +03:00
Flaminel
00d8910118 Update README.md 2025-04-12 23:43:44 +03:00
Flaminel
bd28c7ab05 Fix missing notifications for new strike types (#112) 2025-04-08 22:20:51 +03:00
Flaminel
720279df65 Update README.md 2025-04-08 18:14:01 +03:00
Flaminel
2d4ec648b8 Update README.md 2025-04-06 18:10:46 +03:00
Flaminel
704fdaca4a Add cleanup for slow downloads (#110) 2025-04-06 13:28:05 +03:00
Flaminel
b134136e51 Update README.md 2025-03-29 01:11:41 +02:00
Flaminel
5ca717d7e0 Update README.md 2025-03-27 19:53:57 +02:00
Flaminel
7068ee5e5a Update README.md 2025-03-26 13:30:55 +02:00
Flaminel
9f770473e5 Remove Transmission downloads cache (#105) 2025-03-26 00:26:10 +02:00
Flaminel
5fe0f5750a Fix qBit queued items being processed (#102) 2025-03-21 23:06:31 +02:00
Flaminel
b8ce225ccc Fix Deluge service crashing when download is not found (#97) 2025-03-20 00:09:58 +02:00
Flaminel
f21f7388b7 Add download client customizable url base (#43) 2025-03-20 00:09:24 +02:00
Flaminel
a1354f231a Add base path support for arrs (#96) 2025-03-20 00:08:51 +02:00
Flaminel
4bc1c33e81 Add option to explicitly disable the download client (#93) 2025-03-19 16:02:46 +02:00
Flaminel
32bcbab523 added docs for FreeBSD 2025-03-19 01:26:04 +02:00
Flaminel
b94ae21e11 update permissive blacklist 2025-03-13 10:16:52 +02:00
Flaminel
a92ebd75c2 Update docs (#88) 2025-03-11 23:42:21 +02:00
Flaminel
e6d3929fc9 Restrict max strikes to a minimum value (#87) 2025-03-11 23:35:07 +02:00
Flaminel
a68e13af35 Fix notifications when poster is not found (#89) 2025-03-11 23:34:44 +02:00
Flaminel
324c3ace8f Fix multiple runs on queue cleaner when download cleaner is enabled (#90) 2025-03-11 23:34:27 +02:00
Flaminel
3a9d5d9085 Fix patterns being loaded for disabled arrs (#80) 2025-03-11 23:18:34 +02:00
Flaminel
89a6eaf0ce Disable cleanup on torrent items if download client is not configured (#85) 2025-03-10 00:13:40 +02:00
Flaminel
027c4a0f4d Add option to ignore specific downloads (#79) 2025-03-09 23:38:27 +02:00
Flaminel
81990c6768 fixed missing README link 2025-03-03 22:37:22 +02:00
Flaminel
ba02aa0e49 Fix notifications failing when poster image is not set (#78) 2025-03-02 22:48:21 +02:00
Flaminel
5adbdbd920 Fix weird time zone display name on startup (#70) 2025-02-25 21:32:19 +02:00
Flaminel
b3b211d956 Add configurable time zone (#69) 2025-02-24 23:21:44 +02:00
Flaminel
279bd6d82d Fix Deluge timeout not being configurable (#68) 2025-02-24 18:32:44 +02:00
Flaminel
5dced28228 fixed errors on download cleaner when download client is none (#67) 2025-02-24 12:43:06 +02:00
Flaminel
51bdaf64e4 Fix interceptor memory leaks (#66) 2025-02-23 17:50:08 +02:00
Flaminel
9c8e0ebedc updated README 2025-02-18 13:16:49 +02:00
Flaminel
e1bea8a8c8 updated README 2025-02-17 23:59:36 +02:00
Marius Nechifor
a6d3820104 Improve Transmission category detection (#62) 2025-02-17 02:48:27 +02:00
Flaminel
36c793a5fb updated chart values 2025-02-16 17:43:35 +02:00
Flaminel
aade8a91c3 fixed dummy download service 2025-02-16 12:17:31 +02:00
Flaminel
3fe7c3de1a added null check for torrent properties 2025-02-16 03:37:50 +02:00
Marius Nechifor
596a5aed8d Add download cleaner and dry run (#58) 2025-02-16 03:20:00 +02:00
Marius Nechifor
19b3675701 Add Notifiarr support (#52) 2025-02-16 03:17:54 +02:00
Flaminel
1713d0fd1e updated README 2025-02-03 23:05:31 +02:00
Flaminel
3a95a302c0 updated issue templates 2025-02-03 20:40:28 +02:00
Marius Nechifor
e738ba2334 Fix queue items with no title not being processed (#54) 2025-02-02 18:20:42 +02:00
Marius Nechifor
c813215f3e Add more Lidarr checks for failed imports (#48) 2025-01-28 19:10:07 +02:00
Flaminel
0f63a2d271 updated README 2025-01-26 01:36:08 +02:00
Marius Nechifor
133c34de53 Add option to reset stalled strikes on download progress (#50) 2025-01-25 03:27:40 +02:00
Flaminel
a3ca735b12 updated deployment 2025-01-25 01:18:03 +02:00
Marius Nechifor
519ab6a0cd Fix strike defaults (#49) 2025-01-22 22:18:31 +02:00
Marius Nechifor
0c691a540a Add missing failed import status (#47) 2025-01-21 00:14:55 +02:00
Marius Nechifor
209f78717f Fix usenet usage (#46) 2025-01-18 19:12:28 +02:00
Flaminel
a02be80ac1 updated README 2025-01-18 17:25:15 +02:00
Marius Nechifor
8a8b906b6f Add option to not remove private downloads from the download client (#45) 2025-01-18 17:20:23 +02:00
Marius Nechifor
b88ddde417 Fix content blocker env var usage (#44) 2025-01-18 16:23:34 +02:00
Flaminel
666c2656ec added svg logo 2025-01-17 22:11:05 +02:00
Marius Nechifor
7786776ed8 Fix logging template (#42) 2025-01-16 11:55:38 +02:00
Flaminel
2c60b38edf fixed README ports 2025-01-16 00:10:02 +02:00
Marius Nechifor
922f586706 Add Lidarr support (#30) 2025-01-15 23:55:34 +02:00
Marius Nechifor
2bc8e445ce Add configurable number of retries and timeout for http calls (#40) 2025-01-14 22:58:03 +02:00
Marius Nechifor
058507ac39 Add option to ignore private downloads when blocking files (#39) 2025-01-13 15:15:58 +02:00
Marius Nechifor
f0dc51f10b Improve stalled and failed imports (#37) 2025-01-13 13:18:58 +02:00
Flaminel
c7ad1c5ee6 fixed README typo 2025-01-11 01:45:45 +02:00
Marius Nechifor
d7913ae2b8 Add option to not use a download client (#35) 2025-01-11 01:45:12 +02:00
255 changed files with 35290 additions and 759 deletions

1
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1 @@
github: Flaminel


@@ -1,6 +1,6 @@
name: Bug report
description: File a bug report if something is not working right.
title: "[BUG]: "
title: "[BUG] "
labels: ["bug"]
body:
- type: markdown
@@ -14,8 +14,12 @@ body:
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/flmorg/cleanuperr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true
- label: Enabled verbose logging.
required: true
- type: textarea
id: what-happened
attributes:
@@ -23,14 +27,6 @@ body:
description: If applicable, mention what you expected to happen.
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: What version of our software are you running?
placeholder: e.g. 1.3.0 or latest
validations:
required: true
- type: dropdown
id: os
attributes:
@@ -40,6 +36,7 @@ body:
- Windows
- Linux
- MacOS
- Unraid
validations:
required: true
- type: dropdown


@@ -1,6 +1,6 @@
name: Feature request
description: File a feature request.
title: "[FEATURE]: "
title: "[FEATURE] "
labels: ["enhancement"]
body:
- type: markdown


@@ -1,12 +1,25 @@
name: Help request
description: Ask a question to receive help.
title: "[HELP]: "
title: "[HELP] "
labels: ["question"]
body:
- type: markdown
attributes:
value: |
If you are experiencing unexpected behavior, please consider submitting a bug report instead.
- type: checkboxes
id: init
attributes:
label: "Before submitting a help request, I have:"
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/flmorg/cleanuperr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true
- label: Enabled verbose logging.
required: true
- type: textarea
id: description
attributes:

49
.github/workflows/docs.yml vendored Normal file

@@ -0,0 +1,49 @@
name: Deploy Docusaurus to GitHub Pages
on:
push:
branches: [main]
paths:
- 'docs/**'
permissions:
contents: read
pages: write
id-token: write
jobs:
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: yarn
cache-dependency-path: docs/yarn.lock
- name: Install dependencies
working-directory: docs
run: yarn install --frozen-lockfile
- name: Build Docusaurus
working-directory: docs
run: yarn build
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: docs/build
retention-days: 1
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

1
.gitignore vendored

@@ -105,7 +105,6 @@ _NCrunch_*
_TeamCity*
# Sonarr
config.xml
nzbdrone.log*txt
UpdateLogs/
*workspace.xml

10
Logo/cleanuperr.svg Normal file

File diff suppressed because one or more lines are too long


230
README.md

@@ -1,219 +1,59 @@
# <img width="24px" src="./Logo/256.png" alt="cleanuperr"></img> cleanuperr
_Love this project? Give it a ⭐️ and let others know!_
cleanuperr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, cleanuperr can also trigger a search to replace the deleted shows/movies.
# <img width="24px" src="./Logo/256.png" alt="cleanuperr"></img> Cleanuperr
cleanuperr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made cleanuperr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
[![Discord](https://img.shields.io/discord/1306721212587573389?color=7289DA&label=Discord&style=for-the-badge&logo=discord)](https://discord.gg/SCtMCgtsc4)
The tool supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
Cleanuperr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuperr can also trigger a search to replace the deleted shows/movies.
Refer to the [Environment variables](#Environment-variables) section for detailed configuration instructions and the [Setup](#Setup) section for an in-depth explanation of the cleanup process.
Cleanuperr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made Cleanuperr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
## Key features
- Marks unwanted files as skip/unwanted in the download client.
- Automatically strikes stalled or stuck downloads.
- Removes and blocks downloads that have reached the maximum number of strikes or are marked as unwanted by the download client or by cleanuperr, then triggers a search for the removed downloads.
> [!IMPORTANT]
> **Features:**
> - Strike system to mark downloads that are stalled or stuck downloading metadata.
> - Remove and block downloads that reached a maximum number of strikes.
> - Remove and block downloads that have a low download speed or high estimated completion time.
> - Remove downloads blocked by qBittorrent or by Cleanuperr's **content blocker**.
> - Trigger a search for downloads removed from the *arrs.
> - Clean up downloads that have been seeding for a certain amount of time.
> - Notify on strike or download removal.
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuperr.
## Important note
Cleanuperr supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
Only the **latest versions** of the following apps are supported, or earlier versions that have the same API as the latest version:
- qBittorrent
- Deluge
- Transmission
- Sonarr
- Radarr
## Quick Start
This tool is actively developed and still a work in progress. Join the Discord server if you want to reach out to me quickly (or just stay updated on new releases) so we can squash those pesky bugs together:
> [!NOTE]
>
> 1. **Docker (Recommended)**
> Pull the Docker image from `ghcr.io/flmorg/cleanuperr:latest`.
>
> 2. **Unraid (for Unraid users)**
> Use the Unraid Community App.
>
> 3. **Manual Installation (if you're not using Docker)**
> Go to [Windows](#windows), [Linux](#linux) or [MacOS](#macos).
> https://discord.gg/sWggpnmGNY
# Docs
# How it works
Docs can be found [here](https://flmorg.github.io/cleanuperr/).
1. **Content blocker** will:
- Run every 5 minutes (or configured cron).
- Process all items in the *arr queue.
- Find the corresponding item from the download client for each queue item.
- Mark the files that were found in the queue as **unwanted/skipped** if:
- They **are listed in the blacklist**, or
- They **are not included in the whitelist**.
2. **Queue cleaner** will:
- Run every 5 minutes (or configured cron).
- Process all items in the *arr queue.
- Check whether each queue item is **stalled (download speed is 0)**, **stuck in metadata downloading** or **failed to be imported**.
- If it is, the item receives a **strike** and will continue to accumulate strikes every time it meets any of these conditions.
- Check whether each queue item meets one of the following conditions in the download client:
- **Marked as completed, but 0 bytes have been downloaded** (due to files being blocked by qBittorrent or the **content blocker**).
- All associated files are marked as **unwanted/skipped**.
- If the item **DOES NOT** match the above criteria, it will be skipped.
- If the item **DOES** match the criteria or has received the **maximum number of strikes**:
- It will be removed from the *arr's queue and blocked.
- It will be deleted from the download client.
- A new search will be triggered for the *arr item.
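To make the strike flow above concrete, here is a minimal illustrative sketch of the bookkeeping (hypothetical names and wiring, not code from this repository):
```
using System;
using System.Collections.Generic;

// Hypothetical sketch of the strike bookkeeping described above.
public sealed class StrikeTracker
{
    private readonly Dictionary<string, int> _strikes = new(StringComparer.OrdinalIgnoreCase);
    private readonly int _maxStrikes;

    public StrikeTracker(int maxStrikes) => _maxStrikes = maxStrikes;

    // Called on each run for queue items that are stalled, stuck downloading
    // metadata, or failed to import; returns true when the item should be removed.
    public bool Strike(string downloadHash)
    {
        _strikes[downloadHash] = _strikes.TryGetValue(downloadHash, out int count) ? count + 1 : 1;

        // 0 mirrors the *_MAX_STRIKES settings, meaning "never remove".
        return _maxStrikes > 0 && _strikes[downloadHash] >= _maxStrikes;
    }

    // Strikes can be cleared again, e.g. when a download shows progress.
    public void Reset(string downloadHash) => _strikes.Remove(downloadHash);
}
```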
# <img style="vertical-align: middle;" width="24px" src="./Logo/256.png" alt="Cleanuperr"> <span style="vertical-align: middle;">Cleanuperr</span> <img src="https://raw.githubusercontent.com/FortAwesome/Font-Awesome/6.x/svgs/solid/x.svg" height="24px" width="30px" style="vertical-align: middle;"> <span style="vertical-align: middle;">Huntarr</span> <img style="vertical-align: middle;" width="24px" src="https://github.com/plexguide/Huntarr.io/blob/main/frontend/static/logo/512.png?raw=true" alt Huntarr></img>
# Setup
Think of **Cleanuperr** as the janitor of your server; it keeps your download queue spotless, removes clutter, and blocks malicious files. Now imagine combining that with **Huntarr**, the compulsive librarian who finds missing and upgradable media to complete your collection.
## Using qBittorrent's built-in feature (works only with qBittorrent)
While **Huntarr** fills in the blanks and improves what you already have, **Cleanuperr** makes sure that only clean downloads get through. If you're aiming for a reliable and self-sufficient setup, **Cleanuperr** and **Huntarr** will take your automated media stack to another level.
1. Go to qBittorrent -> Options -> Downloads -> make sure `Excluded file names` is checked, then paste one of the following exclusion lists:
- [blacklist](https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist), or
- [permissive blacklist](https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist_permissive), or
- create your own
2. qBittorrent will block files from being downloaded. In the case of malicious content, **nothing is downloaded and the torrent is marked as complete**.
3. Start **cleanuperr** with `QUEUECLEANER__ENABLED` set to `true`.
4. The **queue cleaner** will perform a cleanup process as described in the [How it works](#how-it-works) section.
## Using cleanuperr's blocklist (works with all supported download clients)
1. Set both `QUEUECLEANER__ENABLED` and `CONTENTBLOCKER__ENABLED` to `true` in your environment variables.
2. Configure and enable either a **blacklist** or a **whitelist** as described in the [Environment variables](#Environment-variables) section.
3. Once configured, cleanuperr will perform the following tasks:
- Execute the **content blocker** job, as explained in the [How it works](#how-it-works) section.
- Execute the **queue cleaner** job, as explained in the [How it works](#how-it-works) section.
## Usage
### Docker compose yaml
```
version: "3.3"
services:
cleanuperr:
volumes:
- ./cleanuperr/logs:/var/logs
environment:
- LOGGING__LOGLEVEL=Information
- LOGGING__FILE__ENABLED=false
- LOGGING__FILE__PATH=/var/logs/
- LOGGING__ENHANCED=true
- TRIGGERS__QUEUECLEANER=0 0/5 * * * ?
- TRIGGERS__CONTENTBLOCKER=0 0/5 * * * ?
- QUEUECLEANER__ENABLED=true
- QUEUECLEANER__RUNSEQUENTIALLY=true
- QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES=5
- QUEUECLEANER__STALLED_MAX_STRIKES=5
- CONTENTBLOCKER__ENABLED=true
- CONTENTBLOCKER__BLACKLIST__ENABLED=true
- CONTENTBLOCKER__BLACKLIST__PATH=https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
# OR
# - CONTENTBLOCKER__WHITELIST__ENABLED=true
# - CONTENTBLOCKER__WHITELIST__PATH=https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/whitelist
- DOWNLOAD_CLIENT=qBittorrent
- QBITTORRENT__URL=http://localhost:8080
- QBITTORRENT__USERNAME=user
- QBITTORRENT__PASSWORD=pass
# OR
# - DOWNLOAD_CLIENT=deluge
# - DELUGE__URL=http://localhost:8112
# - DELUGE__PASSWORD=testing
# OR
# - DOWNLOAD_CLIENT=transmission
# - TRANSMISSION__URL=http://localhost:9091
# - TRANSMISSION__USERNAME=test
# - TRANSMISSION__PASSWORD=testing
- SONARR__ENABLED=true
- SONARR__SEARCHTYPE=Episode
- SONARR__INSTANCES__0__URL=http://localhost:8989
- SONARR__INSTANCES__0__APIKEY=secret1
- SONARR__INSTANCES__1__URL=http://localhost:8990
- SONARR__INSTANCES__1__APIKEY=secret2
- RADARR__ENABLED=true
- RADARR__INSTANCES__0__URL=http://localhost:7878
- RADARR__INSTANCES__0__APIKEY=secret3
- RADARR__INSTANCES__1__URL=http://localhost:7879
- RADARR__INSTANCES__1__APIKEY=secret4
image: ghcr.io/flmorg/cleanuperr:latest
restart: unless-stopped
```
### Environment variables
| Variable | Required | Description | Default value |
|---|---|---|---|
| LOGGING__LOGLEVEL | No | Can be `Verbose`, `Debug`, `Information`, `Warning`, `Error` or `Fatal` | `Information` |
| LOGGING__FILE__ENABLED | No | Enable or disable logging to file | false |
| LOGGING__FILE__PATH | No | Directory where to save the log files | empty |
| LOGGING__ENHANCED | No | Enhance logs whenever possible<br>A more detailed description is provided [here](variables.md#LOGGING__ENHANCED) | true |
|||||
| TRIGGERS__QUEUECLEANER | Yes if queue cleaner is enabled | [Quartz cron trigger](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)<br>Can be a max of 6h interval<br>**Is ignored if `QUEUECLEANER__RUNSEQUENTIALLY=true` and `CONTENTBLOCKER__ENABLED=true`** | 0 0/5 * * * ? |
| TRIGGERS__CONTENTBLOCKER | Yes if content blocker is enabled | [Quartz cron trigger](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)<br>Can be a max of 6h interval | 0 0/5 * * * ? |
|||||
| QUEUECLEANER__ENABLED | No | Enable or disable the queue cleaner | true |
| QUEUECLEANER__RUNSEQUENTIALLY | No | If set to true, the queue cleaner will run after the content blocker instead of running in parallel, streamlining the cleaning process | true |
| QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES | No | After how many strikes should a failed import be removed<br>0 means never | 0 |
| QUEUECLEANER__STALLED_MAX_STRIKES | No | After how many strikes should a stalled download be removed<br>0 means never | 0 |
|||||
| CONTENTBLOCKER__ENABLED | No | Enable or disable the content blocker | false |
| CONTENTBLOCKER__BLACKLIST__ENABLED | Yes if content blocker is enabled and whitelist is not enabled | Enable or disable the blacklist | false |
| CONTENTBLOCKER__BLACKLIST__PATH | Yes if blacklist is enabled | Path to the blacklist (local file or url)<br>Needs to be json compatible | empty |
| CONTENTBLOCKER__WHITELIST__ENABLED | Yes if content blocker is enabled and blacklist is not enabled | Enable or disable the whitelist | false |
| CONTENTBLOCKER__WHITELIST__PATH | Yes if whitelist is enabled | Path to the whitelist (local file or url)<br>Needs to be json compatible | empty |
|||||
| DOWNLOAD_CLIENT | No | Download client that is used by *arrs<br>Can be `qbittorrent`, `deluge` or `transmission` | `qbittorrent` |
| QBITTORRENT__URL | No | qBittorrent instance url | http://localhost:8080 |
| QBITTORRENT__USERNAME | No | qBittorrent user | empty |
| QBITTORRENT__PASSWORD | No | qBittorrent password | empty |
|||||
| DELUGE__URL | No | Deluge instance url | http://localhost:8112 |
| DELUGE__PASSWORD | No | Deluge password | empty |
|||||
| TRANSMISSION__URL | No | Transmission instance url | http://localhost:9091 |
| TRANSMISSION__USERNAME | No | Transmission user | empty |
| TRANSMISSION__PASSWORD | No | Transmission password | empty |
|||||
| SONARR__ENABLED | No | Enable or disable Sonarr cleanup | true |
| SONARR__SEARCHTYPE | No | What to search for after removing a queue item<br>Can be `Episode`, `Season` or `Series` | `Episode` |
| SONARR__INSTANCES__0__URL | No | First Sonarr instance url | http://localhost:8989 |
| SONARR__INSTANCES__0__APIKEY | No | First Sonarr instance API key | empty |
|||||
| RADARR__ENABLED | No | Enable or disable Radarr cleanup | false |
| RADARR__INSTANCES__0__URL | No | First Radarr instance url | http://localhost:7878 |
| RADARR__INSTANCES__0__APIKEY | No | First Radarr instance API key | empty |
#
### To be noted
1. The blacklist and the whitelist cannot both be enabled at the same time.
2. The queue cleaner and content blocker can be enabled or disabled separately, if you want to run only one of them.
3. Only one download client can be enabled at a time. If you have more than one download client, you should deploy multiple instances of cleanuperr.
4. The blocklists (blacklist/whitelist) should have a single pattern on each line and support the following:
```
*example // file name ends with "example"
example* // file name starts with "example"
*example* // file name has "example" in the name
example // file name is exactly the word "example"
regex:<ANY_REGEX> // regex that needs to be marked at the start of the line with "regex:"
```
5. Multiple Sonarr/Radarr instances can be specified using this format, where `<NUMBER>` starts from 0:
```
SONARR__INSTANCES__<NUMBER>__URL
SONARR__INSTANCES__<NUMBER>__APIKEY
```
#
### Binaries (if you're not using Docker)
1. Download the binaries from [releases](https://github.com/flmorg/cleanuperr/releases).
2. Extract them from the zip file.
3. Edit **appsettings.json**. The paths from this json file correspond with the docker env vars, as described [above](#environment-variables).
### Run as a Windows Service
Check out this stackoverflow answer on how to do it: https://stackoverflow.com/a/15719678
<span style="font-size:24px"> ➡️ [**Huntarr**](https://github.com/plexguide/Huntarr.io) <span style="vertical-align: middle">![Huntarr](https://img.shields.io/github/stars/plexguide/Huntarr.io?style=social)</span></span>
# Credits
Special thanks for inspiration go to:
- [ThijmenGThN/swaparr](https://github.com/ThijmenGThN/swaparr)
- [ManiMatter/decluttarr](https://github.com/ManiMatter/decluttarr)
- [PaeyMoopy/sonarr-radarr-queue-cleaner](https://github.com/PaeyMoopy/sonarr-radarr-queue-cleaner)
- [Sonarr](https://github.com/Sonarr/Sonarr) & [Radarr](https://github.com/Radarr/Radarr) for the logo
- [Sonarr](https://github.com/Sonarr/Sonarr) & [Radarr](https://github.com/Radarr/Radarr)
# Buy me a coffee
If I made your life just a tiny bit easier, consider buying me a coffee!
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>


@@ -1,4 +1,5 @@
*.apk
*.arj
*.bat
*.bin
*.bmp


@@ -10,50 +10,89 @@ deployment:
repository: ghcr.io/flmorg/cleanuperr
tag: latest
env:
- name: DRY_RUN
value: "false"
- name: LOGGING__LOGLEVEL
value: Information
value: Verbose
- name: LOGGING__FILE__ENABLED
value: "true"
- name: LOGGING__FILE__PATH
value: /var/logs
- name: LOGGING__ENHANCED
value: "true"
- name: TRIGGERS__QUEUECLEANER
value: 0 0/5 * * * ?
- name: TRIGGERS__CONTENTBLOCKER
value: 0 0/5 * * * ?
- name: QUEUECLEANER__ENABLED
value: "true"
- name: QUEUECLEANER__RUNSEQUENTIALLY
value: "true"
- name: QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES
value: "3"
- name: QUEUECLEANER__IMPORT_FAILED_IGNORE_PRIVATE
value: "false"
- name: QUEUECLEANER__IMPORT_FAILED_DELETE_PRIVATE
value: "false"
- name: QUEUECLEANER__STALLED_MAX_STRIKES
value: "3"
- name: QUEUECLEANER__STALLED_IGNORE_PRIVATE
value: "false"
- name: QUEUECLEANER__STALLED_DELETE_PRIVATE
value: "false"
- name: CONTENTBLOCKER__ENABLED
value: "true"
- name: CONTENTBLOCKER__BLACKLIST__ENABLED
- name: CONTENTBLOCKER__IGNORE_PRIVATE
value: "true"
- name: CONTENTBLOCKER__BLACKLIST__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: CONTENTBLOCKER__DELETE_PRIVATE
value: "false"
- name: DOWNLOADCLEANER__ENABLED
value: "false"
- name: DOWNLOAD_CLIENT
value: qbittorrent
- name: QBITTORRENT__URL
value: http://service.qbittorrent-videos.svc.cluster.local
- name: SONARR__ENABLED
value: "true"
- name: SONARR__SEARCHTYPE
value: Episode
- name: SONARR__BLOCK__TYPE
value: blacklist
- name: SONARR__BLOCK__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: SONARR__INSTANCES__0__URL
value: http://service.sonarr-low-res.svc.cluster.local
- name: SONARR__INSTANCES__1__URL
value: http://service.sonarr-high-res.svc.cluster.local
- name: RADARR__ENABLED
value: "true"
- name: RADARR__BLOCK__TYPE
value: blacklist
- name: RADARR__BLOCK__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: RADARR__INSTANCES__0__URL
value: http://service.radarr-low-res.svc.cluster.local
- name: RADARR__INSTANCES__1__URL
value: http://service.radarr-high-res.svc.cluster.local
- name: NOTIFIARR__ON_IMPORT_FAILED_STRIKE
value: "true"
- name: NOTIFIARR__ON_STALLED_STRIKE
value: "true"
- name: NOTIFIARR__ON_QUEUE_ITEM_DELETED
value: "true"
- name: NOTIFIARR__ON_DOWNLOAD_CLEANED
value: "true"
- name: NOTIFIARR__CHANNEL_ID
value: "1340708411259748413"
envFromSecret:
- secretName: qbit-auth
envs:
@@ -73,6 +112,10 @@ deployment:
key: RDRL_API_KEY
- name: RADARR__INSTANCES__1__APIKEY
key: RDRH_API_KEY
- secretName: notifiarr-auth
envs:
- name: NOTIFIARR__API_KEY
key: API_KEY
resources:
requests:
cpu: 0m
@@ -112,4 +155,8 @@ vaultSecrets:
path: secrets/sonarr
templates:
SNRL_API_KEY: "{% .Secrets.low_api_key %}"
SNRH_API_KEY: "{% .Secrets.high_api_key %}"
SNRH_API_KEY: "{% .Secrets.high_api_key %}"
- name: notifiarr-auth
path: secrets/notifiarr
templates:
API_KEY: "{% .Secrets.passthrough_api_key %}"


@@ -0,0 +1,6 @@
namespace Common.Attributes;
[AttributeUsage(AttributeTargets.Method, Inherited = true)]
public class DryRunSafeguardAttribute : Attribute
{
}
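A hedged usage sketch for the marker attribute above; the method and dry-run wiring are hypothetical, since this diff does not show how the attribute is consumed (presumably via an interceptor honoring `DRY_RUN`):
```
using System.Threading.Tasks;
using Common.Attributes;

// Hypothetical consumer; not the actual cleanuperr service.
public class ExampleDownloadService
{
    public bool IsDryRun { get; init; }

    [DryRunSafeguard]
    public virtual Task DeleteDownloadAsync(string hash)
    {
        if (IsDryRun)
        {
            // destructive work is skipped when DRY_RUN=true
            return Task.CompletedTask;
        }

        // ... call the download client here ...
        return Task.CompletedTask;
    }
}
```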


@@ -7,7 +7,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="9.0.2" />
<PackageReference Include="Serilog" Version="4.2.0" />
</ItemGroup>


@@ -1,8 +1,19 @@
using Common.Configuration.ContentBlocker;
namespace Common.Configuration.Arr;
public abstract record ArrConfig
{
public required bool Enabled { get; init; }
public Block Block { get; init; } = new();
public required List<ArrInstance> Instances { get; init; }
}
public readonly record struct Block
{
public BlocklistType Type { get; init; }
public string? Path { get; init; }
}


@@ -0,0 +1,6 @@
namespace Common.Configuration.Arr;
public sealed record LidarrConfig : ArrConfig
{
public const string SectionName = "Lidarr";
}


@@ -1,4 +1,4 @@
namespace Domain.Enums;
namespace Common.Configuration.ContentBlocker;
public enum BlocklistType
{


@@ -1,40 +1,23 @@
namespace Common.Configuration.ContentBlocker;
using Microsoft.Extensions.Configuration;
public sealed record ContentBlockerConfig : IJobConfig
namespace Common.Configuration.ContentBlocker;
public sealed record ContentBlockerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "ContentBlocker";
public required bool Enabled { get; init; }
public PatternConfig? Blacklist { get; init; }
[ConfigurationKeyName("IGNORE_PRIVATE")]
public bool IgnorePrivate { get; init; }
public PatternConfig? Whitelist { get; init; }
[ConfigurationKeyName("DELETE_PRIVATE")]
public bool DeletePrivate { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
public void Validate()
{
if (!Enabled)
{
return;
}
if (Blacklist is null && Whitelist is null)
{
throw new Exception("content blocker is enabled, but both blacklist and whitelist are missing");
}
if (Blacklist?.Enabled is true && Whitelist?.Enabled is true)
{
throw new Exception("only one exclusion (blacklist/whitelist) list is allowed");
}
if (Blacklist?.Enabled is true && string.IsNullOrEmpty(Blacklist.Path))
{
throw new Exception("blacklist path is required");
}
if (Whitelist?.Enabled is true && string.IsNullOrEmpty(Whitelist.Path))
{
throw new Exception("blacklist path is required");
}
}
}


@@ -1,8 +0,0 @@
namespace Common.Configuration.ContentBlocker;
public sealed record PatternConfig
{
public bool Enabled { get; init; }
public string? Path { get; init; }
}


@@ -0,0 +1,45 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadCleaner;
public sealed record CleanCategory : IConfig
{
public required string Name { get; init; }
/// <summary>
/// Max ratio before removing a download.
/// </summary>
[ConfigurationKeyName("MAX_RATIO")]
public required double MaxRatio { get; init; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
[ConfigurationKeyName("MIN_SEED_TIME")]
public required double MinSeedTime { get; init; } = 0;
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
[ConfigurationKeyName("MAX_SEED_TIME")]
public required double MaxSeedTime { get; init; } = -1;
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException($"{nameof(Name)} can not be empty");
}
if (MaxRatio < 0 && MaxSeedTime < 0)
{
throw new ValidationException($"both {nameof(MaxRatio)} and {nameof(MaxSeedTime)} are disabled");
}
if (MinSeedTime < 0)
{
throw new ValidationException($"{nameof(MinSeedTime)} can not be negative");
}
}
}
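An illustrative instance of the record above (values are hypothetical): remove downloads in this category once they reach ratio 1.0, but only after at least 24 hours of seeding, or after 14 days of seeding regardless of ratio.
```
using Common.Configuration.DownloadCleaner;

// Hypothetical values for illustration.
var tv = new CleanCategory
{
    Name = "tv-sonarr",
    MaxRatio = 1.0,        // MAX_RATIO
    MinSeedTime = 24,      // MIN_SEED_TIME, in hours
    MaxSeedTime = 14 * 24  // MAX_SEED_TIME, in hours
};

tv.Validate(); // throws ValidationException if the combination is invalid
```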


@@ -0,0 +1,68 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadCleaner;
public sealed record DownloadCleanerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "DownloadCleaner";
public bool Enabled { get; init; }
public List<CleanCategory>? Categories { get; init; }
[ConfigurationKeyName("DELETE_PRIVATE")]
public bool DeletePrivate { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
[ConfigurationKeyName("UNLINKED_TARGET_CATEGORY")]
public string UnlinkedTargetCategory { get; init; } = "cleanuperr-unlinked";
[ConfigurationKeyName("UNLINKED_IGNORED_ROOT_DIR")]
public string UnlinkedIgnoredRootDir { get; init; } = string.Empty;
[ConfigurationKeyName("UNLINKED_CATEGORIES")]
public List<string>? UnlinkedCategories { get; init; }
public void Validate()
{
if (!Enabled)
{
return;
}
if (Categories?.GroupBy(x => x.Name).Any(x => x.Count() > 1) is true)
{
throw new ValidationException("duplicated clean categories found");
}
Categories?.ForEach(x => x.Validate());
if (string.IsNullOrEmpty(UnlinkedTargetCategory))
{
return;
}
if (UnlinkedCategories?.Count is null or 0)
{
throw new ValidationException("no unlinked categories configured");
}
if (UnlinkedCategories.Contains(UnlinkedTargetCategory))
{
throw new ValidationException($"{SectionName.ToUpperInvariant()}__UNLINKED_TARGET_CATEGORY should not be present in {SectionName.ToUpperInvariant()}__UNLINKED_CATEGORIES");
}
if (UnlinkedCategories.Any(string.IsNullOrEmpty))
{
throw new ValidationException("empty unlinked category filter found");
}
if (!string.IsNullOrEmpty(UnlinkedIgnoredRootDir) && !Directory.Exists(UnlinkedIgnoredRootDir))
{
throw new ValidationException($"{UnlinkedIgnoredRootDir} root directory does not exist");
}
}
}
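Similarly, a hedged sketch of the unlinked-download handling configured by the record above (category names are hypothetical); downloads without additional hardlinks are moved from the listed categories into the target category:
```
using Common.Configuration.DownloadCleaner;

var cleaner = new DownloadCleanerConfig
{
    Enabled = true,
    UnlinkedTargetCategory = "cleanuperr-unlinked",
    UnlinkedCategories = ["tv-sonarr", "radarr"]
};

// Throws ValidationException if, for example, the target category is also
// listed in UnlinkedCategories or UnlinkedIgnoredRootDir does not exist.
cleaner.Validate();
```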


@@ -1,4 +1,7 @@
namespace Common.Configuration.DownloadClient;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed record DelugeConfig : IConfig
{
@@ -6,13 +9,16 @@ public sealed record DelugeConfig : IConfig
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = string.Empty;
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}


@@ -0,0 +1,9 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed record DownloadClientConfig
{
[ConfigurationKeyName("DOWNLOAD_CLIENT")]
public Enums.DownloadClient DownloadClient { get; init; } = Enums.DownloadClient.None;
}


@@ -1,4 +1,7 @@
namespace Common.Configuration.DownloadClient;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed class QBitConfig : IConfig
{
@@ -6,6 +9,9 @@ public sealed class QBitConfig : IConfig
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = string.Empty;
public string? Username { get; init; }
public string? Password { get; init; }
@@ -14,7 +20,7 @@ public sealed class QBitConfig : IConfig
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}


@@ -1,4 +1,7 @@
namespace Common.Configuration.DownloadClient;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public record TransmissionConfig : IConfig
{
@@ -6,6 +9,9 @@ public record TransmissionConfig : IConfig
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = "transmission";
public string? Username { get; init; }
public string? Password { get; init; }
@@ -14,7 +20,7 @@ public record TransmissionConfig : IConfig
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}


@@ -1,6 +0,0 @@
namespace Common.Configuration;
public static class EnvironmentVariables
{
public const string DownloadClient = "DOWNLOAD_CLIENT";
}


@@ -0,0 +1,9 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.General;
public sealed record DryRunConfig
{
[ConfigurationKeyName("DRY_RUN")]
public bool IsDryRun { get; init; }
}


@@ -0,0 +1,25 @@
using Common.Enums;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.General;
public sealed record HttpConfig : IConfig
{
[ConfigurationKeyName("HTTP_MAX_RETRIES")]
public ushort MaxRetries { get; init; }
[ConfigurationKeyName("HTTP_TIMEOUT")]
public ushort Timeout { get; init; } = 100;
[ConfigurationKeyName("HTTP_VALIDATE_CERT")]
public CertificateValidationType CertificateValidation { get; init; } = CertificateValidationType.Enabled;
public void Validate()
{
if (Timeout is 0)
{
throw new ValidationException("HTTP_TIMEOUT must be greater than 0");
}
}
}
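`HTTP_VALIDATE_CERT` maps to `CertificateValidationType`; below is a hedged sketch of how such a setting could be applied to an `HttpClientHandler`. This is an assumption for illustration only, not the wiring shown in this diff, and the "local addresses" check is simplified to loopback targets.
```
using System.Net.Http;
using System.Net.Security;
using Common.Enums;

public static class ExampleHttpHandlerFactory
{
    public static HttpClientHandler CreateHandler(CertificateValidationType validation)
    {
        var handler = new HttpClientHandler();

        if (validation is CertificateValidationType.Disabled)
        {
            // accept any certificate
            handler.ServerCertificateCustomValidationCallback = (_, _, _, _) => true;
        }
        else if (validation is CertificateValidationType.DisabledForLocalAddresses)
        {
            // accept invalid certificates only for loopback targets (simplified check)
            handler.ServerCertificateCustomValidationCallback = (request, _, _, errors) =>
                errors is SslPolicyErrors.None || request.RequestUri?.IsLoopback is true;
        }

        return handler;
    }
}
```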


@@ -1,4 +1,4 @@
namespace Common.Configuration;
namespace Common.Configuration.General;
public sealed class TriggersConfig
{
@@ -7,4 +7,6 @@ public sealed class TriggersConfig
public required string QueueCleaner { get; init; }
public required string ContentBlocker { get; init; }
public required string DownloadCleaner { get; init; }
}


@@ -0,0 +1,6 @@
namespace Common.Configuration;
public interface IIgnoredDownloadsConfig
{
string? IgnoredDownloadsPath { get; }
}


@@ -0,0 +1,34 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.Notification;
public abstract record NotificationConfig
{
[ConfigurationKeyName("ON_IMPORT_FAILED_STRIKE")]
public bool OnImportFailedStrike { get; init; }
[ConfigurationKeyName("ON_STALLED_STRIKE")]
public bool OnStalledStrike { get; init; }
[ConfigurationKeyName("ON_SLOW_STRIKE")]
public bool OnSlowStrike { get; init; }
[ConfigurationKeyName("ON_QUEUE_ITEM_DELETED")]
public bool OnQueueItemDeleted { get; init; }
[ConfigurationKeyName("ON_DOWNLOAD_CLEANED")]
public bool OnDownloadCleaned { get; init; }
[ConfigurationKeyName("ON_CATEGORY_CHANGED")]
public bool OnCategoryChanged { get; init; }
public bool IsEnabled =>
OnImportFailedStrike ||
OnStalledStrike ||
OnSlowStrike ||
OnQueueItemDeleted ||
OnDownloadCleaned ||
OnCategoryChanged;
public abstract bool IsValid();
}


@@ -1,8 +1,10 @@
using Microsoft.Extensions.Configuration;
using Common.CustomDataTypes;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.QueueCleaner;
public sealed record QueueCleanerConfig : IJobConfig
public sealed record QueueCleanerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "QueueCleaner";
@@ -10,13 +12,108 @@ public sealed record QueueCleanerConfig : IJobConfig
public required bool RunSequentially { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_MAX_STRIKES")]
public ushort ImportFailedMaxStrikes { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_IGNORE_PRIVATE")]
public bool ImportFailedIgnorePrivate { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_DELETE_PRIVATE")]
public bool ImportFailedDeletePrivate { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_IGNORE_PATTERNS")]
public IReadOnlyList<string>? ImportFailedIgnorePatterns { get; init; }
[ConfigurationKeyName("STALLED_MAX_STRIKES")]
public ushort StalledMaxStrikes { get; init; }
[ConfigurationKeyName("STALLED_RESET_STRIKES_ON_PROGRESS")]
public bool StalledResetStrikesOnProgress { get; init; }
[ConfigurationKeyName("STALLED_IGNORE_PRIVATE")]
public bool StalledIgnorePrivate { get; init; }
[ConfigurationKeyName("STALLED_DELETE_PRIVATE")]
public bool StalledDeletePrivate { get; init; }
[ConfigurationKeyName("DOWNLOADING_METADATA_MAX_STRIKES")]
public ushort DownloadingMetadataMaxStrikes { get; init; }
[ConfigurationKeyName("SLOW_MAX_STRIKES")]
public ushort SlowMaxStrikes { get; init; }
[ConfigurationKeyName("SLOW_RESET_STRIKES_ON_PROGRESS")]
public bool SlowResetStrikesOnProgress { get; init; }
[ConfigurationKeyName("SLOW_IGNORE_PRIVATE")]
public bool SlowIgnorePrivate { get; init; }
[ConfigurationKeyName("SLOW_DELETE_PRIVATE")]
public bool SlowDeletePrivate { get; init; }
[ConfigurationKeyName("SLOW_MIN_SPEED")]
public string SlowMinSpeed { get; init; } = string.Empty;
public ByteSize SlowMinSpeedByteSize => string.IsNullOrEmpty(SlowMinSpeed) ? new ByteSize(0) : ByteSize.Parse(SlowMinSpeed);
[ConfigurationKeyName("SLOW_MAX_TIME")]
public double SlowMaxTime { get; init; }
[ConfigurationKeyName("SLOW_IGNORE_ABOVE_SIZE")]
public string SlowIgnoreAboveSize { get; init; } = string.Empty;
public ByteSize? SlowIgnoreAboveSizeByteSize => string.IsNullOrEmpty(SlowIgnoreAboveSize) ? null : ByteSize.Parse(SlowIgnoreAboveSize);
public void Validate()
{
if (ImportFailedMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__IMPORT_FAILED_MAX_STRIKES must be 3");
}
if (StalledMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__STALLED_MAX_STRIKES must be 3");
}
if (DownloadingMetadataMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__DOWNLOADING_METADATA_MAX_STRIKES must be 3");
}
if (SlowMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__SLOW_MAX_STRIKES must be 3");
}
if (SlowMaxStrikes > 0)
{
bool isSlowSpeedSet = !string.IsNullOrEmpty(SlowMinSpeed);
if (isSlowSpeedSet && ByteSize.TryParse(SlowMinSpeed, out _) is false)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_MIN_SPEED");
}
if (SlowMaxTime < 0)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_MAX_TIME");
}
if (!isSlowSpeedSet && SlowMaxTime is 0)
{
throw new ValidationException($"either {SectionName.ToUpperInvariant()}__SLOW_MIN_SPEED or {SectionName.ToUpperInvariant()}__SLOW_MAX_STRIKES must be set");
}
bool isSlowIgnoreAboveSizeSet = !string.IsNullOrEmpty(SlowIgnoreAboveSize);
if (isSlowIgnoreAboveSizeSet && ByteSize.TryParse(SlowIgnoreAboveSize, out _) is false)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_IGNORE_ABOVE_SIZE");
}
}
}
}
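For illustration, a slow-download configuration that passes the validation above (values are hypothetical, and `Enabled` is assumed to be the job's existing on/off flag): strike downloads slower than 500 KB/s or with an estimated completion time above 4 hours, ignoring anything larger than 10 GB.
```
using Common.Configuration.QueueCleaner;
using Common.CustomDataTypes;

var queueCleaner = new QueueCleanerConfig
{
    Enabled = true,
    RunSequentially = true,
    SlowMaxStrikes = 3,
    SlowMinSpeed = "500KB",      // SLOW_MIN_SPEED
    SlowMaxTime = 4,             // SLOW_MAX_TIME, in hours
    SlowIgnoreAboveSize = "10GB" // SLOW_IGNORE_ABOVE_SIZE
};

queueCleaner.Validate();
ByteSize minSpeed = queueCleaner.SlowMinSpeedByteSize; // 500 KB, parsed via ByteSize.Parse
```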


@@ -0,0 +1,115 @@
using System.Globalization;
namespace Common.CustomDataTypes;
public readonly struct ByteSize : IComparable<ByteSize>, IEquatable<ByteSize>
{
public long Bytes { get; }
private const long BytesPerKB = 1024;
private const long BytesPerMB = 1024 * 1024;
private const long BytesPerGB = 1024 * 1024 * 1024;
public ByteSize(long bytes)
{
if (bytes < 0)
{
throw new ArgumentOutOfRangeException(nameof(bytes), "bytes can not be negative");
}
Bytes = bytes;
}
public static ByteSize FromKilobytes(double kilobytes) => new((long)(kilobytes * BytesPerKB));
public static ByteSize FromMegabytes(double megabytes) => new((long)(megabytes * BytesPerMB));
public static ByteSize FromGigabytes(double gigabytes) => new((long)(gigabytes * BytesPerGB));
public static ByteSize Parse(string input)
{
if (string.IsNullOrWhiteSpace(input))
{
throw new ArgumentNullException(nameof(input));
}
input = input.Trim().ToUpperInvariant();
double value;
if (input.EndsWith("KB", StringComparison.InvariantCultureIgnoreCase))
{
value = double.Parse(input[..^2], CultureInfo.InvariantCulture);
return FromKilobytes(value);
}
if (input.EndsWith("MB", StringComparison.InvariantCultureIgnoreCase))
{
value = double.Parse(input[..^2], CultureInfo.InvariantCulture);
return FromMegabytes(value);
}
if (input.EndsWith("GB", StringComparison.InvariantCultureIgnoreCase))
{
value = double.Parse(input[..^2], CultureInfo.InvariantCulture);
return FromGigabytes(value);
}
throw new FormatException("invalid size format | only KB, MB and GB are supported");
}
public static bool TryParse(string? input, out ByteSize? result)
{
result = default;
if (string.IsNullOrWhiteSpace(input))
{
return false;
}
input = input.Trim().ToUpperInvariant();
if (input.EndsWith("KB", StringComparison.InvariantCultureIgnoreCase) &&
double.TryParse(input[..^2], NumberStyles.Float, CultureInfo.InvariantCulture, out double kb))
{
result = FromKilobytes(kb);
return true;
}
if (input.EndsWith("MB", StringComparison.InvariantCultureIgnoreCase) &&
double.TryParse(input[..^2], NumberStyles.Float, CultureInfo.InvariantCulture, out double mb))
{
result = FromMegabytes(mb);
return true;
}
if (input.EndsWith("GB", StringComparison.InvariantCultureIgnoreCase) &&
double.TryParse(input[..^2], NumberStyles.Float, CultureInfo.InvariantCulture, out double gb))
{
result = FromGigabytes(gb);
return true;
}
return false;
}
public override string ToString() =>
Bytes switch
{
>= BytesPerGB => $"{Bytes / (double)BytesPerGB:0.##} GB",
>= BytesPerMB => $"{Bytes / (double)BytesPerMB:0.##} MB",
_ => $"{Bytes / (double)BytesPerKB:0.##} KB"
};
public int CompareTo(ByteSize other) => Bytes.CompareTo(other.Bytes);
public bool Equals(ByteSize other) => Bytes == other.Bytes;
public override bool Equals(object? obj) => obj is ByteSize other && Equals(other);
public override int GetHashCode() => Bytes.GetHashCode();
public static bool operator ==(ByteSize left, ByteSize right) => left.Equals(right);
public static bool operator !=(ByteSize left, ByteSize right) => !(left == right);
public static bool operator <(ByteSize left, ByteSize right) => left.Bytes < right.Bytes;
public static bool operator >(ByteSize left, ByteSize right) => left.Bytes > right.Bytes;
public static bool operator <=(ByteSize left, ByteSize right) => left.Bytes <= right.Bytes;
public static bool operator >=(ByteSize left, ByteSize right) => left.Bytes >= right.Bytes;
public static ByteSize operator +(ByteSize left, ByteSize right) => new(left.Bytes + right.Bytes);
public static ByteSize operator -(ByteSize left, ByteSize right) => new(Math.Max(left.Bytes - right.Bytes, 0));
}
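Example usage of `ByteSize` as defined above; this is the KB/MB/GB string format that values such as `SLOW_MIN_SPEED` and `SLOW_IGNORE_ABOVE_SIZE` are parsed with:
```
using System;
using Common.CustomDataTypes;

ByteSize limit = ByteSize.Parse("1.5GB");     // 1,610,612,736 bytes
ByteSize speed = ByteSize.FromKilobytes(500);

Console.WriteLine(limit);                     // "1.5 GB"
Console.WriteLine(speed < limit);             // True

if (ByteSize.TryParse("750mb", out ByteSize? parsed))
{
    Console.WriteLine(parsed);                // "750 MB"
}
```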


@@ -0,0 +1,66 @@
using System.Text;
namespace Common.CustomDataTypes;
public readonly struct SmartTimeSpan : IComparable<SmartTimeSpan>, IEquatable<SmartTimeSpan>
{
public TimeSpan Time { get; }
public SmartTimeSpan(TimeSpan time)
{
Time = time;
}
public override string ToString()
{
if (Time == TimeSpan.Zero)
{
return "0 seconds";
}
StringBuilder sb = new();
if (Time.Days > 0)
{
sb.Append($"{Time.Days} day{(Time.Days > 1 ? "s" : "")} ");
}
if (Time.Hours > 0)
{
sb.Append($"{Time.Hours} hour{(Time.Hours > 1 ? "s" : "")} ");
}
if (Time.Minutes > 0)
{
sb.Append($"{Time.Minutes} minute{(Time.Minutes > 1 ? "s" : "")} ");
}
if (Time.Seconds > 0)
{
sb.Append($"{Time.Seconds} second{(Time.Seconds > 1 ? "s" : "")}");
}
return sb.ToString().TrimEnd();
}
public static SmartTimeSpan FromMinutes(double minutes) => new(TimeSpan.FromMinutes(minutes));
public static SmartTimeSpan FromSeconds(double seconds) => new(TimeSpan.FromSeconds(seconds));
public static SmartTimeSpan FromHours(double hours) => new(TimeSpan.FromHours(hours));
public static SmartTimeSpan FromDays(double days) => new(TimeSpan.FromDays(days));
public int CompareTo(SmartTimeSpan other) => Time.CompareTo(other.Time);
public bool Equals(SmartTimeSpan other) => Time.Equals(other.Time);
public override bool Equals(object? obj) => obj is SmartTimeSpan other && Equals(other);
public override int GetHashCode() => Time.GetHashCode();
public static bool operator ==(SmartTimeSpan left, SmartTimeSpan right) => left.Equals(right);
public static bool operator !=(SmartTimeSpan left, SmartTimeSpan right) => !left.Equals(right);
public static bool operator <(SmartTimeSpan left, SmartTimeSpan right) => left.Time < right.Time;
public static bool operator >(SmartTimeSpan left, SmartTimeSpan right) => left.Time > right.Time;
public static bool operator <=(SmartTimeSpan left, SmartTimeSpan right) => left.Time <= right.Time;
public static bool operator >=(SmartTimeSpan left, SmartTimeSpan right) => left.Time >= right.Time;
public static SmartTimeSpan operator +(SmartTimeSpan left, SmartTimeSpan right) => new(left.Time + right.Time);
public static SmartTimeSpan operator -(SmartTimeSpan left, SmartTimeSpan right) => new(left.Time - right.Time);
}
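And a short usage example for `SmartTimeSpan`, which renders durations in a human-readable form:
```
using System;
using Common.CustomDataTypes;

SmartTimeSpan seedTime = SmartTimeSpan.FromHours(26.5);
Console.WriteLine(seedTime); // "1 day 2 hours 30 minutes"

SmartTimeSpan total = seedTime + SmartTimeSpan.FromMinutes(45);
Console.WriteLine(total);    // "1 day 3 hours 15 minutes"
```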


@@ -0,0 +1,8 @@
namespace Common.Enums;
public enum CertificateValidationType
{
Enabled = 0,
DisabledForLocalAddresses = 1,
Disabled = 2
}


@@ -0,0 +1,10 @@
namespace Common.Enums;
public enum DownloadClient
{
QBittorrent,
Deluge,
Transmission,
None,
Disabled
}


@@ -0,0 +1,12 @@
namespace Common.Exceptions;
public class FatalException : Exception
{
public FatalException()
{
}
public FatalException(string message) : base(message)
{
}
}


@@ -0,0 +1,12 @@
namespace Common.Exceptions;
public sealed class ValidationException : Exception
{
public ValidationException()
{
}
public ValidationException(string message) : base(message)
{
}
}


@@ -4,4 +4,6 @@ public static class Constants
{
public static readonly TimeSpan TriggerMaxLimit = TimeSpan.FromHours(6);
public static readonly TimeSpan CacheLimitBuffer = TimeSpan.FromHours(2);
public const string HttpClientWithRetryName = "retry";
}


@@ -0,0 +1,8 @@
namespace Domain.Enums;
public enum CleanReason
{
None,
MaxRatioReached,
MaxSeedTimeReached,
}


@@ -0,0 +1,14 @@
namespace Domain.Enums;
public enum DeleteReason
{
None,
Stalled,
ImportFailed,
DownloadingMetadata,
SlowSpeed,
SlowTime,
AllFilesSkipped,
AllFilesSkippedByQBit,
AllFilesBlocked,
}


@@ -1,8 +0,0 @@
namespace Domain.Enums;
public enum DownloadClient
{
QBittorrent,
Deluge,
Transmission
}


@@ -3,5 +3,8 @@
public enum StrikeType
{
Stalled,
ImportFailed
DownloadingMetadata,
ImportFailed,
SlowSpeed,
SlowTime,
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Arr.Blocking;
public record BlockedItem
{
public required string Hash { get; init; }
public required Uri InstanceUrl { get; init; }
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Arr.Blocking;
public sealed record LidarrBlockedItem : BlockedItem
{
public required long AlbumId { get; init; }
public required long ArtistId { get; init; }
}


@@ -0,0 +1,6 @@
namespace Domain.Models.Arr.Blocking;
public sealed record RadarrBlockedItem : BlockedItem
{
public required long MovieId { get; init; }
}


@@ -0,0 +1,10 @@
namespace Domain.Models.Arr.Blocking;
public sealed record SonarrBlockedItem : BlockedItem
{
public required long EpisodeId { get; init; }
public required long SeasonNumber { get; init; }
public required long SeriesId { get; init; }
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Arr.Queue;
public record Image
{
public required string CoverType { get; init; }
public required Uri RemoteUrl { get; init; }
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Arr.Queue;
public record LidarrImage
{
public required string CoverType { get; init; }
public required Uri Url { get; init; }
}


@@ -0,0 +1,6 @@
namespace Domain.Models.Arr.Queue;
public sealed record QueueAlbum
{
public List<LidarrImage> Images { get; init; } = [];
}


@@ -0,0 +1,6 @@
namespace Domain.Models.Arr.Queue;
public sealed record QueueMovie
{
public List<Image> Images { get; init; } = [];
}


@@ -1,16 +1,33 @@
namespace Domain.Models.Arr.Queue;
namespace Domain.Models.Arr.Queue;
public record QueueRecord
public sealed record QueueRecord
{
public int SeriesId { get; init; }
public int EpisodeId { get; init; }
public int SeasonNumber { get; init; }
public int MovieId { get; init; }
// Sonarr
public long SeriesId { get; init; }
public long EpisodeId { get; init; }
public long SeasonNumber { get; init; }
public QueueSeries? Series { get; init; }
// Radarr
public long MovieId { get; init; }
public QueueSeries? Movie { get; init; }
// Lidarr
public long ArtistId { get; init; }
public long AlbumId { get; init; }
public QueueAlbum? Album { get; init; }
// common
public required string Title { get; init; }
public string Status { get; init; }
public string TrackedDownloadStatus { get; init; }
public string TrackedDownloadState { get; init; }
public List<TrackedDownloadStatusMessage>? StatusMessages { get; init; }
public required string DownloadId { get; init; }
public required string Protocol { get; init; }
public required int Id { get; init; }
public required long Id { get; init; }
}


@@ -0,0 +1,6 @@
namespace Domain.Models.Arr.Queue;
public sealed record QueueSeries
{
public List<Image> Images { get; init; } = [];
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Arr.Queue;
public sealed record TrackedDownloadStatusMessage
{
public string Title { get; set; }
public List<string>? Messages { get; set; }
}


@@ -0,0 +1,9 @@
namespace Domain.Models.Cache;
public sealed record StalledCacheItem
{
/// <summary>
/// The amount of bytes that have been downloaded.
/// </summary>
public long Downloaded { get; set; }
}
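This cached byte count supports the option to reset stalled strikes once a download makes progress; a minimal hedged sketch of the comparison (helper name is hypothetical, and the actual cache wiring is not part of this diff):
```
using Domain.Models.Cache;

// Hypothetical helper: a download is considered to have progressed when more
// bytes have been downloaded than were recorded on the previous run.
bool HasProgressed(StalledCacheItem? cached, long currentDownloadedBytes) =>
    cached is null || currentDownloadedBytes > cached.Downloaded;
```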


@@ -0,0 +1,42 @@
using Newtonsoft.Json;
namespace Domain.Models.Deluge.Response;
public sealed record DownloadStatus
{
public string? Hash { get; init; }
public string? State { get; init; }
public string? Name { get; init; }
public ulong Eta { get; init; }
[JsonProperty("download_payload_rate")]
public long DownloadSpeed { get; init; }
public bool Private { get; init; }
[JsonProperty("total_size")]
public long Size { get; init; }
[JsonProperty("total_done")]
public long TotalDone { get; init; }
public string? Label { get; set; }
[JsonProperty("seeding_time")]
public long SeedingTime { get; init; }
public float Ratio { get; init; }
public required IReadOnlyList<Tracker> Trackers { get; init; }
[JsonProperty("download_location")]
public required string DownloadLocation { get; init; }
}
public sealed record Tracker
{
public required string Url { get; init; }
}


@@ -1,12 +0,0 @@
namespace Domain.Models.Deluge.Response;
public sealed record TorrentStatus
{
public string? Hash { get; set; }
public string? State { get; set; }
public string? Name { get; set; }
public ulong Eta { get; set; }
}


@@ -0,0 +1,12 @@
namespace Domain.Models.Lidarr;
public sealed record Album
{
public long Id { get; set; }
public string Title { get; set; }
public long ArtistId { get; set; }
public Artist Artist { get; set; }
}


@@ -0,0 +1,8 @@
namespace Domain.Models.Lidarr;
public sealed record Artist
{
public long Id { get; set; }
public string ArtistName { get; set; }
}


@@ -0,0 +1,10 @@
namespace Domain.Models.Lidarr;
public sealed record LidarrCommand
{
public string Name { get; set; }
public List<long> AlbumIds { get; set; }
public long ArtistId { get; set; }
}


@@ -1,10 +1,10 @@
using Common.Configuration;
using Common.Configuration.Arr;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Common.Configuration.General;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
namespace Executable.DependencyInjection;
@@ -12,12 +12,16 @@ public static class ConfigurationDI
{
public static IServiceCollection AddConfiguration(this IServiceCollection services, IConfiguration configuration) =>
services
.Configure<DryRunConfig>(configuration)
.Configure<QueueCleanerConfig>(configuration.GetSection(QueueCleanerConfig.SectionName))
.Configure<ContentBlockerConfig>(configuration.GetSection(ContentBlockerConfig.SectionName))
.Configure<DownloadCleanerConfig>(configuration.GetSection(DownloadCleanerConfig.SectionName))
.Configure<DownloadClientConfig>(configuration)
.Configure<QBitConfig>(configuration.GetSection(QBitConfig.SectionName))
.Configure<DelugeConfig>(configuration.GetSection(DelugeConfig.SectionName))
.Configure<TransmissionConfig>(configuration.GetSection(TransmissionConfig.SectionName))
.Configure<SonarrConfig>(configuration.GetSection(SonarrConfig.SectionName))
.Configure<RadarrConfig>(configuration.GetSection(RadarrConfig.SectionName))
.Configure<LidarrConfig>(configuration.GetSection(LidarrConfig.SectionName))
.Configure<LoggingConfig>(configuration.GetSection(LoggingConfig.SectionName));
}

View File

@@ -1,5 +1,7 @@
using Common.Configuration.Logging;
using Domain.Enums;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.QueueCleaner;
using Serilog;
using Serilog.Events;
@@ -27,11 +29,22 @@ public static class LoggingDI
}
LoggerConfiguration logConfig = new();
const string consoleOutputTemplate = "[{@t:yyyy-MM-dd HH:mm:ss.fff} {@l:u3}]{#if JobName is not null} {Concat('[',JobName,']'),PAD}{#end} {@m}\n{@x}";
const string fileOutputTemplate = "{@t:yyyy-MM-dd HH:mm:ss.fff zzz} [{@l:u3}]{#if JobName is not null} {Concat('[',JobName,']'),PAD}{#end} {@m:lj}\n{@x}";
const string jobNameTemplate = "{#if JobName is not null} {Concat('[',JobName,']'),JOB_PAD}{#end}";
const string instanceNameTemplate = "{#if InstanceName is not null} {Concat('[',InstanceName,']'),ARR_PAD}{#end}";
const string consoleOutputTemplate = $"[{{@t:yyyy-MM-dd HH:mm:ss.fff}} {{@l:u3}}]{jobNameTemplate}{instanceNameTemplate} {{@m}}\n{{@x}}";
const string fileOutputTemplate = $"{{@t:yyyy-MM-dd HH:mm:ss.fff zzz}} [{{@l:u3}}]{jobNameTemplate}{instanceNameTemplate} {{@m:lj}}\n{{@x}}";
LogEventLevel level = LogEventLevel.Information;
List<string> jobNames = [nameof(ContentBlocker), nameof(QueueCleaner)];
int padding = jobNames.Max(x => x.Length) + 2;
List<string> names = [nameof(ContentBlocker), nameof(QueueCleaner), nameof(DownloadCleaner)];
int jobPadding = names.Max(x => x.Length) + 2;
names = [InstanceType.Sonarr.ToString(), InstanceType.Radarr.ToString(), InstanceType.Lidarr.ToString()];
int arrPadding = names.Max(x => x.Length) + 2;
string consoleTemplate = consoleOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("ARR_PAD", arrPadding.ToString());
string fileTemplate = fileOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("ARR_PAD", arrPadding.ToString());
if (config is not null)
{
@@ -41,7 +54,7 @@ public static class LoggingDI
{
logConfig.WriteTo.File(
path: Path.Combine(config.File.Path, "cleanuperr-.txt"),
formatter: new ExpressionTemplate(fileOutputTemplate.Replace("PAD", padding.ToString())),
formatter: new ExpressionTemplate(fileTemplate),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true
@@ -51,11 +64,12 @@ public static class LoggingDI
Log.Logger = logConfig
.MinimumLevel.Is(level)
.MinimumLevel.Override("MassTransit", LogEventLevel.Warning)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft.Extensions.Http", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.WriteTo.Console(new ExpressionTemplate(consoleOutputTemplate.Replace("PAD", padding.ToString())))
.WriteTo.Console(new ExpressionTemplate(consoleTemplate))
.Enrich.FromLogContext()
.Enrich.WithProperty("ApplicationName", "cleanuperr")
.CreateLogger();
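For reference on the placeholder replacement above: with the names registered in this changeset, jobPadding works out to "DownloadCleaner".Length + 2 = 17 and arrPadding to "Sonarr".Length + 2 = 8, so after the Replace calls the console template effectively becomes the following (sketch; spacing values computed from the lists above):
string consoleTemplate =
    "[{@t:yyyy-MM-dd HH:mm:ss.fff} {@l:u3}]"
    + "{#if JobName is not null} {Concat('[',JobName,']'),17}{#end}"
    + "{#if InstanceName is not null} {Concat('[',InstanceName,']'),8}{#end}"
    + " {@m}\n{@x}";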

View File

@@ -1,14 +1,14 @@
using System.Net;
using Common.Configuration;
using Common.Configuration.ContentBlocker;
using Executable.Jobs;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Common.Configuration.General;
using Common.Helpers;
using Infrastructure.Services;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Infrastructure.Verticals.QueueCleaner;
using Infrastructure.Verticals.Notifications.Consumers;
using Infrastructure.Verticals.Notifications.Models;
using MassTransit;
using Microsoft.Extensions.Options;
using Polly;
using Polly.Extensions.Http;
namespace Executable.DependencyInjection;
@@ -17,22 +17,69 @@ public static class MainDI
public static IServiceCollection AddInfrastructure(this IServiceCollection services, IConfiguration configuration) =>
services
.AddLogging(builder => builder.ClearProviders().AddConsole())
.AddHttpClients()
.AddHttpClients(configuration)
.AddConfiguration(configuration)
.AddMemoryCache()
.AddMemoryCache(options => {
options.ExpirationScanFrequency = TimeSpan.FromMinutes(1);
})
.AddServices()
.AddQuartzServices(configuration);
.AddQuartzServices(configuration)
.AddNotifications(configuration)
.AddMassTransit(config =>
{
config.AddConsumer<NotificationConsumer<FailedImportStrikeNotification>>();
config.AddConsumer<NotificationConsumer<StalledStrikeNotification>>();
config.AddConsumer<NotificationConsumer<SlowStrikeNotification>>();
config.AddConsumer<NotificationConsumer<QueueItemDeletedNotification>>();
config.AddConsumer<NotificationConsumer<DownloadCleanedNotification>>();
config.AddConsumer<NotificationConsumer<CategoryChangedNotification>>();
config.UsingInMemory((context, cfg) =>
{
cfg.ReceiveEndpoint("notification-queue", e =>
{
e.ConfigureConsumer<NotificationConsumer<FailedImportStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<StalledStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<SlowStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<QueueItemDeletedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<DownloadCleanedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<CategoryChangedNotification>>(context);
e.ConcurrentMessageLimit = 1;
e.PrefetchCount = 1;
});
});
});
private static IServiceCollection AddHttpClients(this IServiceCollection services)
private static IServiceCollection AddHttpClients(this IServiceCollection services, IConfiguration configuration)
{
// add default HttpClient
services.AddHttpClient();
HttpConfig config = configuration.Get<HttpConfig>() ?? new();
config.Validate();
// add retry HttpClient
services
.AddHttpClient(Constants.HttpClientWithRetryName, x =>
{
x.Timeout = TimeSpan.FromSeconds(config.Timeout);
})
.ConfigurePrimaryHttpMessageHandler(provider =>
{
CertificateValidationService service = provider.GetRequiredService<CertificateValidationService>();
return new HttpClientHandler
{
ServerCertificateCustomValidationCallback = service.ShouldByPassValidationError
};
})
.AddRetryPolicyHandler(config);
// add Deluge HttpClient
services
.AddHttpClient(nameof(DelugeService), x =>
{
x.Timeout = TimeSpan.FromSeconds(5);
x.Timeout = TimeSpan.FromSeconds(config.Timeout);
})
.ConfigurePrimaryHttpMessageHandler(_ =>
{
@@ -44,8 +91,18 @@ public static class MainDI
AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate,
ServerCertificateCustomValidationCallback = (_, _, _, _) => true
};
});
})
.AddRetryPolicyHandler(config);
return services;
}
private static IHttpClientBuilder AddRetryPolicyHandler(this IHttpClientBuilder builder, HttpConfig config) =>
builder.AddPolicyHandler(
HttpPolicyExtensions
.HandleTransientHttpError()
// do not retry on Unauthorized
.OrResult(response => !response.IsSuccessStatusCode && response.StatusCode != HttpStatusCode.Unauthorized)
.WaitAndRetryAsync(config.MaxRetries, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)))
);
}
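A minimal usage sketch (hypothetical consumer class, not part of the diff) of the retry-enabled client registered above: anything that resolves Constants.HttpClientWithRetryName through IHttpClientFactory gets the configured timeout, the certificate validation callback and up to HTTP_MAX_RETRIES retries with 2^attempt-second backoff, while 401 responses are not retried.
public sealed class ExampleProbe
{
    private readonly HttpClient _httpClient;
    public ExampleProbe(IHttpClientFactory httpClientFactory)
    {
        // named client carries the timeout, certificate callback and retry policy configured above
        _httpClient = httpClientFactory.CreateClient(Constants.HttpClientWithRetryName);
    }
    public Task<HttpResponseMessage> GetAsync(Uri url) => _httpClient.GetAsync(url);
}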

View File

@@ -0,0 +1,20 @@
using Infrastructure.Verticals.Notifications;
using Infrastructure.Verticals.Notifications.Apprise;
using Infrastructure.Verticals.Notifications.Notifiarr;
namespace Executable.DependencyInjection;
public static class NotificationsDI
{
public static IServiceCollection AddNotifications(this IServiceCollection services, IConfiguration configuration) =>
services
.Configure<NotifiarrConfig>(configuration.GetSection(NotifiarrConfig.SectionName))
.Configure<AppriseConfig>(configuration.GetSection(AppriseConfig.SectionName))
.AddTransient<INotifiarrProxy, NotifiarrProxy>()
.AddTransient<INotificationProvider, NotifiarrProvider>()
.AddTransient<IAppriseProxy, AppriseProxy>()
.AddTransient<INotificationProvider, AppriseProvider>()
.AddTransient<INotificationPublisher, NotificationPublisher>()
.AddTransient<INotificationFactory, NotificationFactory>()
.AddTransient<NotificationService>();
}
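Since NotifiarrProvider and AppriseProvider are both registered against INotificationProvider, anything that asks the container for IEnumerable<INotificationProvider> receives both and can fan a single event out to every configured provider. A rough sketch of that pattern follows; the class and method names are made up for illustration:
public sealed class ProviderFanOutExample
{
    private readonly IEnumerable<INotificationProvider> _providers;
    public ProviderFanOutExample(IEnumerable<INotificationProvider> providers) => _providers = providers;
    public async Task SendToAllAsync(Func<INotificationProvider, Task> send)
    {
        foreach (INotificationProvider provider in _providers)
        {
            await send(provider); // invoked once per registered provider
        }
    }
}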

View File

@@ -1,9 +1,12 @@
using Common.Configuration;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.General;
using Common.Configuration.QueueCleaner;
using Common.Helpers;
using Executable.Jobs;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.QueueCleaner;
using Quartz;
@@ -52,12 +55,18 @@ public static class QuartzDI
if (contentBlockerConfig?.Enabled is true && queueCleanerConfig is { Enabled: true, RunSequentially: true })
{
q.AddJob<QueueCleaner>(queueCleanerConfig, string.Empty);
q.AddJobListener(new JobChainingListener(nameof(QueueCleaner)));
q.AddJobListener(new JobChainingListener(nameof(ContentBlocker), nameof(QueueCleaner)));
}
else
{
q.AddJob<QueueCleaner>(queueCleanerConfig, triggersConfig.QueueCleaner);
}
DownloadCleanerConfig? downloadCleanerConfig = configuration
.GetRequiredSection(DownloadCleanerConfig.SectionName)
.Get<DownloadCleanerConfig>();
q.AddJob<DownloadCleaner>(downloadCleanerConfig, triggersConfig.DownloadCleaner);
}
private static void AddJob<T>(
@@ -108,7 +117,7 @@ public static class QuartzDI
if (triggerValue > Constants.TriggerMaxLimit)
{
throw new Exception($"{trigger} should have a fire time of maximum 1 hour");
throw new Exception($"{trigger} should have a fire time of maximum {Constants.TriggerMaxLimit.TotalHours} hours");
}
if (triggerValue > StaticConfiguration.TriggerValue)

View File

@@ -1,9 +1,17 @@
using Infrastructure.Verticals.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Infrastructure.Interceptors;
using Infrastructure.Providers;
using Infrastructure.Services;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.QueueCleaner;
@@ -13,16 +21,27 @@ public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddTransient<IDryRunInterceptor, DryRunInterceptor>()
.AddTransient<CertificateValidationService>()
.AddTransient<SonarrClient>()
.AddTransient<RadarrClient>()
.AddTransient<LidarrClient>()
.AddTransient<QueueCleaner>()
.AddTransient<ContentBlocker>()
.AddTransient<FilenameEvaluator>()
.AddTransient<DownloadCleaner>()
.AddTransient<IFilenameEvaluator, FilenameEvaluator>()
.AddTransient<IHardLinkFileService, HardLinkFileService>()
.AddTransient<UnixHardLinkFileService>()
.AddTransient<WindowsHardLinkFileService>()
.AddTransient<DummyDownloadService>()
.AddTransient<QBitService>()
.AddTransient<DelugeService>()
.AddTransient<TransmissionService>()
.AddTransient<ArrQueueIterator>()
.AddTransient<DownloadServiceFactory>()
.AddTransient<IStriker, Striker>()
.AddSingleton<BlocklistProvider>()
.AddSingleton<Striker>();
.AddSingleton<IgnoredDownloadsProvider<QueueCleanerConfig>>()
.AddSingleton<IgnoredDownloadsProvider<ContentBlockerConfig>>()
.AddSingleton<IgnoredDownloadsProvider<DownloadCleanerConfig>>();
}

View File

@@ -10,8 +10,9 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.2" />
<PackageReference Include="Quartz" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.13.1" />

View File

@@ -0,0 +1,23 @@
using System.Reflection;
namespace Executable;
public static class HostExtensions
{
public static IHost Init(this IHost host)
{
ILogger<Program> logger = host.Services.GetRequiredService<ILogger<Program>>();
Version? version = Assembly.GetExecutingAssembly().GetName().Version;
logger.LogInformation(
version is null
? "cleanuperr version not detected"
: $"cleanuperr v{version.Major}.{version.Minor}.{version.Build}"
);
logger.LogInformation("timezone: {tz}", TimeZoneInfo.Local.DisplayName);
return host;
}
}

View File

@@ -6,7 +6,7 @@ namespace Executable.Jobs;
[DisallowConcurrentExecution]
public sealed class GenericJob<T> : IJob
where T : GenericHandler
where T : IHandler
{
private readonly ILogger<GenericJob<T>> _logger;
private readonly T _handler;

View File

@@ -1,4 +1,4 @@
using System.Reflection;
using Executable;
using Executable.DependencyInjection;
var builder = Host.CreateApplicationBuilder(args);
@@ -7,15 +7,6 @@ builder.Services.AddInfrastructure(builder.Configuration);
builder.Logging.AddLogging(builder.Configuration);
var host = builder.Build();
var logger = host.Services.GetRequiredService<ILogger<Program>>();
var version = Assembly.GetExecutingAssembly().GetName().Version;
logger.LogInformation(
version is null
? "cleanuperr version not detected"
: $"cleanuperr v{version.Major}.{version.Minor}.{version.Build}"
);
host.Init();
host.Run();

View File

@@ -1,6 +1,10 @@
{
"DRY_RUN": true,
"HTTP_MAX_RETRIES": 0,
"HTTP_TIMEOUT": 100,
"HTTP_VALIDATE_CERT": "enabled",
"Logging": {
"LogLevel": "Debug",
"LogLevel": "Verbose",
"Enhanced": true,
"File": {
"Enabled": false,
@@ -9,57 +13,133 @@
},
"Triggers": {
"QueueCleaner": "0/10 * * * * ?",
"ContentBlocker": "0/10 * * * * ?"
"ContentBlocker": "0/10 * * * * ?",
"DownloadCleaner": "0/10 * * * * ?"
},
"ContentBlocker": {
"Enabled": true,
"Blacklist": {
"Enabled": false,
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Whitelist": {
"Enabled": false,
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/whitelist"
}
"IGNORE_PRIVATE": true,
"DELETE_PRIVATE": false,
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads"
},
"QueueCleaner": {
"Enabled": true,
"RunSequentially": true,
"IMPORT_FAILED_MAX_STRIKES": 5,
"STALLED_MAX_STRIKES": 5
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads",
"IMPORT_FAILED_MAX_STRIKES": 3,
"IMPORT_FAILED_IGNORE_PRIVATE": true,
"IMPORT_FAILED_DELETE_PRIVATE": false,
"IMPORT_FAILED_IGNORE_PATTERNS": [
"file is a sample"
],
"STALLED_MAX_STRIKES": 3,
"STALLED_RESET_STRIKES_ON_PROGRESS": true,
"STALLED_IGNORE_PRIVATE": true,
"STALLED_DELETE_PRIVATE": false,
"DOWNLOADING_METADATA_MAX_STRIKES": 3,
"SLOW_MAX_STRIKES": 5,
"SLOW_RESET_STRIKES_ON_PROGRESS": true,
"SLOW_IGNORE_PRIVATE": false,
"SLOW_DELETE_PRIVATE": false,
"SLOW_MIN_SPEED": "1MB",
"SLOW_MAX_TIME": 20,
"SLOW_IGNORE_ABOVE_SIZE": "4GB"
},
"DownloadCleaner": {
"Enabled": false,
"DELETE_PRIVATE": false,
"CATEGORIES": [
{
"Name": "tv-sonarr",
"MAX_RATIO": -1,
"MIN_SEED_TIME": 0,
"MAX_SEED_TIME": 240
}
],
"UNLINKED_TARGET_CATEGORY": "cleanuperr-unlinked",
"UNLINKED_IGNORED_ROOT_DIR": "",
"UNLINKED_CATEGORIES": [
"tv-sonarr",
"radarr"
],
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads"
},
"DOWNLOAD_CLIENT": "qbittorrent",
"qBittorrent": {
"Url": "http://localhost:8080",
"URL_BASE": "",
"Username": "test",
"Password": "testing"
},
"Deluge": {
"Url": "http://localhost:8112",
"URL_BASE": "",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"URL_BASE": "transmission",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": true,
"SearchType": "Episode",
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:8989",
"ApiKey": "96736c3eb3144936b8f1d62d27be8cee"
"ApiKey": "425d1e713f0c405cbbf359ac0502c1f4"
}
]
},
"Radarr": {
"Enabled": true,
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": "705b553732ab4167ab23909305d60600"
"ApiKey": "8b7454f668e54c5b8f44f56f93969761"
}
]
},
"Lidarr": {
"Enabled": true,
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:8686",
"ApiKey": "7f677cfdc074414397af53dd633860c5"
}
]
},
"Notifiarr": {
"ON_IMPORT_FAILED_STRIKE": true,
"ON_STALLED_STRIKE": true,
"ON_SLOW_STRIKE": true,
"ON_QUEUE_ITEM_DELETED": true,
"ON_DOWNLOAD_CLEANED": true,
"ON_CATEGORY_CHANGED": true,
"API_KEY": "",
"CHANNEL_ID": ""
},
"Apprise": {
"ON_IMPORT_FAILED_STRIKE": true,
"ON_STALLED_STRIKE": true,
"ON_SLOW_STRIKE": true,
"ON_QUEUE_ITEM_DELETED": true,
"ON_DOWNLOAD_CLEANED": true,
"ON_CATEGORY_CHANGED": true,
"URL": "http://localhost:8000",
"KEY": ""
}
}

View File

@@ -1,4 +1,8 @@
{
"DRY_RUN": false,
"HTTP_MAX_RETRIES": 0,
"HTTP_TIMEOUT": 100,
"HTTP_VALIDATE_CERT": "enabled",
"Logging": {
"LogLevel": "Information",
"Enhanced": true,
@@ -9,43 +13,69 @@
},
"Triggers": {
"QueueCleaner": "0 0/5 * * * ?",
"ContentBlocker": "0 0/5 * * * ?"
"ContentBlocker": "0 0/5 * * * ?",
"DownloadCleaner": "0 0 * * * ?"
},
"ContentBlocker": {
"Enabled": false,
"Blacklist": {
"Enabled": false,
"Path": ""
},
"Whitelist": {
"Enabled": false,
"Path": ""
}
"IGNORE_PRIVATE": false,
"IGNORED_DOWNLOADS_PATH": ""
},
"QueueCleaner": {
"Enabled": true,
"Enabled": false,
"RunSequentially": true,
"IMPORT_FAILED_MAX_STRIKES": 5,
"STALLED_MAX_STRIKES": 5
"IGNORED_DOWNLOADS_PATH": "",
"IMPORT_FAILED_MAX_STRIKES": 0,
"IMPORT_FAILED_IGNORE_PRIVATE": false,
"IMPORT_FAILED_DELETE_PRIVATE": false,
"IMPORT_FAILED_IGNORE_PATTERNS": [],
"STALLED_MAX_STRIKES": 0,
"STALLED_RESET_STRIKES_ON_PROGRESS": false,
"STALLED_IGNORE_PRIVATE": false,
"STALLED_DELETE_PRIVATE": false,
"DOWNLOADING_METADATA_MAX_STRIKES": 0,
"SLOW_MAX_STRIKES": 0,
"SLOW_RESET_STRIKES_ON_PROGRESS": true,
"SLOW_IGNORE_PRIVATE": false,
"SLOW_DELETE_PRIVATE": false,
"SLOW_MIN_SPEED": "",
"SLOW_MAX_TIME": 0,
"SLOW_IGNORE_ABOVE_SIZE": ""
},
"DOWNLOAD_CLIENT": "qbittorrent",
"DownloadCleaner": {
"Enabled": false,
"DELETE_PRIVATE": false,
"CATEGORIES": [],
"UNLINKED_TARGET_CATEGORY": "cleanuperr-unlinked",
"UNLINKED_IGNORED_ROOT_DIR": "",
"UNLINKED_CATEGORIES": [],
"IGNORED_DOWNLOADS_PATH": ""
},
"DOWNLOAD_CLIENT": "none",
"qBittorrent": {
"Url": "http://localhost:8080",
"URL_BASE": "",
"Username": "",
"Password": ""
},
"Deluge": {
"Url": "http://localhost:8112",
"URL_BASE": "",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"URL_BASE": "transmission",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": true,
"Enabled": false,
"SearchType": "Episode",
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:8989",
@@ -55,11 +85,48 @@
},
"Radarr": {
"Enabled": false,
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": ""
}
]
},
"Lidarr": {
"Enabled": false,
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:8686",
"ApiKey": ""
}
]
},
"Notifiarr": {
"ON_IMPORT_FAILED_STRIKE": false,
"ON_STALLED_STRIKE": false,
"ON_SLOW_STRIKE": false,
"ON_QUEUE_ITEM_DELETED": false,
"ON_DOWNLOAD_CLEANED": false,
"ON_CATEGORY_CHANGED": false,
"API_KEY": "",
"CHANNEL_ID": ""
},
"Apprise": {
"ON_IMPORT_FAILED_STRIKE": false,
"ON_STALLED_STRIKE": false,
"ON_SLOW_STRIKE": false,
"ON_QUEUE_ITEM_DELETED": false,
"ON_DOWNLOAD_CLEANED": false,
"ON_CATEGORY_CHANGED": false,
"URL": "",
"KEY": ""
}
}

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.13.0" />
<PackageReference Include="NSubstitute" Version="5.3.0" />
<PackageReference Include="Shouldly" Version="4.3.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.2">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<Using Include="Xunit"/>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Infrastructure\Infrastructure.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,20 @@
using Infrastructure.Verticals.ContentBlocker;
using Microsoft.Extensions.Logging;
using NSubstitute;
namespace Infrastructure.Tests.Verticals.ContentBlocker;
public class FilenameEvaluatorFixture
{
public ILogger<FilenameEvaluator> Logger { get; }
public FilenameEvaluatorFixture()
{
Logger = Substitute.For<ILogger<FilenameEvaluator>>();
}
public FilenameEvaluator CreateSut()
{
return new FilenameEvaluator(Logger);
}
}

View File

@@ -0,0 +1,219 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Shouldly;
namespace Infrastructure.Tests.Verticals.ContentBlocker;
public class FilenameEvaluatorTests : IClassFixture<FilenameEvaluatorFixture>
{
private readonly FilenameEvaluatorFixture _fixture;
public FilenameEvaluatorTests(FilenameEvaluatorFixture fixture)
{
_fixture = fixture;
}
public class PatternTests : FilenameEvaluatorTests
{
public PatternTests(FilenameEvaluatorFixture fixture) : base(fixture) { }
[Fact]
public void WhenNoPatterns_ShouldReturnTrue()
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>();
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid("test.txt", BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeTrue();
}
[Theory]
[InlineData("test.txt", "test.txt", true)] // Exact match
[InlineData("test.txt", "*.txt", true)] // End wildcard
[InlineData("test.txt", "test.*", true)] // Start wildcard
[InlineData("test.txt", "*test*", true)] // Both wildcards
[InlineData("test.txt", "other.txt", false)] // No match
public void Blacklist_ShouldMatchPatterns(string filename, string pattern, bool shouldBeBlocked)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string> { pattern };
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid(filename, BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBe(!shouldBeBlocked);
}
[Theory]
[InlineData("test.txt", "test.txt", true)] // Exact match
[InlineData("test.txt", "*.txt", true)] // End wildcard
[InlineData("test.txt", "test.*", true)] // Start wildcard
[InlineData("test.txt", "*test*", true)] // Both wildcards
[InlineData("test.txt", "other.txt", false)] // No match
public void Whitelist_ShouldMatchPatterns(string filename, string pattern, bool shouldBeAllowed)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string> { pattern };
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid(filename, BlocklistType.Whitelist, patterns, regexes);
// Assert
result.ShouldBe(shouldBeAllowed);
}
[Theory]
[InlineData("TEST.TXT", "test.txt")]
[InlineData("test.txt", "TEST.TXT")]
public void ShouldBeCaseInsensitive(string filename, string pattern)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string> { pattern };
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid(filename, BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeFalse();
}
[Fact]
public void MultiplePatterns_ShouldMatchAny()
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>
{
"other.txt",
"*.pdf",
"test.*"
};
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid("test.txt", BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeFalse();
}
}
public class RegexTests : FilenameEvaluatorTests
{
public RegexTests(FilenameEvaluatorFixture fixture) : base(fixture) { }
[Fact]
public void WhenNoRegexes_ShouldReturnTrue()
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>();
var regexes = new ConcurrentBag<Regex>();
// Act
var result = sut.IsValid("test.txt", BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeTrue();
}
[Theory]
[InlineData(@"test\d+\.txt", "test123.txt", true)]
[InlineData(@"test\d+\.txt", "test.txt", false)]
public void Blacklist_ShouldMatchRegexes(string pattern, string filename, bool shouldBeBlocked)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>();
var regexes = new ConcurrentBag<Regex> { new Regex(pattern, RegexOptions.IgnoreCase) };
// Act
var result = sut.IsValid(filename, BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBe(!shouldBeBlocked);
}
[Theory]
[InlineData(@"test\d+\.txt", "test123.txt", true)]
[InlineData(@"test\d+\.txt", "test.txt", false)]
public void Whitelist_ShouldMatchRegexes(string pattern, string filename, bool shouldBeAllowed)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>();
var regexes = new ConcurrentBag<Regex> { new Regex(pattern, RegexOptions.IgnoreCase) };
// Act
var result = sut.IsValid(filename, BlocklistType.Whitelist, patterns, regexes);
// Assert
result.ShouldBe(shouldBeAllowed);
}
[Theory]
[InlineData(@"TEST\d+\.TXT", "test123.txt")]
[InlineData(@"test\d+\.txt", "TEST123.TXT")]
public void ShouldBeCaseInsensitive(string pattern, string filename)
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string>();
var regexes = new ConcurrentBag<Regex> { new Regex(pattern, RegexOptions.IgnoreCase) };
// Act
var result = sut.IsValid(filename, BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeFalse();
}
}
public class CombinedTests : FilenameEvaluatorTests
{
public CombinedTests(FilenameEvaluatorFixture fixture) : base(fixture) { }
[Fact]
public void WhenBothPatternsAndRegexes_ShouldMatchBoth()
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string> { "*.txt" };
var regexes = new ConcurrentBag<Regex> { new Regex(@"test\d+", RegexOptions.IgnoreCase) };
// Act
var result = sut.IsValid("test123.txt", BlocklistType.Blacklist, patterns, regexes);
// Assert
result.ShouldBeFalse();
}
[Fact]
public void WhenPatternMatchesButRegexDoesNot_ShouldReturnFalse()
{
// Arrange
var sut = _fixture.CreateSut();
var patterns = new ConcurrentBag<string> { "*.txt" };
var regexes = new ConcurrentBag<Regex> { new Regex(@"test\d+", RegexOptions.IgnoreCase) };
// Act
var result = sut.IsValid("other.txt", BlocklistType.Whitelist, patterns, regexes);
// Assert
result.ShouldBeFalse();
}
}
}

View File

@@ -0,0 +1,80 @@
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using NSubstitute;
namespace Infrastructure.Tests.Verticals.DownloadClient;
public class DownloadServiceFixture : IDisposable
{
public ILogger<DownloadService> Logger { get; set; }
public IMemoryCache Cache { get; set; }
public IStriker Striker { get; set; }
public DownloadServiceFixture()
{
Logger = Substitute.For<ILogger<DownloadService>>();
Cache = Substitute.For<IMemoryCache>();
Striker = Substitute.For<IStriker>();
}
public TestDownloadService CreateSut(
QueueCleanerConfig? queueCleanerConfig = null,
ContentBlockerConfig? contentBlockerConfig = null
)
{
queueCleanerConfig ??= new QueueCleanerConfig
{
Enabled = true,
RunSequentially = true,
StalledResetStrikesOnProgress = true,
StalledMaxStrikes = 3
};
var queueCleanerOptions = Substitute.For<IOptions<QueueCleanerConfig>>();
queueCleanerOptions.Value.Returns(queueCleanerConfig);
contentBlockerConfig ??= new ContentBlockerConfig
{
Enabled = true
};
var contentBlockerOptions = Substitute.For<IOptions<ContentBlockerConfig>>();
contentBlockerOptions.Value.Returns(contentBlockerConfig);
var downloadCleanerOptions = Substitute.For<IOptions<DownloadCleanerConfig>>();
downloadCleanerOptions.Value.Returns(new DownloadCleanerConfig());
var filenameEvaluator = Substitute.For<IFilenameEvaluator>();
var notifier = Substitute.For<INotificationPublisher>();
var dryRunInterceptor = Substitute.For<IDryRunInterceptor>();
var hardlinkFileService = Substitute.For<IHardLinkFileService>();
return new TestDownloadService(
Logger,
queueCleanerOptions,
contentBlockerOptions,
downloadCleanerOptions,
Cache,
filenameEvaluator,
Striker,
notifier,
dryRunInterceptor,
hardlinkFileService
);
}
public void Dispose()
{
// Cleanup if needed
}
}

View File

@@ -0,0 +1,214 @@
using Common.Configuration.DownloadCleaner;
using Domain.Enums;
using Domain.Models.Cache;
using Infrastructure.Helpers;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.DownloadClient;
using NSubstitute;
using NSubstitute.ClearExtensions;
using Shouldly;
namespace Infrastructure.Tests.Verticals.DownloadClient;
public class DownloadServiceTests : IClassFixture<DownloadServiceFixture>
{
private readonly DownloadServiceFixture _fixture;
public DownloadServiceTests(DownloadServiceFixture fixture)
{
_fixture = fixture;
_fixture.Cache.ClearSubstitute();
_fixture.Striker.ClearSubstitute();
}
public class ResetStrikesOnProgressTests : DownloadServiceTests
{
public ResetStrikesOnProgressTests(DownloadServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void WhenStalledStrikeDisabled_ShouldNotResetStrikes()
{
// Arrange
TestDownloadService sut = _fixture.CreateSut(queueCleanerConfig: new()
{
Enabled = true,
RunSequentially = true,
StalledResetStrikesOnProgress = false,
});
// Act
sut.ResetStalledStrikesOnProgress("test-hash", 100);
// Assert
_fixture.Cache.ReceivedCalls().ShouldBeEmpty();
}
[Fact]
public void WhenProgressMade_ShouldResetStrikes()
{
// Arrange
const string hash = "test-hash";
StalledCacheItem stalledCacheItem = new StalledCacheItem { Downloaded = 100 };
_fixture.Cache.TryGetValue(Arg.Any<object>(), out Arg.Any<object?>())
.Returns(x =>
{
x[1] = stalledCacheItem;
return true;
});
TestDownloadService sut = _fixture.CreateSut();
// Act
sut.ResetStalledStrikesOnProgress(hash, 200);
// Assert
_fixture.Cache.Received(1).Remove(CacheKeys.Strike(StrikeType.Stalled, hash));
}
[Fact]
public void WhenNoProgress_ShouldNotResetStrikes()
{
// Arrange
const string hash = "test-hash";
StalledCacheItem stalledCacheItem = new StalledCacheItem { Downloaded = 200 };
_fixture.Cache
.TryGetValue(Arg.Any<object>(), out Arg.Any<object?>())
.Returns(x =>
{
x[1] = stalledCacheItem;
return true;
});
TestDownloadService sut = _fixture.CreateSut();
// Act
sut.ResetStalledStrikesOnProgress(hash, 100);
// Assert
_fixture.Cache.DidNotReceive().Remove(Arg.Any<object>());
}
}
public class StrikeAndCheckLimitTests : DownloadServiceTests
{
public StrikeAndCheckLimitTests(DownloadServiceFixture fixture) : base(fixture)
{
}
}
public class ShouldCleanDownloadTests : DownloadServiceTests
{
public ShouldCleanDownloadTests(DownloadServiceFixture fixture) : base(fixture)
{
ContextProvider.Set("downloadName", "test-download");
}
[Fact]
public void WhenRatioAndMinSeedTimeReached_ShouldReturnTrue()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 1.0,
MinSeedTime = 1,
MaxSeedTime = -1
};
const double ratio = 1.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeTrue(),
() => result.Reason.ShouldBe(CleanReason.MaxRatioReached)
);
}
[Fact]
public void WhenRatioReachedAndMinSeedTimeNotReached_ShouldReturnFalse()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 1.0,
MinSeedTime = 3,
MaxSeedTime = -1
};
const double ratio = 1.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeFalse(),
() => result.Reason.ShouldBe(CleanReason.None)
);
}
[Fact]
public void WhenMaxSeedTimeReached_ShouldReturnTrue()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = -1,
MinSeedTime = 0,
MaxSeedTime = 1
};
const double ratio = 0.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
SeedingCheckResult result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeTrue(),
() => result.Reason.ShouldBe(CleanReason.MaxSeedTimeReached)
);
}
[Fact]
public void WhenNeitherConditionMet_ShouldReturnFalse()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 2.0,
MinSeedTime = 0,
MaxSeedTime = 3
};
const double ratio = 1.0;
TimeSpan seedingTime = TimeSpan.FromHours(1);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeFalse(),
() => result.Reason.ShouldBe(CleanReason.None)
);
}
}
}

View File

@@ -0,0 +1,54 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Tests.Verticals.DownloadClient;
public class TestDownloadService : DownloadService
{
public TestDownloadService(
ILogger<DownloadService> logger,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig, cache,
filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
}
public override void Dispose() { }
public override Task LoginAsync() => Task.CompletedTask;
public override Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads) => Task.FromResult(new DownloadCheckResult());
public override Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash, BlocklistType blocklistType,
ConcurrentBag<string> patterns, ConcurrentBag<Regex> regexes, IReadOnlyList<string> ignoredDownloads) => Task.FromResult(new BlockFilesResult());
public override Task DeleteDownload(string hash) => Task.CompletedTask;
public override Task CreateCategoryAsync(string name) => Task.CompletedTask;
public override Task<List<object>?> GetSeedingDownloads() => Task.FromResult<List<object>?>(null);
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories) => null;
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories) => null;
public override Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads) => Task.CompletedTask;
public override Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads) => Task.CompletedTask;
// Expose protected methods for testing
public new void ResetStalledStrikesOnProgress(string hash, long downloaded) => base.ResetStalledStrikesOnProgress(hash, downloaded);
public new SeedingCheckResult ShouldCleanDownload(double ratio, TimeSpan seedingTime, CleanCategory category) => base.ShouldCleanDownload(ratio, seedingTime, category);
}

View File

@@ -0,0 +1,30 @@
using Domain.Models.Deluge.Response;
using Infrastructure.Helpers;
namespace Infrastructure.Extensions;
public static class DelugeExtensions
{
public static bool ShouldIgnore(this DownloadStatus download, IReadOnlyList<string> ignoredDownloads)
{
foreach (string value in ignoredDownloads)
{
if (download.Hash?.Equals(value, StringComparison.InvariantCultureIgnoreCase) ?? false)
{
return true;
}
if (download.Label?.Equals(value, StringComparison.InvariantCultureIgnoreCase) ?? false)
{
return true;
}
if (download.Trackers.Any(x => UriService.GetDomain(x.Url)?.EndsWith(value, StringComparison.InvariantCultureIgnoreCase) ?? false))
{
return true;
}
}
return false;
}
}

View File

@@ -0,0 +1,55 @@
using System.Net;
using System.Net.Sockets;
namespace Infrastructure.Extensions;
public static class IpAddressExtensions
{
public static bool IsLocalAddress(this IPAddress ipAddress)
{
// Map back to IPv4 if mapped to IPv6, for example "::ffff:1.2.3.4" to "1.2.3.4".
if (ipAddress.IsIPv4MappedToIPv6)
{
ipAddress = ipAddress.MapToIPv4();
}
// Checks loopback ranges for both IPv4 and IPv6.
if (IPAddress.IsLoopback(ipAddress))
{
return true;
}
// IPv4
if (ipAddress.AddressFamily == AddressFamily.InterNetwork)
{
return IsLocalIPv4(ipAddress.GetAddressBytes());
}
// IPv6
if (ipAddress.AddressFamily == AddressFamily.InterNetworkV6)
{
return ipAddress.IsIPv6LinkLocal ||
ipAddress.IsIPv6UniqueLocal ||
ipAddress.IsIPv6SiteLocal;
}
return false;
}
private static bool IsLocalIPv4(byte[] ipv4Bytes)
{
// Link local (no IP assigned by DHCP): 169.254.0.0 to 169.254.255.255 (169.254.0.0/16)
bool IsLinkLocal() => ipv4Bytes[0] == 169 && ipv4Bytes[1] == 254;
// Class A private range: 10.0.0.0 to 10.255.255.255 (10.0.0.0/8)
bool IsClassA() => ipv4Bytes[0] == 10;
// Class B private range: 172.16.0.0 to 172.31.255.255 (172.16.0.0/12)
bool IsClassB() => ipv4Bytes[0] == 172 && ipv4Bytes[1] >= 16 && ipv4Bytes[1] <= 31;
// Class C private range: 192.168.0.0 to 192.168.255.255 (192.168.0.0/16)
bool IsClassC() => ipv4Bytes[0] == 192 && ipv4Bytes[1] == 168;
return IsLinkLocal() || IsClassA() || IsClassC() || IsClassB();
}
}
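A few concrete cases for IsLocalAddress, written as an illustrative xUnit/Shouldly sketch in the style of the test project added in this changeset (not part of the diff):
using System.Net;
using Infrastructure.Extensions;
using Shouldly;
using Xunit;
public class IpAddressExtensionsExamples
{
    [Theory]
    [InlineData("127.0.0.1", true)]     // loopback
    [InlineData("10.1.2.3", true)]      // class A private
    [InlineData("172.20.0.5", true)]    // class B private
    [InlineData("192.168.1.10", true)]  // class C private
    [InlineData("169.254.10.10", true)] // link local
    [InlineData("::1", true)]           // IPv6 loopback
    [InlineData("8.8.8.8", false)]      // public
    public void IsLocalAddress_Examples(string ip, bool expected)
        => IPAddress.Parse(ip).IsLocalAddress().ShouldBe(expected);
}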

View File

@@ -0,0 +1,50 @@
using Infrastructure.Helpers;
using QBittorrent.Client;
namespace Infrastructure.Extensions;
public static class QBitExtensions
{
public static bool ShouldIgnore(this TorrentInfo download, IReadOnlyList<string> ignoredDownloads)
{
foreach (string value in ignoredDownloads)
{
if (download.Hash.Equals(value, StringComparison.InvariantCultureIgnoreCase))
{
return true;
}
if (download.Category.Equals(value, StringComparison.InvariantCultureIgnoreCase))
{
return true;
}
if (download.Tags.Contains(value, StringComparer.InvariantCultureIgnoreCase))
{
return true;
}
}
return false;
}
public static bool ShouldIgnore(this TorrentTracker tracker, IReadOnlyList<string> ignoredDownloads)
{
string? trackerUrl = UriService.GetDomain(tracker.Url);
if (trackerUrl is null)
{
return false;
}
foreach (string value in ignoredDownloads)
{
if (trackerUrl.EndsWith(value, StringComparison.InvariantCultureIgnoreCase))
{
return true;
}
}
return false;
}
}

View File

@@ -0,0 +1,43 @@
using Infrastructure.Helpers;
using Transmission.API.RPC.Entity;
namespace Infrastructure.Extensions;
public static class TransmissionExtensions
{
public static bool ShouldIgnore(this TorrentInfo download, IReadOnlyList<string> ignoredDownloads)
{
foreach (string value in ignoredDownloads)
{
if (download.HashString?.Equals(value, StringComparison.InvariantCultureIgnoreCase) ?? false)
{
return true;
}
if (download.GetCategory().Equals(value, StringComparison.InvariantCultureIgnoreCase))
{
return true;
}
bool? hasIgnoredTracker = download.Trackers?
.Any(x => UriService.GetDomain(x.Announce)?.EndsWith(value, StringComparison.InvariantCultureIgnoreCase) ?? false);
if (hasIgnoredTracker is true)
{
return true;
}
}
return false;
}
public static string GetCategory(this TorrentInfo download)
{
if (string.IsNullOrEmpty(download.DownloadDir))
{
return string.Empty;
}
return Path.GetFileName(Path.TrimEndingDirectorySeparator(download.DownloadDir));
}
}
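Transmission has no category concept of its own, so GetCategory falls back to the last segment of the download directory; concretely, the Path call above behaves like this (illustrative paths):
Path.GetFileName(Path.TrimEndingDirectorySeparator("/downloads/tv-sonarr/")); // "tv-sonarr"
Path.GetFileName(Path.TrimEndingDirectorySeparator("/downloads/tv-sonarr"));  // "tv-sonarr"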

View File

@@ -0,0 +1,16 @@
using Domain.Enums;
namespace Infrastructure.Helpers;
public static class CacheKeys
{
public static string Strike(StrikeType strikeType, string hash) => $"{strikeType.ToString()}_{hash}";
public static string BlocklistType(InstanceType instanceType) => $"{instanceType.ToString()}_type";
public static string BlocklistPatterns(InstanceType instanceType) => $"{instanceType.ToString()}_patterns";
public static string BlocklistRegexes(InstanceType instanceType) => $"{instanceType.ToString()}_regexes";
public static string StrikeItem(string hash, StrikeType strikeType) => $"item_{hash}_{strikeType.ToString()}";
public static string IgnoredDownloads(string name) => $"{name}_ignored";
}
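The keys are plain concatenated strings, so for example (using enum and config type names that appear elsewhere in this changeset; usings omitted):
string strikeKey   = CacheKeys.Strike(StrikeType.Stalled, "abc123");         // "Stalled_abc123"
string patternsKey = CacheKeys.BlocklistPatterns(InstanceType.Sonarr);       // "Sonarr_patterns"
string ignoredKey  = CacheKeys.IgnoredDownloads(nameof(QueueCleanerConfig)); // "QueueCleanerConfig_ignored"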

View File

@@ -0,0 +1,37 @@
using System.Text.RegularExpressions;
namespace Infrastructure.Helpers;
public static class UriService
{
public static string? GetDomain(string? input)
{
if (string.IsNullOrWhiteSpace(input))
{
return null;
}
// add "http://" if scheme is missing to help Uri.TryCreate
if (!input.StartsWith("http", StringComparison.OrdinalIgnoreCase))
{
input = "http://" + input;
}
if (Uri.TryCreate(input, UriKind.Absolute, out var uri))
{
return uri.Host;
}
// url might be malformed
var regex = new Regex(@"^(?:https?:\/\/)?([^\/\?:]+)", RegexOptions.IgnoreCase);
var match = regex.Match(input);
if (match.Success)
{
return match.Groups[1].Value;
}
// could not extract
return null;
}
}
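A few illustrative inputs and what GetDomain returns for them: a well-formed URL is parsed by Uri.TryCreate, a bare host gets an http:// prefix first, and blank input yields null.
UriService.GetDomain("https://tracker.example.org/announce?passkey=1"); // "tracker.example.org"
UriService.GetDomain("tracker.example.org/announce");                   // "tracker.example.org"
UriService.GetDomain("   ");                                            // null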

View File

@@ -12,11 +12,15 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="FLM.Transmission" Version="1.0.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="QBittorrent.Client" Version="1.9.24285.1" />
<PackageReference Include="FLM.QBittorrent" Version="1.0.1" />
<PackageReference Include="FLM.Transmission" Version="1.0.3" />
<PackageReference Include="Mapster" Version="7.4.0" />
<PackageReference Include="MassTransit" Version="8.3.6" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.2" />
<PackageReference Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageReference Include="Quartz" Version="3.13.1" />
<PackageReference Include="Scrutor" Version="6.0.1" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,77 @@
using System.Reflection;
using Common.Attributes;
using Common.Configuration.General;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Interceptors;
public class DryRunInterceptor : IDryRunInterceptor
{
private readonly ILogger<DryRunInterceptor> _logger;
private readonly DryRunConfig _config;
public DryRunInterceptor(ILogger<DryRunInterceptor> logger, IOptions<DryRunConfig> config)
{
_logger = logger;
_config = config.Value;
}
public void Intercept(Action action)
{
MethodInfo methodInfo = action.Method;
if (IsDryRun(methodInfo))
{
_logger.LogInformation("[DRY RUN] skipping method: {name}", methodInfo.Name);
return;
}
action();
}
public Task InterceptAsync(Delegate action, params object[] parameters)
{
MethodInfo methodInfo = action.Method;
if (IsDryRun(methodInfo))
{
_logger.LogInformation("[DRY RUN] skipping method: {name}", methodInfo.Name);
return Task.CompletedTask;
}
object? result = action.DynamicInvoke(parameters);
if (result is Task task)
{
return task;
}
return Task.CompletedTask;
}
public Task<T?> InterceptAsync<T>(Delegate action, params object[] parameters)
{
MethodInfo methodInfo = action.Method;
if (IsDryRun(methodInfo))
{
_logger.LogInformation("[DRY RUN] skipping method: {name}", methodInfo.Name);
return Task.FromResult(default(T));
}
object? result = action.DynamicInvoke(parameters);
if (result is Task<T?> task)
{
return task;
}
return Task.FromResult(default(T));
}
private bool IsDryRun(MethodInfo method)
{
return method.GetCustomAttributes(typeof(DryRunSafeguardAttribute), true).Any() && _config.IsDryRun;
}
}
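Only methods marked with [DryRunSafeguard] are skipped when DRY_RUN is enabled; a minimal sketch of the calling pattern follows (the class, method and URL below are made up for illustration):
using Common.Attributes;
using Infrastructure.Interceptors;
public sealed class ExampleRemover
{
    private readonly IDryRunInterceptor _dryRunInterceptor;
    private readonly HttpClient _httpClient;
    public ExampleRemover(IDryRunInterceptor dryRunInterceptor, HttpClient httpClient)
    {
        _dryRunInterceptor = dryRunInterceptor;
        _httpClient = httpClient;
    }
    public Task RemoveAsync(string hash) =>
        // with DRY_RUN enabled, DeleteAsync is logged and skipped instead of executed
        _dryRunInterceptor.InterceptAsync(DeleteAsync, hash);
    [DryRunSafeguard]
    private Task DeleteAsync(string hash) =>
        _httpClient.DeleteAsync($"/downloads/{hash}");
}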

View File

@@ -0,0 +1,10 @@
namespace Infrastructure.Interceptors;
public interface IDryRunInterceptor
{
void Intercept(Action action);
Task InterceptAsync(Delegate action, params object[] parameters);
Task<T?> InterceptAsync<T>(Delegate action, params object[] parameters);
}

View File

@@ -0,0 +1,82 @@
using Common.Configuration;
using Infrastructure.Helpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Providers;
public sealed class IgnoredDownloadsProvider<T>
where T : IIgnoredDownloadsConfig
{
private readonly ILogger<IgnoredDownloadsProvider<T>> _logger;
private IIgnoredDownloadsConfig _config;
private readonly IMemoryCache _cache;
private DateTime _lastModified = DateTime.MinValue;
public IgnoredDownloadsProvider(ILogger<IgnoredDownloadsProvider<T>> logger, IOptionsMonitor<T> config, IMemoryCache cache)
{
_config = config.CurrentValue;
config.OnChange((newValue) => _config = newValue);
_logger = logger;
_cache = cache;
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return;
}
if (!File.Exists(_config.IgnoredDownloadsPath))
{
throw new FileNotFoundException("file not found", _config.IgnoredDownloadsPath);
}
}
public async Task<IReadOnlyList<string>> GetIgnoredDownloads()
{
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return Array.Empty<string>();
}
FileInfo fileInfo = new(_config.IgnoredDownloadsPath);
if (fileInfo.LastWriteTime > _lastModified ||
!_cache.TryGetValue(CacheKeys.IgnoredDownloads(typeof(T).Name), out IReadOnlyList<string>? ignoredDownloads) ||
ignoredDownloads is null)
{
_lastModified = fileInfo.LastWriteTime;
return await LoadFile();
}
return ignoredDownloads;
}
private async Task<IReadOnlyList<string>> LoadFile()
{
try
{
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return Array.Empty<string>();
}
string[] ignoredDownloads = (await File.ReadAllLinesAsync(_config.IgnoredDownloadsPath))
.Where(x => !string.IsNullOrWhiteSpace(x))
.ToArray();
_cache.Set(CacheKeys.IgnoredDownloads(typeof(T).Name), ignoredDownloads);
_logger.LogInformation("ignored downloads reloaded");
return ignoredDownloads;
}
catch (Exception exception)
{
_logger.LogError(exception, "error while reading ignored downloads file | {file}", _config.IgnoredDownloadsPath);
}
return Array.Empty<string>();
}
}
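The file behind IGNORED_DOWNLOADS_PATH is read one entry per line, and the ShouldIgnore extension methods in this changeset match each non-empty entry case-insensitively against a download's hash, its category/label/tag, or a tracker domain suffix. An illustrative file (values made up) could look like:
fb2a1f9dd36b5d0cbf29c9af8ead12b71ac47e2b
tv-sonarr
tracker.example.org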

View File

@@ -0,0 +1,86 @@
using System.Net;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
using Common.Configuration.General;
using Common.Enums;
using Infrastructure.Extensions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Services;
public class CertificateValidationService
{
private readonly ILogger<CertificateValidationService> _logger;
private readonly HttpConfig _config;
public CertificateValidationService(ILogger<CertificateValidationService> logger, IOptions<HttpConfig> config)
{
_logger = logger;
_config = config.Value;
}
public bool ShouldByPassValidationError(object sender, X509Certificate? certificate, X509Chain? chain, SslPolicyErrors sslPolicyErrors)
{
var targetHostName = string.Empty;
if (sender is not SslStream && sender is not string)
{
return true;
}
if (sender is SslStream request)
{
targetHostName = request.TargetHostName;
}
// Mailkit passes host in sender as string
if (sender is string stringHost)
{
targetHostName = stringHost;
}
if (certificate is X509Certificate2 cert2 && cert2.SignatureAlgorithm.FriendlyName == "md5RSA")
{
_logger.LogError(
$"https://{targetHostName} uses the obsolete md5 hash in its https certificate, if that is your certificate, please (re)create certificate with better algorithm as soon as possible.");
}
if (sslPolicyErrors == SslPolicyErrors.None)
{
return true;
}
if (targetHostName is "localhost" or "127.0.0.1")
{
return true;
}
var ipAddresses = GetIpAddresses(targetHostName);
if (_config.CertificateValidation == CertificateValidationType.Disabled)
{
return true;
}
if (_config.CertificateValidation == CertificateValidationType.DisabledForLocalAddresses &&
ipAddresses.All(i => i.IsLocalAddress()))
{
return true;
}
_logger.LogError($"certificate validation for {targetHostName} failed. {sslPolicyErrors}");
return false;
}
private static IPAddress[] GetIpAddresses(string host)
{
if (IPAddress.TryParse(host, out var ipAddress))
{
return [ipAddress];
}
return Dns.GetHostEntry(host).AddressList;
}
}

View File

@@ -1,9 +1,13 @@
using Common.Configuration.Arr;
using Common.Attributes;
using Common.Configuration.Arr;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
using Common.Helpers;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -11,35 +15,39 @@ using Newtonsoft.Json;
namespace Infrastructure.Verticals.Arr;
public abstract class ArrClient
public abstract class ArrClient : IArrClient
{
protected readonly ILogger<ArrClient> _logger;
protected readonly HttpClient _httpClient;
protected readonly LoggingConfig _loggingConfig;
protected readonly QueueCleanerConfig _queueCleanerConfig;
protected readonly Striker _striker;
protected readonly IStriker _striker;
protected readonly IDryRunInterceptor _dryRunInterceptor;
protected ArrClient(
ILogger<ArrClient> logger,
IHttpClientFactory httpClientFactory,
IOptions<LoggingConfig> loggingConfig,
IOptions<QueueCleanerConfig> queueCleanerConfig,
Striker striker
IStriker striker,
IDryRunInterceptor dryRunInterceptor
)
{
_logger = logger;
_striker = striker;
_httpClient = httpClientFactory.CreateClient();
_httpClient = httpClientFactory.CreateClient(Constants.HttpClientWithRetryName);
_loggingConfig = loggingConfig.Value;
_queueCleanerConfig = queueCleanerConfig.Value;
_striker = striker;
_dryRunInterceptor = dryRunInterceptor;
}
public virtual async Task<QueueListResponse> GetQueueItemsAsync(ArrInstance arrInstance, int page)
{
Uri uri = new(arrInstance.Url, GetQueueUrlPath(page));
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/{GetQueueUrlPath().TrimStart('/')}";
uriBuilder.Query = GetQueueUrlQuery(page);
using HttpRequestMessage request = new(HttpMethod.Get, uri);
using HttpRequestMessage request = new(HttpMethod.Get, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
@@ -50,7 +58,7 @@ public abstract class ArrClient
}
catch
{
_logger.LogError("queue list failed | {uri}", uri);
_logger.LogError("queue list failed | {uri}", uriBuilder.Uri);
throw;
}
@@ -59,24 +67,43 @@ public abstract class ArrClient
if (queueResponse is null)
{
throw new Exception($"unrecognized queue list response | {uri} | {responseBody}");
throw new Exception($"unrecognized queue list response | {uriBuilder.Uri} | {responseBody}");
}
return queueResponse;
}
public virtual bool ShouldRemoveFromQueue(QueueRecord record)
public virtual async Task<bool> ShouldRemoveFromQueue(InstanceType instanceType, QueueRecord record, bool isPrivateDownload)
{
if (_queueCleanerConfig.ImportFailedIgnorePrivate && isPrivateDownload)
{
// ignore private trackers
_logger.LogDebug("skip failed import check | download is private | {name}", record.Title);
return false;
}
bool hasWarn() => record.TrackedDownloadStatus
.Equals("warning", StringComparison.InvariantCultureIgnoreCase);
bool isImportBlocked() => record.TrackedDownloadState
.Equals("importBlocked", StringComparison.InvariantCultureIgnoreCase);
bool isImportPending() => record.TrackedDownloadState
.Equals("importPending", StringComparison.InvariantCultureIgnoreCase);
if (hasWarn() && (isImportBlocked() || isImportPending()))
bool isImportFailed() => record.TrackedDownloadState
.Equals("importFailed", StringComparison.InvariantCultureIgnoreCase);
bool isFailedLidarr() => instanceType is InstanceType.Lidarr &&
(record.Status.Equals("failed", StringComparison.InvariantCultureIgnoreCase) ||
record.Status.Equals("completed", StringComparison.InvariantCultureIgnoreCase)) &&
hasWarn();
if (hasWarn() && (isImportBlocked() || isImportPending() || isImportFailed()) || isFailedLidarr())
{
return _striker.StrikeAndCheckLimit(
if (HasIgnoredPatterns(record))
{
_logger.LogDebug("skip failed import check | contains ignored pattern | {name}", record.Title);
return false;
}
return await _striker.StrikeAndCheckLimit(
record.DownloadId,
record.Title,
_queueCleanerConfig.ImportFailedMaxStrikes,
@@ -87,29 +114,42 @@ public abstract class ArrClient
return false;
}
public virtual async Task DeleteQueueItemAsync(ArrInstance arrInstance, QueueRecord queueRecord)
public virtual async Task DeleteQueueItemAsync(
ArrInstance arrInstance,
QueueRecord record,
bool removeFromClient,
DeleteReason deleteReason
)
{
Uri uri = new(arrInstance.Url, $"/api/v3/queue/{queueRecord.Id}?removeFromClient=true&blocklist=true&skipRedownload=true&changeCategory=false");
using HttpRequestMessage request = new(HttpMethod.Delete, uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/{GetQueueDeleteUrlPath(record.Id).TrimStart('/')}";
uriBuilder.Query = GetQueueDeleteUrlQuery(removeFromClient);
try
{
response.EnsureSuccessStatusCode();
using HttpRequestMessage request = new(HttpMethod.Delete, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
HttpResponseMessage? response = await _dryRunInterceptor.InterceptAsync<HttpResponseMessage>(SendRequestAsync, request);
response?.Dispose();
_logger.LogInformation("queue item deleted | {url} | {title}", arrInstance.Url, queueRecord.Title);
_logger.LogInformation(
removeFromClient
? "queue item deleted with reason {reason} | {url} | {title}"
: "queue item removed from arr with reason {reason} | {url} | {title}",
deleteReason.ToString(),
arrInstance.Url,
record.Title
);
}
catch
{
_logger.LogError("queue delete failed | {uri} | {title}", uri, queueRecord.Title);
_logger.LogError("queue delete failed | {uri} | {title}", uriBuilder.Uri, record.Title);
throw;
}
}
public abstract Task RefreshItemsAsync(ArrInstance arrInstance, ArrConfig config, HashSet<SearchItem>? items);
public abstract Task RefreshItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items);
public virtual bool IsRecordValid(QueueRecord record)
{
@@ -119,19 +159,57 @@ public abstract class ArrClient
return false;
}
if (record.DownloadId.Equals(record.Title, StringComparison.InvariantCultureIgnoreCase))
{
_logger.LogDebug("skip | item is not ready yet | {title}", record.Title);
return false;
}
return true;
}
protected abstract string GetQueueUrlPath(int page);
protected abstract string GetQueueUrlPath();
protected abstract string GetQueueUrlQuery(int page);
protected abstract string GetQueueDeleteUrlPath(long recordId);
protected abstract string GetQueueDeleteUrlQuery(bool removeFromClient);
protected virtual void SetApiKey(HttpRequestMessage request, string apiKey)
{
request.Headers.Add("x-api-key", apiKey);
}
[DryRunSafeguard]
protected virtual async Task<HttpResponseMessage> SendRequestAsync(HttpRequestMessage request)
{
HttpResponseMessage response = await _httpClient.SendAsync(request);
response.EnsureSuccessStatusCode();
return response;
}
private bool HasIgnoredPatterns(QueueRecord record)
{
if (_queueCleanerConfig.ImportFailedIgnorePatterns?.Count is null or 0)
{
// no patterns are configured
return false;
}
if (record.StatusMessages?.Count is null or 0)
{
// no status message found
return false;
}
HashSet<string> messages = record.StatusMessages
.SelectMany(x => x.Messages ?? Enumerable.Empty<string>())
.ToHashSet();
record.StatusMessages.Select(x => x.Title)
.ToList()
.ForEach(x => messages.Add(x));
return messages.Any(
m => _queueCleanerConfig.ImportFailedIgnorePatterns.Any(
p => !string.IsNullOrWhiteSpace(p.Trim()) && m.Contains(p, StringComparison.InvariantCultureIgnoreCase)
)
);
}
}
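For context, the UriBuilder composition above (TrimEnd('/') on the existing path, TrimStart('/') on the appended segment) is what lets instances that sit behind a URL base work. A minimal sketch, assuming a hypothetical Sonarr instance served under /sonarr; none of this is repo code:

using System;

// Minimal sketch, not repo code: shows how the path/query composition above
// preserves a URL base such as /sonarr without doubling slashes.
Uri instanceUrl = new("http://localhost:8989/sonarr"); // hypothetical instance URL with a base path
UriBuilder uriBuilder = new(instanceUrl);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/{"/api/v3/queue/123".TrimStart('/')}";
uriBuilder.Query = "blocklist=true&skipRedownload=true&changeCategory=false&removeFromClient=true";
Console.WriteLine(uriBuilder.Uri);
// -> http://localhost:8989/sonarr/api/v3/queue/123?blocklist=true&skipRedownload=true&changeCategory=false&removeFromClient=true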


@@ -1,6 +1,7 @@
using Common.Configuration;
using Common.Configuration.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.Arr.Interfaces;
using Microsoft.Extensions.Logging;
namespace Infrastructure.Verticals.Arr;
@@ -14,7 +15,7 @@ public sealed class ArrQueueIterator
_logger = logger;
}
public async Task Iterate(ArrClient arrClient, ArrInstance arrInstance, Func<IReadOnlyList<QueueRecord>, Task> action)
public async Task Iterate(IArrClient arrClient, ArrInstance arrInstance, Func<IReadOnlyList<QueueRecord>, Task> action)
{
const ushort maxPage = 100;
ushort page = 1;


@@ -0,0 +1,19 @@
using Common.Configuration.Arr;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface IArrClient
{
Task<QueueListResponse> GetQueueItemsAsync(ArrInstance arrInstance, int page);
Task<bool> ShouldRemoveFromQueue(InstanceType instanceType, QueueRecord record, bool isPrivateDownload);
Task DeleteQueueItemAsync(ArrInstance arrInstance, QueueRecord record, bool removeFromClient, DeleteReason deleteReason);
Task RefreshItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items);
bool IsRecordValid(QueueRecord record);
}
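The per-arr marker interfaces that follow (ILidarrClient, IRadarrClient, ISonarrClient) are empty on purpose: they let each concrete client be resolved individually from DI while shared consumers such as ArrQueueIterator depend only on IArrClient. A rough registration sketch; the extension method name is illustrative and not the repo's actual composition root:

// Illustrative only; the real service registration lives elsewhere in the repo.
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.Arr.Interfaces;
using Microsoft.Extensions.DependencyInjection;

public static class ArrClientRegistration
{
    public static IServiceCollection AddArrClients(this IServiceCollection services) =>
        services
            .AddTransient<ISonarrClient, SonarrClient>()
            .AddTransient<IRadarrClient, RadarrClient>()
            .AddTransient<ILidarrClient, LidarrClient>();
}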


@@ -0,0 +1,5 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface ILidarrClient : IArrClient
{
}


@@ -0,0 +1,5 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface IRadarrClient : IArrClient
{
}


@@ -0,0 +1,5 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface ISonarrClient : IArrClient
{
}


@@ -0,0 +1,168 @@
using System.Text;
using Common.Configuration.Arr;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Domain.Models.Lidarr;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
namespace Infrastructure.Verticals.Arr;
public class LidarrClient : ArrClient, ILidarrClient
{
public LidarrClient(
ILogger<LidarrClient> logger,
IHttpClientFactory httpClientFactory,
IOptions<LoggingConfig> loggingConfig,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IStriker striker,
IDryRunInterceptor dryRunInterceptor
) : base(logger, httpClientFactory, loggingConfig, queueCleanerConfig, striker, dryRunInterceptor)
{
}
protected override string GetQueueUrlPath()
{
return "/api/v1/queue";
}
protected override string GetQueueUrlQuery(int page)
{
return $"page={page}&pageSize=200&includeUnknownArtistItems=true&includeArtist=true&includeAlbum=true";
}
protected override string GetQueueDeleteUrlPath(long recordId)
{
return $"/api/v1/queue/{recordId}";
}
protected override string GetQueueDeleteUrlQuery(bool removeFromClient)
{
string query = "blocklist=true&skipRedownload=true&changeCategory=false";
query += removeFromClient ? "&removeFromClient=true" : "&removeFromClient=false";
return query;
}
public override async Task RefreshItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items)
{
if (items?.Count is null or 0)
{
return;
}
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v1/command";
foreach (var command in GetSearchCommands(items))
{
using HttpRequestMessage request = new(HttpMethod.Post, uriBuilder.Uri);
request.Content = new StringContent(
JsonConvert.SerializeObject(command, new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore }),
Encoding.UTF8,
"application/json"
);
SetApiKey(request, arrInstance.ApiKey);
string? logContext = await ComputeCommandLogContextAsync(arrInstance, command);
try
{
HttpResponseMessage? response = await _dryRunInterceptor.InterceptAsync<HttpResponseMessage>(SendRequestAsync, request);
response?.Dispose();
_logger.LogInformation("{log}", GetSearchLog(arrInstance.Url, command, true, logContext));
}
catch
{
_logger.LogError("{log}", GetSearchLog(arrInstance.Url, command, false, logContext));
throw;
}
}
}
public override bool IsRecordValid(QueueRecord record)
{
if (record.ArtistId is 0 || record.AlbumId is 0)
{
_logger.LogDebug("skip | artist id and/or album id missing | {title}", record.Title);
return false;
}
return base.IsRecordValid(record);
}
private static string GetSearchLog(
Uri instanceUrl,
LidarrCommand command,
bool success,
string? logContext
)
{
string status = success ? "triggered" : "failed";
return $"album search {status} | {instanceUrl} | {logContext ?? $"albums: {string.Join(',', command.AlbumIds)}"}";
}
private async Task<string?> ComputeCommandLogContextAsync(ArrInstance arrInstance, LidarrCommand command)
{
try
{
if (!_loggingConfig.Enhanced) return null;
StringBuilder log = new();
var albums = await GetAlbumsAsync(arrInstance, command.AlbumIds);
if (albums?.Count is null or 0) return null;
var groups = albums
.GroupBy(x => x.Artist.Id)
.ToList();
foreach (var group in groups)
{
var first = group.First();
log.Append($"[{first.Artist.ArtistName} albums {string.Join(',', group.Select(x => x.Title).ToList())}]");
}
return log.ToString();
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to compute log context");
}
return null;
}
private async Task<List<Album>?> GetAlbumsAsync(ArrInstance arrInstance, List<long> albumIds)
{
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v1/album";
uriBuilder.Query = string.Join('&', albumIds.Select(x => $"albumIds={x}"));
using HttpRequestMessage request = new(HttpMethod.Get, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
using var response = await _httpClient.SendAsync(request);
response.EnsureSuccessStatusCode();
string responseBody = await response.Content.ReadAsStringAsync();
return JsonConvert.DeserializeObject<List<Album>>(responseBody);
}
private List<LidarrCommand> GetSearchCommands(HashSet<SearchItem> items)
{
const string albumSearch = "AlbumSearch";
return [new LidarrCommand { Name = albumSearch, AlbumIds = items.Select(i => i.Id).ToList() }];
}
}
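The split between GetQueueUrlPath and GetQueueUrlQuery is consumed by the base class when paging through the queue. A fragment-style sketch of how they presumably compose; BuildQueueUri is a made-up helper name and the real GetQueueItemsAsync is outside this diff:

// Sketch only: composes the per-client path and query into the final queue URI,
// again preserving any URL base configured on the instance.
private Uri BuildQueueUri(ArrInstance arrInstance, int page)
{
    UriBuilder uriBuilder = new(arrInstance.Url);
    uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/{GetQueueUrlPath().TrimStart('/')}";
    uriBuilder.Query = GetQueueUrlQuery(page);
    return uriBuilder.Uri; // e.g. .../api/v1/queue?page=1&pageSize=200&includeUnknownArtistItems=true&...
}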


@@ -5,32 +5,52 @@ using Common.Configuration.QueueCleaner;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Domain.Models.Radarr;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
namespace Infrastructure.Verticals.Arr;
public sealed class RadarrClient : ArrClient
public class RadarrClient : ArrClient, IRadarrClient
{
public RadarrClient(
ILogger<ArrClient> logger,
IHttpClientFactory httpClientFactory,
IOptions<LoggingConfig> loggingConfig,
IOptions<QueueCleanerConfig> queueCleanerConfig,
Striker striker
) : base(logger, httpClientFactory, loggingConfig, queueCleanerConfig, striker)
IStriker striker,
IDryRunInterceptor dryRunInterceptor
) : base(logger, httpClientFactory, loggingConfig, queueCleanerConfig, striker, dryRunInterceptor)
{
}
protected override string GetQueueUrlPath(int page)
protected override string GetQueueUrlPath()
{
return $"/api/v3/queue?page={page}&pageSize=200&includeUnknownMovieItems=true&includeMovie=true";
return "/api/v3/queue";
}
public override async Task RefreshItemsAsync(ArrInstance arrInstance, ArrConfig config, HashSet<SearchItem>? items)
protected override string GetQueueUrlQuery(int page)
{
return $"page={page}&pageSize=200&includeUnknownMovieItems=true&includeMovie=true";
}
protected override string GetQueueDeleteUrlPath(long recordId)
{
return $"/api/v3/queue/{recordId}";
}
protected override string GetQueueDeleteUrlQuery(bool removeFromClient)
{
string query = "blocklist=true&skipRedownload=true&changeCategory=false";
query += removeFromClient ? "&removeFromClient=true" : "&removeFromClient=false";
return query;
}
public override async Task RefreshItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items)
{
if (items?.Count is null or 0)
{
@@ -39,14 +59,16 @@ public sealed class RadarrClient : ArrClient
List<long> ids = items.Select(item => item.Id).ToList();
Uri uri = new(arrInstance.Url, "/api/v3/command");
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v3/command";
RadarrCommand command = new()
{
Name = "MoviesSearch",
MovieIds = ids,
};
using HttpRequestMessage request = new(HttpMethod.Post, uri);
using HttpRequestMessage request = new(HttpMethod.Post, uriBuilder.Uri);
request.Content = new StringContent(
JsonConvert.SerializeObject(command),
Encoding.UTF8,
@@ -54,12 +76,12 @@ public sealed class RadarrClient : ArrClient
);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
string? logContext = await ComputeCommandLogContextAsync(arrInstance, command);
try
{
response.EnsureSuccessStatusCode();
HttpResponseMessage? response = await _dryRunInterceptor.InterceptAsync<HttpResponseMessage>(SendRequestAsync, request);
response?.Dispose();
_logger.LogInformation("{log}", GetSearchLog(arrInstance.Url, command, true, logContext));
}
@@ -74,7 +96,7 @@ public sealed class RadarrClient : ArrClient
{
if (record.MovieId is 0)
{
_logger.LogDebug("skip | item information missing | {title}", record.Title);
_logger.LogDebug("skip | movie id missing | {title}", record.Title);
return false;
}
@@ -124,8 +146,10 @@ public sealed class RadarrClient : ArrClient
private async Task<Movie?> GetMovie(ArrInstance arrInstance, long movieId)
{
Uri uri = new(arrInstance.Url, $"api/v3/movie/{movieId}");
using HttpRequestMessage request = new(HttpMethod.Get, uri);
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v3/movie/{movieId}";
using HttpRequestMessage request = new(HttpMethod.Get, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);


@@ -5,43 +5,65 @@ using Common.Configuration.QueueCleaner;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Domain.Models.Sonarr;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
using Series = Domain.Models.Sonarr.Series;
namespace Infrastructure.Verticals.Arr;
public sealed class SonarrClient : ArrClient
public class SonarrClient : ArrClient, ISonarrClient
{
public SonarrClient(
ILogger<SonarrClient> logger,
IHttpClientFactory httpClientFactory,
IOptions<LoggingConfig> loggingConfig,
IOptions<QueueCleanerConfig> queueCleanerConfig,
Striker striker
) : base(logger, httpClientFactory, loggingConfig, queueCleanerConfig, striker)
IStriker striker,
IDryRunInterceptor dryRunInterceptor
) : base(logger, httpClientFactory, loggingConfig, queueCleanerConfig, striker, dryRunInterceptor)
{
}
protected override string GetQueueUrlPath(int page)
protected override string GetQueueUrlPath()
{
return $"/api/v3/queue?page={page}&pageSize=200&includeUnknownSeriesItems=true&includeSeries=true";
return "/api/v3/queue";
}
public override async Task RefreshItemsAsync(ArrInstance arrInstance, ArrConfig config, HashSet<SearchItem>? items)
protected override string GetQueueUrlQuery(int page)
{
return $"page={page}&pageSize=200&includeUnknownSeriesItems=true&includeSeries=true&includeEpisode=true";
}
protected override string GetQueueDeleteUrlPath(long recordId)
{
return $"/api/v3/queue/{recordId}";
}
protected override string GetQueueDeleteUrlQuery(bool removeFromClient)
{
string query = "blocklist=true&skipRedownload=true&changeCategory=false";
query += removeFromClient ? "&removeFromClient=true" : "&removeFromClient=false";
return query;
}
public override async Task RefreshItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items)
{
if (items?.Count is null or 0)
{
return;
}
Uri uri = new(arrInstance.Url, "/api/v3/command");
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v3/command";
foreach (SonarrCommand command in GetSearchCommands(items.Cast<SonarrSearchItem>().ToHashSet()))
{
using HttpRequestMessage request = new(HttpMethod.Post, uri);
using HttpRequestMessage request = new(HttpMethod.Post, uriBuilder.Uri);
request.Content = new StringContent(
JsonConvert.SerializeObject(command, new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore }),
Encoding.UTF8,
@@ -49,13 +71,13 @@ public sealed class SonarrClient : ArrClient
);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
string? logContext = await ComputeCommandLogContextAsync(arrInstance, command, command.SearchType);
try
{
response.EnsureSuccessStatusCode();
HttpResponseMessage? response = await _dryRunInterceptor.InterceptAsync<HttpResponseMessage>(SendRequestAsync, request);
response?.Dispose();
_logger.LogInformation("{log}", GetSearchLog(command.SearchType, arrInstance.Url, command, true, logContext));
}
catch
@@ -70,7 +92,7 @@ public sealed class SonarrClient : ArrClient
{
if (record.EpisodeId is 0 || record.SeriesId is 0)
{
_logger.LogDebug("skip | item information missing | {title}", record.Title);
_logger.LogDebug("skip | episode id and/or series id missing | {title}", record.Title);
return false;
}
@@ -187,8 +209,11 @@ public sealed class SonarrClient : ArrClient
private async Task<List<Episode>?> GetEpisodesAsync(ArrInstance arrInstance, List<long> episodeIds)
{
Uri uri = new(arrInstance.Url, $"api/v3/episode?{string.Join('&', episodeIds.Select(x => $"episodeIds={x}"))}");
using HttpRequestMessage request = new(HttpMethod.Get, uri);
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v3/episode";
uriBuilder.Query = string.Join('&', episodeIds.Select(x => $"episodeIds={x}"));
using HttpRequestMessage request = new(HttpMethod.Get, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
@@ -200,8 +225,10 @@ public sealed class SonarrClient : ArrClient
private async Task<Series?> GetSeriesAsync(ArrInstance arrInstance, long seriesId)
{
Uri uri = new(arrInstance.Url, $"api/v3/series/{seriesId}");
using HttpRequestMessage request = new(HttpMethod.Get, uri);
UriBuilder uriBuilder = new(arrInstance.Url);
uriBuilder.Path = $"{uriBuilder.Path.TrimEnd('/')}/api/v3/series/{seriesId}";
using HttpRequestMessage request = new(HttpMethod.Get, uriBuilder.Uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);


@@ -1,8 +1,12 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.RegularExpressions;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Helpers;
using Domain.Enums;
using Infrastructure.Helpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -11,78 +15,99 @@ namespace Infrastructure.Verticals.ContentBlocker;
public sealed class BlocklistProvider
{
private readonly ILogger<BlocklistProvider> _logger;
private readonly ContentBlockerConfig _config;
private readonly SonarrConfig _sonarrConfig;
private readonly RadarrConfig _radarrConfig;
private readonly LidarrConfig _lidarrConfig;
private readonly HttpClient _httpClient;
public BlocklistType BlocklistType { get; }
public ConcurrentBag<string> Patterns { get; } = [];
public ConcurrentBag<Regex> Regexes { get; } = [];
private readonly IMemoryCache _cache;
private bool _initialized;
public BlocklistProvider(
ILogger<BlocklistProvider> logger,
IOptions<ContentBlockerConfig> config,
IHttpClientFactory httpClientFactory)
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
IMemoryCache cache,
IHttpClientFactory httpClientFactory
)
{
_logger = logger;
_config = config.Value;
_httpClient = httpClientFactory.CreateClient();
_config.Validate();
if (_config.Blacklist?.Enabled is true)
{
BlocklistType = BlocklistType.Blacklist;
}
if (_config.Whitelist?.Enabled is true)
{
BlocklistType = BlocklistType.Whitelist;
}
_sonarrConfig = sonarrConfig.Value;
_radarrConfig = radarrConfig.Value;
_lidarrConfig = lidarrConfig.Value;
_cache = cache;
_httpClient = httpClientFactory.CreateClient(Constants.HttpClientWithRetryName);
}
public async Task LoadBlocklistAsync()
public async Task LoadBlocklistsAsync()
{
if (Patterns.Count > 0 || Regexes.Count > 0)
if (_initialized)
{
_logger.LogDebug("blocklist already loaded");
_logger.LogTrace("blocklists already loaded");
return;
}
try
{
await LoadPatternsAndRegexesAsync();
await LoadPatternsAndRegexesAsync(_sonarrConfig, InstanceType.Sonarr);
await LoadPatternsAndRegexesAsync(_radarrConfig, InstanceType.Radarr);
await LoadPatternsAndRegexesAsync(_lidarrConfig, InstanceType.Lidarr);
_initialized = true;
}
catch
{
_logger.LogError("failed to load {type}", BlocklistType.ToString());
_logger.LogError("failed to load blocklists");
throw;
}
}
private async Task LoadPatternsAndRegexesAsync()
public BlocklistType GetBlocklistType(InstanceType instanceType)
{
string[] patterns;
_cache.TryGetValue(CacheKeys.BlocklistType(instanceType), out BlocklistType? blocklistType);
return blocklistType ?? BlocklistType.Blacklist;
}
public ConcurrentBag<string> GetPatterns(InstanceType instanceType)
{
_cache.TryGetValue(CacheKeys.BlocklistPatterns(instanceType), out ConcurrentBag<string>? patterns);
return patterns ?? [];
}
public ConcurrentBag<Regex> GetRegexes(InstanceType instanceType)
{
_cache.TryGetValue(CacheKeys.BlocklistRegexes(instanceType), out ConcurrentBag<Regex>? regexes);
if (BlocklistType is BlocklistType.Blacklist)
return regexes ?? [];
}
private async Task LoadPatternsAndRegexesAsync(ArrConfig arrConfig, InstanceType instanceType)
{
if (!arrConfig.Enabled)
{
patterns = await ReadContentAsync(_config.Blacklist.Path);
return;
}
else
if (string.IsNullOrEmpty(arrConfig.Block.Path))
{
patterns = await ReadContentAsync(_config.Whitelist.Path);
return;
}
string[] filePatterns = await ReadContentAsync(arrConfig.Block.Path);
long startTime = Stopwatch.GetTimestamp();
ParallelOptions options = new() { MaxDegreeOfParallelism = 5 };
const string regexId = "regex:";
ConcurrentBag<string> patterns = [];
ConcurrentBag<Regex> regexes = [];
Parallel.ForEach(patterns, options, pattern =>
Parallel.ForEach(filePatterns, options, pattern =>
{
if (!pattern.StartsWith(regexId))
{
Patterns.Add(pattern);
patterns.Add(pattern);
return;
}
@@ -91,7 +116,7 @@ public sealed class BlocklistProvider
try
{
Regex regex = new(pattern, RegexOptions.Compiled);
Regexes.Add(regex);
regexes.Add(regex);
}
catch (ArgumentException)
{
@@ -100,10 +125,14 @@ public sealed class BlocklistProvider
});
TimeSpan elapsed = Stopwatch.GetElapsedTime(startTime);
_cache.Set(CacheKeys.BlocklistType(instanceType), arrConfig.Block.Type);
_cache.Set(CacheKeys.BlocklistPatterns(instanceType), patterns);
_cache.Set(CacheKeys.BlocklistRegexes(instanceType), regexes);
_logger.LogDebug("loaded {count} patterns", Patterns.Count);
_logger.LogDebug("loaded {count} regexes", Regexes.Count);
_logger.LogDebug("blocklist loaded in {elapsed} ms", elapsed.TotalMilliseconds);
_logger.LogDebug("loaded {count} patterns", patterns.Count);
_logger.LogDebug("loaded {count} regexes", regexes.Count);
_logger.LogDebug("blocklist loaded in {elapsed} ms | {path}", elapsed.TotalMilliseconds, arrConfig.Block.Path);
}
private async Task<string[]> ReadContentAsync(string path)

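CacheKeys (from Infrastructure.Helpers) is not part of this diff; judging by its usage above it is presumably just a set of per-instance cache key builders, roughly along these lines:

// Sketch inferred from usage; the actual Infrastructure.Helpers.CacheKeys is not shown here.
using Domain.Enums;

public static class CacheKeys
{
    public static string BlocklistType(InstanceType instanceType) => $"blocklist-type-{instanceType}";
    public static string BlocklistPatterns(InstanceType instanceType) => $"blocklist-patterns-{instanceType}";
    public static string BlocklistRegexes(InstanceType instanceType) => $"blocklist-regexes-{instanceType}";
}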

@@ -1,47 +1,106 @@
using Common.Configuration;
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadClient;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Providers;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Serilog.Context;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class ContentBlocker : GenericHandler
{
private readonly ContentBlockerConfig _config;
private readonly BlocklistProvider _blocklistProvider;
private readonly IgnoredDownloadsProvider<ContentBlockerConfig> _ignoredDownloadsProvider;
public ContentBlocker(
ILogger<ContentBlocker> logger,
IOptions<ContentBlockerConfig> config,
IOptions<DownloadClientConfig> downloadClientConfig,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
SonarrClient sonarrClient,
RadarrClient radarrClient,
LidarrClient lidarrClient,
ArrQueueIterator arrArrQueueIterator,
BlocklistProvider blocklistProvider,
DownloadServiceFactory downloadServiceFactory
) : base(logger, sonarrConfig.Value, radarrConfig.Value, sonarrClient, radarrClient, arrArrQueueIterator, downloadServiceFactory)
DownloadServiceFactory downloadServiceFactory,
INotificationPublisher notifier,
IgnoredDownloadsProvider<ContentBlockerConfig> ignoredDownloadsProvider
) : base(
logger, downloadClientConfig,
sonarrConfig, radarrConfig, lidarrConfig,
sonarrClient, radarrClient, lidarrClient,
arrArrQueueIterator, downloadServiceFactory,
notifier
)
{
_config = config.Value;
_blocklistProvider = blocklistProvider;
_ignoredDownloadsProvider = ignoredDownloadsProvider;
}
public override async Task ExecuteAsync()
{
await _blocklistProvider.LoadBlocklistAsync();
if (_downloadClientConfig.DownloadClient is Common.Enums.DownloadClient.None or Common.Enums.DownloadClient.Disabled)
{
_logger.LogWarning("download client is not set");
return;
}
bool blocklistIsConfigured = _sonarrConfig.Enabled && !string.IsNullOrEmpty(_sonarrConfig.Block.Path) ||
_radarrConfig.Enabled && !string.IsNullOrEmpty(_radarrConfig.Block.Path) ||
_lidarrConfig.Enabled && !string.IsNullOrEmpty(_lidarrConfig.Block.Path);
if (!blocklistIsConfigured)
{
_logger.LogWarning("no blocklist is configured");
return;
}
await _blocklistProvider.LoadBlocklistsAsync();
await base.ExecuteAsync();
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
ArrClient arrClient = GetClient(instanceType);
IReadOnlyList<string> ignoredDownloads = await _ignoredDownloadsProvider.GetIgnoredDownloads();
using var _ = LogContext.PushProperty("InstanceName", instanceType.ToString());
HashSet<SearchItem> itemsToBeRefreshed = [];
IArrClient arrClient = GetClient(instanceType);
BlocklistType blocklistType = _blocklistProvider.GetBlocklistType(instanceType);
ConcurrentBag<string> patterns = _blocklistProvider.GetPatterns(instanceType);
ConcurrentBag<Regex> regexes = _blocklistProvider.GetRegexes(instanceType);
// push to context
ContextProvider.Set(nameof(ArrInstance) + nameof(ArrInstance.Url), instance.Url);
ContextProvider.Set(nameof(InstanceType), instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
foreach (QueueRecord record in items)
var groups = items
.GroupBy(x => x.DownloadId)
.ToList();
foreach (var group in groups)
{
QueueRecord record = group.First();
if (record.Protocol is not "torrent")
{
continue;
@@ -53,9 +112,41 @@ public sealed class ContentBlocker : GenericHandler
continue;
}
if (ignoredDownloads.Contains(record.DownloadId, StringComparer.InvariantCultureIgnoreCase))
{
_logger.LogInformation("skip | {title} | ignored", record.Title);
continue;
}
// push record to context
ContextProvider.Set(nameof(QueueRecord), record);
_logger.LogDebug("searching unwanted files for {title}", record.Title);
await _downloadService.BlockUnwantedFilesAsync(record.DownloadId);
BlockFilesResult result = await _downloadService
.BlockUnwantedFilesAsync(record.DownloadId, blocklistType, patterns, regexes, ignoredDownloads);
if (!result.ShouldRemove)
{
continue;
}
_logger.LogDebug("all files are marked as unwanted | {hash}", record.Title);
itemsToBeRefreshed.Add(GetRecordSearchItem(instanceType, record, group.Count() > 1));
bool removeFromClient = true;
if (result.IsPrivate && !_config.DeletePrivate)
{
removeFromClient = false;
}
await arrClient.DeleteQueueItemAsync(instance, record, removeFromClient, DeleteReason.AllFilesBlocked);
await _notifier.NotifyQueueItemDeleted(removeFromClient, DeleteReason.AllFilesBlocked);
}
});
await arrClient.RefreshItemsAsync(instance, itemsToBeRefreshed);
}
}
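BlockFilesResult is defined in the download client layer and not shown in this diff; from the way it is consumed above it presumably carries at least the two flags below (sketch only):

// Sketch inferred from usage (result.ShouldRemove / result.IsPrivate);
// the real type may carry additional fields.
public sealed record BlockFilesResult
{
    public bool ShouldRemove { get; init; }
    public bool IsPrivate { get; init; }
}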


@@ -1,52 +1,50 @@
using Domain.Enums;
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Microsoft.Extensions.Logging;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class FilenameEvaluator
public class FilenameEvaluator : IFilenameEvaluator
{
private readonly ILogger<FilenameEvaluator> _logger;
private readonly BlocklistProvider _blocklistProvider;
public FilenameEvaluator(ILogger<FilenameEvaluator> logger, BlocklistProvider blocklistProvider)
public FilenameEvaluator(ILogger<FilenameEvaluator> logger)
{
_logger = logger;
_blocklistProvider = blocklistProvider;
}
// TODO create unit tests
public bool IsValid(string filename)
public bool IsValid(string filename, BlocklistType type, ConcurrentBag<string> patterns, ConcurrentBag<Regex> regexes)
{
return IsValidAgainstPatterns(filename) && IsValidAgainstRegexes(filename);
return IsValidAgainstPatterns(filename, type, patterns) && IsValidAgainstRegexes(filename, type, regexes);
}
private bool IsValidAgainstPatterns(string filename)
private static bool IsValidAgainstPatterns(string filename, BlocklistType type, ConcurrentBag<string> patterns)
{
if (_blocklistProvider.Patterns.Count is 0)
if (patterns.Count is 0)
{
return true;
}
return _blocklistProvider.BlocklistType switch
return type switch
{
BlocklistType.Blacklist => !_blocklistProvider.Patterns.Any(pattern => MatchesPattern(filename, pattern)),
BlocklistType.Whitelist => _blocklistProvider.Patterns.Any(pattern => MatchesPattern(filename, pattern)),
_ => true
BlocklistType.Blacklist => !patterns.Any(pattern => MatchesPattern(filename, pattern)),
BlocklistType.Whitelist => patterns.Any(pattern => MatchesPattern(filename, pattern)),
};
}
private bool IsValidAgainstRegexes(string filename)
private static bool IsValidAgainstRegexes(string filename, BlocklistType type, ConcurrentBag<Regex> regexes)
{
if (_blocklistProvider.Regexes.Count is 0)
if (regexes.Count is 0)
{
return true;
}
return _blocklistProvider.BlocklistType switch
return type switch
{
BlocklistType.Blacklist => !_blocklistProvider.Regexes.Any(regex => regex.IsMatch(filename)),
BlocklistType.Whitelist => _blocklistProvider.Regexes.Any(regex => regex.IsMatch(filename)),
_ => true
BlocklistType.Blacklist => !regexes.Any(regex => regex.IsMatch(filename)),
BlocklistType.Whitelist => regexes.Any(regex => regex.IsMatch(filename)),
};
}
@@ -76,6 +74,6 @@ public sealed class FilenameEvaluator
);
}
return filename == pattern;
return filename.Equals(pattern, StringComparison.InvariantCultureIgnoreCase);
}
}


@@ -0,0 +1,10 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
namespace Infrastructure.Verticals.ContentBlocker;
public interface IFilenameEvaluator
{
bool IsValid(string filename, BlocklistType type, ConcurrentBag<string> patterns, ConcurrentBag<Regex> regexes);
}
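A quick usage sketch of the new IsValid signature with made-up patterns, showing that the blocklist data is now passed in per call instead of being read from BlocklistProvider inside the evaluator:

// Usage sketch with made-up data; in the real flow the patterns and regexes come
// from BlocklistProvider.GetPatterns/GetRegexes for the current arr instance.
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Infrastructure.Verticals.ContentBlocker;
using Microsoft.Extensions.Logging.Abstractions;

ConcurrentBag<string> patterns = ["*.lnk", "password.txt"];
ConcurrentBag<Regex> regexes = [new Regex(@"\.exe$", RegexOptions.Compiled | RegexOptions.IgnoreCase)];

IFilenameEvaluator evaluator = new FilenameEvaluator(NullLogger<FilenameEvaluator>.Instance);
bool keep = evaluator.IsValid("Some.Movie.2024.mkv", BlocklistType.Blacklist, patterns, regexes);
// keep == true: the name matches neither the wildcard patterns nor the regexes,
// so the file is not blocked under a blacklist.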

Some files were not shown because too many files have changed in this diff.