Compare commits


399 Commits
v1.3.1 ... dev

Author SHA1 Message Date
Flaminel
d9341ff441 fixed readme 2025-06-27 15:26:37 +03:00
Flaminel
555be79a7a fixed docs favicon 2025-06-27 15:13:15 +03:00
Flaminel
66be8aaa32 removed flmorg references 2025-06-27 15:07:55 +03:00
Flaminel
737b683285 fixed release workflow 2025-06-27 14:56:41 +03:00
Flaminel
c0e94733cd removed test directory 2025-06-27 14:52:31 +03:00
Flaminel
5843ec1d15 changed docs #8 2025-06-27 14:51:06 +03:00
Flaminel
51c968fb2d changed docs #7 2025-06-27 14:29:00 +03:00
Flaminel
ea50a7a535 changed docs #6 2025-06-27 14:20:24 +03:00
Flaminel
b1477704b6 changed docs #5 2025-06-27 13:21:14 +03:00
Flaminel
04d287e1e0 hidden username for deluge 2025-06-26 23:03:21 +03:00
Flaminel
59a5eea684 fixed docs tag location 2025-06-26 22:47:15 +03:00
Flaminel
2078eab054 changed docs #4 2025-06-26 22:35:56 +03:00
Flaminel
3278a24931 changed docs #3 2025-06-26 20:00:22 +03:00
Flaminel
f9b83cf3b2 changed docs #2 2025-06-26 19:37:34 +03:00
Flaminel
54997b184c changed docs #1 2025-06-26 16:27:08 +03:00
Flaminel
2e7d89e1c5 fixed frontend error handling 2025-06-26 12:13:38 +03:00
Flaminel
68c9d41f5f fixed ignored downloads description 2025-06-25 20:17:00 +03:00
Flaminel
a9503bbca3 fixed blocklist reload time 2025-06-25 20:14:45 +03:00
Flaminel
07553131a0 try fix macos installers #4 2025-06-25 16:58:40 +03:00
Flaminel
b21bab2a8a try fix macos installers #3 2025-06-25 15:35:29 +03:00
Flaminel
c9e033c9d1 fixed port configuration 2025-06-25 13:35:02 +03:00
Flaminel
a26fd1da5d removed usenet type 2025-06-24 21:28:38 +03:00
Flaminel
d904240aa8 try fix macos installers #2 2025-06-24 20:56:32 +03:00
Flaminel
4f066c3f2a try fix macos installers 2025-06-24 20:40:37 +03:00
Flaminel
84bee5f63a fixed download cleaner reset 2025-06-24 20:40:23 +03:00
Flaminel
8d7b8e5848 fixed cache strikes not being reset when dry run is disabled 2025-06-24 19:27:34 +03:00
Flaminel
8115205158 fixed frontend name 2025-06-24 19:08:26 +03:00
Flaminel
ada2b9fa4a try fix installers and removed default configuration file 2025-06-23 23:15:40 +03:00
Flaminel
222c21a85e revert base path validator 2025-06-23 21:01:42 +03:00
Flaminel
d6cea762eb fixed macos installer 2025-06-23 19:57:44 +03:00
Flaminel
f9d096d76a fixed base path validator 2025-06-23 19:57:38 +03:00
Flaminel
ee89570cad fixed executable build 2025-06-23 19:44:03 +03:00
Flaminel
42145e58eb fixed naming 2025-06-23 19:22:51 +03:00
Flaminel
0cda75157e fixed config for installers and icons 2025-06-23 19:09:57 +03:00
Flaminel
4ef223b0cc removed debug symbols from app build 2025-06-23 18:48:00 +03:00
Flaminel
0df1fd0d19 fixed release workflows 2025-06-23 18:36:44 +03:00
Flaminel
7409ddeb33 removed windsurf 2025-06-23 17:51:10 +03:00
Flaminel
f46702f073 fixed missing port when not in docker 2025-06-23 17:51:01 +03:00
Flaminel
9f22db9cad updated input description 2025-06-23 15:21:35 +03:00
Flaminel
bbd2052086 fixed queue cleaner input states 2025-06-23 15:20:08 +03:00
Flaminel
8436f93727 fixed content blocker input states 2025-06-23 15:20:01 +03:00
Flaminel
8c020d5aa5 fixed download cleaner input states 2025-06-23 15:17:29 +03:00
Flaminel
cd36a32b91 fixed column value type 2025-06-23 01:51:20 +03:00
Flaminel
9e43ce76f6 changed settings page layout 2025-06-22 22:10:41 +03:00
Flaminel
a068b93458 fixed blocklist input width 2025-06-22 22:09:12 +03:00
Flaminel
96dc8bec51 fixed unlinked categories validation 2025-06-22 22:03:42 +03:00
Flaminel
910633a413 added download cleaner pop-up 2025-06-22 21:51:02 +03:00
Flaminel
823b9fec3f added general settings pop-up 2025-06-22 21:50:48 +03:00
Flaminel
b99cbd0d6e reorganized content blocker settings 2025-06-22 12:06:25 +03:00
Flaminel
534a8a9c66 reorganized download cleaner settings 2025-06-22 12:06:16 +03:00
Flaminel
0473d14462 fixed setting message 2025-06-22 04:55:56 +03:00
Flaminel
febb9c4432 fixed some settings 2025-06-22 04:53:33 +03:00
Flaminel
0933b99cea fixed encryption key seeding 2025-06-22 02:12:18 +03:00
Flaminel
20483fab9a fix unlink categories not being sent 2025-06-22 01:53:35 +03:00
Flaminel
60f96589cd fixed general setting HTTPS description 2025-06-22 00:45:23 +03:00
Flaminel
eec0cb9605 fixed general settings dropdowns 2025-06-22 00:08:20 +03:00
Flaminel
20fe4c5b3f fixed cleanup settings 2025-06-21 23:51:47 +03:00
Flaminel
8d7b207181 fixed queue cleaner and download cleaner layout 2025-06-21 20:29:13 +03:00
Flaminel
8f34bdc780 fixed port configuration and logs 2025-06-21 20:20:20 +03:00
Flaminel
a61a10af8f updated support section 2025-06-21 19:52:47 +03:00
Flaminel
17c37e8eef reorganized sidebar 2025-06-21 19:43:02 +03:00
Flaminel
bfbd2c16dd fixed support component icons; removed p-card 2025-06-21 16:40:20 +03:00
Flaminel
eef21ac734 added huntarr link 2025-06-21 16:28:01 +03:00
Flaminel
978b1c9ade added makefile 2025-06-21 16:16:29 +03:00
Flaminel
5ea1361832 removed enabled per arr config and added enabled per arr instance 2025-06-21 16:16:21 +03:00
Flaminel
861c74e452 fixed entrypoint to create user and group if they don't exist 2025-06-21 02:12:49 +03:00
Flaminel
6f437de698 added support card 2025-06-21 02:12:12 +03:00
Flaminel
2cdbc938fd fixed qbit label name 2025-06-20 23:13:41 +03:00
Flaminel
26d98f815f added support for PUID, PGID and UMASK 2025-06-20 23:06:25 +03:00
Flaminel
1a7e86aca4 removed https redirection 2025-06-20 21:12:59 +03:00
Flaminel
4b2aa6c4f6 Merge branch 'main' into dev 2025-06-20 15:00:40 +03:00
Flaminel
2dcd495da7 fixed default base path 2025-06-20 14:57:20 +03:00
Flaminel
38b7d1d4bb fixed content blocker 2025-06-20 14:39:00 +03:00
Flaminel
c109b15ec1 added content blocker back 2025-06-20 12:06:48 +03:00
Flaminel
1cc749243c fixed base path and port 2025-06-20 01:32:05 +03:00
Flaminel
a7c8f37bed fixed job enabling failing 2025-06-19 00:08:06 +03:00
Flaminel
65d0d5188d gitignore 2025-06-18 23:53:49 +03:00
Flaminel
15b0e4218d fixed settings page 2025-06-18 23:38:42 +03:00
Flaminel
de7e7d244b fixed initial docker build 2025-06-18 23:31:49 +03:00
Flaminel
7a8cdbb354 fixed frontend build 2025-06-18 23:02:52 +03:00
Flaminel
4c5e4e95d9 try fix Dockerfile 2025-06-18 21:59:14 +03:00
Flaminel
5881c24ff3 moved frontend 2025-06-18 21:59:04 +03:00
Flaminel
b323235227 moved backend 2025-06-18 21:58:48 +03:00
Flaminel
ee0f915aaf fixed some icons 2025-06-18 18:17:21 +03:00
Flaminel
8883a2ca3f fixed notifications page layout 2025-06-18 18:00:16 +03:00
Flaminel
bbfde4bb17 added notifications endpoint 2025-06-18 17:48:50 +03:00
Flaminel
4d8d3ea732 fixed mobile menu bar 2025-06-17 18:49:47 +03:00
Flaminel
e9718c3a66 fixed tooltip positions 2025-06-17 18:47:34 +03:00
Flaminel
3e8fb01f44 fixed cron validations 2025-06-17 18:44:49 +03:00
Flaminel
e5d7d8ed12 reorganized project 2025-06-17 18:21:18 +03:00
Flaminel
d76216665b try #1 for deployment 2025-06-17 14:41:55 +03:00
Flaminel
1255d0a50a fixed serialization options 2025-06-17 10:36:24 +03:00
Flaminel
cb53ee09da removed commented code 2025-06-16 23:59:18 +03:00
Flaminel
f2622b129d removed TODO comment 2025-06-16 23:58:19 +03:00
Flaminel
7ac72cbece fixed download cleaner processing 2025-06-16 23:57:51 +03:00
Flaminel
b7902ca7ba fixed queue cleaner processing 2025-06-16 23:35:01 +03:00
Flaminel
84dd43a85a removed dry run attribute 2025-06-16 23:14:10 +03:00
Flaminel
fe54813abf fixed download clients not being invalidated on config change 2025-06-16 23:11:00 +03:00
Flaminel
7b2af6dd5d try fix download clients factory 2025-06-16 22:40:12 +03:00
Flaminel
cacd62058f fixed singletons using datacontext 2025-06-16 22:23:40 +03:00
Flaminel
0a8d1450dd try fix download client factory 2025-06-16 21:38:27 +03:00
Flaminel
f651663fd3 fixed health checks and download service factory 2025-06-16 20:17:06 +03:00
Flaminel
b4548573ee fixed download client UI 2025-06-16 18:26:16 +03:00
Flaminel
f39e8eca46 fixed download services 2025-06-16 18:26:09 +03:00
Flaminel
ba1ced3b84 some more layout fixes 2025-06-16 17:37:49 +03:00
Flaminel
ef8aa4c002 fixed download button context menu 2025-06-16 17:28:05 +03:00
Flaminel
cd2b66a525 fix events and logs layout 2025-06-16 17:21:56 +03:00
Flaminel
b4dad7ebc9 fixed download clients layout 2025-06-16 14:42:32 +03:00
Flaminel
9411ab815e fixed lidarr layout 2025-06-15 23:51:23 +03:00
Flaminel
9ebf49f0b8 fixed mobile menu 2025-06-15 23:51:17 +03:00
Flaminel
369a75b4c5 fixed radarr layout 2025-06-15 23:38:15 +03:00
Flaminel
1f55929ab1 fixed sonarr disabled states 2025-06-15 23:12:53 +03:00
Flaminel
4558fa462f fixed sonarr layout 2025-06-15 23:08:37 +03:00
Flaminel
f23c0e0186 changed heart icon color 2025-06-15 23:01:43 +03:00
Flaminel
d5d661543f try fix sonarr layout 2025-06-15 22:55:58 +03:00
Flaminel
53163774a5 separated settings pages 2025-06-15 22:31:34 +03:00
Flaminel
00dfd63797 try fix sonarr 2025-06-15 22:23:52 +03:00
Flaminel
d5e3e9954d try fix arrs again 2025-06-15 21:46:01 +03:00
Flaminel
bf37668dcb combine arr configs #1 2025-06-15 21:15:50 +03:00
Flaminel
62eee94497 removed search type from sonarr config 2025-06-15 19:46:25 +03:00
Flaminel
91274dac4b try fix arr #3 2025-06-15 19:30:11 +03:00
Flaminel
689adb13d8 try fix arr #2 2025-06-15 18:54:33 +03:00
Flaminel
1194db6c1e try fix arr #1 2025-06-15 18:54:28 +03:00
Flaminel
5f412c2e6a try fix config controller 2025-06-15 11:27:16 +03:00
Flaminel
bd9f24eb1a fixed general settings 2025-06-15 03:30:25 +03:00
Flaminel
458688770a try fix download client #4 2025-06-15 03:06:24 +03:00
Flaminel
cf208754a3 try fix download client #3 2025-06-15 03:01:53 +03:00
Flaminel
4b5f4dc447 try fix download client #2 2025-06-15 02:45:49 +03:00
Flaminel
3c2e36eb9e try fix download client UI 2025-06-15 02:26:37 +03:00
Flaminel
95d39c40ed fixed initial db state 2025-06-15 01:21:58 +03:00
Flaminel
9e393eebcc try switch to db 2025-06-15 00:42:20 +03:00
Flaminel
033b50519b db config checkpoint 2025-06-14 01:20:37 +03:00
Flaminel
0be5f48717 fixed db naming 2025-06-13 18:52:51 +03:00
Flaminel
9353a55cff added config db 2025-06-13 18:16:03 +03:00
Flaminel
0cb737a7e1 try fix config controller 2025-06-13 16:42:22 +03:00
Flaminel
6a0641ef63 try fix config update 2025-06-13 15:46:00 +03:00
Flaminel
26bfa5adb2 try fix again 2025-06-13 14:29:01 +03:00
Flaminel
96130501c3 try fix download client settings 2025-06-13 14:18:17 +03:00
Flaminel
fef7efb7dc added download client config UI 2025-06-13 14:09:04 +03:00
Flaminel
0050c45f09 updated deployment 2025-06-12 23:45:46 +03:00
Flaminel
857a6a88b4 added lidarr UI 2025-06-12 23:18:13 +03:00
Flaminel
b85842d3f0 added radarr config UI; fixed some messages 2025-06-12 22:57:14 +03:00
Flaminel
4800ec66d5 fixed some buttons 2025-06-12 22:28:50 +03:00
Flaminel
225e80cdbe try add sonarr config #2 2025-06-11 17:39:53 +03:00
Flaminel
64a24051d7 try add sonarr config #1 2025-06-11 16:01:55 +03:00
Flaminel
d73cb46006 try fix download cleaner again 2025-06-11 00:28:29 +03:00
Flaminel
0ab5c01ebc added toggle for unlinked settings 2025-06-10 22:31:42 +03:00
Flaminel
f2303023b4 try fix download cleaner #2 2025-06-10 21:48:26 +03:00
Flaminel
7b67ab6967 try fix download cleaner settings #1 2025-06-10 20:25:53 +03:00
Flaminel
3ae1f9c81d added download cleaner settings 2025-06-10 19:19:16 +03:00
Flaminel
3b676a8c60 try fix settings notifications and stuff 2025-06-10 09:42:49 +03:00
Flaminel
0f45b298b8 fixed dropdown 2025-06-09 12:52:34 +03:00
Flaminel
0260e43d79 try basic/advanced scheduling mode 2025-06-09 12:50:26 +03:00
Flaminel
a85687f687 fixed some other stuff 2025-06-09 03:28:20 +03:00
Flaminel
ee24856504 added new event type and fixed more stuff 2025-06-09 02:40:41 +03:00
Flaminel
9b337134c9 fixed queue settings validations 2025-06-09 02:21:34 +03:00
Flaminel
64d4abf25b fixed setting log level 2025-06-09 02:21:20 +03:00
Flaminel
a4423a28e9 some more fucking fixing 2025-06-09 01:47:22 +03:00
Flaminel
fc26f40fb3 fixed some stuff on queue cleaner 2025-06-09 00:59:42 +03:00
Flaminel
e2e775c073 fix some job stuff 2025-06-09 00:52:53 +03:00
Flaminel
ca13171b82 try fix settings 2025-06-08 23:59:55 +03:00
Flaminel
ee71eab6fa more fixes 2025-06-08 23:10:35 +03:00
Flaminel
565b6b3fde fixed general settings layout 2025-06-08 23:03:02 +03:00
Flaminel
91e7e92cc6 fixed api path 2025-06-08 22:37:16 +03:00
Flaminel
c8626c220e fixed missing certificate validation option 2025-06-08 22:35:28 +03:00
Flaminel
c9dc917401 try fix general settings #1 2025-06-08 22:25:30 +03:00
Flaminel
20e1df722e added general settings 2025-06-08 21:38:16 +03:00
Flaminel
06a2b82881 added loading/error screen for settings 2025-06-08 18:21:05 +03:00
Flaminel
1afbf5c573 fixed content blocker checkbox state 2025-06-08 02:45:15 +03:00
Flaminel
e5586fc774 fixed settings dirty state management and added leave page popup 2025-06-08 02:37:24 +03:00
Flaminel
565d6a78a7 fixed queue cleaner settings button 2025-06-08 01:53:01 +03:00
Flaminel
9cac4fdf8c fixed too many OnFileChanged being trigger when starting the app 2025-06-08 01:45:16 +03:00
Flaminel
d28389641a fixed trace and debug logs icon 2025-06-08 01:44:37 +03:00
Flaminel
2a8f4634e5 Update blacklists (#155) 2025-06-07 16:57:06 +03:00
Flaminel
46c48d7c00 try fix config manager 2025-06-07 02:15:36 +03:00
Flaminel
d2bcf8ac89 removed content blocker config file 2025-06-07 02:14:07 +03:00
Flaminel
8463b8b786 removed ignored downloads path and config 2025-06-07 02:05:51 +03:00
Flaminel
fd74455951 removed fallback code 2025-06-07 00:41:56 +03:00
Flaminel
34314de284 fixed accordion toggle 2025-06-07 00:36:27 +03:00
Flaminel
195d361364 fixed patterns input 2025-06-07 00:11:46 +03:00
Flaminel
cc07466c2b try change to accordion panel 2025-06-06 23:39:35 +03:00
Flaminel
30aaac5e67 fixed dropdowns and schedule 2025-06-06 23:12:34 +03:00
Flaminel
44a6c37530 fixed small stuff 2025-06-06 22:53:29 +03:00
Flaminel
9526f133a1 changed some info logs 2025-06-06 22:45:19 +03:00
Flaminel
ddbfee33d1 fixed queue cleaner config retrieval 2025-06-06 22:44:08 +03:00
Flaminel
12ab97825b fixed queue cleaner UI settings 2025-06-06 22:20:56 +03:00
Flaminel
bcf093148c fixed UI queue cleaner config 2025-06-06 21:26:51 +03:00
Flaminel
d2cc8517c1 try fix blocklist provider to be dynamic 2025-06-06 21:11:15 +03:00
Flaminel
cae4e323a5 try remove content blocker 2025-06-06 20:46:38 +03:00
Flaminel
f6b0014ec6 fixed some configuration stuff 2025-06-02 12:59:33 +03:00
Flaminel
5d0a48e7cd fixed download cleaner job on api update 2025-06-01 18:49:59 +03:00
Flaminel
d177790d6f fixed queue cleaner job scheduling on API call 2025-06-01 18:37:58 +03:00
Flaminel
d7e28fc6e7 change logs layout 2025-06-01 18:12:16 +03:00
Flaminel
14f16cbe70 fixed events layout 2025-06-01 02:37:15 +03:00
Flaminel
8b8dc672d1 updated some names 2025-06-01 02:36:59 +03:00
Flaminel
d80d897acb test 2025-05-31 21:38:45 +03:00
Flaminel
d9d1f86897 try fix #4 2025-05-31 21:38:41 +03:00
Flaminel
2de3cccac7 try fix #3 2025-05-31 20:39:30 +03:00
Flaminel
b2bb48a260 try fix #2 2025-05-31 00:54:25 +03:00
Flaminel
d078ea288c try fix settings enablement 2025-05-30 23:18:36 +03:00
Flaminel
84d984082c created separate component for settings 2025-05-30 19:13:02 +03:00
Flaminel
b289b2ee39 added queue cleaner settings 2025-05-30 18:36:38 +03:00
Flaminel
97473b47fd updated config controller 2025-05-30 16:27:20 +03:00
Flaminel
a38d370925 added global exception handler 2025-05-30 16:02:56 +03:00
Flaminel
1e3a4cb220 added config DTOs 2025-05-30 15:19:04 +03:00
Flaminel
62e31a2497 changed db file name 2025-05-30 04:14:28 +03:00
Flaminel
c58f6080f0 added encryption for sensitive configs 2025-05-29 23:01:42 +03:00
Flaminel
7cfd69b1f7 fixed search on events 2025-05-29 20:14:07 +03:00
Flaminel
86c356c3a9 added missing event search; fixed connection status for events page 2025-05-29 04:31:54 +03:00
Flaminel
36cfd3b4e5 fixed events viewer 2025-05-29 03:25:05 +03:00
Flaminel
c6f34432b7 removed auto scroll on events 2025-05-29 02:55:24 +03:00
Flaminel
97fe7138c2 fixed event timestamp 2025-05-29 01:57:12 +03:00
Flaminel
2b83e1a334 events api #1 2025-05-28 22:39:15 +03:00
Flaminel
599f8959a9 fixed some logs 2025-05-28 21:41:38 +03:00
Flaminel
f6bcd29ea0 removed old hubs 2025-05-28 19:31:02 +03:00
Flaminel
62fc39251b added unified signalr hub 2025-05-28 19:01:54 +03:00
Flaminel
f5fe9405cd removed some stuff from LoggingController 2025-05-28 15:12:59 +03:00
Flaminel
e4bb0ac04c added glow on logo and heart 2025-05-28 15:12:45 +03:00
Flaminel
fdab02a937 fixed some things; renamed stuff 2025-05-28 11:34:18 +03:00
Flaminel
b7546a7015 fixed dashboard streams not being populated at first 2025-05-28 02:17:52 +03:00
Flaminel
7482f963e3 removed weird left border on event and log entries 2025-05-28 01:21:59 +03:00
Flaminel
90b0ea8306 fixed connection status styling on dashboard 2025-05-28 01:18:36 +03:00
Flaminel
25ae542e87 fixed timeline colors 2025-05-28 01:06:15 +03:00
Flaminel
e96d76091b fixes to layout and styling 2025-05-27 23:03:21 +03:00
Flaminel
28b6c10917 fixed sidebar items 2025-05-27 20:31:32 +03:00
Flaminel
60884198ff removed icons; fixed buttons 2025-05-27 20:30:58 +03:00
Flaminel
367a499155 removed title from topbar 2025-05-27 20:04:18 +03:00
Flaminel
414ad299b6 removed dashboard refresh button 2025-05-27 18:56:44 +03:00
Flaminel
b3bc071943 added dashboard cards 2025-05-27 18:45:53 +03:00
Flaminel
7bd69046ea removed notification publisher definitions 2025-05-27 15:25:13 +03:00
Flaminel
7cfe1333d2 fixed config path 2025-05-27 15:21:18 +03:00
Flaminel
4bb54517c1 changed failed import naming 2025-05-27 14:37:39 +03:00
Flaminel
607bebaf0f fixed event architecture 2025-05-27 14:30:59 +03:00
Flaminel
0da1ef518a events #5 2025-05-27 03:03:06 +03:00
Flaminel
a660480a7c events #4 2025-05-27 02:21:34 +03:00
Flaminel
e8a7373b0d events #3 2025-05-27 01:32:02 +03:00
Flaminel
0785fe9a12 events #2 2025-05-26 23:16:22 +03:00
Flaminel
153c490198 added events #1 2025-05-26 23:08:02 +03:00
Flaminel
4f8d2c57d7 fixed toggle switch 2025-05-25 23:34:56 +03:00
Flaminel
0ba1979016 update theming 2025-05-25 23:16:44 +03:00
Flaminel
7ec9dfe68d remove collapsed sidebar 2025-05-25 22:50:37 +03:00
Flaminel
8b7e39fa86 try sidebar #2 2025-05-25 19:23:00 +03:00
Flaminel
2143c74767 try sidebar 2025-05-25 02:33:21 +03:00
Flaminel
9e596cfe17 topbar 2025-05-25 02:13:24 +03:00
Flaminel
dd95c40bec fixed logs 2025-05-24 01:28:07 +03:00
Flaminel
df37aee2a2 try #3 2025-05-24 00:54:45 +03:00
Flaminel
cb57b06abb try fix collapse #2 2025-05-24 00:48:26 +03:00
Flaminel
ac924d1294 try fix collapse animation 2025-05-24 00:38:43 +03:00
Flaminel
ba4379417e fixed nav bar icon alignment 2025-05-24 00:31:36 +03:00
Flaminel
ff565895ce fixed download logs 2025-05-24 00:12:54 +03:00
Flaminel
1f5e8e5711 fixed mobile side bar 2025-05-24 00:04:12 +03:00
Flaminel
e284ea2c13 changing main layout and styling 2025-05-23 23:49:45 +03:00
Flaminel
d5dab0a726 styling again 2025-05-23 23:13:31 +03:00
Flaminel
8b36e363ac style adjustments 2025-05-23 22:11:01 +03:00
Flaminel
d3c5f7a210 debounce 2025-05-23 22:00:58 +03:00
Flaminel
a97b898d33 #37 2025-05-23 20:58:22 +03:00
Flaminel
aa3ec6c49c theme 2025-05-23 20:18:04 +03:00
Flaminel
2a55ad456c rules 2025-05-23 19:59:15 +03:00
Flaminel
856bd9a93d #36 2025-05-23 17:27:25 +03:00
Flaminel
64518ad071 #35 2025-05-23 17:15:15 +03:00
Flaminel
100528ab06 fix #34 2025-05-22 19:18:47 +03:00
Flaminel
a8e188aa01 #33 2025-05-22 17:03:09 +03:00
Flaminel
91bd85708c #32 2025-05-22 15:13:25 +03:00
Flaminel
0bd4e77e9d #31 2025-05-21 20:49:40 +03:00
Flaminel
9bd46d7255 fix #30 2025-05-20 16:28:54 +03:00
Flaminel
1604e56a89 huntarr 2025-05-20 13:36:10 +03:00
Flaminel
6c9b60dff5 #29 2025-05-20 13:35:28 +03:00
Flaminel
ee02666dc1 #28 2025-05-20 13:21:32 +03:00
Flaminel
21e1fde1c8 #27 2025-05-20 12:32:42 +03:00
Flaminel
8721bc411e #26 2025-05-20 10:35:30 +03:00
Flaminel
3d0ab5f4a6 #25 2025-05-19 23:11:16 +03:00
Flaminel
0e4535d7a6 add ui 2025-05-19 22:16:52 +03:00
Flaminel
7a1e019c76 fix #24 2025-05-19 19:40:39 +03:00
Flaminel
701a7dc417 fix #23 2025-05-19 19:30:12 +03:00
Flaminel
eb0f782f53 #22 2025-05-19 15:32:57 +03:00
Flaminel
a1bd278652 #21 2025-05-19 13:40:59 +03:00
Flaminel
9409346732 fix #20 2025-05-19 12:38:52 +03:00
Flaminel
3d9b286206 #19 2025-05-19 12:35:58 +03:00
Flaminel
c675924be7 fix 2025-05-19 12:08:58 +03:00
Flaminel
2bd8f69aff fix 2025-05-18 02:39:20 +03:00
Flaminel
ef9868be4d fix 2025-05-18 02:17:45 +03:00
Flaminel
4a394928bb #17 2025-05-17 22:14:00 +03:00
Flaminel
60d3ddb3d2 fix #16 2025-05-17 21:33:02 +03:00
Flaminel
0173598519 #15 2025-05-17 21:30:09 +03:00
Flaminel
96b9a54b64 fix #14 2025-05-17 21:03:14 +03:00
Flaminel
212aeccaf3 fix env 2025-05-17 20:14:30 +03:00
Flaminel
d2eb9e50e0 fix #13 2025-05-17 20:12:53 +03:00
Flaminel
1b47921ac5 fix #12 2025-05-17 19:16:59 +03:00
Flaminel
d8d6e31395 fix #11 2025-05-16 21:52:28 +03:00
Flaminel
10be7d4a73 fix #10 2025-05-16 21:15:35 +03:00
Flaminel
e98ecfcb2a fix #9 2025-05-16 20:48:52 +03:00
Flaminel
bc1da2113c fix #8 2025-05-16 20:05:21 +03:00
Flaminel
46ef6123cc fix #7 2025-05-16 19:28:52 +03:00
Flaminel
f2027f77a9 #12 2025-05-16 19:16:32 +03:00
Flaminel
3c2bb7a289 #11 2025-05-16 18:53:01 +03:00
Flaminel
f9fd118f88 #10 2025-05-16 18:25:47 +03:00
Flaminel
4d79307d24 #9 2025-05-16 18:06:18 +03:00
Flaminel
a52fd9198d fix #6 2025-05-16 17:22:59 +03:00
Flaminel
4cb36e6727 #8 2025-05-16 16:24:38 +03:00
Flaminel
a7c2b698d6 rules #2 2025-05-16 16:24:23 +03:00
Flaminel
57326b2f8e fix #5 2025-05-15 22:09:42 +03:00
Flaminel
374f0f72a7 fix #4 2025-05-15 20:29:27 +03:00
Flaminel
06f514c3ea #7 2025-05-15 20:25:21 +03:00
Flaminel
f386bf700c fix #3 2025-05-15 20:15:20 +03:00
Flaminel
b6950f545f #6 2025-05-15 19:34:12 +03:00
Flaminel
6eb457ed8f fix #2 2025-05-15 18:18:41 +03:00
Flaminel
0e99a510a8 #5 2025-05-15 18:15:42 +03:00
Flaminel
b4316a4f0d #4 2025-05-15 17:46:25 +03:00
Flaminel
fa3aebde9a #3 2025-05-15 16:33:39 +03:00
Flaminel
a68a4c733f fix #1 2025-05-15 16:18:09 +03:00
Flaminel
81da704e6f updated docs with missing variable 2025-05-15 15:25:55 +03:00
Flaminel
0fc7352db6 #2 2025-05-14 22:49:39 +03:00
Flaminel
69788d55d2 #1 2025-05-14 22:42:52 +03:00
Flaminel
461e935128 initial windsurf 2025-05-14 22:42:24 +03:00
Flaminel
c82b5e11b1 Add rate limiting for download removal (#141) 2025-05-11 13:27:51 +03:00
Flaminel
c36d9eb9cf fixed Transmission docs 2025-05-11 10:16:13 +03:00
Flaminel
2f21603e8e fixed docs urls 2025-05-10 14:28:26 +03:00
Flaminel
586f9964b5 Update docs (#138) 2025-05-10 14:28:40 +03:00
Flaminel
124670bb98 updated features 2025-05-09 14:39:39 +03:00
Flaminel
baf6a8c2f4 Add option to set failed import strikes per arr (#135) 2025-05-09 01:17:16 +03:00
Flaminel
cd345afc54 Fix logs when using qBit tag instead of category (#134) 2025-05-08 22:50:14 +03:00
Flaminel
246ec4d6eb Add option to set a tag instead of changing the category for unlinked downloads (#133) 2025-05-08 21:51:08 +03:00
Flaminel
569eeae181 Fix hardlinks on ARM64 (#130) 2025-05-07 21:44:49 +03:00
Flaminel
5a0ef56074 Remove empty clean categories list validation (#131) 2025-05-07 14:12:36 +03:00
Flaminel
09bd4321fb fixed readme icons 2025-05-06 19:59:00 +03:00
Flaminel
4939e37210 updated discord invite 2025-05-06 15:57:12 +03:00
Flaminel
9463d7587f Add support for unstrusted certificates (#128) 2025-05-06 15:42:41 +03:00
Flaminel
7d2bf41bec updated readme to mention Huntarr 2025-05-06 15:35:37 +03:00
Flaminel
93bb8cc18d updated README 2025-05-05 12:25:22 +03:00
Flaminel
449d9e623f fixed missing config variables 2025-05-05 12:25:09 +03:00
Flaminel
3a50d9be3c updated docs 2025-05-05 00:35:10 +03:00
Flaminel
693f80fe6a Add category change for downloads with no additional hardlinks (#65) 2025-05-04 17:26:51 +03:00
Flaminel
8cfc73213a Add separate strikes for downloading metadata (#104) 2025-05-04 17:11:38 +03:00
Flaminel
6fbae768a4 removed test stuff 2025-05-04 15:24:31 +03:00
Flaminel
8e9d0127e0 removed docs homepage features 2025-05-04 15:23:01 +03:00
Flaminel
b92d70769a Add documentation (#125) 2025-05-04 15:21:41 +03:00
Flaminel
75b001cf6a Add Apprise support (#124) 2025-05-01 21:00:01 +03:00
Flaminel
479ca7884e Fix crashing when tracker url is malformed (#121) 2025-04-28 16:48:54 +03:00
Flaminel
00d8910118 Update README.md 2025-04-12 23:43:44 +03:00
Flaminel
bd28c7ab05 Fix missing notifications for new strike types (#112) 2025-04-08 22:20:51 +03:00
Flaminel
720279df65 Update README.md 2025-04-08 18:14:01 +03:00
Flaminel
2d4ec648b8 Update README.md 2025-04-06 18:10:46 +03:00
Flaminel
704fdaca4a Add cleanup for slow downloads (#110) 2025-04-06 13:28:05 +03:00
Flaminel
b134136e51 Update README.md 2025-03-29 01:11:41 +02:00
Flaminel
5ca717d7e0 Update README.md 2025-03-27 19:53:57 +02:00
Flaminel
7068ee5e5a Update README.md 2025-03-26 13:30:55 +02:00
Flaminel
9f770473e5 Remove Transmission downloads cache (#105) 2025-03-26 00:26:10 +02:00
Flaminel
5fe0f5750a Fix qBit queued items being processed (#102) 2025-03-21 23:06:31 +02:00
Flaminel
b8ce225ccc Fix Deluge service crashing when download is not found (#97) 2025-03-20 00:09:58 +02:00
Flaminel
f21f7388b7 Add download client customizable url base (#43) 2025-03-20 00:09:24 +02:00
Flaminel
a1354f231a Add base path support for arrs (#96) 2025-03-20 00:08:51 +02:00
Flaminel
4bc1c33e81 Add option to explicitly disable the download client (#93) 2025-03-19 16:02:46 +02:00
Flaminel
32bcbab523 added docs for FreeBSD 2025-03-19 01:26:04 +02:00
Flaminel
b94ae21e11 update permissive blacklist 2025-03-13 10:16:52 +02:00
Flaminel
a92ebd75c2 Update docs (#88) 2025-03-11 23:42:21 +02:00
Flaminel
e6d3929fc9 Restrict max strikes to a minimum value (#87) 2025-03-11 23:35:07 +02:00
Flaminel
a68e13af35 Fix notifications when poster is not found (#89) 2025-03-11 23:34:44 +02:00
Flaminel
324c3ace8f Fix multiple runs on queue cleaner when download cleaner is enabled (#90) 2025-03-11 23:34:27 +02:00
Flaminel
3a9d5d9085 Fix patterns being loaded for disabled arrs (#80) 2025-03-11 23:18:34 +02:00
Flaminel
89a6eaf0ce Disable cleanup on torrent items if download client is not configured (#85) 2025-03-10 00:13:40 +02:00
Flaminel
027c4a0f4d Add option to ignore specific downloads (#79) 2025-03-09 23:38:27 +02:00
Flaminel
81990c6768 fixed missing README link 2025-03-03 22:37:22 +02:00
Flaminel
ba02aa0e49 Fix notifications failing when poster image is not set (#78) 2025-03-02 22:48:21 +02:00
Flaminel
5adbdbd920 Fix weird time zone display name on startup (#70) 2025-02-25 21:32:19 +02:00
Flaminel
b3b211d956 Add configurable time zone (#69) 2025-02-24 23:21:44 +02:00
Flaminel
279bd6d82d Fix Deluge timeout not being configurable (#68) 2025-02-24 18:32:44 +02:00
Flaminel
5dced28228 fixed errors on download cleaner when download client is none (#67) 2025-02-24 12:43:06 +02:00
Flaminel
51bdaf64e4 Fix interceptor memory leaks (#66) 2025-02-23 17:50:08 +02:00
Flaminel
9c8e0ebedc updated README 2025-02-18 13:16:49 +02:00
Flaminel
e1bea8a8c8 updated README 2025-02-17 23:59:36 +02:00
Marius Nechifor
a6d3820104 Improve Transmission category detection (#62) 2025-02-17 02:48:27 +02:00
Flaminel
36c793a5fb updated chart values 2025-02-16 17:43:35 +02:00
Flaminel
aade8a91c3 fixed dummy download service 2025-02-16 12:17:31 +02:00
Flaminel
3fe7c3de1a added null check for torrent properties 2025-02-16 03:37:50 +02:00
Marius Nechifor
596a5aed8d Add download cleaner and dry run (#58) 2025-02-16 03:20:00 +02:00
Marius Nechifor
19b3675701 Add Notifiarr support (#52) 2025-02-16 03:17:54 +02:00
Flaminel
1713d0fd1e updated README 2025-02-03 23:05:31 +02:00
Flaminel
3a95a302c0 updated issue templates 2025-02-03 20:40:28 +02:00
Marius Nechifor
e738ba2334 Fix queue items with no title not being processed (#54) 2025-02-02 18:20:42 +02:00
Marius Nechifor
c813215f3e Add more Lidarr checks for failed imports (#48) 2025-01-28 19:10:07 +02:00
Flaminel
0f63a2d271 updated README 2025-01-26 01:36:08 +02:00
Marius Nechifor
133c34de53 Add option to reset stalled strikes on download progress (#50) 2025-01-25 03:27:40 +02:00
Flaminel
a3ca735b12 updated deployment 2025-01-25 01:18:03 +02:00
Marius Nechifor
519ab6a0cd Fix strike defaults (#49) 2025-01-22 22:18:31 +02:00
Marius Nechifor
0c691a540a Add missing failed import status (#47) 2025-01-21 00:14:55 +02:00
Marius Nechifor
209f78717f Fix usenet usage (#46) 2025-01-18 19:12:28 +02:00
Flaminel
a02be80ac1 updated README 2025-01-18 17:25:15 +02:00
Marius Nechifor
8a8b906b6f Add option to not remove private downloads from the download client (#45) 2025-01-18 17:20:23 +02:00
Marius Nechifor
b88ddde417 Fix content blocker env var usage (#44) 2025-01-18 16:23:34 +02:00
Flaminel
666c2656ec added svg logo 2025-01-17 22:11:05 +02:00
Marius Nechifor
7786776ed8 Fix logging template (#42) 2025-01-16 11:55:38 +02:00
Flaminel
2c60b38edf fixed README ports 2025-01-16 00:10:02 +02:00
Marius Nechifor
922f586706 Add Lidarr support (#30) 2025-01-15 23:55:34 +02:00
Marius Nechifor
2bc8e445ce Add configurable number of retries and timeout for http calls (#40) 2025-01-14 22:58:03 +02:00
Marius Nechifor
058507ac39 Add option to ignore private downloads when blocking files (#39) 2025-01-13 15:15:58 +02:00
Marius Nechifor
f0dc51f10b Improve stalled and failed imports (#37) 2025-01-13 13:18:58 +02:00
Flaminel
c7ad1c5ee6 fixed README typo 2025-01-11 01:45:45 +02:00
Marius Nechifor
d7913ae2b8 Add option to not use a download client (#35) 2025-01-11 01:45:12 +02:00
Marius Nechifor
21e59072d0 Increase trigger interval limit (#34) 2025-01-09 23:03:53 +02:00
624 changed files with 85039 additions and 4521 deletions
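
Assuming a local clone with both the v1.3.1 tag and the dev branch fetched, roughly the same comparison can be reproduced with:
git log --oneline v1.3.1..dev     # the 399 commits listed above
git diff --stat v1.3.1...dev      # the 624-file change summary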

.github/FUNDING.yml (new file, 1 line added)

@@ -0,0 +1 @@
github: Flaminel

Bug report issue template (modified)

@@ -1,12 +1,12 @@
name: Bug report
description: File a bug report if something is not working right.
title: "[BUG]: "
title: "[BUG] "
labels: ["bug"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to improve cleanuperr!
Thanks for taking the time to improve Cleanuparr!
- type: checkboxes
id: init
attributes:
@@ -14,8 +14,12 @@ body:
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/Cleanuparr/Cleanuparr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true
- label: Enabled verbose logging.
required: true
- type: textarea
id: what-happened
attributes:
@@ -23,14 +27,6 @@ body:
description: If applicable, mention what you expected to happen.
validations:
required: true
- type: input
id: version
attributes:
label: Version
description: What version of our software are you running?
placeholder: e.g. 1.3.0 or latest
validations:
required: true
- type: dropdown
id: os
attributes:
@@ -40,6 +36,7 @@ body:
- Windows
- Linux
- MacOS
- Unraid
validations:
required: true
- type: dropdown

Feature request issue template (modified)

@@ -1,12 +1,12 @@
name: Feature request
description: File a feature request.
title: "[FEATURE]: "
title: "[FEATURE] "
labels: ["enhancement"]
body:
- type: markdown
attributes:
value: |
Thanks for taking the time to improve cleanuperr!
Thanks for taking the time to improve Cleanuparr!
- type: textarea
id: description
attributes:

Help request issue template (modified)

@@ -1,12 +1,25 @@
name: Help request
description: Ask a question to receive help.
title: "[HELP]: "
title: "[HELP] "
labels: ["question"]
body:
- type: markdown
attributes:
value: |
If you are experiencing unexpected behavior, please consider submitting a bug report instead.
- type: checkboxes
id: init
attributes:
label: "Before submitting a help request, I have:"
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/Cleanuparr/Cleanuparr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true
- label: Enabled verbose logging.
required: true
- type: textarea
id: description
attributes:

.github/workflows/build-docker.yml (new file, 125 lines added)

@@ -0,0 +1,125 @@
name: Build Docker Images
on:
push:
tags:
- "v*.*.*"
pull_request:
paths:
- 'code/**'
workflow_dispatch:
workflow_call:
jobs:
build_app:
runs-on: ubuntu-latest
steps:
- name: Set github context
timeout-minutes: 1
run: |
echo 'githubRepository=${{ github.repository }}' >> $GITHUB_ENV
echo 'githubSha=${{ github.sha }}' >> $GITHUB_ENV
echo 'githubRef=${{ github.ref }}' >> $GITHUB_ENV
echo 'githubHeadRef=${{ github.head_ref }}' >> $GITHUB_ENV
- name: Initialize build info
timeout-minutes: 1
run: |
githubHeadRef=${{ env.githubHeadRef }}
latestDockerTag=""
versionDockerTag=""
version="0.0.1"
if [[ "$githubRef" =~ ^"refs/tags/" ]]; then
branch=${githubRef##*/}
latestDockerTag="latest"
versionDockerTag=${branch#v}
version=${branch#v}
else
# Determine if this run is for the main branch or another branch
if [[ -z "$githubHeadRef" ]]; then
# Main branch
githubRef=${{ env.githubRef }}
branch=${githubRef##*/}
versionDockerTag="$branch"
else
# Pull request
branch=$githubHeadRef
versionDockerTag="$branch"
fi
fi
githubTags=""
if [ -n "$latestDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr:$latestDockerTag"
fi
if [ -n "$versionDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr:$versionDockerTag"
fi
# set env vars
echo "branch=$branch" >> $GITHUB_ENV
echo "githubTags=$githubTags" >> $GITHUB_ENV
echo "versionDockerTag=$versionDockerTag" >> $GITHUB_ENV
echo "version=$version" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/docker username | DOCKER_USERNAME;
secrets/data/docker password | DOCKER_PASSWORD;
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout target repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ env.githubRepository }}
ref: ${{ env.branch }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
timeout-minutes: 5
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push docker image
timeout-minutes: 15
uses: docker/build-push-action@v6
with:
context: ${{ github.workspace }}/code
file: ${{ github.workspace }}/code/Dockerfile
provenance: false
labels: |
commit=sha-${{ env.githubSha }}
version=${{ env.versionDockerTag }}
build-args: |
VERSION=${{ env.version }}
PACKAGES_USERNAME=${{ env.PACKAGES_USERNAME }}
PACKAGES_PAT=${{ env.PACKAGES_PAT }}
outputs: |
type=image
platforms: |
linux/amd64
linux/arm64
push: true
tags: |
${{ env.githubTags }}
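
For reference, the ref-to-tag mapping in the "Initialize build info" step can be exercised on its own; a condensed sketch of that logic (the v1.4.0 tag is a made-up example, and the ghcr.io/cleanuparr image path is copied as written above):
#!/usr/bin/env bash
# Mirrors the tag derivation from build-docker.yml's "Initialize build info" step.
githubRef="refs/tags/v1.4.0"   # example tag push; a branch push would be e.g. refs/heads/dev
githubHeadRef=""               # non-empty only for pull request builds
latestDockerTag=""; versionDockerTag=""; version="0.0.1"
if [[ "$githubRef" =~ ^"refs/tags/" ]]; then
  branch=${githubRef##*/}         # v1.4.0
  latestDockerTag="latest"
  versionDockerTag=${branch#v}    # 1.4.0
  version=${branch#v}
elif [[ -z "$githubHeadRef" ]]; then
  branch=${githubRef##*/}         # push to main/dev
  versionDockerTag="$branch"
else
  branch=$githubHeadRef           # pull request
  versionDockerTag="$branch"
fi
githubTags=""
[[ -n "$latestDockerTag" ]] && githubTags="$githubTags,ghcr.io/cleanuparr:$latestDockerTag"
[[ -n "$versionDockerTag" ]] && githubTags="$githubTags,ghcr.io/cleanuparr:$versionDockerTag"
echo "branch=$branch version=$version tags=$githubTags"
# -> branch=v1.4.0 version=1.4.0 tags=,ghcr.io/cleanuparr:latest,ghcr.io/cleanuparr:1.4.0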

.github/workflows/build-executable.yml (new file, 177 lines added)

@@ -0,0 +1,177 @@
name: Build Executables
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Gate
if: ${{ !startsWith(github.ref, 'refs/tags/') && github.event_name != 'workflow_dispatch' }}
run: |
echo "This workflow only runs on tag events or manual dispatch. Pipeline finished."
exit 0
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=${repoFullName#*/}" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout target repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup dotnet
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Install dependencies and restore
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ secrets.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
run: |
mkdir -p code/backend/${{ env.executableName }}/wwwroot
cp -r code/frontend/dist/ui/browser/* code/backend/${{ env.executableName }}/wwwroot/
- name: Build win-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Create sample configuration files
run: |
# Create a sample appsettings.json for each platform
cat > sample-config.json << 'EOF'
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}
EOF
# Copy to each build directory
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/appsettings.json
- name: Zip win-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/
- name: Zip linux-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/
- name: Zip linux-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/
- name: Zip osx-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/
- name: Zip osx-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: cleanuparr-executables
path: |
./artifacts/*.zip
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
id: release
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
fail_on_unmatched_files: true
target_commitish: main
generate_release_notes: true
files: |
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip
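
A single platform from the publish matrix above can be reproduced locally; a rough sketch for linux-x64 (the 0.0.1-dev version is the workflow's own manual-dispatch default, "Cleanuparr" stands in for the repository name, and the private Cleanuparr NuGet source must already be configured for the restore to succeed):
# Build the frontend and copy it into the API's wwwroot, as the workflow does.
(cd code/frontend && npm ci && npm run build)
mkdir -p code/backend/Cleanuparr.Api/wwwroot
cp -r code/frontend/dist/ui/browser/* code/backend/Cleanuparr.Api/wwwroot/
# Publish a self-contained single-file build and zip it with the same naming scheme.
version="0.0.1-dev"
dotnet publish code/backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
  -c Release --runtime linux-x64 --self-contained \
  -o "artifacts/Cleanuparr-${version}-linux-amd64" \
  /p:PublishSingleFile=true /p:Version=${version} /p:DebugSymbols=false
(cd artifacts && zip -r "Cleanuparr-${version}-linux-amd64.zip" "Cleanuparr-${version}-linux-amd64/")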

Build macOS ARM Installer workflow (new file, 376 lines added)

@@ -0,0 +1,376 @@
name: Build macOS ARM Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-arm-installer:
name: Build macOS ARM Installer
runs-on: macos-14 # ARM runner for Apple Silicon
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS ARM executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-arm64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>11.0</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-arm64-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}
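
Once the resulting PKG has been installed, the postinstall script above leaves a launch daemon running with the UI listening on port 11011; a quick sanity check, using only the label, port, and paths defined above, might look like:
sudo launchctl list | grep com.cleanuparr.daemon   # daemon registered and running?
curl -I http://localhost:11011                     # web UI reachable?
tail -n 50 /var/log/cleanuparr.log                 # service output, per StandardOutPath
# Full removal is handled by the bundled script:
sudo bash /Applications/Cleanuparr.app/Contents/Resources/uninstall_cleanuparr.sh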

Build macOS Intel Installer workflow (new file, 376 lines added)

@@ -0,0 +1,376 @@
name: Build macOS Intel Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-intel-installer:
name: Build macOS Intel Installer
runs-on: macos-13 # Intel runner
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS Intel executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-x64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>10.15</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-intel-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}

View File

@@ -0,0 +1,171 @@
name: Build Windows Installer
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-windows-installer:
runs-on: windows-latest
steps:
- name: Set variables
shell: pwsh
run: |
$repoFullName = "${{ github.repository }}"
$ref = "${{ github.ref }}"
# Handle both tag events and manual dispatch
if ($ref -match "^refs/tags/") {
$releaseVersion = $ref -replace "refs/tags/", ""
$appVersion = $releaseVersion -replace "^v", ""
} else {
# For manual dispatch, use a default version
$releaseVersion = "dev-$(Get-Date -Format 'yyyyMMdd-HHmmss')"
$appVersion = "0.0.1-dev"
}
$repositoryName = $repoFullName.Split("/")[1]
echo "githubRepository=${{ github.repository }}" >> $env:GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $env:GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $env:GITHUB_ENV
echo "appVersion=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
echo "APP_VERSION=$appVersion" >> $env:GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
shell: pwsh
run: |
New-Item -ItemType Directory -Force -Path "code/backend/${{ env.executableName }}/wwwroot"
Copy-Item -Path "code/frontend/dist/ui/browser/*" -Destination "code/backend/${{ env.executableName }}/wwwroot/" -Recurse -Force
- name: Build Windows executable
run: |
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o dist /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugType=None /p:DebugSymbols=false
- name: Create sample configuration
shell: pwsh
run: |
# Create config directory
New-Item -ItemType Directory -Force -Path "config"
$config = @{
"HTTP_PORTS" = 11011
"BASE_PATH" = "/"
}
$config | ConvertTo-Json | Out-File -FilePath "config/cleanuparr.json" -Encoding UTF8
- name: Setup Inno Setup
shell: pwsh
run: |
# Download and install Inno Setup
$url = "https://jrsoftware.org/download.php/is.exe"
$output = "innosetup-installer.exe"
Invoke-WebRequest -Uri $url -OutFile $output
Start-Process -FilePath $output -ArgumentList "/VERYSILENT", "/SUPPRESSMSGBOXES", "/NORESTART" -Wait
# Add Inno Setup to PATH
$innoPath = "C:\Program Files (x86)\Inno Setup 6"
echo "$innoPath" >> $env:GITHUB_PATH
- name: Verify LICENSE file exists
shell: pwsh
run: |
if (-not (Test-Path "LICENSE")) {
Write-Error "LICENSE file not found in repository root"
exit 1
}
Write-Host "LICENSE file found successfully"
- name: Build Windows installer
shell: pwsh
run: |
# Copy installer script to root
Copy-Item "installers/windows/cleanuparr-installer.iss" -Destination "cleanuparr-installer.iss"
# The installer script has been pre-updated with proper icon and config paths
# No dynamic modifications needed as the base script now includes correct references
# Run Inno Setup compiler
& "C:\Program Files (x86)\Inno Setup 6\ISCC.exe" "cleanuparr-installer.iss"
# Check if installer was created
if (Test-Path "installer/Cleanuparr_Setup.exe") {
Write-Host "Installer created successfully"
} else {
Write-Error "Installer creation failed"
exit 1
}
- name: Rename installer with version
shell: pwsh
run: |
$installerName = "Cleanuparr-${{ env.appVersion }}-Setup.exe"
Move-Item "installer/Cleanuparr_Setup.exe" "installer/$installerName"
echo "installerName=$installerName" >> $env:GITHUB_ENV
- name: Upload installer artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-windows-installer
path: installer/${{ env.installerName }}
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
installer/${{ env.installerName }}

View File

@@ -1,12 +0,0 @@
on:
workflow_dispatch:
workflow_call:
jobs:
build:
uses: flmorg/universal-workflows/.github/workflows/dotnet.build.app.yml@main
with:
dockerRepository: flaminel/cleanuperr
githubContext: ${{ toJSON(github) }}
outputName: cleanuperr
secrets: inherit

View File

@@ -1,19 +0,0 @@
on:
workflow_call:
workflow_dispatch:
push:
paths:
- 'chart/**'
branches: [ main ]
jobs:
deploy:
uses: flmorg/universal-workflows/.github/workflows/chart.install.yml@main
with:
githubContext: ${{ toJSON(github) }}
chartRepo: oci://ghcr.io/flmorg
chartName: universal-chart
version: ^1.0.0
valuesPath: chart/values.yaml
releaseName: main
secrets: inherit

49
.github/workflows/docs.yml vendored Normal file
View File

@@ -0,0 +1,49 @@
name: Deploy Docusaurus to GitHub Pages
on:
push:
branches: [main]
paths:
- 'docs/**'
permissions:
contents: read
pages: write
id-token: write
jobs:
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: yarn
cache-dependency-path: docs/yarn.lock
- name: Install dependencies
working-directory: docs
run: yarn install --frozen-lockfile
- name: Build Docusaurus
working-directory: docs
run: yarn build
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: docs/build
retention-days: 1
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

View File

@@ -1,20 +0,0 @@
on:
push:
tags:
- "v*.*.*"
# paths:
# - 'code/**'
# branches: [ main ]
pull_request:
paths:
- 'code/**'
jobs:
build:
uses: flmorg/cleanuperr/.github/workflows/build.yml@main
secrets: inherit
# deploy:
# needs: [ build ]
# uses: flmorg/cleanuperr/.github/workflows/deploy.yml@main
# secrets: inherit

View File

@@ -1,11 +1,164 @@
name: Release Build
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
inputs:
version:
description: 'Version to release (e.g., 1.0.0)'
required: false
default: ''
jobs:
release:
uses: flmorg/universal-workflows/.github/workflows/dotnet.release.yml@main
with:
githubContext: ${{ toJSON(github) }}
secrets: inherit
# Validate release
validate:
runs-on: ubuntu-latest
outputs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Get version info
id: version
run: |
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag event
release_version=${GITHUB_REF##refs/tags/}
app_version=${release_version#v}
is_tag=true
elif [[ -n "${{ github.event.inputs.version }}" ]]; then
# Manual workflow with version
app_version="${{ github.event.inputs.version }}"
release_version="v$app_version"
is_tag=false
else
# Manual workflow without version
app_version="0.0.1-dev-$(date +%Y%m%d-%H%M%S)"
release_version="v$app_version"
is_tag=false
fi
echo "app_version=$app_version" >> $GITHUB_OUTPUT
echo "release_version=$release_version" >> $GITHUB_OUTPUT
echo "is_tag=$is_tag" >> $GITHUB_OUTPUT
echo "🏷️ Release Version: $release_version"
echo "📱 App Version: $app_version"
echo "🔖 Is Tag: $is_tag"
# Build portable executables
build-executables:
needs: validate
uses: ./.github/workflows/build_executable.yml
secrets: inherit
# Build Windows installer
build-windows-installer:
needs: validate
uses: ./.github/workflows/build-windows-installer.yml
secrets: inherit
# Build macOS Intel installer
build-macos-intel:
needs: validate
uses: ./.github/workflows/build-macos-intel-installer.yml
secrets: inherit
# Build macOS ARM installer
build-macos-arm:
needs: validate
uses: ./.github/workflows/build-macos-arm-installer.yml
secrets: inherit
# Create GitHub release
create-release:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: ./artifacts
- name: List downloaded artifacts
run: |
echo "📦 Downloaded artifacts:"
find ./artifacts -type f \( -name "*.zip" -o -name "*.pkg" -o -name "*.exe" \) | sort
- name: Create release
uses: softprops/action-gh-release@v2
with:
name: Cleanuparr ${{ needs.validate.outputs.release_version }}
tag_name: ${{ needs.validate.outputs.release_version }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
generate_release_notes: true
prerelease: ${{ contains(needs.validate.outputs.app_version, '-') }}
files: |
./artifacts/**/*.zip
./artifacts/**/*.pkg
./artifacts/**/*.exe
# Summary job
summary:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
runs-on: ubuntu-latest
if: always()
steps:
- name: Build Summary
run: |
echo "## 🏗️ Cleanuparr Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ needs.validate.outputs.release_version }}" >> $GITHUB_STEP_SUMMARY
echo "**App Version**: ${{ needs.validate.outputs.app_version }}" >> $GITHUB_STEP_SUMMARY
echo "**Is Tag**: ${{ needs.validate.outputs.is_tag }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Build Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check job results
if [[ "${{ needs.build-executables.result }}" == "success" ]]; then
echo "✅ **Portable Executables**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Portable Executables**: ${{ needs.build-executables.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-windows-installer.result }}" == "success" ]]; then
echo "✅ **Windows Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Windows Installer**: ${{ needs.build-windows-installer.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-intel.result }}" == "success" ]]; then
echo "✅ **macOS Intel Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS Intel Installer**: ${{ needs.build-macos-intel.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-arm.result }}" == "success" ]]; then
echo "✅ **macOS ARM Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS ARM Installer**: ${{ needs.build-macos-arm.result }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "🎉 **Build completed!**" >> $GITHUB_STEP_SUMMARY

1
.gitignore vendored
View File

@@ -105,7 +105,6 @@ _NCrunch_*
_TeamCity*
# Sonarr
config.xml
nzbdrone.log*txt
UpdateLogs/
*workspace.xml

BIN
Logo/favicon.ico Normal file
View File

Binary file not shown (4.2 KiB added).

10
Logo/logo.svg Normal file
View File

File diff suppressed because one or more lines are too long (112 KiB added).

233
README.md
View File

@@ -1,219 +1,62 @@
# <img width="24px" src="./Logo/256.png" alt="cleanuperr"></img> cleanuperr
_Love this project? Give it a ⭐️ and let others know!_
cleanuperr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, cleanuperr can also trigger a search to replace the deleted shows/movies.
# <img width="24px" src="./Logo/256.png" alt="Cleanuparr"></img> Cleanuparr
cleanuperr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made cleanuperr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
[![Discord](https://img.shields.io/discord/1306721212587573389?color=7289DA&label=Discord&style=for-the-badge&logo=discord)](https://discord.gg/SCtMCgtsc4)
The tool supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
Cleanuparr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuparr can also trigger a search to replace the deleted shows/movies.
Refer to the [Environment variables](#Environment-variables) section for detailed configuration instructions and the [Setup](#Setup) section for an in-depth explanation of the cleanup process.
Cleanuparr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made Cleanuparr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
## Key features
- Marks unwanted files as skip/unwanted in the download client.
- Automatically strikes stalled or stuck downloads.
- Removes and blocks downloads that have reached the maximum number of strikes or are marked as unwanted by the download client or by cleanuperr, then triggers a search to replace the removed downloads.
> [!IMPORTANT]
> **Features:**
> - Strike system to mark bad downloads.
> - Remove and block downloads that reached a maximum number of strikes.
> - Remove and block downloads that are **failing to be imported** by the arrs. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/import-failed)
> - Remove and block downloads that are **stalled** or in **metadata downloading** state. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/stalled)
> - Remove and block downloads that have a **low download speed** or **high estimated completion time**. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/slow)
> - Remove and block downloads blocked by qBittorrent or by Cleanuparr's **Content Blocker**. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/content-blocker/general)
> - Automatically trigger a search for downloads removed from the arrs.
> - Clean up downloads that have been **seeding** for a certain amount of time. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/download-cleaner/seeding)
> - Remove downloads that are **orphaned**/have no **hardlinks**/are not referenced by the arrs anymore (with [cross-seed](https://www.cross-seed.org/) support). [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/download-cleaner/hardlinks)
> - Notify on strike or download removal. [configuration](https://cleanuparr.github.io/cleanuparr/docs/category/notifications)
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuparr.
## Important note
Cleanuparr supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
Only the **latest versions** of the following apps are supported, or earlier versions that have the same API as the latest version:
- qBittorrent
- Deluge
- Transmission
- Sonarr
- Radarr
## Quick Start
This tool is actively developed and still a work in progress. Join the Discord server if you want to reach out to me quickly (or just stay updated on new releases) so we can squash those pesky bugs together:
> [!NOTE]
>
> 1. **Docker (Recommended)**
> Pull the Docker image from `ghcr.io/cleanuparr/cleanuparr:latest`.
>
> 2. **Unraid (for Unraid users)**
> Use the Unraid Community App.
>
> 3. **Manual Installation (if you're not using Docker)**
> Go to [Windows](#windows), [Linux](#linux) or [MacOS](#macos).
> https://discord.gg/sWggpnmGNY
# Docs
# How it works
Docs can be found [here](https://Cleanuparr.github.io/Cleanuparr/).
1. **Content blocker** will:
- Run every 5 minutes (or configured cron).
- Process all items in the *arr queue.
- Find the corresponding item from the download client for each queue item.
- Mark the files that were found in the queue as **unwanted/skipped** if:
- They **are listed in the blacklist**, or
- They **are not included in the whitelist**.
2. **Queue cleaner** will:
- Run every 5 minutes (or configured cron).
- Process all items in the *arr queue.
- Check whether each queue item is **stalled (download speed is 0)**, **stuck in metadata downloading** or **failed to be imported**.
- If it is, the item receives a **strike** and will continue to accumulate strikes every time it meets any of these conditions (a condensed sketch of the strike logic follows after this list).
- Check whether each queue item meets one of the following conditions in the download client:
- **Marked as completed, but 0 bytes have been downloaded** (due to files being blocked by qBittorrent or the **content blocker**).
- All of its associated files are marked as **unwanted/skipped**.
- If the item **DOES NOT** match the above criteria, it will be skipped.
- If the item **DOES** match the criteria or has received the **maximum number of strikes**:
- It will be removed from the *arr's queue and blocked.
- It will be deleted from the download client.
- A new search will be triggered for the *arr item.
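To make the strike flow above concrete, here is a condensed, hypothetical sketch of the per-item decision. It is not the actual cleanuperr implementation; the `QueueItem` shape and the in-memory strike store are assumptions, while the max-strike values mirror `QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES` and `QUEUECLEANER__STALLED_MAX_STRIKES`.
```
// Hypothetical sketch of the strike decision above; not the actual cleanuperr code.
using System.Collections.Generic;

public sealed record QueueItem(string DownloadId, bool ImportFailed, bool IsStalled, bool IsStuckInMetadata);

public static class QueueCleanerSketch
{
    // In-memory strike counts keyed by download id (the real service may track these differently).
    private static readonly Dictionary<string, int> Strikes = new();

    public static bool ShouldRemove(QueueItem item, ushort importFailedMaxStrikes, ushort stalledMaxStrikes)
    {
        if (item.ImportFailed)
            return AddStrike(item.DownloadId, importFailedMaxStrikes);

        if (item.IsStalled || item.IsStuckInMetadata)
            return AddStrike(item.DownloadId, stalledMaxStrikes);

        return false; // healthy items are skipped
    }

    private static bool AddStrike(string downloadId, ushort maxStrikes)
    {
        if (maxStrikes == 0)
            return false; // 0 means "never remove"

        int count = Strikes[downloadId] = Strikes.GetValueOrDefault(downloadId) + 1;
        return count >= maxStrikes; // at the limit: remove from the *arr, delete from the client, re-search
    }
}
```
Items that hit the limit are the ones removed from the *arr queue, deleted from the download client, and re-searched, as listed above.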
# <img style="vertical-align: middle;" width="24px" src="./Logo/256.png" alt="Cleanuparr"> <span style="vertical-align: middle;">Cleanuparr</span> <img src="https://raw.githubusercontent.com/FortAwesome/Font-Awesome/6.x/svgs/solid/x.svg" height="24px" width="30px" style="vertical-align: middle;"> <span style="vertical-align: middle;">Huntarr</span> <img style="vertical-align: middle;" width="24px" src="https://github.com/plexguide/Huntarr.io/blob/main/frontend/static/logo/512.png?raw=true" alt Huntarr></img>
# Setup
Think of **Cleanuparr** as the janitor of your server; it keeps your download queue spotless, removes clutter, and blocks malicious files. Now imagine combining that with **Huntarr**, the compulsive librarian who finds missing and upgradable media to complete your collection.
## Using qBittorrent's built-in feature (works only with qBittorrent)
While **Huntarr** fills in the blanks and improves what you already have, **Cleanuparr** makes sure that only clean downloads get through. If you're aiming for a reliable and self-sufficient setup, **Cleanuparr** and **Huntarr** will take your automated media stack to another level.
1. Go to qBittorrent -> Options -> Downloads -> make sure `Excluded file names` is checked -> paste one of the following exclusion lists:
- [blacklist](https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist), or
- [permissive blacklist](https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist_permissive), or
- create your own
2. qBittorrent will block files from being downloaded. In the case of malicious content, **nothing is downloaded and the torrent is marked as complete**.
3. Start **cleanuperr** with `QUEUECLEANER__ENABLED` set to `true`.
4. The **queue cleaner** will perform a cleanup process as described in the [How it works](#how-it-works) section.
## Using cleanuperr's blocklist (works with all supported download clients)
1. Set both `QUEUECLEANER__ENABLED` and `CONTENTBLOCKER__ENABLED` to `true` in your environment variables.
2. Configure and enable either a **blacklist** or a **whitelist** as described in the [Environment variables](#Environment-variables) section.
3. Once configured, cleanuperr will perform the following tasks:
- Execute the **content blocker** job, as explained in the [How it works](#how-it-works) section.
- Execute the **queue cleaner** job, as explained in the [How it works](#how-it-works) section.
## Usage
### Docker compose yaml
```
version: "3.3"
services:
cleanuperr:
volumes:
- ./cleanuperr/logs:/var/logs
environment:
- LOGGING__LOGLEVEL=Information
- LOGGING__FILE__ENABLED=false
- LOGGING__FILE__PATH=/var/logs/
- LOGGING__ENHANCED=true
- TRIGGERS__QUEUECLEANER=0 0/5 * * * ?
- TRIGGERS__CONTENTBLOCKER=0 0/5 * * * ?
- QUEUECLEANER__ENABLED=true
- QUEUECLEANER__RUNSEQUENTIALLY=true
- QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES=5
- QUEUECLEANER__STALLED_MAX_STRIKES=5
- CONTENTBLOCKER__ENABLED=true
- CONTENTBLOCKER__BLACKLIST__ENABLED=true
- CONTENTBLOCKER__BLACKLIST__PATH=https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
# OR
# - CONTENTBLOCKER__WHITELIST__ENABLED=true
# - CONTENTBLOCKER__WHITELIST__PATH=https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/whitelist
- DOWNLOAD_CLIENT=qBittorrent
- QBITTORRENT__URL=http://localhost:8080
- QBITTORRENT__USERNAME=user
- QBITTORRENT__PASSWORD=pass
# OR
# - DOWNLOAD_CLIENT=deluge
# - DELUGE__URL=http://localhost:8112
# - DELUGE__PASSWORD=testing
# OR
# - DOWNLOAD_CLIENT=transmission
# - TRANSMISSION__URL=http://localhost:9091
# - TRANSMISSION__USERNAME=test
# - TRANSMISSION__PASSWORD=testing
- SONARR__ENABLED=true
- SONARR__SEARCHTYPE=Episode
- SONARR__INSTANCES__0__URL=http://localhost:8989
- SONARR__INSTANCES__0__APIKEY=secret1
- SONARR__INSTANCES__1__URL=http://localhost:8990
- SONARR__INSTANCES__1__APIKEY=secret2
- RADARR__ENABLED=true
- RADARR__INSTANCES__0__URL=http://localhost:7878
- RADARR__INSTANCES__0__APIKEY=secret3
- RADARR__INSTANCES__1__URL=http://localhost:7879
- RADARR__INSTANCES__1__APIKEY=secret4
image: ghcr.io/flmorg/cleanuperr:latest
restart: unless-stopped
```
### Environment variables
| Variable | Required | Description | Default value |
|---|---|---|---|
| LOGGING__LOGLEVEL | No | Can be `Verbose`, `Debug`, `Information`, `Warning`, `Error` or `Fatal` | `Information` |
| LOGGING__FILE__ENABLED | No | Enable or disable logging to file | false |
| LOGGING__FILE__PATH | No | Directory where to save the log files | empty |
| LOGGING__ENHANCED | No | Enhance logs whenever possible<br>A more detailed description is provided [here](variables.md#LOGGING__ENHANCED) | true |
|||||
| TRIGGERS__QUEUECLEANER | Yes if queue cleaner is enabled | [Quartz cron trigger](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)<br>The interval between fire times can be at most 1 hour | 0 0/5 * * * ? |
| TRIGGERS__CONTENTBLOCKER | Yes if content blocker is enabled | [Quartz cron trigger](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html)<br>The interval between fire times can be at most 1 hour | 0 0/5 * * * ? |
|||||
| QUEUECLEANER__ENABLED | No | Enable or disable the queue cleaner | true |
| QUEUECLEANER__RUNSEQUENTIALLY | No | If set to true, the queue cleaner will run after the content blocker instead of running in parallel, streamlining the cleaning process | true |
| QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES | No | After how many strikes should a failed import be removed<br>0 means never | 0 |
| QUEUECLEANER__STALLED_MAX_STRIKES | No | After how many strikes should a stalled download be removed<br>0 means never | 0 |
|||||
| CONTENTBLOCKER__ENABLED | No | Enable or disable the content blocker | false |
| CONTENTBLOCKER__BLACKLIST__ENABLED | Yes if content blocker is enabled and whitelist is not enabled | Enable or disable the blacklist | false |
| CONTENTBLOCKER__BLACKLIST__PATH | Yes if blacklist is enabled | Path to the blacklist (local file or url)<br>Needs to be json compatible | empty |
| CONTENTBLOCKER__WHITELIST__ENABLED | Yes if content blocker is enabled and blacklist is not enabled | Enable or disable the whitelist | false |
| CONTENTBLOCKER__WHITELIST__PATH | Yes if whitelist is enabled | Path to the whitelist (local file or url)<br>Needs to be json compatible | empty |
|||||
| DOWNLOAD_CLIENT | No | Download client that is used by *arrs<br>Can be `qbittorrent`, `deluge` or `transmission` | `qbittorrent` |
| QBITTORRENT__URL | No | qBittorrent instance url | http://localhost:8080 |
| QBITTORRENT__USERNAME | No | qBittorrent user | empty |
| QBITTORRENT__PASSWORD | No | qBittorrent password | empty |
|||||
| DELUGE__URL | No | Deluge instance url | http://localhost:8112 |
| DELUGE__PASSWORD | No | Deluge password | empty |
|||||
| TRANSMISSION__URL | No | Transmission instance url | http://localhost:9091 |
| TRANSMISSION__USERNAME | No | Transmission user | empty |
| TRANSMISSION__PASSWORD | No | Transmission password | empty |
|||||
| SONARR__ENABLED | No | Enable or disable Sonarr cleanup | true |
| SONARR__SEARCHTYPE | No | What to search for after removing a queue item<br>Can be `Episode`, `Season` or `Series` | `Episode` |
| SONARR__INSTANCES__0__URL | No | First Sonarr instance url | http://localhost:8989 |
| SONARR__INSTANCES__0__APIKEY | No | First Sonarr instance API key | empty |
|||||
| RADARR__ENABLED | No | Enable or disable Radarr cleanup | false |
| RADARR__INSTANCES__0__URL | No | First Radarr instance url | http://localhost:7878 |
| RADARR__INSTANCES__0__APIKEY | No | First Radarr instance API key | empty |
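These double-underscore variables follow the standard .NET configuration convention: `__` maps to the `:` section separator, so e.g. `SONARR__INSTANCES__0__URL` binds as `Sonarr:Instances:0:Url`. A minimal binding sketch, illustrative only; it assumes the `Microsoft.Extensions.Configuration` binder and environment-variables packages plus the `ArrInstance` type that appears later in this diff:
```
// Illustrative only; assumes the Microsoft.Extensions.Configuration,
// .Binder and .EnvironmentVariables packages, plus the ArrInstance type
// defined elsewhere in this diff.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

IConfigurationRoot configuration = new ConfigurationBuilder()
    .AddEnvironmentVariables() // SONARR__INSTANCES__0__URL -> Sonarr:Instances:0:Url
    .Build();

List<ArrInstance>? instances = configuration
    .GetSection("Sonarr:Instances")
    .Get<List<ArrInstance>>();

foreach (ArrInstance instance in instances ?? new List<ArrInstance>())
{
    Console.WriteLine($"Sonarr instance: {instance.Url}");
}
```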
#
### To be noted
1. The blacklist and the whitelist can not be both enabled at the same time.
2. The queue cleaner and content blocker can be enabled or disabled separately, if you want to run only one of them.
3. Only one download client can be enabled at a time. If you have more than one download client, you should deploy multiple instances of cleanuperr.
4. The blocklists (blacklist/whitelist) should have a single pattern on each line and support the following (a matching sketch follows after this list):
```
*example // file name ends with "example"
example* // file name starts with "example"
*example* // file name has "example" in the name
example // file name is exactly the word "example"
regex:<ANY_REGEX> // regex that needs to be marked at the start of the line with "regex:"
```
5. Multiple Sonarr/Radarr instances can be specified using this format, where `<NUMBER>` starts from 0:
```
SONARR__INSTANCES__<NUMBER>__URL
SONARR__INSTANCES__<NUMBER>__APIKEY
```
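To illustrate the pattern syntax from point 4, here is a rough, hypothetical matcher. It is not necessarily how cleanuperr's own `FilenameEvaluator` is implemented, and case-insensitive matching is an assumption.
```
// Hypothetical matcher for the pattern syntax in point 4; not necessarily how
// cleanuperr's FilenameEvaluator works. Case-insensitive matching is assumed.
using System;
using System.Text.RegularExpressions;

public static class BlocklistSketch
{
    public static bool MatchesPattern(string fileName, string pattern)
    {
        if (pattern.StartsWith("regex:", StringComparison.OrdinalIgnoreCase))
            return Regex.IsMatch(fileName, pattern["regex:".Length..]);

        bool startsWithStar = pattern.StartsWith('*');
        bool endsWithStar = pattern.EndsWith('*');
        string core = pattern.Trim('*');

        return (startsWithStar, endsWithStar) switch
        {
            (true, true)  => fileName.Contains(core, StringComparison.OrdinalIgnoreCase),   // *example*
            (true, false) => fileName.EndsWith(core, StringComparison.OrdinalIgnoreCase),    // *example
            (false, true) => fileName.StartsWith(core, StringComparison.OrdinalIgnoreCase),  // example*
            _             => fileName.Equals(core, StringComparison.OrdinalIgnoreCase)       // example
        };
    }
}
```
Whether a match leads to a file being marked unwanted (blacklist) or kept (whitelist) is decided one level up, as described in the [How it works](#how-it-works) section.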
#
### Binaries (if you're not using Docker)
1. Download the binaries from [releases](https://github.com/flmorg/cleanuperr/releases).
2. Extract them from the zip file.
3. Edit **appsettings.json**. The keys in this json file correspond to the docker env vars, as described [above](#environment-variables).
### Run as a Windows Service
Check out this stackoverflow answer on how to do it: https://stackoverflow.com/a/15719678
<span style="font-size:24px"> ➡️ [**Huntarr**](https://github.com/plexguide/Huntarr.io) <span style="vertical-align: middle">![Huntarr](https://img.shields.io/github/stars/plexguide/Huntarr.io?style=social)</span></span>
# Credits
Special thanks for inspiration go to:
- [ThijmenGThN/swaparr](https://github.com/ThijmenGThN/swaparr)
- [ManiMatter/decluttarr](https://github.com/ManiMatter/decluttarr)
- [PaeyMoopy/sonarr-radarr-queue-cleaner](https://github.com/PaeyMoopy/sonarr-radarr-queue-cleaner)
- [Sonarr](https://github.com/Sonarr/Sonarr) & [Radarr](https://github.com/Radarr/Radarr) for the logo
- [Sonarr](https://github.com/Sonarr/Sonarr) & [Radarr](https://github.com/Radarr/Radarr)
# Buy me a coffee
If I made your life just a tiny bit easier, consider buying me a coffee!
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
<a href="https://buymeacoffee.com/flaminel" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>

View File

@@ -1,5 +1,6 @@
*(sample).*
*.0xe
*.001
*.73k
*.73p
*.7z

View File

@@ -1,4 +1,6 @@
*.001
*.apk
*.arj
*.bat
*.bin
*.bmp

View File

@@ -1,115 +0,0 @@
deployment:
replicas: 1
strategy:
type: RollingUpdate
maxSurge: 1
maxUnavailable: 0
containers:
- name: qbit
image:
repository: ghcr.io/flmorg/cleanuperr
tag: latest
env:
- name: LOGGING__LOGLEVEL
value: Information
- name: LOGGING__FILE__ENABLED
value: "true"
- name: LOGGING__FILE__PATH
value: /var/logs
- name: LOGGING__ENHANCED
value: "true"
- name: TRIGGERS__QUEUECLEANER
value: 0 0/5 * * * ?
- name: TRIGGERS__CONTENTBLOCKER
value: 0 0/5 * * * ?
- name: QUEUECLEANER__ENABLED
value: "true"
- name: QUEUECLEANER__RUNSEQUENTIALLY
value: "true"
- name: QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES
value: "3"
- name: QUEUECLEANER__STALLED_MAX_STRIKES
value: "3"
- name: CONTENTBLOCKER__ENABLED
value: "true"
- name: CONTENTBLOCKER__BLACKLIST__ENABLED
value: "true"
- name: CONTENTBLOCKER__BLACKLIST__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: DOWNLOAD_CLIENT
value: qbittorrent
- name: QBITTORRENT__URL
value: http://service.qbittorrent-videos.svc.cluster.local
- name: SONARR__ENABLED
value: "true"
- name: SONARR__SEARCHTYPE
value: Episode
- name: SONARR__INSTANCES__0__URL
value: http://service.sonarr-low-res.svc.cluster.local
- name: SONARR__INSTANCES__1__URL
value: http://service.sonarr-high-res.svc.cluster.local
- name: RADARR__ENABLED
value: "true"
- name: RADARR__INSTANCES__0__URL
value: http://service.radarr-low-res.svc.cluster.local
- name: RADARR__INSTANCES__1__URL
value: http://service.radarr-high-res.svc.cluster.local
envFromSecret:
- secretName: qbit-auth
envs:
- name: QBITTORRENT__USERNAME
key: QBIT_USER
- name: QBITTORRENT__PASSWORD
key: QBIT_PASS
- secretName: sonarr-auth
envs:
- name: SONARR__INSTANCES__0__APIKEY
key: SNRL_API_KEY
- name: SONARR__INSTANCES__1__APIKEY
key: SNRH_API_KEY
- secretName: radarr-auth
envs:
- name: RADARR__INSTANCES__0__APIKEY
key: RDRL_API_KEY
- name: RADARR__INSTANCES__1__APIKEY
key: RDRH_API_KEY
resources:
requests:
cpu: 0m
memory: 0Mi
limits:
cpu: 1000m
memory: 1000Mi
volumeMounts:
- name: storage
mountPath: /var/logs
subPath: cleanuperr/logs
volumes:
- name: storage
type: pvc
typeName: storage-pvc
pvcs:
- name: storage-pvc
storageClassName: local-path-persistent
accessModes:
- ReadWriteOnce
size: 1Gi
volumeMode: Filesystem
vaultSecrets:
- name: qbit-auth
path: secrets/qbittorrent
templates:
QBIT_USER: "{% .Secrets.username %}"
QBIT_PASS: "{% .Secrets.password %}"
- name: radarr-auth
path: secrets/radarr
templates:
RDRL_API_KEY: "{% .Secrets.low_api_key %}"
RDRH_API_KEY: "{% .Secrets.high_api_key %}"
- name: sonarr-auth
path: secrets/sonarr
templates:
SNRL_API_KEY: "{% .Secrets.low_api_key %}"
SNRH_API_KEY: "{% .Secrets.high_api_key %}"

41
code/.dockerignore Normal file
View File

@@ -0,0 +1,41 @@
# Documentation
*.md
docs/
# Version control
.git/
.gitignore
# IDE files
.vscode/
.idea/
*.swp
*.swo
# OS files
.DS_Store
Thumbs.db
# Node.js
frontend/node_modules/
frontend/dist/
frontend/.angular/
# .NET
backend/bin/
backend/obj/
backend/*/bin/
backend/*/obj/
backend/.vs/
# Build artifacts
artifacts/
dist/
# Test files
backend/**/*Tests/
backend/**/Tests/
# Development files
docker-compose*.yml
test/

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.Arr;
public abstract record ArrConfig
{
public required bool Enabled { get; init; }
public required List<ArrInstance> Instances { get; init; }
}

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.Arr;
public sealed class ArrInstance
{
public required Uri Url { get; set; }
public required string ApiKey { get; set; }
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration.Arr;
public sealed record RadarrConfig : ArrConfig
{
public const string SectionName = "Radarr";
}

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.Arr;
public sealed record SonarrConfig : ArrConfig
{
public const string SectionName = "Sonarr";
public SonarrSearchType SearchType { get; init; }
}

View File

@@ -1,40 +0,0 @@
namespace Common.Configuration.ContentBlocker;
public sealed record ContentBlockerConfig : IJobConfig
{
public const string SectionName = "ContentBlocker";
public required bool Enabled { get; init; }
public PatternConfig? Blacklist { get; init; }
public PatternConfig? Whitelist { get; init; }
public void Validate()
{
if (!Enabled)
{
return;
}
if (Blacklist is null && Whitelist is null)
{
throw new Exception("content blocker is enabled, but both blacklist and whitelist are missing");
}
if (Blacklist?.Enabled is true && Whitelist?.Enabled is true)
{
throw new Exception("only one exclusion (blacklist/whitelist) list is allowed");
}
if (Blacklist?.Enabled is true && string.IsNullOrEmpty(Blacklist.Path))
{
throw new Exception("blacklist path is required");
}
if (Whitelist?.Enabled is true && string.IsNullOrEmpty(Whitelist.Path))
{
throw new Exception("blacklist path is required");
}
}
}
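To make the mutual-exclusion rule above concrete, a tiny illustrative snippet; the URLs are placeholders and the types are the ones defined above:
```
// Illustrative only; exercises the rules enforced by Validate() above.
// The URLs are placeholders.
var config = new ContentBlockerConfig
{
    Enabled = true,
    Blacklist = new PatternConfig { Enabled = true, Path = "https://example.com/blacklist" },
    Whitelist = new PatternConfig { Enabled = true, Path = "https://example.com/whitelist" },
};

// Throws: "only one exclusion (blacklist/whitelist) list is allowed"
config.Validate();
```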

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.ContentBlocker;
public sealed record PatternConfig
{
public bool Enabled { get; init; }
public string? Path { get; init; }
}

View File

@@ -1,18 +0,0 @@
namespace Common.Configuration.DownloadClient;
public sealed record DelugeConfig : IConfig
{
public const string SectionName = "Deluge";
public Uri? Url { get; init; }
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
}
}
}

View File

@@ -1,20 +0,0 @@
namespace Common.Configuration.DownloadClient;
public sealed class QBitConfig : IConfig
{
public const string SectionName = "qBittorrent";
public Uri? Url { get; init; }
public string? Username { get; init; }
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
}
}
}

View File

@@ -1,20 +0,0 @@
namespace Common.Configuration.DownloadClient;
public record TransmissionConfig : IConfig
{
public const string SectionName = "Transmission";
public Uri? Url { get; init; }
public string? Username { get; init; }
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ArgumentNullException(nameof(Url));
}
}
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public static class EnvironmentVariables
{
public const string DownloadClient = "DOWNLOAD_CLIENT";
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public interface IConfig
{
void Validate();
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public interface IJobConfig : IConfig
{
bool Enabled { get; init; }
}

View File

@@ -1,12 +0,0 @@
namespace Common.Configuration.Logging;
public class FileLogConfig : IConfig
{
public bool Enabled { get; set; }
public string Path { get; set; } = string.Empty;
public void Validate()
{
}
}

View File

@@ -1,18 +0,0 @@
using Serilog.Events;
namespace Common.Configuration.Logging;
public class LoggingConfig : IConfig
{
public const string SectionName = "Logging";
public LogEventLevel LogLevel { get; set; }
public bool Enhanced { get; set; }
public FileLogConfig? File { get; set; }
public void Validate()
{
}
}

View File

@@ -1,22 +0,0 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.QueueCleaner;
public sealed record QueueCleanerConfig : IJobConfig
{
public const string SectionName = "QueueCleaner";
public required bool Enabled { get; init; }
public required bool RunSequentially { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_MAX_STRIKES")]
public ushort ImportFailedMaxStrikes { get; init; }
[ConfigurationKeyName("STALLED_MAX_STRIKES")]
public ushort StalledMaxStrikes { get; init; }
public void Validate()
{
}
}

View File

@@ -1,10 +0,0 @@
namespace Common.Configuration;
public sealed class TriggersConfig
{
public const string SectionName = "Triggers";
public required string QueueCleaner { get; init; }
public required string ContentBlocker { get; init; }
}

72
code/Dockerfile Normal file
View File

@@ -0,0 +1,72 @@
# Build Angular frontend
FROM --platform=$BUILDPLATFORM node:18-alpine AS frontend-build
WORKDIR /app
# Copy package files first for better layer caching
COPY frontend/package*.json ./
RUN npm ci && npm install -g @angular/cli
# Copy source code
COPY frontend/ .
# Build with appropriate base-href and deploy-url
RUN npm run build
# Build .NET backend
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:9.0-bookworm-slim AS build
ARG TARGETARCH
ARG VERSION=0.0.1
ARG PACKAGES_USERNAME
ARG PACKAGES_PAT
WORKDIR /app
EXPOSE 11011
# Copy solution and project files first for better layer caching
# COPY backend/*.sln ./backend/
# COPY backend/*/*.csproj ./backend/*/
# Copy source code
COPY backend/ ./backend/
# Restore dependencies
RUN dotnet nuget add source --username ${PACKAGES_USERNAME} --password ${PACKAGES_PAT} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
# Build and publish
RUN dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
-a $TARGETARCH \
-c Release \
-o /app/publish \
/p:Version=${VERSION} \
/p:PublishSingleFile=true \
/p:DebugSymbols=false
# Runtime stage
FROM mcr.microsoft.com/dotnet/aspnet:9.0-bookworm-slim
# Install required packages for user management and timezone support
RUN apt-get update && apt-get install -y \
tzdata \
gosu \
&& rm -rf /var/lib/apt/lists/*
ENV PUID=1000 \
PGID=1000 \
UMASK=022 \
TZ=Etc/UTC \
HTTP_PORTS=11011
# Fix FileSystemWatcher in Docker: https://github.com/dotnet/dotnet-docker/issues/3546
ENV DOTNET_USE_POLLING_FILE_WATCHER=true
WORKDIR /app
# Copy backend
COPY --from=build /app/publish .
# Copy frontend to wwwroot
COPY --from=frontend-build /app/dist/ui/browser ./wwwroot
# Copy entrypoint script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
CMD ["./Cleanuparr"]

View File

@@ -1,8 +0,0 @@
namespace Domain.Enums;
public enum DownloadClient
{
QBittorrent,
Deluge,
Transmission
}

View File

@@ -1,9 +0,0 @@
namespace Domain.Enums;
public enum InstanceType
{
Sonarr,
Radarr,
Lidarr,
Readarr
}

View File

@@ -1,7 +0,0 @@
namespace Domain.Enums;
public enum StrikeType
{
Stalled,
ImportFailed
}

View File

@@ -1,16 +0,0 @@
namespace Domain.Models.Arr.Queue;
public record QueueRecord
{
public int SeriesId { get; init; }
public int EpisodeId { get; init; }
public int SeasonNumber { get; init; }
public int MovieId { get; init; }
public required string Title { get; init; }
public string Status { get; init; }
public string TrackedDownloadStatus { get; init; }
public string TrackedDownloadState { get; init; }
public required string DownloadId { get; init; }
public required string Protocol { get; init; }
public required int Id { get; init; }
}

View File

@@ -1,12 +0,0 @@
namespace Domain.Models.Deluge.Response;
public sealed record TorrentStatus
{
public string? Hash { get; set; }
public string? State { get; set; }
public string? Name { get; set; }
public ulong Eta { get; set; }
}

View File

@@ -1,23 +0,0 @@
using Common.Configuration;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadClient;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
namespace Executable.DependencyInjection;
public static class ConfigurationDI
{
public static IServiceCollection AddConfiguration(this IServiceCollection services, IConfiguration configuration) =>
services
.Configure<QueueCleanerConfig>(configuration.GetSection(QueueCleanerConfig.SectionName))
.Configure<ContentBlockerConfig>(configuration.GetSection(ContentBlockerConfig.SectionName))
.Configure<QBitConfig>(configuration.GetSection(QBitConfig.SectionName))
.Configure<DelugeConfig>(configuration.GetSection(DelugeConfig.SectionName))
.Configure<TransmissionConfig>(configuration.GetSection(TransmissionConfig.SectionName))
.Configure<SonarrConfig>(configuration.GetSection(SonarrConfig.SectionName))
.Configure<RadarrConfig>(configuration.GetSection(RadarrConfig.SectionName))
.Configure<LoggingConfig>(configuration.GetSection(LoggingConfig.SectionName));
}

View File

@@ -1,67 +0,0 @@
using Common.Configuration.Logging;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.QueueCleaner;
using Serilog;
using Serilog.Events;
using Serilog.Templates;
using Serilog.Templates.Themes;
namespace Executable.DependencyInjection;
public static class LoggingDI
{
public static ILoggingBuilder AddLogging(this ILoggingBuilder builder, IConfiguration configuration)
{
LoggingConfig? config = configuration.GetSection(LoggingConfig.SectionName).Get<LoggingConfig>();
if (!string.IsNullOrEmpty(config?.File?.Path) && !Directory.Exists(config.File.Path))
{
try
{
Directory.CreateDirectory(config.File.Path);
}
catch (Exception exception)
{
throw new Exception($"log file path is not a valid directory | {config.File.Path}", exception);
}
}
LoggerConfiguration logConfig = new();
const string consoleOutputTemplate = "[{@t:yyyy-MM-dd HH:mm:ss.fff} {@l:u3}]{#if JobName is not null} {Concat('[',JobName,']'),PAD}{#end} {@m}\n{@x}";
const string fileOutputTemplate = "{@t:yyyy-MM-dd HH:mm:ss.fff zzz} [{@l:u3}]{#if JobName is not null} {Concat('[',JobName,']'),PAD}{#end} {@m:lj}\n{@x}";
LogEventLevel level = LogEventLevel.Information;
List<string> jobNames = [nameof(ContentBlocker), nameof(QueueCleaner)];
int padding = jobNames.Max(x => x.Length) + 2;
if (config is not null)
{
level = config.LogLevel;
if (config.File?.Enabled is true)
{
logConfig.WriteTo.File(
path: Path.Combine(config.File.Path, "cleanuperr-.txt"),
formatter: new ExpressionTemplate(fileOutputTemplate.Replace("PAD", padding.ToString())),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true
);
}
}
Log.Logger = logConfig
.MinimumLevel.Is(level)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft.Extensions.Http", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.WriteTo.Console(new ExpressionTemplate(consoleOutputTemplate.Replace("PAD", padding.ToString())))
.Enrich.FromLogContext()
.Enrich.WithProperty("ApplicationName", "cleanuperr")
.CreateLogger();
return builder
.ClearProviders()
.AddSerilog();
}
}

View File

@@ -1,51 +0,0 @@
using System.Net;
using Common.Configuration;
using Common.Configuration.ContentBlocker;
using Executable.Jobs;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Infrastructure.Verticals.QueueCleaner;
namespace Executable.DependencyInjection;
public static class MainDI
{
public static IServiceCollection AddInfrastructure(this IServiceCollection services, IConfiguration configuration) =>
services
.AddLogging(builder => builder.ClearProviders().AddConsole())
.AddHttpClients()
.AddConfiguration(configuration)
.AddMemoryCache()
.AddServices()
.AddQuartzServices(configuration);
private static IServiceCollection AddHttpClients(this IServiceCollection services)
{
// add default HttpClient
services.AddHttpClient();
// add Deluge HttpClient
services
.AddHttpClient(nameof(DelugeService), x =>
{
x.Timeout = TimeSpan.FromSeconds(5);
})
.ConfigurePrimaryHttpMessageHandler(_ =>
{
return new HttpClientHandler
{
AllowAutoRedirect = true,
UseCookies = true,
CookieContainer = new CookieContainer(),
AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate,
ServerCertificateCustomValidationCallback = (_, _, _, _) => true
};
});
return services;
}
}

View File

@@ -1,128 +0,0 @@
using Common.Configuration;
using Common.Configuration.ContentBlocker;
using Common.Configuration.QueueCleaner;
using Executable.Jobs;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.QueueCleaner;
using Quartz;
using Quartz.Spi;
namespace Executable.DependencyInjection;
public static class QuartzDI
{
public static IServiceCollection AddQuartzServices(this IServiceCollection services, IConfiguration configuration) =>
services
.AddQuartz(q =>
{
TriggersConfig? config = configuration
.GetRequiredSection(TriggersConfig.SectionName)
.Get<TriggersConfig>();
if (config is null)
{
throw new NullReferenceException("triggers configuration is null");
}
q.AddJobs(configuration, config);
})
.AddQuartzHostedService(opt =>
{
opt.WaitForJobsToComplete = true;
});
private static void AddJobs(
this IServiceCollectionQuartzConfigurator q,
IConfiguration configuration,
TriggersConfig triggersConfig
)
{
ContentBlockerConfig? contentBlockerConfig = configuration
.GetRequiredSection(ContentBlockerConfig.SectionName)
.Get<ContentBlockerConfig>();
q.AddJob<ContentBlocker>(contentBlockerConfig, triggersConfig.ContentBlocker);
QueueCleanerConfig? queueCleanerConfig = configuration
.GetRequiredSection(QueueCleanerConfig.SectionName)
.Get<QueueCleanerConfig>();
if (contentBlockerConfig?.Enabled is true && queueCleanerConfig is { Enabled: true, RunSequentially: true })
{
q.AddJob<QueueCleaner>(queueCleanerConfig, string.Empty);
q.AddJobListener(new JobChainingListener(nameof(QueueCleaner)));
}
else
{
q.AddJob<QueueCleaner>(queueCleanerConfig, triggersConfig.QueueCleaner);
}
}
private static void AddJob<T>(
this IServiceCollectionQuartzConfigurator q,
IJobConfig? config,
string trigger
) where T: GenericHandler
{
string typeName = typeof(T).Name;
if (config is null)
{
throw new NullReferenceException($"{typeName} configuration is null");
}
if (!config.Enabled)
{
return;
}
bool hasTrigger = trigger.Length > 0;
q.AddJob<GenericJob<T>>(opts =>
{
opts.WithIdentity(typeName);
if (!hasTrigger)
{
// jobs with no triggers need to be stored durably
opts.StoreDurably();
}
});
// skip empty triggers
if (!hasTrigger)
{
return;
}
var triggerObj = (IOperableTrigger)TriggerBuilder.Create()
.WithIdentity("ExampleTrigger")
.StartNow()
.WithCronSchedule(trigger)
.Build();
var nextFireTimes = TriggerUtils.ComputeFireTimes(triggerObj, null, 2);
if (nextFireTimes[1] - nextFireTimes[0] > TimeSpan.FromHours(1))
{
throw new Exception($"{trigger} should have a fire time of maximum 1 hour");
}
q.AddTrigger(opts =>
{
opts.ForJob(typeName)
.WithIdentity($"{typeName}-trigger")
.WithCronSchedule(trigger, x => x.WithMisfireHandlingInstructionDoNothing())
.StartNow();
});
// Startup trigger
q.AddTrigger(opts =>
{
opts.ForJob(typeName)
.WithIdentity($"{typeName}-startup-trigger")
.StartNow();
});
}
}

View File

@@ -1,28 +0,0 @@
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.QueueCleaner;
namespace Executable.DependencyInjection;
public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddTransient<SonarrClient>()
.AddTransient<RadarrClient>()
.AddTransient<QueueCleaner>()
.AddTransient<ContentBlocker>()
.AddTransient<FilenameEvaluator>()
.AddTransient<QBitService>()
.AddTransient<DelugeService>()
.AddTransient<TransmissionService>()
.AddTransient<ArrQueueIterator>()
.AddTransient<DownloadServiceFactory>()
.AddSingleton<BlocklistProvider>()
.AddSingleton<Striker>();
}

View File

@@ -1,30 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Worker">
<PropertyGroup>
<AssemblyName>cleanuperr</AssemblyName>
<TargetFramework>net9.0</TargetFramework>
<Version Condition="'$(Version)' == ''">0.0.1</Version>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>dotnet-Executable-6108b2ba-f035-47bc-addf-aaf5e20da4b8</UserSecretsId>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="Quartz" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.13.1" />
<PackageReference Include="Serilog" Version="4.2.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj" />
<ProjectReference Include="..\Infrastructure\Infrastructure.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,21 +0,0 @@
using System.Reflection;
using Executable.DependencyInjection;
var builder = Host.CreateApplicationBuilder(args);
builder.Services.AddInfrastructure(builder.Configuration);
builder.Logging.AddLogging(builder.Configuration);
var host = builder.Build();
var logger = host.Services.GetRequiredService<ILogger<Program>>();
var version = Assembly.GetExecutingAssembly().GetName().Version;
logger.LogInformation(
version is null
? "cleanuperr version not detected"
: $"cleanuperr v{version.Major}.{version.Minor}.{version.Build}"
);
host.Run();

View File

@@ -1,65 +0,0 @@
{
"Logging": {
"LogLevel": "Debug",
"Enhanced": true,
"File": {
"Enabled": false,
"Path": ""
}
},
"Triggers": {
"QueueCleaner": "0/10 * * * * ?",
"ContentBlocker": "0/10 * * * * ?"
},
"ContentBlocker": {
"Enabled": true,
"Blacklist": {
"Enabled": false,
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Whitelist": {
"Enabled": false,
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/whitelist"
}
},
"QueueCleaner": {
"Enabled": true,
"RunSequentially": true,
"IMPORT_FAILED_MAX_STRIKES": 5,
"STALLED_MAX_STRIKES": 5
},
"DOWNLOAD_CLIENT": "qbittorrent",
"qBittorrent": {
"Url": "http://localhost:8080",
"Username": "test",
"Password": "testing"
},
"Deluge": {
"Url": "http://localhost:8112",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": true,
"SearchType": "Episode",
"Instances": [
{
"Url": "http://localhost:8989",
"ApiKey": "96736c3eb3144936b8f1d62d27be8cee"
}
]
},
"Radarr": {
"Enabled": true,
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": "705b553732ab4167ab23909305d60600"
}
]
}
}

View File

@@ -1,65 +0,0 @@
{
"Logging": {
"LogLevel": "Information",
"Enhanced": true,
"File": {
"Enabled": false,
"Path": ""
}
},
"Triggers": {
"QueueCleaner": "0 0/5 * * * ?",
"ContentBlocker": "0 0/5 * * * ?"
},
"ContentBlocker": {
"Enabled": false,
"Blacklist": {
"Enabled": false,
"Path": ""
},
"Whitelist": {
"Enabled": false,
"Path": ""
}
},
"QueueCleaner": {
"Enabled": true,
"RunSequentially": true,
"IMPORT_FAILED_MAX_STRIKES": 5,
"STALLED_MAX_STRIKES": 5
},
"DOWNLOAD_CLIENT": "qbittorrent",
"qBittorrent": {
"Url": "http://localhost:8080",
"Username": "",
"Password": ""
},
"Deluge": {
"Url": "http://localhost:8112",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": true,
"SearchType": "Episode",
"Instances": [
{
"Url": "http://localhost:8989",
"ApiKey": ""
}
]
},
"Radarr": {
"Enabled": false,
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": ""
}
]
}
}

View File

@@ -1,22 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj" />
<ProjectReference Include="..\Domain\Domain.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FLM.Transmission" Version="1.0.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.0" />
<PackageReference Include="QBittorrent.Client" Version="1.9.24285.1" />
<PackageReference Include="Quartz" Version="3.13.1" />
</ItemGroup>
</Project>

View File

@@ -1,137 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
namespace Infrastructure.Verticals.Arr;
public abstract class ArrClient
{
protected readonly ILogger<ArrClient> _logger;
protected readonly HttpClient _httpClient;
protected readonly LoggingConfig _loggingConfig;
protected readonly QueueCleanerConfig _queueCleanerConfig;
protected readonly Striker _striker;
protected ArrClient(
ILogger<ArrClient> logger,
IHttpClientFactory httpClientFactory,
IOptions<LoggingConfig> loggingConfig,
IOptions<QueueCleanerConfig> queueCleanerConfig,
Striker striker
)
{
_logger = logger;
_striker = striker;
_httpClient = httpClientFactory.CreateClient();
_loggingConfig = loggingConfig.Value;
_queueCleanerConfig = queueCleanerConfig.Value;
}
public virtual async Task<QueueListResponse> GetQueueItemsAsync(ArrInstance arrInstance, int page)
{
Uri uri = new(arrInstance.Url, GetQueueUrlPath(page));
using HttpRequestMessage request = new(HttpMethod.Get, uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
try
{
response.EnsureSuccessStatusCode();
}
catch
{
_logger.LogError("queue list failed | {uri}", uri);
throw;
}
string responseBody = await response.Content.ReadAsStringAsync();
QueueListResponse? queueResponse = JsonConvert.DeserializeObject<QueueListResponse>(responseBody);
if (queueResponse is null)
{
throw new Exception($"unrecognized queue list response | {uri} | {responseBody}");
}
return queueResponse;
}
public virtual bool ShouldRemoveFromQueue(QueueRecord record)
{
bool hasWarn() => record.TrackedDownloadStatus
.Equals("warning", StringComparison.InvariantCultureIgnoreCase);
bool isImportBlocked() => record.TrackedDownloadState
.Equals("importBlocked", StringComparison.InvariantCultureIgnoreCase);
bool isImportPending() => record.TrackedDownloadState
.Equals("importPending", StringComparison.InvariantCultureIgnoreCase);
if (hasWarn() && (isImportBlocked() || isImportPending()))
{
return _striker.StrikeAndCheckLimit(
record.DownloadId,
record.Title,
_queueCleanerConfig.ImportFailedMaxStrikes,
StrikeType.ImportFailed
);
}
return false;
}
public virtual async Task DeleteQueueItemAsync(ArrInstance arrInstance, QueueRecord queueRecord)
{
Uri uri = new(arrInstance.Url, $"/api/v3/queue/{queueRecord.Id}?removeFromClient=true&blocklist=true&skipRedownload=true&changeCategory=false");
using HttpRequestMessage request = new(HttpMethod.Delete, uri);
SetApiKey(request, arrInstance.ApiKey);
using HttpResponseMessage response = await _httpClient.SendAsync(request);
try
{
response.EnsureSuccessStatusCode();
_logger.LogInformation("queue item deleted | {url} | {title}", arrInstance.Url, queueRecord.Title);
}
catch
{
_logger.LogError("queue delete failed | {uri} | {title}", uri, queueRecord.Title);
throw;
}
}
public abstract Task RefreshItemsAsync(ArrInstance arrInstance, ArrConfig config, HashSet<SearchItem>? items);
public virtual bool IsRecordValid(QueueRecord record)
{
if (string.IsNullOrEmpty(record.DownloadId))
{
_logger.LogDebug("skip | download id is null for {title}", record.Title);
return false;
}
if (record.DownloadId.Equals(record.Title, StringComparison.InvariantCultureIgnoreCase))
{
_logger.LogDebug("skip | item is not ready yet | {title}", record.Title);
return false;
}
return true;
}
protected abstract string GetQueueUrlPath(int page);
protected virtual void SetApiKey(HttpRequestMessage request, string apiKey)
{
request.Headers.Add("x-api-key", apiKey);
}
}

View File

@@ -1,134 +0,0 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Domain.Enums;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class BlocklistProvider
{
private readonly ILogger<BlocklistProvider> _logger;
private readonly ContentBlockerConfig _config;
private readonly HttpClient _httpClient;
public BlocklistType BlocklistType { get; }
public ConcurrentBag<string> Patterns { get; } = [];
public ConcurrentBag<Regex> Regexes { get; } = [];
public BlocklistProvider(
ILogger<BlocklistProvider> logger,
IOptions<ContentBlockerConfig> config,
IHttpClientFactory httpClientFactory)
{
_logger = logger;
_config = config.Value;
_httpClient = httpClientFactory.CreateClient();
_config.Validate();
if (_config.Blacklist?.Enabled is true)
{
BlocklistType = BlocklistType.Blacklist;
}
if (_config.Whitelist?.Enabled is true)
{
BlocklistType = BlocklistType.Whitelist;
}
}
public async Task LoadBlocklistAsync()
{
if (Patterns.Count > 0 || Regexes.Count > 0)
{
_logger.LogDebug("blocklist already loaded");
return;
}
try
{
await LoadPatternsAndRegexesAsync();
}
catch
{
_logger.LogError("failed to load {type}", BlocklistType.ToString());
throw;
}
}
private async Task LoadPatternsAndRegexesAsync()
{
string[] patterns;
if (BlocklistType is BlocklistType.Blacklist)
{
patterns = await ReadContentAsync(_config.Blacklist.Path);
}
else
{
patterns = await ReadContentAsync(_config.Whitelist.Path);
}
long startTime = Stopwatch.GetTimestamp();
ParallelOptions options = new() { MaxDegreeOfParallelism = 5 };
const string regexId = "regex:";
Parallel.ForEach(patterns, options, pattern =>
{
if (!pattern.StartsWith(regexId))
{
Patterns.Add(pattern);
return;
}
pattern = pattern[regexId.Length..];
try
{
Regex regex = new(pattern, RegexOptions.Compiled);
Regexes.Add(regex);
}
catch (ArgumentException)
{
_logger.LogWarning("invalid regex | {pattern}", pattern);
}
});
TimeSpan elapsed = Stopwatch.GetElapsedTime(startTime);
_logger.LogDebug("loaded {count} patterns", Patterns.Count);
_logger.LogDebug("loaded {count} regexes", Regexes.Count);
_logger.LogDebug("blocklist loaded in {elapsed} ms", elapsed.TotalMilliseconds);
}
private async Task<string[]> ReadContentAsync(string path)
{
if (Uri.TryCreate(path, UriKind.Absolute, out var uri) && (uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps))
{
// http(s) url
return await ReadFromUrlAsync(path);
}
if (File.Exists(path))
{
// local file path
return await File.ReadAllLinesAsync(path);
}
throw new ArgumentException($"blocklist not found | {path}");
}
private async Task<string[]> ReadFromUrlAsync(string url)
{
using HttpResponseMessage response = await _httpClient.GetAsync(url);
response.EnsureSuccessStatusCode();
return (await response.Content.ReadAsStringAsync())
.Split(['\r','\n'], StringSplitOptions.RemoveEmptyEntries);
}
}
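For reference, the provider reads one entry per line, drops empty lines, and treats a regex: prefix as a compiled regular expression; everything else is stored as a plain pattern and later matched by FilenameEvaluator (next file). A hypothetical blocklist showing both forms; the entries are made up, not taken from the real blacklist or whitelist files:

*.lnk
*sample*
password.txt
regex:.*\.(zipx?|arj)$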

View File

@@ -1,61 +0,0 @@
using Common.Configuration;
using Common.Configuration.Arr;
using Domain.Enums;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Jobs;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class ContentBlocker : GenericHandler
{
private readonly BlocklistProvider _blocklistProvider;
public ContentBlocker(
ILogger<ContentBlocker> logger,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
SonarrClient sonarrClient,
RadarrClient radarrClient,
ArrQueueIterator arrArrQueueIterator,
BlocklistProvider blocklistProvider,
DownloadServiceFactory downloadServiceFactory
) : base(logger, sonarrConfig.Value, radarrConfig.Value, sonarrClient, radarrClient, arrArrQueueIterator, downloadServiceFactory)
{
_blocklistProvider = blocklistProvider;
}
public override async Task ExecuteAsync()
{
await _blocklistProvider.LoadBlocklistAsync();
await base.ExecuteAsync();
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
ArrClient arrClient = GetClient(instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
foreach (QueueRecord record in items)
{
if (record.Protocol is not "torrent")
{
continue;
}
if (string.IsNullOrEmpty(record.DownloadId))
{
_logger.LogDebug("skip | download id is null for {title}", record.Title);
continue;
}
_logger.LogDebug("searching unwanted files for {title}", record.Title);
await _downloadService.BlockUnwantedFilesAsync(record.DownloadId);
}
});
}
}

View File

@@ -1,81 +0,0 @@
using Domain.Enums;
using Microsoft.Extensions.Logging;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class FilenameEvaluator
{
private readonly ILogger<FilenameEvaluator> _logger;
private readonly BlocklistProvider _blocklistProvider;
public FilenameEvaluator(ILogger<FilenameEvaluator> logger, BlocklistProvider blocklistProvider)
{
_logger = logger;
_blocklistProvider = blocklistProvider;
}
// TODO create unit tests
public bool IsValid(string filename)
{
return IsValidAgainstPatterns(filename) && IsValidAgainstRegexes(filename);
}
private bool IsValidAgainstPatterns(string filename)
{
if (_blocklistProvider.Patterns.Count is 0)
{
return true;
}
return _blocklistProvider.BlocklistType switch
{
BlocklistType.Blacklist => !_blocklistProvider.Patterns.Any(pattern => MatchesPattern(filename, pattern)),
BlocklistType.Whitelist => _blocklistProvider.Patterns.Any(pattern => MatchesPattern(filename, pattern)),
_ => true
};
}
private bool IsValidAgainstRegexes(string filename)
{
if (_blocklistProvider.Regexes.Count is 0)
{
return true;
}
return _blocklistProvider.BlocklistType switch
{
BlocklistType.Blacklist => !_blocklistProvider.Regexes.Any(regex => regex.IsMatch(filename)),
BlocklistType.Whitelist => _blocklistProvider.Regexes.Any(regex => regex.IsMatch(filename)),
_ => true
};
}
private static bool MatchesPattern(string filename, string pattern)
{
bool hasStartWildcard = pattern.StartsWith('*');
bool hasEndWildcard = pattern.EndsWith('*');
if (hasStartWildcard && hasEndWildcard)
{
return filename.Contains(
pattern.Substring(1, pattern.Length - 2),
StringComparison.InvariantCultureIgnoreCase
);
}
if (hasStartWildcard)
{
return filename.EndsWith(pattern.Substring(1), StringComparison.InvariantCultureIgnoreCase);
}
if (hasEndWildcard)
{
return filename.StartsWith(
pattern.Substring(0, pattern.Length - 1),
StringComparison.InvariantCultureIgnoreCase
);
}
return filename == pattern;
}
}
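MatchesPattern only understands a leading and/or trailing *; anything else falls back to an exact comparison, which, unlike the wildcard branches, is case-sensitive. A small standalone sketch that copies that private method to show the four cases; the filenames and patterns are hypothetical:

using System;

// copy of the private MatchesPattern above, extracted for a quick demonstration
static bool MatchesPattern(string filename, string pattern)
{
    bool hasStartWildcard = pattern.StartsWith('*');
    bool hasEndWildcard = pattern.EndsWith('*');
    if (hasStartWildcard && hasEndWildcard)
        return filename.Contains(pattern.Substring(1, pattern.Length - 2), StringComparison.InvariantCultureIgnoreCase);
    if (hasStartWildcard)
        return filename.EndsWith(pattern.Substring(1), StringComparison.InvariantCultureIgnoreCase);
    if (hasEndWildcard)
        return filename.StartsWith(pattern.Substring(0, pattern.Length - 1), StringComparison.InvariantCultureIgnoreCase);
    return filename == pattern;
}

Console.WriteLine(MatchesPattern("Movie.Sample.mkv", "*sample*")); // True  - contains "sample"
Console.WriteLine(MatchesPattern("shortcut.lnk", "*.lnk"));        // True  - ends with ".lnk"
Console.WriteLine(MatchesPattern("setup.exe", "setup*"));          // True  - starts with "setup"
Console.WriteLine(MatchesPattern("a.iso", "A.ISO"));               // False - exact branch is case-sensitive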

View File

@@ -1,173 +0,0 @@
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Domain.Models.Deluge.Response;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient.Deluge;
public sealed class DelugeService : DownloadServiceBase
{
private readonly DelugeClient _client;
public DelugeService(
ILogger<DelugeService> logger,
IOptions<DelugeConfig> config,
IHttpClientFactory httpClientFactory,
IOptions<QueueCleanerConfig> queueCleanerConfig,
FilenameEvaluator filenameEvaluator,
Striker striker
) : base(logger, queueCleanerConfig, filenameEvaluator, striker)
{
config.Value.Validate();
_client = new (config, httpClientFactory);
}
public override async Task LoginAsync()
{
await _client.LoginAsync();
}
public override async Task<bool> ShouldRemoveFromArrQueueAsync(string hash)
{
hash = hash.ToLowerInvariant();
DelugeContents? contents = null;
TorrentStatus? status = await GetTorrentStatus(hash);
if (status?.Hash is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return false;
}
try
{
contents = await _client.GetTorrentFiles(hash);
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to find torrent {hash} in the download client", hash);
}
bool shouldRemove = contents?.Contents?.Count > 0;
if (contents?.Contents is not null)
{
ProcessFiles(contents.Contents, (_, file) =>
{
if (file.Priority > 0)
{
shouldRemove = false;
}
});
}
return shouldRemove || IsItemStuckAndShouldRemove(status);
}
public override async Task BlockUnwantedFilesAsync(string hash)
{
hash = hash.ToLowerInvariant();
TorrentStatus? status = await GetTorrentStatus(hash);
if (status?.Hash is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return;
}
DelugeContents? contents = null;
try
{
contents = await _client.GetTorrentFiles(hash);
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to find torrent {hash} in the download client", hash);
}
if (contents is null)
{
return;
}
Dictionary<int, int> priorities = [];
bool hasPriorityUpdates = false;
ProcessFiles(contents.Contents, (name, file) =>
{
int priority = file.Priority;
if (file.Priority is not 0 && !_filenameEvaluator.IsValid(name))
{
priority = 0;
hasPriorityUpdates = true;
_logger.LogInformation("unwanted file found | {file}", file.Path);
}
priorities.Add(file.Index, priority);
});
if (!hasPriorityUpdates)
{
return;
}
_logger.LogDebug("changing priorities | torrent {hash}", hash);
List<int> sortedPriorities = priorities
.OrderBy(x => x.Key)
.Select(x => x.Value)
.ToList();
await _client.ChangeFilesPriority(hash, sortedPriorities);
}
private bool IsItemStuckAndShouldRemove(TorrentStatus status)
{
if (status.State is null || !status.State.Equals("Downloading", StringComparison.InvariantCultureIgnoreCase))
{
return false;
}
if (status.Eta > 0)
{
return false;
}
return StrikeAndCheckLimit(status.Hash!, status.Name!);
}
private async Task<TorrentStatus?> GetTorrentStatus(string hash)
{
return await _client.SendRequest<TorrentStatus?>(
"web.get_torrent_status",
hash,
new[] { "hash", "state", "name", "eta" }
);
}
private static void ProcessFiles(Dictionary<string, DelugeFileOrDirectory> contents, Action<string, DelugeFileOrDirectory> processFile)
{
foreach (var (name, data) in contents)
{
switch (data.Type)
{
case "file":
processFile(name, data);
break;
case "dir" when data.Contents is not null:
// Recurse into subdirectories
ProcessFiles(data.Contents, processFile);
break;
}
}
}
public override void Dispose()
{
}
}

View File

@@ -1,42 +0,0 @@
using Common.Configuration.QueueCleaner;
using Domain.Enums;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient;
public abstract class DownloadServiceBase : IDownloadService
{
protected readonly ILogger<DownloadServiceBase> _logger;
protected readonly QueueCleanerConfig _queueCleanerConfig;
protected readonly FilenameEvaluator _filenameEvaluator;
protected readonly Striker _striker;
protected DownloadServiceBase(
ILogger<DownloadServiceBase> logger,
IOptions<QueueCleanerConfig> queueCleanerConfig,
FilenameEvaluator filenameEvaluator,
Striker striker
)
{
_logger = logger;
_queueCleanerConfig = queueCleanerConfig.Value;
_filenameEvaluator = filenameEvaluator;
_striker = striker;
}
public abstract void Dispose();
public abstract Task LoginAsync();
public abstract Task<bool> ShouldRemoveFromArrQueueAsync(string hash);
public abstract Task BlockUnwantedFilesAsync(string hash);
protected bool StrikeAndCheckLimit(string hash, string itemName)
{
return _striker.StrikeAndCheckLimit(hash, itemName, _queueCleanerConfig.StalledMaxStrikes, StrikeType.Stalled);
}
}

View File

@@ -1,35 +0,0 @@
using Common.Configuration;
using Common.Configuration.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient;
public sealed class DownloadServiceFactory
{
private readonly IServiceProvider _serviceProvider;
private readonly Domain.Enums.DownloadClient _downloadClient;
public DownloadServiceFactory(IServiceProvider serviceProvider, IConfiguration configuration)
{
_serviceProvider = serviceProvider;
_downloadClient = (Domain.Enums.DownloadClient)Enum.Parse(
typeof(Domain.Enums.DownloadClient),
configuration[EnvironmentVariables.DownloadClient] ?? Domain.Enums.DownloadClient.QBittorrent.ToString(),
true
);
}
public IDownloadService CreateDownloadClient() =>
_downloadClient switch
{
Domain.Enums.DownloadClient.QBittorrent => _serviceProvider.GetRequiredService<QBitService>(),
Domain.Enums.DownloadClient.Deluge => _serviceProvider.GetRequiredService<DelugeService>(),
Domain.Enums.DownloadClient.Transmission => _serviceProvider.GetRequiredService<TransmissionService>(),
_ => throw new ArgumentOutOfRangeException()
};
}

View File

@@ -1,10 +0,0 @@
namespace Infrastructure.Verticals.DownloadClient;
public interface IDownloadService : IDisposable
{
public Task LoginAsync();
public Task<bool> ShouldRemoveFromArrQueueAsync(string hash);
public Task BlockUnwantedFilesAsync(string hash);
}

View File

@@ -1,109 +0,0 @@
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using QBittorrent.Client;
namespace Infrastructure.Verticals.DownloadClient.QBittorrent;
public sealed class QBitService : DownloadServiceBase
{
private readonly QBitConfig _config;
private readonly QBittorrentClient _client;
public QBitService(
ILogger<QBitService> logger,
IOptions<QBitConfig> config,
IOptions<QueueCleanerConfig> queueCleanerConfig,
FilenameEvaluator filenameEvaluator,
Striker striker
) : base(logger, queueCleanerConfig, filenameEvaluator, striker)
{
_config = config.Value;
_config.Validate();
_client = new(_config.Url);
}
public override async Task LoginAsync()
{
if (string.IsNullOrEmpty(_config.Username) && string.IsNullOrEmpty(_config.Password))
{
return;
}
await _client.LoginAsync(_config.Username, _config.Password);
}
public override async Task<bool> ShouldRemoveFromArrQueueAsync(string hash)
{
TorrentInfo? torrent = (await _client.GetTorrentListAsync(new TorrentListQuery { Hashes = [hash] }))
.FirstOrDefault();
if (torrent is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return false;
}
// if all files were blocked by qBittorrent
if (torrent is { CompletionOn: not null, Downloaded: null or 0 })
{
return true;
}
IReadOnlyList<TorrentContent>? files = await _client.GetTorrentContentsAsync(hash);
// if all files are marked as skip
if (files?.Count is > 0 && files.All(x => x.Priority is TorrentContentPriority.Skip))
{
return true;
}
return IsItemStuckAndShouldRemove(torrent);
}
public override async Task BlockUnwantedFilesAsync(string hash)
{
IReadOnlyList<TorrentContent>? files = await _client.GetTorrentContentsAsync(hash);
if (files is null)
{
return;
}
foreach (TorrentContent file in files)
{
if (!file.Index.HasValue)
{
continue;
}
if (file.Priority is TorrentContentPriority.Skip || _filenameEvaluator.IsValid(file.Name))
{
continue;
}
_logger.LogInformation("unwanted file found | {file}", file.Name);
await _client.SetFilePriorityAsync(hash, file.Index.Value, TorrentContentPriority.Skip);
}
}
public override void Dispose()
{
_client.Dispose();
}
private bool IsItemStuckAndShouldRemove(TorrentInfo torrent)
{
if (torrent.State is not TorrentState.StalledDownload and not TorrentState.FetchingMetadata
and not TorrentState.ForcedFetchingMetadata)
{
// ignore other states
return false;
}
return StrikeAndCheckLimit(torrent.Hash, torrent.Name);
}
}

View File

@@ -1,169 +0,0 @@
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.ItemStriker;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Transmission.API.RPC;
using Transmission.API.RPC.Arguments;
using Transmission.API.RPC.Entity;
namespace Infrastructure.Verticals.DownloadClient.Transmission;
public sealed class TransmissionService : DownloadServiceBase
{
private readonly TransmissionConfig _config;
private readonly Client _client;
private TorrentInfo[]? _torrentsCache;
public TransmissionService(
ILogger<TransmissionService> logger,
IOptions<TransmissionConfig> config,
IOptions<QueueCleanerConfig> queueCleanerConfig,
FilenameEvaluator filenameEvaluator,
Striker striker
) : base(logger, queueCleanerConfig, filenameEvaluator, striker)
{
_config = config.Value;
_config.Validate();
_client = new(
new Uri(_config.Url, "/transmission/rpc").ToString(),
login: _config.Username,
password: _config.Password
);
}
public override async Task LoginAsync()
{
await _client.GetSessionInformationAsync();
}
public override async Task<bool> ShouldRemoveFromArrQueueAsync(string hash)
{
TorrentInfo? torrent = await GetTorrentAsync(hash);
if (torrent is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return false;
}
bool shouldRemove = torrent.FileStats?.Length > 0;
foreach (TransmissionTorrentFileStats? stats in torrent.FileStats ?? [])
{
if (!stats.Wanted.HasValue)
{
// if any file's stats are missing, do not remove
shouldRemove = false;
}
if (stats.Wanted.HasValue && stats.Wanted.Value)
{
// if any files are wanted, do not remove
shouldRemove = false;
}
}
// remove if all files are unwanted
return shouldRemove || IsItemStuckAndShouldRemove(torrent);
}
public override async Task BlockUnwantedFilesAsync(string hash)
{
TorrentInfo? torrent = await GetTorrentAsync(hash);
if (torrent?.FileStats is null || torrent.Files is null)
{
return;
}
List<long> unwantedFiles = [];
for (int i = 0; i < torrent.Files.Length; i++)
{
if (torrent.FileStats?[i].Wanted == null)
{
continue;
}
if (!torrent.FileStats[i].Wanted.Value || _filenameEvaluator.IsValid(torrent.Files[i].Name))
{
continue;
}
_logger.LogInformation("unwanted file found | {file}", torrent.Files[i].Name);
unwantedFiles.Add(i);
}
if (unwantedFiles.Count is 0)
{
return;
}
_logger.LogDebug("changing priorities | torrent {hash}", hash);
await _client.TorrentSetAsync(new TorrentSettings
{
Ids = [ torrent.Id ],
FilesUnwanted = unwantedFiles.ToArray(),
});
}
public override void Dispose()
{
}
private bool IsItemStuckAndShouldRemove(TorrentInfo torrent)
{
if (torrent.Status is not 4)
{
// not in downloading state
return false;
}
if (torrent.Eta > 0)
{
return false;
}
return StrikeAndCheckLimit(torrent.HashString!, torrent.Name!);
}
private async Task<TorrentInfo?> GetTorrentAsync(string hash)
{
TorrentInfo? torrent = _torrentsCache?
.FirstOrDefault(x => x.HashString.Equals(hash, StringComparison.InvariantCultureIgnoreCase));
if (_torrentsCache is null || torrent is null)
{
string[] fields = [
TorrentFields.FILES,
TorrentFields.FILE_STATS,
TorrentFields.HASH_STRING,
TorrentFields.ID,
TorrentFields.ETA,
TorrentFields.NAME,
TorrentFields.STATUS
];
// refresh cache
_torrentsCache = (await _client.TorrentGetAsync(fields))
?.Torrents;
}
if (_torrentsCache?.Length is null or 0)
{
_logger.LogDebug("could not list torrents | {url}", _config.Url);
}
torrent = _torrentsCache?.FirstOrDefault(x => x.HashString.Equals(hash, StringComparison.InvariantCultureIgnoreCase));
if (torrent is null)
{
_logger.LogDebug("could not find torrent | {hash} | {url}", hash, _config.Url);
}
return torrent;
}
}

View File

@@ -1,125 +0,0 @@
using Common.Configuration.Arr;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.DownloadClient;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
namespace Infrastructure.Verticals.Jobs;
public abstract class GenericHandler : IDisposable
{
protected readonly ILogger<GenericHandler> _logger;
protected readonly SonarrConfig _sonarrConfig;
protected readonly RadarrConfig _radarrConfig;
protected readonly SonarrClient _sonarrClient;
protected readonly RadarrClient _radarrClient;
protected readonly ArrQueueIterator _arrArrQueueIterator;
protected readonly IDownloadService _downloadService;
protected GenericHandler(
ILogger<GenericHandler> logger,
SonarrConfig sonarrConfig,
RadarrConfig radarrConfig,
SonarrClient sonarrClient,
RadarrClient radarrClient,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory
)
{
_logger = logger;
_sonarrConfig = sonarrConfig;
_radarrConfig = radarrConfig;
_sonarrClient = sonarrClient;
_radarrClient = radarrClient;
_arrArrQueueIterator = arrArrQueueIterator;
_downloadService = downloadServiceFactory.CreateDownloadClient();
}
public virtual async Task ExecuteAsync()
{
await _downloadService.LoginAsync();
await ProcessArrConfigAsync(_sonarrConfig, InstanceType.Sonarr);
await ProcessArrConfigAsync(_radarrConfig, InstanceType.Radarr);
}
public virtual void Dispose()
{
_downloadService.Dispose();
}
protected abstract Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType);
private async Task ProcessArrConfigAsync(ArrConfig config, InstanceType instanceType)
{
if (!config.Enabled)
{
return;
}
foreach (ArrInstance arrInstance in config.Instances)
{
try
{
await ProcessInstanceAsync(arrInstance, instanceType);
}
catch (Exception exception)
{
_logger.LogError(exception, "failed to clean {type} instance | {url}", instanceType, arrInstance.Url);
}
}
}
protected ArrClient GetClient(InstanceType type) =>
type switch
{
InstanceType.Sonarr => _sonarrClient,
InstanceType.Radarr => _radarrClient,
_ => throw new NotImplementedException($"instance type {type} is not yet supported")
};
protected ArrConfig GetConfig(InstanceType type) =>
type switch
{
InstanceType.Sonarr => _sonarrConfig,
InstanceType.Radarr => _radarrConfig,
_ => throw new NotImplementedException($"instance type {type} is not yet supported")
};
protected SearchItem GetRecordSearchItem(InstanceType type, QueueRecord record, bool isPack = false)
{
return type switch
{
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Episode && !isPack => new SonarrSearchItem
{
Id = record.EpisodeId,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Episode
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Episode && isPack => new SonarrSearchItem
{
Id = record.SeasonNumber,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Season
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Season => new SonarrSearchItem
{
Id = record.SeasonNumber,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Series
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Series => new SonarrSearchItem
{
Id = record.SeriesId,
},
InstanceType.Radarr => new SearchItem
{
Id = record.MovieId,
},
_ => throw new NotImplementedException($"instance type {type} is not yet supported")
};
}
}

View File

@@ -1,73 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Jobs;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.QueueCleaner;
public sealed class QueueCleaner : GenericHandler
{
public QueueCleaner(
ILogger<QueueCleaner> logger,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
SonarrClient sonarrClient,
RadarrClient radarrClient,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory
) : base(logger, sonarrConfig.Value, radarrConfig.Value, sonarrClient, radarrClient, arrArrQueueIterator, downloadServiceFactory)
{
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
HashSet<SearchItem> itemsToBeRefreshed = [];
ArrClient arrClient = GetClient(instanceType);
ArrConfig arrConfig = GetConfig(instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.GroupBy(x => x.DownloadId)
.ToList();
foreach (var group in groups)
{
if (group.Any(x => !arrClient.IsRecordValid(x)))
{
continue;
}
QueueRecord record = group.First();
if (record.Protocol is not "torrent")
{
continue;
}
if (!arrClient.IsRecordValid(record))
{
continue;
}
if (!arrClient.ShouldRemoveFromQueue(record) && !await _downloadService.ShouldRemoveFromArrQueueAsync(record.DownloadId))
{
_logger.LogInformation("skip | {title}", record.Title);
continue;
}
itemsToBeRefreshed.Add(GetRecordSearchItem(instanceType, record, group.Count() > 1));
await arrClient.DeleteQueueItemAsync(instance, record);
}
});
await arrClient.RefreshItemsAsync(instance, arrConfig, itemsToBeRefreshed);
}
}

code/Makefile (new file)
View File

@@ -0,0 +1,16 @@
.DEFAULT_GOAL := no-default
no-default:
$(error You must specify a make target)
migrate-data:
ifndef name
$(error name is required. Usage: make migrate-data name=YourMigrationName)
endif
dotnet ef migrations add $(name) --context DataContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Data
migrate-events:
ifndef name
$(error name is required. Usage: make migrate-events name=YourMigrationName)
endif
dotnet ef migrations add $(name) --context EventsContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Events
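As a usage note: running make with no target fails with the no-default error above, while a call such as make migrate-data name=AddJobsTable (migration name hypothetical) expands to dotnet ef migrations add AddJobsTable --context DataContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Data.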

View File

@@ -0,0 +1,49 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<AssemblyName>Cleanuparr</AssemblyName>
<Version Condition="'$(Version)' == ''">0.0.1</Version>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<PublishReadyToRun>true</PublishReadyToRun>
<EnableMacOSCodeSign>false</EnableMacOSCodeSign>
<CodeSignOnCopy>false</CodeSignOnCopy>
<_CodeSignDuringBuild>false</_CodeSignDuringBuild>
<CodesignDisableTimestamp>true</CodesignDisableTimestamp>
<CodesignKeychain></CodesignKeychain>
</PropertyGroup>
<ItemGroup>
<Folder Include="wwwroot\" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Application\Cleanuparr.Application.csproj" />
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.6" />
<PackageReference Include="Quartz" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<!-- API-related packages -->
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,14 @@
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api")]
public class ApiDocumentationController : ControllerBase
{
[HttpGet]
public IActionResult RedirectToSwagger()
{
return Redirect("/api/swagger");
}
}

View File

File diff suppressed because it is too large

View File

@@ -0,0 +1,240 @@
using System.Text.Json.Serialization;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class EventsController : ControllerBase
{
private readonly EventsContext _context;
public EventsController(EventsContext context)
{
_context = context;
}
/// <summary>
/// Gets events with pagination and filtering
/// </summary>
[HttpGet]
public async Task<ActionResult<PaginatedResult<AppEvent>>> GetEvents(
[FromQuery] int page = 1,
[FromQuery] int pageSize = 100,
[FromQuery] string? severity = null,
[FromQuery] string? eventType = null,
[FromQuery] DateTime? fromDate = null,
[FromQuery] DateTime? toDate = null,
[FromQuery] string? search = null)
{
// Validate pagination parameters
if (page < 1) page = 1;
if (pageSize < 1) pageSize = 100;
if (pageSize > 1000) pageSize = 1000; // Cap at 1000 for performance
var query = _context.Events.AsQueryable();
// Apply filters
if (!string.IsNullOrWhiteSpace(severity))
{
if (Enum.TryParse<EventSeverity>(severity, true, out var severityEnum))
query = query.Where(e => e.Severity == severityEnum);
}
if (!string.IsNullOrWhiteSpace(eventType))
{
if (Enum.TryParse<EventType>(eventType, true, out var eventTypeEnum))
query = query.Where(e => e.EventType == eventTypeEnum);
}
// Apply date range filters
if (fromDate.HasValue)
{
query = query.Where(e => e.Timestamp >= fromDate.Value);
}
if (toDate.HasValue)
{
query = query.Where(e => e.Timestamp <= toDate.Value);
}
// Apply search filter if provided
if (!string.IsNullOrWhiteSpace(search))
{
string pattern = EventsContext.GetLikePattern(search);
query = query.Where(e =>
EF.Functions.Like(e.Message, pattern) ||
EF.Functions.Like(e.Data, pattern) ||
EF.Functions.Like(e.TrackingId.ToString(), pattern)
);
}
// Count total matching records for pagination
var totalCount = await query.CountAsync();
// Calculate pagination
var totalPages = (int)Math.Ceiling(totalCount / (double)pageSize);
var skip = (page - 1) * pageSize;
// Get paginated data
var events = await query
.OrderByDescending(e => e.Timestamp)
.Skip(skip)
.Take(pageSize)
.ToListAsync();
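// re-order the fetched page ascending by timestamp before returning it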
events = events
.OrderBy(e => e.Timestamp)
.ToList();
// Return paginated result
var result = new PaginatedResult<AppEvent>
{
Items = events,
Page = page,
PageSize = pageSize,
TotalCount = totalCount,
TotalPages = totalPages
};
return Ok(result);
}
/// <summary>
/// Gets a specific event by ID
/// </summary>
[HttpGet("{id}")]
public async Task<ActionResult<AppEvent>> GetEvent(Guid id)
{
var eventEntity = await _context.Events.FindAsync(id);
if (eventEntity == null)
return NotFound();
return Ok(eventEntity);
}
/// <summary>
/// Gets events by tracking ID
/// </summary>
[HttpGet("tracking/{trackingId}")]
public async Task<ActionResult<List<AppEvent>>> GetEventsByTracking(Guid trackingId)
{
var events = await _context.Events
.Where(e => e.TrackingId == trackingId)
.OrderBy(e => e.Timestamp)
.ToListAsync();
return Ok(events);
}
/// <summary>
/// Gets event statistics
/// </summary>
[HttpGet("stats")]
public async Task<ActionResult<object>> GetEventStats()
{
var stats = new
{
TotalEvents = await _context.Events.CountAsync(),
EventsBySeverity = await _context.Events
.GroupBy(e => e.Severity)
.Select(g => new { Severity = g.Key.ToString(), Count = g.Count() })
.ToListAsync(),
EventsByType = await _context.Events
.GroupBy(e => e.EventType)
.Select(g => new { EventType = g.Key.ToString(), Count = g.Count() })
.OrderByDescending(x => x.Count)
.Take(10)
.ToListAsync(),
RecentEventsCount = await _context.Events
.Where(e => e.Timestamp > DateTime.UtcNow.AddHours(-24))
.CountAsync()
};
return Ok(stats);
}
/// <summary>
/// Manually triggers cleanup of old events
/// </summary>
[HttpPost("cleanup")]
public async Task<ActionResult<object>> CleanupOldEvents([FromQuery] int retentionDays = 30)
{
var cutoffDate = DateTime.UtcNow.AddDays(-retentionDays);
await _context.Events
.Where(e => e.Timestamp < cutoffDate)
.ExecuteDeleteAsync();
return Ok();
}
/// <summary>
/// Gets unique event types
/// </summary>
[HttpGet("types")]
public async Task<ActionResult<List<string>>> GetEventTypes()
{
var types = Enum.GetNames(typeof(EventType)).ToList();
return Ok(types);
}
/// <summary>
/// Gets unique severities
/// </summary>
[HttpGet("severities")]
public async Task<ActionResult<List<string>>> GetSeverities()
{
var severities = Enum.GetNames(typeof(EventSeverity)).ToList();
return Ok(severities);
}
}
/// <summary>
/// Represents a paginated result set
/// </summary>
/// <typeparam name="T">Type of items in the result</typeparam>
public class PaginatedResult<T>
{
/// <summary>
/// The items in the current page
/// </summary>
public List<T> Items { get; set; } = new();
/// <summary>
/// Current page number (1-based)
/// </summary>
public int Page { get; set; }
/// <summary>
/// Number of items per page
/// </summary>
public int PageSize { get; set; }
/// <summary>
/// Total number of items across all pages
/// </summary>
public int TotalCount { get; set; }
/// <summary>
/// Total number of pages
/// </summary>
public int TotalPages { get; set; }
/// <summary>
/// Whether there is a previous page
/// </summary>
[JsonIgnore]
public bool HasPrevious => Page > 1;
/// <summary>
/// Whether there is a next page
/// </summary>
[JsonIgnore]
public bool HasNext => Page < TotalPages;
}
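GetEvents combines paging, enum filters, a date range, and a LIKE-style search in one query string. A hedged client-side sketch using HttpClient; the base address is a placeholder, and "Error" is assumed to be a valid EventSeverity value, since neither is defined in this controller:

using System;
using System.Net.Http;

// placeholder base address; the real host, port, and base path come from the app configuration
using var http = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };

// page 1, 50 items, Error-severity events from the last 24 hours that mention "torrent"
string fromDate = Uri.EscapeDataString(DateTime.UtcNow.AddHours(-24).ToString("O"));
string url = $"api/events?page=1&pageSize=50&severity=Error&fromDate={fromDate}&search=torrent";

using HttpResponseMessage response = await http.GetAsync(url);
response.EnsureSuccessStatusCode();
Console.WriteLine(await response.Content.ReadAsStringAsync()); // PaginatedResult<AppEvent> serialized as JSON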

View File

@@ -0,0 +1,103 @@
using Cleanuparr.Infrastructure.Health;
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
/// <summary>
/// Controller for checking the health of download clients
/// </summary>
[ApiController]
[Route("api/health")]
public class HealthCheckController : ControllerBase
{
private readonly ILogger<HealthCheckController> _logger;
private readonly IHealthCheckService _healthCheckService;
/// <summary>
/// Initializes a new instance of the <see cref="HealthCheckController"/> class
/// </summary>
public HealthCheckController(
ILogger<HealthCheckController> logger,
IHealthCheckService healthCheckService)
{
_logger = logger;
_healthCheckService = healthCheckService;
}
/// <summary>
/// Gets the health status of all download clients
/// </summary>
[HttpGet]
public IActionResult GetAllHealth()
{
try
{
var healthStatuses = _healthCheckService.GetAllClientHealth();
return Ok(healthStatuses);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving client health statuses");
return StatusCode(500, new { Error = "An error occurred while retrieving client health statuses" });
}
}
/// <summary>
/// Gets the health status of a specific download client
/// </summary>
[HttpGet("{id:guid}")]
public IActionResult GetClientHealth(Guid id)
{
try
{
var healthStatus = _healthCheckService.GetClientHealth(id);
if (healthStatus == null)
{
return NotFound(new { Message = $"Health status for client with ID '{id}' not found" });
}
return Ok(healthStatus);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving health status for client {id}", id);
return StatusCode(500, new { Error = "An error occurred while retrieving the client health status" });
}
}
/// <summary>
/// Triggers a health check for all download clients
/// </summary>
[HttpPost("check")]
public async Task<IActionResult> CheckAllHealth()
{
try
{
var results = await _healthCheckService.CheckAllClientsHealthAsync();
return Ok(results);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking health for all clients");
return StatusCode(500, new { Error = "An error occurred while checking client health" });
}
}
/// <summary>
/// Triggers a health check for a specific download client
/// </summary>
[HttpPost("check/{id:guid}")]
public async Task<IActionResult> CheckClientHealth(Guid id)
{
try
{
var result = await _healthCheckService.CheckClientHealthAsync(id);
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking health for client {id}", id);
return StatusCode(500, new { Error = "An error occurred while checking client health" });
}
}
}

View File

@@ -0,0 +1,163 @@
using Cleanuparr.Api.Models;
using Cleanuparr.Infrastructure.Models;
using Infrastructure.Services.Interfaces;
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class JobsController : ControllerBase
{
private readonly IJobManagementService _jobManagementService;
private readonly ILogger<JobsController> _logger;
public JobsController(IJobManagementService jobManagementService, ILogger<JobsController> logger)
{
_jobManagementService = jobManagementService;
_logger = logger;
}
[HttpGet]
public async Task<IActionResult> GetAllJobs()
{
try
{
var result = await _jobManagementService.GetAllJobs();
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error getting all jobs");
return StatusCode(500, "An error occurred while retrieving jobs");
}
}
[HttpGet("{jobType}")]
public async Task<IActionResult> GetJob(JobType jobType)
{
try
{
var jobInfo = await _jobManagementService.GetJob(jobType);
if (jobInfo.Status == "Not Found")
{
return NotFound($"Job '{jobType}' not found");
}
return Ok(jobInfo);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error getting job {jobType}", jobType);
return StatusCode(500, $"An error occurred while retrieving job '{jobType}'");
}
}
[HttpPost("{jobType}/start")]
public async Task<IActionResult> StartJob(JobType jobType, [FromBody] ScheduleRequest? scheduleRequest = null)
{
try
{
// Get the schedule from the request body if provided
JobSchedule? jobSchedule = scheduleRequest?.Schedule;
var result = await _jobManagementService.StartJob(jobType, jobSchedule);
if (!result)
{
return BadRequest($"Failed to start job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' started successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error starting job {jobType}", jobType);
return StatusCode(500, $"An error occurred while starting job '{jobType}'");
}
}
[HttpPost("{jobType}/stop")]
public async Task<IActionResult> StopJob(JobType jobType)
{
try
{
var result = await _jobManagementService.StopJob(jobType);
if (!result)
{
return BadRequest($"Failed to stop job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' stopped successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error stopping job {jobType}", jobType);
return StatusCode(500, $"An error occurred while stopping job '{jobType}'");
}
}
[HttpPost("{jobType}/pause")]
public async Task<IActionResult> PauseJob(JobType jobType)
{
try
{
var result = await _jobManagementService.PauseJob(jobType);
if (!result)
{
return BadRequest($"Failed to pause job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' paused successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error pausing job {jobType}", jobType);
return StatusCode(500, $"An error occurred while pausing job '{jobType}'");
}
}
[HttpPost("{jobType}/resume")]
public async Task<IActionResult> ResumeJob(JobType jobType)
{
try
{
var result = await _jobManagementService.ResumeJob(jobType);
if (!result)
{
return BadRequest($"Failed to resume job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' resumed successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error resuming job {jobType}", jobType);
return StatusCode(500, $"An error occurred while resuming job '{jobType}'");
}
}
[HttpPut("{jobType}/schedule")]
public async Task<IActionResult> UpdateJobSchedule(JobType jobType, [FromBody] ScheduleRequest scheduleRequest)
{
if (scheduleRequest?.Schedule == null)
{
return BadRequest("Schedule is required");
}
try
{
var result = await _jobManagementService.UpdateJobSchedule(jobType, scheduleRequest.Schedule);
if (!result)
{
return BadRequest($"Failed to update schedule for job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' schedule updated successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error updating job {jobType} schedule", jobType);
return StatusCode(500, $"An error occurred while updating schedule for job '{jobType}'");
}
}
}

View File

@@ -0,0 +1,270 @@
using System.Diagnostics;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Persistence;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class StatusController : ControllerBase
{
private readonly ILogger<StatusController> _logger;
private readonly DataContext _dataContext;
private readonly DownloadServiceFactory _downloadServiceFactory;
private readonly ArrClientFactory _arrClientFactory;
public StatusController(
ILogger<StatusController> logger,
DataContext dataContext,
DownloadServiceFactory downloadServiceFactory,
ArrClientFactory arrClientFactory)
{
_logger = logger;
_dataContext = dataContext;
_downloadServiceFactory = downloadServiceFactory;
_arrClientFactory = arrClientFactory;
}
[HttpGet]
public async Task<IActionResult> GetSystemStatus()
{
try
{
var process = Process.GetCurrentProcess();
// Get configuration
var downloadClients = await _dataContext.DownloadClients
.AsNoTracking()
.ToListAsync();
var sonarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Sonarr);
var radarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Radarr);
var lidarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Lidarr);
var status = new
{
Application = new
{
Version = GetType().Assembly.GetName().Version?.ToString() ?? "Unknown",
process.StartTime,
UpTime = DateTime.Now - process.StartTime,
MemoryUsageMB = Math.Round(process.WorkingSet64 / 1024.0 / 1024.0, 2),
ProcessorTime = process.TotalProcessorTime
},
DownloadClient = new
{
// TODO
},
MediaManagers = new
{
Sonarr = new
{
InstanceCount = sonarrConfig.Instances.Count
},
Radarr = new
{
InstanceCount = radarrConfig.Instances.Count
},
Lidarr = new
{
InstanceCount = lidarrConfig.Instances.Count
}
}
};
return Ok(status);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving system status");
return StatusCode(500, "An error occurred while retrieving system status");
}
}
[HttpGet("download-client")]
public async Task<IActionResult> GetDownloadClientStatus()
{
try
{
var downloadClients = await _dataContext.DownloadClients
.AsNoTracking()
.ToListAsync();
var result = new Dictionary<string, object>();
// Check for configured clients
if (downloadClients.Count > 0)
{
var clientsStatus = new List<object>();
foreach (var client in downloadClients)
{
clientsStatus.Add(new
{
client.Id,
client.Name,
Type = client.TypeName,
client.Host,
client.Enabled,
IsConnected = client.Enabled, // We can't check connection status without implementing test methods
});
}
result["Clients"] = clientsStatus;
}
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving download client status");
return StatusCode(500, "An error occurred while retrieving download client status");
}
}
[HttpGet("arrs")]
public async Task<IActionResult> GetMediaManagersStatus()
{
try
{
var status = new Dictionary<string, object>();
// Get configurations
var enabledSonarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Sonarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
var enabledRadarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Radarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
var enabledLidarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Lidarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
// Check Sonarr instances
var sonarrStatus = new List<object>();
foreach (var instance in enabledSonarrInstances)
{
try
{
var sonarrClient = _arrClientFactory.GetClient(InstanceType.Sonarr);
await sonarrClient.TestConnectionAsync(instance);
sonarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
sonarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Sonarr"] = sonarrStatus;
// Check Radarr instances
var radarrStatus = new List<object>();
foreach (var instance in enabledRadarrInstances)
{
try
{
var radarrClient = _arrClientFactory.GetClient(InstanceType.Radarr);
await radarrClient.TestConnectionAsync(instance);
radarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
radarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Radarr"] = radarrStatus;
// Check Lidarr instances
var lidarrStatus = new List<object>();
foreach (var instance in enabledLidarrInstances)
{
try
{
var lidarrClient = _arrClientFactory.GetClient(InstanceType.Lidarr);
await lidarrClient.TestConnectionAsync(instance);
lidarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
lidarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Lidarr"] = lidarrStatus;
return Ok(status);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving media managers status");
return StatusCode(500, "An error occurred while retrieving media managers status");
}
}
}

View File

@@ -0,0 +1,148 @@
using System.Text.Json.Serialization;
using Cleanuparr.Api.Middleware;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Logging;
using Microsoft.AspNetCore.Http.Json;
using Microsoft.OpenApi.Models;
using System.Text;
namespace Cleanuparr.Api.DependencyInjection;
public static class ApiDI
{
public static IServiceCollection AddApiServices(this IServiceCollection services)
{
services.Configure<JsonOptions>(options =>
{
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.SerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
// Add API-specific services
services
.AddControllers()
.AddJsonOptions(options =>
{
options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.JsonSerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
services.AddEndpointsApiExplorer();
// Add SignalR for real-time updates
services
.AddSignalR()
.AddJsonProtocol(options =>
{
options.PayloadSerializerOptions.Converters.Add(new JsonStringEnumConverter());
});
// Add health status broadcaster
services.AddHostedService<HealthStatusBroadcaster>();
// Add logging initializer service
services.AddHostedService<LoggingInitializer>();
services.AddSwaggerGen(options =>
{
options.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Cleanuparr API",
Version = "v1",
Description = "API for managing media downloads and cleanups",
Contact = new OpenApiContact
{
Name = "Cleanuparr Team"
}
});
});
return services;
}
public static WebApplication ConfigureApi(this WebApplication app)
{
ILogger<Program> logger = app.Services.GetRequiredService<ILogger<Program>>();
// Enable compression
app.UseResponseCompression();
// Serve static files with caching
app.UseStaticFiles(new StaticFileOptions
{
OnPrepareResponse = ctx =>
{
// Cache static assets for 30 days
// if (ctx.File.Name.EndsWith(".js") || ctx.File.Name.EndsWith(".css"))
// {
// ctx.Context.Response.Headers.CacheControl = "public,max-age=2592000";
// }
}
});
// Add the global exception handling middleware first
app.UseMiddleware<ExceptionMiddleware>();
app.UseCors("Any");
app.UseRouting();
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint("v1/swagger.json", "Cleanuparr API v1");
options.RoutePrefix = "swagger";
options.DocumentTitle = "Cleanuparr API Documentation";
});
}
app.UseAuthorization();
app.MapControllers();
// Custom SPA fallback to inject base path
app.MapFallback(async context =>
{
var basePath = app.Configuration.GetValue<string>("BASE_PATH") ?? "/";
// Normalize the base path (remove trailing slash if not root)
if (basePath != "/" && basePath.EndsWith("/"))
{
basePath = basePath.TrimEnd('/');
}
var webRoot = app.Environment.WebRootPath ?? Path.Combine(app.Environment.ContentRootPath, "wwwroot");
var indexPath = Path.Combine(webRoot, "index.html");
if (!File.Exists(indexPath))
{
context.Response.StatusCode = 404;
await context.Response.WriteAsync("index.html not found");
return;
}
var indexContent = await File.ReadAllTextAsync(indexPath);
// Inject the base path into the HTML
var scriptInjection = $@"
<script>
window['_server_base_path'] = '{basePath}';
</script>";
// Insert the script right before the existing script tag
indexContent = indexContent.Replace(
" <script>",
scriptInjection + "\n <script>"
);
context.Response.ContentType = "text/html";
await context.Response.WriteAsync(indexContent, Encoding.UTF8);
});
// Map SignalR hubs
app.MapHub<HealthStatusHub>("/api/hubs/health");
app.MapHub<AppHub>("/api/hubs/app");
return app;
}
}
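For illustration, with BASE_PATH set to a hypothetical /cleanuparr, the fallback above serves an index.html in which a script of the following shape has been inserted right before the existing inline script tag:

<script>
    window['_server_base_path'] = '/cleanuparr';
</script>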

View File

@@ -0,0 +1,93 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Shared.Helpers;
using Serilog;
using Serilog.Events;
using Serilog.Templates;
using Serilog.Templates.Themes;
namespace Cleanuparr.Api.DependencyInjection;
public static class LoggingDI
{
public static ILoggingBuilder AddLogging(this ILoggingBuilder builder)
{
Log.Logger = GetDefaultLoggerConfiguration().CreateLogger();
return builder.ClearProviders().AddSerilog();
}
public static LoggerConfiguration GetDefaultLoggerConfiguration()
{
LoggerConfiguration logConfig = new();
const string categoryTemplate = "{#if Category is not null} {Concat('[',Category,']'),CAT_PAD}{#end}";
const string jobNameTemplate = "{#if JobName is not null} {Concat('[',JobName,']'),JOB_PAD}{#end}";
const string consoleOutputTemplate = $"[{{@t:yyyy-MM-dd HH:mm:ss.fff}} {{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m}}\n{{@x}}";
const string fileOutputTemplate = $"{{@t:yyyy-MM-dd HH:mm:ss.fff zzz}} [{{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m:lj}}\n{{@x}}";
// Determine job name padding
List<string> jobNames = [nameof(JobType.QueueCleaner), nameof(JobType.ContentBlocker), nameof(JobType.DownloadCleaner)];
int jobPadding = jobNames.Max(x => x.Length) + 2;
// Determine category padding (instance names plus SYSTEM)
List<string> categoryNames = [
InstanceType.Sonarr.ToString(),
InstanceType.Radarr.ToString(),
InstanceType.Lidarr.ToString(),
InstanceType.Readarr.ToString(),
InstanceType.Whisparr.ToString(),
"SYSTEM"
];
int catPadding = categoryNames.Max(x => x.Length) + 2;
// Apply padding values to templates
string consoleTemplate = consoleOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
string fileTemplate = fileOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
// Configure base logger with dynamic level control
logConfig
.MinimumLevel.Is(LogEventLevel.Information)
.Enrich.FromLogContext()
.WriteTo.Console(new ExpressionTemplate(consoleTemplate, theme: TemplateTheme.Literate));
// Create the logs directory
string logsPath = Path.Combine(ConfigurationPathProvider.GetConfigPath(), "logs");
if (!Directory.Exists(logsPath))
{
try
{
Directory.CreateDirectory(logsPath);
}
catch (Exception exception)
{
throw new Exception($"Failed to create log directory | {logsPath}", exception);
}
}
// Add main log file
logConfig.WriteTo.File(
path: Path.Combine(logsPath, "cleanuparr-.txt"),
formatter: new ExpressionTemplate(fileTemplate),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true,
shared: true
);
logConfig
.MinimumLevel.Override("MassTransit", LogEventLevel.Warning)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.Enrich.WithProperty("ApplicationName", "Cleanuparr");
return logConfig;
}
}
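
Note: the Category and JobName conditionals in the templates above only render when those properties are attached to the log event, which requires Enrich.FromLogContext (configured above) plus pushing the properties at the call site. A minimal sketch, with example values, of how a job would tag its log lines:

using Serilog;
using Serilog.Context;

// "JobName" and "Category" are the property names the ExpressionTemplates above look for.
using (LogContext.PushProperty("JobName", "QueueCleaner"))
using (LogContext.PushProperty("Category", "Sonarr"))
{
    Log.Information("processing queue");
}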

View File

@@ -0,0 +1,91 @@
using System.Text.Json.Serialization;
using Cleanuparr.Infrastructure.Features.DownloadRemover.Consumers;
using Cleanuparr.Infrastructure.Features.Notifications.Consumers;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Data.Models.Arr;
using Infrastructure.Verticals.Notifications.Models;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
namespace Cleanuparr.Api.DependencyInjection;
public static class MainDI
{
public static IServiceCollection AddInfrastructure(this IServiceCollection services, IConfiguration configuration) =>
services
.AddLogging(builder => builder.ClearProviders().AddConsole())
.AddHttpClients(configuration)
.AddSingleton<MemoryCache>()
.AddSingleton<IMemoryCache>(serviceProvider => serviceProvider.GetRequiredService<MemoryCache>())
.AddServices()
.AddHealthServices()
.AddQuartzServices(configuration)
.AddNotifications(configuration)
.AddMassTransit(config =>
{
config.AddConsumer<DownloadRemoverConsumer<SearchItem>>();
config.AddConsumer<DownloadRemoverConsumer<SonarrSearchItem>>();
config.AddConsumer<NotificationConsumer<FailedImportStrikeNotification>>();
config.AddConsumer<NotificationConsumer<StalledStrikeNotification>>();
config.AddConsumer<NotificationConsumer<SlowStrikeNotification>>();
config.AddConsumer<NotificationConsumer<QueueItemDeletedNotification>>();
config.AddConsumer<NotificationConsumer<DownloadCleanedNotification>>();
config.AddConsumer<NotificationConsumer<CategoryChangedNotification>>();
config.UsingInMemory((context, cfg) =>
{
cfg.ConfigureJsonSerializerOptions(options =>
{
options.Converters.Add(new JsonStringEnumConverter());
options.ReferenceHandler = ReferenceHandler.IgnoreCycles;
return options;
});
cfg.ReceiveEndpoint("download-remover-queue", e =>
{
e.ConfigureConsumer<DownloadRemoverConsumer<SearchItem>>(context);
e.ConfigureConsumer<DownloadRemoverConsumer<SonarrSearchItem>>(context);
e.ConcurrentMessageLimit = 1;
e.PrefetchCount = 1;
});
cfg.ReceiveEndpoint("notification-queue", e =>
{
e.ConfigureConsumer<NotificationConsumer<FailedImportStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<StalledStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<SlowStrikeNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<QueueItemDeletedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<DownloadCleanedNotification>>(context);
e.ConfigureConsumer<NotificationConsumer<CategoryChangedNotification>>(context);
e.ConcurrentMessageLimit = 1;
e.PrefetchCount = 1;
});
});
});
private static IServiceCollection AddHttpClients(this IServiceCollection services, IConfiguration configuration)
{
// Add the dynamic HTTP client system - this replaces all the previous static configurations
services.AddDynamicHttpClients();
// Add the dynamic HTTP client provider that uses the new system
services.AddSingleton<IDynamicHttpClientProvider, DynamicHttpClientProvider>();
return services;
}
/// <summary>
/// Adds health check services to the service collection
/// </summary>
private static IServiceCollection AddHealthServices(this IServiceCollection services) =>
services
// Register the health check service
.AddSingleton<IHealthCheckService, HealthCheckService>()
// Register the background service for periodic health checks
.AddHostedService<HealthCheckBackgroundService>();
}
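
Note: each message type registered above is handled by a consumer class, and the in-memory transport delivers messages to the two receive endpoints with a concurrency of one. A minimal sketch of the consumer shape MassTransit expects (ExampleMessage and ExampleConsumer are placeholders, not project types):

using MassTransit;

public record ExampleMessage(string Text);

public class ExampleConsumer : IConsumer<ExampleMessage>
{
    public Task Consume(ConsumeContext<ExampleMessage> context)
    {
        // The payload arrives as context.Message; the handling work goes here.
        return Task.CompletedTask;
    }
}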

View File

@@ -0,0 +1,20 @@
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Apprise;
using Cleanuparr.Infrastructure.Features.Notifications.Notifiarr;
using Infrastructure.Verticals.Notifications;
namespace Cleanuparr.Api.DependencyInjection;
public static class NotificationsDI
{
public static IServiceCollection AddNotifications(this IServiceCollection services, IConfiguration configuration) =>
services
// Notification configs are now managed through ConfigManager
.AddTransient<INotifiarrProxy, NotifiarrProxy>()
.AddTransient<INotificationProvider, NotifiarrProvider>()
.AddTransient<IAppriseProxy, AppriseProxy>()
.AddTransient<INotificationProvider, AppriseProvider>()
.AddTransient<INotificationPublisher, NotificationPublisher>()
.AddTransient<INotificationFactory, NotificationFactory>()
.AddTransient<NotificationService>();
}
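
Note: both NotifiarrProvider and AppriseProvider are registered against the same INotificationProvider interface, so a consumer can resolve every configured provider at once through standard container behavior. A minimal sketch (ExampleDispatcher is illustrative only):

public class ExampleDispatcher
{
    private readonly IEnumerable<INotificationProvider> _providers;

    // The container injects all INotificationProvider registrations made above.
    public ExampleDispatcher(IEnumerable<INotificationProvider> providers)
    {
        _providers = providers;
    }
}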

View File

@@ -0,0 +1,20 @@
using Cleanuparr.Api.Jobs;
using Quartz;
namespace Cleanuparr.Api.DependencyInjection;
public static class QuartzDI
{
public static IServiceCollection AddQuartzServices(this IServiceCollection services, IConfiguration configuration) =>
services
.AddQuartz()
.AddQuartzHostedService(opt =>
{
opt.WaitForJobsToComplete = true;
})
// Register BackgroundJobManager as a hosted service
.AddSingleton<BackgroundJobManager>()
.AddHostedService(provider => provider.GetRequiredService<BackgroundJobManager>());
// Jobs are now managed by BackgroundJobManager
}

View File

@@ -0,0 +1,53 @@
using Cleanuparr.Application.Features.ContentBlocker;
using Cleanuparr.Application.Features.DownloadCleaner;
using Cleanuparr.Application.Features.QueueCleaner;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.ContentBlocker;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.DownloadRemover;
using Cleanuparr.Infrastructure.Features.DownloadRemover.Interfaces;
using Cleanuparr.Infrastructure.Features.Files;
using Cleanuparr.Infrastructure.Features.ItemStriker;
using Cleanuparr.Infrastructure.Features.Security;
using Cleanuparr.Infrastructure.Interceptors;
using Cleanuparr.Infrastructure.Services;
using Cleanuparr.Persistence;
using Infrastructure.Interceptors;
using Infrastructure.Services.Interfaces;
using Infrastructure.Verticals.Files;
namespace Cleanuparr.Api.DependencyInjection;
public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddSingleton<IEncryptionService, AesEncryptionService>()
.AddTransient<SensitiveDataJsonConverter>()
.AddTransient<EventsContext>()
.AddTransient<DataContext>()
.AddTransient<EventPublisher>()
.AddHostedService<EventCleanupService>()
// API services
.AddSingleton<IJobManagementService, JobManagementService>()
// Core services
.AddTransient<IDryRunInterceptor, DryRunInterceptor>()
.AddTransient<CertificateValidationService>()
.AddTransient<SonarrClient>()
.AddTransient<RadarrClient>()
.AddTransient<LidarrClient>()
.AddTransient<ArrClientFactory>()
.AddTransient<QueueCleaner>()
.AddTransient<ContentBlocker>()
.AddTransient<DownloadCleaner>()
.AddTransient<IQueueItemRemover, QueueItemRemover>()
.AddTransient<IFilenameEvaluator, FilenameEvaluator>()
.AddTransient<IHardLinkFileService, HardLinkFileService>()
.AddTransient<UnixHardLinkFileService>()
.AddTransient<WindowsHardLinkFileService>()
.AddTransient<ArrQueueIterator>()
.AddTransient<DownloadServiceFactory>()
.AddTransient<IStriker, Striker>()
.AddSingleton<BlocklistProvider>();
}

View File

@@ -0,0 +1,38 @@
using System.Reflection;
using Cleanuparr.Persistence;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api;
public static class HostExtensions
{
public static async Task<IHost> Init(this WebApplication app)
{
ILogger<Program> logger = app.Services.GetRequiredService<ILogger<Program>>();
Version? version = Assembly.GetExecutingAssembly().GetName().Version;
logger.LogInformation(
version is null
? "Cleanuparr version not detected"
: $"Cleanuparr v{version.Major}.{version.Minor}.{version.Build}"
);
logger.LogInformation("timezone: {tz}", TimeZoneInfo.Local.DisplayName);
// Apply db migrations
var eventsContext = app.Services.GetRequiredService<EventsContext>();
if ((await eventsContext.Database.GetPendingMigrationsAsync()).Any())
{
await eventsContext.Database.MigrateAsync();
}
var configContext = app.Services.GetRequiredService<DataContext>();
if ((await configContext.Database.GetPendingMigrationsAsync()).Any())
{
await configContext.Database.MigrateAsync();
}
return app;
}
}

View File

@@ -0,0 +1,255 @@
using Cleanuparr.Application.Features.ContentBlocker;
using Cleanuparr.Application.Features.DownloadCleaner;
using Cleanuparr.Application.Features.QueueCleaner;
using Cleanuparr.Domain.Exceptions;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.ContentBlocker;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
using Cleanuparr.Shared.Helpers;
using Microsoft.EntityFrameworkCore;
using Quartz;
using Quartz.Spi;
namespace Cleanuparr.Api.Jobs;
/// <summary>
/// Manages background jobs in the application.
/// This class is responsible for reading configurations and scheduling jobs.
/// </summary>
public class BackgroundJobManager : IHostedService
{
private readonly ISchedulerFactory _schedulerFactory;
private readonly DataContext _dataContext;
private readonly ILogger<BackgroundJobManager> _logger;
private IScheduler? _scheduler;
public BackgroundJobManager(
ISchedulerFactory schedulerFactory,
DataContext dataContext,
ILogger<BackgroundJobManager> logger
)
{
_schedulerFactory = schedulerFactory;
_dataContext = dataContext;
_logger = logger;
}
/// <summary>
/// Starts the background job manager.
/// This method is called when the application starts.
/// </summary>
public async Task StartAsync(CancellationToken cancellationToken)
{
try
{
_logger.LogInformation("Starting BackgroundJobManager");
_scheduler = await _schedulerFactory.GetScheduler(cancellationToken);
await InitializeJobsFromConfiguration(cancellationToken);
_logger.LogInformation("BackgroundJobManager started");
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to start BackgroundJobManager");
}
}
/// <summary>
/// Stops the background job manager.
/// This method is called when the application stops.
/// </summary>
public async Task StopAsync(CancellationToken cancellationToken)
{
_logger.LogInformation("Stopping BackgroundJobManager");
if (_scheduler != null)
{
// Don't shutdown the scheduler as it's managed by QuartzHostedService
await _scheduler.Standby(cancellationToken);
}
_logger.LogInformation("BackgroundJobManager stopped");
}
/// <summary>
/// Initializes jobs based on current configuration settings.
/// Always registers jobs in the scheduler, but only adds triggers for enabled jobs.
/// </summary>
private async Task InitializeJobsFromConfiguration(CancellationToken cancellationToken = default)
{
if (_scheduler == null)
{
throw new InvalidOperationException("Scheduler not initialized");
}
// Get configurations from db
QueueCleanerConfig queueCleanerConfig = await _dataContext.QueueCleanerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
ContentBlockerConfig contentBlockerConfig = await _dataContext.ContentBlockerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
DownloadCleanerConfig downloadCleanerConfig = await _dataContext.DownloadCleanerConfigs
.AsNoTracking()
.FirstAsync(cancellationToken);
// Always register jobs, regardless of enabled status
await RegisterQueueCleanerJob(queueCleanerConfig, cancellationToken);
await RegisterContentBlockerJob(contentBlockerConfig, cancellationToken);
await RegisterDownloadCleanerJob(downloadCleanerConfig, cancellationToken);
}
/// <summary>
/// Registers the QueueCleaner job and optionally adds triggers based on configuration.
/// </summary>
public async Task RegisterQueueCleanerJob(
QueueCleanerConfig config,
CancellationToken cancellationToken = default)
{
// Always register the job definition
await AddJobWithoutTrigger<QueueCleaner>(cancellationToken);
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<QueueCleaner>(config, config.CronExpression, cancellationToken);
}
}
/// <summary>
/// Registers the ContentBlocker job and optionally adds triggers based on configuration.
/// </summary>
public async Task RegisterContentBlockerJob(
ContentBlockerConfig config,
CancellationToken cancellationToken = default)
{
// Always register the job definition
await AddJobWithoutTrigger<ContentBlocker>(cancellationToken);
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<ContentBlocker>(config, config.CronExpression, cancellationToken);
}
}
/// <summary>
/// Registers the DownloadCleaner job and optionally adds triggers based on configuration.
/// </summary>
public async Task RegisterDownloadCleanerJob(DownloadCleanerConfig config, CancellationToken cancellationToken = default)
{
// Always register the job definition
await AddJobWithoutTrigger<DownloadCleaner>(cancellationToken);
// Only add triggers if the job is enabled
if (config.Enabled)
{
await AddTriggersForJob<DownloadCleaner>(config, config.CronExpression, cancellationToken);
}
}
/// <summary>
/// Helper method to add triggers for an existing job.
/// </summary>
private async Task AddTriggersForJob<T>(
IJobConfig config,
string cronExpression,
CancellationToken cancellationToken = default)
where T : GenericHandler
{
if (_scheduler == null)
{
throw new InvalidOperationException("Scheduler not initialized");
}
string typeName = typeof(T).Name;
var jobKey = new JobKey(typeName);
// Validate the cron expression
if (!string.IsNullOrEmpty(cronExpression))
{
IOperableTrigger triggerObj = (IOperableTrigger)TriggerBuilder.Create()
.WithIdentity("ValidationTrigger")
.StartNow()
.WithCronSchedule(cronExpression)
.Build();
IReadOnlyList<DateTimeOffset> nextFireTimes = TriggerUtils.ComputeFireTimes(triggerObj, null, 2);
TimeSpan triggerValue = nextFireTimes[1] - nextFireTimes[0];
if (triggerValue > Constants.TriggerMaxLimit)
{
throw new ValidationException($"{cronExpression} should have a fire time of maximum {Constants.TriggerMaxLimit.TotalHours} hours");
}
if (typeof(T) != typeof(ContentBlocker) && triggerValue < Constants.TriggerMinLimit)
{
throw new ValidationException($"{cronExpression} should have a fire time of minimum {Constants.TriggerMinLimit.TotalSeconds} seconds");
}
if (triggerValue > StaticConfiguration.TriggerValue)
{
StaticConfiguration.TriggerValue = triggerValue;
}
}
// Create cron trigger
var trigger = TriggerBuilder.Create()
.WithIdentity($"{typeName}-trigger")
.ForJob(jobKey)
.WithCronSchedule(cronExpression, x => x.WithMisfireHandlingInstructionDoNothing())
.StartNow()
.Build();
// Create startup trigger to run immediately
var startupTrigger = TriggerBuilder.Create()
.WithIdentity($"{typeName}-startup-trigger")
.ForJob(jobKey)
.StartNow()
.Build();
// Schedule job with both triggers
await _scheduler.ScheduleJob(trigger, cancellationToken);
await _scheduler.ScheduleJob(startupTrigger, cancellationToken);
_logger.LogInformation("Added triggers for job {name} with cron expression {CronExpression}",
typeName, cronExpression);
}
/// <summary>
/// Helper method to add a job without a trigger (for chained jobs).
/// </summary>
private async Task AddJobWithoutTrigger<T>(CancellationToken cancellationToken = default)
where T : GenericHandler
{
if (_scheduler == null)
{
throw new InvalidOperationException("Scheduler not initialized");
}
string typeName = typeof(T).Name;
var jobKey = new JobKey(typeName);
// Check if job already exists
if (await _scheduler.CheckExists(jobKey, cancellationToken))
{
_logger.LogDebug("Job {name} already exists, skipping registration", typeName);
return;
}
// Create job detail that is durable (can exist without triggers)
var jobDetail = JobBuilder.Create<GenericJob<T>>()
.WithIdentity(jobKey)
.StoreDurably()
.Build();
// Add job to scheduler
await _scheduler.AddJob(jobDetail, true, cancellationToken);
_logger.LogInformation("Registered job {name} without trigger", typeName);
}
}
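
Note: the trigger validation in AddTriggersForJob works by building a throwaway trigger and comparing the gap between its first two computed fire times against the configured limits. A standalone sketch of that check, assuming Quartz and an example five-minute expression:

using Quartz;
using Quartz.Spi;

static TimeSpan IntervalBetweenFires(string cronExpression)
{
    IOperableTrigger trigger = (IOperableTrigger)TriggerBuilder.Create()
        .WithIdentity("ValidationTrigger")
        .StartNow()
        .WithCronSchedule(cronExpression)
        .Build();

    // The gap between the first two fire times is what the job manager above validates.
    IReadOnlyList<DateTimeOffset> fireTimes = TriggerUtils.ComputeFireTimes(trigger, null, 2);
    return fireTimes[1] - fireTimes[0];
}

// Example: IntervalBetweenFires("0 0/5 * * * ?") returns 00:05:00.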

View File

@@ -1,12 +1,12 @@
using Infrastructure.Verticals.Jobs;
using Cleanuparr.Infrastructure.Features.Jobs;
using Quartz;
using Serilog.Context;
namespace Executable.Jobs;
namespace Cleanuparr.Api.Jobs;
[DisallowConcurrentExecution]
public sealed class GenericJob<T> : IJob
where T : GenericHandler
where T : IHandler
{
private readonly ILogger<GenericJob<T>> _logger;
private readonly T _handler;

View File

@@ -0,0 +1,77 @@
using System.Net;
using System.Text.Json;
using Cleanuparr.Api.Models;
using Cleanuparr.Domain.Exceptions;
namespace Cleanuparr.Api.Middleware;
public class ExceptionMiddleware
{
private readonly RequestDelegate _next;
private readonly ILogger<ExceptionMiddleware> _logger;
public ExceptionMiddleware(RequestDelegate next, ILogger<ExceptionMiddleware> logger)
{
_next = next;
_logger = logger;
}
public async Task InvokeAsync(HttpContext context)
{
try
{
await _next(context);
}
catch (Exception ex)
{
await HandleExceptionAsync(context, ex);
}
}
private async Task HandleExceptionAsync(HttpContext context, Exception exception)
{
// Generate a unique identifier for this error
string traceId = Guid.NewGuid().ToString();
// Default status code and message
int statusCode = (int)HttpStatusCode.InternalServerError;
string message = "An unexpected error occurred";
switch (exception)
{
// Handle different exception types
case ValidationException:
statusCode = (int)HttpStatusCode.BadRequest;
message = exception.Message; // Use the validation message directly
_logger.LogWarning(exception,
"Validation error {TraceId} occurred during request to {Path}",
traceId, context.Request.Path);
break;
default:
// Log other exceptions as errors with more details
_logger.LogError(exception,
"Error {TraceId} occurred during request to {Path}: {Message}",
traceId, context.Request.Path, exception.Message);
break;
}
// Create the error response
ErrorResponse errorResponse = new()
{
TraceId = traceId,
Error = message
};
// Set the response
context.Response.ContentType = "application/json";
context.Response.StatusCode = statusCode;
// Write the response
await context.Response.WriteAsync(JsonSerializer.Serialize(errorResponse, new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
}));
}
}
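
Note: with camelCase naming, the payload written above serializes to a two-field JSON object. A minimal sketch of the shape a client receives (message and trace ID are example values):

using System.Text.Json;

var sample = new ErrorResponse
{
    Error = "QueueCleaner cron expression is invalid",
    TraceId = Guid.NewGuid().ToString(),
};

// Produces e.g. {"error":"QueueCleaner cron expression is invalid","traceId":"..."}
string json = JsonSerializer.Serialize(sample, new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
});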

View File

@@ -0,0 +1,17 @@
namespace Cleanuparr.Api.Models;
/// <summary>
/// Standardized error response model for API endpoints
/// </summary>
public class ErrorResponse
{
/// <summary>
/// User-friendly error message
/// </summary>
public required string Error { get; set; }
/// <summary>
/// Trace ID for error tracking (GUID)
/// </summary>
public required string TraceId { get; set; }
}

View File

@@ -0,0 +1,14 @@
using Cleanuparr.Infrastructure.Models;
namespace Cleanuparr.Api.Models;
/// <summary>
/// Represents a request to schedule a job
/// </summary>
public class ScheduleRequest
{
/// <summary>
/// The schedule information for the job
/// </summary>
public required JobSchedule Schedule { get; set; }
}

View File

@@ -0,0 +1,53 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Api.Models;
public class UpdateDownloadCleanerConfigDto
{
public bool Enabled { get; set; }
public string CronExpression { get; set; } = "0 0 * * * ?";
/// <summary>
/// Indicates whether to use the CronExpression directly or convert from a user-friendly schedule
/// </summary>
public bool UseAdvancedScheduling { get; set; }
public List<CleanCategoryDto> Categories { get; set; } = [];
public bool DeletePrivate { get; set; }
/// <summary>
/// Indicates whether unlinked download handling is enabled
/// </summary>
public bool UnlinkedEnabled { get; set; } = false;
public string UnlinkedTargetCategory { get; set; } = "cleanuparr-unlinked";
public bool UnlinkedUseTag { get; set; }
public string UnlinkedIgnoredRootDir { get; set; } = string.Empty;
public List<string> UnlinkedCategories { get; set; } = [];
}
public class CleanCategoryDto
{
[Required]
public string Name { get; set; } = string.Empty;
/// <summary>
/// Max ratio before removing a download.
/// </summary>
public double MaxRatio { get; set; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
public double MinSeedTime { get; set; }
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
public double MaxSeedTime { get; set; } = -1;
}
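
Note: following the property descriptions above, a minimal sketch of a category that removes downloads once they reach a 2.0 ratio (provided they have seeded at least 24 hours) or once they have seeded for 14 days (all values are illustrative):

var movies = new CleanCategoryDto
{
    Name = "movies",
    MaxRatio = 2.0,
    MinSeedTime = 24,      // hours to seed before the ratio rule applies
    MaxSeedTime = 14 * 24, // hours of seeding before removal regardless of ratio
};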

View File

@@ -0,0 +1,152 @@
using System.Runtime.InteropServices;
using System.Text.Json.Serialization;
using Cleanuparr.Api;
using Cleanuparr.Api.DependencyInjection;
using Cleanuparr.Infrastructure.Logging;
using Cleanuparr.Shared.Helpers;
using Serilog;
var builder = WebApplication.CreateBuilder(args);
// Fix paths for single-file deployment on macOS
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
var appDir = AppContext.BaseDirectory;
builder.Environment.ContentRootPath = appDir;
var wwwrootPath = Path.Combine(appDir, "wwwroot");
if (Directory.Exists(wwwrootPath))
{
builder.Environment.WebRootPath = wwwrootPath;
}
}
builder.Configuration
.AddJsonFile(Path.Combine(ConfigurationPathProvider.GetConfigPath(), "cleanuparr.json"), optional: true, reloadOnChange: true);
int.TryParse(builder.Configuration.GetValue<string>("PORT"), out int port);
port = port is 0 ? 11011 : port;
if (!builder.Environment.IsDevelopment())
{
// Outside of development, bind Kestrel to the configured port (default 11011) on all interfaces
builder.WebHost.ConfigureKestrel(options =>
{
options.ListenAnyIP(port);
});
}
builder.Services.AddResponseCompression(options =>
{
options.EnableForHttps = true;
});
// Configure JSON options to serialize enums as strings
builder.Services.ConfigureHttpJsonOptions(options =>
{
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter());
});
// Add services to the container
builder.Services
.AddInfrastructure(builder.Configuration)
.AddApiServices();
// Add CORS before SignalR
builder.Services.AddCors(options =>
{
options.AddPolicy("Any", policy =>
{
policy
// https://github.com/dotnet/aspnetcore/issues/4457#issuecomment-465669576
.SetIsOriginAllowed(_ => true)
.AllowAnyHeader()
.AllowAnyMethod()
.AllowCredentials(); // Required for SignalR auth
});
});
// Register services needed for logging first
builder.Services
.AddTransient<LoggingConfigManager>()
.AddSingleton<SignalRLogSink>();
// Add logging with proper service provider
builder.Logging.AddLogging();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
builder.Host.UseWindowsService(options =>
{
options.ServiceName = "Cleanuparr";
});
builder.Logging.AddEventLog(settings =>
{
settings.SourceName = "Cleanuparr";
});
}
var app = builder.Build();
// Configure BASE_PATH immediately after app build and before any other configuration
string? basePath = app.Configuration.GetValue<string>("BASE_PATH");
ILogger<Program> logger = app.Services.GetRequiredService<ILogger<Program>>();
if (basePath is not null)
{
// Validate the base path
var validationResult = BasePathValidator.Validate(basePath);
if (!validationResult.IsValid)
{
logger.LogError("Invalid BASE_PATH configuration: {ErrorMessage}", validationResult.ErrorMessage);
return;
}
// Normalize the base path
basePath = BasePathValidator.Normalize(basePath);
if (!string.IsNullOrEmpty(basePath))
{
app.Use(async (context, next) =>
{
if (!string.IsNullOrEmpty(basePath) && !context.Request.Path.StartsWithSegments(basePath, StringComparison.OrdinalIgnoreCase))
{
context.Response.StatusCode = StatusCodes.Status404NotFound;
return;
}
await next();
});
app.UsePathBase(basePath);
}
else
{
logger.LogInformation("No base path configured - serving from root");
}
}
logger.LogInformation("Server configuration: PORT={port}, BASE_PATH={basePath}", port, basePath ?? "/");
// Initialize the host
await app.Init();
// Get LoggingConfigManager (will be created if not already registered)
var configManager = app.Services.GetRequiredService<LoggingConfigManager>();
// Get the dynamic level switch for controlling log levels
var levelSwitch = configManager.GetLevelSwitch();
// Get the SignalRLogSink instance
var signalRSink = app.Services.GetRequiredService<SignalRLogSink>();
var logConfig = LoggingDI.GetDefaultLoggerConfiguration();
logConfig.MinimumLevel.ControlledBy(levelSwitch);
// Add to Serilog pipeline
logConfig.WriteTo.Sink(signalRSink);
Log.Logger = logConfig.CreateLogger();
app.ConfigureApi();
await app.RunAsync();

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Domain\Cleanuparr.Domain.csproj" />
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
<ProjectReference Include="..\Cleanuparr.Persistence\Cleanuparr.Persistence.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="MassTransit" Version="8.4.1" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.6" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,14 @@
using Cleanuparr.Domain.Enums;
namespace Cleanuparr.Application.Features.Arr.Dtos;
public class ArrConfigDto
{
public Guid Id { get; set; }
public required InstanceType Type { get; set; }
public short FailedImportMaxStrikes { get; set; } = -1;
public List<ArrInstanceDto> Instances { get; set; } = [];
}

View File

@@ -0,0 +1,20 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Application.Features.Arr.Dtos;
/// <summary>
/// DTO for creating new Arr instances without requiring an ID
/// </summary>
public record CreateArrInstanceDto
{
public bool Enabled { get; init; } = true;
[Required]
public required string Name { get; init; }
[Required]
public required string Url { get; init; }
[Required]
public required string ApiKey { get; init; }
}

View File

@@ -0,0 +1,9 @@
namespace Cleanuparr.Application.Features.Arr.Dtos;
/// <summary>
/// DTO for updating Lidarr configuration basic settings (instances managed separately)
/// </summary>
public record UpdateLidarrConfigDto
{
public short FailedImportMaxStrikes { get; init; } = -1;
}

View File

@@ -0,0 +1,9 @@
namespace Cleanuparr.Application.Features.Arr.Dtos;
/// <summary>
/// DTO for updating Radarr configuration basic settings (instances managed separately)
/// </summary>
public record UpdateRadarrConfigDto
{
public short FailedImportMaxStrikes { get; init; } = -1;
}

View File

@@ -0,0 +1,33 @@
using System.ComponentModel.DataAnnotations;
namespace Cleanuparr.Application.Features.Arr.Dtos;
/// <summary>
/// DTO for updating Sonarr configuration basic settings (instances managed separately)
/// </summary>
public record UpdateSonarrConfigDto
{
public short FailedImportMaxStrikes { get; init; } = -1;
}
/// <summary>
/// DTO for Arr instances that can handle both existing (with ID) and new (without ID) instances
/// </summary>
public record ArrInstanceDto
{
/// <summary>
/// ID for existing instances, null for new instances
/// </summary>
public Guid? Id { get; init; }
public bool Enabled { get; init; } = true;
[Required]
public required string Name { get; init; }
[Required]
public required string Url { get; init; }
[Required]
public required string ApiKey { get; init; }
}

View File

@@ -0,0 +1,202 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Infrastructure.Features.ContentBlocker;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Cleanuparr.Persistence.Models.Configuration.ContentBlocker;
using Cleanuparr.Persistence.Models.Configuration.General;
using Data.Models.Arr.Queue;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using LogContext = Serilog.Context.LogContext;
namespace Cleanuparr.Application.Features.ContentBlocker;
public sealed class ContentBlocker : GenericHandler
{
private readonly BlocklistProvider _blocklistProvider;
public ContentBlocker(
ILogger<ContentBlocker> logger,
DataContext dataContext,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory,
BlocklistProvider blocklistProvider,
EventPublisher eventPublisher
) : base(
logger, dataContext, cache, messageBus,
arrClientFactory, arrArrQueueIterator, downloadServiceFactory, eventPublisher
)
{
_blocklistProvider = blocklistProvider;
}
protected override async Task ExecuteInternalAsync()
{
if (ContextProvider.Get<List<DownloadClientConfig>>(nameof(DownloadClientConfig)).Count is 0)
{
_logger.LogWarning("No download clients configured");
return;
}
var config = ContextProvider.Get<ContentBlockerConfig>();
if (!config.Sonarr.Enabled && !config.Radarr.Enabled && !config.Lidarr.Enabled)
{
_logger.LogWarning("No blocklists are enabled");
return;
}
await _blocklistProvider.LoadBlocklistsAsync();
var sonarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Sonarr));
var radarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Radarr));
var lidarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Lidarr));
if (config.Sonarr.Enabled)
{
await ProcessArrConfigAsync(sonarrConfig, InstanceType.Sonarr);
}
if (config.Radarr.Enabled)
{
await ProcessArrConfigAsync(radarrConfig, InstanceType.Radarr);
}
if (config.Lidarr.Enabled)
{
await ProcessArrConfigAsync(lidarrConfig, InstanceType.Lidarr);
}
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
IReadOnlyList<string> ignoredDownloads = ContextProvider.Get<GeneralConfig>().IgnoredDownloads;
using var _ = LogContext.PushProperty(LogProperties.Category, instanceType.ToString());
IArrClient arrClient = _arrClientFactory.GetClient(instanceType);
// push to context
ContextProvider.Set(nameof(ArrInstance) + nameof(ArrInstance.Url), instance.Url);
ContextProvider.Set(nameof(InstanceType), instanceType);
IReadOnlyList<IDownloadService> downloadServices = await GetInitializedDownloadServicesAsync();
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.GroupBy(x => x.DownloadId)
.ToList();
foreach (var group in groups)
{
if (group.Any(x => !arrClient.IsRecordValid(x)))
{
continue;
}
QueueRecord record = group.First();
_logger.LogTrace("processing | {title} | {id}", record.Title, record.DownloadId);
if (!arrClient.IsRecordValid(record))
{
continue;
}
if (ignoredDownloads.Contains(record.DownloadId, StringComparer.InvariantCultureIgnoreCase))
{
_logger.LogInformation("skip | {title} | ignored", record.Title);
continue;
}
string downloadRemovalKey = CacheKeys.DownloadMarkedForRemoval(record.DownloadId, instance.Url);
if (_cache.TryGetValue(downloadRemovalKey, out bool _))
{
_logger.LogDebug("skip | already marked for removal | {title}", record.Title);
continue;
}
// push record to context
ContextProvider.Set(nameof(QueueRecord), record);
BlockFilesResult result = new();
if (record.Protocol is "torrent")
{
var torrentClients = downloadServices
.Where(x => x.ClientConfig.Type is DownloadClientType.Torrent)
.ToList();
_logger.LogDebug("searching unwanted files for {title}", record.Title);
if (torrentClients.Count > 0)
{
// Check each download client for the download item
foreach (var downloadService in torrentClients)
{
try
{
// block unwanted files for this download, if it is found in this client
result = await downloadService
.BlockUnwantedFilesAsync(record.DownloadId, ignoredDownloads);
if (result.Found)
{
break;
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking download {dName} with download client {cName}",
record.Title, downloadService.ClientConfig.Name);
}
}
if (!result.Found)
{
_logger.LogWarning("Download not found in any torrent client | {title}", record.Title);
}
}
}
if (!result.ShouldRemove)
{
continue;
}
var config = ContextProvider.Get<ContentBlockerConfig>();
bool removeFromClient = true;
if (result.IsPrivate && !config.DeletePrivate)
{
removeFromClient = false;
}
await PublishQueueItemRemoveRequest(
downloadRemovalKey,
instanceType,
instance,
record,
group.Count() > 1,
removeFromClient,
DeleteReason.AllFilesBlocked
);
}
});
}
}

View File

@@ -0,0 +1,222 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Cleanuparr.Persistence.Models.Configuration.DownloadCleaner;
using Cleanuparr.Persistence.Models.Configuration.General;
using Data.Models.Arr.Queue;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using LogContext = Serilog.Context.LogContext;
namespace Cleanuparr.Application.Features.DownloadCleaner;
public sealed class DownloadCleaner : GenericHandler
{
private readonly HashSet<string> _excludedHashes = [];
public DownloadCleaner(
ILogger<DownloadCleaner> logger,
DataContext dataContext,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory,
EventPublisher eventPublisher
) : base(
logger, dataContext, cache, messageBus,
arrClientFactory, arrArrQueueIterator, downloadServiceFactory, eventPublisher
)
{
}
protected override async Task ExecuteInternalAsync()
{
var downloadServices = await GetInitializedDownloadServicesAsync();
if (downloadServices.Count is 0)
{
_logger.LogWarning("Processing skipped because no download clients are configured");
return;
}
var config = ContextProvider.Get<DownloadCleanerConfig>();
bool isUnlinkedEnabled = config.UnlinkedEnabled && !string.IsNullOrEmpty(config.UnlinkedTargetCategory) && config.UnlinkedCategories.Count > 0;
bool isCleaningEnabled = config.Categories.Count > 0;
if (!isUnlinkedEnabled && !isCleaningEnabled)
{
_logger.LogWarning("{name} is not configured properly", nameof(DownloadCleaner));
return;
}
IReadOnlyList<string> ignoredDownloads = ContextProvider.Get<GeneralConfig>(nameof(GeneralConfig)).IgnoredDownloads;
// Process each client separately
var allDownloads = new List<object>();
foreach (var downloadService in downloadServices)
{
try
{
await downloadService.LoginAsync();
var clientDownloads = await downloadService.GetSeedingDownloads();
if (clientDownloads?.Count > 0)
{
allDownloads.AddRange(clientDownloads);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to get seeding downloads from download client");
}
}
if (allDownloads.Count == 0)
{
_logger.LogDebug("no seeding downloads found");
return;
}
_logger.LogTrace("found {count} seeding downloads", allDownloads.Count);
// List<object>? downloadsToChangeCategory = null;
List<Tuple<IDownloadService, List<object>>> downloadServiceWithDownloads = [];
if (isUnlinkedEnabled)
{
// Create category for all clients
foreach (var downloadService in downloadServices)
{
try
{
await downloadService.CreateCategoryAsync(config.UnlinkedTargetCategory);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create category for download client");
}
}
// Get downloads to change category
foreach (var downloadService in downloadServices)
{
try
{
var clientDownloads = downloadService.FilterDownloadsToChangeCategoryAsync(allDownloads, config.UnlinkedCategories);
if (clientDownloads?.Count > 0)
{
downloadServiceWithDownloads.Add(Tuple.Create(downloadService, clientDownloads));
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to filter downloads for category change");
}
}
}
// wait for the downloads to appear in the arr queue
await Task.Delay(10 * 1000);
await ProcessArrConfigAsync(ContextProvider.Get<ArrConfig>(nameof(InstanceType.Sonarr)), InstanceType.Sonarr, true);
await ProcessArrConfigAsync(ContextProvider.Get<ArrConfig>(nameof(InstanceType.Radarr)), InstanceType.Radarr, true);
await ProcessArrConfigAsync(ContextProvider.Get<ArrConfig>(nameof(InstanceType.Lidarr)), InstanceType.Lidarr, true);
if (isUnlinkedEnabled && downloadServiceWithDownloads.Count > 0)
{
_logger.LogInformation("Found {count} potential downloads to change category", downloadServiceWithDownloads.Sum(x => x.Item2.Count));
// Process each client with its own filtered downloads
foreach (var (downloadService, downloadsToChangeCategory) in downloadServiceWithDownloads)
{
try
{
await downloadService.ChangeCategoryForNoHardLinksAsync(downloadsToChangeCategory, _excludedHashes, ignoredDownloads);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to change category for download client {clientName}", downloadService.ClientConfig.Name);
}
}
_logger.LogInformation("Finished changing category");
}
if (config.Categories.Count is 0)
{
return;
}
// Get downloads to clean
downloadServiceWithDownloads = [];
foreach (var downloadService in downloadServices)
{
try
{
var clientDownloads = downloadService.FilterDownloadsToBeCleanedAsync(allDownloads, config.Categories);
if (clientDownloads?.Count > 0)
{
downloadServiceWithDownloads.Add(Tuple.Create(downloadService, clientDownloads));
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to filter downloads for cleaning for download client {clientName}", downloadService.ClientConfig.Name);
}
}
// release unused objects
allDownloads = null;
_logger.LogInformation("found {count} potential downloads to clean", downloadServiceWithDownloads.Sum(x => x.Item2.Count));
// Process cleaning for each client
foreach (var (downloadService, downloadsToClean) in downloadServiceWithDownloads)
{
try
{
await downloadService.CleanDownloadsAsync(downloadsToClean, config.Categories, _excludedHashes, ignoredDownloads);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to clean downloads for download client {clientName}", downloadService.ClientConfig.Name);
}
}
_logger.LogInformation("finished cleaning downloads");
foreach (var downloadService in downloadServices)
{
downloadService.Dispose();
}
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
using var _ = LogContext.PushProperty(LogProperties.Category, instanceType.ToString());
IArrClient arrClient = _arrClientFactory.GetClient(instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.Where(x => !string.IsNullOrEmpty(x.DownloadId))
.GroupBy(x => x.DownloadId)
.ToList();
foreach (QueueRecord record in groups.Select(group => group.First()))
{
_excludedHashes.Add(record.DownloadId.ToLowerInvariant());
}
});
}
}

View File

@@ -0,0 +1,66 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Domain.Exceptions;
namespace Cleanuparr.Application.Features.DownloadClient.Dtos;
/// <summary>
/// DTO for creating a new download client (without ID)
/// </summary>
public sealed record CreateDownloadClientDto
{
/// <summary>
/// Whether this client is enabled
/// </summary>
public bool Enabled { get; init; } = false;
/// <summary>
/// Friendly name for this client
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Type name of download client
/// </summary>
public required DownloadClientTypeName TypeName { get; init; }
/// <summary>
/// Type of download client
/// </summary>
public required DownloadClientType Type { get; init; }
/// <summary>
/// Host address for the download client
/// </summary>
public Uri? Host { get; init; }
/// <summary>
/// Username for authentication
/// </summary>
public string? Username { get; init; }
/// <summary>
/// Password for authentication
/// </summary>
public string? Password { get; init; }
/// <summary>
/// The base URL path component, used by clients like Transmission and Deluge
/// </summary>
public string? UrlBase { get; init; }
/// <summary>
/// Validates the configuration
/// </summary>
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException("Client name cannot be empty");
}
if (Host is null)
{
throw new ValidationException("Host cannot be empty");
}
}
}

View File

@@ -0,0 +1,189 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Events;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.Arr.Interfaces;
using Cleanuparr.Infrastructure.Features.Context;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Infrastructure.Features.Jobs;
using Cleanuparr.Infrastructure.Helpers;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Configuration;
using Cleanuparr.Persistence.Models.Configuration.Arr;
using Cleanuparr.Persistence.Models.Configuration.General;
using Cleanuparr.Persistence.Models.Configuration.QueueCleaner;
using Data.Models.Arr.Queue;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using LogContext = Serilog.Context.LogContext;
namespace Cleanuparr.Application.Features.QueueCleaner;
public sealed class QueueCleaner : GenericHandler
{
public QueueCleaner(
ILogger<QueueCleaner> logger,
DataContext dataContext,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory,
EventPublisher eventPublisher
) : base(
logger, dataContext, cache, messageBus,
arrClientFactory, arrArrQueueIterator, downloadServiceFactory, eventPublisher
)
{
}
protected override async Task ExecuteInternalAsync()
{
var sonarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Sonarr));
var radarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Radarr));
var lidarrConfig = ContextProvider.Get<ArrConfig>(nameof(InstanceType.Lidarr));
await ProcessArrConfigAsync(sonarrConfig, InstanceType.Sonarr);
await ProcessArrConfigAsync(radarrConfig, InstanceType.Radarr);
await ProcessArrConfigAsync(lidarrConfig, InstanceType.Lidarr);
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType)
{
IReadOnlyList<string> ignoredDownloads = ContextProvider.Get<GeneralConfig>().IgnoredDownloads;
using var _ = LogContext.PushProperty(LogProperties.Category, instanceType.ToString());
IArrClient arrClient = _arrClientFactory.GetClient(instanceType);
// push to context
ContextProvider.Set(nameof(ArrInstance) + nameof(ArrInstance.Url), instance.Url);
ContextProvider.Set(nameof(InstanceType), instanceType);
IReadOnlyList<IDownloadService> downloadServices = await GetInitializedDownloadServicesAsync();
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.GroupBy(x => x.DownloadId)
.ToList();
foreach (var group in groups)
{
if (group.Any(x => !arrClient.IsRecordValid(x)))
{
continue;
}
QueueRecord record = group.First();
_logger.LogTrace("processing | {title} | {id}", record.Title, record.DownloadId);
if (!arrClient.IsRecordValid(record))
{
continue;
}
if (ignoredDownloads.Contains(record.DownloadId, StringComparer.InvariantCultureIgnoreCase))
{
_logger.LogInformation("skip | {title} | ignored", record.Title);
continue;
}
string downloadRemovalKey = CacheKeys.DownloadMarkedForRemoval(record.DownloadId, instance.Url);
if (_cache.TryGetValue(downloadRemovalKey, out bool _))
{
_logger.LogDebug("skip | already marked for removal | {title}", record.Title);
continue;
}
// push record to context
ContextProvider.Set(nameof(QueueRecord), record);
DownloadCheckResult downloadCheckResult = new();
if (record.Protocol is "torrent")
{
var torrentClients = downloadServices
.Where(x => x.ClientConfig.Type is DownloadClientType.Torrent)
.ToList();
if (torrentClients.Count > 0)
{
// Check each download client for the download item
foreach (var downloadService in torrentClients)
{
try
{
// stalled download check
downloadCheckResult = await downloadService
.ShouldRemoveFromArrQueueAsync(record.DownloadId, ignoredDownloads);
if (downloadCheckResult.Found)
{
break;
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking download {dName} with download client {cName}",
record.Title, downloadService.ClientConfig.Name);
}
}
if (!downloadCheckResult.Found)
{
_logger.LogWarning("Download not found in any torrent client | {title}", record.Title);
}
}
}
var config = ContextProvider.Get<QueueCleanerConfig>();
// failed import check
bool shouldRemoveFromArr = await arrClient.ShouldRemoveFromQueue(instanceType, record, downloadCheckResult.IsPrivate, config.FailedImport.MaxStrikes);
DeleteReason deleteReason = downloadCheckResult.ShouldRemove ? downloadCheckResult.DeleteReason : DeleteReason.FailedImport;
if (!shouldRemoveFromArr && !downloadCheckResult.ShouldRemove)
{
_logger.LogInformation("skip | {title}", record.Title);
continue;
}
bool removeFromClient = true;
if (downloadCheckResult.IsPrivate)
{
bool isStalledWithoutPruneFlag =
downloadCheckResult.DeleteReason is DeleteReason.Stalled &&
!config.Stalled.DeletePrivate;
bool isSlowWithoutPruneFlag =
downloadCheckResult.DeleteReason is DeleteReason.SlowSpeed or DeleteReason.SlowTime &&
!config.Slow.DeletePrivate;
bool shouldKeepDueToDeleteRules = downloadCheckResult.ShouldRemove &&
(isStalledWithoutPruneFlag || isSlowWithoutPruneFlag);
bool shouldKeepDueToImportRules = shouldRemoveFromArr && !config.FailedImport.DeletePrivate;
if (shouldKeepDueToDeleteRules || shouldKeepDueToImportRules)
{
removeFromClient = false;
}
}
await PublishQueueItemRemoveRequest(
downloadRemovalKey,
instanceType,
instance,
record,
group.Count() > 1,
removeFromClient,
deleteReason
);
}
});
}
}
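
Note: for intuition, one concrete case of the private-download branch above, written as a sketch with hard-coded inputs rather than project code:

// A private download flagged as stalled, with Stalled.DeletePrivate turned off:
bool isPrivate = true;
bool shouldRemove = true;          // downloadCheckResult.ShouldRemove
bool stalledWithoutPrune = true;   // DeleteReason.Stalled and !config.Stalled.DeletePrivate
bool slowWithoutPrune = false;
bool keepDueToDeleteRules = shouldRemove && (stalledWithoutPrune || slowWithoutPrune);
bool removeFromClient = !(isPrivate && keepDueToDeleteRules);
// removeFromClient == false: the item is removed from the arr queue but keeps seeding in the client.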

View File

@@ -10,8 +10,4 @@
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,8 @@
namespace Data.Models.Arr.Queue;
public record Image
{
public required string CoverType { get; init; }
public required Uri RemoteUrl { get; init; }
}

Some files were not shown because too many files have changed in this diff.