Compare commits

...

311 Commits
v2.0.3...dev

Author SHA1 Message Date
Flaminel
d9341ff441 fixed readme 2025-06-27 15:26:37 +03:00
Flaminel
555be79a7a fixed docs favicon 2025-06-27 15:13:15 +03:00
Flaminel
66be8aaa32 removed flmorg references 2025-06-27 15:07:55 +03:00
Flaminel
737b683285 fixed release workflow 2025-06-27 14:56:41 +03:00
Flaminel
c0e94733cd removed test directory 2025-06-27 14:52:31 +03:00
Flaminel
5843ec1d15 changed docs #8 2025-06-27 14:51:06 +03:00
Flaminel
51c968fb2d changed docs #7 2025-06-27 14:29:00 +03:00
Flaminel
ea50a7a535 changed docs #6 2025-06-27 14:20:24 +03:00
Flaminel
b1477704b6 changed docs #5 2025-06-27 13:21:14 +03:00
Flaminel
04d287e1e0 hidden username for deluge 2025-06-26 23:03:21 +03:00
Flaminel
59a5eea684 fixed docs tag location 2025-06-26 22:47:15 +03:00
Flaminel
2078eab054 changed docs #4 2025-06-26 22:35:56 +03:00
Flaminel
3278a24931 changed docs #3 2025-06-26 20:00:22 +03:00
Flaminel
f9b83cf3b2 changed docs #2 2025-06-26 19:37:34 +03:00
Flaminel
54997b184c changed docs #1 2025-06-26 16:27:08 +03:00
Flaminel
2e7d89e1c5 fixed frontend error handling 2025-06-26 12:13:38 +03:00
Flaminel
68c9d41f5f fixed ignored downloads description 2025-06-25 20:17:00 +03:00
Flaminel
a9503bbca3 fixed blocklist reload time 2025-06-25 20:14:45 +03:00
Flaminel
07553131a0 try fix macos installers #4 2025-06-25 16:58:40 +03:00
Flaminel
b21bab2a8a try fix macos installers #3 2025-06-25 15:35:29 +03:00
Flaminel
c9e033c9d1 fixed port configuration 2025-06-25 13:35:02 +03:00
Flaminel
a26fd1da5d removed usenet type 2025-06-24 21:28:38 +03:00
Flaminel
d904240aa8 try fix macos installers #2 2025-06-24 20:56:32 +03:00
Flaminel
4f066c3f2a try fix macos installers 2025-06-24 20:40:37 +03:00
Flaminel
84bee5f63a fixed download cleaner reset 2025-06-24 20:40:23 +03:00
Flaminel
8d7b8e5848 fixed cache strikes not being reset when dry run is disabled 2025-06-24 19:27:34 +03:00
Flaminel
8115205158 fixed frontend name 2025-06-24 19:08:26 +03:00
Flaminel
ada2b9fa4a try fix installers and removed default configuration file 2025-06-23 23:15:40 +03:00
Flaminel
222c21a85e revert base path validator 2025-06-23 21:01:42 +03:00
Flaminel
d6cea762eb fixed macos installer 2025-06-23 19:57:44 +03:00
Flaminel
f9d096d76a fixed base path validator 2025-06-23 19:57:38 +03:00
Flaminel
ee89570cad fixed executable build 2025-06-23 19:44:03 +03:00
Flaminel
42145e58eb fixed naming 2025-06-23 19:22:51 +03:00
Flaminel
0cda75157e fixed config for installers and icons 2025-06-23 19:09:57 +03:00
Flaminel
4ef223b0cc removed debug symbols from app build 2025-06-23 18:48:00 +03:00
Flaminel
0df1fd0d19 fixed release workflows 2025-06-23 18:36:44 +03:00
Flaminel
7409ddeb33 removed windsurf 2025-06-23 17:51:10 +03:00
Flaminel
f46702f073 fixed missing port when not in docker 2025-06-23 17:51:01 +03:00
Flaminel
9f22db9cad updated input description 2025-06-23 15:21:35 +03:00
Flaminel
bbd2052086 fixed queue cleaner input states 2025-06-23 15:20:08 +03:00
Flaminel
8436f93727 fixed content blocker input states 2025-06-23 15:20:01 +03:00
Flaminel
8c020d5aa5 fixed download cleaner input states 2025-06-23 15:17:29 +03:00
Flaminel
cd36a32b91 fixed column value type 2025-06-23 01:51:20 +03:00
Flaminel
9e43ce76f6 changed settings page layout 2025-06-22 22:10:41 +03:00
Flaminel
a068b93458 fixed blocklist input width 2025-06-22 22:09:12 +03:00
Flaminel
96dc8bec51 fixed unlinked categories validation 2025-06-22 22:03:42 +03:00
Flaminel
910633a413 added download cleaner pop-up 2025-06-22 21:51:02 +03:00
Flaminel
823b9fec3f added general settings pop-up 2025-06-22 21:50:48 +03:00
Flaminel
b99cbd0d6e reorganized content blocker settings 2025-06-22 12:06:25 +03:00
Flaminel
534a8a9c66 reorganized download cleaner settings 2025-06-22 12:06:16 +03:00
Flaminel
0473d14462 fixed setting message 2025-06-22 04:55:56 +03:00
Flaminel
febb9c4432 fixed some settings 2025-06-22 04:53:33 +03:00
Flaminel
0933b99cea fixed encryption key seeding 2025-06-22 02:12:18 +03:00
Flaminel
20483fab9a fix unlink categories not being sent 2025-06-22 01:53:35 +03:00
Flaminel
60f96589cd fixed general setting HTTPS description 2025-06-22 00:45:23 +03:00
Flaminel
eec0cb9605 fixed general settings dropdowns 2025-06-22 00:08:20 +03:00
Flaminel
20fe4c5b3f fixed cleanup settings 2025-06-21 23:51:47 +03:00
Flaminel
8d7b207181 fixed queue cleaner and download cleaner layout 2025-06-21 20:29:13 +03:00
Flaminel
8f34bdc780 fixed port configuration and logs 2025-06-21 20:20:20 +03:00
Flaminel
a61a10af8f updated support section 2025-06-21 19:52:47 +03:00
Flaminel
17c37e8eef reorganized sidebar 2025-06-21 19:43:02 +03:00
Flaminel
bfbd2c16dd fixed support component icons; removed p-card 2025-06-21 16:40:20 +03:00
Flaminel
eef21ac734 added huntarr link 2025-06-21 16:28:01 +03:00
Flaminel
978b1c9ade added makefile 2025-06-21 16:16:29 +03:00
Flaminel
5ea1361832 removed enabled per arr config and added enabled per arr instance 2025-06-21 16:16:21 +03:00
Flaminel
861c74e452 fixed entrypoint to create user and group if they don't exist 2025-06-21 02:12:49 +03:00
Flaminel
6f437de698 added support card 2025-06-21 02:12:12 +03:00
Flaminel
2cdbc938fd fixed qbit label name 2025-06-20 23:13:41 +03:00
Flaminel
26d98f815f added support for PUID, PGID and UMASK 2025-06-20 23:06:25 +03:00
Flaminel
1a7e86aca4 removed https redirection 2025-06-20 21:12:59 +03:00
Flaminel
4b2aa6c4f6 Merge branch 'main' into dev 2025-06-20 15:00:40 +03:00
Flaminel
2dcd495da7 fixed default base path 2025-06-20 14:57:20 +03:00
Flaminel
38b7d1d4bb fixed content blocker 2025-06-20 14:39:00 +03:00
Flaminel
c109b15ec1 added content blocker back 2025-06-20 12:06:48 +03:00
Flaminel
1cc749243c fixed base path and port 2025-06-20 01:32:05 +03:00
Flaminel
a7c8f37bed fixed job enabling failing 2025-06-19 00:08:06 +03:00
Flaminel
65d0d5188d gitignore 2025-06-18 23:53:49 +03:00
Flaminel
15b0e4218d fixed settings page 2025-06-18 23:38:42 +03:00
Flaminel
de7e7d244b fixed initial docker build 2025-06-18 23:31:49 +03:00
Flaminel
7a8cdbb354 fixed frontend build 2025-06-18 23:02:52 +03:00
Flaminel
4c5e4e95d9 try fix Dockerfile 2025-06-18 21:59:14 +03:00
Flaminel
5881c24ff3 moved frontend 2025-06-18 21:59:04 +03:00
Flaminel
b323235227 moved backend 2025-06-18 21:58:48 +03:00
Flaminel
ee0f915aaf fixed some icons 2025-06-18 18:17:21 +03:00
Flaminel
8883a2ca3f fixed notifications page layout 2025-06-18 18:00:16 +03:00
Flaminel
bbfde4bb17 added notifications endpoint 2025-06-18 17:48:50 +03:00
Flaminel
4d8d3ea732 fixed mobile menu bar 2025-06-17 18:49:47 +03:00
Flaminel
e9718c3a66 fixed tooltip positions 2025-06-17 18:47:34 +03:00
Flaminel
3e8fb01f44 fixed cron validations 2025-06-17 18:44:49 +03:00
Flaminel
e5d7d8ed12 reorganized project 2025-06-17 18:21:18 +03:00
Flaminel
d76216665b try #1 for deployment 2025-06-17 14:41:55 +03:00
Flaminel
1255d0a50a fixed serialization options 2025-06-17 10:36:24 +03:00
Flaminel
cb53ee09da removed commented code 2025-06-16 23:59:18 +03:00
Flaminel
f2622b129d removed TODO comment 2025-06-16 23:58:19 +03:00
Flaminel
7ac72cbece fixed download cleaner processing 2025-06-16 23:57:51 +03:00
Flaminel
b7902ca7ba fixed queue cleaner processing 2025-06-16 23:35:01 +03:00
Flaminel
84dd43a85a removed dry run attribute 2025-06-16 23:14:10 +03:00
Flaminel
fe54813abf fixed download clients not being invalidated on config change 2025-06-16 23:11:00 +03:00
Flaminel
7b2af6dd5d try fix download clients factory 2025-06-16 22:40:12 +03:00
Flaminel
cacd62058f fixed singletons using datacontext 2025-06-16 22:23:40 +03:00
Flaminel
0a8d1450dd try fix download client factory 2025-06-16 21:38:27 +03:00
Flaminel
f651663fd3 fixed health checks and download service factory 2025-06-16 20:17:06 +03:00
Flaminel
b4548573ee fixed download client UI 2025-06-16 18:26:16 +03:00
Flaminel
f39e8eca46 fixed download services 2025-06-16 18:26:09 +03:00
Flaminel
ba1ced3b84 some more layout fixes 2025-06-16 17:37:49 +03:00
Flaminel
ef8aa4c002 fixed download button context menu 2025-06-16 17:28:05 +03:00
Flaminel
cd2b66a525 fix events and logs layout 2025-06-16 17:21:56 +03:00
Flaminel
b4dad7ebc9 fixed download clients layout 2025-06-16 14:42:32 +03:00
Flaminel
9411ab815e fixed lidarr layout 2025-06-15 23:51:23 +03:00
Flaminel
9ebf49f0b8 fixed mobile menu 2025-06-15 23:51:17 +03:00
Flaminel
369a75b4c5 fixed radarr layout 2025-06-15 23:38:15 +03:00
Flaminel
1f55929ab1 fixed sonarr disabled states 2025-06-15 23:12:53 +03:00
Flaminel
4558fa462f fixed sonarr layout 2025-06-15 23:08:37 +03:00
Flaminel
f23c0e0186 changed heart icon color 2025-06-15 23:01:43 +03:00
Flaminel
d5d661543f try fix sonarr layout 2025-06-15 22:55:58 +03:00
Flaminel
53163774a5 separated settings pages 2025-06-15 22:31:34 +03:00
Flaminel
00dfd63797 try fix sonarr 2025-06-15 22:23:52 +03:00
Flaminel
d5e3e9954d try fix arrs again 2025-06-15 21:46:01 +03:00
Flaminel
bf37668dcb combine arr configs #1 2025-06-15 21:15:50 +03:00
Flaminel
62eee94497 removed search type from sonarr config 2025-06-15 19:46:25 +03:00
Flaminel
91274dac4b try fix arr #3 2025-06-15 19:30:11 +03:00
Flaminel
689adb13d8 try fix arr #2 2025-06-15 18:54:33 +03:00
Flaminel
1194db6c1e try fix arr #1 2025-06-15 18:54:28 +03:00
Flaminel
5f412c2e6a try fix config controller 2025-06-15 11:27:16 +03:00
Flaminel
bd9f24eb1a fixed general settings 2025-06-15 03:30:25 +03:00
Flaminel
458688770a try fix download client #4 2025-06-15 03:06:24 +03:00
Flaminel
cf208754a3 try fix download client #3 2025-06-15 03:01:53 +03:00
Flaminel
4b5f4dc447 try fix download client #2 2025-06-15 02:45:49 +03:00
Flaminel
3c2e36eb9e try fix download client UI 2025-06-15 02:26:37 +03:00
Flaminel
95d39c40ed fixed initial db state 2025-06-15 01:21:58 +03:00
Flaminel
9e393eebcc try switch to db 2025-06-15 00:42:20 +03:00
Flaminel
033b50519b db config checkpoint 2025-06-14 01:20:37 +03:00
Flaminel
0be5f48717 fixed db naming 2025-06-13 18:52:51 +03:00
Flaminel
9353a55cff added config db 2025-06-13 18:16:03 +03:00
Flaminel
0cb737a7e1 try fix config controller 2025-06-13 16:42:22 +03:00
Flaminel
6a0641ef63 try fix config update 2025-06-13 15:46:00 +03:00
Flaminel
26bfa5adb2 try fix again 2025-06-13 14:29:01 +03:00
Flaminel
96130501c3 try fix download client settings 2025-06-13 14:18:17 +03:00
Flaminel
fef7efb7dc added download client config UI 2025-06-13 14:09:04 +03:00
Flaminel
857a6a88b4 added lidarr UI 2025-06-12 23:18:13 +03:00
Flaminel
b85842d3f0 added radarr config UI; fixed some messages 2025-06-12 22:57:14 +03:00
Flaminel
4800ec66d5 fixed some buttons 2025-06-12 22:28:50 +03:00
Flaminel
225e80cdbe try add sonarr config #2 2025-06-11 17:39:53 +03:00
Flaminel
64a24051d7 try add sonarr config #1 2025-06-11 16:01:55 +03:00
Flaminel
d73cb46006 try fix download cleaner again 2025-06-11 00:28:29 +03:00
Flaminel
0ab5c01ebc added toggle for unlinked settings 2025-06-10 22:31:42 +03:00
Flaminel
f2303023b4 try fix download cleaner #2 2025-06-10 21:48:26 +03:00
Flaminel
7b67ab6967 try fix download cleaner settings #1 2025-06-10 20:25:53 +03:00
Flaminel
3ae1f9c81d added download cleaner settings 2025-06-10 19:19:16 +03:00
Flaminel
3b676a8c60 try fix settings notifications and stuff 2025-06-10 09:42:49 +03:00
Flaminel
0f45b298b8 fixed dropdown 2025-06-09 12:52:34 +03:00
Flaminel
0260e43d79 try basic/advanced scheduling mode 2025-06-09 12:50:26 +03:00
Flaminel
a85687f687 fixed some other stuff 2025-06-09 03:28:20 +03:00
Flaminel
ee24856504 added new event type and fixed more stuff 2025-06-09 02:40:41 +03:00
Flaminel
9b337134c9 fixed queue settings validations 2025-06-09 02:21:34 +03:00
Flaminel
64d4abf25b fixed setting log level 2025-06-09 02:21:20 +03:00
Flaminel
a4423a28e9 some more fucking fixing 2025-06-09 01:47:22 +03:00
Flaminel
fc26f40fb3 fixed some stuff on queue cleaner 2025-06-09 00:59:42 +03:00
Flaminel
e2e775c073 fix some job stuff 2025-06-09 00:52:53 +03:00
Flaminel
ca13171b82 try fix settings 2025-06-08 23:59:55 +03:00
Flaminel
ee71eab6fa more fixes 2025-06-08 23:10:35 +03:00
Flaminel
565b6b3fde fixed general settings layout 2025-06-08 23:03:02 +03:00
Flaminel
91e7e92cc6 fixed api path 2025-06-08 22:37:16 +03:00
Flaminel
c8626c220e fixed missing certificate validation option 2025-06-08 22:35:28 +03:00
Flaminel
c9dc917401 try fix general settings #1 2025-06-08 22:25:30 +03:00
Flaminel
20e1df722e added general settings 2025-06-08 21:38:16 +03:00
Flaminel
06a2b82881 added loading/error screen for settings 2025-06-08 18:21:05 +03:00
Flaminel
1afbf5c573 fixed content blocker checkbox state 2025-06-08 02:45:15 +03:00
Flaminel
e5586fc774 fixed settings dirty state management and added leave page popup 2025-06-08 02:37:24 +03:00
Flaminel
565d6a78a7 fixed queue cleaner settings button 2025-06-08 01:53:01 +03:00
Flaminel
9cac4fdf8c fixed too many OnFileChanged being trigger when starting the app 2025-06-08 01:45:16 +03:00
Flaminel
d28389641a fixed trace and debug logs icon 2025-06-08 01:44:37 +03:00
Flaminel
46c48d7c00 try fix config manager 2025-06-07 02:15:36 +03:00
Flaminel
d2bcf8ac89 removed content blocker config file 2025-06-07 02:14:07 +03:00
Flaminel
8463b8b786 removed ignored downloads path and config 2025-06-07 02:05:51 +03:00
Flaminel
fd74455951 removed fallback code 2025-06-07 00:41:56 +03:00
Flaminel
34314de284 fixed accordion toggle 2025-06-07 00:36:27 +03:00
Flaminel
195d361364 fixed patterns input 2025-06-07 00:11:46 +03:00
Flaminel
cc07466c2b try change to accordion panel 2025-06-06 23:39:35 +03:00
Flaminel
30aaac5e67 fixed dropdowns and schedule 2025-06-06 23:12:34 +03:00
Flaminel
44a6c37530 fixed small stuff 2025-06-06 22:53:29 +03:00
Flaminel
9526f133a1 changed some info logs 2025-06-06 22:45:19 +03:00
Flaminel
ddbfee33d1 fixed queue cleaner config retrieval 2025-06-06 22:44:08 +03:00
Flaminel
12ab97825b fixed queue cleaner UI settings 2025-06-06 22:20:56 +03:00
Flaminel
bcf093148c fixed UI queue cleaner config 2025-06-06 21:26:51 +03:00
Flaminel
d2cc8517c1 try fix blocklist provider to be dynamic 2025-06-06 21:11:15 +03:00
Flaminel
cae4e323a5 try remove content blocker 2025-06-06 20:46:38 +03:00
Flaminel
f6b0014ec6 fixed some configuration stuff 2025-06-02 12:59:33 +03:00
Flaminel
5d0a48e7cd fixed download cleaner job on api update 2025-06-01 18:49:59 +03:00
Flaminel
d177790d6f fixed queue cleaner job scheduling on API call 2025-06-01 18:37:58 +03:00
Flaminel
d7e28fc6e7 change logs layout 2025-06-01 18:12:16 +03:00
Flaminel
14f16cbe70 fixed events layout 2025-06-01 02:37:15 +03:00
Flaminel
8b8dc672d1 updated some names 2025-06-01 02:36:59 +03:00
Flaminel
d80d897acb test 2025-05-31 21:38:45 +03:00
Flaminel
d9d1f86897 try fix #4 2025-05-31 21:38:41 +03:00
Flaminel
2de3cccac7 try fix #3 2025-05-31 20:39:30 +03:00
Flaminel
b2bb48a260 try fix #2 2025-05-31 00:54:25 +03:00
Flaminel
d078ea288c try fix settings enablement 2025-05-30 23:18:36 +03:00
Flaminel
84d984082c created separate component for settings 2025-05-30 19:13:02 +03:00
Flaminel
b289b2ee39 added queue cleaner settings 2025-05-30 18:36:38 +03:00
Flaminel
97473b47fd updated config controller 2025-05-30 16:27:20 +03:00
Flaminel
a38d370925 added global exception handler 2025-05-30 16:02:56 +03:00
Flaminel
1e3a4cb220 added config DTOs 2025-05-30 15:19:04 +03:00
Flaminel
62e31a2497 changed db file name 2025-05-30 04:14:28 +03:00
Flaminel
c58f6080f0 added encryption for sensitive configs 2025-05-29 23:01:42 +03:00
Flaminel
7cfd69b1f7 fixed search on events 2025-05-29 20:14:07 +03:00
Flaminel
86c356c3a9 added missing event search; fixed connection status for events page 2025-05-29 04:31:54 +03:00
Flaminel
36cfd3b4e5 fixed events viewer 2025-05-29 03:25:05 +03:00
Flaminel
c6f34432b7 removed auto scroll on events 2025-05-29 02:55:24 +03:00
Flaminel
97fe7138c2 fixed event timestamp 2025-05-29 01:57:12 +03:00
Flaminel
2b83e1a334 events api #1 2025-05-28 22:39:15 +03:00
Flaminel
599f8959a9 fixed some logs 2025-05-28 21:41:38 +03:00
Flaminel
f6bcd29ea0 removed old hubs 2025-05-28 19:31:02 +03:00
Flaminel
62fc39251b added unified signalr hub 2025-05-28 19:01:54 +03:00
Flaminel
f5fe9405cd removed some stuff from LoggingController 2025-05-28 15:12:59 +03:00
Flaminel
e4bb0ac04c added glow on logo and heart 2025-05-28 15:12:45 +03:00
Flaminel
fdab02a937 fixed some things; renamed stuff 2025-05-28 11:34:18 +03:00
Flaminel
b7546a7015 fixed dashboard streams not being populated at first 2025-05-28 02:17:52 +03:00
Flaminel
7482f963e3 removed weird left border on event and log entries 2025-05-28 01:21:59 +03:00
Flaminel
90b0ea8306 fixed connection status styling on dashboard 2025-05-28 01:18:36 +03:00
Flaminel
25ae542e87 fixed timeline colors 2025-05-28 01:06:15 +03:00
Flaminel
e96d76091b fixes to layout and styling 2025-05-27 23:03:21 +03:00
Flaminel
28b6c10917 fixed sidebar items 2025-05-27 20:31:32 +03:00
Flaminel
60884198ff removed icons; fixed buttons 2025-05-27 20:30:58 +03:00
Flaminel
367a499155 removed title from topbar 2025-05-27 20:04:18 +03:00
Flaminel
414ad299b6 removed dashboard refresh button 2025-05-27 18:56:44 +03:00
Flaminel
b3bc071943 added dashboard cards 2025-05-27 18:45:53 +03:00
Flaminel
7bd69046ea removed notification publisher definitions 2025-05-27 15:25:13 +03:00
Flaminel
7cfe1333d2 fixed config path 2025-05-27 15:21:18 +03:00
Flaminel
4bb54517c1 changed failed import naming 2025-05-27 14:37:39 +03:00
Flaminel
607bebaf0f fixed event architecture 2025-05-27 14:30:59 +03:00
Flaminel
0da1ef518a events #5 2025-05-27 03:03:06 +03:00
Flaminel
a660480a7c events #4 2025-05-27 02:21:34 +03:00
Flaminel
e8a7373b0d events #3 2025-05-27 01:32:02 +03:00
Flaminel
0785fe9a12 events #2 2025-05-26 23:16:22 +03:00
Flaminel
153c490198 added events #1 2025-05-26 23:08:02 +03:00
Flaminel
4f8d2c57d7 fixed toggle switch 2025-05-25 23:34:56 +03:00
Flaminel
0ba1979016 update theming 2025-05-25 23:16:44 +03:00
Flaminel
7ec9dfe68d remove collapsed sidebar 2025-05-25 22:50:37 +03:00
Flaminel
8b7e39fa86 try sidebar #2 2025-05-25 19:23:00 +03:00
Flaminel
2143c74767 try sidebar 2025-05-25 02:33:21 +03:00
Flaminel
9e596cfe17 topbar 2025-05-25 02:13:24 +03:00
Flaminel
dd95c40bec fixed logs 2025-05-24 01:28:07 +03:00
Flaminel
df37aee2a2 try #3 2025-05-24 00:54:45 +03:00
Flaminel
cb57b06abb try fix collapse #2 2025-05-24 00:48:26 +03:00
Flaminel
ac924d1294 try fix collapse animation 2025-05-24 00:38:43 +03:00
Flaminel
ba4379417e fixed nav bar icon alignment 2025-05-24 00:31:36 +03:00
Flaminel
ff565895ce fixed download logs 2025-05-24 00:12:54 +03:00
Flaminel
1f5e8e5711 fixed mobile side bar 2025-05-24 00:04:12 +03:00
Flaminel
e284ea2c13 changing main layout and styling 2025-05-23 23:49:45 +03:00
Flaminel
d5dab0a726 styling again 2025-05-23 23:13:31 +03:00
Flaminel
8b36e363ac style adjustments 2025-05-23 22:11:01 +03:00
Flaminel
d3c5f7a210 debounce 2025-05-23 22:00:58 +03:00
Flaminel
a97b898d33 #37 2025-05-23 20:58:22 +03:00
Flaminel
aa3ec6c49c theme 2025-05-23 20:18:04 +03:00
Flaminel
2a55ad456c rules 2025-05-23 19:59:15 +03:00
Flaminel
856bd9a93d #36 2025-05-23 17:27:25 +03:00
Flaminel
64518ad071 #35 2025-05-23 17:15:15 +03:00
Flaminel
100528ab06 fix #34 2025-05-22 19:18:47 +03:00
Flaminel
a8e188aa01 #33 2025-05-22 17:03:09 +03:00
Flaminel
91bd85708c #32 2025-05-22 15:13:25 +03:00
Flaminel
0bd4e77e9d #31 2025-05-21 20:49:40 +03:00
Flaminel
9bd46d7255 fix #30 2025-05-20 16:28:54 +03:00
Flaminel
1604e56a89 huntarr 2025-05-20 13:36:10 +03:00
Flaminel
6c9b60dff5 #29 2025-05-20 13:35:28 +03:00
Flaminel
ee02666dc1 #28 2025-05-20 13:21:32 +03:00
Flaminel
21e1fde1c8 #27 2025-05-20 12:32:42 +03:00
Flaminel
8721bc411e #26 2025-05-20 10:35:30 +03:00
Flaminel
3d0ab5f4a6 #25 2025-05-19 23:11:16 +03:00
Flaminel
0e4535d7a6 add ui 2025-05-19 22:16:52 +03:00
Flaminel
7a1e019c76 fix #24 2025-05-19 19:40:39 +03:00
Flaminel
701a7dc417 fix #23 2025-05-19 19:30:12 +03:00
Flaminel
eb0f782f53 #22 2025-05-19 15:32:57 +03:00
Flaminel
a1bd278652 #21 2025-05-19 13:40:59 +03:00
Flaminel
9409346732 fix #20 2025-05-19 12:38:52 +03:00
Flaminel
3d9b286206 #19 2025-05-19 12:35:58 +03:00
Flaminel
c675924be7 fix 2025-05-19 12:08:58 +03:00
Flaminel
2bd8f69aff fix 2025-05-18 02:39:20 +03:00
Flaminel
ef9868be4d fix 2025-05-18 02:17:45 +03:00
Flaminel
4a394928bb #17 2025-05-17 22:14:00 +03:00
Flaminel
60d3ddb3d2 fix #16 2025-05-17 21:33:02 +03:00
Flaminel
0173598519 #15 2025-05-17 21:30:09 +03:00
Flaminel
96b9a54b64 fix #14 2025-05-17 21:03:14 +03:00
Flaminel
212aeccaf3 fix env 2025-05-17 20:14:30 +03:00
Flaminel
d2eb9e50e0 fix #13 2025-05-17 20:12:53 +03:00
Flaminel
1b47921ac5 fix #12 2025-05-17 19:16:59 +03:00
Flaminel
d8d6e31395 fix #11 2025-05-16 21:52:28 +03:00
Flaminel
10be7d4a73 fix #10 2025-05-16 21:15:35 +03:00
Flaminel
e98ecfcb2a fix #9 2025-05-16 20:48:52 +03:00
Flaminel
bc1da2113c fix #8 2025-05-16 20:05:21 +03:00
Flaminel
46ef6123cc fix #7 2025-05-16 19:28:52 +03:00
Flaminel
f2027f77a9 #12 2025-05-16 19:16:32 +03:00
Flaminel
3c2bb7a289 #11 2025-05-16 18:53:01 +03:00
Flaminel
f9fd118f88 #10 2025-05-16 18:25:47 +03:00
Flaminel
4d79307d24 #9 2025-05-16 18:06:18 +03:00
Flaminel
a52fd9198d fix #6 2025-05-16 17:22:59 +03:00
Flaminel
4cb36e6727 #8 2025-05-16 16:24:38 +03:00
Flaminel
a7c2b698d6 rules #2 2025-05-16 16:24:23 +03:00
Flaminel
57326b2f8e fix #5 2025-05-15 22:09:42 +03:00
Flaminel
374f0f72a7 fix #4 2025-05-15 20:29:27 +03:00
Flaminel
06f514c3ea #7 2025-05-15 20:25:21 +03:00
Flaminel
f386bf700c fix #3 2025-05-15 20:15:20 +03:00
Flaminel
b6950f545f #6 2025-05-15 19:34:12 +03:00
Flaminel
6eb457ed8f fix #2 2025-05-15 18:18:41 +03:00
Flaminel
0e99a510a8 #5 2025-05-15 18:15:42 +03:00
Flaminel
b4316a4f0d #4 2025-05-15 17:46:25 +03:00
Flaminel
fa3aebde9a #3 2025-05-15 16:33:39 +03:00
Flaminel
a68a4c733f fix #1 2025-05-15 16:18:09 +03:00
Flaminel
0fc7352db6 #2 2025-05-14 22:49:39 +03:00
Flaminel
69788d55d2 #1 2025-05-14 22:42:52 +03:00
Flaminel
461e935128 initial windsurf 2025-05-14 22:42:24 +03:00
682 changed files with 54593 additions and 9488 deletions


@@ -6,7 +6,7 @@ body:
- type: markdown
attributes:
value: |
Thanks for taking the time to improve cleanuperr!
Thanks for taking the time to improve Cleanuparr!
- type: checkboxes
id: init
attributes:
@@ -14,7 +14,7 @@ body:
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/flmorg/cleanuperr docker repository.
- label: Ensured I am using ghcr.io/Cleanuparr/Cleanuparr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true


@@ -6,7 +6,7 @@ body:
- type: markdown
attributes:
value: |
Thanks for taking the time to improve cleanuperr!
Thanks for taking the time to improve Cleanuparr!
- type: textarea
id: description
attributes:


@@ -14,7 +14,7 @@ body:
options:
- label: Reviewed the documentation.
required: true
- label: Ensured I am using ghcr.io/flmorg/cleanuperr docker repository.
- label: Ensured I am using ghcr.io/Cleanuparr/Cleanuparr docker repository.
required: true
- label: Ensured I am using the latest version.
required: true

.github/workflows/build-docker.yml vendored Normal file (125 lines added)

@@ -0,0 +1,125 @@
name: Build Docker Images
on:
push:
tags:
- "v*.*.*"
pull_request:
paths:
- 'code/**'
workflow_dispatch:
workflow_call:
jobs:
build_app:
runs-on: ubuntu-latest
steps:
- name: Set github context
timeout-minutes: 1
run: |
echo 'githubRepository=${{ github.repository }}' >> $GITHUB_ENV
echo 'githubSha=${{ github.sha }}' >> $GITHUB_ENV
echo 'githubRef=${{ github.ref }}' >> $GITHUB_ENV
echo 'githubHeadRef=${{ github.head_ref }}' >> $GITHUB_ENV
- name: Initialize build info
timeout-minutes: 1
run: |
githubHeadRef=${{ env.githubHeadRef }}
latestDockerTag=""
versionDockerTag=""
version="0.0.1"
if [[ "$githubRef" =~ ^"refs/tags/" ]]; then
branch=${githubRef##*/}
latestDockerTag="latest"
versionDockerTag=${branch#v}
version=${branch#v}
else
# Determine if this run is for the main branch or another branch
if [[ -z "$githubHeadRef" ]]; then
# Main branch
githubRef=${{ env.githubRef }}
branch=${githubRef##*/}
versionDockerTag="$branch"
else
# Pull request
branch=$githubHeadRef
versionDockerTag="$branch"
fi
fi
githubTags=""
if [ -n "$latestDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr:$latestDockerTag"
fi
if [ -n "$versionDockerTag" ]; then
githubTags="$githubTags,ghcr.io/cleanuparr:$versionDockerTag"
fi
# set env vars
echo "branch=$branch" >> $GITHUB_ENV
echo "githubTags=$githubTags" >> $GITHUB_ENV
echo "versionDockerTag=$versionDockerTag" >> $GITHUB_ENV
echo "version=$version" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/docker username | DOCKER_USERNAME;
secrets/data/docker password | DOCKER_PASSWORD;
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout target repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ env.githubRepository }}
ref: ${{ env.branch }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
timeout-minutes: 5
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push docker image
timeout-minutes: 15
uses: docker/build-push-action@v6
with:
context: ${{ github.workspace }}/code
file: ${{ github.workspace }}/code/Dockerfile
provenance: false
labels: |
commit=sha-${{ env.githubSha }}
version=${{ env.versionDockerTag }}
build-args: |
VERSION=${{ env.version }}
PACKAGES_USERNAME=${{ env.PACKAGES_USERNAME }}
PACKAGES_PAT=${{ env.PACKAGES_PAT }}
outputs: |
type=image
platforms: |
linux/amd64
linux/arm64
push: true
tags: |
${{ env.githubTags }}
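
For reference, a minimal sketch of pulling and running the image this workflow publishes. The full image path (ghcr.io/cleanuparr/cleanuparr) and the 11011 port mapping are assumptions, inferred from the issue-template reference to ghcr.io/Cleanuparr/Cleanuparr and from the HTTP_PORTS value used by the installer workflows further down.

  # Pull the tag pushed by the workflow and expose the web UI port (assumed 11011).
  docker pull ghcr.io/cleanuparr/cleanuparr:latest
  docker run -d --name cleanuparr -p 11011:11011 ghcr.io/cleanuparr/cleanuparr:latest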

.github/workflows/build-executable.yml vendored Normal file (177 lines added)

@@ -0,0 +1,177 @@
name: Build Executables
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Gate
if: ${{ !startsWith(github.ref, 'refs/tags/') && github.event_name != 'workflow_dispatch' }}
run: |
echo "This workflow only runs on tag events or manual dispatch. Pipeline finished."
exit 0
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=${repoFullName#*/}" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout target repository
uses: actions/checkout@v4
timeout-minutes: 1
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup dotnet
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Install dependencies and restore
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ secrets.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
run: |
mkdir -p code/backend/${{ env.executableName }}/wwwroot
cp -r code/frontend/dist/ui/browser/* code/backend/${{ env.executableName }}/wwwroot/
- name: Build win-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build linux-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime linux-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-x64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-x64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Build osx-arm64
run: dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime osx-arm64 --self-contained -o artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64 /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugSymbols=false
- name: Create sample configuration files
run: |
# Create a sample appsettings.json for each platform
cat > sample-config.json << 'EOF'
{
"Logging": {
"LogLevel": {
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"AllowedHosts": "*"
}
EOF
# Copy to each build directory
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/appsettings.json
cp sample-config.json artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/appsettings.json
- name: Zip win-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64/
- name: Zip linux-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64/
- name: Zip linux-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64/
- name: Zip osx-x64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64/
- name: Zip osx-arm64
run: |
cd ./artifacts
zip -r ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip ./${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64/
- name: Upload artifacts
uses: actions/upload-artifact@v4
with:
name: cleanuparr-executables
path: |
./artifacts/*.zip
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
id: release
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
fail_on_unmatched_files: true
target_commitish: main
generate_release_notes: true
files: |
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-win-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-linux-arm64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-amd64.zip
./artifacts/${{ env.githubRepositoryName }}-${{ env.appVersion }}-osx-arm64.zip
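
A minimal sketch of running one of the published archives on Linux, assuming the repository name is Cleanuparr and that the single-file executable inside is named Cleanuparr (the macOS workflows note the AssemblyName is "Cleanuparr", not "Cleanuparr.Api"); the version shown is hypothetical and HTTP_PORTS mirrors the port the installers configure.

  # Unpack the self-contained build and start it on the assumed port.
  unzip Cleanuparr-1.0.0-linux-amd64.zip
  cd Cleanuparr-1.0.0-linux-amd64
  chmod +x Cleanuparr
  HTTP_PORTS=11011 ./Cleanuparr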


@@ -0,0 +1,376 @@
name: Build macOS ARM Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-arm-installer:
name: Build macOS ARM Installer
runs-on: macos-14 # ARM runner for Apple Silicon
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS ARM executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-arm64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>11.0</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-arm64-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-arm64-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}
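
After installing the PKG, a quick sanity check that the launch daemon set up by the postinstall script is running, sketched from the plist label, log paths, and port defined above:

  # Confirm the daemon is loaded, watch its log, and probe the web UI.
  sudo launchctl list | grep com.cleanuparr.daemon
  tail -f /var/log/cleanuparr.log
  curl -I http://localhost:11011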


@@ -0,0 +1,376 @@
name: Build macOS Intel Installer
permissions:
contents: write
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-macos-intel-installer:
name: Build macOS Intel Installer
runs-on: macos-13 # Intel runner
steps:
- name: Set variables
run: |
repoFullName=${{ github.repository }}
ref=${{ github.ref }}
# Handle both tag events and manual dispatch
if [[ "$ref" =~ ^refs/tags/ ]]; then
releaseVersion=${ref##refs/tags/}
appVersion=${releaseVersion#v}
else
# For manual dispatch, use a default version
releaseVersion="dev-$(date +%Y%m%d-%H%M%S)"
appVersion="0.0.1-dev"
fi
repositoryName=${repoFullName#*/}
echo "githubRepository=${{ github.repository }}" >> $GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $GITHUB_ENV
echo "appVersion=$appVersion" >> $GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
fetch-depth: 0
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Build macOS Intel executable
run: |
# Clean any existing output directory
rm -rf dist
mkdir -p dist/temp
# Build to a temporary location
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj \
-c Release \
--runtime osx-x64 \
--self-contained true \
-o dist/temp \
/p:PublishSingleFile=true \
/p:Version=${{ env.appVersion }} \
/p:DebugType=None \
/p:DebugSymbols=false \
/p:UseAppHost=true \
/p:EnableMacOSCodeSign=false \
/p:CodeSignOnCopy=false \
/p:_CodeSignDuringBuild=false \
/p:PublishTrimmed=false \
/p:TrimMode=link
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/MacOS
# Copy the built executable (note: AssemblyName is "Cleanuparr" not "Cleanuparr.Api")
cp dist/temp/Cleanuparr dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Copy frontend directly to where it belongs in the app bundle
mkdir -p dist/Cleanuparr.app/Contents/MacOS/wwwroot
cp -r code/frontend/dist/ui/browser/* dist/Cleanuparr.app/Contents/MacOS/wwwroot/
# Copy any additional runtime files if they exist
if [ -d "dist/temp" ]; then
find dist/temp -name "*.dylib" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
find dist/temp -name "createdump" -exec cp {} dist/Cleanuparr.app/Contents/MacOS/ \; 2>/dev/null || true
fi
- name: Post-build setup
run: |
# Make sure the executable is actually executable
chmod +x dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Remove any .pdb files that might have been created
find dist/Cleanuparr.app/Contents/MacOS -name "*.pdb" -delete 2>/dev/null || true
echo "Checking architecture of built binary:"
file dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
if command -v lipo >/dev/null 2>&1; then
lipo -info dist/Cleanuparr.app/Contents/MacOS/Cleanuparr
fi
echo "Files in MacOS directory:"
ls -la dist/Cleanuparr.app/Contents/MacOS/
- name: Create macOS app bundle structure
run: |
# Create proper app bundle structure
mkdir -p dist/Cleanuparr.app/Contents/{MacOS,Resources,Frameworks}
# Convert ICO to ICNS for macOS app bundle
if command -v iconutil >/dev/null 2>&1; then
# Create iconset directory structure
mkdir -p Cleanuparr.iconset
# Use existing PNG files from Logo directory for different sizes
cp Logo/16.png Cleanuparr.iconset/icon_16x16.png
cp Logo/32.png Cleanuparr.iconset/icon_16x16@2x.png
cp Logo/32.png Cleanuparr.iconset/icon_32x32.png
cp Logo/64.png Cleanuparr.iconset/icon_32x32@2x.png
cp Logo/128.png Cleanuparr.iconset/icon_128x128.png
cp Logo/256.png Cleanuparr.iconset/icon_128x128@2x.png
cp Logo/256.png Cleanuparr.iconset/icon_256x256.png
cp Logo/512.png Cleanuparr.iconset/icon_256x256@2x.png
cp Logo/512.png Cleanuparr.iconset/icon_512x512.png
cp Logo/1024.png Cleanuparr.iconset/icon_512x512@2x.png
# Create ICNS file
iconutil -c icns Cleanuparr.iconset -o dist/Cleanuparr.app/Contents/Resources/Cleanuparr.icns
# Clean up iconset directory
rm -rf Cleanuparr.iconset
fi
# Create Launch Daemon plist
cat > dist/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.cleanuparr.daemon</string>
<key>ProgramArguments</key>
<array>
<string>/Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>/var/log/cleanuparr.log</string>
<key>StandardErrorPath</key>
<string>/var/log/cleanuparr.error.log</string>
<key>WorkingDirectory</key>
<string>/Applications/Cleanuparr.app/Contents/MacOS</string>
<key>EnvironmentVariables</key>
<dict>
<key>HTTP_PORTS</key>
<string>11011</string>
</dict>
</dict>
</plist>
EOF
# Create Info.plist with proper configuration
cat > dist/Cleanuparr.app/Contents/Info.plist << EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleExecutable</key>
<string>Cleanuparr</string>
<key>CFBundleIdentifier</key>
<string>com.Cleanuparr</string>
<key>CFBundleName</key>
<string>Cleanuparr</string>
<key>CFBundleDisplayName</key>
<string>Cleanuparr</string>
<key>CFBundleVersion</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleShortVersionString</key>
<string>${{ env.appVersion }}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>CLNR</string>
<key>CFBundleIconFile</key>
<string>Cleanuparr</string>
<key>NSHighResolutionCapable</key>
<true/>
<key>NSRequiresAquaSystemAppearance</key>
<false/>
<key>LSMinimumSystemVersion</key>
<string>10.15</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.productivity</string>
<key>NSSupportsAutomaticTermination</key>
<false/>
<key>NSSupportsSuddenTermination</key>
<false/>
<key>LSBackgroundOnly</key>
<false/>
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
</dict>
</plist>
EOF
# Clean up temp directory
rm -rf dist/temp
- name: Create PKG installer
run: |
# Create preinstall script to handle existing installations
mkdir -p scripts
cat > scripts/preinstall << 'EOF'
#!/bin/bash
# Stop and unload existing launch daemon if it exists
if launchctl list | grep -q "com.cleanuparr.daemon"; then
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
fi
# Stop any running instances of Cleanuparr
pkill -f "Cleanuparr" || true
sleep 2
# Remove old installation if it exists
if [[ -d "/Applications/Cleanuparr.app" ]]; then
rm -rf "/Applications/Cleanuparr.app"
fi
# Remove old launch daemon plist if it exists
if [[ -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist" ]]; then
rm -f "/Library/LaunchDaemons/com.cleanuparr.daemon.plist"
fi
exit 0
EOF
chmod +x scripts/preinstall
# Create postinstall script
cat > scripts/postinstall << 'EOF'
#!/bin/bash
# Set proper permissions for the app bundle
chmod -R 755 /Applications/Cleanuparr.app
chmod +x /Applications/Cleanuparr.app/Contents/MacOS/Cleanuparr
# Install the launch daemon
cp /Applications/Cleanuparr.app/Contents/Resources/com.cleanuparr.daemon.plist /Library/LaunchDaemons/
chown root:wheel /Library/LaunchDaemons/com.cleanuparr.daemon.plist
chmod 644 /Library/LaunchDaemons/com.cleanuparr.daemon.plist
# Load and start the service
launchctl load /Library/LaunchDaemons/com.cleanuparr.daemon.plist
launchctl start com.cleanuparr.daemon
# Wait a moment for service to start
sleep 3
# Display as system notification
osascript -e 'display notification "Cleanuparr service started! Visit http://localhost:11011 in your browser." with title "Installation Complete"' 2>/dev/null || true
exit 0
EOF
chmod +x scripts/postinstall
# Create uninstall script (optional, for user reference)
cat > scripts/uninstall_cleanuparr.sh << 'EOF'
#!/bin/bash
# Cleanuparr Uninstall Script
# Run this script with sudo to completely remove Cleanuparr
echo "Stopping Cleanuparr service..."
launchctl stop com.cleanuparr.daemon 2>/dev/null || true
launchctl unload /Library/LaunchDaemons/com.cleanuparr.daemon.plist 2>/dev/null || true
echo "Removing service files..."
rm -f /Library/LaunchDaemons/com.cleanuparr.daemon.plist
echo "Removing application..."
rm -rf /Applications/Cleanuparr.app
echo "Removing logs..."
rm -f /var/log/cleanuparr.log
rm -f /var/log/cleanuparr.error.log
echo "Cleanuparr has been completely removed."
echo "Note: Configuration files in /Applications/Cleanuparr.app/Contents/MacOS/config/ have been removed with the app."
EOF
chmod +x scripts/uninstall_cleanuparr.sh
# Copy uninstall script to app bundle for user access
cp scripts/uninstall_cleanuparr.sh dist/Cleanuparr.app/Contents/Resources/
# Determine package name
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel.pkg"
else
pkg_name="Cleanuparr-${{ env.appVersion }}-macos-intel-dev.pkg"
fi
# Create PKG installer with better metadata
pkgbuild --root dist/ \
--scripts scripts/ \
--identifier com.Cleanuparr \
--version ${{ env.appVersion }} \
--install-location /Applications \
--ownership preserve \
${pkg_name}
echo "pkgName=${pkg_name}" >> $GITHUB_ENV
- name: Upload installer as artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-macos-intel-installer
path: '${{ env.pkgName }}'
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
${{ env.pkgName }}
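
The Intel installer mirrors the ARM one, and both copy the uninstall script into the app bundle's Resources directory, so a complete removal looks roughly like this:

  # Run the bundled uninstall script with sudo, as its header instructs.
  sudo bash /Applications/Cleanuparr.app/Contents/Resources/uninstall_cleanuparr.sh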


@@ -0,0 +1,171 @@
name: Build Windows Installer
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
workflow_call:
jobs:
build-windows-installer:
runs-on: windows-latest
steps:
- name: Set variables
shell: pwsh
run: |
$repoFullName = "${{ github.repository }}"
$ref = "${{ github.ref }}"
# Handle both tag events and manual dispatch
if ($ref -match "^refs/tags/") {
$releaseVersion = $ref -replace "refs/tags/", ""
$appVersion = $releaseVersion -replace "^v", ""
} else {
# For manual dispatch, use a default version
$releaseVersion = "dev-$(Get-Date -Format 'yyyyMMdd-HHmmss')"
$appVersion = "0.0.1-dev"
}
$repositoryName = $repoFullName.Split("/")[1]
echo "githubRepository=${{ github.repository }}" >> $env:GITHUB_ENV
echo "githubRepositoryName=$repositoryName" >> $env:GITHUB_ENV
echo "releaseVersion=$releaseVersion" >> $env:GITHUB_ENV
echo "appVersion=$appVersion" >> $env:GITHUB_ENV
echo "executableName=Cleanuparr.Api" >> $env:GITHUB_ENV
echo "APP_VERSION=$appVersion" >> $env:GITHUB_ENV
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT;
secrets/data/github packages_pat | PACKAGES_PAT
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: ${{ env.githubRepository }}
ref: ${{ github.ref_name }}
token: ${{ env.REPO_READONLY_PAT }}
- name: Setup Node.js for frontend build
uses: actions/setup-node@v4
with:
node-version: '18'
cache: 'npm'
cache-dependency-path: code/frontend/package-lock.json
- name: Build frontend
run: |
cd code/frontend
npm ci
npm run build
- name: Setup .NET
uses: actions/setup-dotnet@v4
with:
dotnet-version: 9.0.x
- name: Restore .NET dependencies
run: |
dotnet nuget add source --username ${{ github.repository_owner }} --password ${{ env.PACKAGES_PAT }} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
dotnet restore code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj
- name: Copy frontend to backend wwwroot
shell: pwsh
run: |
New-Item -ItemType Directory -Force -Path "code/backend/${{ env.executableName }}/wwwroot"
Copy-Item -Path "code/frontend/dist/ui/browser/*" -Destination "code/backend/${{ env.executableName }}/wwwroot/" -Recurse -Force
- name: Build Windows executable
run: |
dotnet publish code/backend/${{ env.executableName }}/${{ env.executableName }}.csproj -c Release --runtime win-x64 --self-contained -o dist /p:PublishSingleFile=true /p:Version=${{ env.appVersion }} /p:DebugType=None /p:DebugSymbols=false
- name: Create sample configuration
shell: pwsh
run: |
# Create config directory
New-Item -ItemType Directory -Force -Path "config"
$config = @{
"HTTP_PORTS" = 11011
"BASE_PATH" = "/"
}
$config | ConvertTo-Json | Out-File -FilePath "config/cleanuparr.json" -Encoding UTF8
- name: Setup Inno Setup
shell: pwsh
run: |
# Download and install Inno Setup
$url = "https://jrsoftware.org/download.php/is.exe"
$output = "innosetup-installer.exe"
Invoke-WebRequest -Uri $url -OutFile $output
Start-Process -FilePath $output -ArgumentList "/VERYSILENT", "/SUPPRESSMSGBOXES", "/NORESTART" -Wait
# Add Inno Setup to PATH
$innoPath = "C:\Program Files (x86)\Inno Setup 6"
echo "$innoPath" >> $env:GITHUB_PATH
- name: Verify LICENSE file exists
shell: pwsh
run: |
if (-not (Test-Path "LICENSE")) {
Write-Error "LICENSE file not found in repository root"
exit 1
}
Write-Host "LICENSE file found successfully"
- name: Build Windows installer
shell: pwsh
run: |
# Copy installer script to root
Copy-Item "installers/windows/cleanuparr-installer.iss" -Destination "cleanuparr-installer.iss"
# The installer script has been pre-updated with proper icon and config paths
# No dynamic modifications needed as the base script now includes correct references
# Run Inno Setup compiler
& "C:\Program Files (x86)\Inno Setup 6\ISCC.exe" "cleanuparr-installer.iss"
# Check if installer was created
if (Test-Path "installer/Cleanuparr_Setup.exe") {
Write-Host "Installer created successfully"
} else {
Write-Error "Installer creation failed"
exit 1
}
- name: Rename installer with version
shell: pwsh
run: |
$installerName = "Cleanuparr-${{ env.appVersion }}-Setup.exe"
Move-Item "installer/Cleanuparr_Setup.exe" "installer/$installerName"
echo "installerName=$installerName" >> $env:GITHUB_ENV
- name: Upload installer artifact
uses: actions/upload-artifact@v4
with:
name: Cleanuparr-windows-installer
path: installer/${{ env.installerName }}
retention-days: 30
- name: Release
if: startsWith(github.ref, 'refs/tags/')
uses: softprops/action-gh-release@v2
with:
name: ${{ env.releaseVersion }}
tag_name: ${{ env.releaseVersion }}
repository: ${{ env.githubRepository }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
files: |
installer/${{ env.installerName }}

View File

@@ -1,14 +0,0 @@
on:
workflow_dispatch:
workflow_call:
jobs:
build:
uses: flmorg/universal-workflows-testing/.github/workflows/dotnet.build.app.yml@main
with:
dockerRepository: flaminel/cleanuperr
githubContext: ${{ toJSON(github) }}
outputName: cleanuperr
selfContained: false
baseImage: 9.0-bookworm-slim
secrets: inherit

View File

@@ -1,19 +0,0 @@
on:
workflow_call:
workflow_dispatch:
push:
paths:
- 'chart/**'
branches: [ main ]
jobs:
deploy:
uses: flmorg/universal-workflows/.github/workflows/chart.install.yml@main
with:
githubContext: ${{ toJSON(github) }}
chartRepo: oci://ghcr.io/flmorg
chartName: universal-chart
version: ^1.0.0
valuesPath: chart/values.yaml
releaseName: main
secrets: inherit

View File

@@ -1,20 +0,0 @@
on:
push:
tags:
- "v*.*.*"
# paths:
# - 'code/**'
# branches: [ main ]
pull_request:
paths:
- 'code/**'
jobs:
build:
uses: flmorg/cleanuperr/.github/workflows/build.yml@main
secrets: inherit
# deploy:
# needs: [ build ]
# uses: flmorg/cleanuperr/.github/workflows/deploy.yml@main
# secrets: inherit

View File

@@ -1,11 +1,164 @@
name: Release Build
on:
push:
tags:
- "v*.*.*"
workflow_dispatch:
inputs:
version:
description: 'Version to release (e.g., 1.0.0)'
required: false
default: ''
jobs:
release:
uses: flmorg/universal-workflows/.github/workflows/dotnet.release.yml@main
with:
githubContext: ${{ toJSON(github) }}
secrets: inherit
# Validate release
validate:
runs-on: ubuntu-latest
outputs:
app_version: ${{ steps.version.outputs.app_version }}
release_version: ${{ steps.version.outputs.release_version }}
is_tag: ${{ steps.version.outputs.is_tag }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Get version info
id: version
run: |
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
# Tag event
release_version=${GITHUB_REF##refs/tags/}
app_version=${release_version#v}
is_tag=true
elif [[ -n "${{ github.event.inputs.version }}" ]]; then
# Manual workflow with version
app_version="${{ github.event.inputs.version }}"
release_version="v$app_version"
is_tag=false
else
# Manual workflow without version
app_version="0.0.1-dev-$(date +%Y%m%d-%H%M%S)"
release_version="v$app_version"
is_tag=false
fi
echo "app_version=$app_version" >> $GITHUB_OUTPUT
echo "release_version=$release_version" >> $GITHUB_OUTPUT
echo "is_tag=$is_tag" >> $GITHUB_OUTPUT
echo "🏷️ Release Version: $release_version"
echo "📱 App Version: $app_version"
echo "🔖 Is Tag: $is_tag"
# Build portable executables
build-executables:
needs: validate
uses: ./.github/workflows/build_executable.yml
secrets: inherit
# Build Windows installer
build-windows-installer:
needs: validate
uses: ./.github/workflows/build-windows-installer.yml
secrets: inherit
# Build macOS Intel installer
build-macos-intel:
needs: validate
uses: ./.github/workflows/build-macos-intel-installer.yml
secrets: inherit
# Build macOS ARM installer
build-macos-arm:
needs: validate
uses: ./.github/workflows/build-macos-arm-installer.yml
secrets: inherit
# Create GitHub release
create-release:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
steps:
- name: Get vault secrets
uses: hashicorp/vault-action@v2
with:
url: ${{ secrets.VAULT_HOST }}
method: approle
roleId: ${{ secrets.VAULT_ROLE_ID }}
secretId: ${{ secrets.VAULT_SECRET_ID }}
secrets:
secrets/data/github repo_readonly_pat | REPO_READONLY_PAT
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: ./artifacts
- name: List downloaded artifacts
run: |
echo "📦 Downloaded artifacts:"
find ./artifacts -type f \( -name "*.zip" -o -name "*.pkg" -o -name "*.exe" \) | sort
- name: Create release
uses: softprops/action-gh-release@v2
with:
name: Cleanuparr ${{ needs.validate.outputs.release_version }}
tag_name: ${{ needs.validate.outputs.release_version }}
token: ${{ env.REPO_READONLY_PAT }}
make_latest: true
generate_release_notes: true
prerelease: ${{ contains(needs.validate.outputs.app_version, '-') }}
files: |
./artifacts/**/*.zip
./artifacts/**/*.pkg
./artifacts/**/*.exe
# Summary job
summary:
needs: [validate, build-executables, build-windows-installer, build-macos-intel, build-macos-arm]
runs-on: ubuntu-latest
if: always()
steps:
- name: Build Summary
run: |
echo "## 🏗️ Cleanuparr Build Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Version**: ${{ needs.validate.outputs.release_version }}" >> $GITHUB_STEP_SUMMARY
echo "**App Version**: ${{ needs.validate.outputs.app_version }}" >> $GITHUB_STEP_SUMMARY
echo "**Is Tag**: ${{ needs.validate.outputs.is_tag }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "### Build Results" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Check job results
if [[ "${{ needs.build-executables.result }}" == "success" ]]; then
echo "✅ **Portable Executables**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Portable Executables**: ${{ needs.build-executables.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-windows-installer.result }}" == "success" ]]; then
echo "✅ **Windows Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **Windows Installer**: ${{ needs.build-windows-installer.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-intel.result }}" == "success" ]]; then
echo "✅ **macOS Intel Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS Intel Installer**: ${{ needs.build-macos-intel.result }}" >> $GITHUB_STEP_SUMMARY
fi
if [[ "${{ needs.build-macos-arm.result }}" == "success" ]]; then
echo "✅ **macOS ARM Installer**: Success" >> $GITHUB_STEP_SUMMARY
else
echo "❌ **macOS ARM Installer**: ${{ needs.build-macos-arm.result }}" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
echo "🎉 **Build completed!**" >> $GITHUB_STEP_SUMMARY

BIN
Logo/favicon.ico Normal file
View File

New binary image (4.2 KiB); preview not shown.

View File

Binary image modified (112 KiB before, 112 KiB after); preview not shown.

View File

@@ -1,35 +1,35 @@
_Love this project? Give it a ⭐️ and let others know!_
# <img width="24px" src="./Logo/256.png" alt="cleanuperr"></img> Cleanuperr
# <img width="24px" src="./Logo/256.png" alt="Cleanuparr"></img> Cleanuparr
[![Discord](https://img.shields.io/discord/1306721212587573389?color=7289DA&label=Discord&style=for-the-badge&logo=discord)](https://discord.gg/SCtMCgtsc4)
Cleanuperr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuperr can also trigger a search to replace the deleted shows/movies.
Cleanuparr is a tool for automating the cleanup of unwanted or blocked files in Sonarr, Radarr, and supported download clients like qBittorrent. It removes incomplete or blocked downloads, updates queues, and enforces blacklists or whitelists to manage file selection. After removing blocked content, Cleanuparr can also trigger a search to replace the deleted shows/movies.
Cleanuperr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made Cleanuperr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
Cleanuparr was created primarily to address malicious files, such as `*.lnk` or `*.zipx`, that were getting stuck in Sonarr/Radarr and required manual intervention. Some of the reddit posts that made Cleanuparr come to life can be found [here](https://www.reddit.com/r/sonarr/comments/1gqnx16/psa_sonarr_downloaded_a_virus/), [here](https://www.reddit.com/r/sonarr/comments/1gqwklr/sonar_downloaded_a_mkv_file_which_looked_like_a/), [here](https://www.reddit.com/r/sonarr/comments/1gpw2wa/downloaded_waiting_to_import/) and [here](https://www.reddit.com/r/sonarr/comments/1gpi344/downloads_not_importing_no_files_found/).
> [!IMPORTANT]
> **Features:**
> - Strike system to mark bad downloads.
> - Remove and block downloads that reached a maximum number of strikes.
> - Remove and block downloads that are **failing to be imported** by the arrs. [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/queue-cleaner/import-failed)
> - Remove and block downloads that are **stalled** or in **metadata downloading** state. [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/queue-cleaner/stalled)
> - Remove and block downloads that have a **low download speed** or **high estimated completion time**. [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/queue-cleaner/slow)
> - Remove and block downloads blocked by qBittorrent or by Cleanuperr's **Content Blocker**. [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/content-blocker/general)
> - Remove and block downloads that are **failing to be imported** by the arrs. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/import-failed)
> - Remove and block downloads that are **stalled** or in **metadata downloading** state. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/stalled)
> - Remove and block downloads that have a **low download speed** or **high estimated completion time**. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/queue-cleaner/slow)
> - Remove and block downloads blocked by qBittorrent or by Cleanuparr's **Content Blocker**. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/content-blocker/general)
> - Automatically trigger a search for downloads removed from the arrs.
> - Clean up downloads that have been **seeding** for a certain amount of time. [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/download-cleaner/seeding)
> - Remove downloads that are **orphaned**/have no **hardlinks**/are not referenced by the arrs anymore (with [cross-seed](https://www.cross-seed.org/) support). [configuration](https://flmorg.github.io/cleanuperr/docs/configuration/download-cleaner/hardlinks)
> - Notify on strike or download removal. [configuration](https://flmorg.github.io/cleanuperr/docs/category/notifications)
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuperr.
> - Clean up downloads that have been **seeding** for a certain amount of time. [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/download-cleaner/seeding)
> - Remove downloads that are **orphaned**/have no **hardlinks**/are not referenced by the arrs anymore (with [cross-seed](https://www.cross-seed.org/) support). [configuration](https://cleanuparr.github.io/cleanuparr/docs/configuration/download-cleaner/hardlinks)
> - Notify on strike or download removal. [configuration](https://cleanuparr.github.io/cleanuparr/docs/category/notifications)
> - Ignore certain torrent hashes, categories, tags or trackers from being processed by Cleanuparr.
Cleanuperr supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
Cleanuparr supports both qBittorrent's built-in exclusion features and its own blocklist-based system. Binaries for all platforms are provided, along with Docker images for easy deployment.
## Quick Start
> [!NOTE]
>
> 1. **Docker (Recommended)**
> Pull the Docker image from `ghcr.io/flmorg/cleanuperr:latest`.
> Pull the Docker image from `ghcr.io/Cleanuparr/Cleanuparr:latest`.
>
> 2. **Unraid (for Unraid users)**
> Use the Unraid Community App.
@@ -39,13 +39,13 @@ Cleanuperr supports both qBittorrent's built-in exclusion features and its own b
# Docs
Docs can be found [here](https://flmorg.github.io/cleanuperr/).
Docs can be found [here](https://Cleanuparr.github.io/Cleanuparr/).
# <img style="vertical-align: middle;" width="24px" src="./Logo/256.png" alt="Cleanuperr"> <span style="vertical-align: middle;">Cleanuperr</span> <img src="https://raw.githubusercontent.com/FortAwesome/Font-Awesome/6.x/svgs/solid/x.svg" height="24px" width="30px" style="vertical-align: middle;"> <span style="vertical-align: middle;">Huntarr</span> <img style="vertical-align: middle;" width="24px" src="https://github.com/plexguide/Huntarr.io/blob/main/frontend/static/logo/512.png?raw=true" alt Huntarr></img>
# <img style="vertical-align: middle;" width="24px" src="./Logo/256.png" alt="Cleanuparr"> <span style="vertical-align: middle;">Cleanuparr</span> <img src="https://raw.githubusercontent.com/FortAwesome/Font-Awesome/6.x/svgs/solid/x.svg" height="24px" width="30px" style="vertical-align: middle;"> <span style="vertical-align: middle;">Huntarr</span> <img style="vertical-align: middle;" width="24px" src="https://github.com/plexguide/Huntarr.io/blob/main/frontend/static/logo/512.png?raw=true" alt Huntarr></img>
Think of **Cleanuperr** as the janitor of your server; it keeps your download queue spotless, removes clutter, and blocks malicious files. Now imagine combining that with **Huntarr**, the compulsive librarian who finds missing and upgradable media to complete your collection
Think of **Cleanuparr** as the janitor of your server; it keeps your download queue spotless, removes clutter, and blocks malicious files. Now imagine combining that with **Huntarr**, the compulsive librarian who finds missing and upgradable media to complete your collection
While **Huntarr** fills in the blanks and improves what you already have, **Cleanuperr** makes sure that only clean downloads get through. If you're aiming for a reliable and self-sufficient setup, **Cleanuperr** and **Huntarr** will take your automated media stack to another level.
While **Huntarr** fills in the blanks and improves what you already have, **Cleanuparr** makes sure that only clean downloads get through. If you're aiming for a reliable and self-sufficient setup, **Cleanuparr** and **Huntarr** will take your automated media stack to another level.
<span style="font-size:24px"> ➡️ [**Huntarr**](https://github.com/plexguide/Huntarr.io) <span style="vertical-align: middle">![Huntarr](https://img.shields.io/github/stars/plexguide/Huntarr.io?style=social)</span></span>
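
Tying back to the Quick Start note in the README above: a minimal, illustrative compose sketch, not taken from the repository. It assumes the image path from the Quick Start (lowercased, since container registries require lowercase repository names), the 11011 port and PUID/PGID/UMASK/TZ defaults from the Dockerfile later in this diff, and a hypothetical `/config` mount.

services:
  cleanuparr:
    image: ghcr.io/cleanuparr/cleanuparr:latest
    container_name: cleanuparr
    ports:
      - "11011:11011"          # HTTP_PORTS default exposed by the Dockerfile
    environment:
      - PUID=1000              # defaults taken from the Dockerfile's ENV block
      - PGID=1000
      - UMASK=022
      - TZ=Etc/UTC
    volumes:
      - ./config:/config       # hypothetical mount point; check the docs for the actual config path
    restart: unless-stopped

Bringing it up with `docker compose up -d` should make the web UI reachable on port 11011.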

View File

@@ -1,187 +0,0 @@
deployment:
replicas: 1
strategy:
type: RollingUpdate
maxSurge: 1
maxUnavailable: 0
containers:
- name: qbit
image:
repository: ghcr.io/flmorg/cleanuperr
tag: latest
env:
- name: DRY_RUN
value: "false"
- name: LOGGING__LOGLEVEL
value: Verbose
- name: LOGGING__FILE__ENABLED
value: "true"
- name: LOGGING__FILE__PATH
value: /var/logs
- name: LOGGING__ENHANCED
value: "true"
- name: TRIGGERS__QUEUECLEANER
value: 0 0/5 * * * ?
- name: TRIGGERS__CONTENTBLOCKER
value: 0 0/5 * * * ?
- name: QUEUECLEANER__ENABLED
value: "true"
- name: QUEUECLEANER__RUNSEQUENTIALLY
value: "true"
- name: QUEUECLEANER__IMPORT_FAILED_MAX_STRIKES
value: "3"
- name: QUEUECLEANER__IMPORT_FAILED_IGNORE_PRIVATE
value: "false"
- name: QUEUECLEANER__IMPORT_FAILED_DELETE_PRIVATE
value: "false"
- name: QUEUECLEANER__STALLED_MAX_STRIKES
value: "3"
- name: QUEUECLEANER__STALLED_IGNORE_PRIVATE
value: "false"
- name: QUEUECLEANER__STALLED_DELETE_PRIVATE
value: "false"
- name: CONTENTBLOCKER__ENABLED
value: "true"
- name: CONTENTBLOCKER__IGNORE_PRIVATE
value: "true"
- name: CONTENTBLOCKER__DELETE_PRIVATE
value: "false"
- name: DOWNLOADCLEANER__ENABLED
value: "true"
- name: DOWNLOADCLEANER__DELETE_PRIVATE
value: "true"
- name: DOWNLOADCLEANER__CATEGORIES__0__NAME
value: unlinked
- name: DOWNLOADCLEANER__CATEGORIES__0__MAX_SEED_TIME
value: "240"
- name: DOWNLOADCLEANER__UNLINKED_TARGET_CATEGORY
value: unlinked
- name: DOWNLOADCLEANER__UNLINKED_IGNORED_ROOT_DIR
value: /downloads
- name: DOWNLOADCLEANER__UNLINKED_CATEGORIES__0
value: sonarr-low
- name: DOWNLOADCLEANER__UNLINKED_CATEGORIES__1
value: sonarr-high
- name: DOWNLOADCLEANER__UNLINKED_CATEGORIES__2
value: radarr-low
- name: DOWNLOADCLEANER__UNLINKED_CATEGORIES__3
value: radarr-high
- name: DOWNLOAD_CLIENT
value: qbittorrent
- name: QBITTORRENT__URL
value: http://service.qbittorrent-videos.svc.cluster.local
- name: SONARR__ENABLED
value: "true"
- name: SONARR__SEARCHTYPE
value: Episode
- name: SONARR__BLOCK__TYPE
value: blacklist
- name: SONARR__BLOCK__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: SONARR__INSTANCES__0__URL
value: http://service.sonarr-low-res.svc.cluster.local
- name: SONARR__INSTANCES__1__URL
value: http://service.sonarr-high-res.svc.cluster.local
- name: RADARR__ENABLED
value: "true"
- name: RADARR__BLOCK__TYPE
value: blacklist
- name: RADARR__BLOCK__PATH
value: https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist
- name: RADARR__INSTANCES__0__URL
value: http://service.radarr-low-res.svc.cluster.local
- name: RADARR__INSTANCES__1__URL
value: http://service.radarr-high-res.svc.cluster.local
- name: NOTIFIARR__ON_IMPORT_FAILED_STRIKE
value: "true"
- name: NOTIFIARR__ON_STALLED_STRIKE
value: "true"
- name: NOTIFIARR__ON_QUEUE_ITEM_DELETED
value: "true"
- name: NOTIFIARR__ON_DOWNLOAD_CLEANED
value: "true"
- name: NOTIFIARR__CHANNEL_ID
value: "1340708411259748413"
envFromSecret:
- secretName: qbit-auth
envs:
- name: QBITTORRENT__USERNAME
key: QBIT_USER
- name: QBITTORRENT__PASSWORD
key: QBIT_PASS
- secretName: sonarr-auth
envs:
- name: SONARR__INSTANCES__0__APIKEY
key: SNRL_API_KEY
- name: SONARR__INSTANCES__1__APIKEY
key: SNRH_API_KEY
- secretName: radarr-auth
envs:
- name: RADARR__INSTANCES__0__APIKEY
key: RDRL_API_KEY
- name: RADARR__INSTANCES__1__APIKEY
key: RDRH_API_KEY
- secretName: notifiarr-auth
envs:
- name: NOTIFIARR__API_KEY
key: API_KEY
resources:
requests:
cpu: 0m
memory: 0Mi
limits:
cpu: 1000m
memory: 1000Mi
volumeMounts:
- name: storage
mountPath: /var/logs
subPath: cleanuperr/logs
- name: storage
mountPath: /downloads/general2
subPath: media/downloads/general2
- name: storage
mountPath: /downloads/cross-seed
subPath: media/downloads/cross-seed
volumes:
- name: storage
type: pvc
typeName: storage-pvc
pvcs:
- name: storage-pvc
storageClassName: local-path-persistent
accessModes:
- ReadWriteOnce
size: 1Gi
volumeMode: Filesystem
vaultSecrets:
- name: qbit-auth
path: secrets/qbittorrent
templates:
QBIT_USER: "{% .Secrets.username %}"
QBIT_PASS: "{% .Secrets.password %}"
- name: radarr-auth
path: secrets/radarr
templates:
RDRL_API_KEY: "{% .Secrets.low_api_key %}"
RDRH_API_KEY: "{% .Secrets.high_api_key %}"
- name: sonarr-auth
path: secrets/sonarr
templates:
SNRL_API_KEY: "{% .Secrets.low_api_key %}"
SNRH_API_KEY: "{% .Secrets.high_api_key %}"
- name: notifiarr-auth
path: secrets/notifiarr
templates:
API_KEY: "{% .Secrets.passthrough_api_key %}"

41
code/.dockerignore Normal file
View File

@@ -0,0 +1,41 @@
# Documentation
*.md
docs/
# Version control
.git/
.gitignore
# IDE files
.vscode/
.idea/
*.swp
*.swo
# OS files
.DS_Store
Thumbs.db
# Node.js
frontend/node_modules/
frontend/dist/
frontend/.angular/
# .NET
backend/bin/
backend/obj/
backend/*/bin/
backend/*/obj/
backend/.vs/
# Build artifacts
artifacts/
dist/
# Test files
backend/**/*Tests/
backend/**/Tests/
# Development files
docker-compose*.yml
test/

View File

@@ -1,6 +0,0 @@
namespace Common.Attributes;
[AttributeUsage(AttributeTargets.Method, Inherited = true)]
public class DryRunSafeguardAttribute : Attribute
{
}

View File

@@ -1,23 +0,0 @@
using Common.Configuration.ContentBlocker;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.Arr;
public abstract record ArrConfig
{
public required bool Enabled { get; init; }
public Block Block { get; init; } = new();
[ConfigurationKeyName("IMPORT_FAILED_MAX_STRIKES")]
public short ImportFailedMaxStrikes { get; init; } = -1;
public required List<ArrInstance> Instances { get; init; }
}
public readonly record struct Block
{
public BlocklistType Type { get; init; }
public string? Path { get; init; }
}

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.Arr;
public sealed class ArrInstance
{
public required Uri Url { get; set; }
public required string ApiKey { get; set; }
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration.Arr;
public sealed record LidarrConfig : ArrConfig
{
public const string SectionName = "Lidarr";
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration.Arr;
public sealed record RadarrConfig : ArrConfig
{
public const string SectionName = "Radarr";
}

View File

@@ -1,8 +0,0 @@
namespace Common.Configuration.Arr;
public sealed record SonarrConfig : ArrConfig
{
public const string SectionName = "Sonarr";
public SonarrSearchType SearchType { get; init; }
}

View File

@@ -1,23 +0,0 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.ContentBlocker;
public sealed record ContentBlockerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "ContentBlocker";
public required bool Enabled { get; init; }
[ConfigurationKeyName("IGNORE_PRIVATE")]
public bool IgnorePrivate { get; init; }
[ConfigurationKeyName("DELETE_PRIVATE")]
public bool DeletePrivate { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
public void Validate()
{
}
}

View File

@@ -1,45 +0,0 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadCleaner;
public sealed record CleanCategory : IConfig
{
public required string Name { get; init; }
/// <summary>
/// Max ratio before removing a download.
/// </summary>
[ConfigurationKeyName("MAX_RATIO")]
public required double MaxRatio { get; init; } = -1;
/// <summary>
/// Min number of hours to seed before removing a download, if the ratio has been met.
/// </summary>
[ConfigurationKeyName("MIN_SEED_TIME")]
public required double MinSeedTime { get; init; } = 0;
/// <summary>
/// Number of hours to seed before removing a download.
/// </summary>
[ConfigurationKeyName("MAX_SEED_TIME")]
public required double MaxSeedTime { get; init; } = -1;
public void Validate()
{
if (string.IsNullOrWhiteSpace(Name))
{
throw new ValidationException($"{nameof(Name)} can not be empty");
}
if (MaxRatio < 0 && MaxSeedTime < 0)
{
throw new ValidationException($"both {nameof(MaxRatio)} and {nameof(MaxSeedTime)} are disabled");
}
if (MinSeedTime < 0)
{
throw new ValidationException($"{nameof(MinSeedTime)} can not be negative");
}
}
}

View File

@@ -1,71 +0,0 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadCleaner;
public sealed record DownloadCleanerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "DownloadCleaner";
public bool Enabled { get; init; }
public List<CleanCategory>? Categories { get; init; }
[ConfigurationKeyName("DELETE_PRIVATE")]
public bool DeletePrivate { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
[ConfigurationKeyName("UNLINKED_TARGET_CATEGORY")]
public string UnlinkedTargetCategory { get; init; } = "cleanuperr-unlinked";
[ConfigurationKeyName("UNLINKED_USE_TAG")]
public bool UnlinkedUseTag { get; init; }
[ConfigurationKeyName("UNLINKED_IGNORED_ROOT_DIR")]
public string UnlinkedIgnoredRootDir { get; init; } = string.Empty;
[ConfigurationKeyName("UNLINKED_CATEGORIES")]
public List<string>? UnlinkedCategories { get; init; }
public void Validate()
{
if (!Enabled)
{
return;
}
if (Categories?.GroupBy(x => x.Name).Any(x => x.Count() > 1) is true)
{
throw new ValidationException("duplicated clean categories found");
}
Categories?.ForEach(x => x.Validate());
if (string.IsNullOrEmpty(UnlinkedTargetCategory))
{
return;
}
if (UnlinkedCategories?.Count is null or 0)
{
throw new ValidationException("no unlinked categories configured");
}
if (UnlinkedCategories.Contains(UnlinkedTargetCategory))
{
throw new ValidationException($"{SectionName.ToUpperInvariant()}__UNLINKED_TARGET_CATEGORY should not be present in {SectionName.ToUpperInvariant()}__UNLINKED_CATEGORIES");
}
if (UnlinkedCategories.Any(string.IsNullOrEmpty))
{
throw new ValidationException("empty unlinked category filter found");
}
if (!string.IsNullOrEmpty(UnlinkedIgnoredRootDir) && !Directory.Exists(UnlinkedIgnoredRootDir))
{
throw new ValidationException($"{UnlinkedIgnoredRootDir} root directory does not exist");
}
}
}

View File

@@ -1,24 +0,0 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed record DelugeConfig : IConfig
{
public const string SectionName = "Deluge";
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = string.Empty;
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}

View File

@@ -1,9 +0,0 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed record DownloadClientConfig
{
[ConfigurationKeyName("DOWNLOAD_CLIENT")]
public Enums.DownloadClient DownloadClient { get; init; } = Enums.DownloadClient.None;
}

View File

@@ -1,26 +0,0 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public sealed class QBitConfig : IConfig
{
public const string SectionName = "qBittorrent";
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = string.Empty;
public string? Username { get; init; }
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}

View File

@@ -1,26 +0,0 @@
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.DownloadClient;
public record TransmissionConfig : IConfig
{
public const string SectionName = "Transmission";
public Uri? Url { get; init; }
[ConfigurationKeyName("URL_BASE")]
public string UrlBase { get; init; } = "transmission";
public string? Username { get; init; }
public string? Password { get; init; }
public void Validate()
{
if (Url is null)
{
throw new ValidationException($"{nameof(Url)} is empty");
}
}
}

View File

@@ -1,9 +0,0 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.General;
public sealed record DryRunConfig
{
[ConfigurationKeyName("DRY_RUN")]
public bool IsDryRun { get; init; }
}

View File

@@ -1,25 +0,0 @@
using Common.Enums;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.General;
public sealed record HttpConfig : IConfig
{
[ConfigurationKeyName("HTTP_MAX_RETRIES")]
public ushort MaxRetries { get; init; }
[ConfigurationKeyName("HTTP_TIMEOUT")]
public ushort Timeout { get; init; } = 100;
[ConfigurationKeyName("HTTP_VALIDATE_CERT")]
public CertificateValidationType CertificateValidation { get; init; } = CertificateValidationType.Enabled;
public void Validate()
{
if (Timeout is 0)
{
throw new ValidationException("HTTP_TIMEOUT must be greater than 0");
}
}
}

View File

@@ -1,12 +0,0 @@
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.General;
public sealed record SearchConfig
{
[ConfigurationKeyName("SEARCH_ENABLED")]
public bool SearchEnabled { get; init; } = true;
[ConfigurationKeyName("SEARCH_DELAY")]
public ushort SearchDelay { get; init; } = 30;
}

View File

@@ -1,12 +0,0 @@
namespace Common.Configuration.General;
public sealed class TriggersConfig
{
public const string SectionName = "Triggers";
public required string QueueCleaner { get; init; }
public required string ContentBlocker { get; init; }
public required string DownloadCleaner { get; init; }
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public interface IConfig
{
void Validate();
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public interface IIgnoredDownloadsConfig
{
string? IgnoredDownloadsPath { get; }
}

View File

@@ -1,6 +0,0 @@
namespace Common.Configuration;
public interface IJobConfig : IConfig
{
bool Enabled { get; init; }
}

View File

@@ -1,12 +0,0 @@
namespace Common.Configuration.Logging;
public class FileLogConfig : IConfig
{
public bool Enabled { get; set; }
public string Path { get; set; } = string.Empty;
public void Validate()
{
}
}

View File

@@ -1,18 +0,0 @@
using Serilog.Events;
namespace Common.Configuration.Logging;
public class LoggingConfig : IConfig
{
public const string SectionName = "Logging";
public LogEventLevel LogLevel { get; set; }
public bool Enhanced { get; set; }
public FileLogConfig? File { get; set; }
public void Validate()
{
}
}

View File

@@ -1,119 +0,0 @@
using Common.CustomDataTypes;
using Common.Exceptions;
using Microsoft.Extensions.Configuration;
namespace Common.Configuration.QueueCleaner;
public sealed record QueueCleanerConfig : IJobConfig, IIgnoredDownloadsConfig
{
public const string SectionName = "QueueCleaner";
public required bool Enabled { get; init; }
public required bool RunSequentially { get; init; }
[ConfigurationKeyName("IGNORED_DOWNLOADS_PATH")]
public string? IgnoredDownloadsPath { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_MAX_STRIKES")]
public ushort ImportFailedMaxStrikes { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_IGNORE_PRIVATE")]
public bool ImportFailedIgnorePrivate { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_DELETE_PRIVATE")]
public bool ImportFailedDeletePrivate { get; init; }
[ConfigurationKeyName("IMPORT_FAILED_IGNORE_PATTERNS")]
public IReadOnlyList<string>? ImportFailedIgnorePatterns { get; init; }
[ConfigurationKeyName("STALLED_MAX_STRIKES")]
public ushort StalledMaxStrikes { get; init; }
[ConfigurationKeyName("STALLED_RESET_STRIKES_ON_PROGRESS")]
public bool StalledResetStrikesOnProgress { get; init; }
[ConfigurationKeyName("STALLED_IGNORE_PRIVATE")]
public bool StalledIgnorePrivate { get; init; }
[ConfigurationKeyName("STALLED_DELETE_PRIVATE")]
public bool StalledDeletePrivate { get; init; }
[ConfigurationKeyName("DOWNLOADING_METADATA_MAX_STRIKES")]
public ushort DownloadingMetadataMaxStrikes { get; init; }
[ConfigurationKeyName("SLOW_MAX_STRIKES")]
public ushort SlowMaxStrikes { get; init; }
[ConfigurationKeyName("SLOW_RESET_STRIKES_ON_PROGRESS")]
public bool SlowResetStrikesOnProgress { get; init; }
[ConfigurationKeyName("SLOW_IGNORE_PRIVATE")]
public bool SlowIgnorePrivate { get; init; }
[ConfigurationKeyName("SLOW_DELETE_PRIVATE")]
public bool SlowDeletePrivate { get; init; }
[ConfigurationKeyName("SLOW_MIN_SPEED")]
public string SlowMinSpeed { get; init; } = string.Empty;
public ByteSize SlowMinSpeedByteSize => string.IsNullOrEmpty(SlowMinSpeed) ? new ByteSize(0) : ByteSize.Parse(SlowMinSpeed);
[ConfigurationKeyName("SLOW_MAX_TIME")]
public double SlowMaxTime { get; init; }
[ConfigurationKeyName("SLOW_IGNORE_ABOVE_SIZE")]
public string SlowIgnoreAboveSize { get; init; } = string.Empty;
public ByteSize? SlowIgnoreAboveSizeByteSize => string.IsNullOrEmpty(SlowIgnoreAboveSize) ? null : ByteSize.Parse(SlowIgnoreAboveSize);
public void Validate()
{
if (ImportFailedMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__IMPORT_FAILED_MAX_STRIKES must be 3");
}
if (StalledMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__STALLED_MAX_STRIKES must be 3");
}
if (DownloadingMetadataMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__DOWNLOADING_METADATA_MAX_STRIKES must be 3");
}
if (SlowMaxStrikes is > 0 and < 3)
{
throw new ValidationException($"the minimum value for {SectionName.ToUpperInvariant()}__SLOW_MAX_STRIKES must be 3");
}
if (SlowMaxStrikes > 0)
{
bool isSlowSpeedSet = !string.IsNullOrEmpty(SlowMinSpeed);
if (isSlowSpeedSet && ByteSize.TryParse(SlowMinSpeed, out _) is false)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_MIN_SPEED");
}
if (SlowMaxTime < 0)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_MAX_TIME");
}
if (!isSlowSpeedSet && SlowMaxTime is 0)
{
throw new ValidationException($"either {SectionName.ToUpperInvariant()}__SLOW_MIN_SPEED or {SectionName.ToUpperInvariant()}__SLOW_MAX_STRIKES must be set");
}
bool isSlowIgnoreAboveSizeSet = !string.IsNullOrEmpty(SlowIgnoreAboveSize);
if (isSlowIgnoreAboveSizeSet && ByteSize.TryParse(SlowIgnoreAboveSize, out _) is false)
{
throw new ValidationException($"invalid value for {SectionName.ToUpperInvariant()}__SLOW_IGNORE_ABOVE_SIZE");
}
}
}
}
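
This record is removed in this changeset, but for orientation while reading the diff: the ConfigurationKeyName attributes above map to SECTION__KEY environment variables (double underscores), matching the names used in the deleted chart values earlier. An illustrative environment block, with values mirroring the development appsettings further down; not part of this diff:

environment:
  - QUEUECLEANER__ENABLED=true
  - QUEUECLEANER__RUNSEQUENTIALLY=true
  - QUEUECLEANER__STALLED_MAX_STRIKES=3
  - QUEUECLEANER__STALLED_RESET_STRIKES_ON_PROGRESS=true
  - QUEUECLEANER__SLOW_MAX_STRIKES=5
  - QUEUECLEANER__SLOW_MIN_SPEED=1MB
  - QUEUECLEANER__SLOW_MAX_TIME=20
  - QUEUECLEANER__SLOW_IGNORE_ABOVE_SIZE=4GB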

View File

@@ -1,8 +0,0 @@
namespace Common.Enums;
public enum CertificateValidationType
{
Enabled = 0,
DisabledForLocalAddresses = 1,
Disabled = 2
}

View File

@@ -1,10 +0,0 @@
namespace Common.Enums;
public enum DownloadClient
{
QBittorrent,
Deluge,
Transmission,
None,
Disabled
}

72
code/Dockerfile Normal file
View File

@@ -0,0 +1,72 @@
# Build Angular frontend
FROM --platform=$BUILDPLATFORM node:18-alpine AS frontend-build
WORKDIR /app
# Copy package files first for better layer caching
COPY frontend/package*.json ./
RUN npm ci && npm install -g @angular/cli
# Copy source code
COPY frontend/ .
# Build with appropriate base-href and deploy-url
RUN npm run build
# Build .NET backend
FROM --platform=$BUILDPLATFORM mcr.microsoft.com/dotnet/sdk:9.0-bookworm-slim AS build
ARG TARGETARCH
ARG VERSION=0.0.1
ARG PACKAGES_USERNAME
ARG PACKAGES_PAT
WORKDIR /app
EXPOSE 11011
# Copy solution and project files first for better layer caching
# COPY backend/*.sln ./backend/
# COPY backend/*/*.csproj ./backend/*/
# Copy source code
COPY backend/ ./backend/
# Restore dependencies
RUN dotnet nuget add source --username ${PACKAGES_USERNAME} --password ${PACKAGES_PAT} --store-password-in-clear-text --name Cleanuparr https://nuget.pkg.github.com/Cleanuparr/index.json
# Build and publish
RUN dotnet publish ./backend/Cleanuparr.Api/Cleanuparr.Api.csproj \
-a $TARGETARCH \
-c Release \
-o /app/publish \
/p:Version=${VERSION} \
/p:PublishSingleFile=true \
/p:DebugSymbols=false
# Runtime stage
FROM mcr.microsoft.com/dotnet/aspnet:9.0-bookworm-slim
# Install required packages for user management and timezone support
RUN apt-get update && apt-get install -y \
tzdata \
gosu \
&& rm -rf /var/lib/apt/lists/*
ENV PUID=1000 \
PGID=1000 \
UMASK=022 \
TZ=Etc/UTC \
HTTP_PORTS=11011
# Fix FileSystemWatcher in Docker: https://github.com/dotnet/dotnet-docker/issues/3546
ENV DOTNET_USE_POLLING_FILE_WATCHER=true
WORKDIR /app
# Copy backend
COPY --from=build /app/publish .
# Copy frontend to wwwroot
COPY --from=frontend-build /app/dist/ui/browser ./wwwroot
# Copy entrypoint script
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]
CMD ["./Cleanuparr"]

View File

@@ -1,9 +0,0 @@
namespace Domain.Enums;
public enum InstanceType
{
Sonarr,
Radarr,
Lidarr,
Readarr
}

View File

@@ -1,8 +0,0 @@
namespace Domain.Models.Arr.Blocking;
public record BlockedItem
{
public required string Hash { get; init; }
public required Uri InstanceUrl { get; init; }
}

View File

@@ -1,8 +0,0 @@
namespace Domain.Models.Arr.Blocking;
public sealed record LidarrBlockedItem : BlockedItem
{
public required long AlbumId { get; init; }
public required long ArtistId { get; init; }
}

View File

@@ -1,6 +0,0 @@
namespace Domain.Models.Arr.Blocking;
public sealed record RadarrBlockedItem : BlockedItem
{
public required long MovieId { get; init; }
}

View File

@@ -1,10 +0,0 @@
namespace Domain.Models.Arr.Blocking;
public sealed record SonarrBlockedItem : BlockedItem
{
public required long EpisodeId { get; init; }
public required long SeasonNumber { get; init; }
public required long SeriesId { get; init; }
}

View File

@@ -1,28 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Common.Configuration.General;
using Common.Configuration.Logging;
using Common.Configuration.QueueCleaner;
namespace Executable.DependencyInjection;
public static class ConfigurationDI
{
public static IServiceCollection AddConfiguration(this IServiceCollection services, IConfiguration configuration) =>
services
.Configure<DryRunConfig>(configuration)
.Configure<SearchConfig>(configuration)
.Configure<QueueCleanerConfig>(configuration.GetSection(QueueCleanerConfig.SectionName))
.Configure<ContentBlockerConfig>(configuration.GetSection(ContentBlockerConfig.SectionName))
.Configure<DownloadCleanerConfig>(configuration.GetSection(DownloadCleanerConfig.SectionName))
.Configure<DownloadClientConfig>(configuration)
.Configure<QBitConfig>(configuration.GetSection(QBitConfig.SectionName))
.Configure<DelugeConfig>(configuration.GetSection(DelugeConfig.SectionName))
.Configure<TransmissionConfig>(configuration.GetSection(TransmissionConfig.SectionName))
.Configure<SonarrConfig>(configuration.GetSection(SonarrConfig.SectionName))
.Configure<RadarrConfig>(configuration.GetSection(RadarrConfig.SectionName))
.Configure<LidarrConfig>(configuration.GetSection(LidarrConfig.SectionName))
.Configure<LoggingConfig>(configuration.GetSection(LoggingConfig.SectionName));
}

View File

@@ -1,81 +0,0 @@
using Common.Configuration.Logging;
using Domain.Enums;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.QueueCleaner;
using Serilog;
using Serilog.Events;
using Serilog.Templates;
using Serilog.Templates.Themes;
namespace Executable.DependencyInjection;
public static class LoggingDI
{
public static ILoggingBuilder AddLogging(this ILoggingBuilder builder, IConfiguration configuration)
{
LoggingConfig? config = configuration.GetSection(LoggingConfig.SectionName).Get<LoggingConfig>();
if (!string.IsNullOrEmpty(config?.File?.Path) && !Directory.Exists(config.File.Path))
{
try
{
Directory.CreateDirectory(config.File.Path);
}
catch (Exception exception)
{
throw new Exception($"log file path is not a valid directory | {config.File.Path}", exception);
}
}
LoggerConfiguration logConfig = new();
const string jobNameTemplate = "{#if JobName is not null} {Concat('[',JobName,']'),JOB_PAD}{#end}";
const string instanceNameTemplate = "{#if InstanceName is not null} {Concat('[',InstanceName,']'),ARR_PAD}{#end}";
const string consoleOutputTemplate = $"[{{@t:yyyy-MM-dd HH:mm:ss.fff}} {{@l:u3}}]{jobNameTemplate}{instanceNameTemplate} {{@m}}\n{{@x}}";
const string fileOutputTemplate = $"{{@t:yyyy-MM-dd HH:mm:ss.fff zzz}} [{{@l:u3}}]{jobNameTemplate}{instanceNameTemplate} {{@m:lj}}\n{{@x}}";
LogEventLevel level = LogEventLevel.Information;
List<string> names = [nameof(ContentBlocker), nameof(QueueCleaner), nameof(DownloadCleaner)];
int jobPadding = names.Max(x => x.Length) + 2;
names = [InstanceType.Sonarr.ToString(), InstanceType.Radarr.ToString(), InstanceType.Lidarr.ToString()];
int arrPadding = names.Max(x => x.Length) + 2;
string consoleTemplate = consoleOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("ARR_PAD", arrPadding.ToString());
string fileTemplate = fileOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("ARR_PAD", arrPadding.ToString());
if (config is not null)
{
level = config.LogLevel;
if (config.File?.Enabled is true)
{
logConfig.WriteTo.File(
path: Path.Combine(config.File.Path, "cleanuperr-.txt"),
formatter: new ExpressionTemplate(fileTemplate),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true
);
}
}
Log.Logger = logConfig
.MinimumLevel.Is(level)
.MinimumLevel.Override("MassTransit", LogEventLevel.Warning)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft.Extensions.Http", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.WriteTo.Console(new ExpressionTemplate(consoleTemplate))
.Enrich.FromLogContext()
.Enrich.WithProperty("ApplicationName", "cleanuperr")
.CreateLogger();
return builder
.ClearProviders()
.AddSerilog();
}
}

View File

@@ -1,144 +0,0 @@
using Common.Configuration;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.General;
using Common.Configuration.QueueCleaner;
using Common.Helpers;
using Executable.Jobs;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.QueueCleaner;
using Quartz;
using Quartz.Spi;
namespace Executable.DependencyInjection;
public static class QuartzDI
{
public static IServiceCollection AddQuartzServices(this IServiceCollection services, IConfiguration configuration) =>
services
.AddQuartz(q =>
{
TriggersConfig? config = configuration
.GetRequiredSection(TriggersConfig.SectionName)
.Get<TriggersConfig>();
if (config is null)
{
throw new NullReferenceException("triggers configuration is null");
}
q.AddJobs(configuration, config);
})
.AddQuartzHostedService(opt =>
{
opt.WaitForJobsToComplete = true;
});
private static void AddJobs(
this IServiceCollectionQuartzConfigurator q,
IConfiguration configuration,
TriggersConfig triggersConfig
)
{
ContentBlockerConfig? contentBlockerConfig = configuration
.GetRequiredSection(ContentBlockerConfig.SectionName)
.Get<ContentBlockerConfig>();
q.AddJob<ContentBlocker>(contentBlockerConfig, triggersConfig.ContentBlocker);
QueueCleanerConfig? queueCleanerConfig = configuration
.GetRequiredSection(QueueCleanerConfig.SectionName)
.Get<QueueCleanerConfig>();
if (contentBlockerConfig?.Enabled is true && queueCleanerConfig is { Enabled: true, RunSequentially: true })
{
q.AddJob<QueueCleaner>(queueCleanerConfig, string.Empty);
q.AddJobListener(new JobChainingListener(nameof(ContentBlocker), nameof(QueueCleaner)));
}
else
{
q.AddJob<QueueCleaner>(queueCleanerConfig, triggersConfig.QueueCleaner);
}
DownloadCleanerConfig? downloadCleanerConfig = configuration
.GetRequiredSection(DownloadCleanerConfig.SectionName)
.Get<DownloadCleanerConfig>();
q.AddJob<DownloadCleaner>(downloadCleanerConfig, triggersConfig.DownloadCleaner);
}
private static void AddJob<T>(
this IServiceCollectionQuartzConfigurator q,
IJobConfig? config,
string trigger
) where T: GenericHandler
{
string typeName = typeof(T).Name;
if (config is null)
{
throw new NullReferenceException($"{typeName} configuration is null");
}
if (!config.Enabled)
{
return;
}
bool hasTrigger = trigger.Length > 0;
q.AddJob<GenericJob<T>>(opts =>
{
opts.WithIdentity(typeName);
if (!hasTrigger)
{
// jobs with no triggers need to be stored durably
opts.StoreDurably();
}
});
// skip empty triggers
if (!hasTrigger)
{
return;
}
IOperableTrigger triggerObj = (IOperableTrigger)TriggerBuilder.Create()
.WithIdentity("ExampleTrigger")
.StartNow()
.WithCronSchedule(trigger)
.Build();
IReadOnlyList<DateTimeOffset> nextFireTimes = TriggerUtils.ComputeFireTimes(triggerObj, null, 2);
TimeSpan triggerValue = nextFireTimes[1] - nextFireTimes[0];
if (triggerValue > Constants.TriggerMaxLimit)
{
throw new Exception($"{trigger} should have a fire time of maximum {Constants.TriggerMaxLimit.TotalHours} hours");
}
if (triggerValue > StaticConfiguration.TriggerValue)
{
StaticConfiguration.TriggerValue = triggerValue;
}
q.AddTrigger(opts =>
{
opts.ForJob(typeName)
.WithIdentity($"{typeName}-trigger")
.WithCronSchedule(trigger, x =>x.WithMisfireHandlingInstructionDoNothing())
.StartNow();
});
// Startup trigger
q.AddTrigger(opts =>
{
opts.ForJob(typeName)
.WithIdentity($"{typeName}-startup-trigger")
.StartNow();
});
}
}

View File

@@ -1,51 +0,0 @@
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Infrastructure.Interceptors;
using Infrastructure.Providers;
using Infrastructure.Services;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadCleaner;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Infrastructure.Verticals.DownloadRemover;
using Infrastructure.Verticals.DownloadRemover.Interfaces;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.QueueCleaner;
namespace Executable.DependencyInjection;
public static class ServicesDI
{
public static IServiceCollection AddServices(this IServiceCollection services) =>
services
.AddTransient<IDryRunInterceptor, DryRunInterceptor>()
.AddTransient<CertificateValidationService>()
.AddTransient<SonarrClient>()
.AddTransient<RadarrClient>()
.AddTransient<LidarrClient>()
.AddTransient<ArrClientFactory>()
.AddTransient<QueueCleaner>()
.AddTransient<ContentBlocker>()
.AddTransient<DownloadCleaner>()
.AddTransient<IQueueItemRemover, QueueItemRemover>()
.AddTransient<IFilenameEvaluator, FilenameEvaluator>()
.AddTransient<IHardLinkFileService, HardLinkFileService>()
.AddTransient<UnixHardLinkFileService>()
.AddTransient<WindowsHardLinkFileService>()
.AddTransient<DummyDownloadService>()
.AddTransient<QBitService>()
.AddTransient<DelugeService>()
.AddTransient<TransmissionService>()
.AddTransient<ArrQueueIterator>()
.AddTransient<DownloadServiceFactory>()
.AddTransient<IStriker, Striker>()
.AddSingleton<BlocklistProvider>()
.AddSingleton<IgnoredDownloadsProvider<QueueCleanerConfig>>()
.AddSingleton<IgnoredDownloadsProvider<ContentBlockerConfig>>()
.AddSingleton<IgnoredDownloadsProvider<DownloadCleanerConfig>>();
}

View File

@@ -1,31 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Worker">
<PropertyGroup>
<AssemblyName>cleanuperr</AssemblyName>
<TargetFramework>net9.0</TargetFramework>
<Version Condition="'$(Version)' == ''">0.0.1</Version>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<UserSecretsId>dotnet-Executable-6108b2ba-f035-47bc-addf-aaf5e20da4b8</UserSecretsId>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.2" />
<PackageReference Include="Quartz" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.13.1" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.13.1" />
<PackageReference Include="Serilog" Version="4.2.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="6.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj" />
<ProjectReference Include="..\Infrastructure\Infrastructure.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,23 +0,0 @@
using System.Reflection;
namespace Executable;
public static class HostExtensions
{
public static IHost Init(this IHost host)
{
ILogger<Program> logger = host.Services.GetRequiredService<ILogger<Program>>();
Version? version = Assembly.GetExecutingAssembly().GetName().Version;
logger.LogInformation(
version is null
? "cleanuperr version not detected"
: $"cleanuperr v{version.Major}.{version.Minor}.{version.Build}"
);
logger.LogInformation("timezone: {tz}", TimeZoneInfo.Local.DisplayName);
return host;
}
}

View File

@@ -1,12 +0,0 @@
using Executable;
using Executable.DependencyInjection;
var builder = Host.CreateApplicationBuilder(args);
builder.Services.AddInfrastructure(builder.Configuration);
builder.Logging.AddLogging(builder.Configuration);
var host = builder.Build();
host.Init();
host.Run();

View File

@@ -1,151 +0,0 @@
{
"DRY_RUN": true,
"HTTP_MAX_RETRIES": 0,
"HTTP_TIMEOUT": 100,
"HTTP_VALIDATE_CERT": "enabled",
"Logging": {
"LogLevel": "Verbose",
"Enhanced": true,
"File": {
"Enabled": false,
"Path": ""
}
},
"SEARCH_ENABLED": true,
"SEARCH_DELAY": 5,
"Triggers": {
"QueueCleaner": "0/10 * * * * ?",
"ContentBlocker": "0/10 * * * * ?",
"DownloadCleaner": "0/10 * * * * ?"
},
"ContentBlocker": {
"Enabled": true,
"IGNORE_PRIVATE": true,
"DELETE_PRIVATE": false,
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads"
},
"QueueCleaner": {
"Enabled": true,
"RunSequentially": true,
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads",
"IMPORT_FAILED_MAX_STRIKES": 3,
"IMPORT_FAILED_IGNORE_PRIVATE": true,
"IMPORT_FAILED_DELETE_PRIVATE": false,
"IMPORT_FAILED_IGNORE_PATTERNS": [
"file is a sample"
],
"STALLED_MAX_STRIKES": 3,
"STALLED_RESET_STRIKES_ON_PROGRESS": true,
"STALLED_IGNORE_PRIVATE": true,
"STALLED_DELETE_PRIVATE": false,
"DOWNLOADING_METADATA_MAX_STRIKES": 3,
"SLOW_MAX_STRIKES": 5,
"SLOW_RESET_STRIKES_ON_PROGRESS": true,
"SLOW_IGNORE_PRIVATE": false,
"SLOW_DELETE_PRIVATE": false,
"SLOW_MIN_SPEED": "1MB",
"SLOW_MAX_TIME": 20,
"SLOW_IGNORE_ABOVE_SIZE": "4GB"
},
"DownloadCleaner": {
"Enabled": false,
"DELETE_PRIVATE": false,
"CATEGORIES": [
{
"Name": "tv-sonarr",
"MAX_RATIO": -1,
"MIN_SEED_TIME": 0,
"MAX_SEED_TIME": 240
}
],
"UNLINKED_TARGET_CATEGORY": "cleanuperr-unlinked",
"UNLINKED_USE_TAG": false,
"UNLINKED_IGNORED_ROOT_DIR": "",
"UNLINKED_CATEGORIES": [
"tv-sonarr",
"radarr"
],
"IGNORED_DOWNLOADS_PATH": "../test/data/cleanuperr/ignored_downloads"
},
"DOWNLOAD_CLIENT": "qbittorrent",
"qBittorrent": {
"Url": "http://localhost:8080",
"URL_BASE": "",
"Username": "test",
"Password": "testing"
},
"Deluge": {
"Url": "http://localhost:8112",
"URL_BASE": "",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"URL_BASE": "transmission",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": true,
"IMPORT_FAILED_MAX_STRIKES": -1,
"SearchType": "Episode",
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:8989",
"ApiKey": "425d1e713f0c405cbbf359ac0502c1f4"
}
]
},
"Radarr": {
"Enabled": true,
"IMPORT_FAILED_MAX_STRIKES": -1,
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": "8b7454f668e54c5b8f44f56f93969761"
}
]
},
"Lidarr": {
"Enabled": true,
"IMPORT_FAILED_MAX_STRIKES": -1,
"Block": {
"Type": "blacklist",
"Path": "https://raw.githubusercontent.com/flmorg/cleanuperr/refs/heads/main/blacklist"
},
"Instances": [
{
"Url": "http://localhost:8686",
"ApiKey": "7f677cfdc074414397af53dd633860c5"
}
]
},
"Notifiarr": {
"ON_IMPORT_FAILED_STRIKE": true,
"ON_STALLED_STRIKE": true,
"ON_SLOW_STRIKE": true,
"ON_QUEUE_ITEM_DELETED": true,
"ON_DOWNLOAD_CLEANED": true,
"ON_CATEGORY_CHANGED": true,
"API_KEY": "",
"CHANNEL_ID": ""
},
"Apprise": {
"ON_IMPORT_FAILED_STRIKE": true,
"ON_STALLED_STRIKE": true,
"ON_SLOW_STRIKE": true,
"ON_QUEUE_ITEM_DELETED": true,
"ON_DOWNLOAD_CLEANED": true,
"ON_CATEGORY_CHANGED": true,
"URL": "http://localhost:8000",
"KEY": ""
}
}

View File

@@ -1,138 +0,0 @@
{
"DRY_RUN": false,
"HTTP_MAX_RETRIES": 0,
"HTTP_TIMEOUT": 100,
"HTTP_VALIDATE_CERT": "enabled",
"Logging": {
"LogLevel": "Information",
"Enhanced": true,
"File": {
"Enabled": false,
"Path": ""
}
},
"SEARCH_ENABLED": true,
"SEARCH_DELAY": 30,
"Triggers": {
"QueueCleaner": "0 0/5 * * * ?",
"ContentBlocker": "0 0/5 * * * ?",
"DownloadCleaner": "0 0 * * * ?"
},
"ContentBlocker": {
"Enabled": false,
"IGNORE_PRIVATE": false,
"IGNORED_DOWNLOADS_PATH": ""
},
"QueueCleaner": {
"Enabled": false,
"RunSequentially": true,
"IGNORED_DOWNLOADS_PATH": "",
"IMPORT_FAILED_MAX_STRIKES": 0,
"IMPORT_FAILED_IGNORE_PRIVATE": false,
"IMPORT_FAILED_DELETE_PRIVATE": false,
"IMPORT_FAILED_IGNORE_PATTERNS": [],
"STALLED_MAX_STRIKES": 0,
"STALLED_RESET_STRIKES_ON_PROGRESS": false,
"STALLED_IGNORE_PRIVATE": false,
"STALLED_DELETE_PRIVATE": false,
"DOWNLOADING_METADATA_MAX_STRIKES": 0,
"SLOW_MAX_STRIKES": 0,
"SLOW_RESET_STRIKES_ON_PROGRESS": true,
"SLOW_IGNORE_PRIVATE": false,
"SLOW_DELETE_PRIVATE": false,
"SLOW_MIN_SPEED": "",
"SLOW_MAX_TIME": 0,
"SLOW_IGNORE_ABOVE_SIZE": ""
},
"DownloadCleaner": {
"Enabled": false,
"DELETE_PRIVATE": false,
"CATEGORIES": [],
"UNLINKED_TARGET_CATEGORY": "cleanuperr-unlinked",
"UNLINKED_USE_TAG": false,
"UNLINKED_IGNORED_ROOT_DIR": "",
"UNLINKED_CATEGORIES": [],
"IGNORED_DOWNLOADS_PATH": ""
},
"DOWNLOAD_CLIENT": "none",
"qBittorrent": {
"Url": "http://localhost:8080",
"URL_BASE": "",
"Username": "",
"Password": ""
},
"Deluge": {
"Url": "http://localhost:8112",
"URL_BASE": "",
"Password": "testing"
},
"Transmission": {
"Url": "http://localhost:9091",
"URL_BASE": "transmission",
"Username": "test",
"Password": "testing"
},
"Sonarr": {
"Enabled": false,
"IMPORT_FAILED_MAX_STRIKES": -1,
"SearchType": "Episode",
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:8989",
"ApiKey": ""
}
]
},
"Radarr": {
"Enabled": false,
"IMPORT_FAILED_MAX_STRIKES": -1,
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:7878",
"ApiKey": ""
}
]
},
"Lidarr": {
"Enabled": false,
"IMPORT_FAILED_MAX_STRIKES": -1,
"Block": {
"Type": "blacklist",
"Path": ""
},
"Instances": [
{
"Url": "http://localhost:8686",
"ApiKey": ""
}
]
},
"Notifiarr": {
"ON_IMPORT_FAILED_STRIKE": false,
"ON_STALLED_STRIKE": false,
"ON_SLOW_STRIKE": false,
"ON_QUEUE_ITEM_DELETED": false,
"ON_DOWNLOAD_CLEANED": false,
"ON_CATEGORY_CHANGED": false,
"API_KEY": "",
"CHANNEL_ID": ""
},
"Apprise": {
"ON_IMPORT_FAILED_STRIKE": false,
"ON_STALLED_STRIKE": false,
"ON_SLOW_STRIKE": false,
"ON_QUEUE_ITEM_DELETED": false,
"ON_DOWNLOAD_CLEANED": false,
"ON_CATEGORY_CHANGED": false,
"URL": "",
"KEY": ""
}
}

View File

@@ -1,80 +0,0 @@
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using NSubstitute;
namespace Infrastructure.Tests.Verticals.DownloadClient;
public class DownloadServiceFixture : IDisposable
{
public ILogger<DownloadService> Logger { get; set; }
public IMemoryCache Cache { get; set; }
public IStriker Striker { get; set; }
public DownloadServiceFixture()
{
Logger = Substitute.For<ILogger<DownloadService>>();
Cache = Substitute.For<IMemoryCache>();
Striker = Substitute.For<IStriker>();
}
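// Creates the system under test with the substituted dependencies above. Queue cleaner and
// content blocker configs can be overridden per test; otherwise defaults with stalled striking
// enabled (3 strikes, reset on progress) are used.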
public TestDownloadService CreateSut(
QueueCleanerConfig? queueCleanerConfig = null,
ContentBlockerConfig? contentBlockerConfig = null
)
{
queueCleanerConfig ??= new QueueCleanerConfig
{
Enabled = true,
RunSequentially = true,
StalledResetStrikesOnProgress = true,
StalledMaxStrikes = 3
};
var queueCleanerOptions = Substitute.For<IOptions<QueueCleanerConfig>>();
queueCleanerOptions.Value.Returns(queueCleanerConfig);
contentBlockerConfig ??= new ContentBlockerConfig
{
Enabled = true
};
var contentBlockerOptions = Substitute.For<IOptions<ContentBlockerConfig>>();
contentBlockerOptions.Value.Returns(contentBlockerConfig);
var downloadCleanerOptions = Substitute.For<IOptions<DownloadCleanerConfig>>();
downloadCleanerOptions.Value.Returns(new DownloadCleanerConfig());
var filenameEvaluator = Substitute.For<IFilenameEvaluator>();
var notifier = Substitute.For<INotificationPublisher>();
var dryRunInterceptor = Substitute.For<IDryRunInterceptor>();
var hardlinkFileService = Substitute.For<IHardLinkFileService>();
return new TestDownloadService(
Logger,
queueCleanerOptions,
contentBlockerOptions,
downloadCleanerOptions,
Cache,
filenameEvaluator,
Striker,
notifier,
dryRunInterceptor,
hardlinkFileService
);
}
public void Dispose()
{
// Cleanup if needed
}
}


@@ -1,214 +0,0 @@
using Common.Configuration.DownloadCleaner;
using Domain.Enums;
using Domain.Models.Cache;
using Infrastructure.Helpers;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.DownloadClient;
using NSubstitute;
using NSubstitute.ClearExtensions;
using Shouldly;
namespace Infrastructure.Tests.Verticals.DownloadClient;
public class DownloadServiceTests : IClassFixture<DownloadServiceFixture>
{
private readonly DownloadServiceFixture _fixture;
public DownloadServiceTests(DownloadServiceFixture fixture)
{
_fixture = fixture;
_fixture.Cache.ClearSubstitute();
_fixture.Striker.ClearSubstitute();
}
public class ResetStrikesOnProgressTests : DownloadServiceTests
{
public ResetStrikesOnProgressTests(DownloadServiceFixture fixture) : base(fixture)
{
}
[Fact]
public void WhenStalledStrikeDisabled_ShouldNotResetStrikes()
{
// Arrange
TestDownloadService sut = _fixture.CreateSut(queueCleanerConfig: new()
{
Enabled = true,
RunSequentially = true,
StalledResetStrikesOnProgress = false,
});
// Act
sut.ResetStalledStrikesOnProgress("test-hash", 100);
// Assert
_fixture.Cache.ReceivedCalls().ShouldBeEmpty();
}
[Fact]
public void WhenProgressMade_ShouldResetStrikes()
{
// Arrange
const string hash = "test-hash";
StalledCacheItem stalledCacheItem = new StalledCacheItem { Downloaded = 100 };
_fixture.Cache.TryGetValue(Arg.Any<object>(), out Arg.Any<object?>())
.Returns(x =>
{
x[1] = stalledCacheItem;
return true;
});
TestDownloadService sut = _fixture.CreateSut();
// Act
sut.ResetStalledStrikesOnProgress(hash, 200);
// Assert
_fixture.Cache.Received(1).Remove(CacheKeys.Strike(StrikeType.Stalled, hash));
}
[Fact]
public void WhenNoProgress_ShouldNotResetStrikes()
{
// Arrange
const string hash = "test-hash";
StalledCacheItem stalledCacheItem = new StalledCacheItem { Downloaded = 200 };
_fixture.Cache
.TryGetValue(Arg.Any<object>(), out Arg.Any<object?>())
.Returns(x =>
{
x[1] = stalledCacheItem;
return true;
});
TestDownloadService sut = _fixture.CreateSut();
// Act
sut.ResetStalledStrikesOnProgress(hash, 100);
// Assert
_fixture.Cache.DidNotReceive().Remove(Arg.Any<object>());
}
}
public class StrikeAndCheckLimitTests : DownloadServiceTests
{
public StrikeAndCheckLimitTests(DownloadServiceFixture fixture) : base(fixture)
{
}
}
public class ShouldCleanDownloadTests : DownloadServiceTests
{
public ShouldCleanDownloadTests(DownloadServiceFixture fixture) : base(fixture)
{
ContextProvider.Set("downloadName", "test-download");
}
[Fact]
public void WhenRatioAndMinSeedTimeReached_ShouldReturnTrue()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 1.0,
MinSeedTime = 1,
MaxSeedTime = -1
};
const double ratio = 1.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeTrue(),
() => result.Reason.ShouldBe(CleanReason.MaxRatioReached)
);
}
[Fact]
public void WhenRatioReachedAndMinSeedTimeNotReached_ShouldReturnFalse()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 1.0,
MinSeedTime = 3,
MaxSeedTime = -1
};
const double ratio = 1.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeFalse(),
() => result.Reason.ShouldBe(CleanReason.None)
);
}
[Fact]
public void WhenMaxSeedTimeReached_ShouldReturnTrue()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = -1,
MinSeedTime = 0,
MaxSeedTime = 1
};
const double ratio = 0.5;
TimeSpan seedingTime = TimeSpan.FromHours(2);
TestDownloadService sut = _fixture.CreateSut();
// Act
SeedingCheckResult result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeTrue(),
() => result.Reason.ShouldBe(CleanReason.MaxSeedTimeReached)
);
}
[Fact]
public void WhenNeitherConditionMet_ShouldReturnFalse()
{
// Arrange
CleanCategory category = new()
{
Name = "test",
MaxRatio = 2.0,
MinSeedTime = 0,
MaxSeedTime = 3
};
const double ratio = 1.0;
TimeSpan seedingTime = TimeSpan.FromHours(1);
TestDownloadService sut = _fixture.CreateSut();
// Act
var result = sut.ShouldCleanDownload(ratio, seedingTime, category);
// Assert
result.ShouldSatisfyAllConditions(
() => result.ShouldClean.ShouldBeFalse(),
() => result.Reason.ShouldBe(CleanReason.None)
);
}
}
}


@@ -1,54 +0,0 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Domain.Enums;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Tests.Verticals.DownloadClient;
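// Test double that turns every client-specific operation into a no-op and re-exposes the
// protected strike/seeding helpers of DownloadService so they can be unit tested directly.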
public class TestDownloadService : DownloadService
{
public TestDownloadService(
ILogger<DownloadService> logger,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig, cache,
filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
}
public override void Dispose() { }
public override Task LoginAsync() => Task.CompletedTask;
public override Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads) => Task.FromResult(new DownloadCheckResult());
public override Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash, BlocklistType blocklistType,
ConcurrentBag<string> patterns, ConcurrentBag<Regex> regexes, IReadOnlyList<string> ignoredDownloads) => Task.FromResult(new BlockFilesResult());
public override Task DeleteDownload(string hash) => Task.CompletedTask;
public override Task CreateCategoryAsync(string name) => Task.CompletedTask;
public override Task<List<object>?> GetSeedingDownloads() => Task.FromResult<List<object>?>(null);
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories) => null;
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories) => null;
public override Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads) => Task.CompletedTask;
public override Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads) => Task.CompletedTask;
// Expose protected methods for testing
public new void ResetStalledStrikesOnProgress(string hash, long downloaded) => base.ResetStalledStrikesOnProgress(hash, downloaded);
public new SeedingCheckResult ShouldCleanDownload(double ratio, TimeSpan seedingTime, CleanCategory category) => base.ShouldCleanDownload(ratio, seedingTime, category);
}


@@ -1,26 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Common\Common.csproj" />
<ProjectReference Include="..\Domain\Domain.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FLM.QBittorrent" Version="1.0.1" />
<PackageReference Include="FLM.Transmission" Version="1.0.3" />
<PackageReference Include="Mapster" Version="7.4.0" />
<PackageReference Include="MassTransit" Version="8.3.6" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="9.0.2" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.2" />
<PackageReference Include="Mono.Unix" Version="7.1.0-final.1.21458.1" />
<PackageReference Include="Quartz" Version="3.13.1" />
<PackageReference Include="Scrutor" Version="6.0.1" />
</ItemGroup>
</Project>


@@ -1,82 +0,0 @@
using Common.Configuration;
using Infrastructure.Helpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Providers;
public sealed class IgnoredDownloadsProvider<T>
where T : IIgnoredDownloadsConfig
{
private readonly ILogger<IgnoredDownloadsProvider<T>> _logger;
private IIgnoredDownloadsConfig _config;
private readonly IMemoryCache _cache;
private DateTime _lastModified = DateTime.MinValue;
public IgnoredDownloadsProvider(ILogger<IgnoredDownloadsProvider<T>> logger, IOptionsMonitor<T> config, IMemoryCache cache)
{
_config = config.CurrentValue;
config.OnChange((newValue) => _config = newValue);
_logger = logger;
_cache = cache;
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return;
}
if (!File.Exists(_config.IgnoredDownloadsPath))
{
throw new FileNotFoundException("file not found", _config.IgnoredDownloadsPath);
}
}
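// Returns the list of ignored downloads, reloading the file whenever its last-write time
// changes or the cached entry is missing; an empty list is returned when no path is configured.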
public async Task<IReadOnlyList<string>> GetIgnoredDownloads()
{
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return Array.Empty<string>();
}
FileInfo fileInfo = new(_config.IgnoredDownloadsPath);
if (fileInfo.LastWriteTime > _lastModified ||
!_cache.TryGetValue(CacheKeys.IgnoredDownloads(typeof(T).Name), out IReadOnlyList<string>? ignoredDownloads) ||
ignoredDownloads is null)
{
_lastModified = fileInfo.LastWriteTime;
return await LoadFile();
}
return ignoredDownloads;
}
private async Task<IReadOnlyList<string>> LoadFile()
{
try
{
if (string.IsNullOrEmpty(_config.IgnoredDownloadsPath))
{
return Array.Empty<string>();
}
string[] ignoredDownloads = (await File.ReadAllLinesAsync(_config.IgnoredDownloadsPath))
.Where(x => !string.IsNullOrWhiteSpace(x))
.ToArray();
_cache.Set(CacheKeys.IgnoredDownloads(typeof(T).Name), ignoredDownloads);
_logger.LogInformation("ignored downloads reloaded");
return ignoredDownloads;
}
catch (Exception exception)
{
_logger.LogError(exception, "error while reading ignored downloads file | {file}", _config.IgnoredDownloadsPath);
}
return Array.Empty<string>();
}
}


@@ -1,19 +0,0 @@
using Common.Configuration.Arr;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
namespace Infrastructure.Verticals.Arr.Interfaces;
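// Contract shared by the Sonarr, Radarr and Lidarr API clients: paged queue retrieval,
// strike-based removal decisions, queue item deletion and triggering replacement searches.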
public interface IArrClient
{
Task<QueueListResponse> GetQueueItemsAsync(ArrInstance arrInstance, int page);
Task<bool> ShouldRemoveFromQueue(InstanceType instanceType, QueueRecord record, bool isPrivateDownload, short arrMaxStrikes);
Task DeleteQueueItemAsync(ArrInstance arrInstance, QueueRecord record, bool removeFromClient, DeleteReason deleteReason);
Task SearchItemsAsync(ArrInstance arrInstance, HashSet<SearchItem>? items);
bool IsRecordValid(QueueRecord record);
}


@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface ILidarrClient : IArrClient
{
}


@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface IRadarrClient : IArrClient
{
}


@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Arr.Interfaces;
public interface ISonarrClient : IArrClient
{
}


@@ -1,163 +0,0 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.RegularExpressions;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Helpers;
using Domain.Enums;
using Infrastructure.Helpers;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class BlocklistProvider
{
private readonly ILogger<BlocklistProvider> _logger;
private readonly SonarrConfig _sonarrConfig;
private readonly RadarrConfig _radarrConfig;
private readonly LidarrConfig _lidarrConfig;
private readonly HttpClient _httpClient;
private readonly IMemoryCache _cache;
private bool _initialized;
public BlocklistProvider(
ILogger<BlocklistProvider> logger,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
IMemoryCache cache,
IHttpClientFactory httpClientFactory
)
{
_logger = logger;
_sonarrConfig = sonarrConfig.Value;
_radarrConfig = radarrConfig.Value;
_lidarrConfig = lidarrConfig.Value;
_cache = cache;
_httpClient = httpClientFactory.CreateClient(Constants.HttpClientWithRetryName);
}
public async Task LoadBlocklistsAsync()
{
if (_initialized)
{
_logger.LogTrace("blocklists already loaded");
return;
}
try
{
await LoadPatternsAndRegexesAsync(_sonarrConfig, InstanceType.Sonarr);
await LoadPatternsAndRegexesAsync(_radarrConfig, InstanceType.Radarr);
await LoadPatternsAndRegexesAsync(_lidarrConfig, InstanceType.Lidarr);
_initialized = true;
}
catch (Exception exception)
{
_logger.LogError(exception, "failed to load blocklists");
throw;
}
}
public BlocklistType GetBlocklistType(InstanceType instanceType)
{
_cache.TryGetValue(CacheKeys.BlocklistType(instanceType), out BlocklistType? blocklistType);
return blocklistType ?? BlocklistType.Blacklist;
}
public ConcurrentBag<string> GetPatterns(InstanceType instanceType)
{
_cache.TryGetValue(CacheKeys.BlocklistPatterns(instanceType), out ConcurrentBag<string>? patterns);
return patterns ?? [];
}
public ConcurrentBag<Regex> GetRegexes(InstanceType instanceType)
{
_cache.TryGetValue(CacheKeys.BlocklistRegexes(instanceType), out ConcurrentBag<Regex>? regexes);
return regexes ?? [];
}
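// Loads the blocklist configured for one arr instance (local file or HTTP(S) URL) and caches it.
// Lines prefixed with "regex:" are compiled as regular expressions; all other lines are stored as plain patterns.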
private async Task LoadPatternsAndRegexesAsync(ArrConfig arrConfig, InstanceType instanceType)
{
if (!arrConfig.Enabled)
{
return;
}
if (string.IsNullOrEmpty(arrConfig.Block.Path))
{
return;
}
string[] filePatterns = await ReadContentAsync(arrConfig.Block.Path);
long startTime = Stopwatch.GetTimestamp();
ParallelOptions options = new() { MaxDegreeOfParallelism = 5 };
const string regexId = "regex:";
ConcurrentBag<string> patterns = [];
ConcurrentBag<Regex> regexes = [];
Parallel.ForEach(filePatterns, options, pattern =>
{
if (!pattern.StartsWith(regexId))
{
patterns.Add(pattern);
return;
}
pattern = pattern[regexId.Length..];
try
{
Regex regex = new(pattern, RegexOptions.Compiled);
regexes.Add(regex);
}
catch (ArgumentException)
{
_logger.LogWarning("invalid regex | {pattern}", pattern);
}
});
TimeSpan elapsed = Stopwatch.GetElapsedTime(startTime);
_cache.Set(CacheKeys.BlocklistType(instanceType), arrConfig.Block.Type);
_cache.Set(CacheKeys.BlocklistPatterns(instanceType), patterns);
_cache.Set(CacheKeys.BlocklistRegexes(instanceType), regexes);
_logger.LogDebug("loaded {count} patterns", patterns.Count);
_logger.LogDebug("loaded {count} regexes", regexes.Count);
_logger.LogDebug("blocklist loaded in {elapsed} ms | {path}", elapsed.TotalMilliseconds, arrConfig.Block.Path);
}
private async Task<string[]> ReadContentAsync(string path)
{
if (Uri.TryCreate(path, UriKind.Absolute, out var uri) && (uri.Scheme == Uri.UriSchemeHttp || uri.Scheme == Uri.UriSchemeHttps))
{
// http(s) url
return await ReadFromUrlAsync(path);
}
if (File.Exists(path))
{
// local file path
return await File.ReadAllLinesAsync(path);
}
throw new ArgumentException($"blocklist not found | {path}");
}
private async Task<string[]> ReadFromUrlAsync(string url)
{
using HttpResponseMessage response = await _httpClient.GetAsync(url);
response.EnsureSuccessStatusCode();
return (await response.Content.ReadAsStringAsync())
.Split(['\r','\n'], StringSplitOptions.RemoveEmptyEntries);
}
}


@@ -1,158 +0,0 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.Arr;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadClient;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Helpers;
using Infrastructure.Providers;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadRemover.Models;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.Notifications;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using LogContext = Serilog.Context.LogContext;
namespace Infrastructure.Verticals.ContentBlocker;
public sealed class ContentBlocker : GenericHandler
{
private readonly ContentBlockerConfig _config;
private readonly BlocklistProvider _blocklistProvider;
private readonly IgnoredDownloadsProvider<ContentBlockerConfig> _ignoredDownloadsProvider;
public ContentBlocker(
ILogger<ContentBlocker> logger,
IOptions<ContentBlockerConfig> config,
IOptions<DownloadClientConfig> downloadClientConfig,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
BlocklistProvider blocklistProvider,
DownloadServiceFactory downloadServiceFactory,
INotificationPublisher notifier,
IgnoredDownloadsProvider<ContentBlockerConfig> ignoredDownloadsProvider
) : base(
logger, downloadClientConfig,
sonarrConfig, radarrConfig, lidarrConfig,
cache, messageBus, arrClientFactory, arrArrQueueIterator, downloadServiceFactory,
notifier
)
{
_config = config.Value;
_blocklistProvider = blocklistProvider;
_ignoredDownloadsProvider = ignoredDownloadsProvider;
}
public override async Task ExecuteAsync()
{
if (_downloadClientConfig.DownloadClient is Common.Enums.DownloadClient.None or Common.Enums.DownloadClient.Disabled)
{
_logger.LogWarning("download client is not set");
return;
}
bool blocklistIsConfigured = _sonarrConfig.Enabled && !string.IsNullOrEmpty(_sonarrConfig.Block.Path) ||
_radarrConfig.Enabled && !string.IsNullOrEmpty(_radarrConfig.Block.Path) ||
_lidarrConfig.Enabled && !string.IsNullOrEmpty(_lidarrConfig.Block.Path);
if (!blocklistIsConfigured)
{
_logger.LogWarning("no blocklist is configured");
return;
}
await _blocklistProvider.LoadBlocklistsAsync();
await base.ExecuteAsync();
}
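// For each queue item: skip non-torrents, ignored downloads and items already marked for removal,
// then block unwanted files and request removal when every file in the torrent ends up blocked.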
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType, ArrConfig config)
{
IReadOnlyList<string> ignoredDownloads = await _ignoredDownloadsProvider.GetIgnoredDownloads();
using var _ = LogContext.PushProperty("InstanceName", instanceType.ToString());
IArrClient arrClient = _arrClientFactory.GetClient(instanceType);
BlocklistType blocklistType = _blocklistProvider.GetBlocklistType(instanceType);
ConcurrentBag<string> patterns = _blocklistProvider.GetPatterns(instanceType);
ConcurrentBag<Regex> regexes = _blocklistProvider.GetRegexes(instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.GroupBy(x => x.DownloadId)
.ToList();
foreach (var group in groups)
{
QueueRecord record = group.First();
if (record.Protocol is not "torrent")
{
continue;
}
if (string.IsNullOrEmpty(record.DownloadId))
{
_logger.LogDebug("skip | download id is null for {title}", record.Title);
continue;
}
if (ignoredDownloads.Contains(record.DownloadId, StringComparer.InvariantCultureIgnoreCase))
{
_logger.LogInformation("skip | {title} | ignored", record.Title);
continue;
}
string downloadRemovalKey = CacheKeys.DownloadMarkedForRemoval(record.DownloadId, instance.Url);
if (_cache.TryGetValue(downloadRemovalKey, out bool _))
{
_logger.LogDebug("skip | already marked for removal | {title}", record.Title);
continue;
}
_logger.LogDebug("searching unwanted files for {title}", record.Title);
BlockFilesResult result = await _downloadService
.BlockUnwantedFilesAsync(record.DownloadId, blocklistType, patterns, regexes, ignoredDownloads);
if (!result.ShouldRemove)
{
continue;
}
_logger.LogDebug("all files are marked as unwanted | {hash}", record.Title);
bool removeFromClient = true;
if (result.IsPrivate && !_config.DeletePrivate)
{
removeFromClient = false;
}
await PublishQueueItemRemoveRequest(
downloadRemovalKey,
instanceType,
instance,
record,
group.Count() > 1,
removeFromClient,
DeleteReason.AllFilesBlocked
);
}
});
}
}


@@ -1,155 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Domain.Enums;
using Domain.Models.Arr.Queue;
using Infrastructure.Providers;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.Arr.Interfaces;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.Jobs;
using Infrastructure.Verticals.Notifications;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using LogContext = Serilog.Context.LogContext;
namespace Infrastructure.Verticals.DownloadCleaner;
public sealed class DownloadCleaner : GenericHandler
{
private readonly DownloadCleanerConfig _config;
private readonly IgnoredDownloadsProvider<DownloadCleanerConfig> _ignoredDownloadsProvider;
private readonly HashSet<string> _excludedHashes = [];
private static bool _hardLinkCategoryCreated;
public DownloadCleaner(
ILogger<DownloadCleaner> logger,
IOptions<DownloadCleanerConfig> config,
IOptions<DownloadClientConfig> downloadClientConfig,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory,
INotificationPublisher notifier,
IgnoredDownloadsProvider<DownloadCleanerConfig> ignoredDownloadsProvider
) : base(
logger, downloadClientConfig,
sonarrConfig, radarrConfig, lidarrConfig,
cache, messageBus, arrClientFactory, arrArrQueueIterator, downloadServiceFactory,
notifier
)
{
_config = config.Value;
_config.Validate();
_ignoredDownloadsProvider = ignoredDownloadsProvider;
}
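// Orchestrates both cleaner features: re-categorizing (or tagging) seeding downloads that have
// no hardlinks left, and deleting seeding downloads that reached their category's ratio or
// seed-time limits. Hashes still present in an arr queue are collected first and excluded.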
public override async Task ExecuteAsync()
{
if (_downloadClientConfig.DownloadClient is Common.Enums.DownloadClient.None or Common.Enums.DownloadClient.Disabled)
{
_logger.LogWarning("download client is not set");
return;
}
bool isUnlinkedEnabled = !string.IsNullOrEmpty(_config.UnlinkedTargetCategory) && _config.UnlinkedCategories?.Count > 0;
bool isCleaningEnabled = _config.Categories?.Count > 0;
if (!isUnlinkedEnabled && !isCleaningEnabled)
{
_logger.LogWarning("{name} is not configured properly", nameof(DownloadCleaner));
return;
}
IReadOnlyList<string> ignoredDownloads = await _ignoredDownloadsProvider.GetIgnoredDownloads();
await _downloadService.LoginAsync();
List<object>? downloads = await _downloadService.GetSeedingDownloads();
if (downloads?.Count is null or 0)
{
_logger.LogDebug("no seeding downloads found");
return;
}
_logger.LogTrace("found {count} seeding downloads", downloads.Count);
List<object>? downloadsToChangeCategory = null;
if (isUnlinkedEnabled)
{
if (!_hardLinkCategoryCreated)
{
if (_downloadClientConfig.DownloadClient is Common.Enums.DownloadClient.QBittorrent && !_config.UnlinkedUseTag)
{
_logger.LogDebug("creating category {cat}", _config.UnlinkedTargetCategory);
await _downloadService.CreateCategoryAsync(_config.UnlinkedTargetCategory);
}
_hardLinkCategoryCreated = true;
}
downloadsToChangeCategory = _downloadService.FilterDownloadsToChangeCategoryAsync(downloads, _config.UnlinkedCategories);
}
// wait for the downloads to appear in the arr queue
await Task.Delay(10 * 1000);
await ProcessArrConfigAsync(_sonarrConfig, InstanceType.Sonarr, true);
await ProcessArrConfigAsync(_radarrConfig, InstanceType.Radarr, true);
await ProcessArrConfigAsync(_lidarrConfig, InstanceType.Lidarr, true);
if (isUnlinkedEnabled)
{
_logger.LogTrace("found {count} potential downloads to change category", downloadsToChangeCategory?.Count);
await _downloadService.ChangeCategoryForNoHardLinksAsync(downloadsToChangeCategory, _excludedHashes, ignoredDownloads);
_logger.LogTrace("finished changing category");
}
if (_config.Categories?.Count is null or 0)
{
return;
}
List<object>? downloadsToClean = _downloadService.FilterDownloadsToBeCleanedAsync(downloads, _config.Categories);
// release unused objects
downloads = null;
_logger.LogTrace("found {count} potential downloads to clean", downloadsToClean?.Count);
await _downloadService.CleanDownloadsAsync(downloadsToClean, _config.Categories, _excludedHashes, ignoredDownloads);
_logger.LogTrace("finished cleaning downloads");
}
protected override async Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType, ArrConfig config)
{
using var _ = LogContext.PushProperty("InstanceName", instanceType.ToString());
IArrClient arrClient = _arrClientFactory.GetClient(instanceType);
await _arrArrQueueIterator.Iterate(arrClient, instance, async items =>
{
var groups = items
.Where(x => !string.IsNullOrEmpty(x.DownloadId))
.GroupBy(x => x.DownloadId)
.ToList();
foreach (QueueRecord record in groups.Select(group => group.First()))
{
_excludedHashes.Add(record.DownloadId.ToLowerInvariant());
}
});
}
public override void Dispose()
{
_downloadService.Dispose();
}
}


@@ -1,548 +0,0 @@
using System.Collections.Concurrent;
using System.Globalization;
using System.Text.RegularExpressions;
using Common.Attributes;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Common.CustomDataTypes;
using Common.Exceptions;
using Domain.Enums;
using Domain.Models.Deluge.Response;
using Infrastructure.Extensions;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient.Deluge;
public class DelugeService : DownloadService, IDelugeService
{
private readonly DelugeClient _client;
public DelugeService(
ILogger<DelugeService> logger,
IOptions<DelugeConfig> config,
IHttpClientFactory httpClientFactory,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig, cache,
filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
config.Value.Validate();
_client = new (config, httpClientFactory);
}
public override async Task LoginAsync()
{
await _client.LoginAsync();
if (!await _client.IsConnected() && !await _client.Connect())
{
throw new FatalException("Deluge WebUI is not connected to the daemon");
}
}
/// <inheritdoc/>
public override async Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads)
{
hash = hash.ToLowerInvariant();
DelugeContents? contents = null;
DownloadCheckResult result = new();
DownloadStatus? download = await _client.GetTorrentStatus(hash);
if (download?.Hash is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return result;
}
result.IsPrivate = download.Private;
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
return result;
}
try
{
contents = await _client.GetTorrentFiles(hash);
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to find torrent {hash} in the download client", hash);
}
bool shouldRemove = contents?.Contents?.Count > 0;
ProcessFiles(contents?.Contents, (_, file) =>
{
if (file.Priority > 0)
{
shouldRemove = false;
}
});
if (shouldRemove)
{
// remove if all files are unwanted
result.ShouldRemove = true;
result.DeleteReason = DeleteReason.AllFilesSkipped;
return result;
}
// remove if download is stuck
(result.ShouldRemove, result.DeleteReason) = await EvaluateDownloadRemoval(download);
return result;
}
/// <inheritdoc/>
public override async Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash,
BlocklistType blocklistType,
ConcurrentBag<string> patterns,
ConcurrentBag<Regex> regexes, IReadOnlyList<string> ignoredDownloads)
{
hash = hash.ToLowerInvariant();
DownloadStatus? download = await _client.GetTorrentStatus(hash);
BlockFilesResult result = new();
if (download?.Hash is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return result;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
return result;
}
result.IsPrivate = download.Private;
if (_contentBlockerConfig.IgnorePrivate && download.Private)
{
// ignore private trackers
_logger.LogDebug("skip files check | download is private | {name}", download.Name);
return result;
}
DelugeContents? contents = null;
try
{
contents = await _client.GetTorrentFiles(hash);
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to find torrent {hash} in the download client", hash);
}
if (contents is null)
{
return result;
}
Dictionary<int, int> priorities = [];
bool hasPriorityUpdates = false;
long totalFiles = 0;
long totalUnwantedFiles = 0;
ProcessFiles(contents.Contents, (name, file) =>
{
totalFiles++;
int priority = file.Priority;
if (file.Priority is 0)
{
totalUnwantedFiles++;
}
if (file.Priority is not 0 && !_filenameEvaluator.IsValid(name, blocklistType, patterns, regexes))
{
totalUnwantedFiles++;
priority = 0;
hasPriorityUpdates = true;
_logger.LogInformation("unwanted file found | {file}", file.Path);
}
priorities.Add(file.Index, priority);
});
if (!hasPriorityUpdates)
{
return result;
}
_logger.LogDebug("changing priorities | torrent {hash}", hash);
List<int> sortedPriorities = priorities
.OrderBy(x => x.Key)
.Select(x => x.Value)
.ToList();
if (totalUnwantedFiles == totalFiles)
{
// Skip marking files as unwanted. The download will be removed completely.
result.ShouldRemove = true;
return result;
}
await _dryRunInterceptor.InterceptAsync(ChangeFilesPriority, hash, sortedPriorities);
return result;
}
public override async Task<List<object>?> GetSeedingDownloads()
{
return (await _client.GetStatusForAllTorrents())
?.Where(x => !string.IsNullOrEmpty(x.Hash))
.Where(x => x.State?.Equals("seeding", StringComparison.InvariantCultureIgnoreCase) is true)
.Cast<object>()
.ToList();
}
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories) =>
downloads
?.Cast<DownloadStatus>()
.Where(x => categories.Any(cat => cat.Name.Equals(x.Label, StringComparison.InvariantCultureIgnoreCase)))
.Cast<object>()
.ToList();
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories) =>
downloads
?.Cast<DownloadStatus>()
.Where(x => !string.IsNullOrEmpty(x.Hash))
.Where(x => categories.Any(cat => cat.Equals(x.Label, StringComparison.InvariantCultureIgnoreCase)))
.Cast<object>()
.ToList();
/// <inheritdoc/>
public override async Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean, HashSet<string> excludedHashes,
IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
foreach (DownloadStatus download in downloads)
{
if (string.IsNullOrEmpty(download.Hash))
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.Hash, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
continue;
}
CleanCategory? category = categoriesToClean
.FirstOrDefault(x => x.Name.Equals(download.Label, StringComparison.InvariantCultureIgnoreCase));
if (category is null)
{
continue;
}
if (!_downloadCleanerConfig.DeletePrivate && download.Private)
{
_logger.LogDebug("skip | download is private | {name}", download.Name);
continue;
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.Hash);
TimeSpan seedingTime = TimeSpan.FromSeconds(download.SeedingTime);
SeedingCheckResult result = ShouldCleanDownload(download.Ratio, seedingTime, category);
if (!result.ShouldClean)
{
continue;
}
await _dryRunInterceptor.InterceptAsync(DeleteDownload, download.Hash);
_logger.LogInformation(
"download cleaned | {reason} reached | {name}",
result.Reason is CleanReason.MaxRatioReached
? "MAX_RATIO & MIN_SEED_TIME"
: "MAX_SEED_TIME",
download.Name
);
await _notifier.NotifyDownloadCleaned(download.Ratio, seedingTime, category.Name, result.Reason);
}
}
public override async Task CreateCategoryAsync(string name)
{
IReadOnlyList<string> existingLabels = await _client.GetLabels();
if (existingLabels.Contains(name, StringComparer.InvariantCultureIgnoreCase))
{
return;
}
await _dryRunInterceptor.InterceptAsync(CreateLabel, name);
}
public override async Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
if (!string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir))
{
_hardLinkFileService.PopulateFileCounts(_downloadCleanerConfig.UnlinkedIgnoredRootDir);
}
foreach (DownloadStatus download in downloads.Cast<DownloadStatus>())
{
if (string.IsNullOrEmpty(download.Hash) || string.IsNullOrEmpty(download.Name) || string.IsNullOrEmpty(download.Label))
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.Hash, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
continue;
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.Hash);
DelugeContents? contents = null;
try
{
contents = await _client.GetTorrentFiles(download.Hash);
}
catch (Exception exception)
{
_logger.LogDebug(exception, "failed to find torrent files for {name}", download.Name);
continue;
}
bool hasHardlinks = false;
ProcessFiles(contents?.Contents, (_, file) =>
{
string filePath = string.Join(Path.DirectorySeparatorChar, Path.Combine(download.DownloadLocation, file.Path).Split(['\\', '/']));
if (file.Priority <= 0)
{
_logger.LogDebug("skip | file is not downloaded | {file}", filePath);
return;
}
long hardlinkCount = _hardLinkFileService.GetHardLinkCount(filePath, !string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir));
if (hardlinkCount < 0)
{
_logger.LogDebug("skip | could not get file properties | {file}", filePath);
hasHardlinks = true;
return;
}
if (hardlinkCount > 0)
{
hasHardlinks = true;
}
});
if (hasHardlinks)
{
_logger.LogDebug("skip | download has hardlinks | {name}", download.Name);
continue;
}
await _dryRunInterceptor.InterceptAsync(ChangeLabel, download.Hash, _downloadCleanerConfig.UnlinkedTargetCategory);
_logger.LogInformation("category changed for {name}", download.Name);
await _notifier.NotifyCategoryChanged(download.Label, _downloadCleanerConfig.UnlinkedTargetCategory);
download.Label = _downloadCleanerConfig.UnlinkedTargetCategory;
}
}
/// <inheritdoc/>
[DryRunSafeguard]
public override async Task DeleteDownload(string hash)
{
hash = hash.ToLowerInvariant();
await _client.DeleteTorrents([hash]);
}
[DryRunSafeguard]
protected async Task CreateLabel(string name)
{
await _client.CreateLabel(name);
}
[DryRunSafeguard]
protected virtual async Task ChangeFilesPriority(string hash, List<int> sortedPriorities)
{
await _client.ChangeFilesPriority(hash, sortedPriorities);
}
[DryRunSafeguard]
protected virtual async Task ChangeLabel(string hash, string newLabel)
{
await _client.SetTorrentLabel(hash, newLabel);
}
private async Task<(bool, DeleteReason)> EvaluateDownloadRemoval(DownloadStatus status)
{
(bool ShouldRemove, DeleteReason Reason) result = await CheckIfSlow(status);
if (result.ShouldRemove)
{
return result;
}
return await CheckIfStuck(status);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfSlow(DownloadStatus download)
{
if (_queueCleanerConfig.SlowMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (download.State is null || !download.State.Equals("Downloading", StringComparison.InvariantCultureIgnoreCase))
{
return (false, DeleteReason.None);
}
if (download.DownloadSpeed <= 0)
{
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.SlowIgnorePrivate && download.Private)
{
// ignore private trackers
_logger.LogDebug("skip slow check | download is private | {name}", download.Name);
return (false, DeleteReason.None);
}
if (download.Size > (_queueCleanerConfig.SlowIgnoreAboveSizeByteSize?.Bytes ?? long.MaxValue))
{
_logger.LogDebug("skip slow check | download is too large | {name}", download.Name);
return (false, DeleteReason.None);
}
ByteSize minSpeed = _queueCleanerConfig.SlowMinSpeedByteSize;
ByteSize currentSpeed = new ByteSize(download.DownloadSpeed);
SmartTimeSpan maxTime = SmartTimeSpan.FromHours(_queueCleanerConfig.SlowMaxTime);
SmartTimeSpan currentTime = SmartTimeSpan.FromSeconds(download.Eta);
return await CheckIfSlow(
download.Hash!,
download.Name!,
minSpeed,
currentSpeed,
maxTime,
currentTime
);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfStuck(DownloadStatus status)
{
if (_queueCleanerConfig.StalledMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.StalledIgnorePrivate && status.Private)
{
// ignore private trackers
_logger.LogDebug("skip stalled check | download is private | {name}", status.Name);
return (false, DeleteReason.None);
}
if (status.State is null || !status.State.Equals("Downloading", StringComparison.InvariantCultureIgnoreCase))
{
return (false, DeleteReason.None);
}
if (status.Eta > 0)
{
return (false, DeleteReason.None);
}
ResetStalledStrikesOnProgress(status.Hash!, status.TotalDone);
return (await _striker.StrikeAndCheckLimit(status.Hash!, status.Name!, _queueCleanerConfig.StalledMaxStrikes, StrikeType.Stalled), DeleteReason.Stalled);
}
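// Walks the Deluge file tree, invoking the callback for every file and recursing into sub-directories.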
private static void ProcessFiles(Dictionary<string, DelugeFileOrDirectory>? contents, Action<string, DelugeFileOrDirectory> processFile)
{
if (contents is null)
{
return;
}
foreach (var (name, data) in contents)
{
switch (data.Type)
{
case "file":
processFile(name, data);
break;
case "dir" when data.Contents is not null:
// Recurse into subdirectories
ProcessFiles(data.Contents, processFile);
break;
}
}
}
public override void Dispose()
{
}
}


@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.DownloadClient.Deluge;
public interface IDelugeService : IDownloadService
{
}


@@ -1,31 +0,0 @@
using Common.Configuration.DownloadClient;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadClient.QBittorrent;
using Infrastructure.Verticals.DownloadClient.Transmission;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient;
public sealed class DownloadServiceFactory
{
private readonly IServiceProvider _serviceProvider;
private readonly Common.Enums.DownloadClient _downloadClient;
public DownloadServiceFactory(IServiceProvider serviceProvider, IOptions<DownloadClientConfig> downloadClientConfig)
{
_serviceProvider = serviceProvider;
_downloadClient = downloadClientConfig.Value.DownloadClient;
}
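// Resolves the concrete download service for the configured client;
// None and Disabled both resolve to the no-op DummyDownloadService.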
public IDownloadService CreateDownloadClient() =>
_downloadClient switch
{
Common.Enums.DownloadClient.QBittorrent => _serviceProvider.GetRequiredService<QBitService>(),
Common.Enums.DownloadClient.Deluge => _serviceProvider.GetRequiredService<DelugeService>(),
Common.Enums.DownloadClient.Transmission => _serviceProvider.GetRequiredService<TransmissionService>(),
Common.Enums.DownloadClient.None => _serviceProvider.GetRequiredService<DummyDownloadService>(),
Common.Enums.DownloadClient.Disabled => _serviceProvider.GetRequiredService<DummyDownloadService>(),
_ => throw new ArgumentOutOfRangeException()
};
}


@@ -1,91 +0,0 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.QueueCleaner;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadClient;
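// Placeholder used when no download client is configured; only LoginAsync and Dispose are
// implemented, every other operation throws NotImplementedException.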
public class DummyDownloadService : DownloadService
{
public DummyDownloadService(
ILogger<DownloadService> logger,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig,
cache, filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
}
public override void Dispose()
{
}
public override Task LoginAsync()
{
return Task.CompletedTask;
}
public override Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads)
{
throw new NotImplementedException();
}
public override Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash, BlocklistType blocklistType, ConcurrentBag<string> patterns,
ConcurrentBag<Regex> regexes, IReadOnlyList<string> ignoredDownloads)
{
throw new NotImplementedException();
}
public override Task<List<object>?> GetSeedingDownloads()
{
throw new NotImplementedException();
}
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories)
{
throw new NotImplementedException();
}
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories)
{
throw new NotImplementedException();
}
public override Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
throw new NotImplementedException();
}
public override Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
throw new NotImplementedException();
}
public override Task CreateCategoryAsync(string name)
{
throw new NotImplementedException();
}
public override Task DeleteDownload(string hash)
{
throw new NotImplementedException();
}
}


@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.DownloadClient.QBittorrent;
public interface IQBitService : IDownloadService, IDisposable
{
}


@@ -1,599 +0,0 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Attributes;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Common.CustomDataTypes;
using Common.Helpers;
using Domain.Enums;
using Infrastructure.Extensions;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using QBittorrent.Client;
namespace Infrastructure.Verticals.DownloadClient.QBittorrent;
public class QBitService : DownloadService, IQBitService
{
private readonly QBitConfig _config;
private readonly QBittorrentClient _client;
public QBitService(
ILogger<QBitService> logger,
IHttpClientFactory httpClientFactory,
IOptions<QBitConfig> config,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig, cache,
filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
_config = config.Value;
_config.Validate();
UriBuilder uriBuilder = new(_config.Url);
uriBuilder.Path = string.IsNullOrEmpty(_config.UrlBase)
? uriBuilder.Path
: $"{uriBuilder.Path.TrimEnd('/')}/{_config.UrlBase.TrimStart('/')}";
_client = new(httpClientFactory.CreateClient(Constants.HttpClientWithRetryName), uriBuilder.Uri);
}
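// Authentication is skipped when both username and password are empty
// (e.g. when qBittorrent is configured to bypass authentication for local clients).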
public override async Task LoginAsync()
{
if (string.IsNullOrEmpty(_config.Username) && string.IsNullOrEmpty(_config.Password))
{
return;
}
await _client.LoginAsync(_config.Username, _config.Password);
}
/// <inheritdoc/>
public override async Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads)
{
DownloadCheckResult result = new();
TorrentInfo? download = (await _client.GetTorrentListAsync(new TorrentListQuery { Hashes = [hash] }))
.FirstOrDefault();
if (download is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return result;
}
IReadOnlyList<TorrentTracker> trackers = await GetTrackersAsync(hash);
if (ignoredDownloads.Count > 0 &&
(download.ShouldIgnore(ignoredDownloads) || trackers.Any(x => x.ShouldIgnore(ignoredDownloads)) is true))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
return result;
}
TorrentProperties? torrentProperties = await _client.GetTorrentPropertiesAsync(hash);
if (torrentProperties is null)
{
_logger.LogDebug("failed to find torrent properties {hash} in the download client", hash);
return result;
}
result.IsPrivate = torrentProperties.AdditionalData.TryGetValue("is_private", out var dictValue) &&
bool.TryParse(dictValue?.ToString(), out bool boolValue)
&& boolValue;
IReadOnlyList<TorrentContent>? files = await _client.GetTorrentContentsAsync(hash);
if (files?.Count is > 0 && files.All(x => x.Priority is TorrentContentPriority.Skip))
{
result.ShouldRemove = true;
// if all files were blocked by qBittorrent
if (download is { CompletionOn: not null, Downloaded: null or 0 })
{
result.DeleteReason = DeleteReason.AllFilesSkippedByQBit;
return result;
}
// remove if all files are unwanted
result.DeleteReason = DeleteReason.AllFilesSkipped;
return result;
}
(result.ShouldRemove, result.DeleteReason) = await EvaluateDownloadRemoval(download, result.IsPrivate);
return result;
}
/// <inheritdoc/>
public override async Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash,
BlocklistType blocklistType,
ConcurrentBag<string> patterns,
ConcurrentBag<Regex> regexes,
IReadOnlyList<string> ignoredDownloads
)
{
TorrentInfo? download = (await _client.GetTorrentListAsync(new TorrentListQuery { Hashes = [hash] }))
.FirstOrDefault();
BlockFilesResult result = new();
if (download is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return result;
}
IReadOnlyList<TorrentTracker> trackers = await GetTrackersAsync(hash);
if (ignoredDownloads.Count > 0 &&
(download.ShouldIgnore(ignoredDownloads) || trackers.Any(x => x.ShouldIgnore(ignoredDownloads)) is true))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
return result;
}
TorrentProperties? torrentProperties = await _client.GetTorrentPropertiesAsync(hash);
if (torrentProperties is null)
{
_logger.LogDebug("failed to find torrent properties {hash} in the download client", hash);
return result;
}
bool isPrivate = torrentProperties.AdditionalData.TryGetValue("is_private", out var dictValue) &&
bool.TryParse(dictValue?.ToString(), out bool boolValue)
&& boolValue;
result.IsPrivate = isPrivate;
if (_contentBlockerConfig.IgnorePrivate && isPrivate)
{
// ignore private trackers
_logger.LogDebug("skip files check | download is private | {name}", download.Name);
return result;
}
IReadOnlyList<TorrentContent>? files = await _client.GetTorrentContentsAsync(hash);
if (files is null)
{
return result;
}
List<int> unwantedFiles = [];
long totalFiles = 0;
long totalUnwantedFiles = 0;
foreach (TorrentContent file in files)
{
if (!file.Index.HasValue)
{
continue;
}
totalFiles++;
if (file.Priority is TorrentContentPriority.Skip)
{
totalUnwantedFiles++;
continue;
}
if (_filenameEvaluator.IsValid(file.Name, blocklistType, patterns, regexes))
{
continue;
}
_logger.LogInformation("unwanted file found | {file}", file.Name);
unwantedFiles.Add(file.Index.Value);
totalUnwantedFiles++;
}
if (unwantedFiles.Count is 0)
{
return result;
}
if (totalUnwantedFiles == totalFiles)
{
// Skip marking files as unwanted. The download will be removed completely.
result.ShouldRemove = true;
return result;
}
foreach (int fileIndex in unwantedFiles)
{
await _dryRunInterceptor.InterceptAsync(SkipFile, hash, fileIndex);
}
return result;
}
/// <inheritdoc/>
public override async Task<List<object>?> GetSeedingDownloads() =>
(await _client.GetTorrentListAsync(new()
{
Filter = TorrentListFilter.Seeding
}))
?.Where(x => !string.IsNullOrEmpty(x.Hash))
.Cast<object>()
.ToList();
/// <inheritdoc/>
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories) =>
downloads
?.Cast<TorrentInfo>()
.Where(x => !string.IsNullOrEmpty(x.Hash))
.Where(x => categories.Any(cat => cat.Name.Equals(x.Category, StringComparison.InvariantCultureIgnoreCase)))
.Cast<object>()
.ToList();
/// <inheritdoc/>
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories) =>
downloads
?.Cast<TorrentInfo>()
.Where(x => !string.IsNullOrEmpty(x.Hash))
.Where(x => categories.Any(cat => cat.Equals(x.Category, StringComparison.InvariantCultureIgnoreCase)))
.Where(x =>
{
if (_downloadCleanerConfig.UnlinkedUseTag)
{
return !x.Tags.Any(tag => tag.Equals(_downloadCleanerConfig.UnlinkedTargetCategory, StringComparison.InvariantCultureIgnoreCase));
}
return true;
})
.Cast<object>()
.ToList();
/// <inheritdoc/>
public override async Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean,
HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
foreach (TorrentInfo download in downloads)
{
if (string.IsNullOrEmpty(download.Hash))
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.Hash, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
IReadOnlyList<TorrentTracker> trackers = await GetTrackersAsync(download.Hash);
if (ignoredDownloads.Count > 0 &&
(download.ShouldIgnore(ignoredDownloads) || trackers.Any(x => x.ShouldIgnore(ignoredDownloads))))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
continue;
}
CleanCategory? category = categoriesToClean
.FirstOrDefault(x => download.Category.Equals(x.Name, StringComparison.InvariantCultureIgnoreCase));
if (category is null)
{
continue;
}
if (!_downloadCleanerConfig.DeletePrivate)
{
TorrentProperties? torrentProperties = await _client.GetTorrentPropertiesAsync(download.Hash);
if (torrentProperties is null)
{
_logger.LogDebug("failed to find torrent properties in the download client | {name}", download.Name);
continue;
}
bool isPrivate = torrentProperties.AdditionalData.TryGetValue("is_private", out var dictValue) &&
bool.TryParse(dictValue?.ToString(), out bool boolValue)
&& boolValue;
if (isPrivate)
{
_logger.LogDebug("skip | download is private | {name}", download.Name);
continue;
}
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.Hash);
SeedingCheckResult result = ShouldCleanDownload(download.Ratio, download.SeedingTime ?? TimeSpan.Zero, category);
if (!result.ShouldClean)
{
continue;
}
await _dryRunInterceptor.InterceptAsync(DeleteDownload, download.Hash);
_logger.LogInformation(
"download cleaned | {reason} reached | {name}",
result.Reason is CleanReason.MaxRatioReached
? "MAX_RATIO & MIN_SEED_TIME"
: "MAX_SEED_TIME",
download.Name
);
await _notifier.NotifyDownloadCleaned(download.Ratio, download.SeedingTime ?? TimeSpan.Zero, category.Name, result.Reason);
}
}
public override async Task CreateCategoryAsync(string name)
{
IReadOnlyDictionary<string, Category>? existingCategories = await _client.GetCategoriesAsync();
if (existingCategories.Any(x => x.Value.Name.Equals(name, StringComparison.InvariantCultureIgnoreCase)))
{
return;
}
await _dryRunInterceptor.InterceptAsync(CreateCategory, name);
}
public override async Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
if (!string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir))
{
_hardLinkFileService.PopulateFileCounts(_downloadCleanerConfig.UnlinkedIgnoredRootDir);
}
foreach (TorrentInfo download in downloads)
{
if (string.IsNullOrEmpty(download.Hash))
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.Hash, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
IReadOnlyList<TorrentTracker> trackers = await GetTrackersAsync(download.Hash);
if (ignoredDownloads.Count > 0 &&
(download.ShouldIgnore(ignoredDownloads) || trackers.Any(x => x.ShouldIgnore(ignoredDownloads))))
{
_logger.LogInformation("skip | download is ignored | {name}", download.Name);
continue;
}
IReadOnlyList<TorrentContent>? files = await _client.GetTorrentContentsAsync(download.Hash);
if (files is null)
{
_logger.LogDebug("failed to find files for {name}", download.Name);
continue;
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.Hash);
bool hasHardlinks = false;
foreach (TorrentContent file in files)
{
if (!file.Index.HasValue)
{
_logger.LogDebug("skip | file index is null for {name}", download.Name);
hasHardlinks = true;
break;
}
string filePath = string.Join(Path.DirectorySeparatorChar, Path.Combine(download.SavePath, file.Name).Split(['\\', '/']));
if (file.Priority is TorrentContentPriority.Skip)
{
_logger.LogDebug("skip | file is not downloaded | {file}", filePath);
continue;
}
long hardlinkCount = _hardLinkFileService.GetHardLinkCount(filePath, !string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir));
if (hardlinkCount < 0)
{
_logger.LogDebug("skip | could not get file properties | {file}", filePath);
hasHardlinks = true;
break;
}
if (hardlinkCount > 0)
{
hasHardlinks = true;
break;
}
}
if (hasHardlinks)
{
_logger.LogDebug("skip | download has hardlinks | {name}", download.Name);
continue;
}
string oldCategory = download.Category;
await _dryRunInterceptor.InterceptAsync(ChangeCategory, download.Hash, _downloadCleanerConfig.UnlinkedTargetCategory);
if (_downloadCleanerConfig.UnlinkedUseTag)
{
_logger.LogInformation("tag added for {name}", download.Name);
}
else
{
_logger.LogInformation("category changed for {name}", download.Name);
download.Category = _downloadCleanerConfig.UnlinkedTargetCategory;
}
// notify with the category captured before it was overwritten
await _notifier.NotifyCategoryChanged(oldCategory, _downloadCleanerConfig.UnlinkedTargetCategory, _downloadCleanerConfig.UnlinkedUseTag);
}
}
/// <inheritdoc/>
[DryRunSafeguard]
public override async Task DeleteDownload(string hash)
{
await _client.DeleteAsync(hash, deleteDownloadedData: true);
}
[DryRunSafeguard]
protected async Task CreateCategory(string name)
{
await _client.AddCategoryAsync(name);
}
[DryRunSafeguard]
protected virtual async Task SkipFile(string hash, int fileIndex)
{
await _client.SetFilePriorityAsync(hash, fileIndex, TorrentContentPriority.Skip);
}
[DryRunSafeguard]
protected virtual async Task ChangeCategory(string hash, string newCategory)
{
if (_downloadCleanerConfig.UnlinkedUseTag)
{
await _client.AddTorrentTagAsync([hash], newCategory);
return;
}
await _client.SetTorrentCategoryAsync([hash], newCategory);
}
public override void Dispose()
{
_client.Dispose();
}
private async Task<(bool, DeleteReason)> EvaluateDownloadRemoval(TorrentInfo torrent, bool isPrivate)
{
(bool ShouldRemove, DeleteReason Reason) result = await CheckIfSlow(torrent, isPrivate);
if (result.ShouldRemove)
{
return result;
}
return await CheckIfStuck(torrent, isPrivate);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfSlow(TorrentInfo download, bool isPrivate)
{
if (_queueCleanerConfig.SlowMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (download.State is not (TorrentState.Downloading or TorrentState.ForcedDownload))
{
return (false, DeleteReason.None);
}
if (download.DownloadSpeed <= 0)
{
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.SlowIgnorePrivate && isPrivate)
{
// ignore private trackers
_logger.LogDebug("skip slow check | download is private | {name}", download.Name);
return (false, DeleteReason.None);
}
if (download.Size > (_queueCleanerConfig.SlowIgnoreAboveSizeByteSize?.Bytes ?? long.MaxValue))
{
_logger.LogDebug("skip slow check | download is too large | {name}", download.Name);
return (false, DeleteReason.None);
}
ByteSize minSpeed = _queueCleanerConfig.SlowMinSpeedByteSize;
ByteSize currentSpeed = new ByteSize(download.DownloadSpeed);
SmartTimeSpan maxTime = SmartTimeSpan.FromHours(_queueCleanerConfig.SlowMaxTime);
SmartTimeSpan currentTime = new SmartTimeSpan(download.EstimatedTime ?? TimeSpan.Zero);
return await CheckIfSlow(
download.Hash,
download.Name,
minSpeed,
currentSpeed,
maxTime,
currentTime
);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfStuck(TorrentInfo torrent, bool isPrivate)
{
if (_queueCleanerConfig.StalledMaxStrikes is 0 && _queueCleanerConfig.DownloadingMetadataMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (torrent.State is not TorrentState.StalledDownload and not TorrentState.FetchingMetadata
and not TorrentState.ForcedFetchingMetadata)
{
// ignore other states
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.StalledMaxStrikes > 0 && torrent.State is TorrentState.StalledDownload)
{
if (_queueCleanerConfig.StalledIgnorePrivate && isPrivate)
{
// ignore private trackers
_logger.LogDebug("skip stalled check | download is private | {name}", torrent.Name);
}
else
{
ResetStalledStrikesOnProgress(torrent.Hash, torrent.Downloaded ?? 0);
return (await _striker.StrikeAndCheckLimit(torrent.Hash, torrent.Name, _queueCleanerConfig.StalledMaxStrikes, StrikeType.Stalled), DeleteReason.Stalled);
}
}
if (_queueCleanerConfig.DownloadingMetadataMaxStrikes > 0 && torrent.State is not TorrentState.StalledDownload)
{
return (await _striker.StrikeAndCheckLimit(torrent.Hash, torrent.Name, _queueCleanerConfig.DownloadingMetadataMaxStrikes, StrikeType.DownloadingMetadata), DeleteReason.DownloadingMetadata);
}
return (false, DeleteReason.None);
}
private async Task<IReadOnlyList<TorrentTracker>> GetTrackersAsync(string hash)
{
return (await _client.GetTorrentTrackersAsync(hash))
.Where(x => x.Url.Contains("**"))
.ToList();
}
}
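The DeletePrivate guard above decides whether a torrent is private by probing the extra properties returned by the download client (apparently qBittorrent's is_private field). A minimal sketch, not part of this codebase, isolates that parse under the assumption that AdditionalData is a string-keyed dictionary of loosely typed JSON values:
using System.Collections.Generic;
// Minimal illustrative helper: read a boolean "is_private" flag out of a loosely typed
// properties dictionary, treating a missing or malformed value as "not private".
static bool IsPrivateTorrent(IReadOnlyDictionary<string, object?> additionalData) =>
    additionalData.TryGetValue("is_private", out object? raw) &&
    bool.TryParse(raw?.ToString(), out bool isPrivate) &&
    isPrivate;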

View File

@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.DownloadClient.Transmission;
public interface ITransmissionService : IDownloadService
{
}

View File

@@ -1,548 +0,0 @@
using System.Collections.Concurrent;
using System.Text.RegularExpressions;
using Common.Attributes;
using Common.Configuration.ContentBlocker;
using Common.Configuration.DownloadCleaner;
using Common.Configuration.DownloadClient;
using Common.Configuration.QueueCleaner;
using Common.CustomDataTypes;
using Common.Helpers;
using Domain.Enums;
using Infrastructure.Extensions;
using Infrastructure.Interceptors;
using Infrastructure.Verticals.ContentBlocker;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.Files;
using Infrastructure.Verticals.ItemStriker;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Transmission.API.RPC;
using Transmission.API.RPC.Arguments;
using Transmission.API.RPC.Entity;
namespace Infrastructure.Verticals.DownloadClient.Transmission;
public class TransmissionService : DownloadService, ITransmissionService
{
private readonly TransmissionConfig _config;
private readonly Client _client;
private static readonly string[] Fields =
[
TorrentFields.FILES,
TorrentFields.FILE_STATS,
TorrentFields.HASH_STRING,
TorrentFields.ID,
TorrentFields.ETA,
TorrentFields.NAME,
TorrentFields.STATUS,
TorrentFields.IS_PRIVATE,
TorrentFields.DOWNLOADED_EVER,
TorrentFields.DOWNLOAD_DIR,
TorrentFields.SECONDS_SEEDING,
TorrentFields.UPLOAD_RATIO,
TorrentFields.TRACKERS,
TorrentFields.RATE_DOWNLOAD,
TorrentFields.TOTAL_SIZE,
];
public TransmissionService(
IHttpClientFactory httpClientFactory,
ILogger<TransmissionService> logger,
IOptions<TransmissionConfig> config,
IOptions<QueueCleanerConfig> queueCleanerConfig,
IOptions<ContentBlockerConfig> contentBlockerConfig,
IOptions<DownloadCleanerConfig> downloadCleanerConfig,
IMemoryCache cache,
IFilenameEvaluator filenameEvaluator,
IStriker striker,
INotificationPublisher notifier,
IDryRunInterceptor dryRunInterceptor,
IHardLinkFileService hardLinkFileService
) : base(
logger, queueCleanerConfig, contentBlockerConfig, downloadCleanerConfig, cache,
filenameEvaluator, striker, notifier, dryRunInterceptor, hardLinkFileService
)
{
_config = config.Value;
_config.Validate();
UriBuilder uriBuilder = new(_config.Url);
uriBuilder.Path = string.IsNullOrEmpty(_config.UrlBase)
? $"{uriBuilder.Path.TrimEnd('/')}/rpc"
: $"{uriBuilder.Path.TrimEnd('/')}/{_config.UrlBase.TrimStart('/').TrimEnd('/')}/rpc";
_client = new(
httpClientFactory.CreateClient(Constants.HttpClientWithRetryName),
uriBuilder.Uri.ToString(),
login: _config.Username,
password: _config.Password
);
}
public override async Task LoginAsync()
{
await _client.GetSessionInformationAsync();
}
/// <inheritdoc/>
public override async Task<DownloadCheckResult> ShouldRemoveFromArrQueueAsync(string hash, IReadOnlyList<string> ignoredDownloads)
{
DownloadCheckResult result = new();
TorrentInfo? download = await GetTorrentAsync(hash);
if (download is null)
{
_logger.LogDebug("failed to find torrent {hash} in the download client", hash);
return result;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogDebug("skip | download is ignored | {name}", download.Name);
return result;
}
bool shouldRemove = download.FileStats?.Length > 0;
result.IsPrivate = download.IsPrivate ?? false;
foreach (TransmissionTorrentFileStats? stats in download.FileStats ?? [])
{
if (!stats.Wanted.HasValue)
{
// if any file's stats are missing, do not remove
shouldRemove = false;
}
if (stats.Wanted.HasValue && stats.Wanted.Value)
{
// if any files are wanted, do not remove
shouldRemove = false;
}
}
if (shouldRemove)
{
// remove if all files are unwanted
result.ShouldRemove = true;
result.DeleteReason = DeleteReason.AllFilesBlocked;
return result;
}
// remove if download is stuck
(result.ShouldRemove, result.DeleteReason) = await EvaluateDownloadRemoval(download);
return result;
}
/// <inheritdoc/>
public override async Task<BlockFilesResult> BlockUnwantedFilesAsync(string hash,
BlocklistType blocklistType,
ConcurrentBag<string> patterns,
ConcurrentBag<Regex> regexes, IReadOnlyList<string> ignoredDownloads)
{
TorrentInfo? download = await GetTorrentAsync(hash);
BlockFilesResult result = new();
if (download?.FileStats is null || download.Files is null)
{
return result;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogDebug("skip | download is ignored | {name}", download.Name);
return result;
}
bool isPrivate = download.IsPrivate ?? false;
result.IsPrivate = isPrivate;
if (_contentBlockerConfig.IgnorePrivate && isPrivate)
{
// ignore private trackers
_logger.LogDebug("skip files check | download is private | {name}", download.Name);
return result;
}
List<long> unwantedFiles = [];
long totalFiles = 0;
long totalUnwantedFiles = 0;
for (int i = 0; i < download.Files.Length; i++)
{
if (download.FileStats?[i].Wanted == null)
{
continue;
}
totalFiles++;
if (!download.FileStats[i].Wanted.Value)
{
totalUnwantedFiles++;
continue;
}
if (_filenameEvaluator.IsValid(download.Files[i].Name, blocklistType, patterns, regexes))
{
continue;
}
_logger.LogInformation("unwanted file found | {file}", download.Files[i].Name);
unwantedFiles.Add(i);
totalUnwantedFiles++;
}
if (unwantedFiles.Count is 0)
{
return result;
}
if (totalUnwantedFiles == totalFiles)
{
// Skip marking files as unwanted. The download will be removed completely.
result.ShouldRemove = true;
return result;
}
_logger.LogDebug("changing priorities | torrent {hash}", hash);
await _dryRunInterceptor.InterceptAsync(SetUnwantedFiles, download.Id, unwantedFiles.ToArray());
return result;
}
public override async Task<List<object>?> GetSeedingDownloads() =>
(await _client.TorrentGetAsync(Fields))
?.Torrents
?.Where(x => !string.IsNullOrEmpty(x.HashString))
.Where(x => x.Status is 5 or 6)
.Cast<object>()
.ToList();
/// <inheritdoc/>
public override List<object>? FilterDownloadsToBeCleanedAsync(List<object>? downloads, List<CleanCategory> categories)
{
return downloads
?.Cast<TorrentInfo>()
.Where(x => categories
.Any(cat => cat.Name.Equals(x.GetCategory(), StringComparison.InvariantCultureIgnoreCase))
)
.Cast<object>()
.ToList();
}
public override List<object>? FilterDownloadsToChangeCategoryAsync(List<object>? downloads, List<string> categories)
{
return downloads
?.Cast<TorrentInfo>()
.Where(x => !string.IsNullOrEmpty(x.HashString))
.Where(x => categories.Any(cat => cat.Equals(x.GetCategory(), StringComparison.InvariantCultureIgnoreCase)))
.Cast<object>()
.ToList();
}
/// <inheritdoc/>
public override async Task CleanDownloadsAsync(List<object>? downloads, List<CleanCategory> categoriesToClean,
HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
foreach (TorrentInfo download in downloads)
{
if (string.IsNullOrEmpty(download.HashString))
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.HashString, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogDebug("skip | download is ignored | {name}", download.Name);
continue;
}
CleanCategory? category = categoriesToClean
.FirstOrDefault(x =>
{
if (download.DownloadDir is null)
{
return false;
}
return Path.GetFileName(Path.TrimEndingDirectorySeparator(download.DownloadDir))
.Equals(x.Name, StringComparison.InvariantCultureIgnoreCase);
});
if (category is null)
{
continue;
}
if (!_downloadCleanerConfig.DeletePrivate && download.IsPrivate is true)
{
_logger.LogDebug("skip | download is private | {name}", download.Name);
continue;
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.HashString);
TimeSpan seedingTime = TimeSpan.FromSeconds(download.SecondsSeeding ?? 0);
SeedingCheckResult result = ShouldCleanDownload(download.uploadRatio ?? 0, seedingTime, category);
if (!result.ShouldClean)
{
continue;
}
await _dryRunInterceptor.InterceptAsync(RemoveDownloadAsync, download.Id);
_logger.LogInformation(
"download cleaned | {reason} reached | {name}",
result.Reason is CleanReason.MaxRatioReached
? "MAX_RATIO & MIN_SEED_TIME"
: "MAX_SEED_TIME",
download.Name
);
await _notifier.NotifyDownloadCleaned(download.uploadRatio ?? 0, seedingTime, category.Name, result.Reason);
}
}
public override async Task CreateCategoryAsync(string name)
{
await Task.CompletedTask;
}
public override async Task ChangeCategoryForNoHardLinksAsync(List<object>? downloads, HashSet<string> excludedHashes, IReadOnlyList<string> ignoredDownloads)
{
if (downloads?.Count is null or 0)
{
return;
}
if (!string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir))
{
_hardLinkFileService.PopulateFileCounts(_downloadCleanerConfig.UnlinkedIgnoredRootDir);
}
foreach (TorrentInfo download in downloads.Cast<TorrentInfo>())
{
if (string.IsNullOrEmpty(download.HashString) || string.IsNullOrEmpty(download.Name) || download.DownloadDir == null)
{
continue;
}
if (excludedHashes.Any(x => x.Equals(download.HashString, StringComparison.InvariantCultureIgnoreCase)))
{
_logger.LogDebug("skip | download is used by an arr | {name}", download.Name);
continue;
}
if (ignoredDownloads.Count > 0 && download.ShouldIgnore(ignoredDownloads))
{
_logger.LogDebug("skip | download is ignored | {name}", download.Name);
continue;
}
ContextProvider.Set("downloadName", download.Name);
ContextProvider.Set("hash", download.HashString);
bool hasHardlinks = false;
if (download.Files is null || download.FileStats is null)
{
_logger.LogDebug("skip | download has no files | {name}", download.Name);
continue;
}
for (int i = 0; i < download.Files.Length; i++)
{
TransmissionTorrentFiles file = download.Files[i];
TransmissionTorrentFileStats stats = download.FileStats[i];
if (stats.Wanted is null or false || string.IsNullOrEmpty(file.Name))
{
continue;
}
string filePath = string.Join(Path.DirectorySeparatorChar, Path.Combine(download.DownloadDir, file.Name).Split(['\\', '/']));
long hardlinkCount = _hardLinkFileService.GetHardLinkCount(filePath, !string.IsNullOrEmpty(_downloadCleanerConfig.UnlinkedIgnoredRootDir));
if (hardlinkCount < 0)
{
_logger.LogDebug("skip | could not get file properties | {file}", filePath);
hasHardlinks = true;
break;
}
if (hardlinkCount > 0)
{
hasHardlinks = true;
break;
}
}
if (hasHardlinks)
{
_logger.LogDebug("skip | download has hardlinks | {name}", download.Name);
continue;
}
string currentCategory = download.GetCategory();
string newLocation = string.Join(Path.DirectorySeparatorChar, Path.Combine(download.DownloadDir, _downloadCleanerConfig.UnlinkedTargetCategory).Split(['\\', '/']));
await _dryRunInterceptor.InterceptAsync(ChangeDownloadLocation, download.Id, newLocation);
_logger.LogInformation("category changed for {name}", download.Name);
await _notifier.NotifyCategoryChanged(currentCategory, _downloadCleanerConfig.UnlinkedTargetCategory);
download.DownloadDir = newLocation;
}
}
[DryRunSafeguard]
protected virtual async Task ChangeDownloadLocation(long downloadId, string newLocation)
{
await _client.TorrentSetLocationAsync([downloadId], newLocation, true);
}
public override async Task DeleteDownload(string hash)
{
TorrentInfo? torrent = await GetTorrentAsync(hash);
if (torrent is null)
{
return;
}
await _client.TorrentRemoveAsync([torrent.Id], true);
}
public override void Dispose()
{
}
[DryRunSafeguard]
protected virtual async Task RemoveDownloadAsync(long downloadId)
{
await _client.TorrentRemoveAsync([downloadId], true);
}
[DryRunSafeguard]
protected virtual async Task SetUnwantedFiles(long downloadId, long[] unwantedFiles)
{
await _client.TorrentSetAsync(new TorrentSettings
{
Ids = [downloadId],
FilesUnwanted = unwantedFiles,
});
}
private async Task<(bool, DeleteReason)> EvaluateDownloadRemoval(TorrentInfo torrent)
{
(bool ShouldRemove, DeleteReason Reason) result = await CheckIfSlow(torrent);
if (result.ShouldRemove)
{
return result;
}
return await CheckIfStuck(torrent);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfSlow(TorrentInfo download)
{
if (_queueCleanerConfig.SlowMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (download.Status is not 4)
{
// not in downloading state
return (false, DeleteReason.None);
}
if (download.RateDownload <= 0)
{
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.SlowIgnorePrivate && download.IsPrivate is true)
{
// ignore private trackers
_logger.LogDebug("skip slow check | download is private | {name}", download.Name);
return (false, DeleteReason.None);
}
if (download.TotalSize > (_queueCleanerConfig.SlowIgnoreAboveSizeByteSize?.Bytes ?? long.MaxValue))
{
_logger.LogDebug("skip slow check | download is too large | {name}", download.Name);
return (false, DeleteReason.None);
}
ByteSize minSpeed = _queueCleanerConfig.SlowMinSpeedByteSize;
ByteSize currentSpeed = new ByteSize(download.RateDownload ?? long.MaxValue);
SmartTimeSpan maxTime = SmartTimeSpan.FromHours(_queueCleanerConfig.SlowMaxTime);
SmartTimeSpan currentTime = SmartTimeSpan.FromSeconds(download.Eta ?? 0);
return await CheckIfSlow(
download.HashString!,
download.Name!,
minSpeed,
currentSpeed,
maxTime,
currentTime
);
}
private async Task<(bool ShouldRemove, DeleteReason Reason)> CheckIfStuck(TorrentInfo download)
{
if (_queueCleanerConfig.StalledMaxStrikes is 0)
{
return (false, DeleteReason.None);
}
if (download.Status is not 4)
{
// not in downloading state
return (false, DeleteReason.None);
}
if (download.RateDownload > 0 || download.Eta > 0)
{
return (false, DeleteReason.None);
}
if (_queueCleanerConfig.StalledIgnorePrivate && (download.IsPrivate ?? false))
{
// ignore private trackers
_logger.LogDebug("skip stalled check | download is private | {name}", download.Name);
return (false, DeleteReason.None);
}
ResetStalledStrikesOnProgress(download.HashString!, download.DownloadedEver ?? 0);
return (await _striker.StrikeAndCheckLimit(download.HashString!, download.Name!, _queueCleanerConfig.StalledMaxStrikes, StrikeType.Stalled), DeleteReason.Stalled);
}
private async Task<TorrentInfo?> GetTorrentAsync(string hash) =>
(await _client.TorrentGetAsync(Fields, hash))
?.Torrents
?.FirstOrDefault();
}
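The Transmission checks above compare TorrentInfo.Status against raw integers (4 when filtering for downloading, 5 or 6 when collecting seeding torrents). These are the standard Transmission RPC status codes; the enum below is illustrative only and is not part of this codebase, but it makes the magic numbers easier to read:
// Illustrative only: standard Transmission RPC torrent status values.
enum TransmissionStatus
{
    Stopped = 0,
    QueuedToVerify = 1,
    Verifying = 2,
    QueuedToDownload = 3,
    Downloading = 4,   // CheckIfSlow/CheckIfStuck only act on this state
    QueuedToSeed = 5,
    Seeding = 6        // GetSeedingDownloads keeps statuses 5 and 6
}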

View File

@@ -1,9 +0,0 @@
using Domain.Models.Arr;
using Infrastructure.Verticals.DownloadRemover.Models;
namespace Infrastructure.Verticals.DownloadRemover.Interfaces;
public interface IQueueItemRemover
{
Task RemoveQueueItemAsync<T>(QueueItemRemoveRequest<T> request) where T : SearchItem;
}

View File

@@ -1,66 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.General;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Helpers;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.Context;
using Infrastructure.Verticals.DownloadRemover.Interfaces;
using Infrastructure.Verticals.DownloadRemover.Models;
using Infrastructure.Verticals.Notifications;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.DownloadRemover;
public sealed class QueueItemRemover : IQueueItemRemover
{
private readonly SearchConfig _searchConfig;
private readonly IMemoryCache _cache;
private readonly ArrClientFactory _arrClientFactory;
private readonly INotificationPublisher _notifier;
public QueueItemRemover(
IOptions<SearchConfig> searchConfig,
IMemoryCache cache,
ArrClientFactory arrClientFactory,
INotificationPublisher notifier
)
{
_searchConfig = searchConfig.Value;
_cache = cache;
_arrClientFactory = arrClientFactory;
_notifier = notifier;
}
public async Task RemoveQueueItemAsync<T>(QueueItemRemoveRequest<T> request)
where T : SearchItem
{
try
{
var arrClient = _arrClientFactory.GetClient(request.InstanceType);
await arrClient.DeleteQueueItemAsync(request.Instance, request.Record, request.RemoveFromClient, request.DeleteReason);
// push to context
ContextProvider.Set(nameof(QueueRecord), request.Record);
ContextProvider.Set(nameof(ArrInstance) + nameof(ArrInstance.Url), request.Instance.Url);
ContextProvider.Set(nameof(InstanceType), request.InstanceType);
await _notifier.NotifyQueueItemDeleted(request.RemoveFromClient, request.DeleteReason);
if (!_searchConfig.SearchEnabled)
{
return;
}
await arrClient.SearchItemsAsync(request.Instance, [request.SearchItem]);
// prevent tracker spamming
await Task.Delay(TimeSpan.FromSeconds(_searchConfig.SearchDelay));
}
finally
{
_cache.Remove(CacheKeys.DownloadMarkedForRemoval(request.Record.DownloadId, request.Instance.Url));
}
}
}
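RemoveQueueItemAsync is driven by a message bus rather than called directly. A hedged sketch of a MassTransit consumer shows how a published QueueItemRemoveRequest would reach it; the consumer type name matches the registration that appears later in this diff, but its body here is illustrative only:
using Domain.Models.Arr;
using Infrastructure.Verticals.DownloadRemover.Interfaces;
using Infrastructure.Verticals.DownloadRemover.Models;
using MassTransit;
// Illustrative consumer: receives a removal request from the bus and delegates to IQueueItemRemover.
public sealed class DownloadRemoverConsumer<T> : IConsumer<QueueItemRemoveRequest<T>> where T : SearchItem
{
    private readonly IQueueItemRemover _queueItemRemover;
    public DownloadRemoverConsumer(IQueueItemRemover queueItemRemover) => _queueItemRemover = queueItemRemover;
    public Task Consume(ConsumeContext<QueueItemRemoveRequest<T>> context) =>
        _queueItemRemover.RemoveQueueItemAsync(context.Message);
}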

View File

@@ -1,179 +0,0 @@
using Common.Configuration.Arr;
using Common.Configuration.DownloadClient;
using Domain.Enums;
using Domain.Models.Arr;
using Domain.Models.Arr.Queue;
using Infrastructure.Verticals.Arr;
using Infrastructure.Verticals.DownloadClient;
using Infrastructure.Verticals.DownloadRemover.Models;
using Infrastructure.Verticals.Notifications;
using MassTransit;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Infrastructure.Verticals.Jobs;
public abstract class GenericHandler : IHandler, IDisposable
{
protected readonly ILogger<GenericHandler> _logger;
protected readonly DownloadClientConfig _downloadClientConfig;
protected readonly SonarrConfig _sonarrConfig;
protected readonly RadarrConfig _radarrConfig;
protected readonly LidarrConfig _lidarrConfig;
protected readonly IMemoryCache _cache;
protected readonly IBus _messageBus;
protected readonly ArrClientFactory _arrClientFactory;
protected readonly ArrQueueIterator _arrArrQueueIterator;
protected readonly IDownloadService _downloadService;
protected readonly INotificationPublisher _notifier;
protected GenericHandler(
ILogger<GenericHandler> logger,
IOptions<DownloadClientConfig> downloadClientConfig,
IOptions<SonarrConfig> sonarrConfig,
IOptions<RadarrConfig> radarrConfig,
IOptions<LidarrConfig> lidarrConfig,
IMemoryCache cache,
IBus messageBus,
ArrClientFactory arrClientFactory,
ArrQueueIterator arrArrQueueIterator,
DownloadServiceFactory downloadServiceFactory,
INotificationPublisher notifier
)
{
_logger = logger;
_downloadClientConfig = downloadClientConfig.Value;
_sonarrConfig = sonarrConfig.Value;
_radarrConfig = radarrConfig.Value;
_lidarrConfig = lidarrConfig.Value;
_cache = cache;
_messageBus = messageBus;
_arrClientFactory = arrClientFactory;
_arrArrQueueIterator = arrArrQueueIterator;
_downloadService = downloadServiceFactory.CreateDownloadClient();
_notifier = notifier;
}
public virtual async Task ExecuteAsync()
{
await _downloadService.LoginAsync();
await ProcessArrConfigAsync(_sonarrConfig, InstanceType.Sonarr);
await ProcessArrConfigAsync(_radarrConfig, InstanceType.Radarr);
await ProcessArrConfigAsync(_lidarrConfig, InstanceType.Lidarr);
}
public virtual void Dispose()
{
_downloadService.Dispose();
}
protected abstract Task ProcessInstanceAsync(ArrInstance instance, InstanceType instanceType, ArrConfig config);
protected async Task ProcessArrConfigAsync(ArrConfig config, InstanceType instanceType, bool throwOnFailure = false)
{
if (!config.Enabled)
{
return;
}
foreach (ArrInstance arrInstance in config.Instances)
{
try
{
await ProcessInstanceAsync(arrInstance, instanceType, config);
}
catch (Exception exception)
{
_logger.LogError(exception, "failed to clean {type} instance | {url}", instanceType, arrInstance.Url);
if (throwOnFailure)
{
throw;
}
}
}
}
protected async Task PublishQueueItemRemoveRequest(
string downloadRemovalKey,
InstanceType instanceType,
ArrInstance instance,
QueueRecord record,
bool isPack,
bool removeFromClient,
DeleteReason deleteReason
)
{
if (instanceType is InstanceType.Sonarr)
{
QueueItemRemoveRequest<SonarrSearchItem> removeRequest = new()
{
InstanceType = instanceType,
Instance = instance,
Record = record,
SearchItem = (SonarrSearchItem)GetRecordSearchItem(instanceType, record, isPack),
RemoveFromClient = removeFromClient,
DeleteReason = deleteReason
};
await _messageBus.Publish(removeRequest);
}
else
{
QueueItemRemoveRequest<SearchItem> removeRequest = new()
{
InstanceType = instanceType,
Instance = instance,
Record = record,
SearchItem = GetRecordSearchItem(instanceType, record, isPack),
RemoveFromClient = removeFromClient,
DeleteReason = deleteReason
};
await _messageBus.Publish(removeRequest);
}
_cache.Set(downloadRemovalKey, true);
_logger.LogInformation("item marked for removal | {title} | {url}", record.Title, instance.Url);
}
protected SearchItem GetRecordSearchItem(InstanceType type, QueueRecord record, bool isPack = false)
{
return type switch
{
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Episode && !isPack => new SonarrSearchItem
{
Id = record.EpisodeId,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Episode
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Episode && isPack => new SonarrSearchItem
{
Id = record.SeasonNumber,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Season
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Season => new SonarrSearchItem
{
Id = record.SeasonNumber,
SeriesId = record.SeriesId,
SearchType = SonarrSearchType.Series
},
InstanceType.Sonarr when _sonarrConfig.SearchType is SonarrSearchType.Series => new SonarrSearchItem
{
Id = record.SeriesId
},
InstanceType.Radarr => new SearchItem
{
Id = record.MovieId
},
InstanceType.Lidarr => new SearchItem
{
Id = record.AlbumId
},
_ => throw new NotImplementedException($"instance type {type} is not yet supported")
};
}
}

View File

@@ -1,6 +0,0 @@
namespace Infrastructure.Verticals.Jobs;
public interface IHandler
{
Task ExecuteAsync();
}

View File

@@ -1,6 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Apprise;
public interface IAppriseProxy
{
Task SendNotification(ApprisePayload payload, AppriseConfig config);
}

View File

@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Models;
public sealed record FailedImportStrikeNotification : ArrNotification
{
}

View File

@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Models;
public sealed record QueueItemDeletedNotification : ArrNotification
{
}

View File

@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Models;
public sealed record SlowStrikeNotification : ArrNotification
{
}

View File

@@ -1,5 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Models;
public sealed record StalledStrikeNotification : ArrNotification
{
}

View File

@@ -1,6 +0,0 @@
namespace Infrastructure.Verticals.Notifications.Notifiarr;
public interface INotifiarrProxy
{
Task SendNotification(NotifiarrPayload payload, NotifiarrConfig config);
}

code/Makefile Normal file
View File

@@ -0,0 +1,16 @@
.DEFAULT_GOAL := no-default
no-default:
$(error You must specify a make target)
migrate-data:
ifndef name
$(error name is required. Usage: make migrate-data name=YourMigrationName)
endif
dotnet ef migrations add $(name) --context DataContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Data
migrate-events:
ifndef name
$(error name is required. Usage: make migrate-events name=YourMigrationName)
endif
dotnet ef migrations add $(name) --context EventsContext --project backend/Cleanuparr.Persistence/Cleanuparr.Persistence.csproj --startup-project backend/Cleanuparr.Api/Cleanuparr.Api.csproj --output-dir Migrations/Events
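Both targets require a name variable; typical invocations (the migration names are only examples) look like:
make migrate-data name=AddDownloadClientSettings
make migrate-events name=AddEventIndexes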

View File

@@ -0,0 +1,49 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<AssemblyName>Cleanuparr</AssemblyName>
<Version Condition="'$(Version)' == ''">0.0.1</Version>
<TargetFramework>net9.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<PublishReadyToRun>true</PublishReadyToRun>
<EnableMacOSCodeSign>false</EnableMacOSCodeSign>
<CodeSignOnCopy>false</CodeSignOnCopy>
<_CodeSignDuringBuild>false</_CodeSignDuringBuild>
<CodesignDisableTimestamp>true</CodesignDisableTimestamp>
<CodesignKeychain></CodesignKeychain>
</PropertyGroup>
<ItemGroup>
<Folder Include="wwwroot\" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Cleanuparr.Application\Cleanuparr.Application.csproj" />
<ProjectReference Include="..\Cleanuparr.Infrastructure\Cleanuparr.Infrastructure.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.EntityFrameworkCore.Design" Version="9.0.6">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Hosting.WindowsServices" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http" Version="9.0.6" />
<PackageReference Include="Microsoft.Extensions.Http.Polly" Version="9.0.6" />
<PackageReference Include="Quartz" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.DependencyInjection" Version="3.14.0" />
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.14.0" />
<PackageReference Include="Serilog" Version="4.3.0" />
<PackageReference Include="Serilog.Expressions" Version="5.0.0" />
<PackageReference Include="Serilog.Extensions.Hosting" Version="9.0.0" />
<PackageReference Include="Serilog.Settings.Configuration" Version="9.0.0" />
<PackageReference Include="Serilog.Sinks.Console" Version="6.0.0" />
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
<!-- API-related packages -->
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="9.0.2" />
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.5.0" />
</ItemGroup>
</Project>
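The Version property only falls back to 0.0.1 when nothing is passed in, which suggests release builds supply it on the command line. A hypothetical publish invocation (the version number and runtime identifier are placeholders; the project path matches the Makefile above) would be:
dotnet publish backend/Cleanuparr.Api/Cleanuparr.Api.csproj -c Release -p:Version=2.1.0 -r linux-x64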

View File

@@ -0,0 +1,14 @@
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api")]
public class ApiDocumentationController : ControllerBase
{
[HttpGet]
public IActionResult RedirectToSwagger()
{
return Redirect("/api/swagger");
}
}

View File

File diff suppressed because it is too large

View File

@@ -0,0 +1,240 @@
using System.Text.Json.Serialization;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Persistence;
using Cleanuparr.Persistence.Models.Events;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class EventsController : ControllerBase
{
private readonly EventsContext _context;
public EventsController(EventsContext context)
{
_context = context;
}
/// <summary>
/// Gets events with pagination and filtering
/// </summary>
[HttpGet]
public async Task<ActionResult<PaginatedResult<AppEvent>>> GetEvents(
[FromQuery] int page = 1,
[FromQuery] int pageSize = 100,
[FromQuery] string? severity = null,
[FromQuery] string? eventType = null,
[FromQuery] DateTime? fromDate = null,
[FromQuery] DateTime? toDate = null,
[FromQuery] string? search = null)
{
// Validate pagination parameters
if (page < 1) page = 1;
if (pageSize < 1) pageSize = 100;
if (pageSize > 1000) pageSize = 1000; // Cap at 1000 for performance
var query = _context.Events.AsQueryable();
// Apply filters
if (!string.IsNullOrWhiteSpace(severity))
{
if (Enum.TryParse<EventSeverity>(severity, true, out var severityEnum))
query = query.Where(e => e.Severity == severityEnum);
}
if (!string.IsNullOrWhiteSpace(eventType))
{
if (Enum.TryParse<EventType>(eventType, true, out var eventTypeEnum))
query = query.Where(e => e.EventType == eventTypeEnum);
}
// Apply date range filters
if (fromDate.HasValue)
{
query = query.Where(e => e.Timestamp >= fromDate.Value);
}
if (toDate.HasValue)
{
query = query.Where(e => e.Timestamp <= toDate.Value);
}
// Apply search filter if provided
if (!string.IsNullOrWhiteSpace(search))
{
string pattern = EventsContext.GetLikePattern(search);
query = query.Where(e =>
EF.Functions.Like(e.Message, pattern) ||
EF.Functions.Like(e.Data, pattern) ||
EF.Functions.Like(e.TrackingId.ToString(), pattern)
);
}
// Count total matching records for pagination
var totalCount = await query.CountAsync();
// Calculate pagination
var totalPages = (int)Math.Ceiling(totalCount / (double)pageSize);
var skip = (page - 1) * pageSize;
// Get paginated data
var events = await query
.OrderByDescending(e => e.Timestamp)
.Skip(skip)
.Take(pageSize)
.ToListAsync();
events = events
.OrderBy(e => e.Timestamp)
.ToList();
// Return paginated result
var result = new PaginatedResult<AppEvent>
{
Items = events,
Page = page,
PageSize = pageSize,
TotalCount = totalCount,
TotalPages = totalPages
};
return Ok(result);
}
/// <summary>
/// Gets a specific event by ID
/// </summary>
[HttpGet("{id}")]
public async Task<ActionResult<AppEvent>> GetEvent(Guid id)
{
var eventEntity = await _context.Events.FindAsync(id);
if (eventEntity == null)
return NotFound();
return Ok(eventEntity);
}
/// <summary>
/// Gets events by tracking ID
/// </summary>
[HttpGet("tracking/{trackingId}")]
public async Task<ActionResult<List<AppEvent>>> GetEventsByTracking(Guid trackingId)
{
var events = await _context.Events
.Where(e => e.TrackingId == trackingId)
.OrderBy(e => e.Timestamp)
.ToListAsync();
return Ok(events);
}
/// <summary>
/// Gets event statistics
/// </summary>
[HttpGet("stats")]
public async Task<ActionResult<object>> GetEventStats()
{
var stats = new
{
TotalEvents = await _context.Events.CountAsync(),
EventsBySeverity = await _context.Events
.GroupBy(e => e.Severity)
.Select(g => new { Severity = g.Key.ToString(), Count = g.Count() })
.ToListAsync(),
EventsByType = await _context.Events
.GroupBy(e => e.EventType)
.Select(g => new { EventType = g.Key.ToString(), Count = g.Count() })
.OrderByDescending(x => x.Count)
.Take(10)
.ToListAsync(),
RecentEventsCount = await _context.Events
.Where(e => e.Timestamp > DateTime.UtcNow.AddHours(-24))
.CountAsync()
};
return Ok(stats);
}
/// <summary>
/// Manually triggers cleanup of old events
/// </summary>
[HttpPost("cleanup")]
public async Task<ActionResult<object>> CleanupOldEvents([FromQuery] int retentionDays = 30)
{
var cutoffDate = DateTime.UtcNow.AddDays(-retentionDays);
await _context.Events
.Where(e => e.Timestamp < cutoffDate)
.ExecuteDeleteAsync();
return Ok();
}
/// <summary>
/// Gets unique event types
/// </summary>
[HttpGet("types")]
public async Task<ActionResult<List<string>>> GetEventTypes()
{
var types = Enum.GetNames(typeof(EventType)).ToList();
return Ok(types);
}
/// <summary>
/// Gets unique severities
/// </summary>
[HttpGet("severities")]
public async Task<ActionResult<List<string>>> GetSeverities()
{
var severities = Enum.GetNames(typeof(EventSeverity)).ToList();
return Ok(severities);
}
}
/// <summary>
/// Represents a paginated result set
/// </summary>
/// <typeparam name="T">Type of items in the result</typeparam>
public class PaginatedResult<T>
{
/// <summary>
/// The items in the current page
/// </summary>
public List<T> Items { get; set; } = new();
/// <summary>
/// Current page number (1-based)
/// </summary>
public int Page { get; set; }
/// <summary>
/// Number of items per page
/// </summary>
public int PageSize { get; set; }
/// <summary>
/// Total number of items across all pages
/// </summary>
public int TotalCount { get; set; }
/// <summary>
/// Total number of pages
/// </summary>
public int TotalPages { get; set; }
/// <summary>
/// Whether there is a previous page
/// </summary>
[JsonIgnore]
public bool HasPrevious => Page > 1;
/// <summary>
/// Whether there is a next page
/// </summary>
[JsonIgnore]
public bool HasNext => Page < TotalPages;
}
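A hedged client-side sketch shows how the paginated envelope above can be consumed. The base address and filter values are placeholders; the route and query parameters come from EventsController, and ASP.NET Core's default camelCase JSON naming is assumed:
using System;
using System.Net.Http;
using System.Text.Json;
// Query the first page of error events matching a search term and print their messages.
using HttpClient client = new() { BaseAddress = new Uri("http://localhost:5000") }; // placeholder address
string json = await client.GetStringAsync("api/events?page=1&pageSize=50&severity=Error&search=stalled");
using JsonDocument doc = JsonDocument.Parse(json);
Console.WriteLine($"total pages: {doc.RootElement.GetProperty("totalPages").GetInt32()}");
foreach (JsonElement item in doc.RootElement.GetProperty("items").EnumerateArray())
{
    Console.WriteLine(item.GetProperty("message").GetString());
}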

View File

@@ -0,0 +1,103 @@
using Cleanuparr.Infrastructure.Health;
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
/// <summary>
/// Controller for checking the health of download clients
/// </summary>
[ApiController]
[Route("api/health")]
public class HealthCheckController : ControllerBase
{
private readonly ILogger<HealthCheckController> _logger;
private readonly IHealthCheckService _healthCheckService;
/// <summary>
/// Initializes a new instance of the <see cref="HealthCheckController"/> class
/// </summary>
public HealthCheckController(
ILogger<HealthCheckController> logger,
IHealthCheckService healthCheckService)
{
_logger = logger;
_healthCheckService = healthCheckService;
}
/// <summary>
/// Gets the health status of all download clients
/// </summary>
[HttpGet]
public IActionResult GetAllHealth()
{
try
{
var healthStatuses = _healthCheckService.GetAllClientHealth();
return Ok(healthStatuses);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving client health statuses");
return StatusCode(500, new { Error = "An error occurred while retrieving client health statuses" });
}
}
/// <summary>
/// Gets the health status of a specific download client
/// </summary>
[HttpGet("{id:guid}")]
public IActionResult GetClientHealth(Guid id)
{
try
{
var healthStatus = _healthCheckService.GetClientHealth(id);
if (healthStatus == null)
{
return NotFound(new { Message = $"Health status for client with ID '{id}' not found" });
}
return Ok(healthStatus);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving health status for client {id}", id);
return StatusCode(500, new { Error = "An error occurred while retrieving the client health status" });
}
}
/// <summary>
/// Triggers a health check for all download clients
/// </summary>
[HttpPost("check")]
public async Task<IActionResult> CheckAllHealth()
{
try
{
var results = await _healthCheckService.CheckAllClientsHealthAsync();
return Ok(results);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking health for all clients");
return StatusCode(500, new { Error = "An error occurred while checking client health" });
}
}
/// <summary>
/// Triggers a health check for a specific download client
/// </summary>
[HttpPost("check/{id:guid}")]
public async Task<IActionResult> CheckClientHealth(Guid id)
{
try
{
var result = await _healthCheckService.CheckClientHealthAsync(id);
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error checking health for client {id}", id);
return StatusCode(500, new { Error = "An error occurred while checking client health" });
}
}
}

View File

@@ -0,0 +1,163 @@
using Cleanuparr.Api.Models;
using Cleanuparr.Infrastructure.Models;
using Infrastructure.Services.Interfaces;
using Microsoft.AspNetCore.Mvc;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class JobsController : ControllerBase
{
private readonly IJobManagementService _jobManagementService;
private readonly ILogger<JobsController> _logger;
public JobsController(IJobManagementService jobManagementService, ILogger<JobsController> logger)
{
_jobManagementService = jobManagementService;
_logger = logger;
}
[HttpGet]
public async Task<IActionResult> GetAllJobs()
{
try
{
var result = await _jobManagementService.GetAllJobs();
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error getting all jobs");
return StatusCode(500, "An error occurred while retrieving jobs");
}
}
[HttpGet("{jobType}")]
public async Task<IActionResult> GetJob(JobType jobType)
{
try
{
var jobInfo = await _jobManagementService.GetJob(jobType);
if (jobInfo.Status == "Not Found")
{
return NotFound($"Job '{jobType}' not found");
}
return Ok(jobInfo);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error getting job {jobType}", jobType);
return StatusCode(500, $"An error occurred while retrieving job '{jobType}'");
}
}
[HttpPost("{jobType}/start")]
public async Task<IActionResult> StartJob(JobType jobType, [FromBody] ScheduleRequest? scheduleRequest = null)
{
try
{
// Get the schedule from the request body if provided
JobSchedule? jobSchedule = scheduleRequest?.Schedule;
var result = await _jobManagementService.StartJob(jobType, jobSchedule);
if (!result)
{
return BadRequest($"Failed to start job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' started successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error starting job {jobType}", jobType);
return StatusCode(500, $"An error occurred while starting job '{jobType}'");
}
}
[HttpPost("{jobType}/stop")]
public async Task<IActionResult> StopJob(JobType jobType)
{
try
{
var result = await _jobManagementService.StopJob(jobType);
if (!result)
{
return BadRequest($"Failed to stop job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' stopped successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error stopping job {jobType}", jobType);
return StatusCode(500, $"An error occurred while stopping job '{jobType}'");
}
}
[HttpPost("{jobType}/pause")]
public async Task<IActionResult> PauseJob(JobType jobType)
{
try
{
var result = await _jobManagementService.PauseJob(jobType);
if (!result)
{
return BadRequest($"Failed to pause job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' paused successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error pausing job {jobType}", jobType);
return StatusCode(500, $"An error occurred while pausing job '{jobType}'");
}
}
[HttpPost("{jobType}/resume")]
public async Task<IActionResult> ResumeJob(JobType jobType)
{
try
{
var result = await _jobManagementService.ResumeJob(jobType);
if (!result)
{
return BadRequest($"Failed to resume job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' resumed successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error resuming job {jobType}", jobType);
return StatusCode(500, $"An error occurred while resuming job '{jobType}'");
}
}
[HttpPut("{jobType}/schedule")]
public async Task<IActionResult> UpdateJobSchedule(JobType jobType, [FromBody] ScheduleRequest scheduleRequest)
{
if (scheduleRequest?.Schedule == null)
{
return BadRequest("Schedule is required");
}
try
{
var result = await _jobManagementService.UpdateJobSchedule(jobType, scheduleRequest.Schedule);
if (!result)
{
return BadRequest($"Failed to update schedule for job '{jobType}'");
}
return Ok(new { Message = $"Job '{jobType}' schedule updated successfully" });
}
catch (Exception ex)
{
_logger.LogError(ex, "Error updating job {jobType} schedule", jobType);
return StatusCode(500, $"An error occurred while updating schedule for job '{jobType}'");
}
}
}

View File

@@ -0,0 +1,270 @@
using System.Diagnostics;
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Features.Arr;
using Cleanuparr.Infrastructure.Features.DownloadClient;
using Cleanuparr.Persistence;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
namespace Cleanuparr.Api.Controllers;
[ApiController]
[Route("api/[controller]")]
public class StatusController : ControllerBase
{
private readonly ILogger<StatusController> _logger;
private readonly DataContext _dataContext;
private readonly DownloadServiceFactory _downloadServiceFactory;
private readonly ArrClientFactory _arrClientFactory;
public StatusController(
ILogger<StatusController> logger,
DataContext dataContext,
DownloadServiceFactory downloadServiceFactory,
ArrClientFactory arrClientFactory)
{
_logger = logger;
_dataContext = dataContext;
_downloadServiceFactory = downloadServiceFactory;
_arrClientFactory = arrClientFactory;
}
[HttpGet]
public async Task<IActionResult> GetSystemStatus()
{
try
{
var process = Process.GetCurrentProcess();
// Get configuration
var downloadClients = await _dataContext.DownloadClients
.AsNoTracking()
.ToListAsync();
var sonarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Sonarr);
var radarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Radarr);
var lidarrConfig = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.AsNoTracking()
.FirstAsync(x => x.Type == InstanceType.Lidarr);
var status = new
{
Application = new
{
Version = GetType().Assembly.GetName().Version?.ToString() ?? "Unknown",
process.StartTime,
UpTime = DateTime.Now - process.StartTime,
MemoryUsageMB = Math.Round(process.WorkingSet64 / 1024.0 / 1024.0, 2),
ProcessorTime = process.TotalProcessorTime
},
DownloadClient = new
{
// TODO
},
MediaManagers = new
{
Sonarr = new
{
InstanceCount = sonarrConfig.Instances.Count
},
Radarr = new
{
InstanceCount = radarrConfig.Instances.Count
},
Lidarr = new
{
InstanceCount = lidarrConfig.Instances.Count
}
}
};
return Ok(status);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving system status");
return StatusCode(500, "An error occurred while retrieving system status");
}
}
[HttpGet("download-client")]
public async Task<IActionResult> GetDownloadClientStatus()
{
try
{
var downloadClients = await _dataContext.DownloadClients
.AsNoTracking()
.ToListAsync();
var result = new Dictionary<string, object>();
// Check for configured clients
if (downloadClients.Count > 0)
{
var clientsStatus = new List<object>();
foreach (var client in downloadClients)
{
clientsStatus.Add(new
{
client.Id,
client.Name,
Type = client.TypeName,
client.Host,
client.Enabled,
IsConnected = client.Enabled, // We can't check connection status without implementing test methods
});
}
result["Clients"] = clientsStatus;
}
return Ok(result);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving download client status");
return StatusCode(500, "An error occurred while retrieving download client status");
}
}
[HttpGet("arrs")]
public async Task<IActionResult> GetMediaManagersStatus()
{
try
{
var status = new Dictionary<string, object>();
// Get configurations
var enabledSonarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Sonarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
var enabledRadarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Radarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
var enabledLidarrInstances = await _dataContext.ArrConfigs
.Include(x => x.Instances)
.Where(x => x.Type == InstanceType.Lidarr)
.SelectMany(x => x.Instances)
.Where(x => x.Enabled)
.AsNoTracking()
.ToListAsync();
// Check Sonarr instances
var sonarrStatus = new List<object>();
foreach (var instance in enabledSonarrInstances)
{
try
{
var sonarrClient = _arrClientFactory.GetClient(InstanceType.Sonarr);
await sonarrClient.TestConnectionAsync(instance);
sonarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
sonarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Sonarr"] = sonarrStatus;
// Check Radarr instances
var radarrStatus = new List<object>();
foreach (var instance in enabledRadarrInstances)
{
try
{
var radarrClient = _arrClientFactory.GetClient(InstanceType.Radarr);
await radarrClient.TestConnectionAsync(instance);
radarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
radarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Radarr"] = radarrStatus;
// Check Lidarr instances
var lidarrStatus = new List<object>();
foreach (var instance in enabledLidarrInstances)
{
try
{
var lidarrClient = _arrClientFactory.GetClient(InstanceType.Lidarr);
await lidarrClient.TestConnectionAsync(instance);
lidarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = true,
Message = "Successfully connected"
});
}
catch (Exception ex)
{
lidarrStatus.Add(new
{
instance.Name,
instance.Url,
IsConnected = false,
Message = $"Connection failed: {ex.Message}"
});
}
}
status["Lidarr"] = lidarrStatus;
return Ok(status);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error retrieving media managers status");
return StatusCode(500, "An error occurred while retrieving media managers status");
}
}
}

View File

@@ -0,0 +1,148 @@
using System.Text.Json.Serialization;
using Cleanuparr.Api.Middleware;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Hubs;
using Cleanuparr.Infrastructure.Logging;
using Microsoft.AspNetCore.Http.Json;
using Microsoft.OpenApi.Models;
using System.Text;
namespace Cleanuparr.Api.DependencyInjection;
public static class ApiDI
{
public static IServiceCollection AddApiServices(this IServiceCollection services)
{
services.Configure<JsonOptions>(options =>
{
options.SerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.SerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
// Add API-specific services
services
.AddControllers()
.AddJsonOptions(options =>
{
options.JsonSerializerOptions.Converters.Add(new JsonStringEnumConverter());
options.JsonSerializerOptions.ReferenceHandler = ReferenceHandler.IgnoreCycles;
});
services.AddEndpointsApiExplorer();
// Add SignalR for real-time updates
services
.AddSignalR()
.AddJsonProtocol(options =>
{
options.PayloadSerializerOptions.Converters.Add(new JsonStringEnumConverter());
});
// Add health status broadcaster
services.AddHostedService<HealthStatusBroadcaster>();
// Add logging initializer service
services.AddHostedService<LoggingInitializer>();
services.AddSwaggerGen(options =>
{
options.SwaggerDoc("v1", new OpenApiInfo
{
Title = "Cleanuparr API",
Version = "v1",
Description = "API for managing media downloads and cleanups",
Contact = new OpenApiContact
{
Name = "Cleanuparr Team"
}
});
});
return services;
}
public static WebApplication ConfigureApi(this WebApplication app)
{
ILogger<Program> logger = app.Services.GetRequiredService<ILogger<Program>>();
// Enable compression
app.UseResponseCompression();
// Serve static files; the cache-control hook below is currently disabled
app.UseStaticFiles(new StaticFileOptions
{
OnPrepareResponse = ctx =>
{
// Cache static assets for 30 days
// if (ctx.File.Name.EndsWith(".js") || ctx.File.Name.EndsWith(".css"))
// {
// ctx.Context.Response.Headers.CacheControl = "public,max-age=2592000";
// }
}
});
// Add the global exception handling middleware first
app.UseMiddleware<ExceptionMiddleware>();
app.UseCors("Any");
app.UseRouting();
if (app.Environment.IsDevelopment())
{
app.UseSwagger();
app.UseSwaggerUI(options =>
{
options.SwaggerEndpoint("v1/swagger.json", "Cleanuparr API v1");
options.RoutePrefix = "swagger";
options.DocumentTitle = "Cleanuparr API Documentation";
});
}
app.UseAuthorization();
app.MapControllers();
// Custom SPA fallback to inject base path
app.MapFallback(async context =>
{
var basePath = app.Configuration.GetValue<string>("BASE_PATH") ?? "/";
// Normalize the base path (remove trailing slash if not root)
if (basePath != "/" && basePath.EndsWith("/"))
{
basePath = basePath.TrimEnd('/');
}
var webRoot = app.Environment.WebRootPath ?? Path.Combine(app.Environment.ContentRootPath, "wwwroot");
var indexPath = Path.Combine(webRoot, "index.html");
if (!File.Exists(indexPath))
{
context.Response.StatusCode = 404;
await context.Response.WriteAsync("index.html not found");
return;
}
var indexContent = await File.ReadAllTextAsync(indexPath);
// Inject the base path into the HTML
var scriptInjection = $@"
<script>
window['_server_base_path'] = '{basePath}';
</script>";
// Insert the script right before the existing script tag
indexContent = indexContent.Replace(
" <script>",
scriptInjection + "\n <script>"
);
context.Response.ContentType = "text/html";
await context.Response.WriteAsync(indexContent, Encoding.UTF8);
});
// Map SignalR hubs
app.MapHub<HealthStatusHub>("/api/hubs/health");
app.MapHub<AppHub>("/api/hubs/app");
return app;
}
}

View File

@@ -0,0 +1,93 @@
using Cleanuparr.Domain.Enums;
using Cleanuparr.Infrastructure.Models;
using Cleanuparr.Shared.Helpers;
using Serilog;
using Serilog.Events;
using Serilog.Templates;
using Serilog.Templates.Themes;
namespace Cleanuparr.Api.DependencyInjection;
public static class LoggingDI
{
public static ILoggingBuilder AddLogging(this ILoggingBuilder builder)
{
Log.Logger = GetDefaultLoggerConfiguration().CreateLogger();
return builder.ClearProviders().AddSerilog();
}
public static LoggerConfiguration GetDefaultLoggerConfiguration()
{
LoggerConfiguration logConfig = new();
const string categoryTemplate = "{#if Category is not null} {Concat('[',Category,']'),CAT_PAD}{#end}";
const string jobNameTemplate = "{#if JobName is not null} {Concat('[',JobName,']'),JOB_PAD}{#end}";
const string consoleOutputTemplate = $"[{{@t:yyyy-MM-dd HH:mm:ss.fff}} {{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m}}\n{{@x}}";
const string fileOutputTemplate = $"{{@t:yyyy-MM-dd HH:mm:ss.fff zzz}} [{{@l:u3}}]{jobNameTemplate}{categoryTemplate} {{@m:lj}}\n{{@x}}";
// Determine job name padding
List<string> jobNames = [nameof(JobType.QueueCleaner), nameof(JobType.ContentBlocker), nameof(JobType.DownloadCleaner)];
int jobPadding = jobNames.Max(x => x.Length) + 2;
// Determine instance name padding
List<string> categoryNames = [
InstanceType.Sonarr.ToString(),
InstanceType.Radarr.ToString(),
InstanceType.Lidarr.ToString(),
InstanceType.Readarr.ToString(),
InstanceType.Whisparr.ToString(),
"SYSTEM"
];
int catPadding = categoryNames.Max(x => x.Length) + 2;
// Apply padding values to templates
string consoleTemplate = consoleOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
string fileTemplate = fileOutputTemplate
.Replace("JOB_PAD", jobPadding.ToString())
.Replace("CAT_PAD", catPadding.ToString());
// Configure base logger with dynamic level control
logConfig
.MinimumLevel.Is(LogEventLevel.Information)
.Enrich.FromLogContext()
.WriteTo.Console(new ExpressionTemplate(consoleTemplate, theme: TemplateTheme.Literate));
// Create the logs directory
string logsPath = Path.Combine(ConfigurationPathProvider.GetConfigPath(), "logs");
if (!Directory.Exists(logsPath))
{
try
{
Directory.CreateDirectory(logsPath);
}
catch (Exception exception)
{
throw new Exception($"Failed to create log directory | {logsPath}", exception);
}
}
// Add main log file
logConfig.WriteTo.File(
path: Path.Combine(logsPath, "cleanuparr-.txt"),
formatter: new ExpressionTemplate(fileTemplate),
fileSizeLimitBytes: 10L * 1024 * 1024,
rollingInterval: RollingInterval.Day,
rollOnFileSizeLimit: true,
shared: true
);
logConfig
.MinimumLevel.Override("MassTransit", LogEventLevel.Warning)
.MinimumLevel.Override("Microsoft.Hosting.Lifetime", LogEventLevel.Information)
.MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
.MinimumLevel.Override("Quartz", LogEventLevel.Warning)
.MinimumLevel.Override("System.Net.Http.HttpClient", LogEventLevel.Error)
.Enrich.WithProperty("ApplicationName", "Cleanuparr");
return logConfig;
}
}
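With the JOB_PAD and CAT_PAD widths substituted in, the console template renders lines roughly like the following (the values are illustrative, and the bracketed job and category columns only appear when the corresponding property is attached to the event):
[2025-06-27 15:26:37.123 INF] [QueueCleaner]   [Sonarr]   download cleaned | MAX_SEED_TIME reached | Some.Download.Name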

View File

@@ -1,19 +1,15 @@
using System.Net;
using Common.Configuration.General;
using Common.Helpers;
using Domain.Models.Arr;
using Infrastructure.Services;
using Infrastructure.Verticals.DownloadClient.Deluge;
using Infrastructure.Verticals.DownloadRemover.Consumers;
using Infrastructure.Verticals.Notifications.Consumers;
using System.Text.Json.Serialization;
using Cleanuparr.Infrastructure.Features.DownloadRemover.Consumers;
using Cleanuparr.Infrastructure.Features.Notifications.Consumers;
using Cleanuparr.Infrastructure.Health;
using Cleanuparr.Infrastructure.Http;
using Cleanuparr.Infrastructure.Http.DynamicHttpClientSystem;
using Data.Models.Arr;
using Infrastructure.Verticals.Notifications.Models;
using MassTransit;
using MassTransit.Configuration;
using Microsoft.Extensions.Options;
using Polly;
using Polly.Extensions.Http;
using Microsoft.Extensions.Caching.Memory;

namespace Executable.DependencyInjection;
namespace Cleanuparr.Api.DependencyInjection;

public static class MainDI
{
@@ -21,11 +17,10 @@ public static class MainDI
        services
            .AddLogging(builder => builder.ClearProviders().AddConsole())
            .AddHttpClients(configuration)
            .AddConfiguration(configuration)
            .AddMemoryCache(options => {
                options.ExpirationScanFrequency = TimeSpan.FromMinutes(1);
            })
            .AddSingleton<MemoryCache>()
            .AddSingleton<IMemoryCache>(serviceProvider => serviceProvider.GetRequiredService<MemoryCache>())
            .AddServices()
            .AddHealthServices()
            .AddQuartzServices(configuration)
            .AddNotifications(configuration)
            .AddMassTransit(config =>
@@ -42,6 +37,14 @@ public static class MainDI
                config.UsingInMemory((context, cfg) =>
                {
                    cfg.ConfigureJsonSerializerOptions(options =>
                    {
                        options.Converters.Add(new JsonStringEnumConverter());
                        options.ReferenceHandler = ReferenceHandler.IgnoreCycles;
                        return options;
                    });

                    cfg.ReceiveEndpoint("download-remover-queue", e =>
                    {
                        e.ConfigureConsumer<DownloadRemoverConsumer<SearchItem>>(context);
@@ -66,57 +69,23 @@ public static class MainDI
    private static IServiceCollection AddHttpClients(this IServiceCollection services, IConfiguration configuration)
    {
        // add default HttpClient
        services.AddHttpClient();

        // Add the dynamic HTTP client system - this replaces all the previous static configurations
        services.AddDynamicHttpClients();

        // Add the dynamic HTTP client provider that uses the new system
        services.AddSingleton<IDynamicHttpClientProvider, DynamicHttpClientProvider>();

        HttpConfig config = configuration.Get<HttpConfig>() ?? new();
        config.Validate();

        // add retry HttpClient
        services
            .AddHttpClient(Constants.HttpClientWithRetryName, x =>
            {
                x.Timeout = TimeSpan.FromSeconds(config.Timeout);
            })
            .ConfigurePrimaryHttpMessageHandler(provider =>
            {
                CertificateValidationService service = provider.GetRequiredService<CertificateValidationService>();

                return new HttpClientHandler
                {
                    ServerCertificateCustomValidationCallback = service.ShouldByPassValidationError
                };
            })
            .AddRetryPolicyHandler(config);

        // add Deluge HttpClient
        services
            .AddHttpClient(nameof(DelugeService), x =>
            {
                x.Timeout = TimeSpan.FromSeconds(config.Timeout);
            })
            .ConfigurePrimaryHttpMessageHandler(_ =>
            {
                return new HttpClientHandler
                {
                    AllowAutoRedirect = true,
                    UseCookies = true,
                    CookieContainer = new CookieContainer(),
                    AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate,
                    ServerCertificateCustomValidationCallback = (_, _, _, _) => true
                };
            })
            .AddRetryPolicyHandler(config);

        return services;
    }

    private static IHttpClientBuilder AddRetryPolicyHandler(this IHttpClientBuilder builder, HttpConfig config) =>
        builder.AddPolicyHandler(
            HttpPolicyExtensions
                .HandleTransientHttpError()
                // do not retry on Unauthorized
                .OrResult(response => !response.IsSuccessStatusCode && response.StatusCode != HttpStatusCode.Unauthorized)
                .WaitAndRetryAsync(config.MaxRetries, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)))
        );

    /// <summary>
    /// Adds health check services to the service collection
    /// </summary>
    private static IServiceCollection AddHealthServices(this IServiceCollection services) =>
        services
            // Register the health check service
            .AddSingleton<IHealthCheckService, HealthCheckService>()
            // Register the background service for periodic health checks
            .AddHostedService<HealthCheckBackgroundService>();
}
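The AddRetryPolicyHandler helper above retries transient HTTP failures with exponential backoff (2 s, 4 s, 8 s, and so on per attempt) while deliberately excluding 401 Unauthorized responses from retries. A standalone sketch of the same Polly policy is shown below for experimentation outside the DI container; the maxRetries constant, the client timeout, and the request URL stand in for values that HttpConfig would normally supply.

// Standalone sketch of the retry policy wired up by AddRetryPolicyHandler above.
// maxRetries and the URL are placeholders for values provided by HttpConfig.
using System.Net;
using Polly;
using Polly.Extensions.Http;

const int maxRetries = 3;

IAsyncPolicy<HttpResponseMessage> retryPolicy = HttpPolicyExtensions
    .HandleTransientHttpError() // 5xx, 408 and HttpRequestException
    .OrResult(response => !response.IsSuccessStatusCode && response.StatusCode != HttpStatusCode.Unauthorized)
    .WaitAndRetryAsync(maxRetries, retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)));
    // attempt 1 -> 2s, attempt 2 -> 4s, attempt 3 -> 8s

using HttpClient client = new() { Timeout = TimeSpan.FromSeconds(100) };

// Transient failures are retried with the backoff above; 401 responses are returned immediately.
HttpResponseMessage response = await retryPolicy.ExecuteAsync(() => client.GetAsync("https://example.com/api/health"));
Console.WriteLine(response.StatusCode);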

View File

@@ -1,15 +1,15 @@
using Infrastructure.Verticals.Notifications;
using Infrastructure.Verticals.Notifications.Apprise;
using Infrastructure.Verticals.Notifications.Notifiarr;
using Cleanuparr.Infrastructure.Features.Notifications;
using Cleanuparr.Infrastructure.Features.Notifications.Apprise;
using Cleanuparr.Infrastructure.Features.Notifications.Notifiarr;
using Infrastructure.Verticals.Notifications;

namespace Executable.DependencyInjection;
namespace Cleanuparr.Api.DependencyInjection;

public static class NotificationsDI
{
    public static IServiceCollection AddNotifications(this IServiceCollection services, IConfiguration configuration) =>
        services
            .Configure<NotifiarrConfig>(configuration.GetSection(NotifiarrConfig.SectionName))
            .Configure<AppriseConfig>(configuration.GetSection(AppriseConfig.SectionName))
            // Notification configs are now managed through ConfigManager
            .AddTransient<INotifiarrProxy, NotifiarrProxy>()
            .AddTransient<INotificationProvider, NotifiarrProvider>()
            .AddTransient<IAppriseProxy, AppriseProxy>()
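Registering concrete notifiers against the shared INotificationProvider interface means callers can depend on IEnumerable<INotificationProvider> and fan one event out to every configured service. The interface's real members are not visible in this diff, so the sketch below uses stand-in types with a hypothetical NotifyAsync method purely to illustrate the container behaviour of multiple registrations.

// Sketch only: INotificationProviderSketch and NotifyAsync are stand-ins, because the
// real INotificationProvider members are not shown in this diff.
using Microsoft.Extensions.DependencyInjection;

public interface INotificationProviderSketch
{
    Task NotifyAsync(string message);
}

public sealed class NotifiarrProviderSketch : INotificationProviderSketch
{
    public Task NotifyAsync(string message) { Console.WriteLine($"Notifiarr: {message}"); return Task.CompletedTask; }
}

public sealed class AppriseProviderSketch : INotificationProviderSketch
{
    public Task NotifyAsync(string message) { Console.WriteLine($"Apprise: {message}"); return Task.CompletedTask; }
}

public static class FanOutDemo
{
    public static async Task Main()
    {
        await using ServiceProvider provider = new ServiceCollection()
            .AddTransient<INotificationProviderSketch, NotifiarrProviderSketch>()
            .AddTransient<INotificationProviderSketch, AppriseProviderSketch>()
            .BuildServiceProvider();

        // Multiple registrations of the same interface resolve as a sequence.
        foreach (INotificationProviderSketch notifier in provider.GetServices<INotificationProviderSketch>())
        {
            await notifier.NotifyAsync("Download removed");
        }
    }
}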

View File

@@ -0,0 +1,20 @@
using Cleanuparr.Api.Jobs;
using Quartz;

namespace Cleanuparr.Api.DependencyInjection;

public static class QuartzDI
{
    public static IServiceCollection AddQuartzServices(this IServiceCollection services, IConfiguration configuration) =>
        services
            .AddQuartz()
            .AddQuartzHostedService(opt =>
            {
                opt.WaitForJobsToComplete = true;
            })
            // Register BackgroundJobManager as a hosted service
            .AddSingleton<BackgroundJobManager>()
            .AddHostedService(provider => provider.GetRequiredService<BackgroundJobManager>());
            // Jobs are now managed by BackgroundJobManager
}
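AddQuartzServices hands all job registration to BackgroundJobManager rather than declaring jobs at startup. That class's implementation is not part of this diff, so the sketch below only shows one plausible shape for such a manager: an IHostedService that resolves Quartz's ISchedulerFactory and schedules a job when the host starts. Everything except the Quartz and hosting APIs (the class names, the stand-in job, the five-minute interval) is an assumption.

// Hypothetical sketch of a job-managing hosted service. Only the Quartz and
// Microsoft.Extensions.Hosting APIs are real; the class names and schedule are assumed.
using Microsoft.Extensions.Hosting;
using Quartz;

public sealed class QueueCleanerJobSketch : IJob
{
    // Stand-in for a real Cleanuparr job.
    public Task Execute(IJobExecutionContext context) => Task.CompletedTask;
}

public sealed class BackgroundJobManagerSketch : IHostedService
{
    private readonly ISchedulerFactory _schedulerFactory;

    public BackgroundJobManagerSketch(ISchedulerFactory schedulerFactory)
    {
        _schedulerFactory = schedulerFactory;
    }

    public async Task StartAsync(CancellationToken cancellationToken)
    {
        IScheduler scheduler = await _schedulerFactory.GetScheduler(cancellationToken);

        IJobDetail job = JobBuilder.Create<QueueCleanerJobSketch>()
            .WithIdentity(nameof(QueueCleanerJobSketch))
            .Build();

        ITrigger trigger = TriggerBuilder.Create()
            .StartNow()
            .WithSimpleSchedule(s => s.WithIntervalInMinutes(5).RepeatForever())
            .Build();

        // WaitForJobsToComplete (set in AddQuartzServices above) applies when the host shuts down.
        await scheduler.ScheduleJob(job, trigger, cancellationToken);
    }

    public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask;
}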

Some files were not shown because too many files have changed in this diff.