Compare commits

..

194 Commits

Author SHA1 Message Date
Safihre
e8206fbdd9 Set version to 3.3.0 2021-06-01 07:35:13 +02:00
Jiri van Bergen
589f15a77b Merge branch '3.3.x' 2021-06-01 07:34:59 +02:00
Safihre
7bb443678a Build release when creating the tag 2021-06-01 07:18:41 +02:00
Safihre
6390415101 Update text files for 3.3.0 2021-06-01 07:16:42 +02:00
Sander
4abf192e11 deobfuscate: bugfix for collections if extension in CAPITALS (#1904) 2021-06-01 07:06:21 +02:00
Safihre
1fed37f9da Notify users that Plush will be removed in 3.4.0
Relates to #1902
2021-05-25 09:28:10 +02:00
Safihre
8fdb259270 Update text files for 3.3.0RC2
draft release
2021-05-20 08:04:06 +02:00
Safihre
98b0b46dda Only use active servers in stop_idle_jobs 2021-05-17 23:04:22 +02:00
Safihre
861fb9e3d5 Always update the number of servers
init_server(None, newid) would not trigger a recount
2021-05-17 23:04:22 +02:00
Safihre
644bcee14e Remove max_art_opt
Moved to Specials in 2.2.x and never heard anyone complain about it. So time to get rid of it.
2021-05-17 23:04:22 +02:00
Safihre
933d9e92d1 Re-work the Server's reset article queue 2021-05-17 23:04:22 +02:00
jcfp
9fb03a25f6 allow missing extension for unwanted check (#1896) 2021-05-16 21:02:06 +02:00
Safihre
0b1f7827fc Add additional unrar output when checking passworded files 2021-05-14 22:09:11 +02:00
Safihre
49f21e2c9d macOS Python set to 3.9.5 2021-05-14 22:02:13 +02:00
Safihre
990c0e07cf platform.platform() is not available on all platforms
Closes #1893
2021-05-14 09:08:53 +02:00
Safihre
745459e69f Update text files for 3.3.0RC1
draft release
2021-05-13 15:14:35 +02:00
Safihre
115a6cf5d7 Windows: Update Multipar to 1.3.1.7 2021-05-10 10:44:59 +02:00
Safihre
39aafbbc61 Windows/macOS: Update UnRar to 6.0.1 2021-05-10 10:44:47 +02:00
puzzledsab
93ddc9ce99 Add article queue and change article tries system (#1870)
* Add article queue and change article tries system

* Don't reuse queued articles with get_articles

* Add article_queue to server slots

* Generalize get_articles

* Set fetch_limit to be at least 1

* A little tweaking

* More micro optimization

* Small tweaks

* Remove misplaced reset_article_queue()

* Call reset_article_queue() from plan_server

Co-authored-by: Safihre <safihre@sabnzbd.org>
2021-05-07 22:18:50 +02:00
Safihre
3d877eed13 Call BPSmeter.init_server_stats for all servers at end of day 2021-05-07 16:51:47 +02:00
Safihre
308d612c05 Re-init server statistics when clearing 2021-05-07 15:45:17 +02:00
Safihre
9b75f0428d Only call single BPSMeter.update at midnight 2021-05-07 15:41:40 +02:00
Safihre
e6858659fb Prevent ZeroDivisionError's in BPSMeter 2021-05-07 14:06:27 +02:00
Safihre
815058ffcd Fix and extend on BPSMeter optimizations 2021-05-07 13:40:08 +02:00
puzzledsab
915b540576 BPSMeter optimizations (#1889)
* Remove stats initialization

* Use update(), remove sum_cached_amount

* Refactor can_be_slowed ifs

* Revert "Refactor can_be_slowed ifs"

This reverts commit 4c9e3e6645.
2021-05-07 13:02:01 +02:00
Safihre
5b06d6925c Sort Downloader.servers by priority 2021-05-07 09:07:14 +02:00
Safihre
ef875fa720 Remove unused Downloader.server_dict 2021-05-07 08:46:13 +02:00
Safihre
994a7d044f Config restart-check faster because shutdown is now much faster 2021-05-07 08:32:45 +02:00
Safihre
80cd7f39b4 Show server in download-report even if it was disabled later on 2021-05-07 07:15:37 +02:00
Safihre
93bf45cde6 Simplify build_status by removing server connection details for Plush 2021-05-06 22:44:19 +02:00
Safihre
b4adc064a0 Remove subject nzf.subject property as it is unused
Since 3.0.0 we always fill the nzf.filename
2021-05-06 22:33:20 +02:00
Safihre
7e81d0bcbb Update text files for 3.3.0Beta4
draft release
2021-05-06 10:36:47 +02:00
SABnzbd Automation
33b59f091e Update translatable texts 2021-05-06 08:26:04 +00:00
Safihre
ea3dc1f2f4 Add validation of translations 2021-05-06 10:00:10 +02:00
SABnzbd Automation
5d3e68a6a5 Update translatable texts 2021-05-06 07:31:48 +00:00
Safihre
64f2ec3ffe Setting RSS rate would result in crash
Closes #1890
2021-05-06 09:30:55 +02:00
Safihre
c80014ec7d Use __slots__ on Downloader thread object 2021-05-03 22:57:00 +02:00
Safihre
6515720d55 Use __slots__ on Server object (#1887) 2021-05-03 16:34:12 +02:00
puzzledsab
605c5cbfd8 Check busy threads less often if nothing is wrong (#1884)
* Check busy threads less often if nothing is wrong

* Simplify variable usage

* Use local constant for server check delaying
2021-05-03 15:51:16 +02:00
puzzledsab
77e97d1a89 Check header before entering parse_par2_packet (#1885)
* Check header before entering parse_par2_packet

* Stop using offset variable
2021-05-03 13:35:11 +02:00
Safihre
f17d959770 Remove unused code to support Windows Vista 2021-05-02 13:06:45 +02:00
Safihre
22f1d2f642 Stop scanning the par2 file once we have the information of all files 2021-05-02 10:16:17 +02:00
jcfp
7d3907fa0e also test with (partially) exploded ipv4-mapped addresses (#1880) 2021-05-01 19:45:53 +02:00
Safihre
9588fe8d94 Simplify startup logging 2021-05-01 18:38:24 +02:00
Sander
3b3ffdb8d1 Show cpu architecture (#1879)
* show CPU architecture in logging.info

* show CPU architecture in logging.info ... make black happy

* show CPU architecture in logging.info ... comment

* show CPU architecture in logging.info ... comment

* show CPU architecture in logging.info ... comment

* show CPU architecture in logging.info ... oneliner
2021-05-01 17:16:09 +02:00
Safihre
cdd7e6931a Post-processing would crash if there is no files to unpack 2021-05-01 16:50:15 +02:00
puzzledsab
4c3df012a6 Don't slice data and stop reading par2 files when duplicate filename is found (#1878)
* Don't slice data

* Stop reading par2 files when duplicate filename is found
2021-05-01 16:10:40 +02:00
Safihre
b0eaf93331 Extend unit test for par2file to check logging of par2 creator 2021-05-01 12:24:11 +02:00
Safihre
55c03279ca Optimize par2 file parsing 2021-05-01 12:06:00 +02:00
Safihre
c4f0753f5a Add basic unit tests for par2file 2021-05-01 12:03:31 +02:00
puzzledsab
a9bd25873e Store status_code as attribute (#1877)
* Store status_code as attribute

* Do reading of code in try
2021-05-01 07:26:44 +02:00
jcfp
5ab6de8123 cut closer to the middle to avoid random test failures (#1874) 2021-04-30 10:45:51 +02:00
jcfp
75deb9d678 add --disable-file-log to systemd service (#1873) 2021-04-30 09:26:55 +02:00
Safihre
b5ce0e0766 Allow setting inet_exposure from command line
Closes #1872
2021-04-30 09:23:30 +02:00
Safihre
43817aef20 Update text files for 3.3.0Beta3
draft release
2021-04-29 11:01:12 +02:00
jcfp
81a7a58299 support prefix and netmask for local_ranges (#1871)
* support prefix and netmask for local_ranges

* housekeeping
2021-04-29 08:35:46 +02:00
puzzledsab
4ae1c21b6f Minor optimizations (#1869) 2021-04-28 12:15:10 +02:00
Safihre
8ffa3e5d4c Add unit tests for sanitize_files 2021-04-27 22:51:43 +02:00
Safihre
ac6ebe1f99 Only reset the NZF try_list when adding par2 files
We can rely on the article try list, or at least we should be able to..
2021-04-27 17:56:13 +02:00
Safihre
a5c07e7873 Reset fetcher and fetcher_priority when resetting article try_list
Closes #1863
2021-04-27 16:48:34 +02:00
SABnzbd Automation
94c4f6008d Update translatable texts 2021-04-27 10:16:05 +00:00
Safihre
615c296023 sanitize_files_in_folder would ignore the newfiles
Would result in deobfuscate not working.
This needs unittests!
Closes #1868
2021-04-27 12:09:46 +02:00
SABnzbd Automation
d227611ee8 Update translatable texts 2021-04-26 21:34:41 +00:00
Safihre
acf00c723f Remove all xmlns from NZB-file data
https://forums.sabnzbd.org/viewtopic.php?f=2&t=25342
2021-04-26 23:33:47 +02:00
Safihre
adb3913daa Only remove the failed server in NzbQueue.reset_try_lists
Closes #1866
2021-04-26 11:48:48 +02:00
Safihre
faf1a44944 Black formatting update 2021-04-26 10:52:11 +02:00
Safihre
9f5cb9ffff Read All Feeds was broken
Closes #1865
2021-04-26 10:14:20 +02:00
SABnzbd Automation
068c653a2a Update translatable texts 2021-04-25 09:03:10 +00:00
Safihre
b1c922bb75 Post-proc queue was not filtered by nzo_ids 2021-04-25 11:02:04 +02:00
Safihre
4879fbc6d4 CRC/yenc errors would be counted twice as bad articles 2021-04-24 21:53:23 +02:00
Safihre
e7dc81eb38 Update text files for 3.3.0Beta2
draft release
2021-04-23 17:24:50 +02:00
Safihre
a9d86a7447 Set version to 3.2.1 2021-03-31 10:24:42 +02:00
Safihre
2abe4c3cef Merge branch '3.2.x' 2021-03-31 09:25:49 +02:00
Safihre
0542c25003 Update text files for 3.2.1
draft release
2021-03-31 09:24:31 +02:00
puzzledsab
1b8ee4e290 Show server expiration date in server summary (#1841) 2021-03-31 08:57:38 +02:00
Safihre
51128cba55 Do not notify warning/errors from same source twice
Closes #1842
2021-03-30 17:30:07 +02:00
Safihre
3612432581 Do not discard data for CrcError's
https://forums.sabnzbd.org/viewtopic.php?f=11&t=25278
2021-03-30 16:05:04 +02:00
Safihre
deca000a1b Revert some improvements to the encrypted RAR-detection
Closes #1840
2021-03-29 14:05:52 +02:00
Safihre
39cccb5653 Update text files for 3.2.1RC2
draft release
2021-03-24 10:13:43 +01:00
Safihre
f6838dc985 Improvements to the encrypted RAR-detection 2021-03-20 18:32:11 +01:00
Safihre
8cd4d92395 Make get_all_passwords return only unique passwords
If the filename and the NZB specified the same one it could occur 2 or 3 times.
2021-03-20 18:32:05 +01:00
Safihre
3bf9906f45 Update text files for 3.2.1RC1
draft release
2021-03-18 10:30:05 +01:00
Safihre
9f7daf96ef Update URL for Python 3 information 2021-03-18 09:10:39 +01:00
Sander
67de4df155 deobfuscate: no globber, but use given filelist (#1830) 2021-03-18 09:10:31 +01:00
Safihre
bc51a4bd1c Remove old compatibility code from BPSMeter that causes crash on startup
Closes #1827
2021-03-18 09:10:23 +01:00
Sander
bb54616018 deobfuscate: rename accompanying (smaller) files with same basename, and no renaming of collections with same extension (#1826)
* deobfuscate: rename accompanying (smaller) files with same basename

* deobfuscate: do not rename collections of same extension

* deobfuscate: collection ... much easier with one loop, thanks safihre.

* deobfuscate: globber_full, and cleanup

* deobfuscate: unittest test_deobfuscate_big_file_small_accompanying_files

* deobfuscate: unittest test_deobfuscate_collection_with_same_extension

* deobfuscate: unittest test_deobfuscate_collection_with_same_extension
2021-03-18 09:10:18 +01:00
Safihre
6bcff5e014 More space for the RSS table
Closes #1824
2021-03-18 09:10:09 +01:00
puzzledsab
8970a03a9a Use binary mode to make write test more accurate on Windows (#1815) 2021-03-10 22:23:10 +01:00
Safihre
3ad717ca35 Single indexer categories would be saved with "," between each letter 2021-03-10 22:23:10 +01:00
jcfp
b14f72c67a fix config auto_sort setting, broken by #1666 (#1813)
* fix config auto_sort setting, broken by #1666

* oops I did it again
2021-03-10 22:23:10 +01:00
Safihre
45d036804f Show name of item to be deleted from queue/history in confirm dialog 2021-03-10 22:23:10 +01:00
Safihre
8f606db233 Add traceback when failing to read the password file
Closes #1810
2021-03-10 22:23:10 +01:00
Safihre
3766ba5402 pre-create subdir if needed (POSIX, par2) (#1802)
* pre-create subdir if needed

* pre-create subdir if needed: check if already exists

* use os.makedirs() to handle subdir1/subdir2/blabla

* protect against malicious "..", and better naming

* check for Windows \ and POSIX /

* check again within path, typo and formatting

* regex: square brackets

* cleanup: only "/" can occur in par2

* cleanup: better logging

* unit test: testing of filesystem.renamer()

* if subdir specified in par2: let filesystem.renamer() do all the work

* if create_local_directories=True, then renamer() must stay within specified directory. Plus unittest for that.

* if create_local_directories=True, then renamer() must stay within specified directory. Plus unittest for that.

* more comments in code

* use filesystem.create_all_dirs(), less logging, clearer "..", and other feedback from Safihre

* make remote black happy too

* Small changes in wording of comments and error

Co-authored-by: Safihre <safihre@sabnzbd.org>
2021-03-10 22:23:10 +01:00
jxyzn
e851813cef Sanitize names possibly derived from X-DNZB-EpisodeName (#1806) 2021-03-10 22:15:23 +01:00
thezoggy
4d49ad9141 3.2.x cleanup (#1808)
* Update uni_config bootstrap css to same version of js (3.3.7).
* small accessibility change, removed thin dot border on focus

* Ignore VS Code settings folder

* cherry picked 'Fix disabled select for Glitter Night'

* glitter night - fix search border color
2021-02-27 14:47:44 +01:00
Safihre
16618b3af2 Set version to 3.2.0 2021-02-26 10:30:00 +01:00
Safihre
0e5c0f664f Merge branch '3.2.x' 2021-02-26 10:29:39 +01:00
Safihre
7be9281431 Update text files for 3.2.0
draft release
2021-02-26 09:56:47 +01:00
Safihre
ee0327fac1 Update macOS build Python to 3.9.2 2021-02-26 09:44:51 +01:00
Safihre
9930de3e7f Log all nzo_info when adding NZB's
Relates to #1806
2021-02-26 09:18:14 +01:00
Sander
e8503e89c6 handle gracefully if no malloc_trim() available (#1800) 2021-02-26 09:18:00 +01:00
puzzledsab
1d9ed419eb Remove some redundant ifs (#1791) 2021-02-26 09:17:29 +01:00
Safihre
0207652e3e Update text files for 3.2.0RC2
draft release
2021-02-08 21:02:38 +01:00
Safihre
0f1e99c5cb Update translatable texts 2021-02-08 13:29:16 +01:00
puzzledsab
f134bc7efb Right-to-Left support for Glitter and Config (#1776)
* Add rtl on main page

* Adjustments to rtl

* Forgot to add black check for this checkout

* Remove unnecessary style

* Remove more redundant attributes

* Some more reordering and alignment

* Align sorting and nzb drop downs

* Update NZB details and shutdown page

* Fix format

* Fix SABnzbd Config title tag

* Change file list header direction

* Set rtl variables in build_header instead and test dir="rtl" in config pages

* Revert some changes and handle styling using CSS

* Move more items to CSS

* Config RTL

* Move even more to CSS

* Small tweak

Co-authored-by: Safihre <safihre@sabnzbd.org>
2021-02-08 13:23:03 +01:00
puzzledsab
dcd7c7180e Do full server check when there are busy_threads (#1786)
* Do full server check when there are busy_threads

* Reduce next_article_search delay to 0.5s
2021-02-08 13:19:38 +01:00
jcfp
fbbfcd075b fix bonjour with localhost, retire LOCALHOSTS constant (#1782)
* fix bonjour with localhost, retire LOCALHOSTS constant

* rename probablyipv[46] functions to is_ipv[46]_addr

* refuse to send ssdp description_xml to outsiders
2021-02-08 13:19:30 +01:00
Safihre
f42d2e4140 Rename Glitter Default to Light and make Auto the new Default 2021-02-05 15:01:28 +01:00
Sam Edwards
88882cebbc Support for auto night mode switching in Glitter (#1783) 2021-02-05 15:01:13 +01:00
Safihre
17a979675c Do not re-release from GA when the release tag is pushed 2021-02-05 15:01:04 +01:00
Safihre
4642850c79 Set macOS Python installer target to "/" 2021-02-05 15:01:00 +01:00
Safihre
e8d6eebb04 Set version to 3.1.1 2020-11-11 22:04:44 +01:00
Safihre
864c5160c0 Merge branch '3.1.x' 2020-11-11 22:01:20 +01:00
Safihre
99b5a00c12 Update text files for 3.1.1 2020-11-11 21:56:15 +01:00
Safihre
85ee1f07d7 Do not crash if we cannot format the error message 2020-11-08 15:06:50 +01:00
exizak42
e58b4394e0 Separate email message lines are with CRLF (#1671)
SMTP protocol dictates that all lines are supposed to be separated
with CRLF and not LF (even on LF-based systems). This change ensures
that even if the original byte string message is using `\n` for line
separators, the SMTP protocol will still work properly.

This resolves sabnzbd#1669

Fix code formatting
2020-11-08 14:44:44 +01:00
Safihre
1e91a57bf1 It was not possible to set directory-settings to empty values 2020-11-06 16:14:53 +01:00
Safihre
39cee52a7e Update text files for 3.1.1RC1 2020-11-02 20:03:43 +01:00
Safihre
72068f939d Improve handling of binary restarts (macOS / Windows) 2020-11-02 19:57:57 +01:00
Safihre
096d0d3cad Deobfuscate-during-download did not work
https://forums.sabnzbd.org/viewtopic.php?f=3&t=25037
2020-11-01 15:35:09 +01:00
Safihre
2472ab0121 Python 3.5 does not know ssl.PROTOCOL_TLS_SERVER
Closes #1658
2020-10-27 15:52:28 +01:00
Safihre
00421717b8 Queue Repair would fail if Rating is enabled
Closes #1649
2020-10-24 11:10:03 +02:00
Safihre
ae96d93f94 Set version to 3.1.0 2020-10-16 17:02:28 +02:00
Safihre
8522c40c8f Merge branch '3.1.x' 2020-10-16 16:58:58 +02:00
Safihre
23f86e95f1 Update text files for 3.1.0 2020-10-16 16:42:35 +02:00
Safihre
eed2045189 After pre-check the job was not restored to the original spot 2020-10-16 16:27:51 +02:00
Safihre
217785bf0f Applying Filters to a feed would result in crash
Closes #1634
2020-10-15 18:07:06 +02:00
Safihre
6aef50dc5d Update text files for 3.1.0RC3 2020-10-02 11:34:21 +02:00
Safihre
16b6e3caa7 Notify users of Deobfuscate.py that it is now part of SABnzbd 2020-09-29 14:08:51 +02:00
Safihre
3de4c99a8a Only set the "Waiting" status when the job hits post-processing
https://forums.sabnzbd.org/viewtopic.php?f=11&t=24969
2020-09-29 13:51:15 +02:00
Safihre
980aa19a75 Only run Windows Service code when executed from the executables
Could be made to work with the from-sources code.. But seems like very small usecase.
Closes #1623
2020-09-29 10:42:23 +02:00
Safihre
fb4b57e056 Update text files for 3.1.0RC2 2020-09-27 17:19:34 +02:00
Safihre
03638365ea Set execute bit on Deobfuscate.py 2020-09-27 17:17:30 +02:00
Safihre
157cb1c83d Handle failing RSS-feeds for feedparser 6.0.0+
Closes #1621
Now throws warnings (that can be disabled, helpfull_warnings) if readout failed.
2020-09-27 13:32:38 +02:00
Safihre
e51f11c2b1 Do not crash if attributes file is not present 2020-09-25 10:50:19 +02:00
Safihre
1ad0961dd8 Existing files were not parsed when re-adding a job 2020-09-25 10:49:50 +02:00
Safihre
46ff7dd4e2 Do not crash if we can't save attributes, the job might be gone 2020-09-25 10:03:05 +02:00
Safihre
8b067df914 Correctly parse failed_only for Plush 2020-09-23 16:56:57 +02:00
Safihre
ef43b13272 Assume RarFile parses the correct filepaths for the RAR-volumes
Parsing UTF8 from command-line still fails.
https://forums.sabnzbd.org/viewtopic.php?p=122267#p122267
2020-09-21 22:12:43 +02:00
Safihre
e8e9974224 work_name would not be sanitized when adding NZB's
Closes #1615
Now with tests, yeah.
2020-09-21 22:12:34 +02:00
Safihre
feebbb9f04 Merge branch '3.0.x' 2020-09-13 16:40:43 +02:00
Safihre
bc4f06dd1d Limit feedparser<6.0.0 for 3.0.x 2020-09-13 16:40:14 +02:00
Safihre
971e4fc909 Merge branch '3.0.x' 2020-08-30 20:58:31 +02:00
Safihre
51cc765949 Update text files for 3.0.2 2020-08-30 20:50:45 +02:00
Safihre
19c6a4fffa Propagation delay label was shown even if no delay was activated 2020-08-29 16:46:16 +02:00
Safihre
105ac32d2f Reading RSS feed with no categories set could result in crash
Closes #1589
2020-08-28 10:16:49 +02:00
Safihre
57550675d2 Removed logging in macOS sabApp that resulted in double logging 2020-08-28 10:16:41 +02:00
Safihre
e674abc5c0 Update text files for 3.0.2RC2 2020-08-26 08:56:29 +02:00
Safihre
f965c96f51 Change the macOS power assertion to NoIdleSleep 2020-08-26 08:50:54 +02:00
Safihre
c76b8ed9e0 End-of-queue-script did not run on Windows due to long-path
https://forums.sabnzbd.org/viewtopic.php?f=3&t=24918

Will refactor this so they all call 1 function.
2020-08-24 11:28:14 +02:00
Safihre
4fbd0d8a7b Check if name is a string before switching to nzbfile in addfile
Closes #1584
2020-08-24 09:05:25 +02:00
Safihre
2186c0fff6 Update text files for 3.0.2 RC 1 2020-08-21 15:42:35 +02:00
Safihre
1adca9a9c1 Do not crash if certifi certificates are not available
This could happen on Windows, due to overactive virus scanners
2020-08-21 15:26:06 +02:00
Safihre
9408353f2b Priority was not parsed correctly if supplied as string 2020-08-21 15:12:09 +02:00
Safihre
84f4d453d2 Permissions would be set even if user didn't set any
Windows developers like me shouldn't do permissions stuff..
2020-08-21 15:12:01 +02:00
Safihre
d10209f2a1 Extend tests of create_all_dirs to cover apply_umask=False 2020-08-21 15:11:53 +02:00
Safihre
3ae149c72f Split the make_mo.py command for NSIS 2020-08-19 22:21:02 +02:00
Safihre
47385acc3b Make sure we force the final_name to string on legacy get_attrib_file 2020-08-19 16:21:13 +02:00
Safihre
814eeaa900 Redesigned the saving of attributes
Now uses pickle, so that the type of the property is preserved.
Made flexible, so that more properties can be easily added later.
Closes #1575
2020-08-19 16:21:07 +02:00
Safihre
5f2ea13aad NzbFile comparison could crash when comparing finished_files
https://forums.sabnzbd.org/viewtopic.php?f=3&t=24902&p=121748
2020-08-19 08:50:06 +02:00
Safihre
41ca217931 Merge branch '3.0.x' 2020-08-18 11:05:50 +02:00
Safihre
b57d36e8dd Set version information to 3.0.1 2020-08-18 11:05:36 +02:00
Safihre
9a4be70734 List Cheetah minimal version in requirements.txt 2020-08-18 08:21:20 +02:00
Safihre
a8443595a6 Generalize use of certifi module 2020-08-18 08:20:47 +02:00
Safihre
fd0a70ac58 Update text files for 3.0.1 2020-08-17 16:52:23 +02:00
Safihre
8a8685c968 Permissions should only be applied if requested
Corrects 050b925f7b
2020-08-16 18:28:39 +02:00
Safihre
9e6cb8da8e Temporarily set cheroot version due to it breaking our tests
cherrypy/cheroot/issues/312
2020-08-16 18:28:13 +02:00
Safihre
054ec54d51 Basic authentication option was broken
Closes #1571
2020-08-10 15:34:01 +02:00
Safihre
272ce773cb Update text files for 3.0.1RC1 2020-08-07 15:28:11 +02:00
Safihre
050b925f7b Permissions were not set correctly when creating directories (#1568)
Restores changes made in d2e0ebe
2020-08-07 15:22:53 +02:00
Safihre
0087940898 Merge branch '3.0.x' into master 2020-08-02 09:46:41 +02:00
Safihre
e323c014f9 Set version information to 3.0.0 2020-08-01 16:17:08 +02:00
Safihre
cc465c7554 Update text files for 3.0.0
🎉🎉
2020-08-01 15:59:30 +02:00
Safihre
14cb37564f Update translate-link in SABnzbd 2020-07-19 13:01:39 +02:00
Safihre
094db56c3b Default-text for Automatically sort queue 2020-07-16 22:29:02 +02:00
Safihre
aabb709b8b Update text files for 3.0.0 RC 2 2020-07-15 14:10:35 +02:00
Safihre
0833dd2db9 Update translatable texts in 3.0.x branch 2020-07-15 14:07:21 +02:00
Safihre
cd3f912be4 RAR-renamer should be run on badly named RAR-files
https://forums.sabnzbd.org/viewtopic.php?f=2&t=24514&p=121433
2020-07-15 14:01:48 +02:00
Safihre
665c516db6 Only really run pre-script when it is set 2020-07-12 14:20:18 +02:00
Safihre
b670da9fa0 Always use Default-priority when creating NZB-objects
Closes #1552
2020-07-12 14:03:07 +02:00
Safihre
80bee9bffe Search-icon would be shown on top of drop-downs
Closes #1545
2020-06-30 12:57:28 +02:00
Safihre
d85a70e8ad Always report API paused status as a boolean
Closes #1542
2020-06-30 10:26:34 +02:00
Safihre
8f21533e76 Set version to 2.3.9 2019-05-24 11:39:14 +02:00
Safihre
89996482a1 Merge branch '2.3.x' 2019-05-24 09:33:12 +02:00
Safihre
03c10dce91 Update text files for 2.3.9 2019-05-24 09:32:34 +02:00
Safihre
bd5331be05 Merge branch 'develop' into 2.3.x 2019-05-24 09:12:02 +02:00
Safihre
46e1645289 Correct typo in release notes 2019-05-18 10:56:39 +02:00
Safihre
4ce3965747 Update text files for 2.3.9RC2 2019-05-18 09:56:05 +02:00
Safihre
9d4af19db3 Merge branch 'develop' into 2.3.x 2019-05-18 09:45:20 +02:00
Safihre
48e034f4be Update text files for 2.3.9RC1 2019-05-07 13:50:20 +02:00
Safihre
f8959baa2f Revert "Notify develop-users that we will switch to Python 3"
This reverts commit fb238af7de.
2019-05-07 13:35:13 +02:00
Safihre
8ed5997eae Merge branch 'develop' into 2.3.x 2019-05-07 13:10:10 +02:00
Safihre
daf9f50ac8 Set version to 2.3.8 2019-03-18 11:10:56 +01:00
Safihre
6b11013c1a Merge branch '2.3.x' 2019-03-18 11:09:35 +01:00
99 changed files with 1659 additions and 1386 deletions

View File

@@ -59,7 +59,7 @@ jobs:
path: "*-win32-bin.zip"
name: Windows Windows standalone binary (32bit and legacy)
- name: Prepare official release
if: env.AUTOMATION_GITHUB_TOKEN && !startsWith(github.ref, 'refs/tags/')
if: env.AUTOMATION_GITHUB_TOKEN && startsWith(github.ref, 'refs/tags/')
run: python builder/package.py release
build_macos:
@@ -73,7 +73,7 @@ jobs:
# We need the official Python, because the GA ones only support newer macOS versions
# The deployment target is picked up by the Python build tools automatically
# If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
PYTHON_VERSION: 3.9.4
PYTHON_VERSION: 3.9.5
MACOSX_DEPLOYMENT_TARGET: 10.9
steps:
- uses: actions/checkout@v2
@@ -110,5 +110,5 @@ jobs:
path: "*-osx.dmg"
name: macOS binary (not notarized)
- name: Prepare official release
if: env.AUTOMATION_GITHUB_TOKEN && !startsWith(github.ref, 'refs/tags/')
if: env.AUTOMATION_GITHUB_TOKEN && startsWith(github.ref, 'refs/tags/')
run: python3 builder/package.py release

View File

@@ -24,6 +24,9 @@ jobs:
tx pull --all --force --parallel
env:
TX_TOKEN: ${{ secrets.TX_TOKEN }}
- name: Compile translations to validate them
run: |
python3 tools/make_mo.py
- name: Push translatable and translated texts back to repo
uses: stefanzweifel/git-auto-commit-action@v4.5.1
with:

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 3.3.0Beta1
Summary: SABnzbd-3.3.0Beta1
Version: 3.3.0
Summary: SABnzbd-3.3.0
Home-page: https://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,30 +1,46 @@
Release Notes - SABnzbd 3.3.0 Beta 1
Release Notes - SABnzbd 3.3.0
=========================================================
## Changes since 3.2.1
- The `External internet access` will automatically detect local network
and no longer requires the ranges to be defined. Custom ranges can still
be defined through `local_ranges` in Special settings.
and no longer requires local network ranges to be defined. Custom ranges
can still be defined through `local_ranges` in Special settings.
- Allow setting `inet_exposure` from the command line.
- Support prefix and netmask for Special setting `local_ranges`.
- The `Unwanted extensions` detection can be set to `Whitelist`-mode.
This will block or pause all jobs with non-matching extensions.
- Servers article statistics are shown in K, G, M-notation.
- Resolution added as a pattern key (`%r`) for Sorting.
- Optimized performance of par2 file parsing.
- CPU usage optimizations in the download process.
- Revised handling of categories, scripts, and priorities when adding NZB's.
- Download statistics are also shown when no History is shown.
- Confirm rename if Direct Unpack is active for the job.
- Obfuscated-RAR detection will always be performed.
- All requests will be logged, not just API calls.
- Stability improvement to encrypted RAR-detection.
- Allow missing extensions in `Unwanted extensions` detection.
- Removed Special setting `max_art_opt`.
- Add notification that Plush will be removed in 3.4.0.
- Windows/macOS: Update UnRar to 6.0.1.
- Windows: Update Multipar to 1.3.1.7 (adds faster verification).
## Bugfixes since 3.1.1
## Bugfixes since 3.2.1
- Prevent failed post-processing if job name ends in multiple dots or spaces.
- Failing articles could result in jobs being stuck at 99%.
- Jobs could be stuck in the queue or duplicate if they had missing articles.
- Prevent jobs getting stuck at 99% due to unreliable servers.
- CRC/yEnc errors would be counted twice as bad articles.
- Some NZB files would incorrectly be marked as empty.
- API-call `history` would not filter active post-processing by `nzo_ids`.
- Login page could be accessed even if `External internet access` was set
to `No access`. All other access would still be blocked.
to `No access`. Any other calls would still be blocked.
- Ignore duplicate files inside messy NZB's.
- macOS: disk space would be incorrect for very large disks.
- Windows: `Deobfuscate final filenames` could fail to deobfuscate.
- macOS: Disk space would be incorrect for very large disks.
## Upgrade notices
- The download statistics file `totals10.sab` is updated in this
- The download statistics file `totals10.sab` is updated in 3.2.x
version. If you downgrade to 3.1.x or lower, detailed download
statistics will be lost.

View File

@@ -62,7 +62,6 @@ from sabnzbd.misc import (
exit_sab,
split_host,
create_https_certificates,
windows_variant,
ip_extract,
set_serv_parms,
get_serv_parms,
@@ -79,6 +78,7 @@ import sabnzbd.downloader
import sabnzbd.notifier as notifier
import sabnzbd.zconfig
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6
from sabnzbd.utils.getperformance import getpystone, getcpu
import sabnzbd.utils.ssdp as ssdp
try:
@@ -89,9 +89,13 @@ try:
import win32service
import win32ts
import pywintypes
import servicemanager
from win32com.shell import shell, shellcon
from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info
import sabnzbd.sabtray
win32api.SetConsoleCtrlHandler(sabnzbd.sig_handler, True)
from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info
except ImportError:
if sabnzbd.WIN32:
print("Sorry, requires Python module PyWin32.")
@@ -102,13 +106,13 @@ LOG_FLAG = False
def guard_loglevel():
""" Callback function for guarding loglevel """
"""Callback function for guarding loglevel"""
global LOG_FLAG
LOG_FLAG = True
def warning_helpful(*args, **kwargs):
""" Wrapper to ignore helpfull warnings if desired """
"""Wrapper to ignore helpfull warnings if desired"""
if sabnzbd.cfg.helpfull_warnings():
return logging.warning(*args, **kwargs)
return logging.info(*args, **kwargs)
@@ -123,13 +127,13 @@ class GUIHandler(logging.Handler):
"""
def __init__(self, size):
""" Initializes the handler """
"""Initializes the handler"""
logging.Handler.__init__(self)
self._size: int = size
self.store: List[Dict[str, Any]] = []
def emit(self, record: logging.LogRecord):
""" Emit a record by adding it to our private queue """
"""Emit a record by adding it to our private queue"""
# If % is part of the msg, this could fail
try:
parsed_msg = record.msg % record.args
@@ -171,7 +175,7 @@ class GUIHandler(logging.Handler):
return len(self.store)
def content(self):
""" Return an array with last records """
"""Return an array with last records"""
return self.store
@@ -182,35 +186,36 @@ def print_help():
print("Options marked [*] are stored in the config file")
print()
print("Options:")
print(" -f --config-file <ini> Location of config file")
print(" -s --server <srv:port> Listen on server:port [*]")
print(" -t --templates <templ> Template directory [*]")
print(" -f --config-file <ini> Location of config file")
print(" -s --server <srv:port> Listen on server:port [*]")
print(" -t --templates <templ> Template directory [*]")
print()
print(" -l --logging <-1..2> Set logging level (-1=off, 0= least, 2= most) [*]")
print(" -w --weblogging Enable cherrypy access logging")
print(" -l --logging <-1..2> Set logging level (-1=off, 0=least,2= most) [*]")
print(" -w --weblogging Enable cherrypy access logging")
print()
print(" -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]")
print(" -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]")
if sabnzbd.WIN32:
print(" -d --daemon Use when run as a service")
print(" -d --daemon Use when run as a service")
else:
print(" -d --daemon Fork daemon process")
print(" --pid <path> Create a PID file in the given folder (full path)")
print(" --pidfile <path> Create a PID file with the given name (full path)")
print(" -d --daemon Fork daemon process")
print(" --pid <path> Create a PID file in the given folder (full path)")
print(" --pidfile <path> Create a PID file with the given name (full path)")
print()
print(" -h --help Print this message")
print(" -v --version Print version information")
print(" -c --clean Remove queue, cache and logs")
print(" -p --pause Start in paused mode")
print(" --repair Add orphaned jobs from the incomplete folder to the queue")
print(" --repair-all Try to reconstruct the queue from the incomplete folder")
print(" with full data reconstruction")
print(" --https <port> Port to use for HTTPS server")
print(" --ipv6_hosting <0|1> Listen on IPv6 address [::1] [*]")
print(" --no-login Start with username and password reset")
print(" --log-all Log all article handling (for developers)")
print(" --disable-file-log Logging is only written to console")
print(" --console Force logging to console")
print(" --new Run a new instance of SABnzbd")
print(" -h --help Print this message")
print(" -v --version Print version information")
print(" -c --clean Remove queue, cache and logs")
print(" -p --pause Start in paused mode")
print(" --repair Add orphaned jobs from the incomplete folder to the queue")
print(" --repair-all Try to reconstruct the queue from the incomplete folder")
print(" with full data reconstruction")
print(" --https <port> Port to use for HTTPS server")
print(" --ipv6_hosting <0|1> Listen on IPv6 address [::1] [*]")
print(" --inet_exposure <0..5> Set external internet access [*]")
print(" --no-login Start with username and password reset")
print(" --log-all Log all article handling (for developers)")
print(" --disable-file-log Logging is only written to console")
print(" --console Force logging to console")
print(" --new Run a new instance of SABnzbd")
print()
print("NZB (or related) file:")
print(" NZB or compressed NZB file, with extension .nzb, .zip, .rar, .7z, .gz, or .bz2")
@@ -236,7 +241,7 @@ GNU GENERAL PUBLIC LICENSE Version 2 or (at your option) any later version.
def daemonize():
""" Daemonize the process, based on various StackOverflow answers """
"""Daemonize the process, based on various StackOverflow answers"""
try:
pid = os.fork()
if pid > 0:
@@ -278,7 +283,7 @@ def daemonize():
def abort_and_show_error(browserhost, cherryport, err=""):
""" Abort program because of CherryPy troubles """
"""Abort program because of CherryPy troubles"""
logging.error(T("Failed to start web-interface") + " : " + str(err))
if not sabnzbd.DAEMON:
if "49" in err:
@@ -290,7 +295,7 @@ def abort_and_show_error(browserhost, cherryport, err=""):
def identify_web_template(key, defweb, wdir):
""" Determine a correct web template set, return full template path """
"""Determine a correct web template set, return full template path"""
if wdir is None:
try:
wdir = fix_webname(key())
@@ -321,7 +326,7 @@ def identify_web_template(key, defweb, wdir):
def check_template_scheme(color, web_dir):
""" Check existence of color-scheme """
"""Check existence of color-scheme"""
if color and os.path.exists(os.path.join(web_dir, "static", "stylesheets", "colorschemes", color + ".css")):
return color
elif color and os.path.exists(os.path.join(web_dir, "static", "stylesheets", "colorschemes", color)):
@@ -347,8 +352,8 @@ def fix_webname(name):
return name
def get_user_profile_paths(vista_plus):
""" Get the default data locations on Windows"""
def get_user_profile_paths():
"""Get the default data locations on Windows"""
if sabnzbd.DAEMON:
# In daemon mode, do not try to access the user profile
# just assume that everything defaults to the program dir
@@ -363,22 +368,15 @@ def get_user_profile_paths(vista_plus):
return
elif sabnzbd.WIN32:
try:
from win32com.shell import shell, shellcon
path = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, None, 0)
sabnzbd.DIR_LCLDATA = os.path.join(path, DEF_WORKDIR)
sabnzbd.DIR_HOME = os.environ["USERPROFILE"]
except:
try:
if vista_plus:
root = os.environ["AppData"]
user = os.environ["USERPROFILE"]
sabnzbd.DIR_LCLDATA = "%s\\%s" % (root.replace("\\Roaming", "\\Local"), DEF_WORKDIR)
sabnzbd.DIR_HOME = user
else:
root = os.environ["USERPROFILE"]
sabnzbd.DIR_LCLDATA = "%s\\%s" % (root, DEF_WORKDIR)
sabnzbd.DIR_HOME = root
root = os.environ["AppData"]
user = os.environ["USERPROFILE"]
sabnzbd.DIR_LCLDATA = "%s\\%s" % (root.replace("\\Roaming", "\\Local"), DEF_WORKDIR)
sabnzbd.DIR_HOME = user
except:
pass
@@ -407,7 +405,7 @@ def get_user_profile_paths(vista_plus):
def print_modules():
""" Log all detected optional or external modules """
"""Log all detected optional or external modules"""
if sabnzbd.decoder.SABYENC_ENABLED:
# Yes, we have SABYenc, and it's the correct version, so it's enabled
logging.info("SABYenc module (v%s)... found!", sabnzbd.decoder.SABYENC_VERSION)
@@ -484,7 +482,7 @@ def print_modules():
def all_localhosts():
""" Return all unique values of localhost in order of preference """
"""Return all unique values of localhost in order of preference"""
ips = ["127.0.0.1"]
try:
# Check whether IPv6 is available and enabled
@@ -512,7 +510,7 @@ def all_localhosts():
def check_resolve(host):
""" Return True if 'host' resolves """
"""Return True if 'host' resolves"""
try:
socket.getaddrinfo(host, None)
except socket.error:
@@ -600,7 +598,7 @@ def get_webhost(cherryhost, cherryport, https_port):
browserhost = localhost
else:
# If on Vista and/or APIPA, use numerical IP, to help FireFoxers
# If on APIPA, use numerical IP, to help FireFoxers
if ipv6 and ipv4:
cherryhost = hostip
browserhost = cherryhost
@@ -655,7 +653,7 @@ def get_webhost(cherryhost, cherryport, https_port):
def attach_server(host, port, cert=None, key=None, chain=None):
""" Define and attach server, optionally HTTPS """
"""Define and attach server, optionally HTTPS"""
if sabnzbd.cfg.ipv6_hosting() or "::1" not in host:
http_server = cherrypy._cpserver.Server()
http_server.bind_addr = (host, port)
@@ -668,7 +666,7 @@ def attach_server(host, port, cert=None, key=None, chain=None):
def is_sabnzbd_running(url):
""" Return True when there's already a SABnzbd instance running. """
"""Return True when there's already a SABnzbd instance running."""
try:
url = "%s&mode=version" % url
# Do this without certificate verification, few installations will have that
@@ -681,7 +679,7 @@ def is_sabnzbd_running(url):
def find_free_port(host, currentport):
""" Return a free port, 0 when nothing is free """
"""Return a free port, 0 when nothing is free"""
n = 0
while n < 10 and currentport <= 49151:
try:
@@ -778,10 +776,9 @@ def commandline_handler():
"server=",
"templates",
"ipv6_hosting=",
"template2",
"inet_exposure=",
"browser=",
"config-file=",
"force",
"disable-file-log",
"version",
"https=",
@@ -835,7 +832,7 @@ def commandline_handler():
def get_f_option(opts):
""" Return value of the -f option """
"""Return value of the -f option"""
for opt, arg in opts:
if opt == "-f":
return arg
@@ -863,8 +860,6 @@ def main():
console_logging = False
no_file_log = False
web_dir = None
vista_plus = False
win64 = False
repair = 0
no_login = False
sabnzbd.RESTART_ARGS = [sys.argv[0]]
@@ -872,6 +867,7 @@ def main():
pid_file = None
new_instance = False
ipv6_hosting = None
inet_exposure = None
_service, sab_opts, _serv_opts, upload_nzbs = commandline_handler()
@@ -951,6 +947,8 @@ def main():
new_instance = True
elif opt == "--ipv6_hosting":
ipv6_hosting = arg
elif opt == "--inet_exposure":
inet_exposure = arg
sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME))
sabnzbd.MY_NAME = os.path.basename(sabnzbd.MY_FULLNAME)
@@ -977,17 +975,18 @@ def main():
logger.setLevel(logging.WARNING)
logger.addHandler(gui_log)
# Detect Windows variant
# Detect CPU architecture and Windows variant
# Use .machine as .processor is not always filled
cpu_architecture = platform.uname().machine
if sabnzbd.WIN32:
vista_plus, win64 = windows_variant()
sabnzbd.WIN64 = win64
sabnzbd.WIN64 = cpu_architecture == "AMD64"
if inifile:
# INI file given, simplest case
inifile = evaluate_inipath(inifile)
else:
# No ini file given, need profile data
get_user_profile_paths(vista_plus)
get_user_profile_paths()
# Find out where INI file is
inifile = os.path.abspath(os.path.join(sabnzbd.DIR_LCLDATA, DEF_INI_FILE))
@@ -1169,24 +1168,19 @@ def main():
).strip()
except:
pass
logging.info("Commit: %s", sabnzbd.__baseline__)
logging.info("Commit = %s", sabnzbd.__baseline__)
logging.info("Full executable path = %s", sabnzbd.MY_FULLNAME)
if sabnzbd.WIN32:
suffix = ""
if win64:
suffix = "(win64)"
try:
logging.info("Platform = %s %s", platform.platform(), suffix)
except:
logging.info("Platform = %s <unknown>", suffix)
else:
logging.info("Platform = %s", os.name)
logging.info("Python-version = %s", sys.version)
logging.info("Arguments = %s", sabnzbd.CMDLINE)
if sabnzbd.DOCKER:
logging.info("Running inside a docker container")
else:
logging.info("Not inside a docker container")
logging.info("Python-version = %s", sys.version)
logging.info("Dockerized = %s", sabnzbd.DOCKER)
logging.info("CPU architecture = %s", cpu_architecture)
try:
logging.info("Platform = %s - %s", os.name, platform.platform())
except:
# Can fail on special platforms (like Snapcraft or embedded)
pass
# Find encoding; relevant for external processing activities
logging.info("Preferred encoding = %s", sabnzbd.encoding.CODEPAGE)
@@ -1210,8 +1204,8 @@ def main():
try:
os.environ["SSL_CERT_FILE"] = certifi.where()
logging.info("Certifi version: %s", certifi.__version__)
logging.info("Loaded additional certificates from: %s", os.environ["SSL_CERT_FILE"])
logging.info("Certifi version = %s", certifi.__version__)
logging.info("Loaded additional certificates from %s", os.environ["SSL_CERT_FILE"])
except:
# Sometimes the certificate file is blocked
logging.warning(T("Could not load additional certificates from certifi package"))
@@ -1220,38 +1214,16 @@ def main():
# Extra startup info
if sabnzbd.cfg.log_level() > 1:
# List the number of certificates available (can take up to 1.5 seconds)
ctx = ssl.create_default_context()
logging.debug("Available certificates: %s", repr(ctx.cert_store_stats()))
logging.debug("Available certificates = %s", repr(ssl.create_default_context().cert_store_stats()))
mylocalipv4 = localipv4()
if mylocalipv4:
logging.debug("My local IPv4 address = %s", mylocalipv4)
else:
logging.debug("Could not determine my local IPv4 address")
mypublicipv4 = publicipv4()
if mypublicipv4:
logging.debug("My public IPv4 address = %s", mypublicipv4)
else:
logging.debug("Could not determine my public IPv4 address")
myipv6 = ipv6()
if myipv6:
logging.debug("My IPv6 address = %s", myipv6)
else:
logging.debug("Could not determine my IPv6 address")
# List networking
logging.debug("Local IPv4 address = %s", localipv4())
logging.debug("Public IPv4 address = %s", publicipv4())
logging.debug("IPv6 address = %s", ipv6())
# Measure and log system performance measured by pystone and - if possible - CPU model
from sabnzbd.utils.getperformance import getpystone, getcpu
pystoneperf = getpystone()
if pystoneperf:
logging.debug("CPU Pystone available performance = %s", pystoneperf)
else:
logging.debug("CPU Pystone available performance could not be calculated")
cpumodel = getcpu() # Linux only
if cpumodel:
logging.debug("CPU model = %s", cpumodel)
logging.debug("CPU Pystone available performance = %s", getpystone())
logging.debug("CPU model = %s", getcpu())
logging.info("Using INI file %s", inifile)
@@ -1272,8 +1244,6 @@ def main():
# Handle the several tray icons
if sabnzbd.cfg.win_menu() and not sabnzbd.DAEMON and not sabnzbd.WIN_SERVICE:
if sabnzbd.WIN32:
import sabnzbd.sabtray
sabnzbd.WINTRAY = sabnzbd.sabtray.SABTrayThread()
elif sabnzbd.LINUX_POWER and os.environ.get("DISPLAY"):
try:
@@ -1362,6 +1332,10 @@ def main():
sabnzbd.cfg.username.set("")
sabnzbd.cfg.password.set("")
# Overwrite inet_exposure from command-line for VPS-setups
if inet_exposure:
sabnzbd.cfg.inet_exposure.set(inet_exposure)
mime_gzip = (
"text/*",
"application/javascript",
@@ -1632,10 +1606,9 @@ def main():
if sabnzbd.WIN32:
import servicemanager
class SABnzbd(win32serviceutil.ServiceFramework):
""" Win32 Service Handler """
"""Win32 Service Handler"""
_svc_name_ = "SABnzbd"
_svc_display_name_ = "SABnzbd Binary Newsreader"
@@ -1699,7 +1672,7 @@ def handle_windows_service():
Returns True when any service commands were detected or
when we have started as a service.
"""
# Detect if running as Windows Service (only Vista and above!)
# Detect if running as Windows Service
# Adapted from https://stackoverflow.com/a/55248281/5235502
# Only works when run from the exe-files
if hasattr(sys, "frozen") and win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:

View File

@@ -71,14 +71,14 @@ def safe_remove(path):
def delete_files_glob(name):
""" Delete one file or set of files from wild-card spec """
"""Delete one file or set of files from wild-card spec"""
for f in glob.glob(name):
if os.path.exists(f):
os.remove(f)
def run_external_command(command):
""" Wrapper to ease the use of calling external programs """
"""Wrapper to ease the use of calling external programs"""
process = subprocess.Popen(command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = process.communicate()
ret = process.wait()
@@ -90,7 +90,7 @@ def run_external_command(command):
def run_git_command(parms):
""" Run git command, raise error if it failed """
"""Run git command, raise error if it failed"""
return run_external_command(["git"] + parms)
@@ -148,7 +148,7 @@ if __name__ == "__main__":
patch_version_file(RELEASE_VERSION)
# To draft a release or not to draft a release?
RELEASE_THIS = "draft release" in run_git_command(["log", "-1", "--pretty=format:%b"])
RELEASE_THIS = "refs/tags/" in os.environ.get("GITHUB_REF", "")
# Rename release notes file
safe_remove("README.txt")
@@ -339,7 +339,7 @@ if __name__ == "__main__":
print("Approved! Stapling the result to the app")
run_external_command(["xcrun", "stapler", "staple", "dist/SABnzbd.app"])
elif notarization_user and notarization_pass:
print("Notarization skipped, add 'draft release' to the commit message trigger notarization!")
print("Notarization skipped, tag commit to trigger notarization!")
else:
print("Notarization skipped, NOTARIZATION_USER or NOTARIZATION_PASS missing.")
else:
@@ -542,7 +542,7 @@ if __name__ == "__main__":
head=RELEASE_VERSION,
)
else:
print("To push release to GitHub, add 'draft release' to the commit message.")
print("To push release to GitHub, first tag the commit.")
print("Or missing the AUTOMATION_GITHUB_TOKEN, cannot push to GitHub without it.")
# Reset!

View File

@@ -1,5 +1,5 @@
# Basic build requirements
pyinstaller
pyinstaller==4.2
setuptools
pkginfo
certifi

View File

@@ -264,13 +264,13 @@ function do_restart() {
$.ajax({ url: '../../config/restart?apikey=' + sabSession,
complete: function() {
// Keep counter of failures
var failureCounter = 0;
var loopCounter = 0;
// Now we try until we can connect
var refreshInterval = setInterval(function() {
// We skip the first one
if(failureCounter == 0) {
failureCounter = failureCounter+1;
setInterval(function() {
loopCounter = loopCounter+1;
// We skip the first one so we give it time to shutdown
if(loopCounter < 2) {
return
}
$.ajax({ url: urlTotal,
@@ -279,17 +279,16 @@ function do_restart() {
location.href = urlTotal;
},
error: function(status, text) {
failureCounter = failureCounter+1;
// Too many failuers and we give up
if(failureCounter >= 6) {
// Too many failures and we give up
if(loopCounter >= 10) {
// If the port has changed 'Access-Control-Allow-Origin' header will not allow
// us to check if the server is back up. So after 7 failures we redirect
// us to check if the server is back up. So after 10 failures (20 sec) we redirect
// anyway in the hopes it works anyway..
location.href = urlTotal;
}
}
})
}, 4000)
}, 2000)
// Exception if we go from HTTPS to HTTP
// (this is not allowed by browsers and all of the above will be ignored)

View File

@@ -103,7 +103,7 @@
<span id="warning_box"><b><a href="${path}status/#tabs-warnings" id="last_warning"><span id="have_warnings">$have_warnings</span> $T('warnings')</a></b></span>
#if $pane=="Main"#
#if $new_release#&sdot; <a href="$new_rel_url" id="new_release" target="_blank">$T('Plush-updateAvailable').replace(' ','&nbsp;')</a>#end if#
This skin is no longer actively maintained! <a href="${path}config/general/#web_dir"><strong>We recommend using the Glitter skin.</strong></a>
<a href="${path}config/general/#web_dir"><strong style="color: red">This skin will be removed in SABnzbd 3.4.0! <br>We recommend using the Glitter skin.</strong></a>
#end if#
</div>
</div>

View File

@@ -91,40 +91,7 @@
<div id="tabs-connections">
<a href="refresh_conn?apikey=$apikey" class="juiButton">$T('Plush-button-refresh')</a>
<a href="disconnect?apikey=$apikey" class="juiButton">$T('link-forceDisc')</a>
<hr>
<!--#if $servers#-->
<!--#set $count=0#-->
<!--#for $server in $servers#-->
<!--#set $count=$count+1#-->
<p>$T('swtag-server'): <strong>$server[0]</strong></p>
<p>$T('Priority') = $server[7]&nbsp;&nbsp;<!--#if int($server[8]) != 0#-->$T('optional').capitalize()<!--#else#-->$T('enabled').capitalize()<!--#end if#--></p>
<p># $T('connections'): $server[2]</p>
<!--#if not $server[5]#-->
<a href="./unblock_server?server=$server[0]&apikey=$apikey" class="juiButton">$T('server-blocked')</a>
&nbsp;&nbsp;$server[6]
<!--#end if#-->
<!--#if $server[3]#-->
<table class="rssTable">
<tr>
<th>$T('article-id')</th>
<th>$T('filename')</th>
<th>$T('file-set')</th>
</tr>
<!--#set $odd = False#-->
<!--#for $thrd in $server[3]#-->
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$thrd[1]</td><td>$thrd[2]</td><td>$thrd[3]</td></tr>
<!--#end for#-->
</table>
<!--#end if#-->
<br/><hr/><br/>
<!--#end for#-->
<!--#else#-->
<p>$T('none')</p>
<!--#end if#-->
</div>
<div id="tabs-dashboard">

View File

@@ -18,7 +18,7 @@ After=network-online.target
[Service]
Environment="PYTHONIOENCODING=utf-8"
ExecStart=/opt/sabnzbd/SABnzbd.py --logging 1 --browser 0
ExecStart=/opt/sabnzbd/SABnzbd.py --disable-file-log --logging 1 --browser 0
User=%I
Type=simple
Restart=on-failure

View File

Binary file not shown.

View File

@@ -1124,10 +1124,6 @@ msgstr ""
msgid "%s -> Unknown encoding"
msgstr ""
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr ""
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1188,10 +1188,6 @@ msgstr "NZB přidáno do fronty"
msgid "%s -> Unknown encoding"
msgstr "%s -> Neznámé kódování"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr ""
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1197,10 +1197,6 @@ msgstr "NZB tilføjet i køen"
msgid "%s -> Unknown encoding"
msgstr "%s -> Ukendt kodning"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => mangler fra alle servere, afviser"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -3,16 +3,17 @@
# team@sabnzbd.org
#
# Translators:
# Safihre <safihre@sabnzbd.org>, 2020
# C E <githubce@eiselt.ch>, 2020
# Nikolai Bohl <n.kay01@gmail.com>, 2020
# reloxx13 <reloxx@interia.pl>, 2021
# Ben Hecht <benjamin.hecht@me.com>, 2021
# Safihre <safihre@sabnzbd.org>, 2021
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-3.3.0-develop\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: reloxx13 <reloxx@interia.pl>, 2021\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2021\n"
"Language-Team: German (https://www.transifex.com/sabnzbd/teams/111101/de/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -334,7 +335,7 @@ msgstr "Server-Adresse wird benötigt"
#: sabnzbd/cfg.py
msgid "%s is not a valid script"
msgstr ""
msgstr "%s ist kein gültiges Script"
#. Warning message
#: sabnzbd/config.py
@@ -516,12 +517,12 @@ msgstr "Wird beendet …"
#. Warning message
#: sabnzbd/downloader.py
msgid "Server %s is expiring in %s day(s)"
msgstr ""
msgstr "Server %s läuft in %s tag(en) ab"
#. Warning message
#: sabnzbd/downloader.py
msgid "Server %s has used the specified quota"
msgstr ""
msgstr "Server %s hat die angegebene Quote verbraucht"
#: sabnzbd/emailer.py
msgid "Failed to connect to mail server"
@@ -631,11 +632,11 @@ msgstr "Verschieben von %s nach %s fehlgeschlagen"
#. Error message
#: sabnzbd/filesystem.py
msgid "Blocked attempt to create directory %s"
msgstr ""
msgstr "Versuch das Verzeichnis %s zu erstellen wurde blockiert"
#: sabnzbd/interface.py
msgid "Refused connection from:"
msgstr ""
msgstr "Abgelehnte Verbindung von:"
#: sabnzbd/interface.py
msgid "Refused connection with hostname \"%s\" from:"
@@ -1234,10 +1235,6 @@ msgstr "NZB zur Warteschlange hinzugefügt"
msgid "%s -> Unknown encoding"
msgstr "%s -> Unbekannte Kodierung"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s wurde auf keinem Server gefunden und daher übersprungen"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
@@ -3249,6 +3246,7 @@ msgstr "Externer Internetzugriff"
#: sabnzbd/skintext.py
msgid "You can set access rights for systems outside your local network."
msgstr ""
"Du kannst Zugriffsrechte für Systeme ausserhalb deines Netzwerkes setzen."
#: sabnzbd/skintext.py
msgid "No access"
@@ -3588,7 +3586,7 @@ msgstr "Aktion bei ungewollter Dateienendung"
#: sabnzbd/skintext.py
msgid "Action when an unwanted extension is detected"
msgstr ""
msgstr "Aktion bei ungewollter Dateienendung"
#: sabnzbd/skintext.py
msgid "Unwanted extensions"
@@ -3596,11 +3594,11 @@ msgstr "Ungewollte Dateiendungen"
#: sabnzbd/skintext.py
msgid "Blacklist"
msgstr ""
msgstr "Blacklist"
#: sabnzbd/skintext.py
msgid "Whitelist"
msgstr ""
msgstr "Whitelist"
#: sabnzbd/skintext.py
msgid ""
@@ -4179,12 +4177,12 @@ msgstr "Download erzwingen"
#. Config->RSS edit button
#: sabnzbd/skintext.py
msgid "Edit"
msgstr ""
msgstr "Bearbeiten"
#. Config->RSS when will be the next RSS scan
#: sabnzbd/skintext.py
msgid "Next scan at"
msgstr ""
msgstr "Nächster scan um"
#. Config->RSS table column header
#: sabnzbd/skintext.py

View File

@@ -1240,10 +1240,6 @@ msgstr "NZB añadido a la cola"
msgid "%s -> Unknown encoding"
msgstr "%s -> Codificación desconocida"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => faltando de todos servidores, desechando"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1190,10 +1190,6 @@ msgstr "NZB lisätty jonoon"
msgid "%s -> Unknown encoding"
msgstr "%s -> Tuntematon koodaus"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => puuttuu kaikilta palvelimilta, hylätään"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -637,7 +637,7 @@ msgstr "Tentative bloquée de création du répertoire %s"
#: sabnzbd/interface.py
msgid "Refused connection from:"
msgstr ""
msgstr "Connexion refusée de:"
#: sabnzbd/interface.py
msgid "Refused connection with hostname \"%s\" from:"
@@ -1237,10 +1237,6 @@ msgstr "NZB ajouté à la file d'attente"
msgid "%s -> Unknown encoding"
msgstr "%s -> Encodage inconnu"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => absent de tous les serveurs, rejeté"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
@@ -3251,6 +3247,8 @@ msgstr "Accès Internet externe"
#: sabnzbd/skintext.py
msgid "You can set access rights for systems outside your local network."
msgstr ""
"Vous pouvez définir des droits d'accès pour les systèmes en dehors de votre "
"réseau local."
#: sabnzbd/skintext.py
msgid "No access"

View File

@@ -607,7 +607,7 @@ msgstr "ניסיון נחסם ליצור תיקייה %s"
#: sabnzbd/interface.py
msgid "Refused connection from:"
msgstr ""
msgstr "חיבור מסורב מאת:"
#: sabnzbd/interface.py
msgid "Refused connection with hostname \"%s\" from:"
@@ -1189,10 +1189,6 @@ msgstr "NZB התווסף לתור"
msgid "%s -> Unknown encoding"
msgstr "קידוד בלתי ידוע -> %s"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => חסר מכל השרתים, משליך"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
@@ -3168,7 +3164,7 @@ msgstr "גישת אינטרנט חיצונית"
#: sabnzbd/skintext.py
msgid "You can set access rights for systems outside your local network."
msgstr ""
msgstr "אתה יכול להגדיר זכויות גישה עבור מערכות מחוץ אל הרשת המקומית שלך."
#: sabnzbd/skintext.py
msgid "No access"

View File

@@ -1187,10 +1187,6 @@ msgstr "NZB er lagt til i køen"
msgid "%s -> Unknown encoding"
msgstr "%s -> Ukjent koding"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => mangler på alle servere, fjerner"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -622,11 +622,11 @@ msgstr "Verplaatsen van %s naar %s mislukt"
#. Error message
#: sabnzbd/filesystem.py
msgid "Blocked attempt to create directory %s"
msgstr ""
msgstr "Poging om map %s aan te maken geblokkeerd"
#: sabnzbd/interface.py
msgid "Refused connection from:"
msgstr ""
msgstr "Verbinding geweigerd van: "
#: sabnzbd/interface.py
msgid "Refused connection with hostname \"%s\" from:"
@@ -1219,10 +1219,6 @@ msgstr "Download aan wachtrij toegevoegd"
msgid "%s -> Unknown encoding"
msgstr "%s -> Onbekende codering"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => ontbreekt op alle servers, overslaan"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
@@ -3221,6 +3217,7 @@ msgstr "Externe toegang"
#: sabnzbd/skintext.py
msgid "You can set access rights for systems outside your local network."
msgstr ""
"Je kunt toegangsrechten instellen voor systemen buiten je lokale netwerk. "
#: sabnzbd/skintext.py
msgid "No access"
@@ -3547,7 +3544,7 @@ msgstr "Actie bij ontdekken van ongewenste extensie"
#: sabnzbd/skintext.py
msgid "Action when an unwanted extension is detected"
msgstr ""
msgstr "Actie bij ontdekken van een ongewenste extensie"
#: sabnzbd/skintext.py
msgid "Unwanted extensions"
@@ -3555,17 +3552,19 @@ msgstr "Ongewenste extensies"
#: sabnzbd/skintext.py
msgid "Blacklist"
msgstr ""
msgstr "Blacklist"
#: sabnzbd/skintext.py
msgid "Whitelist"
msgstr ""
msgstr "Whitelist"
#: sabnzbd/skintext.py
msgid ""
"Select a mode and list all (un)wanted extensions. For example: <b>exe</b> or"
" <b>exe, com</b>"
msgstr ""
"Kies een stand en voer een lijst van alle (on)gewenste extensies in. "
"Voorbeeld: <b>exe</b> or <b>exe, com</b>"
#: sabnzbd/skintext.py
msgid "Enable SFV-based checks"
@@ -4139,12 +4138,12 @@ msgstr "Forceer download"
#. Config->RSS edit button
#: sabnzbd/skintext.py
msgid "Edit"
msgstr ""
msgstr "Wijzigen"
#. Config->RSS when will be the next RSS scan
#: sabnzbd/skintext.py
msgid "Next scan at"
msgstr ""
msgstr "Wordt uitgevoerd om"
#. Config->RSS table column header
#: sabnzbd/skintext.py
@@ -4940,7 +4939,7 @@ msgstr "Toon Script resultaat"
#: sabnzbd/skintext.py
msgid "Renaming the job will abort Direct Unpack."
msgstr ""
msgstr "Als je de naam wijzigt zal het Direct Uitpakken gestopt worden."
#: sabnzbd/skintext.py
msgid ""

View File

@@ -1188,10 +1188,6 @@ msgstr "NZB dodany do kolejki"
msgid "%s -> Unknown encoding"
msgstr "%s -> Nieznane kodowanie"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => nie znaleziono na żadnym serwerze, porzucam"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1189,10 +1189,6 @@ msgstr "NZB adicionado à fila"
msgid "%s -> Unknown encoding"
msgstr "%s -> Codificação desconhecida"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => faltando em todos os servidores. Descartando"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1215,10 +1215,6 @@ msgstr "NZB adăugat în coadă"
msgid "%s -> Unknown encoding"
msgstr "%s -> Codificare Necunoscută"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => lipsă de pe toate serverele, ignorare"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1189,10 +1189,6 @@ msgstr "NZB-файл добавлен в очередь"
msgid "%s -> Unknown encoding"
msgstr "%s -> неизвестная кодировка"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => отсутствует на всех серверах, отброшен"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1183,10 +1183,6 @@ msgstr "NZB додат у ред"
msgid "%s -> Unknown encoding"
msgstr "%s -> Непознато енкодирање"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => фали на свим серверима, одбацивање"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1187,10 +1187,6 @@ msgstr "NZB tillagd i kön"
msgid "%s -> Unknown encoding"
msgstr "%s -> Okänd kodning"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => saknas från alla servrar, kastar"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -1173,10 +1173,6 @@ msgstr "NZB 已添加到队列"
msgid "%s -> Unknown encoding"
msgstr "%s -> 未知编码"
#: sabnzbd/nzbstuff.py
msgid "%s => missing from all servers, discarding"
msgstr "%s => 所有服务器均缺失,正在舍弃"
#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"

View File

@@ -4,12 +4,13 @@
#
# Translators:
# Safihre <safihre@sabnzbd.org>, 2020
# Ben Hecht <benjamin.hecht@me.com>, 2021
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-3.3.0-develop\n"
"PO-Revision-Date: 2020-06-27 15:56+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2020\n"
"Last-Translator: Ben Hecht <benjamin.hecht@me.com>, 2021\n"
"Language-Team: German (https://www.transifex.com/sabnzbd/teams/111101/de/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -51,6 +52,8 @@ msgid ""
"The installer only supports Windows 8.1 and above, use the standalone legacy"
" version to run on older Windows version."
msgstr ""
"Der Installer unterstützt nur Windows 8.1 und höher. Benutze die Standalone "
"Version für ältere Windows Versionen."
#: builder/win/NSIS_Installer.nsi
msgid "This will uninstall SABnzbd from your system"

View File

@@ -452,13 +452,13 @@ def halt():
def notify_shutdown_loop():
""" Trigger the main loop to wake up"""
"""Trigger the main loop to wake up"""
with sabnzbd.SABSTOP_CONDITION:
sabnzbd.SABSTOP_CONDITION.notify()
def shutdown_program():
""" Stop program after halting and saving """
"""Stop program after halting and saving"""
if not sabnzbd.SABSTOP:
logging.info("[%s] Performing SABnzbd shutdown", misc.caller_name())
sabnzbd.halt()
@@ -468,7 +468,7 @@ def shutdown_program():
def trigger_restart(timeout=None):
""" Trigger a restart by setting a flag an shutting down CP """
"""Trigger a restart by setting a flag an shutting down CP"""
# Sometimes we need to wait a bit to send good-bye to the browser
if timeout:
time.sleep(timeout)
@@ -482,22 +482,22 @@ def trigger_restart(timeout=None):
# Misc Wrappers
##############################################################################
def new_limit():
""" Callback for article cache changes """
"""Callback for article cache changes"""
sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())
def guard_restart():
""" Callback for config options requiring a restart """
"""Callback for config options requiring a restart"""
sabnzbd.RESTART_REQ = True
def guard_top_only():
""" Callback for change of top_only option """
"""Callback for change of top_only option"""
sabnzbd.NzbQueue.set_top_only(cfg.top_only())
def guard_pause_on_pp():
""" Callback for change of pause-download-on-pp """
"""Callback for change of pause-download-on-pp"""
if cfg.pause_on_post_processing():
pass # Not safe to idle downloader, because we don't know
# if post-processing is active now
@@ -506,17 +506,17 @@ def guard_pause_on_pp():
def guard_quota_size():
""" Callback for change of quota_size """
"""Callback for change of quota_size"""
sabnzbd.BPSMeter.change_quota()
def guard_quota_dp():
""" Callback for change of quota_day or quota_period """
"""Callback for change of quota_day or quota_period"""
sabnzbd.Scheduler.restart()
def guard_language():
""" Callback for change of the interface language """
"""Callback for change of the interface language"""
sabnzbd.lang.set_language(cfg.language())
sabnzbd.api.clear_trans_cache()
@@ -534,12 +534,12 @@ def set_https_verification(value):
def guard_https_ver():
""" Callback for change of https verification """
"""Callback for change of https verification"""
set_https_verification(cfg.enable_https_verification())
def add_url(url, pp=None, script=None, cat=None, priority=None, nzbname=None, password=None):
""" Add NZB based on a URL, attributes optional """
"""Add NZB based on a URL, attributes optional"""
if "http" not in url:
return
if not pp or pp == "-1":
@@ -568,7 +568,7 @@ def add_url(url, pp=None, script=None, cat=None, priority=None, nzbname=None, pa
def save_state():
""" Save all internal bookkeeping to disk """
"""Save all internal bookkeeping to disk"""
config.save_config()
sabnzbd.ArticleCache.flush_articles()
sabnzbd.NzbQueue.save()
@@ -580,14 +580,14 @@ def save_state():
def pause_all():
""" Pause all activities than cause disk access """
"""Pause all activities than cause disk access"""
sabnzbd.PAUSED_ALL = True
sabnzbd.Downloader.pause()
logging.debug("PAUSED_ALL active")
def unpause_all():
""" Resume all activities """
"""Resume all activities"""
sabnzbd.PAUSED_ALL = False
sabnzbd.Downloader.resume()
logging.debug("PAUSED_ALL inactive")
@@ -599,20 +599,20 @@ def unpause_all():
def backup_exists(filename: str) -> bool:
""" Return True if backup exists and no_dupes is set """
"""Return True if backup exists and no_dupes is set"""
path = cfg.nzb_backup_dir.get_path()
return path and os.path.exists(os.path.join(path, filename + ".gz"))
def backup_nzb(filename: str, data: AnyStr):
""" Backup NZB file """
"""Backup NZB file"""
path = cfg.nzb_backup_dir.get_path()
if path:
save_compressed(path, filename, data)
def save_compressed(folder: str, filename: str, data: AnyStr):
""" Save compressed NZB file in folder """
"""Save compressed NZB file in folder"""
if filename.endswith(".nzb"):
filename += ".gz"
else:
@@ -728,7 +728,7 @@ def add_nzbfile(
def enable_server(server):
""" Enable server (scheduler only) """
"""Enable server (scheduler only)"""
try:
config.get_config("servers", server).enable.set(1)
except:
@@ -739,7 +739,7 @@ def enable_server(server):
def disable_server(server):
""" Disable server (scheduler only) """
"""Disable server (scheduler only)"""
try:
config.get_config("servers", server).enable.set(0)
except:
@@ -750,7 +750,7 @@ def disable_server(server):
def system_shutdown():
""" Shutdown system after halting download and saving bookkeeping """
"""Shutdown system after halting download and saving bookkeeping"""
logging.info("Performing system shutdown")
Thread(target=halt).start()
@@ -766,7 +766,7 @@ def system_shutdown():
def system_hibernate():
""" Hibernate system """
"""Hibernate system"""
logging.info("Performing system hybernation")
if sabnzbd.WIN32:
powersup.win_hibernate()
@@ -777,7 +777,7 @@ def system_hibernate():
def system_standby():
""" Standby system """
"""Standby system"""
logging.info("Performing system standby")
if sabnzbd.WIN32:
powersup.win_standby()
@@ -788,7 +788,7 @@ def system_standby():
def restart_program():
""" Restart program (used by scheduler) """
"""Restart program (used by scheduler)"""
logging.info("Scheduled restart request")
# Just set the stop flag, because stopping CherryPy from
# the scheduler is not reliable
@@ -831,7 +831,7 @@ def change_queue_complete_action(action, new=True):
def run_script(script):
""" Run a user script (queue complete only) """
"""Run a user script (queue complete only)"""
script_path = filesystem.make_script_path(script)
if script_path:
try:
@@ -842,7 +842,7 @@ def run_script(script):
def keep_awake():
""" If we still have work to do, keep Windows/macOS system awake """
"""If we still have work to do, keep Windows/macOS system awake"""
if KERNEL32 or FOUNDATION:
if sabnzbd.cfg.keep_awake():
ES_CONTINUOUS = 0x80000000
@@ -890,7 +890,7 @@ def get_new_id(prefix, folder, check_list=None):
def save_data(data, _id, path, do_pickle=True, silent=False):
""" Save data to a diskfile """
"""Save data to a diskfile"""
if not silent:
logging.debug("[%s] Saving data for %s in %s", misc.caller_name(), _id, path)
path = os.path.join(path, _id)
@@ -917,7 +917,7 @@ def save_data(data, _id, path, do_pickle=True, silent=False):
def load_data(data_id, path, remove=True, do_pickle=True, silent=False):
""" Read data from disk file """
"""Read data from disk file"""
path = os.path.join(path, data_id)
if not os.path.exists(path):
@@ -949,7 +949,7 @@ def load_data(data_id, path, remove=True, do_pickle=True, silent=False):
def remove_data(_id: str, path: str):
""" Remove admin file """
"""Remove admin file"""
path = os.path.join(path, _id)
try:
if os.path.exists(path):
@@ -959,19 +959,19 @@ def remove_data(_id: str, path: str):
def save_admin(data: Any, data_id: str):
""" Save data in admin folder in specified format """
"""Save data in admin folder in specified format"""
logging.debug("[%s] Saving data for %s", misc.caller_name(), data_id)
save_data(data, data_id, cfg.admin_dir.get_path())
def load_admin(data_id: str, remove=False, silent=False) -> Any:
""" Read data in admin folder in specified format """
"""Read data in admin folder in specified format"""
logging.debug("[%s] Loading data for %s", misc.caller_name(), data_id)
return load_data(data_id, cfg.admin_dir.get_path(), remove=remove, silent=silent)
def request_repair():
""" Request a full repair on next restart """
"""Request a full repair on next restart"""
path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
try:
with open(path, "w") as f:
@@ -981,7 +981,7 @@ def request_repair():
def check_repair_request():
""" Return True if repair request found, remove afterwards """
"""Return True if repair request found, remove afterwards"""
path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
if os.path.exists(path):
try:
@@ -1044,7 +1044,7 @@ def check_all_tasks():
def pid_file(pid_path=None, pid_file=None, port=0):
""" Create or remove pid file """
"""Create or remove pid file"""
if not sabnzbd.WIN32:
if pid_path and pid_path.startswith("/"):
sabnzbd.DIR_PID = os.path.join(pid_path, "sabnzbd-%d.pid" % port)
@@ -1077,14 +1077,14 @@ def check_incomplete_vs_complete():
def wait_for_download_folder():
""" Wait for download folder to become available """
"""Wait for download folder to become available"""
while not cfg.download_dir.test_path():
logging.debug('Waiting for "incomplete" folder')
time.sleep(2.0)
def test_ipv6():
""" Check if external IPv6 addresses are reachable """
"""Check if external IPv6 addresses are reachable"""
if not cfg.selftest_host():
# User disabled the test, assume active IPv6
return True
@@ -1112,7 +1112,7 @@ def test_ipv6():
def test_cert_checking():
""" Test quality of certificate validation """
"""Test quality of certificate validation"""
# User disabled the test, assume proper SSL certificates
if not cfg.selftest_host():
return True
@@ -1139,7 +1139,7 @@ def test_cert_checking():
def history_updated():
""" To make sure we always have a fresh history """
"""To make sure we always have a fresh history"""
sabnzbd.LAST_HISTORY_UPDATE += 1
# Never go over the limit
if sabnzbd.LAST_HISTORY_UPDATE + 1 >= sys.maxsize:

View File

@@ -29,7 +29,7 @@ import json
import cherrypy
import locale
from threading import Thread
from typing import Tuple
from typing import Tuple, Optional, List
import sabnzbd
from sabnzbd.constants import (
@@ -86,7 +86,7 @@ _MSG_BAD_SERVER_PARMS = "Incorrect server settings"
def api_handler(kwargs):
""" API Dispatcher """
"""API Dispatcher"""
# Clean-up the arguments
for vr in ("mode", "output", "name"):
if vr in kwargs and isinstance(kwargs[vr], list):
@@ -101,13 +101,13 @@ def api_handler(kwargs):
def _api_get_config(name, output, kwargs):
""" API: accepts output, keyword, section """
"""API: accepts output, keyword, section"""
_, data = config.get_dconfig(kwargs.get("section"), kwargs.get("keyword"))
return report(output, keyword="config", data=data)
def _api_set_config(name, output, kwargs):
""" API: accepts output, keyword, section """
"""API: accepts output, keyword, section"""
if cfg.configlock():
return report(output, _MSG_CONFIG_LOCKED)
if kwargs.get("section") == "servers":
@@ -126,7 +126,7 @@ def _api_set_config(name, output, kwargs):
def _api_set_config_default(name, output, kwargs):
""" API: Reset requested config variables back to defaults. Currently only for misc-section """
"""API: Reset requested config variables back to defaults. Currently only for misc-section"""
if cfg.configlock():
return report(output, _MSG_CONFIG_LOCKED)
keywords = kwargs.get("keyword", [])
@@ -141,7 +141,7 @@ def _api_set_config_default(name, output, kwargs):
def _api_del_config(name, output, kwargs):
""" API: accepts output, keyword, section """
"""API: accepts output, keyword, section"""
if cfg.configlock():
return report(output, _MSG_CONFIG_LOCKED)
if del_from_section(kwargs):
@@ -151,13 +151,13 @@ def _api_del_config(name, output, kwargs):
def _api_queue(name, output, kwargs):
""" API: Dispatcher for mode=queue """
"""API: Dispatcher for mode=queue"""
value = kwargs.get("value", "")
return _api_queue_table.get(name, (_api_queue_default, 2))[0](output, value, kwargs)
def _api_queue_delete(output, value, kwargs):
""" API: accepts output, value """
"""API: accepts output, value"""
if value.lower() == "all":
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed})
@@ -171,7 +171,7 @@ def _api_queue_delete(output, value, kwargs):
def _api_queue_delete_nzf(output, value, kwargs):
""" API: accepts value(=nzo_id), value2(=nzf_id) """
"""API: accepts value(=nzo_id), value2(=nzf_id)"""
value2 = kwargs.get("value2")
if value and value2:
removed = sabnzbd.NzbQueue.remove_nzf(value, value2, force_delete=True)
@@ -181,7 +181,7 @@ def _api_queue_delete_nzf(output, value, kwargs):
def _api_queue_rename(output, value, kwargs):
""" API: accepts output, value(=old name), value2(=new name), value3(=password) """
"""API: accepts output, value(=old name), value2(=new name), value3(=password)"""
value2 = kwargs.get("value2")
value3 = kwargs.get("value3")
if value and value2:
@@ -192,19 +192,19 @@ def _api_queue_rename(output, value, kwargs):
def _api_queue_change_complete_action(output, value, kwargs):
""" API: accepts output, value(=action) """
"""API: accepts output, value(=action)"""
sabnzbd.change_queue_complete_action(value)
return report(output)
def _api_queue_purge(output, value, kwargs):
""" API: accepts output """
"""API: accepts output"""
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed})
def _api_queue_pause(output, value, kwargs):
""" API: accepts output, value(=list of nzo_id) """
"""API: accepts output, value(=list of nzo_id)"""
if value:
items = value.split(",")
handled = sabnzbd.NzbQueue.pause_multiple_nzo(items)
@@ -214,7 +214,7 @@ def _api_queue_pause(output, value, kwargs):
def _api_queue_resume(output, value, kwargs):
""" API: accepts output, value(=list of nzo_id) """
"""API: accepts output, value(=list of nzo_id)"""
if value:
items = value.split(",")
handled = sabnzbd.NzbQueue.resume_multiple_nzo(items)
@@ -224,7 +224,7 @@ def _api_queue_resume(output, value, kwargs):
def _api_queue_priority(output, value, kwargs):
""" API: accepts output, value(=nzo_id), value2(=priority) """
"""API: accepts output, value(=nzo_id), value2(=priority)"""
value2 = kwargs.get("value2")
if value and value2:
try:
@@ -242,7 +242,7 @@ def _api_queue_priority(output, value, kwargs):
def _api_queue_sort(output, value, kwargs):
""" API: accepts output, sort, dir """
"""API: accepts output, sort, dir"""
sort = kwargs.get("sort")
direction = kwargs.get("dir", "")
if sort:
@@ -253,7 +253,7 @@ def _api_queue_sort(output, value, kwargs):
def _api_queue_default(output, value, kwargs):
""" API: accepts output, sort, dir, start, limit """
"""API: accepts output, sort, dir, start, limit"""
start = int_conv(kwargs.get("start"))
limit = int_conv(kwargs.get("limit"))
search = kwargs.get("search")
@@ -264,7 +264,7 @@ def _api_queue_default(output, value, kwargs):
def _api_queue_rating(output, value, kwargs):
""" API: accepts output, value(=nzo_id), type, setting, detail """
"""API: accepts output, value(=nzo_id), type, setting, detail"""
vote_map = {"up": sabnzbd.Rating.VOTE_UP, "down": sabnzbd.Rating.VOTE_DOWN}
flag_map = {
"spam": sabnzbd.Rating.FLAG_SPAM,
@@ -296,17 +296,17 @@ def _api_queue_rating(output, value, kwargs):
def _api_options(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return options_list(output)
def _api_translate(name, output, kwargs):
""" API: accepts output, value(=acronym) """
"""API: accepts output, value(=acronym)"""
return report(output, keyword="value", data=T(kwargs.get("value", "")))
def _api_addfile(name, output, kwargs):
""" API: accepts name, output, pp, script, cat, priority, nzbname """
"""API: accepts name, output, pp, script, cat, priority, nzbname"""
# Normal upload will send the nzb in a kw arg called name or nzbfile
if not name or isinstance(name, str):
name = kwargs.get("nzbfile", None)
@@ -332,7 +332,7 @@ def _api_addfile(name, output, kwargs):
def _api_retry(name, output, kwargs):
""" API: accepts name, output, value(=nzo_id), nzbfile(=optional NZB), password (optional) """
"""API: accepts name, output, value(=nzo_id), nzbfile(=optional NZB), password (optional)"""
value = kwargs.get("value")
# Normal upload will send the nzb in a kw arg called nzbfile
if name is None or isinstance(name, str):
@@ -348,7 +348,7 @@ def _api_retry(name, output, kwargs):
def _api_cancel_pp(name, output, kwargs):
""" API: accepts name, output, value(=nzo_id) """
"""API: accepts name, output, value(=nzo_id)"""
nzo_id = kwargs.get("value")
if sabnzbd.PostProcessor.cancel_pp(nzo_id):
return report(output, keyword="", data={"status": True, "nzo_id": nzo_id})
@@ -357,7 +357,7 @@ def _api_cancel_pp(name, output, kwargs):
def _api_addlocalfile(name, output, kwargs):
""" API: accepts name, output, pp, script, cat, priority, nzbname """
"""API: accepts name, output, pp, script, cat, priority, nzbname"""
if name:
if os.path.exists(name):
pp = kwargs.get("pp")
@@ -395,7 +395,7 @@ def _api_addlocalfile(name, output, kwargs):
def _api_switch(name, output, kwargs):
""" API: accepts output, value(=first id), value2(=second id) """
"""API: accepts output, value(=first id), value2(=second id)"""
value = kwargs.get("value")
value2 = kwargs.get("value2")
if value and value2:
@@ -407,7 +407,7 @@ def _api_switch(name, output, kwargs):
def _api_change_cat(name, output, kwargs):
""" API: accepts output, value(=nzo_id), value2(=category) """
"""API: accepts output, value(=nzo_id), value2(=category)"""
value = kwargs.get("value")
value2 = kwargs.get("value2")
if value and value2:
@@ -422,7 +422,7 @@ def _api_change_cat(name, output, kwargs):
def _api_change_script(name, output, kwargs):
""" API: accepts output, value(=nzo_id), value2(=script) """
"""API: accepts output, value(=nzo_id), value2(=script)"""
value = kwargs.get("value")
value2 = kwargs.get("value2")
if value and value2:
@@ -437,7 +437,7 @@ def _api_change_script(name, output, kwargs):
def _api_change_opts(name, output, kwargs):
""" API: accepts output, value(=nzo_id), value2(=pp) """
"""API: accepts output, value(=nzo_id), value2(=pp)"""
value = kwargs.get("value")
value2 = kwargs.get("value2")
result = 0
@@ -447,13 +447,13 @@ def _api_change_opts(name, output, kwargs):
def _api_fullstatus(name, output, kwargs):
""" API: full history status"""
"""API: full history status"""
status = build_status(skip_dashboard=kwargs.get("skip_dashboard", 1), output=output)
return report(output, keyword="status", data=status)
def _api_history(name, output, kwargs):
""" API: accepts output, value(=nzo_id), start, limit, search, nzo_ids """
"""API: accepts output, value(=nzo_id), start, limit, search, nzo_ids"""
value = kwargs.get("value", "")
start = int_conv(kwargs.get("start"))
limit = int_conv(kwargs.get("limit"))
@@ -470,6 +470,9 @@ def _api_history(name, output, kwargs):
if categories and not isinstance(categories, list):
categories = [categories]
if nzo_ids and not isinstance(nzo_ids, list):
nzo_ids = nzo_ids.split(",")
if not limit:
limit = cfg.history_limit()
@@ -514,7 +517,7 @@ def _api_history(name, output, kwargs):
def _api_get_files(name, output, kwargs):
""" API: accepts output, value(=nzo_id) """
"""API: accepts output, value(=nzo_id)"""
value = kwargs.get("value")
if value:
return report(output, keyword="files", data=build_file_list(value))
@@ -523,7 +526,7 @@ def _api_get_files(name, output, kwargs):
def _api_addurl(name, output, kwargs):
""" API: accepts name, output, pp, script, cat, priority, nzbname """
"""API: accepts name, output, pp, script, cat, priority, nzbname"""
pp = kwargs.get("pp")
script = kwargs.get("script")
cat = kwargs.get("cat")
@@ -541,27 +544,27 @@ def _api_addurl(name, output, kwargs):
def _api_pause(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.Scheduler.plan_resume(0)
sabnzbd.Downloader.pause()
return report(output)
def _api_resume(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.Scheduler.plan_resume(0)
sabnzbd.unpause_all()
return report(output)
def _api_shutdown(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.shutdown_program()
return report(output)
def _api_warnings(name, output, kwargs):
""" API: accepts name, output """
"""API: accepts name, output"""
if name == "clear":
return report(output, keyword="warnings", data=sabnzbd.GUIHANDLER.clear())
elif name == "show":
@@ -572,22 +575,22 @@ def _api_warnings(name, output, kwargs):
def _api_get_cats(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, keyword="categories", data=list_cats(False))
def _api_get_scripts(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, keyword="scripts", data=list_scripts())
def _api_version(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, keyword="version", data=sabnzbd.__version__)
def _api_auth(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
auth = "None"
if not cfg.disable_key():
auth = "badkey"
@@ -605,7 +608,7 @@ def _api_auth(name, output, kwargs):
def _api_restart(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
logging.info("Restart requested by API")
# Do the shutdown async to still send goodbye to browser
Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start()
@@ -613,7 +616,7 @@ def _api_restart(name, output, kwargs):
def _api_restart_repair(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
logging.info("Queue repair requested by API")
sabnzbd.request_repair()
# Do the shutdown async to still send goodbye to browser
@@ -622,26 +625,26 @@ def _api_restart_repair(name, output, kwargs):
def _api_disconnect(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.Downloader.disconnect()
return report(output)
def _api_osx_icon(name, output, kwargs):
""" API: accepts output, value """
"""API: accepts output, value"""
value = kwargs.get("value", "1").strip()
cfg.osx_menu.set(value != "0")
return report(output)
def _api_rescan(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=True)
return report(output)
def _api_eval_sort(name, output, kwargs):
""" API: evaluate sorting expression """
"""API: evaluate sorting expression"""
name = kwargs.get("name", "")
value = kwargs.get("value", "")
title = kwargs.get("title")
@@ -654,43 +657,43 @@ def _api_eval_sort(name, output, kwargs):
def _api_watched_now(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.DirScanner.scan()
return report(output)
def _api_resume_pp(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.PostProcessor.paused = False
return report(output)
def _api_pause_pp(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sabnzbd.PostProcessor.paused = True
return report(output)
def _api_rss_now(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
# Run RSS scan async, because it can take a long time
sabnzbd.Scheduler.force_rss()
return report(output)
def _api_retry_all(name, output, kwargs):
""" API: Retry all failed items in History """
"""API: Retry all failed items in History"""
return report(output, keyword="status", data=retry_all_jobs())
def _api_reset_quota(name, output, kwargs):
""" Reset quota left """
"""Reset quota left"""
sabnzbd.BPSMeter.reset_quota(force=True)
return report(output)
def _api_test_email(name, output, kwargs):
""" API: send a test email, return result """
"""API: send a test email, return result"""
logging.info("Sending test email")
pack = {"download": ["action 1", "action 2"], "unpack": ["action 1", "action 2"]}
res = sabnzbd.emailer.endjob(
@@ -712,61 +715,61 @@ def _api_test_email(name, output, kwargs):
def _api_test_windows(name, output, kwargs):
""" API: send a test to Windows, return result """
"""API: send a test to Windows, return result"""
logging.info("Sending test notification")
res = sabnzbd.notifier.send_windows("SABnzbd", T("Test Notification"), "other")
return report(output, error=res)
def _api_test_notif(name, output, kwargs):
""" API: send a test to Notification Center, return result """
"""API: send a test to Notification Center, return result"""
logging.info("Sending test notification")
res = sabnzbd.notifier.send_notification_center("SABnzbd", T("Test Notification"), "other")
return report(output, error=res)
def _api_test_osd(name, output, kwargs):
""" API: send a test OSD notification, return result """
"""API: send a test OSD notification, return result"""
logging.info("Sending OSD notification")
res = sabnzbd.notifier.send_notify_osd("SABnzbd", T("Test Notification"))
return report(output, error=res)
def _api_test_prowl(name, output, kwargs):
""" API: send a test Prowl notification, return result """
"""API: send a test Prowl notification, return result"""
logging.info("Sending Prowl notification")
res = sabnzbd.notifier.send_prowl("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
return report(output, error=res)
def _api_test_pushover(name, output, kwargs):
""" API: send a test Pushover notification, return result """
"""API: send a test Pushover notification, return result"""
logging.info("Sending Pushover notification")
res = sabnzbd.notifier.send_pushover("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
return report(output, error=res)
def _api_test_pushbullet(name, output, kwargs):
""" API: send a test Pushbullet notification, return result """
"""API: send a test Pushbullet notification, return result"""
logging.info("Sending Pushbullet notification")
res = sabnzbd.notifier.send_pushbullet("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
return report(output, error=res)
def _api_test_nscript(name, output, kwargs):
""" API: execute a test notification script, return result """
"""API: execute a test notification script, return result"""
logging.info("Executing notification script")
res = sabnzbd.notifier.send_nscript("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
return report(output, error=res)
def _api_undefined(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, _MSG_NOT_IMPLEMENTED)
def _api_browse(name, output, kwargs):
""" Return tree of local path """
"""Return tree of local path"""
compact = kwargs.get("compact")
if compact and compact == "1":
@@ -780,14 +783,14 @@ def _api_browse(name, output, kwargs):
def _api_config(name, output, kwargs):
""" API: Dispatcher for "config" """
"""API: Dispatcher for "config" """
if cfg.configlock():
return report(output, _MSG_CONFIG_LOCKED)
return _api_config_table.get(name, (_api_config_undefined, 2))[0](output, kwargs)
def _api_config_speedlimit(output, kwargs):
""" API: accepts output, value(=speed) """
"""API: accepts output, value(=speed)"""
value = kwargs.get("value")
if not value:
value = "0"
@@ -796,12 +799,12 @@ def _api_config_speedlimit(output, kwargs):
def _api_config_get_speedlimit(output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, keyword="speedlimit", data=sabnzbd.Downloader.get_limit())
def _api_config_set_colorscheme(output, kwargs):
""" API: accepts output"""
"""API: accepts output"""
value = kwargs.get("value")
if value:
cfg.web_color.set(value)
@@ -811,21 +814,21 @@ def _api_config_set_colorscheme(output, kwargs):
def _api_config_set_pause(output, kwargs):
""" API: accepts output, value(=pause interval) """
"""API: accepts output, value(=pause interval)"""
value = kwargs.get("value")
sabnzbd.Scheduler.plan_resume(int_conv(value))
return report(output)
def _api_config_set_apikey(output, kwargs):
""" API: accepts output """
"""API: accepts output"""
cfg.api_key.set(config.create_api_key())
config.save_config()
return report(output, keyword="apikey", data=cfg.api_key())
def _api_config_set_nzbkey(output, kwargs):
""" API: accepts output """
"""API: accepts output"""
cfg.nzb_key.set(config.create_api_key())
config.save_config()
return report(output, keyword="nzbkey", data=cfg.nzb_key())
@@ -846,7 +849,7 @@ def _api_config_regenerate_certs(output, kwargs):
def _api_config_test_server(output, kwargs):
""" API: accepts output, server-params """
"""API: accepts output, server-params"""
result, msg = test_nntp_server_dict(kwargs)
response = {"result": result, "message": msg}
if output:
@@ -856,12 +859,12 @@ def _api_config_test_server(output, kwargs):
def _api_config_undefined(output, kwargs):
""" API: accepts output """
"""API: accepts output"""
return report(output, _MSG_NOT_IMPLEMENTED)
def _api_server_stats(name, output, kwargs):
""" API: accepts output """
"""API: accepts output"""
sum_t, sum_m, sum_w, sum_d = sabnzbd.BPSMeter.get_sums()
stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}}
@@ -971,7 +974,7 @@ _api_config_table = {
def api_level(mode: str, name: str) -> int:
""" Return access level required for this API call """
"""Return access level required for this API call"""
if mode == "queue" and name in _api_queue_table:
return _api_queue_table[name][1]
if mode == "config" and name in _api_config_table:
@@ -1088,7 +1091,7 @@ class xml_factory:
def handle_server_api(output, kwargs):
""" Special handler for API-call 'set_config' [servers] """
"""Special handler for API-call 'set_config' [servers]"""
name = kwargs.get("keyword")
if not name:
name = kwargs.get("name")
@@ -1106,7 +1109,7 @@ def handle_server_api(output, kwargs):
def handle_rss_api(output, kwargs):
""" Special handler for API-call 'set_config' [rss] """
"""Special handler for API-call 'set_config' [rss]"""
name = kwargs.get("keyword")
if not name:
name = kwargs.get("name")
@@ -1140,7 +1143,7 @@ def handle_rss_api(output, kwargs):
def handle_cat_api(output, kwargs):
""" Special handler for API-call 'set_config' [categories] """
"""Special handler for API-call 'set_config' [categories]"""
name = kwargs.get("keyword")
if not name:
name = kwargs.get("name")
@@ -1165,6 +1168,7 @@ def build_status(skip_dashboard=False, output=None):
info["loglevel"] = str(cfg.log_level())
info["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
info["configfn"] = config.get_filename()
info["warnings"] = sabnzbd.GUIHANDLER.content()
# Dashboard: Speed of System
info["cpumodel"] = getcpu()
@@ -1194,42 +1198,22 @@ def build_status(skip_dashboard=False, output=None):
info["dnslookup"] = None
info["servers"] = []
servers = sorted(sabnzbd.Downloader.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))
for server in servers:
# Servers-list could be modified during iteration, so we need a copy
for server in sabnzbd.Downloader.servers[:]:
connected = sum(nw.connected for nw in server.idle_threads[:])
serverconnections = []
connected = 0
for nw in server.idle_threads[:]:
if nw.connected:
connected += 1
for nw in server.busy_threads[:]:
article = nw.article
art_name = ""
nzf_name = ""
nzo_name = ""
if article:
nzf = article.nzf
nzo = nzf.nzo
art_name = article.article
# filename field is not always present
try:
nzf_name = nzf.filename
except: # attribute error
nzf_name = nzf.subject
nzo_name = nzo.final_name
# For the templates or for JSON
if output:
thread_info = {"thrdnum": nw.thrdnum, "art_name": art_name, "nzf_name": nzf_name, "nzo_name": nzo_name}
serverconnections.append(thread_info)
else:
serverconnections.append((nw.thrdnum, art_name, nzf_name, nzo_name))
if nw.connected:
connected += 1
if nw.article:
serverconnections.append(
{
"thrdnum": nw.thrdnum,
"art_name": nw.article.article,
"nzf_name": nw.article.nzf.filename,
"nzo_name": nw.article.nzf.nzo.final_name,
}
)
if server.warning and not (connected or server.errormsg):
connected = server.warning
@@ -1237,38 +1221,20 @@ def build_status(skip_dashboard=False, output=None):
if server.request and not server.info:
connected = T("&nbsp;Resolving address").replace("&nbsp;", "")
# For the templates or for JSON
if output:
server_info = {
"servername": server.displayname,
"serveractiveconn": connected,
"servertotalconn": server.threads,
"serverconnections": serverconnections,
"serverssl": server.ssl,
"serversslinfo": server.ssl_info,
"serveractive": server.active,
"servererror": server.errormsg,
"serverpriority": server.priority,
"serveroptional": server.optional,
"serverbps": to_units(sabnzbd.BPSMeter.server_bps.get(server.id, 0)),
}
info["servers"].append(server_info)
else:
info["servers"].append(
(
server.displayname,
"",
connected,
serverconnections,
server.ssl,
server.active,
server.errormsg,
server.priority,
server.optional,
)
)
info["warnings"] = sabnzbd.GUIHANDLER.content()
server_info = {
"servername": server.displayname,
"serveractiveconn": connected,
"servertotalconn": server.threads,
"serverconnections": serverconnections,
"serverssl": server.ssl,
"serversslinfo": server.ssl_info,
"serveractive": server.active,
"servererror": server.errormsg,
"serverpriority": server.priority,
"serveroptional": server.optional,
"serverbps": to_units(sabnzbd.BPSMeter.server_bps.get(server.id, 0)),
}
info["servers"].append(server_info)
return info
@@ -1384,7 +1350,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None, nzo_ids
def fast_queue() -> Tuple[bool, int, float, str]:
""" Return paused, bytes_left, bpsnow, time_left """
"""Return paused, bytes_left, bpsnow, time_left"""
bytes_left = sabnzbd.sabnzbd.NzbQueue.remaining()
paused = sabnzbd.Downloader.paused
bpsnow = sabnzbd.BPSMeter.bps
@@ -1406,7 +1372,7 @@ def build_file_list(nzo_id: str):
for nzf in finished_files:
jobs.append(
{
"filename": nzf.filename if nzf.filename else nzf.subject,
"filename": nzf.filename,
"mbleft": "%.2f" % (nzf.bytes_left / MEBI),
"mb": "%.2f" % (nzf.bytes / MEBI),
"bytes": "%.2f" % nzf.bytes,
@@ -1419,7 +1385,7 @@ def build_file_list(nzo_id: str):
for nzf in active_files:
jobs.append(
{
"filename": nzf.filename if nzf.filename else nzf.subject,
"filename": nzf.filename,
"mbleft": "%.2f" % (nzf.bytes_left / MEBI),
"mb": "%.2f" % (nzf.bytes / MEBI),
"bytes": "%.2f" % nzf.bytes,
@@ -1432,7 +1398,7 @@ def build_file_list(nzo_id: str):
for nzf in queued_files:
jobs.append(
{
"filename": nzf.filename if nzf.filename else nzf.subject,
"filename": nzf.filename,
"set": nzf.setname,
"mbleft": "%.2f" % (nzf.bytes_left / MEBI),
"mb": "%.2f" % (nzf.bytes / MEBI),
@@ -1464,7 +1430,7 @@ def options_list(output):
def retry_job(job, new_nzb=None, password=None):
""" Re enter failed job in the download queue """
"""Re enter failed job in the download queue"""
if job:
history_db = sabnzbd.get_db_connection()
futuretype, url, pp, script, cat = history_db.get_other(job)
@@ -1481,7 +1447,7 @@ def retry_job(job, new_nzb=None, password=None):
def retry_all_jobs():
""" Re enter all failed jobs in the download queue """
"""Re enter all failed jobs in the download queue"""
# Fetch all retryable folders from History
items = sabnzbd.api.build_history()[0]
nzo_ids = []
@@ -1492,14 +1458,14 @@ def retry_all_jobs():
def del_job_files(job_paths):
""" Remove files of each path in the list """
"""Remove files of each path in the list"""
for path in job_paths:
if path and clip_path(path).lower().startswith(cfg.download_dir.get_clipped_path().lower()):
remove_all(path, recursive=True)
def del_hist_job(job, del_files):
""" Remove history element """
"""Remove history element"""
if job:
path = sabnzbd.PostProcessor.get_path(job)
if path:
@@ -1511,7 +1477,7 @@ def del_hist_job(job, del_files):
def Tspec(txt):
""" Translate special terms """
"""Translate special terms"""
if txt == "None":
return T("None")
elif txt in ("Default", "*"):
@@ -1540,14 +1506,14 @@ def Ttemplate(txt):
def clear_trans_cache():
""" Clean cache for skin translations """
"""Clean cache for skin translations"""
global _SKIN_CACHE
_SKIN_CACHE = {}
sabnzbd.WEBUI_READY = True
def build_header(webdir="", output=None, trans_functions=True):
""" Build the basic header """
"""Build the basic header"""
try:
uptime = calc_age(sabnzbd.START)
except:
@@ -1625,7 +1591,7 @@ def build_header(webdir="", output=None, trans_functions=True):
def build_queue_header(search=None, nzo_ids=None, start=0, limit=0, output=None):
""" Build full queue header """
"""Build full queue header"""
header = build_header(output=output)
@@ -1662,7 +1628,14 @@ def build_queue_header(search=None, nzo_ids=None, start=0, limit=0, output=None)
return header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page
def build_history(start=0, limit=0, search=None, failed_only=0, categories=None, nzo_ids=None):
def build_history(
start: int = 0,
limit: int = 0,
search: Optional[str] = None,
failed_only: int = 0,
categories: Optional[List[str]] = None,
nzo_ids: Optional[List[str]] = None,
):
"""Combine the jobs still in post-processing and the database history"""
if not limit:
limit = 1000000
@@ -1685,6 +1658,9 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None,
except:
logging.error(T("Failed to compile regex for search term: %s"), search_text)
if nzo_ids:
postproc_queue = [nzo for nzo in postproc_queue if nzo.nzo_id in nzo_ids]
# Multi-page support for postproc items
postproc_queue_size = len(postproc_queue)
if start > postproc_queue_size:
@@ -1769,7 +1745,7 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None,
def get_active_history(queue, items):
""" Get the currently in progress and active history queue. """
"""Get the currently in progress and active history queue."""
for nzo in queue:
item = {}
(
@@ -1812,7 +1788,7 @@ def get_active_history(queue, items):
def calc_timeleft(bytesleft, bps):
""" Calculate the time left in the format HH:MM:SS """
"""Calculate the time left in the format HH:MM:SS"""
try:
if bytesleft <= 0:
return "0:00:00"
@@ -1864,7 +1840,7 @@ def plural_to_single(kw, def_kw=""):
def del_from_section(kwargs):
""" Remove keyword in section """
"""Remove keyword in section"""
section = kwargs.get("section", "")
if section in ("servers", "rss", "categories"):
keyword = kwargs.get("keyword")
@@ -1882,7 +1858,7 @@ def del_from_section(kwargs):
def history_remove_failed():
""" Remove all failed jobs from history, including files """
"""Remove all failed jobs from history, including files"""
logging.info("Scheduled removal of all failed jobs")
with HistoryDB() as history_db:
del_job_files(history_db.get_failed_paths())
@@ -1890,7 +1866,7 @@ def history_remove_failed():
def history_remove_completed():
""" Remove all completed jobs from history """
"""Remove all completed jobs from history"""
logging.info("Scheduled removal of all completed jobs")
with HistoryDB() as history_db:
history_db.remove_completed()

View File

@@ -55,7 +55,7 @@ class ArticleCache:
return ANFO(len(self.__article_table), abs(self.__cache_size), self.__cache_limit_org)
def new_limit(self, limit: int):
""" Called when cache limit changes """
"""Called when cache limit changes"""
self.__cache_limit_org = limit
if limit < 0:
self.__cache_limit = self.__cache_upper_limit
@@ -70,20 +70,20 @@ class ArticleCache:
@synchronized(ARTICLE_COUNTER_LOCK)
def reserve_space(self, data_size: int):
""" Reserve space in the cache """
"""Reserve space in the cache"""
self.__cache_size += data_size
@synchronized(ARTICLE_COUNTER_LOCK)
def free_reserved_space(self, data_size: int):
""" Remove previously reserved space """
"""Remove previously reserved space"""
self.__cache_size -= data_size
def space_left(self) -> bool:
""" Is there space left in the set limit? """
"""Is there space left in the set limit?"""
return self.__cache_size < self.__cache_limit
def save_article(self, article: Article, data: bytes):
""" Save article in cache, either memory or disk """
"""Save article in cache, either memory or disk"""
nzo = article.nzf.nzo
if nzo.is_gone():
# Do not discard this article because the
@@ -115,7 +115,7 @@ class ArticleCache:
self.__flush_article_to_disk(article, data)
def load_article(self, article: Article):
""" Load the data of the article """
"""Load the data of the article"""
data = None
nzo = article.nzf.nzo
@@ -145,7 +145,7 @@ class ArticleCache:
logging.debug("Failed to flush item from cache, probably already deleted or written to disk")
def purge_articles(self, articles: List[Article]):
""" Remove all saved articles, from memory and disk """
"""Remove all saved articles, from memory and disk"""
logging.debug("Purging %s articles from the cache/disk", len(articles))
for article in articles:
if article in self.__article_table:

View File

@@ -36,7 +36,6 @@ from sabnzbd.filesystem import (
has_win_device,
diskspace,
get_filename,
get_ext,
has_unwanted_extension,
)
from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
@@ -267,7 +266,7 @@ SAFE_EXTS = (".mkv", ".mp4", ".avi", ".wmv", ".mpg", ".webm")
def is_cloaked(nzo: NzbObject, path: str, names: List[str]) -> bool:
""" Return True if this is likely to be a cloaked encrypted post """
"""Return True if this is likely to be a cloaked encrypted post"""
fname = os.path.splitext(get_filename(path.lower()))[0]
for name in names:
name = get_filename(name.lower())
@@ -296,7 +295,7 @@ def is_cloaked(nzo: NzbObject, path: str, names: List[str]) -> bool:
def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[bool, Optional[str]]:
""" Combines check for unwanted and encrypted files to save on CPU and IO """
"""Combines check for unwanted and encrypted files to save on CPU and IO"""
encrypted = False
unwanted = None
@@ -354,7 +353,7 @@ def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[b
except rarfile.RarCRCError as e:
# CRC errors can be thrown for wrong password or
# missing the next volume (with correct password)
if "cannot find volume" in str(e).lower():
if match_str(str(e), ("cannot find volume", "unexpected end of archive")):
# We assume this one worked!
password_hit = password
break

View File

@@ -39,14 +39,14 @@ RE_HHMM = re.compile(r"(\d+):(\d+)\s*$")
def tomorrow(t: float) -> float:
""" Return timestamp for tomorrow (midnight) """
"""Return timestamp for tomorrow (midnight)"""
now = time.localtime(t)
ntime = (now[0], now[1], now[2], 0, 0, 0, now[6], now[7], now[8])
return time.mktime(ntime) + DAY
def this_week(t: float) -> float:
""" Return timestamp for start of this week (monday) """
"""Return timestamp for start of this week (monday)"""
while 1:
tm = time.localtime(t)
if tm.tm_wday == 0:
@@ -57,19 +57,19 @@ def this_week(t: float) -> float:
def next_week(t: float) -> float:
""" Return timestamp for start of next week (monday) """
"""Return timestamp for start of next week (monday)"""
return this_week(t) + WEEK
def this_month(t: float) -> float:
""" Return timestamp for start of next month """
"""Return timestamp for start of next month"""
now = time.localtime(t)
ntime = (now[0], now[1], 1, 0, 0, 0, 0, 0, now[8])
return time.mktime(ntime)
def last_month_day(tm: time.struct_time) -> int:
""" Return last day of this month """
"""Return last day of this month"""
year, month = tm[:2]
day = DAYS[month]
# This simple formula for leap years is good enough
@@ -79,7 +79,7 @@ def last_month_day(tm: time.struct_time) -> int:
def next_month(t: float) -> float:
""" Return timestamp for start of next month """
"""Return timestamp for start of next month"""
now = time.localtime(t)
month = now.tm_mon + 1
year = now.tm_year
@@ -91,6 +91,38 @@ def next_month(t: float) -> float:
class BPSMeter:
__slots__ = (
"start_time",
"log_time",
"speed_log_time",
"last_update",
"bps",
"bps_list",
"server_bps",
"cached_amount",
"sum_cached_amount",
"day_total",
"week_total",
"month_total",
"grand_total",
"timeline_total",
"article_stats_tried",
"article_stats_failed",
"day_label",
"end_of_day",
"end_of_week",
"end_of_month",
"q_day",
"q_period",
"quota",
"left",
"have_quota",
"q_time",
"q_hour",
"q_minute",
"quota_enabled",
)
def __init__(self):
t = time.time()
self.start_time = t
@@ -128,7 +160,7 @@ class BPSMeter:
self.quota_enabled: bool = True # Scheduled quota enable/disable
def save(self):
""" Save admin to disk """
"""Save admin to disk"""
sabnzbd.save_admin(
(
self.last_update,
@@ -150,7 +182,7 @@ class BPSMeter:
)
def defaults(self):
""" Get the latest data from the database and assign to a fake server """
"""Get the latest data from the database and assign to a fake server"""
logging.debug("Setting default BPS meter values")
with sabnzbd.database.HistoryDB() as history_db:
grand, month, week = history_db.get_history_size()
@@ -167,7 +199,7 @@ class BPSMeter:
self.quota = self.left = cfg.quota_size.get_float()
def read(self):
""" Read admin from disk, return True when pause is needed """
"""Read admin from disk, return True when pause is needed"""
res = False
quota = self.left = cfg.quota_size.get_float() # Quota for this period
self.have_quota = bool(cfg.quota_size())
@@ -200,71 +232,82 @@ class BPSMeter:
self.defaults()
return res
def update(self, server: Optional[str] = None, amount: int = 0, force_full_update: bool = True):
""" Update counters for "server" with "amount" bytes """
t = time.time()
def init_server_stats(self, server: str = None):
"""Initialize counters for "server" """
if server not in self.cached_amount:
self.cached_amount[server] = 0
self.server_bps[server] = 0.0
if server not in self.day_total:
self.day_total[server] = 0
if server not in self.week_total:
self.week_total[server] = 0
if server not in self.month_total:
self.month_total[server] = 0
if server not in self.month_total:
self.month_total[server] = 0
if server not in self.grand_total:
self.grand_total[server] = 0
if server not in self.timeline_total:
self.timeline_total[server] = {}
if self.day_label not in self.timeline_total[server]:
self.timeline_total[server][self.day_label] = 0
if server not in self.server_bps:
self.server_bps[server] = 0.0
if server not in self.article_stats_tried:
self.article_stats_tried[server] = {}
self.article_stats_failed[server] = {}
if self.day_label not in self.article_stats_tried[server]:
self.article_stats_tried[server][self.day_label] = 0
self.article_stats_failed[server][self.day_label] = 0
def update(self, server: Optional[str] = None, amount: int = 0):
"""Update counters for "server" with "amount" bytes"""
# Add amount to temporary storage
if server:
if server not in self.cached_amount:
self.cached_amount[server] = 0
self.server_bps[server] = 0.0
self.cached_amount[server] += amount
self.sum_cached_amount += amount
# Wait at least 0.05 seconds between each full update
if not force_full_update and t - self.last_update < 0.05:
return
if t > self.end_of_day:
# current day passed. get new end of day
self.day_label = time.strftime("%Y-%m-%d")
self.day_total = {}
self.end_of_day = tomorrow(t) - 1.0
t = time.time()
if t > self.end_of_day:
# Current day passed, get new end of day
self.day_label = time.strftime("%Y-%m-%d")
self.end_of_day = tomorrow(t) - 1.0
self.day_total = {}
# Check end of week and end of month
if t > self.end_of_week:
self.week_total = {}
self.end_of_week = next_week(t) - 1.0
if t > self.end_of_month:
self.month_total = {}
self.end_of_month = next_month(t) - 1.0
# Need to reset all counters
for server in sabnzbd.Downloader.servers[:]:
self.init_server_stats(server.id)
# Add amounts that have been stored temporarily to statistics
for srv in self.cached_amount:
cached_amount = self.cached_amount[srv]
if cached_amount:
self.cached_amount[srv] = 0
if srv not in self.day_total:
self.day_total[srv] = 0
self.day_total[srv] += cached_amount
if srv not in self.week_total:
self.week_total[srv] = 0
self.week_total[srv] += cached_amount
if srv not in self.month_total:
self.month_total[srv] = 0
self.month_total[srv] += cached_amount
if srv not in self.grand_total:
self.grand_total[srv] = 0
self.grand_total[srv] += cached_amount
if srv not in self.timeline_total:
self.timeline_total[srv] = {}
if self.day_label not in self.timeline_total[srv]:
self.timeline_total[srv][self.day_label] = 0
self.timeline_total[srv][self.day_label] += cached_amount
if self.cached_amount[srv]:
self.day_total[srv] += self.cached_amount[srv]
self.week_total[srv] += self.cached_amount[srv]
self.month_total[srv] += self.cached_amount[srv]
self.grand_total[srv] += self.cached_amount[srv]
self.timeline_total[srv][self.day_label] += self.cached_amount[srv]
# Update server bps
try:
# Update server bps
self.server_bps[srv] = (self.server_bps[srv] * (self.last_update - self.start_time) + cached_amount) / (
t - self.start_time
)
except:
self.server_bps[srv] = (
self.server_bps[srv] * (self.last_update - self.start_time) + self.cached_amount[srv]
) / (t - self.start_time)
except ZeroDivisionError:
self.server_bps[srv] = 0.0
# Reset for next time
self.cached_amount[srv] = 0
# Quota check
if self.have_quota and self.quota_enabled:
self.left -= self.sum_cached_amount
@@ -278,14 +321,13 @@ class BPSMeter:
self.bps = (self.bps * (self.last_update - self.start_time) + self.sum_cached_amount) / (
t - self.start_time
)
except:
except ZeroDivisionError:
self.bps = 0.0
self.server_bps = {}
self.sum_cached_amount = 0
self.last_update = t
check_time = t - 5.0
self.sum_cached_amount = 0
if self.start_time < check_time:
self.start_time = check_time
@@ -304,20 +346,10 @@ class BPSMeter:
def register_server_article_tried(self, server: str):
"""Keep track how many articles were tried for each server"""
if server not in self.article_stats_tried:
self.article_stats_tried[server] = {}
self.article_stats_failed[server] = {}
if self.day_label not in self.article_stats_tried[server]:
self.article_stats_tried[server][self.day_label] = 0
self.article_stats_failed[server][self.day_label] = 0
# Update the counters
self.article_stats_tried[server][self.day_label] += 1
def register_server_article_failed(self, server: str):
"""Keep track how many articles failed for each server"""
# This function is always called after the one above,
# so we can skip the check if the keys in the dict exist
self.article_stats_failed[server][self.day_label] += 1
def reset(self):
@@ -325,8 +357,11 @@ class BPSMeter:
self.start_time = t
self.log_time = t
self.last_update = t
# Reset general BPS and the for all servers
self.bps = 0.0
self.server_bps = {}
for server in self.server_bps:
self.server_bps[server] = 0.0
def add_empty_time(self):
# Extra zeros, but never more than the maximum!
@@ -339,7 +374,7 @@ class BPSMeter:
self.bps_list = self.bps_list[len(self.bps_list) - BPS_LIST_MAX :]
def get_sums(self):
""" return tuple of grand, month, week, day totals """
"""return tuple of grand, month, week, day totals"""
return (
sum([v for v in self.grand_total.values()]),
sum([v for v in self.month_total.values()]),
@@ -348,7 +383,7 @@ class BPSMeter:
)
def amounts(self, server: str):
""" Return grand, month, week, day and article totals for specified server """
"""Return grand, month, week, day and article totals for specified server"""
return (
self.grand_total.get(server, 0),
self.month_total.get(server, 0),
@@ -360,7 +395,7 @@ class BPSMeter:
)
def clear_server(self, server: str):
""" Clean counters for specified server """
"""Clean counters for specified server"""
if server in self.day_total:
del self.day_total[server]
if server in self.week_total:
@@ -375,6 +410,7 @@ class BPSMeter:
del self.article_stats_tried[server]
if server in self.article_stats_failed:
del self.article_stats_failed[server]
self.init_server_stats(server)
self.save()
def get_bps_list(self):
@@ -425,7 +461,7 @@ class BPSMeter:
return True
def next_reset(self, t: Optional[float] = None):
""" Determine next reset time """
"""Determine next reset time"""
t = t or time.time()
tm = time.localtime(t)
if self.q_period == "d":
@@ -456,7 +492,7 @@ class BPSMeter:
logging.debug("Will reset quota at %s", tm)
def change_quota(self, allow_resume: bool = True):
""" Update quota, potentially pausing downloader """
"""Update quota, potentially pausing downloader"""
if not self.have_quota and self.quota < 0.5:
# Never set, use last period's size
per = cfg.quota_period()
@@ -486,7 +522,7 @@ class BPSMeter:
self.resume()
def get_quota(self):
""" If quota active, return check-function, hour, minute """
"""If quota active, return check-function, hour, minute"""
if self.have_quota:
self.q_period = cfg.quota_period()[0].lower()
self.q_day = 1
@@ -515,24 +551,19 @@ class BPSMeter:
return None, 0, 0
def set_status(self, status: bool, action: bool = True):
""" Disable/enable quota management """
"""Disable/enable quota management"""
self.quota_enabled = status
if action and not status:
self.resume()
@staticmethod
def resume():
""" Resume downloading """
"""Resume downloading"""
if cfg.quota_resume() and sabnzbd.Downloader.paused:
sabnzbd.Downloader.resume()
def midnight(self):
""" Midnight action: dummy update for all servers """
for server in self.day_total.keys():
self.update(server)
def quota_handler():
""" To be called from scheduler """
"""To be called from scheduler"""
logging.debug("Checking quota")
sabnzbd.BPSMeter.reset_quota()

View File

@@ -69,7 +69,7 @@ def validate_email(value):
def validate_server(value):
""" Check if server non-empty"""
"""Check if server non-empty"""
global email_endjob, email_full, email_rss
if value == "" and (email_endjob() or email_full() or email_rss()):
return T("Server address required"), None
@@ -78,7 +78,7 @@ def validate_server(value):
def validate_script(value):
""" Check if value is a valid script """
"""Check if value is a valid script"""
if not sabnzbd.__INITIALIZED__ or (value and sabnzbd.filesystem.is_valid_script(value)):
return None, value
elif (value and value == "None") or not value:
@@ -283,7 +283,6 @@ keep_awake = OptionBool("misc", "keep_awake", True)
win_menu = OptionBool("misc", "win_menu", True)
allow_incomplete_nzb = OptionBool("misc", "allow_incomplete_nzb", False)
enable_broadcast = OptionBool("misc", "enable_broadcast", True)
max_art_opt = OptionBool("misc", "max_art_opt", False)
ipv6_hosting = OptionBool("misc", "ipv6_hosting", False)
fixed_ports = OptionBool("misc", "fixed_ports", False)
api_warnings = OptionBool("misc", "api_warnings", True, protect=True)

View File

@@ -52,7 +52,7 @@ RE_PARAMFINDER = re.compile(r"""(?:'.*?')|(?:".*?")|(?:[^'",\s][^,]*)""")
class Option:
""" Basic option class, basic fields """
"""Basic option class, basic fields"""
def __init__(self, section: str, keyword: str, default_val: Any = None, add: bool = True, protect: bool = False):
"""Basic option
@@ -81,7 +81,7 @@ class Option:
anchor[keyword] = self
def get(self) -> Any:
""" Retrieve value field """
"""Retrieve value field"""
if self.__value is not None:
return self.__value
else:
@@ -91,11 +91,11 @@ class Option:
return str(self.get())
def get_dict(self, safe: bool = False) -> Dict[str, Any]:
""" Return value a dictionary """
"""Return value a dictionary"""
return {self.__keyword: self.get()}
def set_dict(self, values: Dict[str, Any]):
""" Set value based on dictionary """
"""Set value based on dictionary"""
if not self.__protect:
try:
self.set(values["value"])
@@ -103,7 +103,7 @@ class Option:
pass
def set(self, value: Any):
""" Set new value, no validation """
"""Set new value, no validation"""
global modified
if value is not None:
if isinstance(value, list) or isinstance(value, dict) or value != self.__value:
@@ -116,11 +116,11 @@ class Option:
return self.__default_val
def callback(self, callback: Callable):
""" Set callback function """
"""Set callback function"""
self.__callback = callback
def ident(self):
""" Return section-list and keyword """
"""Return section-list and keyword"""
return self.__sections, self.__keyword
@@ -145,7 +145,7 @@ class OptionNumber(Option):
super().__init__(section, keyword, default_val, add=add, protect=protect)
def set(self, value: Any):
""" set new value, limited by range """
"""set new value, limited by range"""
if value is not None:
try:
if self.__int:
@@ -165,12 +165,12 @@ class OptionNumber(Option):
super().set(value)
def __call__(self) -> Union[int, float]:
""" get() replacement """
"""get() replacement"""
return self.get()
class OptionBool(Option):
""" Boolean option class, always returns 0 or 1."""
"""Boolean option class, always returns 0 or 1."""
def __init__(self, section: str, keyword: str, default_val: bool = False, add: bool = True, protect: bool = False):
super().__init__(section, keyword, int(default_val), add=add, protect=protect)
@@ -180,12 +180,12 @@ class OptionBool(Option):
super().set(sabnzbd.misc.int_conv(value))
def __call__(self) -> int:
""" get() replacement """
"""get() replacement"""
return int(self.get())
class OptionDir(Option):
""" Directory option class """
"""Directory option class"""
def __init__(
self,
@@ -206,7 +206,7 @@ class OptionDir(Option):
super().__init__(section, keyword, default_val, add=add)
def get(self) -> str:
""" Return value, corrected for platform """
"""Return value, corrected for platform"""
p = super().get()
if sabnzbd.WIN32:
return p.replace("/", "\\") if "/" in p else p
@@ -214,7 +214,7 @@ class OptionDir(Option):
return p.replace("\\", "/") if "\\" in p else p
def get_path(self) -> str:
""" Return full absolute path """
"""Return full absolute path"""
value = self.get()
path = ""
if value:
@@ -224,11 +224,11 @@ class OptionDir(Option):
return path
def get_clipped_path(self) -> str:
""" Return clipped full absolute path """
"""Return clipped full absolute path"""
return clip_path(self.get_path())
def test_path(self) -> bool:
""" Return True if path exists """
"""Return True if path exists"""
value = self.get()
if value:
return os.path.exists(real_path(self.__root, value))
@@ -236,7 +236,7 @@ class OptionDir(Option):
return False
def set_root(self, root: str):
""" Set new root, is assumed to be valid """
"""Set new root, is assumed to be valid"""
self.__root = root
def set(self, value: str, create: bool = False) -> Optional[str]:
@@ -260,16 +260,16 @@ class OptionDir(Option):
return error
def set_create(self, value: bool):
""" Set auto-creation value """
"""Set auto-creation value"""
self.__create = value
def __call__(self) -> str:
""" get() replacement """
"""get() replacement"""
return self.get()
class OptionList(Option):
""" List option class """
"""List option class"""
def __init__(
self,
@@ -286,7 +286,7 @@ class OptionList(Option):
super().__init__(section, keyword, default_val, add=add, protect=protect)
def set(self, value: Union[str, List]) -> Optional[str]:
""" Set the list given a comma-separated string or a list """
"""Set the list given a comma-separated string or a list"""
error = None
if value is not None:
if not isinstance(value, list):
@@ -301,20 +301,20 @@ class OptionList(Option):
return error
def get_string(self) -> str:
""" Return the list as a comma-separated string """
"""Return the list as a comma-separated string"""
return ", ".join(self.get())
def default_string(self) -> str:
""" Return the default list as a comma-separated string """
"""Return the default list as a comma-separated string"""
return ", ".join(self.default())
def __call__(self) -> List[str]:
""" get() replacement """
"""get() replacement"""
return self.get()
class OptionStr(Option):
""" String class."""
"""String class."""
def __init__(
self,
@@ -331,15 +331,15 @@ class OptionStr(Option):
super().__init__(section, keyword, default_val, add=add, protect=protect)
def get_float(self) -> float:
""" Return value converted to a float, allowing KMGT notation """
"""Return value converted to a float, allowing KMGT notation"""
return sabnzbd.misc.from_units(self.get())
def get_int(self) -> int:
""" Return value converted to an int, allowing KMGT notation """
"""Return value converted to an int, allowing KMGT notation"""
return int(self.get_float())
def set(self, value: Any) -> Optional[str]:
""" Set stripped value """
"""Set stripped value"""
error = None
if isinstance(value, str) and self.__strip:
value = value.strip()
@@ -351,46 +351,46 @@ class OptionStr(Option):
return error
def __call__(self) -> str:
""" get() replacement """
"""get() replacement"""
return self.get()
class OptionPassword(Option):
""" Password class. """
"""Password class."""
def __init__(self, section: str, keyword: str, default_val: str = "", add: bool = True):
self.get_string = self.get_stars
super().__init__(section, keyword, default_val, add=add)
def get(self) -> Optional[str]:
""" Return decoded password """
"""Return decoded password"""
return decode_password(super().get(), self.ident())
def get_stars(self) -> Optional[str]:
""" Return non-descript asterisk string """
"""Return non-descript asterisk string"""
if self.get():
return "*" * 10
return ""
def get_dict(self, safe: bool = False) -> Dict[str, str]:
""" Return value a dictionary """
"""Return value a dictionary"""
if safe:
return {self.ident()[1]: self.get_stars()}
else:
return {self.ident()[1]: self.get()}
def set(self, pw: str):
""" Set password, encode it """
"""Set password, encode it"""
if (pw is not None and pw == "") or (pw and pw.strip("*")):
super().set(encode_password(pw))
def __call__(self) -> str:
""" get() replacement """
"""get() replacement"""
return self.get()
class ConfigServer:
""" Class defining a single server """
"""Class defining a single server"""
def __init__(self, name, values):
@@ -422,7 +422,7 @@ class ConfigServer:
add_to_database("servers", self.__name, self)
def set_dict(self, values: Dict[str, Any]):
""" Set one or more fields, passed as dictionary """
"""Set one or more fields, passed as dictionary"""
# Replace usage_at_start value with most recent statistics if the user changes the quota value
# Only when we are updating it from the Config
if sabnzbd.WEBUI_READY and values.get("quota", "") != self.quota():
@@ -459,7 +459,7 @@ class ConfigServer:
self.displayname.set(self.__name)
def get_dict(self, safe: bool = False) -> Dict[str, Any]:
""" Return a dictionary with all attributes """
"""Return a dictionary with all attributes"""
output_dict = {}
output_dict["name"] = self.__name
output_dict["displayname"] = self.displayname()
@@ -487,11 +487,11 @@ class ConfigServer:
return output_dict
def delete(self):
""" Remove from database """
"""Remove from database"""
delete_from_database("servers", self.__name)
def rename(self, name: str):
""" Give server new display name """
"""Give server new display name"""
self.displayname.set(name)
def ident(self) -> Tuple[str, str]:
@@ -499,7 +499,7 @@ class ConfigServer:
class ConfigCat:
""" Class defining a single category """
"""Class defining a single category"""
def __init__(self, name: str, values: Dict[str, Any]):
self.__name = name
@@ -516,7 +516,7 @@ class ConfigCat:
add_to_database("categories", self.__name, self)
def set_dict(self, values: Dict[str, Any]):
""" Set one or more fields, passed as dictionary """
"""Set one or more fields, passed as dictionary"""
for kw in ("order", "pp", "script", "dir", "newzbin", "priority"):
try:
value = values[kw]
@@ -525,7 +525,7 @@ class ConfigCat:
continue
def get_dict(self, safe: bool = False) -> Dict[str, Any]:
""" Return a dictionary with all attributes """
"""Return a dictionary with all attributes"""
output_dict = {}
output_dict["name"] = self.__name
output_dict["order"] = self.order()
@@ -537,19 +537,19 @@ class ConfigCat:
return output_dict
def delete(self):
""" Remove from database """
"""Remove from database"""
delete_from_database("categories", self.__name)
class OptionFilters(Option):
""" Filter list class """
"""Filter list class"""
def __init__(self, section, keyword, add=True):
super().__init__(section, keyword, add=add)
self.set([])
def move(self, current: int, new: int):
""" Move filter from position 'current' to 'new' """
"""Move filter from position 'current' to 'new'"""
lst = self.get()
try:
item = lst.pop(current)
@@ -570,7 +570,7 @@ class OptionFilters(Option):
self.set(lst)
def delete(self, pos: int):
""" Remove filter 'pos' """
"""Remove filter 'pos'"""
lst = self.get()
try:
lst.pop(pos)
@@ -579,14 +579,14 @@ class OptionFilters(Option):
self.set(lst)
def get_dict(self, safe: bool = False) -> Dict[str, str]:
""" Return filter list as a dictionary with keys 'filter[0-9]+' """
"""Return filter list as a dictionary with keys 'filter[0-9]+'"""
output_dict = {}
for n, rss_filter in enumerate(self.get()):
output_dict[f"filter{n}"] = rss_filter
return output_dict
def set_dict(self, values: Dict[str, Any]):
""" Create filter list from dictionary with keys 'filter[0-9]+' """
"""Create filter list from dictionary with keys 'filter[0-9]+'"""
filters = []
# We don't know how many filters there are, so just assume all values are filters
for n in range(len(values)):
@@ -597,12 +597,12 @@ class OptionFilters(Option):
self.set(filters)
def __call__(self) -> List[List[str]]:
""" get() replacement """
"""get() replacement"""
return self.get()
class ConfigRSS:
""" Class defining a single Feed definition """
"""Class defining a single Feed definition"""
def __init__(self, name, values):
self.__name = name
@@ -621,7 +621,7 @@ class ConfigRSS:
add_to_database("rss", self.__name, self)
def set_dict(self, values: Dict[str, Any]):
""" Set one or more fields, passed as dictionary """
"""Set one or more fields, passed as dictionary"""
for kw in ("uri", "cat", "pp", "script", "priority", "enable"):
try:
value = values[kw]
@@ -631,7 +631,7 @@ class ConfigRSS:
self.filters.set_dict(values)
def get_dict(self, safe: bool = False) -> Dict[str, Any]:
""" Return a dictionary with all attributes """
"""Return a dictionary with all attributes"""
output_dict = {}
output_dict["name"] = self.__name
output_dict["uri"] = self.uri()
@@ -646,11 +646,11 @@ class ConfigRSS:
return output_dict
def delete(self):
""" Remove from database """
"""Remove from database"""
delete_from_database("rss", self.__name)
def rename(self, new_name: str):
""" Update the name and the saved entries """
"""Update the name and the saved entries"""
delete_from_database("rss", self.__name)
sabnzbd.RSSReader.rename(self.__name, new_name)
self.__name = new_name
@@ -662,7 +662,7 @@ class ConfigRSS:
@synchronized(CONFIG_LOCK)
def add_to_database(section, keyword, obj):
""" add object as section/keyword to INI database """
"""add object as section/keyword to INI database"""
global database
if section not in database:
database[section] = {}
@@ -671,7 +671,7 @@ def add_to_database(section, keyword, obj):
@synchronized(CONFIG_LOCK)
def delete_from_database(section, keyword):
""" Remove section/keyword from INI database """
"""Remove section/keyword from INI database"""
global database, CFG, modified
del database[section][keyword]
if section == "servers" and "[" in keyword:
@@ -725,7 +725,7 @@ def get_dconfig(section, keyword, nested=False):
def get_config(section, keyword):
""" Return a config object, based on 'section', 'keyword' """
"""Return a config object, based on 'section', 'keyword'"""
try:
return database[section][keyword]
except KeyError:
@@ -734,7 +734,7 @@ def get_config(section, keyword):
def set_config(kwargs):
""" Set a config item, using values in dictionary """
"""Set a config item, using values in dictionary"""
try:
item = database[kwargs.get("section")][kwargs.get("keyword")]
except KeyError:
@@ -744,7 +744,7 @@ def set_config(kwargs):
def delete(section: str, keyword: str):
""" Delete specific config item """
"""Delete specific config item"""
try:
database[section][keyword].delete()
except KeyError:
@@ -842,7 +842,7 @@ def _read_config(path, try_backup=False):
@synchronized(SAVE_CONFIG_LOCK)
def save_config(force=False):
""" Update Setup file with current option values """
"""Update Setup file with current option values"""
global CFG, database, modified
if not (modified or force):
@@ -1025,7 +1025,7 @@ class ErrorCatchingArgumentParser(argparse.ArgumentParser):
def encode_password(pw):
""" Encode password in hexadecimal if needed """
"""Encode password in hexadecimal if needed"""
enc = False
if pw:
encPW = __PW_PREFIX
@@ -1058,7 +1058,7 @@ def decode_password(pw, name):
def clean_nice_ionice_parameters(value):
""" Verify that the passed parameters are not exploits """
"""Verify that the passed parameters are not exploits"""
if value:
parser = ErrorCatchingArgumentParser()
@@ -1081,7 +1081,7 @@ def clean_nice_ionice_parameters(value):
def all_lowercase(value):
""" Lowercase everything! """
"""Lowercase everything!"""
if isinstance(value, list):
# If list, for each item
return None, [item.lower() for item in value]
@@ -1089,7 +1089,7 @@ def all_lowercase(value):
def validate_octal(value):
""" Check if string is valid octal number """
"""Check if string is valid octal number"""
if not value:
return None, value
try:
@@ -1100,7 +1100,7 @@ def validate_octal(value):
def validate_no_unc(root, value, default):
""" Check if path isn't a UNC path """
"""Check if path isn't a UNC path"""
# Only need to check the 'value' part
if value and not value.startswith(r"\\"):
return validate_notempty(root, value, default)
@@ -1117,7 +1117,7 @@ def validate_safedir(root, value, default):
def validate_notempty(root, value, default):
""" If value is empty, return default """
"""If value is empty, return default"""
if value:
return None, value
else:
@@ -1142,5 +1142,5 @@ def validate_single_tag(value: List[str]) -> Tuple[None, List[str]]:
def create_api_key():
""" Return a new randomized API_KEY """
"""Return a new randomized API_KEY"""
return uuid.uuid4().hex

View File

@@ -26,7 +26,7 @@ import logging
import sys
import threading
import sqlite3
from typing import Union, Dict
from typing import Union, Dict, Optional, List
import sabnzbd
import sabnzbd.cfg
@@ -41,7 +41,7 @@ DB_LOCK = threading.RLock()
def convert_search(search):
""" Convert classic wildcard to SQL wildcard """
"""Convert classic wildcard to SQL wildcard"""
if not search:
# Default value
search = ""
@@ -75,14 +75,14 @@ class HistoryDB:
@synchronized(DB_LOCK)
def __init__(self):
""" Determine databse path and create connection """
"""Determine databse path and create connection"""
self.con = self.c = None
if not HistoryDB.db_path:
HistoryDB.db_path = os.path.join(sabnzbd.cfg.admin_dir.get_path(), DB_HISTORY_NAME)
self.connect()
def connect(self):
""" Create a connection to the database """
"""Create a connection to the database"""
create_table = not os.path.exists(HistoryDB.db_path)
self.con = sqlite3.connect(HistoryDB.db_path)
self.con.row_factory = sqlite3.Row
@@ -117,7 +117,7 @@ class HistoryDB:
)
def execute(self, command, args=(), save=False):
""" Wrapper for executing SQL commands """
"""Wrapper for executing SQL commands"""
for tries in range(5, 0, -1):
try:
if args and isinstance(args, tuple):
@@ -161,7 +161,7 @@ class HistoryDB:
return False
def create_history_db(self):
""" Create a new (empty) database file """
"""Create a new (empty) database file"""
self.execute(
"""
CREATE TABLE "history" (
@@ -198,7 +198,7 @@ class HistoryDB:
self.execute("PRAGMA user_version = 2;")
def close(self):
""" Close database connection """
"""Close database connection"""
try:
self.c.close()
self.con.close()
@@ -207,7 +207,7 @@ class HistoryDB:
logging.info("Traceback: ", exc_info=True)
def remove_completed(self, search=None):
""" Remove all completed jobs from the database, optional with `search` pattern """
"""Remove all completed jobs from the database, optional with `search` pattern"""
search = convert_search(search)
logging.info("Removing all completed jobs from history")
return self.execute(
@@ -215,7 +215,7 @@ class HistoryDB:
)
def get_failed_paths(self, search=None):
""" Return list of all storage paths of failed jobs (may contain non-existing or empty paths) """
"""Return list of all storage paths of failed jobs (may contain non-existing or empty paths)"""
search = convert_search(search)
fetch_ok = self.execute(
"""SELECT path FROM history WHERE name LIKE ? AND status = ?""", (search, Status.FAILED)
@@ -226,7 +226,7 @@ class HistoryDB:
return []
def remove_failed(self, search=None):
""" Remove all failed jobs from the database, optional with `search` pattern """
"""Remove all failed jobs from the database, optional with `search` pattern"""
search = convert_search(search)
logging.info("Removing all failed jobs from history")
return self.execute(
@@ -234,7 +234,7 @@ class HistoryDB:
)
def remove_history(self, jobs=None):
""" Remove all jobs in the list `jobs`, empty list will remove all completed jobs """
"""Remove all jobs in the list `jobs`, empty list will remove all completed jobs"""
if jobs is None:
self.remove_completed()
else:
@@ -246,7 +246,7 @@ class HistoryDB:
logging.info("[%s] Removing job %s from history", caller_name(), job)
def auto_history_purge(self):
""" Remove history items based on the configured history-retention """
"""Remove history items based on the configured history-retention"""
if sabnzbd.cfg.history_retention() == "0":
return
@@ -279,7 +279,7 @@ class HistoryDB:
)
def add_history_db(self, nzo, storage="", postproc_time=0, script_output="", script_line=""):
""" Add a new job entry to the database """
"""Add a new job entry to the database"""
t = build_history_info(nzo, storage, postproc_time, script_output, script_line, series_info=True)
self.execute(
@@ -292,8 +292,16 @@ class HistoryDB:
)
logging.info("Added job %s to history", nzo.final_name)
def fetch_history(self, start=None, limit=None, search=None, failed_only=0, categories=None, nzo_ids=None):
""" Return records for specified jobs """
def fetch_history(
self,
start: Optional[int] = None,
limit: Optional[int] = None,
search: Optional[str] = None,
failed_only: int = 0,
categories: Optional[List[str]] = None,
nzo_ids: Optional[List[str]] = None,
):
"""Return records for specified jobs"""
command_args = [convert_search(search)]
post = ""
@@ -304,7 +312,6 @@ class HistoryDB:
post += ")"
command_args.extend(categories)
if nzo_ids:
nzo_ids = nzo_ids.split(",")
post += " AND (NZO_ID = ?"
post += " OR NZO_ID = ? " * (len(nzo_ids) - 1)
post += ")"
@@ -339,7 +346,7 @@ class HistoryDB:
return items, fetched_items, total_items
def have_episode(self, series, season, episode):
""" Check whether History contains this series episode """
"""Check whether History contains this series episode"""
total = 0
series = series.lower().replace(".", " ").replace("_", " ").replace(" ", " ")
if series and season and episode:
@@ -351,7 +358,7 @@ class HistoryDB:
return total > 0
def have_name_or_md5sum(self, name, md5sum):
""" Check whether this name or md5sum is already in History """
"""Check whether this name or md5sum is already in History"""
total = 0
if self.execute(
"""SELECT COUNT(*) FROM History WHERE ( LOWER(name) = LOWER(?) OR md5sum = ? ) AND STATUS != ?""",
@@ -386,7 +393,7 @@ class HistoryDB:
return total, month, week
def get_script_log(self, nzo_id):
""" Return decompressed log file """
"""Return decompressed log file"""
data = ""
t = (nzo_id,)
if self.execute("""SELECT script_log FROM history WHERE nzo_id = ?""", t):
@@ -397,7 +404,7 @@ class HistoryDB:
return data
def get_name(self, nzo_id):
""" Return name of the job `nzo_id` """
"""Return name of the job `nzo_id`"""
t = (nzo_id,)
name = ""
if self.execute("""SELECT name FROM history WHERE nzo_id = ?""", t):
@@ -409,7 +416,7 @@ class HistoryDB:
return name
def get_path(self, nzo_id: str):
""" Return the `incomplete` path of the job `nzo_id` if it is still there """
"""Return the `incomplete` path of the job `nzo_id` if it is still there"""
t = (nzo_id,)
path = ""
if self.execute("""SELECT path FROM history WHERE nzo_id = ?""", t):
@@ -423,7 +430,7 @@ class HistoryDB:
return None
def get_other(self, nzo_id):
""" Return additional data for job `nzo_id` """
"""Return additional data for job `nzo_id`"""
t = (nzo_id,)
if self.execute("""SELECT * FROM history WHERE nzo_id = ?""", t):
try:
@@ -435,11 +442,11 @@ class HistoryDB:
return "", "", "", "", ""
def __enter__(self):
""" For context manager support """
"""For context manager support"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
""" For context manager support, ignore any exception """
"""For context manager support, ignore any exception"""
self.close()
@@ -447,7 +454,7 @@ _PP_LOOKUP = {0: "", 1: "R", 2: "U", 3: "D"}
def build_history_info(nzo, workdir_complete="", postproc_time=0, script_output="", script_line="", series_info=False):
""" Collects all the information needed for the database """
"""Collects all the information needed for the database"""
completed = int(time.time())
pp = _PP_LOOKUP.get(opts_to_pp(*nzo.repair_opts), "X")

View File

@@ -59,7 +59,7 @@ class BadYenc(Exception):
class Decoder:
""" Implement thread-like coordinator for the decoders """
"""Implement thread-like coordinator for the decoders"""
def __init__(self):
logging.debug("Initializing decoders")
@@ -106,7 +106,7 @@ class Decoder:
class DecoderWorker(Thread):
""" The actuall workhorse that handles decoding! """
"""The actuall workhorse that handles decoding!"""
def __init__(self, decoder_queue):
super().__init__()
@@ -246,7 +246,7 @@ def decode(article: Article, raw_data: List[bytes]) -> bytes:
def search_new_server(article: Article) -> bool:
""" Shorthand for searching new server or else increasing bad_articles """
"""Shorthand for searching new server or else increasing bad_articles"""
# Continue to the next one if we found new server
if not article.search_new_server():
# Increase bad articles if no new server was found

View File

@@ -42,7 +42,7 @@ MIN_FILE_SIZE = 10 * 1024 * 1024
def decode_par2(parfile):
""" Parse a par2 file and rename files listed in the par2 to their real name """
"""Parse a par2 file and rename files listed in the par2 to their real name"""
# Check if really a par2 file
if not is_parfile(parfile):
logging.info("Par2 file %s was not really a par2 file")
@@ -132,7 +132,7 @@ def is_probably_obfuscated(myinputfilename):
def deobfuscate_list(filelist, usefulname):
""" Check all files in filelist, and if wanted, deobfuscate: rename to filename based on usefulname"""
"""Check all files in filelist, and if wanted, deobfuscate: rename to filename based on usefulname"""
# to be sure, only keep really exsiting files:
filelist = [f for f in filelist if os.path.exists(f)]
@@ -142,17 +142,17 @@ def deobfuscate_list(filelist, usefulname):
# Found any par2 files we can use?
run_renamer = True
if not par2_files:
logging.debug("No par2 files found to process, running renamer.")
logging.debug("No par2 files found to process, running renamer")
else:
# Run par2 from SABnzbd on them
for par2_file in par2_files:
# Analyse data and analyse result
logging.debug("Deobfuscate par2: handling %s", par2_file)
if decode_par2(par2_file):
logging.debug("Deobfuscate par2 repair/verify finished.")
logging.debug("Deobfuscate par2 repair/verify finished")
run_renamer = False
else:
logging.debug("Deobfuscate par2 repair/verify did not find anything to rename.")
logging.debug("Deobfuscate par2 repair/verify did not find anything to rename")
# No par2 files? Then we try to rename qualifying (big, not-excluded, obfuscated) files to the job-name
if run_renamer:
@@ -163,7 +163,7 @@ def deobfuscate_list(filelist, usefulname):
if os.path.getsize(file) < MIN_FILE_SIZE:
# too small to care
continue
_, ext = os.path.splitext(file)
ext = get_ext(file)
if ext in extcounter:
extcounter[ext] += 1
else:
@@ -208,5 +208,7 @@ def deobfuscate_list(filelist, usefulname):
logging.info("Deobfuscate renaming %s to %s", otherfile, new_name)
# Rename and make sure the new filename is unique
renamer(otherfile, new_name)
else:
logging.debug("%s excluded from deobfuscation based on size, extension or non-obfuscation", filename)
else:
logging.info("No qualifying files found to deobfuscate")

View File

@@ -106,7 +106,7 @@ class DirectUnpacker(threading.Thread):
return True
def set_volumes_for_nzo(self):
""" Loop over all files to detect the names """
"""Loop over all files to detect the names"""
none_counter = 0
found_counter = 0
for nzf in self.nzo.files + self.nzo.finished_files:
@@ -126,7 +126,7 @@ class DirectUnpacker(threading.Thread):
@synchronized(START_STOP_LOCK)
def add(self, nzf: NzbFile):
""" Add jobs and start instance of DirectUnpack """
"""Add jobs and start instance of DirectUnpack"""
if not cfg.direct_unpack_tested():
test_disk_performance()
@@ -350,7 +350,7 @@ class DirectUnpacker(threading.Thread):
@synchronized(START_STOP_LOCK)
def create_unrar_instance(self):
""" Start the unrar instance using the user's options """
"""Start the unrar instance using the user's options"""
# Generate extraction path and save for post-proc
if not self.unpack_dir_info:
try:
@@ -432,7 +432,7 @@ class DirectUnpacker(threading.Thread):
@synchronized(START_STOP_LOCK)
def abort(self):
""" Abort running instance and delete generated files """
"""Abort running instance and delete generated files"""
if not self.killed and self.cur_setname:
logging.info("Aborting DirectUnpack for %s", self.cur_setname)
self.killed = True
@@ -494,7 +494,7 @@ class DirectUnpacker(threading.Thread):
self.reset_active()
def get_formatted_stats(self):
""" Get percentage or number of rar's done """
"""Get percentage or number of rar's done"""
if self.cur_setname and self.cur_setname in self.total_volumes:
# This won't work on obfuscated posts
if self.total_volumes[self.cur_setname] >= self.cur_volume and self.cur_volume:
@@ -520,7 +520,7 @@ def analyze_rar_filename(filename):
def abort_all():
""" Abort all running DirectUnpackers """
"""Abort all running DirectUnpackers"""
logging.info("Aborting all DirectUnpackers")
for direct_unpacker in ACTIVE_UNPACKERS:
direct_unpacker.abort()

View File

@@ -32,7 +32,7 @@ import sabnzbd.cfg as cfg
def compare_stat_tuple(tup1, tup2):
""" Test equality of two stat-tuples, content-related parts only """
"""Test equality of two stat-tuples, content-related parts only"""
if tup1.st_ino != tup2.st_ino:
return False
if tup1.st_size != tup2.st_size:
@@ -45,7 +45,7 @@ def compare_stat_tuple(tup1, tup2):
def clean_file_list(inp_list, folder, files):
""" Remove elements of "inp_list" not found in "files" """
"""Remove elements of "inp_list" not found in "files" """
for path in sorted(inp_list):
fld, name = os.path.split(path)
if fld == folder:
@@ -89,31 +89,31 @@ class DirScanner(threading.Thread):
cfg.dirscan_speed.callback(self.newspeed)
def newdir(self):
""" We're notified of a dir change """
"""We're notified of a dir change"""
self.ignored = {}
self.suspected = {}
self.dirscan_dir = cfg.dirscan_dir.get_path()
self.dirscan_speed = cfg.dirscan_speed()
def newspeed(self):
""" We're notified of a scan speed change """
"""We're notified of a scan speed change"""
# If set to 0, use None so the wait() is forever
self.dirscan_speed = cfg.dirscan_speed() or None
with self.loop_condition:
self.loop_condition.notify()
def stop(self):
""" Stop the dir scanner """
"""Stop the dir scanner"""
self.shutdown = True
with self.loop_condition:
self.loop_condition.notify()
def save(self):
""" Save dir scanner bookkeeping """
"""Save dir scanner bookkeeping"""
sabnzbd.save_admin((self.dirscan_dir, self.ignored, self.suspected), SCAN_FILE_NAME)
def run(self):
""" Start the scanner """
"""Start the scanner"""
logging.info("Dirscanner starting up")
self.shutdown = False
@@ -125,7 +125,7 @@ class DirScanner(threading.Thread):
self.scan()
def scan(self):
""" Do one scan of the watched folder """
"""Do one scan of the watched folder"""
def run_dir(folder, catdir):
try:

View File

@@ -50,11 +50,50 @@ _PENALTY_PERM = 10 # Permanent error, like bad username/password
_PENALTY_SHORT = 1 # Minimal penalty when no_penalties is set
_PENALTY_VERYSHORT = 0.1 # Error 400 without cause clues
# Wait this many seconds between checking idle servers for new articles or busy threads for timeout
_SERVER_CHECK_DELAY = 0.5
# Wait this many seconds between updates of the BPSMeter
_BPSMETER_UPDATE_DELAY = 0.05
TIMER_LOCK = RLock()
class Server:
# Pre-define attributes to save memory and improve get/set performance
__slots__ = (
"id",
"newid",
"restart",
"displayname",
"host",
"port",
"timeout",
"threads",
"priority",
"ssl",
"ssl_verify",
"ssl_ciphers",
"optional",
"retention",
"send_group",
"username",
"password",
"busy_threads",
"next_busy_threads_check",
"idle_threads",
"next_article_search",
"active",
"bad_cons",
"errormsg",
"warning",
"info",
"ssl_info",
"request",
"have_body",
"have_stat",
"article_queue",
)
def __init__(
self,
server_id,
@@ -94,6 +133,7 @@ class Server:
self.password: Optional[str] = password
self.busy_threads: List[NewsWrapper] = []
self.next_busy_threads_check: float = 0
self.idle_threads: List[NewsWrapper] = []
self.next_article_search: float = 0
self.active: bool = True
@@ -105,10 +145,15 @@ class Server:
self.request: bool = False # True if a getaddrinfo() request is pending
self.have_body: bool = True # Assume server has "BODY", until proven otherwise
self.have_stat: bool = True # Assume server has "STAT", until proven otherwise
self.article_queue: List[sabnzbd.nzbstuff.Article] = []
# Initialize threads
for i in range(threads):
self.idle_threads.append(NewsWrapper(self, i + 1))
# Tell the BPSMeter about this server
sabnzbd.BPSMeter.init_server_stats(self.id)
@property
def hostip(self) -> str:
"""In case a server still has active connections, we use the same IP again
@@ -146,6 +191,11 @@ class Server:
logging.debug("%s: No successful IP connection was possible", self.host)
return ip
def deactivate(self):
"""Deactive server and reset queued articles"""
self.active = False
self.reset_article_queue()
def stop(self):
"""Remove all connections from server"""
for nw in self.idle_threads:
@@ -162,8 +212,14 @@ class Server:
self.request = True
Thread(target=self._request_info_internal).start()
def reset_article_queue(self):
logging.debug("Resetting article queue for %s", self)
for article in self.article_queue:
sabnzbd.NzbQueue.reset_try_lists(article, remove_fetcher_from_trylist=False)
self.article_queue = []
def _request_info_internal(self):
""" Async attempt to run getaddrinfo() for specified server """
"""Async attempt to run getaddrinfo() for specified server"""
logging.debug("Retrieving server address information for %s", self.host)
self.info = get_server_addrinfo(self.host, self.port)
if not self.info:
@@ -178,7 +234,25 @@ class Server:
class Downloader(Thread):
""" Singleton Downloader Thread """
"""Singleton Downloader Thread"""
# Improves get/set performance, even though it's inherited from Thread
# Due to the huge number of get-calls in run(), it can actually make a difference
__slots__ = (
"paused",
"bandwidth_limit",
"bandwidth_perc",
"can_be_slowed",
"can_be_slowed_timer",
"sleep_time",
"paused_for_postproc",
"shutdown",
"server_restarts",
"force_disconnect",
"read_fds",
"servers",
"timers",
)
def __init__(self, paused=False):
super().__init__()
@@ -214,8 +288,6 @@ class Downloader(Thread):
self.read_fds: Dict[int, NewsWrapper] = {}
self.servers: List[Server] = []
self.server_dict: Dict[str, Server] = {} # For faster lookups, but is not updated later!
self.server_nr: int = 0
self.timers: Dict[str, List[float]] = {}
for server in config.get_servers():
@@ -256,45 +328,46 @@ class Downloader(Thread):
create = False
server.newid = newserver
server.restart = True
server.reset_article_queue()
self.server_restarts += 1
break
if create and enabled and host and port and threads:
server = Server(
newserver,
displayname,
host,
port,
timeout,
threads,
priority,
ssl,
ssl_verify,
ssl_ciphers,
send_group,
username,
password,
optional,
retention,
self.servers.append(
Server(
newserver,
displayname,
host,
port,
timeout,
threads,
priority,
ssl,
ssl_verify,
ssl_ciphers,
send_group,
username,
password,
optional,
retention,
)
)
self.servers.append(server)
self.server_dict[newserver] = server
# Update server-count
self.server_nr = len(self.servers)
# Sort the servers for performance
self.servers.sort(key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))
def add_socket(self, fileno: int, nw: NewsWrapper):
""" Add a socket ready to be used to the list to be watched """
"""Add a socket ready to be used to the list to be watched"""
self.read_fds[fileno] = nw
def remove_socket(self, nw: NewsWrapper):
""" Remove a socket to be watched """
"""Remove a socket to be watched"""
if nw.nntp:
self.read_fds.pop(nw.nntp.fileno, None)
@NzbQueueLocker
def set_paused_state(self, state: bool):
""" Set downloader to specified paused state """
"""Set downloader to specified paused state"""
self.paused = state
@NzbQueueLocker
@@ -307,7 +380,7 @@ class Downloader(Thread):
@NzbQueueLocker
def pause(self):
""" Pause the downloader, optionally saving admin """
"""Pause the downloader, optionally saving admin"""
if not self.paused:
self.paused = True
logging.info("Pausing")
@@ -407,22 +480,21 @@ class Downloader(Thread):
# Not fully the same as the code below for optional servers
server.bad_cons = 0
server.active = False
server.deactivate()
self.plan_server(server, _PENALTY_TIMEOUT)
# Optional and active server had too many problems.
# Disable it now and send a re-enable plan to the scheduler
if server.optional and server.active and (server.bad_cons / server.threads) > 3:
# Deactivate server
server.bad_cons = 0
server.active = False
server.deactivate()
logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT)
self.plan_server(server, _PENALTY_TIMEOUT)
# Remove all connections to server
for nw in server.idle_threads + server.busy_threads:
self.__reset_nw(
nw, "forcing disconnect", warn=False, wait=False, count_article_try=False, send_quit=False
)
self.__reset_nw(nw, "forcing disconnect", warn=False, wait=False, retry_article=False, send_quit=False)
# Make sure server address resolution is refreshed
server.info = None
@@ -438,6 +510,7 @@ class Downloader(Thread):
if not raw_data:
if not article.search_new_server():
sabnzbd.NzbQueue.register_article(article, success=False)
article.nzf.nzo.increase_bad_articles_counter("missing_articles")
return
# Send to decoder-queue
@@ -466,7 +539,9 @@ class Downloader(Thread):
logging.debug("SSL verification test: %s", sabnzbd.CERTIFICATE_VALIDATION)
# Kick BPS-Meter to check quota
sabnzbd.BPSMeter.update()
BPSMeter = sabnzbd.BPSMeter
BPSMeter.update()
next_bpsmeter_update = 0
# Check server expiration dates
check_server_expiration()
@@ -483,15 +558,17 @@ class Downloader(Thread):
if not server.busy_threads and server.next_article_search > now:
continue
for nw in server.busy_threads[:]:
if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
if nw.nntp and nw.nntp.error_msg:
# Already showed error
self.__reset_nw(nw)
else:
self.__reset_nw(nw, "timed out", warn=True)
server.bad_cons += 1
self.maybe_block_server(server)
if server.next_busy_threads_check < now:
server.next_busy_threads_check = now + _SERVER_CHECK_DELAY
for nw in server.busy_threads[:]:
if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
if nw.nntp and nw.nntp.error_msg:
# Already showed error
self.__reset_nw(nw)
else:
self.__reset_nw(nw, "timed out", warn=True)
server.bad_cons += 1
self.maybe_block_server(server)
if server.restart:
if not server.busy_threads:
@@ -509,7 +586,6 @@ class Downloader(Thread):
if (
not server.idle_threads
or server.restart
or self.is_paused()
or self.shutdown
or self.paused_for_postproc
@@ -531,20 +607,28 @@ class Downloader(Thread):
server.request_info()
break
article = sabnzbd.NzbQueue.get_article(server, self.servers)
if not article:
# Skip this server for 0.5 second
server.next_article_search = now + 0.5
break
if server.retention and article.nzf.nzo.avg_stamp < now - server.retention:
# Let's get rid of all the articles for this server at once
logging.info("Job %s too old for %s, moving on", article.nzf.nzo.final_name, server.host)
while article:
self.decode(article, None)
article = article.nzf.nzo.get_article(server, self.servers)
break
# Get article from pre-fetched ones or fetch new ones
if server.article_queue:
article = server.article_queue.pop(0)
else:
# Pre-fetch new articles
server.article_queue = sabnzbd.NzbQueue.get_articles(
server, self.servers, max(1, server.threads // 4)
)
if server.article_queue:
article = server.article_queue.pop(0)
# Mark expired articles as tried on this server
if server.retention and article.nzf.nzo.avg_stamp < now - server.retention:
self.decode(article, None)
while server.article_queue:
self.decode(server.article_queue.pop(), None)
# Move to the next server, allowing the next server to already start
# fetching the articles that were too old for this server
break
else:
# Skip this server for a short time
server.next_article_search = now + _SERVER_CHECK_DELAY
break
server.idle_threads.remove(nw)
server.busy_threads.append(nw)
@@ -572,18 +656,15 @@ class Downloader(Thread):
# Send goodbye if we have open socket
if nw.nntp:
self.__reset_nw(
nw,
"forcing disconnect",
wait=False,
count_article_try=False,
send_quit=True,
nw, "forcing disconnect", wait=False, count_article_try=False, send_quit=True
)
# Make sure server address resolution is refreshed
server.info = None
server.reset_article_queue()
self.force_disconnect = False
# Make sure we update the stats
sabnzbd.BPSMeter.update()
BPSMeter.update()
# Exit-point
if self.shutdown:
@@ -602,20 +683,20 @@ class Downloader(Thread):
# Need to initialize the check during first 20 seconds
if self.can_be_slowed is None or self.can_be_slowed_timer:
# Wait for stable speed to start testing
if not self.can_be_slowed_timer and sabnzbd.BPSMeter.get_stable_speed(timespan=10):
if not self.can_be_slowed_timer and BPSMeter.get_stable_speed(timespan=10):
self.can_be_slowed_timer = time.time()
# Check 10 seconds after enabling slowdown
if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10:
# Now let's check if it was stable in the last 10 seconds
self.can_be_slowed = sabnzbd.BPSMeter.get_stable_speed(timespan=10)
self.can_be_slowed = BPSMeter.get_stable_speed(timespan=10)
self.can_be_slowed_timer = 0
logging.debug("Downloader-slowdown: %r", self.can_be_slowed)
else:
read = []
sabnzbd.BPSMeter.reset()
BPSMeter.reset()
time.sleep(1.0)
@@ -628,8 +709,11 @@ class Downloader(Thread):
):
DOWNLOADER_CV.wait()
if now > next_bpsmeter_update:
BPSMeter.update()
next_bpsmeter_update = now + _BPSMETER_UPDATE_DELAY
if not read:
sabnzbd.BPSMeter.update(force_full_update=False)
continue
for selected in read:
@@ -643,7 +727,6 @@ class Downloader(Thread):
bytes_received, done, skip = (0, False, False)
if skip:
sabnzbd.BPSMeter.update(force_full_update=False)
continue
if bytes_received < 1:
@@ -652,22 +735,22 @@ class Downloader(Thread):
else:
try:
article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, bytes_received)
article.nzf.nzo.update_download_stats(BPSMeter.bps, server.id, bytes_received)
except AttributeError:
# In case nzf has disappeared because the file was deleted before the update could happen
pass
sabnzbd.BPSMeter.update(server.id, bytes_received, force_full_update=False)
if self.bandwidth_limit:
if sabnzbd.BPSMeter.sum_cached_amount + sabnzbd.BPSMeter.bps > self.bandwidth_limit:
sabnzbd.BPSMeter.update()
while sabnzbd.BPSMeter.bps > self.bandwidth_limit:
time.sleep(0.01)
sabnzbd.BPSMeter.update()
BPSMeter.update(server.id, bytes_received)
if not done and nw.status_code != 222:
if self.bandwidth_limit:
if BPSMeter.bps + BPSMeter.sum_cached_amount > self.bandwidth_limit:
BPSMeter.update()
while BPSMeter.bps > self.bandwidth_limit:
time.sleep(0.01)
BPSMeter.update()
if nw.status_code != 222 and not done:
if not nw.connected or nw.status_code == 480:
done = False
try:
nw.finish_connect(nw.status_code)
if sabnzbd.LOG_ALL:
@@ -692,7 +775,7 @@ class Downloader(Thread):
server.errormsg = errormsg
logging.warning(T("Too many connections to server %s"), server.host)
# Don't count this for the tries (max_art_tries) on this server
self.__reset_nw(nw, count_article_try=False, send_quit=True)
self.__reset_nw(nw, send_quit=True)
self.plan_server(server, _PENALTY_TOOMANY)
server.threads -= 1
elif ecode in (502, 481, 482) and clues_too_many_ip(msg):
@@ -743,11 +826,11 @@ class Downloader(Thread):
block = True
if block or (penalty and server.optional):
if server.active:
server.active = False
server.deactivate()
if penalty and (block or server.optional):
self.plan_server(server, penalty)
# Note that this will count towards the tries (max_art_tries) on this server!
self.__reset_nw(nw, send_quit=True)
# Note that the article is discard for this server
self.__reset_nw(nw, retry_article=False, send_quit=True)
continue
except:
logging.error(
@@ -757,7 +840,7 @@ class Downloader(Thread):
nntp_to_msg(nw.data),
)
# No reset-warning needed, above logging is sufficient
self.__reset_nw(nw)
self.__reset_nw(nw, retry_article=False)
if nw.connected:
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)
@@ -768,7 +851,6 @@ class Downloader(Thread):
logging.debug("Article <%s> is present", article.article)
elif nw.status_code == 211:
done = False
logging.debug("group command ok -> %s", nntp_to_msg(nw.data))
nw.group = nw.article.nzf.nzo.group
nw.clear_data()
@@ -818,6 +900,7 @@ class Downloader(Thread):
warn: bool = False,
wait: bool = True,
count_article_try: bool = True,
retry_article: bool = True,
send_quit: bool = False,
):
# Some warnings are errors, and not added as server.warning
@@ -838,16 +921,23 @@ class Downloader(Thread):
if nw.article:
# Only some errors should count towards the total tries for each server
if (
count_article_try
and nw.article.tries > cfg.max_art_tries()
and (nw.article.fetcher.optional or not cfg.max_art_opt())
):
if count_article_try:
nw.article.tries += 1
# Do we discard, or try again for this server
if not retry_article or nw.article.tries > cfg.max_art_tries():
# Too many tries on this server, consider article missing
self.decode(nw.article, None)
nw.article.tries = 0
else:
# Allow all servers to iterate over this nzo/nzf again
sabnzbd.NzbQueue.reset_try_lists(nw.article)
# Retry again with the same server
logging.debug(
"Re-adding article %s from %s to server %s",
nw.article.article,
nw.article.nzf.filename,
nw.article.fetcher,
)
nw.article.fetcher.article_queue.append(nw.article)
# Reset connection object
nw.hard_reset(wait, send_quit=send_quit)
@@ -886,7 +976,7 @@ class Downloader(Thread):
@synchronized(TIMER_LOCK)
def plan_server(self, server: Server, interval: int):
""" Plan the restart of a server in 'interval' minutes """
"""Plan the restart of a server in 'interval' minutes"""
if cfg.no_penalties() and interval > _PENALTY_SHORT:
# Overwrite in case of no_penalties
interval = _PENALTY_SHORT
@@ -901,7 +991,7 @@ class Downloader(Thread):
@synchronized(TIMER_LOCK)
def trigger_server(self, server_id: str, timestamp: float):
""" Called by scheduler, start server if timer still valid """
"""Called by scheduler, start server if timer still valid"""
logging.debug("Trigger planned server resume for server-id %s", server_id)
if server_id in self.timers:
if timestamp in self.timers[server_id]:
@@ -930,7 +1020,7 @@ class Downloader(Thread):
@NzbQueueLocker
@synchronized(TIMER_LOCK)
def check_timers(self):
""" Make sure every server without a non-expired timer is active """
"""Make sure every server without a non-expired timer is active"""
# Clean expired timers
now = time.time()
kicked = []
@@ -956,18 +1046,18 @@ class Downloader(Thread):
@NzbQueueLocker
def wakeup(self):
""" Just rattle the semaphore """
"""Just rattle the semaphore"""
pass
@NzbQueueLocker
def stop(self):
""" Shutdown, wrapped so the semaphore is notified """
"""Shutdown, wrapped so the semaphore is notified"""
self.shutdown = True
sabnzbd.notifier.send_notification("SABnzbd", T("Shutting down"), "startup")
def clues_login(text: str) -> bool:
""" Check for any "failed login" clues in the response code """
"""Check for any "failed login" clues in the response code"""
text = text.lower()
for clue in ("username", "password", "invalid", "authen", "access denied"):
if clue in text:
@@ -976,7 +1066,7 @@ def clues_login(text: str) -> bool:
def clues_too_many(text: str) -> bool:
""" Check for any "too many connections" clues in the response code """
"""Check for any "too many connections" clues in the response code"""
text = text.lower()
for clue in ("exceed", "connections", "too many", "threads", "limit"):
# Not 'download limit exceeded' error
@@ -986,7 +1076,7 @@ def clues_too_many(text: str) -> bool:
def clues_too_many_ip(text: str) -> bool:
""" Check for any "account sharing" clues in the response code """
"""Check for any "account sharing" clues in the response code"""
text = text.lower()
for clue in ("simultaneous ip", "multiple ip"):
if clue in text:
@@ -995,7 +1085,7 @@ def clues_too_many_ip(text: str) -> bool:
def clues_pay(text: str) -> bool:
""" Check for messages about payments """
"""Check for messages about payments"""
text = text.lower()
for clue in ("credits", "paym", "expired", "exceeded"):
if clue in text:

View File

@@ -44,14 +44,14 @@ def errormsg(msg):
def get_email_date():
""" Return un-localized date string for the Date: field """
"""Return un-localized date string for the Date: field"""
# Get locale independent date/time string: "Sun May 22 20:15:12 2011"
day, month, dayno, hms, year = time.asctime(time.gmtime()).split()
return "%s, %s %s %s %s +0000" % (day, dayno, month, year, hms)
def send_email(message, email_to, test=None):
""" Send message if message non-empty and email-parms are set """
"""Send message if message non-empty and email-parms are set"""
# we should not use CFG if we are testing. we should use values
# from UI instead.
# email_to is replaced at send_with_template, since it can be an array
@@ -153,7 +153,7 @@ def send_email(message, email_to, test=None):
def send_with_template(prefix, parm, test=None):
""" Send an email using template """
"""Send an email using template"""
parm["from"] = cfg.email_from()
parm["date"] = get_email_date()
@@ -203,7 +203,7 @@ def send_with_template(prefix, parm, test=None):
def endjob(
filename, cat, status, path, bytes_downloaded, fail_msg, stages, script, script_output, script_ret, test=None
):
""" Send end-of-job email """
"""Send end-of-job email"""
# Is it allowed?
if not check_cat("misc", cat, keyword="email") and not test:
return None
@@ -241,19 +241,19 @@ def endjob(
def rss_mail(feed, jobs):
""" Send notification email containing list of files """
"""Send notification email containing list of files"""
parm = {"amount": len(jobs), "feed": feed, "jobs": jobs}
return send_with_template("rss", parm)
def badfetch_mail(msg, url):
""" Send notification email about failed NZB fetch """
"""Send notification email about failed NZB fetch"""
parm = {"url": url, "msg": msg}
return send_with_template("badfetch", parm)
def diskfull_mail():
""" Send email about disk full, no templates """
"""Send email about disk full, no templates"""
if cfg.email_full():
return send_email(
T(
@@ -277,7 +277,7 @@ Please make room and resume SABnzbd manually.
def _prepare_message(txt):
""" Parse the headers in the template to real headers """
"""Parse the headers in the template to real headers"""
msg = EmailMessage()
payload = []
body = False

View File

@@ -28,14 +28,14 @@ CODEPAGE = locale.getpreferredencoding()
def utob(str_in: AnyStr) -> bytes:
""" Shorthand for converting UTF-8 string to bytes """
"""Shorthand for converting UTF-8 string to bytes"""
if isinstance(str_in, bytes):
return str_in
return str_in.encode("utf-8")
def ubtou(str_in: AnyStr) -> str:
""" Shorthand for converting unicode bytes to UTF-8 string """
"""Shorthand for converting unicode bytes to UTF-8 string"""
if not isinstance(str_in, bytes):
return str_in
return str_in.decode("utf-8")
@@ -78,5 +78,5 @@ def correct_unknown_encoding(str_or_bytes_in: AnyStr) -> str:
def xml_name(p):
""" Prepare name for use in HTML/XML contect """
"""Prepare name for use in HTML/XML contect"""
return escape(str(p))

View File

@@ -52,7 +52,7 @@ else:
def get_ext(filename: str) -> str:
""" Return lowercased file extension """
"""Return lowercased file extension"""
try:
return os.path.splitext(filename)[1].lower()
except:
@@ -60,7 +60,7 @@ def get_ext(filename: str) -> str:
def has_unwanted_extension(filename: str) -> bool:
""" Determine if a filename has an unwanted extension, given the configured mode """
"""Determine if a filename has an unwanted extension, given the configured mode"""
extension = get_ext(filename).replace(".", "")
if extension and sabnzbd.cfg.unwanted_extensions():
return (
@@ -73,11 +73,14 @@ def has_unwanted_extension(filename: str) -> bool:
and extension not in sabnzbd.cfg.unwanted_extensions()
)
else:
return bool(sabnzbd.cfg.unwanted_extensions_mode())
# Don't consider missing extensions unwanted to prevent indiscriminate blocking of
# obfuscated jobs in whitelist mode. If there is an extension but nothing listed as
# (un)wanted, the result only depends on the configured mode.
return bool(extension and sabnzbd.cfg.unwanted_extensions_mode())
def get_filename(path: str) -> str:
""" Return path without the file extension """
"""Return path without the file extension"""
try:
return os.path.split(path)[1]
except:
@@ -85,12 +88,12 @@ def get_filename(path: str) -> str:
def setname_from_path(path: str) -> str:
""" Get the setname from a path """
"""Get the setname from a path"""
return os.path.splitext(os.path.basename(path))[0]
def is_writable(path: str) -> bool:
""" Return True is file is writable (also when non-existent) """
"""Return True is file is writable (also when non-existent)"""
if os.path.isfile(path):
return bool(os.stat(path).st_mode & stat.S_IWUSR)
else:
@@ -267,7 +270,7 @@ def sanitize_foldername(name: str) -> str:
def sanitize_and_trim_path(path: str) -> str:
""" Remove illegal characters and trim element size """
"""Remove illegal characters and trim element size"""
path = path.strip()
new_path = ""
if sabnzbd.WIN32:
@@ -292,21 +295,20 @@ def sanitize_and_trim_path(path: str) -> str:
return os.path.abspath(os.path.normpath(new_path))
def sanitize_files_in_folder(folder):
"""Sanitize each file in the folder, return list of new names"""
lst = []
for root, _, files in os.walk(folder):
for file_ in files:
path = os.path.join(root, file_)
new_path = os.path.join(root, sanitize_filename(file_))
if path != new_path:
try:
os.rename(path, new_path)
path = new_path
except:
logging.debug("Cannot rename %s to %s", path, new_path)
lst.append(path)
return lst
def sanitize_files(folder: Optional[str] = None, filelist: Optional[List[str]] = None) -> List[str]:
"""Sanitize each file in the folder or list of filepaths, return list of new names"""
logging.info("Checking if any resulting filenames need to be sanitized")
if folder:
filelist = listdir_full(folder)
else:
filelist = filelist or []
# Loop over all the files
output_filelist = []
for old_path in filelist:
# Will skip files if there's nothing to sanitize
output_filelist.append(renamer(old_path, old_path))
return output_filelist
def real_path(loc: str, path: str) -> str:
@@ -470,7 +472,7 @@ def safe_fnmatch(f: str, pattern: str) -> bool:
def globber(path: str, pattern: str = "*") -> List[str]:
""" Return matching base file/folder names in folder `path` """
"""Return matching base file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
return [f for f in os.listdir(path) if safe_fnmatch(f, pattern)]
@@ -478,7 +480,7 @@ def globber(path: str, pattern: str = "*") -> List[str]:
def globber_full(path: str, pattern: str = "*") -> List[str]:
""" Return matching full file/folder names in folder `path` """
"""Return matching full file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
return [os.path.join(path, f) for f in os.listdir(path) if safe_fnmatch(f, pattern)]
@@ -502,12 +504,12 @@ def fix_unix_encoding(folder: str):
def is_valid_script(basename: str) -> bool:
""" Determine if 'basename' is a valid script """
"""Determine if 'basename' is a valid script"""
return basename in list_scripts(default=False, none=False)
def list_scripts(default: bool = False, none: bool = True) -> List[str]:
""" Return a list of script names, optionally with 'Default' added """
"""Return a list of script names, optionally with 'Default' added"""
lst = []
path = sabnzbd.cfg.script_dir.get_path()
if path and os.access(path, os.R_OK):
@@ -533,7 +535,7 @@ def list_scripts(default: bool = False, none: bool = True) -> List[str]:
def make_script_path(script: str) -> Optional[str]:
""" Return full script path, if any valid script exists, else None """
"""Return full script path, if any valid script exists, else None"""
script_path = None
script_dir = sabnzbd.cfg.script_dir.get_path()
if script_dir and script:
@@ -558,7 +560,7 @@ def get_admin_path(name: str, future: bool):
def set_chmod(path: str, permissions: int, report: bool):
""" Set 'permissions' on 'path', report any errors when 'report' is True """
"""Set 'permissions' on 'path', report any errors when 'report' is True"""
try:
logging.debug("Applying permissions %s (octal) to %s", oct(permissions), path)
os.chmod(path, permissions)
@@ -570,7 +572,7 @@ def set_chmod(path: str, permissions: int, report: bool):
def set_permissions(path: str, recursive: bool = True):
""" Give folder tree and its files their proper permissions """
"""Give folder tree and its files their proper permissions"""
if not sabnzbd.WIN32:
umask = sabnzbd.cfg.umask()
try:
@@ -615,14 +617,14 @@ def userxbit(filename: str) -> bool:
def clip_path(path: str) -> str:
r""" Remove \\?\ or \\?\UNC\ prefix from Windows path """
r"""Remove \\?\ or \\?\UNC\ prefix from Windows path"""
if sabnzbd.WIN32 and path and "?" in path:
path = path.replace("\\\\?\\UNC\\", "\\\\", 1).replace("\\\\?\\", "", 1)
return path
def long_path(path: str) -> str:
""" For Windows, convert to long style path; others, return same path """
"""For Windows, convert to long style path; others, return same path"""
if sabnzbd.WIN32 and path and not path.startswith("\\\\?\\"):
if path.startswith("\\\\"):
# Special form for UNC paths
@@ -679,7 +681,7 @@ def create_all_dirs(path: str, apply_umask: bool = False) -> Union[str, bool]:
@synchronized(DIR_LOCK)
def get_unique_path(dirpath: str, n: int = 0, create_dir: bool = True) -> str:
""" Determine a unique folder or filename """
"""Determine a unique folder or filename"""
if not check_mount(dirpath):
return dirpath
@@ -714,7 +716,7 @@ def get_unique_filename(path: str) -> str:
@synchronized(DIR_LOCK)
def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
""" List all files in dirs and sub-dirs """
"""List all files in dirs and sub-dirs"""
filelist = []
for root, dirs, files in os.walk(input_dir):
for file in files:
@@ -768,7 +770,7 @@ def move_to_path(path: str, new_path: str) -> Tuple[bool, Optional[str]]:
@synchronized(DIR_LOCK)
def cleanup_empty_directories(path: str):
""" Remove all empty folders inside (and including) 'path' """
"""Remove all empty folders inside (and including) 'path'"""
path = os.path.normpath(path)
while 1:
repeat = False
@@ -792,7 +794,7 @@ def cleanup_empty_directories(path: str):
@synchronized(DIR_LOCK)
def get_filepath(path: str, nzo, filename: str):
""" Create unique filepath """
"""Create unique filepath"""
# This procedure is only used by the Assembler thread
# It does no umask setting
# It uses the dir_lock for the (rare) case that the
@@ -828,16 +830,17 @@ def get_filepath(path: str, nzo, filename: str):
@synchronized(DIR_LOCK)
def renamer(old: str, new: str, create_local_directories: bool = False):
def renamer(old: str, new: str, create_local_directories: bool = False) -> str:
"""Rename file/folder with retries for Win32
Optionally alows the creation of local directories if they don't exist yet"""
Optionally alows the creation of local directories if they don't exist yet
Returns new filename (which could be changed due to sanitize_filenam) on success"""
# Sanitize last part of new name
path, name = os.path.split(new)
new = os.path.join(path, sanitize_filename(name))
# Skip if nothing changes
if old == new:
return
return new
# In case we want nonexistent directories to be created, check for directory escape (forbidden)
if create_local_directories:
@@ -864,7 +867,7 @@ def renamer(old: str, new: str, create_local_directories: bool = False):
# Now we try the back-up method
logging.debug("Could not rename, trying move for %s to %s", old, new)
shutil.move(old, new)
return
return new
except OSError as err:
logging.debug('Error renaming "%s" to "%s" <%s>', old, new, err)
if err.winerror == 17:
@@ -883,17 +886,18 @@ def renamer(old: str, new: str, create_local_directories: bool = False):
raise OSError("Failed to rename")
else:
shutil.move(old, new)
return new
def remove_file(path: str):
""" Wrapper function so any file removal is logged """
"""Wrapper function so any file removal is logged"""
logging.debug("[%s] Deleting file %s", sabnzbd.misc.caller_name(), path)
os.remove(path)
@synchronized(DIR_LOCK)
def remove_dir(path: str):
""" Remove directory with retries for Win32 """
"""Remove directory with retries for Win32"""
logging.debug("[%s] Removing dir %s", sabnzbd.misc.caller_name(), path)
if sabnzbd.WIN32:
retries = 15
@@ -916,7 +920,7 @@ def remove_dir(path: str):
@synchronized(DIR_LOCK)
def remove_all(path: str, pattern: str = "*", keep_folder: bool = False, recursive: bool = False):
""" Remove folder and all its content (optionally recursive) """
"""Remove folder and all its content (optionally recursive)"""
if path and os.path.exists(path):
# Fast-remove the whole tree if recursive
if pattern == "*" and not keep_folder and recursive:
@@ -994,7 +998,7 @@ def disk_free_macos_clib_statfs64(directory: str) -> Tuple[int, int]:
def diskspace_base(dir_to_check: str) -> Tuple[float, float]:
""" Return amount of free and used diskspace in GBytes """
"""Return amount of free and used diskspace in GBytes"""
# Find first folder level that exists in the path
x = "x"
while x and not os.path.exists(dir_to_check):
@@ -1038,7 +1042,7 @@ __LAST_DISK_CALL = 0
def diskspace(force: bool = False) -> Dict[str, Tuple[float, float]]:
""" Wrapper to cache results """
"""Wrapper to cache results"""
global __DIRS_CHECKED, __DISKS_SAME, __LAST_DISK_RESULT, __LAST_DISK_CALL
# Reset everything when folders changed

View File

@@ -31,14 +31,14 @@ from sabnzbd.encoding import ubtou
def timeout(max_timeout):
""" Timeout decorator, parameter in seconds. """
"""Timeout decorator, parameter in seconds."""
def timeout_decorator(item):
""" Wrap the original function. """
"""Wrap the original function."""
@functools.wraps(item)
def func_wrapper(*args, **kwargs):
""" Closure for function. """
"""Closure for function."""
with multiprocessing.pool.ThreadPool(processes=1) as pool:
async_result = pool.apply_async(item, args, kwargs)
# raises a TimeoutError if execution exceeds max_timeout

View File

@@ -52,6 +52,8 @@ from sabnzbd.misc import (
get_server_addrinfo,
is_lan_addr,
is_loopback_addr,
ip_in_subnet,
strip_ipv4_mapped_notation,
)
from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file
from sabnzbd.encoding import xml_name, utob
@@ -99,7 +101,7 @@ def secured_expose(
check_api_key: bool = False,
access_type: int = 4,
) -> Union[Callable, str]:
""" Wrapper for both cherrypy.expose and login/access check """
"""Wrapper for both cherrypy.expose and login/access check"""
if not wrap_func:
return functools.partial(
secured_expose,
@@ -186,29 +188,19 @@ def check_access(access_type: int = 4, warn_user: bool = False) -> bool:
if access_type <= cfg.inet_exposure():
return True
# CherryPy will report ::ffff:192.168.0.10 on dual-stack situation
# It will always contain that ::ffff: prefix, the ipaddress module can handle that
remote_ip = cherrypy.request.remote.ip
# Check for localhost
if is_loopback_addr(remote_ip):
return True
# No special ranged defined
is_allowed = False
if not cfg.local_ranges():
try:
is_allowed = ipaddress.ip_address(remote_ip).is_private
except ValueError:
# Something malformed, reject
pass
# No local ranges defined, allow all private addresses by default
is_allowed = is_lan_addr(remote_ip)
else:
# Get rid off the special dual-stack notation
if remote_ip.startswith("::ffff:") and not remote_ip.find(".") < 0:
remote_ip = remote_ip.replace("::ffff:", "")
is_allowed = any(remote_ip.startswith(r) for r in cfg.local_ranges())
is_allowed = any(ip_in_subnet(remote_ip, r) for r in cfg.local_ranges())
# Reject
if not is_allowed and warn_user:
log_warning_and_ip(T("Refused connection from:"))
return is_allowed
@@ -306,12 +298,12 @@ def check_login():
def check_basic_auth(_, username, password):
""" CherryPy basic authentication validation """
"""CherryPy basic authentication validation"""
return username == cfg.username() and password == cfg.password()
def set_auth(conf):
""" Set the authentication for CherryPy """
"""Set the authentication for CherryPy"""
if cfg.username() and cfg.password() and not cfg.html_login():
conf.update(
{
@@ -379,7 +371,7 @@ def check_apikey(kwargs):
def log_warning_and_ip(txt):
""" Include the IP and the Proxy-IP for warnings """
"""Include the IP and the Proxy-IP for warnings"""
if cfg.api_warnings():
logging.warning("%s %s", txt, cherrypy.request.remote_label)
@@ -487,12 +479,12 @@ class MainPage:
@secured_expose(check_api_key=True, access_type=1)
def api(self, **kwargs):
""" Redirect to API-handler, we check the access_type in the API-handler """
"""Redirect to API-handler, we check the access_type in the API-handler"""
return api_handler(kwargs)
@secured_expose
def scriptlog(self, **kwargs):
""" Needed for all skins, URL is fixed due to postproc """
"""Needed for all skins, URL is fixed due to postproc"""
# No session key check, due to fixed URLs
name = kwargs.get("name")
if name:
@@ -503,7 +495,7 @@ class MainPage:
@secured_expose(check_api_key=True)
def retry(self, **kwargs):
""" Duplicate of retry of History, needed for some skins """
"""Duplicate of retry of History, needed for some skins"""
job = kwargs.get("job", "")
url = kwargs.get("url", "").strip()
pp = kwargs.get("pp")
@@ -522,13 +514,13 @@ class MainPage:
@secured_expose
def robots_txt(self, **kwargs):
""" Keep web crawlers out """
"""Keep web crawlers out"""
cherrypy.response.headers["Content-Type"] = "text/plain"
return "User-agent: *\nDisallow: /\n"
@secured_expose
def description_xml(self, **kwargs):
""" Provide the description.xml which was broadcast via SSDP """
"""Provide the description.xml which was broadcast via SSDP"""
if is_lan_addr(cherrypy.request.remote.ip):
cherrypy.response.headers["Content-Type"] = "application/xml"
return utob(sabnzbd.utils.ssdp.server_ssdp_xml())
@@ -543,7 +535,7 @@ class Wizard:
@secured_expose(check_configlock=True)
def index(self, **kwargs):
""" Show the language selection page """
"""Show the language selection page"""
if sabnzbd.WIN32:
from sabnzbd.utils.apireg import get_install_lng
@@ -559,7 +551,7 @@ class Wizard:
@secured_expose(check_configlock=True)
def one(self, **kwargs):
""" Accept language and show server page """
"""Accept language and show server page"""
if kwargs.get("lang"):
cfg.language.set(kwargs.get("lang"))
@@ -605,7 +597,7 @@ class Wizard:
@secured_expose(check_configlock=True)
def two(self, **kwargs):
""" Accept server and show the final page for restart """
"""Accept server and show the final page for restart"""
# Save server details
if kwargs:
kwargs["enable"] = 1
@@ -627,13 +619,13 @@ class Wizard:
@secured_expose
def exit(self, **kwargs):
""" Stop SABnzbd """
"""Stop SABnzbd"""
sabnzbd.shutdown_program()
return T("SABnzbd shutdown finished")
def get_access_info():
""" Build up a list of url's that sabnzbd can be accessed from """
"""Build up a list of url's that sabnzbd can be accessed from"""
# Access_url is used to provide the user a link to SABnzbd depending on the host
cherryhost = cfg.cherryhost()
host = socket.gethostname().lower()
@@ -826,7 +818,7 @@ class NzoPage:
checked = True
active.append(
{
"filename": nzf.filename if nzf.filename else nzf.subject,
"filename": nzf.filename,
"mbleft": "%.2f" % (nzf.bytes_left / MEBI),
"mb": "%.2f" % (nzf.bytes / MEBI),
"size": to_units(nzf.bytes, "B"),
@@ -1373,7 +1365,6 @@ SPECIAL_BOOL_LIST = (
"empty_postproc",
"html_login",
"wait_for_dfolder",
"max_art_opt",
"enable_broadcast",
"warn_dupl_jobs",
"replace_illegal",
@@ -1680,7 +1671,7 @@ class ConfigServer:
def unique_svr_name(server):
""" Return a unique variant on given server name """
"""Return a unique variant on given server name"""
num = 0
svr = 1
new_name = server
@@ -1695,7 +1686,7 @@ def unique_svr_name(server):
def check_server(host, port, ajax):
""" Check if server address resolves properly """
"""Check if server address resolves properly"""
if host.lower() == "localhost" and sabnzbd.AMBI_LOCALHOST:
return badParameterResponse(T("Warning: LOCALHOST is ambiguous, use numerical IP-address."), ajax)
@@ -1706,7 +1697,7 @@ def check_server(host, port, ajax):
def handle_server(kwargs, root=None, new_svr=False):
""" Internal server handler """
"""Internal server handler"""
ajax = kwargs.get("ajax")
host = kwargs.get("host", "").strip()
if not host:
@@ -1857,11 +1848,11 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def save_rss_rate(self, **kwargs):
""" Save changed RSS automatic readout rate """
"""Save changed RSS automatic readout rate"""
cfg.rss_rate.set(kwargs.get("rss_rate"))
config.save_config()
sabnzbd.Scheduler.restart()
raise rssRaiser(self.__root, kwargs)
raise Raiser(self.__root)
@secured_expose(check_api_key=True, check_configlock=True)
def upd_rss_feed(self, **kwargs):
@@ -1886,7 +1877,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def save_rss_feed(self, **kwargs):
""" Update Feed level attributes """
"""Update Feed level attributes"""
feed_name = kwargs.get("feed")
try:
cf = config.get_rss()[feed_name]
@@ -1912,7 +1903,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def toggle_rss_feed(self, **kwargs):
""" Toggle automatic read-out flag of Feed """
"""Toggle automatic read-out flag of Feed"""
try:
item = config.get_rss()[kwargs.get("feed")]
except KeyError:
@@ -1927,7 +1918,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def add_rss_feed(self, **kwargs):
""" Add one new RSS feed definition """
"""Add one new RSS feed definition"""
feed = Strip(kwargs.get("feed")).strip("[]")
uri = Strip(kwargs.get("uri"))
if feed and uri:
@@ -1956,11 +1947,11 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def upd_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
"""Wrapper, so we can call from api.py"""
self.internal_upd_rss_filter(**kwargs)
def internal_upd_rss_filter(self, **kwargs):
""" Save updated filter definition """
"""Save updated filter definition"""
try:
feed_cfg = config.get_rss()[kwargs.get("feed")]
except KeyError:
@@ -1993,7 +1984,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def del_rss_feed(self, *args, **kwargs):
""" Remove complete RSS feed """
"""Remove complete RSS feed"""
kwargs["section"] = "rss"
kwargs["keyword"] = kwargs.get("feed")
del_from_section(kwargs)
@@ -2002,11 +1993,11 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def del_rss_filter(self, **kwargs):
""" Wrapper, so we can call from api.py """
"""Wrapper, so we can call from api.py"""
self.internal_del_rss_filter(**kwargs)
def internal_del_rss_filter(self, **kwargs):
""" Remove one RSS filter """
"""Remove one RSS filter"""
try:
feed_cfg = config.get_rss()[kwargs.get("feed")]
except KeyError:
@@ -2020,7 +2011,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def download_rss_feed(self, *args, **kwargs):
""" Force download of all matching jobs in a feed """
"""Force download of all matching jobs in a feed"""
if "feed" in kwargs:
feed = kwargs["feed"]
self.__refresh_readout = feed
@@ -2032,14 +2023,14 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def clean_rss_jobs(self, *args, **kwargs):
""" Remove processed RSS jobs from UI """
"""Remove processed RSS jobs from UI"""
sabnzbd.RSSReader.clear_downloaded(kwargs["feed"])
self.__evaluate = True
raise rssRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True, check_configlock=True)
def test_rss_feed(self, *args, **kwargs):
""" Read the feed content again and show results """
"""Read the feed content again and show results"""
if "feed" in kwargs:
feed = kwargs["feed"]
self.__refresh_readout = feed
@@ -2052,7 +2043,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def eval_rss_feed(self, *args, **kwargs):
""" Re-apply the filters to the feed """
"""Re-apply the filters to the feed"""
if "feed" in kwargs:
self.__refresh_download = False
self.__refresh_force = False
@@ -2064,7 +2055,7 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def download(self, **kwargs):
""" Download NZB from provider (Download button) """
"""Download NZB from provider (Download button)"""
feed = kwargs.get("feed")
url = kwargs.get("url")
nzbname = kwargs.get("nzbname")
@@ -2083,13 +2074,13 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def rss_now(self, *args, **kwargs):
""" Run an automatic RSS run now """
"""Run an automatic RSS run now"""
sabnzbd.Scheduler.force_rss()
raise rssRaiser(self.__root, kwargs)
raise Raiser(self.__root)
def ConvertSpecials(p):
""" Convert None to 'None' and 'Default' to '' """
"""Convert None to 'None' and 'Default' to ''"""
if p is None:
p = "None"
elif p.lower() == T("Default").lower():
@@ -2098,12 +2089,12 @@ def ConvertSpecials(p):
def IsNone(value):
""" Return True if either None, 'None' or '' """
"""Return True if either None, 'None' or ''"""
return value is None or value == "" or value.lower() == "none"
def Strip(txt):
""" Return stripped string, can handle None """
"""Return stripped string, can handle None"""
try:
return txt.strip()
except:
@@ -2619,7 +2610,7 @@ def orphan_add_all():
def badParameterResponse(msg, ajax=None):
""" Return a html page with error message and a 'back' button """
"""Return a html page with error message and a 'back' button"""
if ajax:
return sabnzbd.api.report("json", error=msg)
else:
@@ -2646,7 +2637,7 @@ def badParameterResponse(msg, ajax=None):
def ShowString(name, msg):
""" Return a html page listing a file and a 'back' button """
"""Return a html page listing a file and a 'back' button"""
return """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN">
<html>

View File

@@ -45,14 +45,14 @@ _LOCALEDIR = "" # Holds path to the translation base folder
def set_locale_info(domain, localedir):
""" Setup the domain and localedir for translations """
"""Setup the domain and localedir for translations"""
global _DOMAIN, _LOCALEDIR
_DOMAIN = domain
_LOCALEDIR = localedir
def set_language(language=None):
""" Activate language, empty language will set default texts. """
"""Activate language, empty language will set default texts."""
if not language:
language = ""

View File

@@ -64,12 +64,9 @@ if sabnzbd.WIN32:
except ImportError:
pass
if sabnzbd.DARWIN:
from PyObjCTools import AppHelper
def time_format(fmt):
""" Return time-format string adjusted for 12/24 hour clock setting """
"""Return time-format string adjusted for 12/24 hour clock setting"""
if cfg.ampm() and HAVE_AMPM:
return fmt.replace("%H:%M:%S", "%I:%M:%S %p").replace("%H:%M", "%I:%M %p")
else:
@@ -111,7 +108,7 @@ def calc_age(date: datetime.datetime, trans=False) -> str:
def safe_lower(txt: Any) -> str:
""" Return lowercased string. Return '' for None """
"""Return lowercased string. Return '' for None"""
if txt:
return txt.lower()
else:
@@ -131,7 +128,7 @@ def cmp(x, y):
def name_to_cat(fname, cat=None):
""" Retrieve category from file name, but only if "cat" is None. """
"""Retrieve category from file name, but only if "cat" is None."""
if cat is None and fname.startswith("{{"):
n = fname.find("}}")
if n > 2:
@@ -176,7 +173,7 @@ def cat_to_opts(cat, pp=None, script=None, priority=None) -> Tuple[str, int, str
def pp_to_opts(pp: int) -> Tuple[bool, bool, bool]:
""" Convert numeric processing options to (repair, unpack, delete) """
"""Convert numeric processing options to (repair, unpack, delete)"""
# Convert the pp to an int
pp = sabnzbd.interface.int_conv(pp)
if pp == 0:
@@ -189,7 +186,7 @@ def pp_to_opts(pp: int) -> Tuple[bool, bool, bool]:
def opts_to_pp(repair: bool, unpack: bool, delete: bool) -> int:
""" Convert (repair, unpack, delete) to numeric process options """
"""Convert (repair, unpack, delete) to numeric process options"""
pp = 0
if repair:
pp = 1
@@ -219,7 +216,7 @@ _wildcard_to_regex = {
def wildcard_to_re(text):
""" Convert plain wildcard string (with '*' and '?') to regex. """
"""Convert plain wildcard string (with '*' and '?') to regex."""
return "".join([_wildcard_to_regex.get(ch, ch) for ch in text])
@@ -263,43 +260,12 @@ def cat_convert(cat):
return None
def windows_variant():
"""Determine Windows variant
Return vista_plus, x64
"""
from win32api import GetVersionEx
from win32con import VER_PLATFORM_WIN32_NT
import winreg
vista_plus = x64 = False
maj, _minor, _buildno, plat, _csd = GetVersionEx()
if plat == VER_PLATFORM_WIN32_NT:
vista_plus = maj > 5
if vista_plus:
# Must be done the hard way, because the Python runtime lies to us.
# This does *not* work:
# return os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64'
# because the Python runtime returns 'X86' even on an x64 system!
key = winreg.OpenKey(
winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment"
)
for n in range(winreg.QueryInfoKey(key)[1]):
name, value, _val_type = winreg.EnumValue(key, n)
if name == "PROCESSOR_ARCHITECTURE":
x64 = value.upper() == "AMD64"
break
winreg.CloseKey(key)
return vista_plus, x64
_SERVICE_KEY = "SYSTEM\\CurrentControlSet\\services\\"
_SERVICE_PARM = "CommandLine"
def get_serv_parms(service):
""" Get the service command line parameters from Registry """
"""Get the service command line parameters from Registry"""
import winreg
service_parms = []
@@ -320,7 +286,7 @@ def get_serv_parms(service):
def set_serv_parms(service, args):
""" Set the service command line parameters in Registry """
"""Set the service command line parameters in Registry"""
import winreg
serv = []
@@ -339,7 +305,7 @@ def set_serv_parms(service, args):
def get_from_url(url: str) -> Optional[str]:
""" Retrieve URL and return content """
"""Retrieve URL and return content"""
try:
req = urllib.request.Request(url)
req.add_header("User-Agent", "SABnzbd/%s" % sabnzbd.__version__)
@@ -350,7 +316,7 @@ def get_from_url(url: str) -> Optional[str]:
def convert_version(text):
""" Convert version string to numerical value and a testversion indicator """
"""Convert version string to numerical value and a testversion indicator"""
version = 0
test = True
m = RE_VERSION.search(ubtou(text))
@@ -456,7 +422,7 @@ def check_latest_version():
def upload_file_to_sabnzbd(url, fp):
""" Function for uploading nzbs to a running SABnzbd instance """
"""Function for uploading nzbs to a running SABnzbd instance"""
try:
fp = urllib.parse.quote_plus(fp)
url = "%s&mode=addlocalfile&name=%s" % (url, fp)
@@ -477,7 +443,7 @@ def upload_file_to_sabnzbd(url, fp):
def from_units(val: str) -> float:
""" Convert K/M/G/T/P notation to float """
"""Convert K/M/G/T/P notation to float"""
val = str(val).strip().upper()
if val == "-1":
return float(val)
@@ -555,7 +521,7 @@ def caller_name(skip=2):
def exit_sab(value: int):
""" Leave the program after flushing stderr/stdout """
"""Leave the program after flushing stderr/stdout"""
sys.stderr.flush()
sys.stdout.flush()
# Cannot use sys.exit as it will not work inside the macOS-runner-thread
@@ -563,7 +529,7 @@ def exit_sab(value: int):
def split_host(srv):
""" Split host:port notation, allowing for IPV6 """
"""Split host:port notation, allowing for IPV6"""
if not srv:
return None, None
@@ -623,7 +589,7 @@ def get_cache_limit():
def get_windows_memory():
""" Use ctypes to extract available memory """
"""Use ctypes to extract available memory"""
class MEMORYSTATUSEX(ctypes.Structure):
_fields_ = [
@@ -649,13 +615,13 @@ def get_windows_memory():
def get_darwin_memory():
""" Use system-call to extract total memory on macOS """
"""Use system-call to extract total memory on macOS"""
system_output = run_command(["sysctl", "hw.memsize"])
return float(system_output.split()[1])
def on_cleanup_list(filename, skip_nzb=False):
""" Return True if a filename matches the clean-up list """
"""Return True if a filename matches the clean-up list"""
lst = cfg.cleanup_list()
if lst:
name, ext = os.path.splitext(filename)
@@ -692,7 +658,7 @@ _HAVE_STATM = _PAGE_SIZE and memory_usage()
def loadavg():
""" Return 1, 5 and 15 minute load average of host or "" if not supported """
"""Return 1, 5 and 15 minute load average of host or "" if not supported"""
p = ""
if not sabnzbd.WIN32 and not sabnzbd.DARWIN:
opt = cfg.show_sysload()
@@ -707,7 +673,7 @@ def loadavg():
def format_time_string(seconds):
""" Return a formatted and translated time string """
"""Return a formatted and translated time string"""
def unit(single, n):
# Seconds and minutes are special due to historical reasons
@@ -743,7 +709,7 @@ def format_time_string(seconds):
def int_conv(value: Any) -> int:
""" Safe conversion to int (can handle None) """
"""Safe conversion to int (can handle None)"""
try:
value = int(value)
except:
@@ -752,7 +718,7 @@ def int_conv(value: Any) -> int:
def create_https_certificates(ssl_cert, ssl_key):
""" Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key' """
"""Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'"""
try:
from sabnzbd.utils.certgen import generate_key, generate_local_cert
@@ -768,7 +734,7 @@ def create_https_certificates(ssl_cert, ssl_key):
def get_all_passwords(nzo):
""" Get all passwords, from the NZB, meta and password file """
"""Get all passwords, from the NZB, meta and password file"""
if nzo.password:
logging.info("Found a password that was set by the user: %s", nzo.password)
passwords = [nzo.password.strip()]
@@ -817,7 +783,7 @@ def get_all_passwords(nzo):
def find_on_path(targets):
""" Search the PATH for a program and return full path """
"""Search the PATH for a program and return full path"""
if sabnzbd.WIN32:
paths = os.getenv("PATH").split(";")
else:
@@ -834,8 +800,53 @@ def find_on_path(targets):
return None
def strip_ipv4_mapped_notation(ip: str) -> str:
    """Return the plain IPv4 form of an IPv4-mapped IPv6 address.

    An address like ::ffff:192.168.0.10 becomes 192.168.0.10; any other value
    (including invalid input) is passed through unchanged. CherryPy may report
    remote addresses in this mapped notation, and while the ipaddress module can
    parse them, it mishandles the is_private/is_loopback properties for such
    addresses. See https://bugs.python.org/issue33433
    """
    try:
        mapped = ipaddress.ip_address(ip).ipv4_mapped
        if mapped is not None:
            return str(mapped)
    except (AttributeError, ValueError):
        # Plain IPv4 input raises AttributeError (no ipv4_mapped attribute),
        # invalid input raises ValueError; both fall through unchanged
        pass
    return str(ip)
def ip_in_subnet(ip: str, subnet: str) -> bool:
    """Return True if ip falls inside subnet.

    The subnet is normally given in address/prefix or address/netmask form
    (e.g. "192.168.1.0/24" or "10.42.0.0/255.255.0.0"). Input in SABnzbd's old
    cfg.local_ranges() settings style (e.g. "192.168.1."), intended for use
    with str.startswith(), is also accepted and converted internally to the
    address/prefix form.
    """
    if not ip or not subnet:
        return False
    try:
        if "/" not in subnet and "::" not in subnet:
            # No prefix or netmask, and not a single (compressed) IPv6 address:
            # treat as the legacy local_ranges style, honoring the IP version
            if ":" in subnet:
                total_groups, bits_per_group, separator = 8, 16, ":"
            else:
                total_groups, bits_per_group, separator = 4, 8, "."
            groups = subnet.rstrip(separator).split(separator)
            prefix_len = bits_per_group * len(groups)
            # Pad with zero groups up to a full address, then attach the prefix
            groups += ["0"] * (total_groups - len(groups))
            subnet = "%s/%s" % (separator.join(groups), prefix_len)
        normalized_ip = strip_ipv4_mapped_notation(ip)
        return ipaddress.ip_address(normalized_ip) in ipaddress.ip_network(subnet, strict=True)
    except Exception:
        # Probably an invalid address or range
        return False
def is_ipv4_addr(ip: str) -> bool:
""" Determine if the ip is an IPv4 address """
"""Determine if the ip is an IPv4 address"""
try:
return ipaddress.ip_address(ip).version == 4
except ValueError:
@@ -843,7 +854,7 @@ def is_ipv4_addr(ip: str) -> bool:
def is_ipv6_addr(ip: str) -> bool:
""" Determine if the ip is an IPv6 address; square brackets ([2001::1]) are OK """
"""Determine if the ip is an IPv6 address; square brackets ([2001::1]) are OK"""
try:
return ipaddress.ip_address(ip.strip("[]")).version == 6
except (ValueError, AttributeError):
@@ -851,25 +862,29 @@ def is_ipv6_addr(ip: str) -> bool:
def is_loopback_addr(ip: str) -> bool:
    """Return True if ip is an IPv4 or IPv6 local loopback address.

    Square brackets around an IPv6 address (e.g. "[::1]") are stripped first,
    and IPv4-mapped notation is normalized so e.g. "::ffff:127.0.0.1" is also
    recognized as loopback. Invalid input returns False.
    """
    try:
        if ip.find(".") < 0:
            ip = ip.strip("[]")
        ip = strip_ipv4_mapped_notation(ip)
        return ipaddress.ip_address(ip).is_loopback
    except (ValueError, AttributeError):
        return False
def is_localhost(value: str) -> bool:
    """Return True if the input is some variety of 'localhost': either the
    literal hostname or any IPv4/IPv6 loopback address."""
    return (value == "localhost") or is_loopback_addr(value)
def is_lan_addr(ip: str) -> bool:
""" Determine if the ip is a local area network address """
"""Determine if the ip is a local area network address"""
try:
ip = strip_ipv4_mapped_notation(ip)
return (
ip not in ("0.0.0.0", "255.255.255.255", "::")
# The ipaddress module considers these private, see https://bugs.python.org/issue38655
not ip in ("0.0.0.0", "255.255.255.255")
and not ip_in_subnet(ip, "::/128") # Also catch (partially) exploded forms of "::"
and ipaddress.ip_address(ip).is_private
and not is_loopback_addr(ip)
)
@@ -878,7 +893,7 @@ def is_lan_addr(ip: str) -> bool:
def ip_extract() -> List[str]:
""" Return list of IP addresses of this system """
"""Return list of IP addresses of this system"""
ips = []
program = find_on_path("ip")
if program:
@@ -908,7 +923,7 @@ def ip_extract() -> List[str]:
def get_server_addrinfo(host: str, port: int) -> socket.getaddrinfo:
""" Return processed getaddrinfo() """
"""Return processed getaddrinfo()"""
try:
int(port)
except:
@@ -959,15 +974,16 @@ def get_base_url(url: str) -> str:
def match_str(text: AnyStr, matches: Tuple[AnyStr, ...]) -> Optional[AnyStr]:
    """Return the first element of 'matches' found in 'text', otherwise None.

    The comparison is case-insensitive; the returned value is the original
    (unlowered) element of 'matches'.
    """
    text = text.lower()
    for match in matches:
        # Lower-case the candidate too, so mixed-case entries still match
        if match.lower() in text:
            return match
    return None
def nntp_to_msg(text: Union[List[AnyStr], str]) -> str:
""" Format raw NNTP bytes data for display """
"""Format raw NNTP bytes data for display"""
if isinstance(text, list):
text = text[0]
@@ -981,7 +997,7 @@ def nntp_to_msg(text: Union[List[AnyStr], str]) -> str:
def list2cmdline(lst: List[str]) -> str:
""" convert list to a cmd.exe-compatible command string """
"""convert list to a cmd.exe-compatible command string"""
nlst = []
for arg in lst:
if not arg:
@@ -1053,7 +1069,7 @@ def build_and_run_command(command: List[str], flatten_command=False, **kwargs):
def run_command(cmd: List[str], **kwargs):
""" Run simple external command and return output as a string. """
"""Run simple external command and return output as a string."""
with build_and_run_command(cmd, **kwargs) as p:
txt = platform_btou(p.stdout.read())
p.wait()

View File

@@ -88,7 +88,7 @@ RAR_VERSION = 0
def find_programs(curdir):
""" Find external programs """
"""Find external programs"""
def check(path, program):
p = os.path.abspath(os.path.join(path, program))
@@ -169,7 +169,7 @@ ENV_NZO_FIELDS = [
def external_processing(extern_proc, nzo: NzbObject, complete_dir, nicename, status):
""" Run a user postproc script, return console output and exit value """
"""Run a user postproc script, return console output and exit value"""
failure_url = nzo.nzo_info.get("failure", "")
# Items can be bool or null, causing POpen to fail
command = [
@@ -229,7 +229,7 @@ def external_processing(extern_proc, nzo: NzbObject, complete_dir, nicename, sta
def unpack_magic(
nzo: NzbObject, workdir, workdir_complete, dele, one_folder, joinables, zips, rars, sevens, ts, depth=0
):
""" Do a recursive unpack from all archives in 'workdir' to 'workdir_complete' """
"""Do a recursive unpack from all archives in 'workdir' to 'workdir_complete'"""
if depth > 5:
logging.warning(T("Unpack nesting too deep [%s]"), nzo.final_name)
return False, []
@@ -333,7 +333,7 @@ def unpack_magic(
# Filejoin Functions
##############################################################################
def match_ts(file):
""" Return True if file is a joinable TS file """
"""Return True if file is a joinable TS file"""
match = TS_RE.search(file)
if not match:
return False, "", 0
@@ -348,7 +348,7 @@ def match_ts(file):
def clean_up_joinables(names):
""" Remove joinable files and their .1 backups """
"""Remove joinable files and their .1 backups"""
for name in names:
if os.path.exists(name):
try:
@@ -364,7 +364,7 @@ def clean_up_joinables(names):
def get_seq_number(name):
""" Return sequence number if name as an int """
"""Return sequence number if name as an int"""
head, tail = os.path.splitext(name)
if tail == ".ts":
match, set, num = match_ts(name)
@@ -907,7 +907,7 @@ def unzip(nzo: NzbObject, workdir, workdir_complete, delete, one_folder, zips):
def ZIP_Extract(zipfile, extraction_path, one_folder):
""" Unzip single zip set 'zipfile' to 'extraction_path' """
"""Unzip single zip set 'zipfile' to 'extraction_path'"""
command = ["%s" % ZIP_COMMAND, "-o", "-Pnone", "%s" % clip_path(zipfile), "-d%s" % extraction_path]
if one_folder or cfg.flat_unpack():
@@ -1080,7 +1080,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
# PAR2 Functions
##############################################################################
def par2_repair(parfile_nzf: NzbFile, nzo: NzbObject, workdir, setname, single):
""" Try to repair a set, return readd or correctness """
"""Try to repair a set, return readd or correctness"""
# Check if file exists, otherwise see if another is done
parfile_path = os.path.join(workdir, parfile_nzf.filename)
if not os.path.exists(parfile_path) and nzo.extrapars[setname]:
@@ -1206,7 +1206,7 @@ _RE_LOADED_PAR2 = re.compile(r"Loaded (\d+) new packets")
def PAR_Verify(parfile, nzo: NzbObject, setname, joinables, single=False):
""" Run par2 on par-set """
"""Run par2 on par-set"""
used_joinables = []
used_for_repair = []
# set the current nzo status to "Verifying...". Used in History
@@ -1518,7 +1518,7 @@ _RE_FILENAME = re.compile(r'"([^"]+)"')
def MultiPar_Verify(parfile, nzo: NzbObject, setname, joinables, single=False):
""" Run par2 on par-set """
"""Run par2 on par-set"""
parfolder = os.path.split(parfile)[0]
used_joinables = []
used_for_repair = []
@@ -1980,7 +1980,7 @@ def rar_volumelist(rarfile_path, password, known_volumes):
# Sort the various RAR filename formats properly :\
def rar_sort(a, b):
""" Define sort method for rar file names """
"""Define sort method for rar file names"""
aext = a.split(".")[-1]
bext = b.split(".")[-1]
@@ -2040,7 +2040,7 @@ def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=
def quick_check_set(set, nzo):
""" Check all on-the-fly md5sums of a set """
"""Check all on-the-fly md5sums of a set"""
md5pack = nzo.md5packs.get(set)
if md5pack is None:
return False
@@ -2132,7 +2132,7 @@ def unrar_check(rar):
def par2_mt_check(par2_path):
""" Detect if we have multicore par2 variants """
"""Detect if we have multicore par2 variants"""
try:
par2_version = run_command([par2_path, "-h"])
# Look for a threads option
@@ -2144,7 +2144,7 @@ def par2_mt_check(par2_path):
def is_sfv_file(myfile):
""" Checks if given file is a SFV file, and returns result as boolean """
"""Checks if given file is a SFV file, and returns result as boolean"""
# based on https://stackoverflow.com/a/7392391/5235502
textchars = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7F})
is_ascii_string = lambda input_bytes: not bool(input_bytes.translate(None, textchars))
@@ -2188,7 +2188,7 @@ def is_sfv_file(myfile):
def sfv_check(sfvs, nzo: NzbObject, workdir):
""" Verify files using SFV files """
"""Verify files using SFV files"""
# Update status
nzo.status = Status.VERIFYING
nzo.set_action_line(T("Trying SFV verification"), "...")
@@ -2271,7 +2271,7 @@ def sfv_check(sfvs, nzo: NzbObject, workdir):
def parse_sfv(sfv_filename):
""" Parse SFV file and return dictonary of crc32's and filenames """
"""Parse SFV file and return dictonary of crc32's and filenames"""
results = {}
with open(sfv_filename, mode="rb") as sfv_list:
for sfv_item in sfv_list:
@@ -2287,7 +2287,7 @@ def parse_sfv(sfv_filename):
def crc_calculate(path):
""" Calculate crc32 of the given file """
"""Calculate crc32 of the given file"""
crc = 0
with open(path, "rb") as fp:
while 1:
@@ -2299,7 +2299,7 @@ def crc_calculate(path):
def analyse_show(name):
""" Do a quick SeasonSort check and return basic facts """
"""Do a quick SeasonSort check and return basic facts"""
job = SeriesSorter(None, name, None, None)
job.match(force=True)
if job.is_match():
@@ -2386,18 +2386,18 @@ def pre_queue(nzo: NzbObject, pp, cat):
def is_sevenfile(path):
    """Return True if path has the .7z extension and the 7Zip binary is available.

    Note: when SEVEN_COMMAND is falsy, that falsy value is returned directly
    (callers use this in a boolean context).
    """
    return SEVEN_COMMAND and os.path.splitext(path)[1].lower() == ".7z"
class SevenZip:
""" Minimal emulation of ZipFile class for 7Zip """
"""Minimal emulation of ZipFile class for 7Zip"""
def __init__(self, path):
self.path = path
def namelist(self):
""" Return list of names in 7Zip """
"""Return list of names in 7Zip"""
names = []
# Future extension: use '-sccUTF-8' to get names in UTF8 encoding
command = [SEVEN_COMMAND, "l", "-p", "-y", "-slt", self.path]
@@ -2414,11 +2414,11 @@ class SevenZip:
return names
def read(self, name):
    """Extract the named file from the 7Zip archive and return its raw data"""
    command = [SEVEN_COMMAND, "e", "-p", "-y", "-so", self.path, name]
    # Ignore diagnostic output, otherwise it will be appended to content
    return run_command(command, stderr=subprocess.DEVNULL)
def close(self):
    """Close file (no-op; present to mirror the ZipFile interface)"""
    pass

View File

@@ -55,6 +55,7 @@ class NewsWrapper:
"user_ok",
"pass_ok",
"force_login",
"status_code",
)
def __init__(self, server, thrdnum, block=False):
@@ -75,17 +76,10 @@ class NewsWrapper:
self.pass_ok: bool = False
self.force_login: bool = False
self.group: Optional[str] = None
@property
def status_code(self) -> Optional[int]:
""" Shorthand to get the code """
try:
return int(self.data[0][:3])
except:
return None
self.status_code: Optional[int] = None
def init_connect(self):
""" Setup the connection in NNTP object """
"""Setup the connection in NNTP object"""
# Server-info is normally requested by initialization of
# servers in Downloader, but not when testing servers
if self.blocking and not self.server.info:
@@ -96,7 +90,7 @@ class NewsWrapper:
self.timeout = time.time() + self.server.timeout
def finish_connect(self, code: int):
""" Perform login options """
"""Perform login options"""
if not (self.server.username or self.server.password or self.force_login):
self.connected = True
self.user_sent = True
@@ -108,6 +102,7 @@ class NewsWrapper:
# Change to a sensible text
code = 481
self.data[0] = "%d %s" % (code, T("Authentication failed, check username/password."))
self.status_code = code
self.user_ok = True
self.pass_sent = True
@@ -124,7 +119,7 @@ class NewsWrapper:
elif not self.user_sent:
command = utob("authinfo user %s\r\n" % self.server.username)
self.nntp.sock.sendall(command)
self.data = []
self.clear_data()
self.user_sent = True
elif not self.user_ok:
if code == 381:
@@ -139,7 +134,7 @@ class NewsWrapper:
if self.user_ok and not self.pass_sent:
command = utob("authinfo pass %s\r\n" % self.server.password)
self.nntp.sock.sendall(command)
self.data = []
self.clear_data()
self.pass_sent = True
elif self.user_ok and not self.pass_ok:
if code != 281:
@@ -151,7 +146,7 @@ class NewsWrapper:
self.timeout = time.time() + self.server.timeout
def body(self):
""" Request the body of the article """
"""Request the body of the article"""
self.timeout = time.time() + self.server.timeout
if self.article.nzf.nzo.precheck:
if self.server.have_stat:
@@ -163,17 +158,17 @@ class NewsWrapper:
else:
command = utob("ARTICLE <%s>\r\n" % self.article.article)
self.nntp.sock.sendall(command)
self.data = []
self.clear_data()
def send_group(self, group: str):
    """Send the NNTP GROUP command and clear buffered data for the reply"""
    self.timeout = time.time() + self.server.timeout
    command = utob("GROUP %s\r\n" % group)
    self.nntp.sock.sendall(command)
    # clear_data() also resets status_code, unlike a bare "self.data = []"
    self.clear_data()
def recv_chunk(self, block: bool = False) -> Tuple[int, bool, bool]:
""" Receive data, return #bytes, done, skip """
"""Receive data, return #bytes, done, skip"""
self.timeout = time.time() + self.server.timeout
while 1:
try:
@@ -195,6 +190,12 @@ class NewsWrapper:
else:
return 0, False, True
if not self.data:
try:
self.status_code = int(chunk[:3])
except:
self.status_code = None
# Append so we can do 1 join(), much faster than multiple!
self.data.append(chunk)
@@ -213,17 +214,18 @@ class NewsWrapper:
return chunk_len, False, False
def soft_reset(self):
    """Reset connection state for the next article"""
    self.timeout = None
    self.article = None
    self.clear_data()
def clear_data(self):
    """Clear the stored raw data and the status code parsed from it"""
    self.data = []
    self.status_code = None
def hard_reset(self, wait: bool = True, send_quit: bool = True):
""" Destroy and restart """
"""Destroy and restart"""
if self.nntp:
try:
if send_quit:
@@ -382,6 +384,7 @@ class NNTP:
msg = "Failed to connect: %s" % (str(error))
msg = "%s %s@%s:%s" % (msg, self.nw.thrdnum, self.host, self.nw.server.port)
self.error_msg = msg
self.nw.server.next_busy_threads_check = 0
logging.info(msg)
self.nw.server.warning = msg

View File

@@ -79,12 +79,12 @@ def get_icon():
def have_ntfosd():
""" Return if any PyNotify (notify2) support is present """
"""Return if any PyNotify (notify2) support is present"""
return bool(_HAVE_NTFOSD)
def check_classes(gtype, section):
""" Check if `gtype` is enabled in `section` """
"""Check if `gtype` is enabled in `section`"""
try:
return sabnzbd.config.get_config(section, "%s_prio_%s" % (section, gtype))() > 0
except TypeError:
@@ -93,7 +93,7 @@ def check_classes(gtype, section):
def get_prio(gtype, section):
""" Check prio of `gtype` in `section` """
"""Check prio of `gtype` in `section`"""
try:
return sabnzbd.config.get_config(section, "%s_prio_%s" % (section, gtype))()
except TypeError:
@@ -118,7 +118,7 @@ def check_cat(section, job_cat, keyword=None):
def send_notification(title, msg, gtype, job_cat=None):
""" Send Notification message """
"""Send Notification message"""
logging.info("Sending notification: %s - %s (type=%s, job_cat=%s)", title, msg, gtype, job_cat)
# Notification Center
if sabnzbd.DARWIN and sabnzbd.cfg.ncenter_enable():
@@ -163,7 +163,7 @@ _NTFOSD = False
def send_notify_osd(title, message):
""" Send a message to NotifyOSD """
"""Send a message to NotifyOSD"""
global _NTFOSD
if not _HAVE_NTFOSD:
return T("Not available") # : Function is not available on this OS
@@ -193,7 +193,7 @@ def send_notify_osd(title, message):
def send_notification_center(title, msg, gtype):
""" Send message to macOS Notification Center """
"""Send message to macOS Notification Center"""
try:
NSUserNotification = objc.lookUpClass("NSUserNotification")
NSUserNotificationCenter = objc.lookUpClass("NSUserNotificationCenter")
@@ -211,7 +211,7 @@ def send_notification_center(title, msg, gtype):
def send_prowl(title, msg, gtype, force=False, test=None):
""" Send message to Prowl """
"""Send message to Prowl"""
if test:
apikey = test.get("prowl_apikey")
@@ -244,7 +244,7 @@ def send_prowl(title, msg, gtype, force=False, test=None):
def send_pushover(title, msg, gtype, force=False, test=None):
""" Send message to pushover """
"""Send message to pushover"""
if test:
apikey = test.get("pushover_token")
@@ -311,7 +311,7 @@ def do_send_pushover(body):
def send_pushbullet(title, msg, gtype, force=False, test=None):
""" Send message to Pushbullet """
"""Send message to Pushbullet"""
if test:
apikey = test.get("pushbullet_apikey")
@@ -346,7 +346,7 @@ def send_pushbullet(title, msg, gtype, force=False, test=None):
def send_nscript(title, msg, gtype, force=False, test=None):
""" Run user's notification script """
"""Run user's notification script"""
if test:
script = test.get("nscript_script")
nscript_parameters = test.get("nscript_parameters")

View File

@@ -20,6 +20,7 @@ sabnzbd.nzbparser - Parse and import NZB files
"""
import bz2
import gzip
import re
import time
import logging
import hashlib
@@ -35,7 +36,7 @@ from sabnzbd.misc import name_to_cat
def nzbfile_parser(raw_data, nzo):
# Load data as file-object
raw_data = raw_data.replace("http://www.newzbin.com/DTD/2003/nzb", "", 1)
raw_data = re.sub(r"""\s(xmlns="[^"]+"|xmlns='[^']+')""", "", raw_data, count=1)
nzb_tree = xml.etree.ElementTree.fromstring(raw_data)
# Hash for dupe-checking

View File

@@ -59,7 +59,7 @@ import sabnzbd.notifier as notifier
class NzbQueue:
""" Singleton NzbQueue """
"""Singleton NzbQueue"""
def __init__(self):
self.__top_only: bool = cfg.top_only()
@@ -165,7 +165,7 @@ class NzbQueue:
return result
def repair_job(self, repair_folder, new_nzb=None, password=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """
"""Reconstruct admin for a single job folder, optionally with new NZB"""
# Check if folder exists
if not repair_folder or not os.path.exists(repair_folder):
return None
@@ -207,7 +207,7 @@ class NzbQueue:
@NzbQueueLocker
def send_back(self, old_nzo: NzbObject):
""" Send back job to queue after successful pre-check """
"""Send back job to queue after successful pre-check"""
try:
nzb_path = globber_full(old_nzo.admin_path, "*.gz")[0]
except:
@@ -229,7 +229,7 @@ class NzbQueue:
@NzbQueueLocker
def save(self, save_nzo: Union[NzbObject, None, bool] = None):
""" Save queue, all nzo's or just the specified one """
"""Save queue, all nzo's or just the specified one"""
logging.info("Saving queue")
nzo_ids = []
@@ -250,7 +250,7 @@ class NzbQueue:
self.__top_only = value
def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=DEFAULT_PRIORITY, nzbname=None):
""" Create and return a placeholder nzo object """
"""Create and return a placeholder nzo object"""
logging.debug("Creating placeholder NZO")
future_nzo = NzbObject(
filename=msg,
@@ -417,7 +417,7 @@ class NzbQueue:
@NzbQueueLocker
def remove_all(self, search: str = "") -> List[str]:
""" Remove NZO's that match the search-pattern """
"""Remove NZO's that match the search-pattern"""
nzo_ids = []
search = safe_lower(search)
for nzo_id, nzo in self.__nzo_table.items():
@@ -598,7 +598,7 @@ class NzbQueue:
@NzbQueueLocker
def __set_priority(self, nzo_id, priority):
""" Sets the priority on the nzo and places it in the queue at the appropriate position """
"""Sets the priority on the nzo and places it in the queue at the appropriate position"""
try:
priority = int_conv(priority)
nzo = self.__nzo_table[nzo_id]
@@ -685,11 +685,12 @@ class NzbQueue:
return -1
@staticmethod
def reset_try_lists(article: Article, remove_fetcher_from_trylist: bool = True):
    """Let the article get a new fetcher and reset the related trylists.

    When remove_fetcher_from_trylist is True, the current fetcher is removed
    from the article's own trylist so that server becomes eligible again; the
    file and job trylists are always reset.
    """
    if remove_fetcher_from_trylist:
        article.remove_from_try_list(article.fetcher)
    article.fetcher = None
    article.tries = 0
    article.nzf.reset_try_list()
    article.nzf.nzo.reset_try_list()
@@ -702,7 +703,7 @@ class NzbQueue:
return True
return False
def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
"""Get next article for jobs in the queue
Not locked for performance, since it only reads the queue
"""
@@ -718,12 +719,13 @@ class NzbQueue:
or (nzo.avg_stamp + propagation_delay) < time.time()
):
if not nzo.server_in_try_list(server):
article = nzo.get_article(server, servers)
if article:
return article
articles = nzo.get_articles(server, servers, fetch_limit)
if articles:
return articles
# Stop after first job that wasn't paused/propagating/etc
if self.__top_only:
return
return []
return []
def register_article(self, article: Article, success: bool = True):
"""Register the articles we tried
@@ -771,7 +773,7 @@ class NzbQueue:
@NzbQueueLocker
def end_job(self, nzo: NzbObject):
""" Send NZO to the post-processing queue """
"""Send NZO to the post-processing queue"""
# Notify assembler to call postprocessor
if not nzo.deleted:
logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
@@ -857,8 +859,11 @@ class NzbQueue:
return empty
def stop_idle_jobs(self):
""" Detect jobs that have zero files left and send them to post processing """
"""Detect jobs that have zero files left and send them to post processing"""
# Only check servers that are active
nr_servers = len([server for server in sabnzbd.Downloader.servers[:] if server.active])
empty = []
for nzo in self.__nzo_list:
if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
logging.info("Found idle job %s", nzo.final_name)
@@ -866,10 +871,10 @@ class NzbQueue:
# Stall prevention by checking if all servers are in the trylist
# This is a CPU-cheaper alternative to prevent stalling
if len(nzo.try_list) == sabnzbd.Downloader.server_nr:
if len(nzo.try_list) >= nr_servers:
# Maybe the NZF's need a reset too?
for nzf in nzo.files:
if len(nzf.try_list) == sabnzbd.Downloader.server_nr:
if len(nzf.try_list) >= nr_servers:
# We do not want to reset all article trylists, they are good
logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
nzf.reset_try_list()
@@ -906,7 +911,7 @@ class NzbQueue:
nzo.status = Status.QUEUED
def get_urls(self):
""" Return list of future-types needing URL """
"""Return list of future-types needing URL"""
lst = []
for nzo_id in self.__nzo_table:
nzo = self.__nzo_table[nzo_id]

View File

@@ -108,37 +108,42 @@ class TryList:
"""TryList keeps track of which servers have been tried for a specific article"""
# Pre-define attributes to save memory
__slots__ = ("try_list", "fetcher_priority")
__slots__ = ("try_list",)
def __init__(self):
    # Only "try_list" remains in __slots__; fetcher_priority lives on Article now
    self.try_list: List[Server] = []
def server_in_try_list(self, server: Server):
    """Return whether the specified server has been tried already"""
    with TRYLIST_LOCK:
        return server in self.try_list
def add_to_try_list(self, server: Server):
    """Register server as having been tried already (no duplicates)"""
    with TRYLIST_LOCK:
        if server not in self.try_list:
            self.try_list.append(server)
def remove_from_try_list(self, server: Server):
    """Remove server from the list of already-tried servers, if present"""
    with TRYLIST_LOCK:
        try:
            self.try_list.remove(server)
        except ValueError:
            # Server was not in the list; nothing to do
            pass
def reset_try_list(self):
    """Clean the list of tried servers"""
    with TRYLIST_LOCK:
        self.try_list = []
def __getstate__(self):
    """Save the servers for pickling.

    Serialized as server ids; resolved back to Server objects in __setstate__.
    """
    return [server.id for server in self.try_list]
def __setstate__(self, servers_ids: List[str]):
    """Restore the trylist from pickled server ids, matching them against
    the currently configured servers; ids of removed servers are dropped."""
    self.try_list = []
    for server in sabnzbd.Downloader.servers:
        if server.id in servers_ids:
            self.add_to_try_list(server)
##############################################################################
@@ -148,25 +153,32 @@ ArticleSaver = ("article", "art_id", "bytes", "lowest_partnum", "decoded", "on_d
class Article(TryList):
""" Representation of one article """
"""Representation of one article"""
# Pre-define attributes to save memory
__slots__ = ArticleSaver + ("fetcher", "fetcher_priority", "tries")
def __init__(self, article, article_bytes, nzf):
    """Set up a single article belonging to NzbFile nzf"""
    super().__init__()
    self.article: str = article
    self.art_id: Optional[str] = None
    self.bytes: int = article_bytes
    self.lowest_partnum: bool = False
    self.fetcher: Optional[Server] = None
    self.fetcher_priority: int = 0
    self.tries: int = 0  # Try count
    self.decoded: bool = False
    self.on_disk: bool = False
    self.nzf: NzbFile = nzf
def reset_try_list(self):
    """Besides the regular try-list reset, also clear the fetcher and its
    priority so that every server becomes eligible for this article again."""
    self.fetcher_priority = 0
    self.fetcher = None
    super().reset_try_list()
def get_article(self, server: Server, servers: List[Server]):
""" Return article when appropriate for specified server """
"""Return article when appropriate for specified server"""
log = sabnzbd.LOG_ALL
if not self.fetcher and not self.server_in_try_list(server):
if log:
@@ -229,7 +241,7 @@ class Article(TryList):
return None
def get_art_id(self):
    """Return the unique article storage name, creating it first if needed"""
    if not self.art_id:
        self.art_id = sabnzbd.get_new_id("article", self.nzf.nzo.admin_path)
    return self.art_id
@@ -239,20 +251,20 @@ class Article(TryList):
# Since we need a new server, this one can be listed as failed
sabnzbd.BPSMeter.register_server_article_failed(self.fetcher.id)
self.add_to_try_list(self.fetcher)
for server in sabnzbd.Downloader.servers:
# Servers-list could be modified during iteration, so we need a copy
for server in sabnzbd.Downloader.servers[:]:
if server.active and not self.server_in_try_list(server):
if server.priority >= self.fetcher.priority:
self.tries = 0
# Allow all servers for this nzo and nzf again (but not for this article)
sabnzbd.NzbQueue.reset_try_lists(self, article_reset=False)
# Allow all servers for this nzo and nzf again (but not this fetcher for this article)
sabnzbd.NzbQueue.reset_try_lists(self, remove_fetcher_from_trylist=False)
return True
logging.info(T("%s => missing from all servers, discarding") % self)
self.nzf.nzo.increase_bad_articles_counter("missing_articles")
logging.info("Article %s unavailable on all servers, discarding", self.article)
return False
def __getstate__(self):
""" Save to pickle file, selecting attributes """
"""Save to pickle file, selecting attributes"""
dict_ = {}
for item in ArticleSaver:
dict_[item] = getattr(self, item)
@@ -260,7 +272,7 @@ class Article(TryList):
return dict_
def __setstate__(self, dict_):
""" Load from pickle file, selecting attributes """
"""Load from pickle file, selecting attributes"""
for item in ArticleSaver:
try:
setattr(self, item, dict_[item])
@@ -268,12 +280,12 @@ class Article(TryList):
# Handle new attributes
setattr(self, item, None)
super().__setstate__(dict_.get("try_list", []))
self.fetcher_priority = 0
self.fetcher = None
self.fetcher_priority = 0
self.tries = 0
def __eq__(self, other):
    """Articles with the same usenet address are considered equal"""
    return self.article == other.article
def __hash__(self):
@@ -292,7 +304,6 @@ class Article(TryList):
##############################################################################
NzbFileSaver = (
"date",
"subject",
"filename",
"filename_checked",
"filepath",
@@ -316,17 +327,16 @@ NzbFileSaver = (
class NzbFile(TryList):
""" Representation of one file consisting of multiple articles """
"""Representation of one file consisting of multiple articles"""
# Pre-define attributes to save memory
__slots__ = NzbFileSaver + ("md5",)
def __init__(self, date, subject, raw_article_db, file_bytes, nzo):
""" Setup object """
"""Setup object"""
super().__init__()
self.date: datetime.datetime = date
self.subject: str = subject
self.type: Optional[str] = None
self.filename: str = sanitize_filename(name_extractor(subject))
self.filename_checked = False
@@ -348,13 +358,12 @@ class NzbFile(TryList):
self.nzo: NzbObject = nzo
self.nzf_id: str = sabnzbd.get_new_id("nzf", nzo.admin_path)
self.deleted = False
self.valid = False
self.import_finished = False
self.md5 = None
self.md5sum: Optional[bytes] = None
self.md5of16k: Optional[bytes] = None
self.valid = bool(raw_article_db)
self.valid: bool = bool(raw_article_db)
if self.valid and self.nzf_id:
# Save first article separate so we can do
@@ -377,7 +386,7 @@ class NzbFile(TryList):
self.import_finished = True
def finish_import(self):
""" Load the article objects from disk """
"""Load the article objects from disk"""
logging.debug("Finishing import on %s", self.filename)
raw_article_db = sabnzbd.load_data(self.nzf_id, self.nzo.admin_path, remove=False)
if raw_article_db:
@@ -396,14 +405,14 @@ class NzbFile(TryList):
self.import_finished = True
def add_article(self, article_info):
""" Add article to object database and return article object """
"""Add article to object database and return article object"""
article = Article(article_info[0], article_info[1], self)
self.articles.append(article)
self.decodetable.append(article)
return article
def remove_article(self, article: Article, success: bool) -> int:
""" Handle completed article, possibly end of file """
"""Handle completed article, possibly end of file"""
if article in self.articles:
self.articles.remove(article)
if success:
@@ -411,28 +420,32 @@ class NzbFile(TryList):
return len(self.articles)
def set_par2(self, setname, vol, blocks):
""" Designate this this file as a par2 file """
"""Designate this this file as a par2 file"""
self.is_par2 = True
self.setname = setname
self.vol = vol
self.blocks = int_conv(blocks)
def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
""" Get next article to be downloaded """
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
"""Get next articles to be downloaded"""
articles = []
for article in self.articles:
article = article.get_article(server, servers)
if article:
return article
articles.append(article)
if len(articles) >= fetch_limit:
return articles
self.add_to_try_list(server)
return articles
def reset_all_try_lists(self):
""" Clear all lists of visited servers """
"""Clear all lists of visited servers"""
for art in self.articles:
art.reset_try_list()
self.reset_try_list()
def prepare_filepath(self):
""" Do all checks before making the final path """
"""Do all checks before making the final path"""
if not self.filepath:
self.nzo.verify_nzf_filename(self)
filename = sanitize_filename(self.filename)
@@ -442,11 +455,11 @@ class NzbFile(TryList):
@property
def completed(self):
""" Is this file completed? """
"""Is this file completed?"""
return self.import_finished and not bool(self.articles)
def remove_admin(self):
""" Remove article database from disk (sabnzbd_nzf_<id>)"""
"""Remove article database from disk (sabnzbd_nzf_<id>)"""
try:
logging.debug("Removing article database for %s", self.nzf_id)
remove_file(os.path.join(self.nzo.admin_path, self.nzf_id))
@@ -454,7 +467,7 @@ class NzbFile(TryList):
pass
def __getstate__(self):
""" Save to pickle file, selecting attributes """
"""Save to pickle file, selecting attributes"""
dict_ = {}
for item in NzbFileSaver:
dict_[item] = getattr(self, item)
@@ -462,7 +475,7 @@ class NzbFile(TryList):
return dict_
def __setstate__(self, dict_):
""" Load from pickle file, selecting attributes """
"""Load from pickle file, selecting attributes"""
for item in NzbFileSaver:
try:
setattr(self, item, dict_[item])
@@ -673,7 +686,7 @@ class NzbObject(TryList):
self.first_articles_count = 0
self.saved_articles: List[Article] = []
self.nzo_id = None
self.nzo_id: Optional[str] = None
self.futuretype = futuretype
self.deleted = False
@@ -991,7 +1004,7 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def postpone_pars(self, nzf: NzbFile, parset: str):
""" Move all vol-par files matching 'parset' to the extrapars table """
"""Move all vol-par files matching 'parset' to the extrapars table"""
# Create new extrapars if it didn't already exist
# For example if created when the first par2 file was missing
if parset not in self.extrapars:
@@ -1002,10 +1015,9 @@ class NzbObject(TryList):
lparset = parset.lower()
for xnzf in self.files[:]:
name = xnzf.filename or xnzf.subject
# Move only when not current NZF and filename was extractable from subject
if name:
setname, vol, block = sabnzbd.par2file.analyse_par2(name)
if xnzf.filename:
setname, vol, block = sabnzbd.par2file.analyse_par2(xnzf.filename)
# Don't postpone header-only-files, to extract all possible md5of16k
if setname and block and matcher(lparset, setname.lower()):
xnzf.set_par2(parset, vol, block)
@@ -1025,7 +1037,7 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def handle_par2(self, nzf: NzbFile, filepath):
""" Check if file is a par2 and build up par2 collection """
"""Check if file is a par2 and build up par2 collection"""
# Need to remove it from the other set it might be in
self.remove_extrapar(nzf)
@@ -1133,7 +1145,7 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def remove_article(self, article: Article, success: bool):
""" Remove article from the NzbFile and do check if it can succeed"""
"""Remove article from the NzbFile and do check if it can succeed"""
job_can_succeed = True
nzf = article.nzf
@@ -1211,47 +1223,46 @@ class NzbObject(TryList):
pass
def check_existing_files(self, wdir: str):
""" Check if downloaded files already exits, for these set NZF to complete """
"""Check if downloaded files already exits, for these set NZF to complete"""
fix_unix_encoding(wdir)
# Get a list of already present files, ignore folders
files = globber(wdir, "*.*")
existing_files = globber(wdir, "*.*")
# Substitute renamed files
renames = sabnzbd.load_data(RENAMES_FILE, self.admin_path, remove=True)
if renames:
for name in renames:
if name in files or renames[name] in files:
if name in files:
files.remove(name)
files.append(renames[name])
if name in existing_files or renames[name] in existing_files:
if name in existing_files:
existing_files.remove(name)
existing_files.append(renames[name])
self.renames = renames
# Looking for the longest name first, minimizes the chance on a mismatch
files.sort(key=len)
existing_files.sort(key=len)
# The NZFs should be tried shortest first, to improve the chance on a proper match
nzfs = self.files[:]
nzfs.sort(key=lambda x: len(x.subject))
nzfs.sort(key=lambda x: len(x.filename))
# Flag files from NZB that already exist as finished
for filename in files[:]:
for existing_filename in existing_files[:]:
for nzf in nzfs:
subject = sanitize_filename(name_extractor(nzf.subject))
if (nzf.filename == filename) or (subject == filename) or (filename in subject):
logging.info("Existing file %s matched to file %s of %s", filename, nzf.filename, self.final_name)
nzf.filename = filename
if existing_filename in nzf.filename:
logging.info("Matched file %s to %s of %s", existing_filename, nzf.filename, self.final_name)
nzf.filename = existing_filename
nzf.bytes_left = 0
self.remove_nzf(nzf)
nzfs.remove(nzf)
files.remove(filename)
existing_files.remove(existing_filename)
# Set bytes correctly
self.bytes_tried += nzf.bytes
self.bytes_downloaded += nzf.bytes
# Process par2 files
filepath = os.path.join(wdir, filename)
filepath = os.path.join(wdir, existing_filename)
if sabnzbd.par2file.is_parfile(filepath):
self.handle_par2(nzf, filepath)
self.bytes_par2 += nzf.bytes
@@ -1259,16 +1270,16 @@ class NzbObject(TryList):
# Create an NZF for each remaining existing file
try:
for filename in files:
for existing_filename in existing_files:
# Create NZO's using basic information
filepath = os.path.join(wdir, filename)
logging.info("Existing file %s added to %s", filename, self.final_name)
filepath = os.path.join(wdir, existing_filename)
logging.info("Existing file %s added to %s", existing_filename, self.final_name)
tup = os.stat(filepath)
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
nzf = NzbFile(tm, filename, [], tup.st_size, self)
nzf = NzbFile(tm, existing_filename, [], tup.st_size, self)
self.files.append(nzf)
self.files_table[nzf.nzf_id] = nzf
nzf.filename = filename
nzf.filename = existing_filename
self.remove_nzf(nzf)
# Set bytes correctly
@@ -1300,7 +1311,7 @@ class NzbObject(TryList):
self.abort_direct_unpacker()
def set_priority(self, value: Any):
""" Check if this is a valid priority """
"""Check if this is a valid priority"""
# When unknown (0 is a known one), set to DEFAULT
if value == "" or value is None:
self.priority = DEFAULT_PRIORITY
@@ -1344,7 +1355,7 @@ class NzbObject(TryList):
@property
def labels(self):
""" Return (translated) labels of job """
"""Return (translated) labels of job"""
labels = []
if self.duplicate:
labels.append(T("DUPLICATE"))
@@ -1415,18 +1426,18 @@ class NzbObject(TryList):
self.unwanted_ext = 2
@synchronized(NZO_LOCK)
def add_parfile(self, parfile):
def add_parfile(self, parfile: NzbFile):
"""Add parfile to the files to be downloaded
Resets trylist just to be sure
Adjust download-size accordingly
"""
if not parfile.completed and parfile not in self.files and parfile not in self.finished_files:
parfile.reset_all_try_lists()
parfile.reset_try_list()
self.files.append(parfile)
self.bytes_tried -= parfile.bytes_left
@synchronized(NZO_LOCK)
def remove_parset(self, setname):
def remove_parset(self, setname: str):
if setname in self.extrapars:
self.extrapars.pop(setname)
if setname in self.partable:
@@ -1434,7 +1445,7 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def remove_extrapar(self, parfile: NzbFile):
""" Remove par file from any/all sets """
"""Remove par file from any/all sets"""
for _set in self.extrapars:
if parfile in self.extrapars[_set]:
self.extrapars[_set].remove(parfile)
@@ -1460,18 +1471,18 @@ class NzbObject(TryList):
self.reset_try_list()
def add_to_direct_unpacker(self, nzf: NzbFile):
""" Start or add to DirectUnpacker """
"""Start or add to DirectUnpacker"""
if not self.direct_unpacker:
sabnzbd.directunpacker.DirectUnpacker(self)
self.direct_unpacker.add(nzf)
def abort_direct_unpacker(self):
""" Abort any running DirectUnpackers """
"""Abort any running DirectUnpackers"""
if self.direct_unpacker:
self.direct_unpacker.abort()
def check_availability_ratio(self):
""" Determine if we are still meeting the required ratio """
"""Determine if we are still meeting the required ratio"""
availability_ratio = req_ratio = cfg.req_completion_rate()
# Rare case where the NZB only consists of par2 files
@@ -1511,15 +1522,14 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def set_download_report(self):
""" Format the stats for the history information """
"""Format the stats for the history information"""
# Pretty-format the per-server stats
if self.servercount:
# Sort the servers first
servers = config.get_servers()
server_names = sorted(
servers,
key=lambda svr: "%d%02d%s"
% (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()),
key=lambda svr: "%02d%s" % (servers[svr].priority(), servers[svr].displayname().lower()),
)
msgs = [
"%s=%sB" % (servers[server_name].displayname(), to_units(self.servercount[server_name]))
@@ -1564,29 +1574,32 @@ class NzbObject(TryList):
self.set_unpack_info("Source", self.url, unique=True)
@synchronized(NZO_LOCK)
def increase_bad_articles_counter(self, article_type):
""" Record information about bad articles """
def increase_bad_articles_counter(self, article_type: str):
"""Record information about bad articles"""
if article_type not in self.nzo_info:
self.nzo_info[article_type] = 0
self.nzo_info[article_type] += 1
self.bad_articles += 1
def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
article = None
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
articles = []
nzf_remove_list = []
# Did we go through all first-articles?
if self.first_articles:
for article_test in self.first_articles:
article = article_test.get_article(server, servers)
if article:
if not article:
break
articles.append(article)
if len(articles) >= fetch_limit:
break
# Move on to next ones
if not article:
if not articles:
for nzf in self.files:
if nzf.deleted:
logging.debug("Skipping existing file %s", nzf.filename or nzf.subject)
logging.debug("Skipping existing file %s", nzf.filename)
else:
# Don't try to get an article if server is in try_list of nzf
if not nzf.server_in_try_list(server):
@@ -1602,10 +1615,10 @@ class NzbObject(TryList):
nzf.nzo.status = Status.PAUSED
continue
else:
continue
break
article = nzf.get_article(server, servers)
if article:
articles = nzf.get_articles(server, servers, fetch_limit)
if articles:
break
# Remove all files for which admin could not be read
@@ -1617,10 +1630,10 @@ class NzbObject(TryList):
if nzf_remove_list and not self.files:
sabnzbd.NzbQueue.end_job(self)
if not article:
if not articles:
# No articles for this server, block for next time
self.add_to_try_list(server)
return article
return articles
@synchronized(NZO_LOCK)
def move_top_bulk(self, nzf_ids):
@@ -1691,7 +1704,7 @@ class NzbObject(TryList):
self.files[pos] = tmp_nzf
def verify_nzf_filename(self, nzf: NzbFile, yenc_filename: Optional[str] = None):
""" Get filename from par2-info or from yenc """
"""Get filename from par2-info or from yenc"""
# Already done?
if nzf.filename_checked:
return
@@ -1784,12 +1797,12 @@ class NzbObject(TryList):
@property
def admin_path(self):
""" Return the full path for my job-admin folder """
"""Return the full path for my job-admin folder"""
return long_path(get_admin_path(self.work_name, self.futuretype))
@property
def download_path(self):
""" Return the full path for the download folder """
"""Return the full path for the download folder"""
if self.futuretype:
return ""
else:
@@ -1804,12 +1817,12 @@ class NzbObject(TryList):
@property
def remaining(self):
""" Return remaining bytes """
"""Return remaining bytes"""
return self.bytes - self.bytes_tried
@synchronized(NZO_LOCK)
def purge_data(self, delete_all_data=True):
""" Remove (all) job data """
"""Remove (all) job data"""
logging.info(
"[%s] Purging data for job %s (delete_all_data=%s)", caller_name(), self.final_name, delete_all_data
)
@@ -1904,13 +1917,13 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def save_to_disk(self):
""" Save job's admin to disk """
"""Save job's admin to disk"""
self.save_attribs()
if self.nzo_id and not self.is_gone():
sabnzbd.save_data(self, self.nzo_id, self.admin_path)
def save_attribs(self):
""" Save specific attributes for Retry """
"""Save specific attributes for Retry"""
attribs = {}
for attrib in NzoAttributeSaver:
attribs[attrib] = getattr(self, attrib)
@@ -1918,7 +1931,7 @@ class NzbObject(TryList):
sabnzbd.save_data(attribs, ATTRIB_FILE, self.admin_path, silent=True)
def load_attribs(self) -> Tuple[Optional[str], Optional[int], Optional[str]]:
""" Load saved attributes and return them to be parsed """
"""Load saved attributes and return them to be parsed"""
attribs = sabnzbd.load_data(ATTRIB_FILE, self.admin_path, remove=False)
logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
@@ -2001,11 +2014,11 @@ class NzbObject(TryList):
return res, series
def is_gone(self):
""" Is this job still going somehow? """
"""Is this job still going somehow?"""
return self.status in (Status.COMPLETED, Status.DELETED, Status.FAILED)
def __getstate__(self):
""" Save to pickle file, selecting attributes """
"""Save to pickle file, selecting attributes"""
dict_ = {}
for item in NzbObjectSaver:
dict_[item] = getattr(self, item)
@@ -2013,7 +2026,7 @@ class NzbObject(TryList):
return dict_
def __setstate__(self, dict_):
""" Load from pickle file, selecting attributes """
"""Load from pickle file, selecting attributes"""
for item in NzbObjectSaver:
try:
setattr(self, item, dict_[item])
@@ -2098,7 +2111,7 @@ def nzf_cmp_name(nzf1: NzbFile, nzf2: NzbFile):
def create_work_name(name: str) -> str:
""" Remove ".nzb" and ".par(2)" and sanitize, skip URL's """
"""Remove ".nzb" and ".par(2)" and sanitize, skip URL's"""
if name.find("://") < 0:
# In case it was one of these, there might be more
# Need to remove any invalid characters before starting
@@ -2113,7 +2126,7 @@ def create_work_name(name: str) -> str:
def scan_password(name: str) -> Tuple[str, Optional[str]]:
""" Get password (if any) from the title """
"""Get password (if any) from the title"""
if "http://" in name or "https://" in name:
return name, None
@@ -2153,7 +2166,7 @@ def scan_password(name: str) -> Tuple[str, Optional[str]]:
def name_extractor(subject: str) -> str:
""" Try to extract a file name from a subject line, return `subject` if in doubt """
"""Try to extract a file name from a subject line, return `subject` if in doubt"""
result = subject
# Filename nicely wrapped in quotes
for name in re.findall(RE_SUBJECT_FILENAME_QUOTES, subject):
@@ -2173,7 +2186,7 @@ def name_extractor(subject: str) -> str:
def matcher(pattern, txt):
""" Return True if `pattern` is sufficiently equal to `txt` """
"""Return True if `pattern` is sufficiently equal to `txt`"""
if txt.endswith(pattern):
txt = txt[: txt.rfind(pattern)].strip()
return (not txt) or txt.endswith('"')

View File

@@ -289,7 +289,7 @@ class SABnzbdDelegate(NSObject):
# Fetch history items
if not self.history_db:
self.history_db = sabnzbd.database.HistoryDB()
items, fetched_items, _total_items = self.history_db.fetch_history(0, 10, None)
items, fetched_items, _total_items = self.history_db.fetch_history(limit=10)
self.menu_history = NSMenu.alloc().init()
self.failedAttributes = {

View File

@@ -148,7 +148,7 @@ def MSG_SQLITE():
def panic_message(panic_code, a=None, b=None):
""" Create the panic message from templates """
"""Create the panic message from templates"""
if sabnzbd.WIN32:
os_str = T("Press Startkey+R and type the line (example):")
prog_path = '"%s"' % sabnzbd.MY_FULLNAME
@@ -222,7 +222,7 @@ def panic(reason, remedy=""):
def launch_a_browser(url, force=False):
""" Launch a browser pointing to the URL """
"""Launch a browser pointing to the URL"""
if not force and not cfg.autobrowser() or sabnzbd.DAEMON:
return
@@ -259,7 +259,7 @@ def show_error_dialog(msg):
def error_page_401(status, message, traceback, version):
""" Custom handler for 401 error """
"""Custom handler for 401 error"""
title = T("Access denied")
body = T("Error %s: You need to provide a valid username and password.") % status
return r"""
@@ -279,7 +279,7 @@ def error_page_401(status, message, traceback, version):
def error_page_404(status, message, traceback, version):
""" Custom handler for 404 error, redirect to main page """
"""Custom handler for 404 error, redirect to main page"""
return (
r"""
<html>

View File

@@ -23,14 +23,17 @@ import logging
import os
import re
import struct
from typing import Dict, Optional, Tuple
from typing import Dict, Optional, Tuple, BinaryIO
from sabnzbd.constants import MEBI
from sabnzbd.encoding import correct_unknown_encoding
PROBABLY_PAR2_RE = re.compile(r"(.*)\.vol(\d*)[+\-](\d*)\.par2", re.I)
SCAN_LIMIT = 10 * MEBI
PAR_PKT_ID = b"PAR2\x00PKT"
PAR_MAIN_ID = b"PAR 2.0\x00Main\x00\x00\x00\x00"
PAR_FILE_ID = b"PAR 2.0\x00FileDesc"
PAR_CREATOR_ID = b"PAR 2.0\x00Creator"
PAR_CREATOR_ID = b"PAR 2.0\x00Creator\x00"
PAR_RECOVERY_ID = b"RecvSlic"
@@ -91,22 +94,34 @@ def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Dict[str, bytes]:
For a full description of the par2 specification, visit:
http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
"""
total_size = os.path.getsize(fname)
table = {}
duplicates16k = []
total_nr_files = None
try:
with open(fname, "rb") as f:
header = f.read(8)
while header:
name, filehash, hash16k = parse_par2_file_packet(f, header)
if name:
table[name] = filehash
if hash16k not in md5of16k:
md5of16k[hash16k] = name
elif md5of16k[hash16k] != name:
# Not unique and not already linked to this file
# Remove to avoid false-renames
duplicates16k.append(hash16k)
if header == PAR_PKT_ID:
name, filehash, hash16k, nr_files = parse_par2_packet(f)
if name:
table[name] = filehash
if hash16k not in md5of16k:
md5of16k[hash16k] = name
elif md5of16k[hash16k] != name:
# Not unique and not already linked to this file
# Remove to avoid false-renames
duplicates16k.append(hash16k)
# Store the number of files for later
if nr_files:
total_nr_files = nr_files
# On large files, we stop after seeing all the listings
# On smaller files, we scan them fully to get the par2-creator
if total_size > SCAN_LIMIT and len(table) == total_nr_files:
break
header = f.read(8)
@@ -129,13 +144,18 @@ def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Dict[str, bytes]:
return table
def parse_par2_file_packet(f, header) -> Tuple[Optional[str], Optional[bytes], Optional[bytes]]:
""" Look up and analyze a FileDesc package """
def parse_par2_packet(f: BinaryIO) -> Tuple[Optional[str], Optional[bytes], Optional[bytes], Optional[int]]:
"""Look up and analyze a PAR2 packet"""
nothing = None, None, None
filename, filehash, hash16k, nr_files = nothing = None, None, None, None
if header != PAR_PKT_ID:
return nothing
# All packages start with a header before the body
# 8 : PAR2\x00PKT
# 8 : Length of the entire packet. Must be multiple of 4. (NB: Includes length of header.)
# 16 : MD5 Hash of packet. Calculation starts at first byte of Recovery Set ID and ends at last byte of body.
# 16 : Recovery Set ID.
# 16 : Type of packet.
# ?*4 : Body of Packet. Must be a multiple of 4 bytes.
# Length must be multiple of 4 and at least 20
pack_len = struct.unpack("<Q", f.read(8))[0]
@@ -146,31 +166,37 @@ def parse_par2_file_packet(f, header) -> Tuple[Optional[str], Optional[bytes], O
md5sum = f.read(16)
# Read and check the data
# Subtract 32 because we already read these bytes of the header
data = f.read(pack_len - 32)
md5 = hashlib.md5()
md5.update(data)
if md5sum != md5.digest():
return nothing
# The FileDesc packet looks like:
# 16 : "PAR 2.0\0FileDesc"
# 16 : FileId
# 16 : Hash for full file **
# 16 : Hash for first 16K
# 8 : File length
# xx : Name (multiple of 4, padded with \0 if needed) **
# See if it's any of the packages we care about
par2_packet_type = data[16:32]
# See if it's the right packet and get name + hash
for offset in range(0, pack_len, 8):
if data[offset : offset + 16] == PAR_FILE_ID:
filehash = data[offset + 32 : offset + 48]
hash16k = data[offset + 48 : offset + 64]
filename = correct_unknown_encoding(data[offset + 72 :].strip(b"\0"))
return filename, filehash, hash16k
elif data[offset : offset + 15] == PAR_CREATOR_ID:
# From here until the end is the creator-text
# Useful in case of bugs in the par2-creating software
par2creator = data[offset + 16 :].strip(b"\0") # Remove any trailing \0
logging.debug("Par2-creator of %s is: %s", os.path.basename(f.name), correct_unknown_encoding(par2creator))
if par2_packet_type == PAR_FILE_ID:
# The FileDesc packet looks like:
# 16 : "PAR 2.0\0FileDesc"
# 16 : FileId
# 16 : Hash for full file
# 16 : Hash for first 16K
# 8 : File length
# xx : Name (multiple of 4, padded with \0 if needed)
filehash = data[48:64]
hash16k = data[64:80]
filename = correct_unknown_encoding(data[88:].strip(b"\0"))
elif par2_packet_type == PAR_CREATOR_ID:
# From here until the end is the creator-text
# Useful in case of bugs in the par2-creating software
par2creator = data[32:].strip(b"\0") # Remove any trailing \0
logging.debug("Par2-creator of %s is: %s", os.path.basename(f.name), correct_unknown_encoding(par2creator))
elif par2_packet_type == PAR_MAIN_ID:
# The Main packet looks like:
# 16 : "PAR 2.0\0Main"
# 8 : Slice size
# 4 : Number of files in the recovery set
nr_files = struct.unpack("<I", data[40:44])[0]
return nothing
return filename, filehash, hash16k, nr_files

View File

@@ -55,7 +55,7 @@ from sabnzbd.filesystem import (
cleanup_empty_directories,
fix_unix_encoding,
sanitize_and_trim_path,
sanitize_files_in_folder,
sanitize_files,
remove_file,
listdir_full,
setname_from_path,
@@ -98,10 +98,10 @@ RE_SAMPLE = re.compile(sample_match, re.I)
class PostProcessor(Thread):
""" PostProcessor thread, designed as Singleton """
"""PostProcessor thread, designed as Singleton"""
def __init__(self):
""" Initialize PostProcessor thread """
"""Initialize PostProcessor thread"""
super().__init__()
# This history queue is simply used to log what active items to display in the web_ui
@@ -130,12 +130,12 @@ class PostProcessor(Thread):
self.paused = False
def save(self):
""" Save postproc queue """
"""Save postproc queue"""
logging.info("Saving postproc queue")
sabnzbd.save_admin((POSTPROC_QUEUE_VERSION, self.history_queue), POSTPROC_QUEUE_FILE_NAME)
def load(self):
""" Save postproc queue """
"""Save postproc queue"""
logging.info("Loading postproc queue")
data = sabnzbd.load_admin(POSTPROC_QUEUE_FILE_NAME)
if data is None:
@@ -151,7 +151,7 @@ class PostProcessor(Thread):
logging.info("Traceback: ", exc_info=True)
def delete(self, nzo_id, del_files=False):
""" Remove a job from the post processor queue """
"""Remove a job from the post processor queue"""
for nzo in self.history_queue:
if nzo.nzo_id == nzo_id:
if nzo.status in (Status.FAILED, Status.COMPLETED):
@@ -164,7 +164,7 @@ class PostProcessor(Thread):
break
def process(self, nzo: NzbObject):
""" Push on finished job in the queue """
"""Push on finished job in the queue"""
# Make sure we return the status "Waiting"
nzo.status = Status.QUEUED
if nzo not in self.history_queue:
@@ -179,7 +179,7 @@ class PostProcessor(Thread):
sabnzbd.history_updated()
def remove(self, nzo: NzbObject):
""" Remove given nzo from the queue """
"""Remove given nzo from the queue"""
try:
self.history_queue.remove(nzo)
except:
@@ -188,13 +188,13 @@ class PostProcessor(Thread):
sabnzbd.history_updated()
def stop(self):
""" Stop thread after finishing running job """
"""Stop thread after finishing running job"""
self.__stop = True
self.slow_queue.put(None)
self.fast_queue.put(None)
def cancel_pp(self, nzo_id):
""" Change the status, so that the PP is canceled """
"""Change the status, so that the PP is canceled"""
for nzo in self.history_queue:
if nzo.nzo_id == nzo_id:
nzo.abort_direct_unpacker()
@@ -210,22 +210,22 @@ class PostProcessor(Thread):
return None
def empty(self):
""" Return True if pp queue is empty """
"""Return True if pp queue is empty"""
return self.slow_queue.empty() and self.fast_queue.empty() and not self.__busy
def get_queue(self):
""" Return list of NZOs that still need to be processed """
"""Return list of NZOs that still need to be processed"""
return [nzo for nzo in self.history_queue if nzo.work_name]
def get_path(self, nzo_id):
""" Return download path for given nzo_id or None when not found """
"""Return download path for given nzo_id or None when not found"""
for nzo in self.history_queue:
if nzo.nzo_id == nzo_id:
return nzo.download_path
return None
def run(self):
""" Postprocessor loop """
"""Postprocessor loop"""
# First we do a dircheck
complete_dir = sabnzbd.cfg.complete_dir.get_path()
if sabnzbd.utils.checkdir.isFAT(complete_dir):
@@ -309,7 +309,7 @@ class PostProcessor(Thread):
def process_job(nzo: NzbObject):
""" Process one job """
"""Process one job"""
start = time.time()
# keep track of whether we can continue
@@ -398,8 +398,7 @@ def process_job(nzo: NzbObject):
sabnzbd.Downloader.disconnect()
# Sanitize the resulting files
if sabnzbd.WIN32:
sanitize_files_in_folder(workdir)
sanitize_files(folder=workdir)
# Check if user allows unsafe post-processing
if flag_repair and cfg.safe_postproc():
@@ -435,9 +434,8 @@ def process_job(nzo: NzbObject):
)
logging.info("Unpacked files %s", newfiles)
if sabnzbd.WIN32:
# Sanitize the resulting files
newfiles = sanitize_files_in_folder(tmp_workdir_complete)
# Sanitize the resulting files
newfiles = sanitize_files(filelist=newfiles)
logging.info("Finished unpack_magic on %s", filename)
if cfg.safe_postproc():
@@ -734,7 +732,7 @@ def prepare_extraction_path(nzo: NzbObject):
def parring(nzo: NzbObject, workdir: str):
""" Perform par processing. Returns: (par_error, re_add) """
"""Perform par processing. Returns: (par_error, re_add)"""
logging.info("Starting verification and repair of %s", nzo.final_name)
par_error = False
re_add = False
@@ -892,7 +890,7 @@ def try_rar_check(nzo: NzbObject, rars):
def rar_renamer(nzo: NzbObject, workdir):
""" Deobfuscate rar file names: Use header and content information to give RAR-files decent names """
"""Deobfuscate rar file names: Use header and content information to give RAR-files decent names"""
nzo.status = Status.VERIFYING
nzo.set_unpack_info("Repair", T("Trying RAR renamer"))
nzo.set_action_line(T("Trying RAR renamer"), "...")
@@ -1028,7 +1026,7 @@ def rar_renamer(nzo: NzbObject, workdir):
def handle_empty_queue():
""" Check if empty queue calls for action """
"""Check if empty queue calls for action"""
if sabnzbd.NzbQueue.actives() == 0:
sabnzbd.save_state()
notifier.send_notification("SABnzbd", T("Queue finished"), "queue_done")
@@ -1116,7 +1114,7 @@ def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
def one_file_or_folder(folder):
""" If the dir only contains one file or folder, join that file/folder onto the path """
"""If the dir only contains one file or folder, join that file/folder onto the path"""
if os.path.exists(folder) and os.path.isdir(folder):
try:
cont = os.listdir(folder)
@@ -1133,7 +1131,7 @@ TAG_RE = re.compile(r"<[^>]+>")
def get_last_line(txt):
""" Return last non-empty line of a text, trim to 150 max """
"""Return last non-empty line of a text, trim to 150 max"""
# First we remove HTML code in a basic way
txt = TAG_RE.sub(" ", txt)
@@ -1201,7 +1199,7 @@ def rename_and_collapse_folder(oldpath, newpath, files):
def set_marker(folder):
""" Set marker file and return name """
"""Set marker file and return name"""
name = cfg.marker_file()
if name:
path = os.path.join(folder, name)
@@ -1217,7 +1215,7 @@ def set_marker(folder):
def del_marker(path):
""" Remove marker file """
"""Remove marker file"""
if path and os.path.exists(path):
logging.debug("Removing marker file %s", path)
try:
@@ -1237,7 +1235,7 @@ def remove_from_list(name, lst):
def try_alt_nzb(nzo):
""" Try to get a new NZB if available """
"""Try to get a new NZB if available"""
url = nzo.nzo_info.get("failure")
if url and cfg.new_nzb_on_failure():
sabnzbd.add_url(url, nzo.pp, nzo.script, nzo.cat, nzo.priority)

View File

@@ -37,7 +37,7 @@ except ImportError:
def win_power_privileges():
""" To do any power-options, the process needs higher privileges """
"""To do any power-options, the process needs higher privileges"""
flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
id_ = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
@@ -46,7 +46,7 @@ def win_power_privileges():
def win_hibernate():
""" Hibernate Windows system, returns after wakeup """
"""Hibernate Windows system, returns after wakeup"""
try:
win_power_privileges()
win32api.SetSystemPowerState(False, True)
@@ -56,7 +56,7 @@ def win_hibernate():
def win_standby():
""" Standby Windows system, returns after wakeup """
"""Standby Windows system, returns after wakeup"""
try:
win_power_privileges()
win32api.SetSystemPowerState(True, True)
@@ -66,7 +66,7 @@ def win_standby():
def win_shutdown():
""" Shutdown Windows system, never returns """
"""Shutdown Windows system, never returns"""
try:
win_power_privileges()
win32api.InitiateSystemShutdown("", "", 30, 1, 0)
@@ -80,7 +80,7 @@ def win_shutdown():
def osx_shutdown():
""" Shutdown macOS system, never returns """
"""Shutdown macOS system, never returns"""
try:
subprocess.call(["osascript", "-e", 'tell app "System Events" to shut down'])
except:
@@ -90,7 +90,7 @@ def osx_shutdown():
def osx_standby():
""" Make macOS system sleep, returns after wakeup """
"""Make macOS system sleep, returns after wakeup"""
try:
subprocess.call(["osascript", "-e", 'tell app "System Events" to sleep'])
time.sleep(10)
@@ -100,7 +100,7 @@ def osx_standby():
def osx_hibernate():
""" Make macOS system sleep, returns after wakeup """
"""Make macOS system sleep, returns after wakeup"""
osx_standby()
@@ -131,7 +131,7 @@ _LOGIND_SUCCESSFUL_RESULT = "yes"
def _get_sessionproxy():
""" Return (proxy-object, interface), (None, None) if not available """
"""Return (proxy-object, interface), (None, None) if not available"""
name = "org.freedesktop.PowerManagement"
path = "/org/freedesktop/PowerManagement"
interface = "org.freedesktop.PowerManagement"
@@ -143,7 +143,7 @@ def _get_sessionproxy():
def _get_systemproxy(method):
""" Return (proxy-object, interface, pinterface), (None, None, None) if not available """
"""Return (proxy-object, interface, pinterface), (None, None, None) if not available"""
if method == "ConsoleKit":
name = "org.freedesktop.ConsoleKit"
path = "/org/freedesktop/ConsoleKit/Manager"
@@ -173,7 +173,7 @@ def _get_systemproxy(method):
def linux_shutdown():
""" Make Linux system shutdown, never returns """
"""Make Linux system shutdown, never returns"""
if not HAVE_DBUS:
os._exit(0)
@@ -201,7 +201,7 @@ def linux_shutdown():
def linux_hibernate():
""" Make Linux system go into hibernate, returns after wakeup """
"""Make Linux system go into hibernate, returns after wakeup"""
if not HAVE_DBUS:
return
@@ -230,7 +230,7 @@ def linux_hibernate():
def linux_standby():
""" Make Linux system go into standby, returns after wakeup """
"""Make Linux system go into standby, returns after wakeup"""
if not HAVE_DBUS:
return

View File

@@ -43,7 +43,7 @@ import feedparser
def notdefault(item):
""" Return True if not 'Default|''|*' """
"""Return True if not 'Default|''|*'"""
return bool(item) and str(item).lower() not in ("default", "*", "", str(DEFAULT_PRIORITY))
@@ -132,7 +132,7 @@ class RSSReader:
@synchronized(RSS_LOCK)
def run_feed(self, feed=None, download=False, ignoreFirst=False, force=False, readout=True):
""" Run the query for one URI and apply filters """
"""Run the query for one URI and apply filters"""
self.shutdown = False
if not feed:
@@ -469,7 +469,7 @@ class RSSReader:
return msg
def run(self):
""" Run all the URI's and filters """
"""Run all the URI's and filters"""
if not sabnzbd.PAUSED_ALL:
active = False
if self.next_run < time.time():
@@ -630,7 +630,7 @@ def _HandleLink(
priority=DEFAULT_PRIORITY,
rule=0,
):
""" Process one link """
"""Process one link"""
if script == "":
script = None
if pp == "":
@@ -746,7 +746,7 @@ def _get_link(entry):
def special_rss_site(url):
""" Return True if url describes an RSS site with odd titles """
"""Return True if url describes an RSS site with odd titles"""
return cfg.rss_filenames() or match_str(url, cfg.rss_odd_titles())

View File

@@ -86,14 +86,14 @@ class SABTrayThread(SysTrayIconThread):
super().__init__(self.sabicons["default"], "SABnzbd", menu_options, None, 0, "SabTrayIcon")
def set_texts(self):
""" Cache texts for performance, doUpdates is called often """
"""Cache texts for performance, doUpdates is called often"""
self.txt_idle = T("Idle")
self.txt_paused = T("Paused")
self.txt_remaining = T("Remaining")
# called every few ms by SysTrayIconThread
def doUpdates(self):
""" Update menu info, once every 10 calls """
"""Update menu info, once every 10 calls"""
self.counter += 1
if self.counter > 10:
self.sabpaused, bytes_left, bpsnow, time_left = api.fast_queue()
@@ -143,7 +143,7 @@ class SABTrayThread(SysTrayIconThread):
self.pause()
def pausefor(self, minutes):
""" Need function for each pause-timer """
"""Need function for each pause-timer"""
sabnzbd.Scheduler.plan_resume(minutes)
def pausefor5min(self, icon):

View File

@@ -116,7 +116,7 @@ class StatusIcon(Thread):
return 1
def right_click_event(self, icon, button, time):
""" menu """
"""menu"""
menu = Gtk.Menu()
maddnzb = Gtk.MenuItem(label=T("Add NZB"))
@@ -151,7 +151,7 @@ class StatusIcon(Thread):
menu.popup(None, None, None, self.statusicon, button, time)
def addnzb(self, icon):
""" menu handlers """
"""menu handlers"""
dialog = Gtk.FileChooserDialog(title="SABnzbd - " + T("Add NZB"), action=Gtk.FileChooserAction.OPEN)
dialog.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OPEN, Gtk.ResponseType.OK)
dialog.set_select_multiple(True)

View File

@@ -47,16 +47,16 @@ class Scheduler:
self.load_schedules()
def start(self):
""" Start the scheduler """
"""Start the scheduler"""
self.scheduler.start()
def stop(self):
""" Stop the scheduler, destroy instance """
"""Stop the scheduler, destroy instance"""
logging.debug("Stopping scheduler")
self.scheduler.stop()
def restart(self, plan_restart=True):
""" Stop and start scheduler """
"""Stop and start scheduler"""
if plan_restart:
self.restart_scheduler = True
elif self.restart_scheduler:
@@ -73,7 +73,7 @@ class Scheduler:
self.scheduler.running = False
def is_alive(self):
""" Thread-like check if we are doing fine """
"""Thread-like check if we are doing fine"""
if self.scheduler.thread:
return self.scheduler.thread.is_alive()
return False
@@ -213,7 +213,7 @@ class Scheduler:
)
logging.info("Setting schedule for midnight BPS reset")
self.scheduler.add_daytime_task(sabnzbd.BPSMeter.midnight, "midnight_bps", DAILY_RANGE, None, (0, 0))
self.scheduler.add_daytime_task(sabnzbd.BPSMeter.update, "midnight_bps", DAILY_RANGE, None, (0, 0))
logging.info("Setting schedule for server expiration check")
self.scheduler.add_daytime_task(
@@ -336,11 +336,11 @@ class Scheduler:
config.save_config()
def scheduler_restart_guard(self):
""" Set flag for scheduler restart """
"""Set flag for scheduler restart"""
self.restart_scheduler = True
def scheduled_resume(self):
""" Scheduled resume, only when no oneshot resume is active """
"""Scheduled resume, only when no oneshot resume is active"""
if self.pause_end is None:
sabnzbd.unpause_all()
@@ -356,7 +356,7 @@ class Scheduler:
logging.debug("Ignoring cancelled resume")
def plan_resume(self, interval):
""" Set a scheduled resume after the interval """
"""Set a scheduled resume after the interval"""
if interval > 0:
self.pause_end = time.time() + (interval * 60)
logging.debug("Schedule resume at %s", self.pause_end)
@@ -367,7 +367,7 @@ class Scheduler:
sabnzbd.unpause_all()
def __check_diskspace(self, full_dir: str, required_space: float):
""" Resume if there is sufficient available space """
"""Resume if there is sufficient available space"""
if not cfg.fulldisk_autoresume():
self.cancel_resume_task()
return
@@ -384,7 +384,7 @@ class Scheduler:
self.cancel_resume_task()
def plan_diskspace_resume(self, full_dir: str, required_space: float):
""" Create regular check for free disk space """
"""Create regular check for free disk space"""
self.cancel_resume_task()
logging.info("Will resume when %s has more than %d GB free space", full_dir, required_space)
self.resume_task = self.scheduler.add_interval_task(
@@ -392,14 +392,14 @@ class Scheduler:
)
def cancel_resume_task(self):
""" Cancel the current auto resume task """
"""Cancel the current auto resume task"""
if self.resume_task:
logging.debug("Cancelling existing resume_task '%s'", self.resume_task.name)
self.scheduler.cancel(self.resume_task)
self.resume_task = None
def pause_int(self) -> str:
""" Return minutes:seconds until pause ends """
"""Return minutes:seconds until pause ends"""
if self.pause_end is None:
return "0"
else:
@@ -414,18 +414,18 @@ class Scheduler:
return "%s%d:%02d" % (sign, mins, sec)
def pause_check(self):
""" Unpause when time left is negative, compensate for missed schedule """
"""Unpause when time left is negative, compensate for missed schedule"""
if self.pause_end is not None and (self.pause_end - time.time()) < 0:
self.pause_end = None
logging.debug("Force resume, negative timer")
sabnzbd.unpause_all()
def plan_server(self, action, parms, interval):
""" Plan to re-activate server after 'interval' minutes """
"""Plan to re-activate server after 'interval' minutes"""
self.scheduler.add_single_task(action, "", interval * 60, args=parms)
def force_rss(self):
""" Add a one-time RSS scan, one second from now """
"""Add a one-time RSS scan, one second from now"""
self.scheduler.add_single_task(sabnzbd.RSSReader.run, "RSS", 1)

View File

@@ -118,14 +118,14 @@ COUNTRY_REP = (
def ends_in_file(path):
""" Return True when path ends with '.%ext' or '%fn' """
"""Return True when path ends with '.%ext' or '%fn'"""
_RE_ENDEXT = re.compile(r"\.%ext[{}]*$", re.I)
_RE_ENDFN = re.compile(r"%fn[{}]*$", re.I)
return bool(_RE_ENDEXT.search(path) or _RE_ENDFN.search(path))
def move_to_parent_folder(workdir):
""" Move all in 'workdir' into 'workdir/..' """
"""Move all in 'workdir' into 'workdir/..'"""
# Determine 'folder'/..
workdir = os.path.abspath(os.path.normpath(workdir))
dest = os.path.abspath(os.path.normpath(os.path.join(workdir, "..")))
@@ -148,7 +148,7 @@ def move_to_parent_folder(workdir):
class Sorter:
""" Generic Sorter class """
"""Generic Sorter class"""
def __init__(self, nzo: Optional[NzbObject], cat):
self.sorter = None
@@ -159,7 +159,7 @@ class Sorter:
self.ext = ""
def detect(self, job_name, complete_dir):
""" Detect which kind of sort applies """
"""Detect which kind of sort applies"""
self.sorter = SeriesSorter(self.nzo, job_name, complete_dir, self.cat)
if self.sorter.matched:
complete_dir = self.sorter.get_final_path()
@@ -185,12 +185,12 @@ class Sorter:
return complete_dir
def rename(self, newfiles, workdir_complete):
""" Rename files of the job """
"""Rename files of the job"""
if self.sorter.rename_or_not:
self.sorter.rename(newfiles, workdir_complete)
def rename_with_ext(self, workdir_complete):
""" Special renamer for %ext """
"""Special renamer for %ext"""
if self.sorter.rename_or_not and "%ext" in workdir_complete and self.ext:
# Replace %ext with extension
newpath = workdir_complete.replace("%ext", self.ext)
@@ -232,7 +232,7 @@ class Sorter:
class SeriesSorter:
""" Methods for Series Sorting """
"""Methods for Series Sorting"""
def __init__(self, nzo: Optional[NzbObject], job_name, path, cat):
self.matched = False
@@ -258,7 +258,7 @@ class SeriesSorter:
self.match()
def match(self, force=False):
""" Checks the regex for a match, if so set self.match to true """
"""Checks the regex for a match, if so set self.match to true"""
if force or (cfg.enable_tv_sorting() and cfg.tv_sort_string()):
if (
force
@@ -273,11 +273,11 @@ class SeriesSorter:
self.matched = True
def is_match(self):
""" Returns whether there was a match or not """
"""Returns whether there was a match or not"""
return self.matched
def get_final_path(self):
""" Collect and construct all the variables such as episode name, show names """
"""Collect and construct all the variables such as episode name, show names"""
if self.get_values():
# Get the final path
path = self.construct_path()
@@ -289,7 +289,7 @@ class SeriesSorter:
@staticmethod
def get_multi_ep_naming(one, two, extras):
""" Returns a list of unique values joined into a string and separated by - (ex:01-02-03-04) """
"""Returns a list of unique values joined into a string and separated by - (ex:01-02-03-04)"""
extra_list = [one]
extra2_list = [two]
for extra in extras:
@@ -303,7 +303,7 @@ class SeriesSorter:
return one, two
def get_shownames(self):
""" Get the show name from the match object and format it """
"""Get the show name from the match object and format it"""
# Get the formatted title and alternate title formats
self.show_info["show_tname"], self.show_info["show_tname_two"], self.show_info["show_tname_three"] = get_titles(
self.nzo, self.match_obj, self.original_job_name, True
@@ -313,7 +313,7 @@ class SeriesSorter:
)
def get_seasons(self):
""" Get the season number from the match object and format it """
"""Get the season number from the match object and format it"""
try:
season = self.match_obj.group(1).strip("_") # season number
except AttributeError:
@@ -333,7 +333,7 @@ class SeriesSorter:
self.show_info["season_num_alt"] = season2
def get_episodes(self):
""" Get the episode numbers from the match object, format and join them """
"""Get the episode numbers from the match object, format and join them"""
try:
ep_no = self.match_obj.group(2) # episode number
except AttributeError:
@@ -355,7 +355,7 @@ class SeriesSorter:
self.show_info["episode_num_alt"] = ep_no2
def get_showdescriptions(self):
""" Get the show descriptions from the match object and format them """
"""Get the show descriptions from the match object and format them"""
self.show_info["ep_name"], self.show_info["ep_name_two"], self.show_info["ep_name_three"] = get_descriptions(
self.nzo, self.match_obj, self.original_job_name
)
@@ -364,7 +364,7 @@ class SeriesSorter:
self.show_info["resolution"] = get_resolution(self.original_job_name)
def get_values(self):
""" Collect and construct all the values needed for path replacement """
"""Collect and construct all the values needed for path replacement"""
try:
# - Show Name
self.get_shownames()
@@ -389,7 +389,7 @@ class SeriesSorter:
return False
def construct_path(self):
""" Replaces the sort string with real values such as Show Name and Episode Number """
"""Replaces the sort string with real values such as Show Name and Episode Number"""
sorter = self.sort_string.replace("\\", "/")
mapping = []
@@ -463,7 +463,7 @@ class SeriesSorter:
return head
def rename(self, files, current_path):
""" Rename for Series """
"""Rename for Series"""
logging.debug("Renaming Series")
largest = (None, None, 0)
@@ -522,7 +522,7 @@ _RE_MULTIPLE = (
def check_for_multiple(files):
""" Return list of files that looks like a multi-part post """
"""Return list of files that looks like a multi-part post"""
for regex in _RE_MULTIPLE:
matched_files = check_for_sequence(regex, files)
if matched_files:
@@ -531,7 +531,7 @@ def check_for_multiple(files):
def check_for_sequence(regex, files):
""" Return list of files that looks like a sequence, using 'regex' """
"""Return list of files that looks like a sequence, using 'regex'"""
matches = {}
prefix = None
# Build up a dictionary of matches
@@ -581,7 +581,7 @@ def check_for_sequence(regex, files):
class MovieSorter:
""" Methods for Generic Sorting """
"""Methods for Generic Sorting"""
def __init__(self, nzo: Optional[NzbObject], job_name, path, cat):
self.matched = False
@@ -607,7 +607,7 @@ class MovieSorter:
self.match()
def match(self, force=False):
""" Checks the category for a match, if so set self.match to true """
"""Checks the category for a match, if so set self.match to true"""
if force or (cfg.enable_movie_sorting() and self.sort_string):
# First check if the show matches TV episode regular expressions. Returns regex match object
if force or (self.cat and self.cat.lower() in self.cats) or (not self.cat and "None" in self.cats):
@@ -615,7 +615,7 @@ class MovieSorter:
self.matched = True
def get_final_path(self):
""" Collect and construct all the variables such as episode name, show names """
"""Collect and construct all the variables such as episode name, show names"""
if self.get_values():
# Get the final path
path = self.construct_path()
@@ -626,7 +626,7 @@ class MovieSorter:
return os.path.join(self.original_path, self.original_job_name)
def get_values(self):
""" Collect and construct all the values needed for path replacement """
"""Collect and construct all the values needed for path replacement"""
# - Get Year
if self.nzo:
year = self.nzo.nzo_info.get("year")
@@ -663,7 +663,7 @@ class MovieSorter:
return True
def construct_path(self):
""" Return path reconstructed from original and sort expression """
"""Return path reconstructed from original and sort expression"""
sorter = self.sort_string.replace("\\", "/")
mapping = []
@@ -731,7 +731,7 @@ class MovieSorter:
return head
def rename(self, _files, current_path):
""" Rename for Generic files """
"""Rename for Generic files"""
logging.debug("Renaming Generic file")
def filter_files(_file, current_path):
@@ -801,7 +801,7 @@ class MovieSorter:
class DateSorter:
""" Methods for Date Sorting """
"""Methods for Date Sorting"""
def __init__(self, nzo: Optional[NzbObject], job_name, path, cat):
self.matched = False
@@ -827,7 +827,7 @@ class DateSorter:
self.match()
def match(self, force=False):
""" Checks the category for a match, if so set self.matched to true """
"""Checks the category for a match, if so set self.matched to true"""
if force or (cfg.enable_date_sorting() and self.sort_string):
# First check if the show matches TV episode regular expressions. Returns regex match object
if force or (self.cat and self.cat.lower() in self.cats) or (not self.cat and "None" in self.cats):
@@ -837,11 +837,11 @@ class DateSorter:
self.matched = True
def is_match(self):
""" Returns whether there was a match or not """
"""Returns whether there was a match or not"""
return self.matched
def get_final_path(self):
""" Collect and construct all the variables such as episode name, show names """
"""Collect and construct all the variables such as episode name, show names"""
if self.get_values():
# Get the final path
path = self.construct_path()
@@ -852,7 +852,7 @@ class DateSorter:
return os.path.join(self.original_path, self.original_job_name)
def get_values(self):
""" Collect and construct all the values needed for path replacement """
"""Collect and construct all the values needed for path replacement"""
# 2008-10-16
if self.date_type == 1:
@@ -889,7 +889,7 @@ class DateSorter:
return True
def construct_path(self):
""" Return path reconstructed from original and sort expression """
"""Return path reconstructed from original and sort expression"""
sorter = self.sort_string.replace("\\", "/")
mapping = []
@@ -973,7 +973,7 @@ class DateSorter:
return head
def rename(self, files, current_path):
""" Renaming Date file """
"""Renaming Date file"""
logging.debug("Renaming Date file")
# find the master file to rename
for file in files:
@@ -1103,7 +1103,7 @@ def get_titles(nzo: NzbObject, match, name, titleing=False):
def replace_word(word_input, one, two):
""" Regex replace on just words """
"""Regex replace on just words"""
regex = re.compile(r"\W(%s)(\W|$)" % one, re.I)
matches = regex.findall(word_input)
if matches:
@@ -1138,7 +1138,7 @@ def get_descriptions(nzo: NzbObject, match, name):
def get_decades(year):
""" Return 4 digit and 2 digit decades given 'year' """
"""Return 4 digit and 2 digit decades given 'year'"""
if year:
try:
decade = year[2:3] + "0"
@@ -1163,7 +1163,7 @@ def get_resolution(job_name):
def check_for_folder(path):
""" Return True if any folder is found in the tree at 'path' """
"""Return True if any folder is found in the tree at 'path'"""
for _root, dirs, _files in os.walk(path):
if dirs:
return True
@@ -1171,7 +1171,7 @@ def check_for_folder(path):
def to_lowercase(path):
""" Lowercases any characters enclosed in {} """
"""Lowercases any characters enclosed in {}"""
_RE_LOWERCASE = re.compile(r"{([^{]*)}")
while True:
m = _RE_LOWERCASE.search(path)
@@ -1197,7 +1197,7 @@ def strip_folders(path):
f.insert(0, "")
def strip_all(x):
""" Strip all leading/trailing underscores also dots for Windows """
"""Strip all leading/trailing underscores also dots for Windows"""
x = x.strip().strip("_")
if sabnzbd.WIN32:
# macOS and Linux should keep dots, because leading dots are significant
@@ -1287,7 +1287,7 @@ def check_for_date(filename, matcher):
def is_full_path(file):
""" Return True if path is absolute """
"""Return True if path is absolute"""
if file.startswith("\\") or file.startswith("/"):
return True
try:
@@ -1299,7 +1299,7 @@ def is_full_path(file):
def eval_sort(sorttype, expression, name=None, multipart=""):
""" Preview a sort expression, to be used by API """
"""Preview a sort expression, to be used by API"""
from sabnzbd.api import Ttemplate
path = ""

View File

@@ -68,7 +68,7 @@ class URLGrabber(Thread):
self.shutdown = False
def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
""" Add an URL to the URLGrabber queue, 'when' is seconds from now """
"""Add an URL to the URLGrabber queue, 'when' is seconds from now"""
if future_nzo and when:
# Always increase counter
future_nzo.url_tries += 1

View File

@@ -23,7 +23,7 @@ import winreg
def reg_info(user):
""" Return the reg key for API """
"""Return the reg key for API"""
if user:
# Normally use the USER part of the registry
section = winreg.HKEY_CURRENT_USER
@@ -64,7 +64,7 @@ def get_connection_info(user=True):
def set_connection_info(url, user=True):
""" Set API info in register """
"""Set API info in register"""
section, keypath = reg_info(user)
try:
hive = winreg.ConnectRegistry(None, section)
@@ -85,7 +85,7 @@ def set_connection_info(url, user=True):
def del_connection_info(user=True):
""" Remove API info from register """
"""Remove API info from register"""
section, keypath = reg_info(user)
try:
hive = winreg.ConnectRegistry(None, section)
@@ -100,7 +100,7 @@ def del_connection_info(user=True):
def get_install_lng():
""" Return language-code used by the installer """
"""Return language-code used by the installer"""
lng = 0
try:
hive = winreg.ConnectRegistry(None, winreg.HKEY_CURRENT_USER)

View File

@@ -11,7 +11,7 @@ debug = False
def getcmdoutput(cmd):
""" execectue cmd, and give back output lines as array """
"""execectue cmd, and give back output lines as array"""
with os.popen(cmd) as p:
outputlines = p.readlines()
return outputlines

View File

@@ -38,8 +38,12 @@ def getcpu():
# OK, found. Remove unwanted spaces:
cputype = " ".join(cputype.split())
else:
# Not found, so let's fall back to platform()
cputype = platform.platform()
try:
# Not found, so let's fall back to platform()
cputype = platform.platform()
except:
# Can fail on special platforms (like Snapcraft or embedded)
pass
return cputype

View File

@@ -18,7 +18,7 @@ SIZE_URL_LIST = [
def measure_speed_from_url(url: str) -> float:
""" Download the specified url (pointing to a file), and report back MB/s (as a float) """
"""Download the specified url (pointing to a file), and report back MB/s (as a float)"""
logging.debug("URL is %s", url)
start = time.time()
downloaded_bytes = 0 # default
@@ -38,12 +38,12 @@ def measure_speed_from_url(url: str) -> float:
def bytes_to_bits(megabytes_per_second: float) -> float:
""" convert bytes (per second) to bits (per second), taking into a account network overhead"""
"""convert bytes (per second) to bits (per second), taking into a account network overhead"""
return 8.05 * megabytes_per_second # bits
def internetspeed() -> float:
""" Report Internet speed in MB/s as a float """
"""Report Internet speed in MB/s as a float"""
# Do basic test with a small download
logging.debug("Basic measurement, with small download:")
urlbasic = SIZE_URL_LIST[0][1] # get first URL, which is smallest download

View File

@@ -64,7 +64,7 @@ def test_nntp_server_dict(kwargs):
def test_nntp_server(host, port, server=None, username=None, password=None, ssl=None, ssl_verify=1, ssl_ciphers=None):
""" Will connect (blocking) to the nttp server and report back any errors """
"""Will connect (blocking) to the nttp server and report back any errors"""
timeout = 4.0
if "*" in password and not password.strip("*"):
# If the password is masked, try retrieving it from the config

View File

@@ -58,7 +58,7 @@ def keep_awake(reason):
def allow_sleep():
""" Allow OS to go to sleep """
"""Allow OS to go to sleep"""
global assertion_id
if assertion_id:
IOPMAssertionRelease(assertion_id)

View File

@@ -5,5 +5,5 @@
# You MUST use double quotes (so " and not ')
__version__ = "3.3.0-develop"
__baseline__ = "unknown"
__version__ = "3.3.0"
__baseline__ = "7bb443678ac5c8394ead4ecdf76e7b57f4f4bd7a"

View File

@@ -55,7 +55,7 @@ def _zeroconf_callback(sdRef, flags, errorCode, name, regtype, domain):
def set_bonjour(host=None, port=None):
""" Publish host/port combo through Bonjour """
"""Publish host/port combo through Bonjour"""
global _HOST_PORT, _BONJOUR_OBJECT
if not _HAVE_BONJOUR or not cfg.enable_broadcast():
@@ -109,7 +109,7 @@ def _bonjour_server(refObject):
def remove_server():
""" Remove Bonjour registration """
"""Remove Bonjour registration"""
global _BONJOUR_OBJECT
if _BONJOUR_OBJECT:
_BONJOUR_OBJECT.close()

View File

@@ -84,7 +84,7 @@ STRUCT_FILE_DESC_PACKET = struct.Struct(
# Supporting functions
def print_splitter():
""" Simple helper function """
"""Simple helper function"""
print("\n------------------------\n")

View File

@@ -123,7 +123,7 @@ def run_sabnzbd(clean_cache_dir):
@pytest.fixture(scope="session")
def run_sabnews_and_selenium(request):
""" Start SABNews and Selenium/Chromedriver, shared across the pytest session. """
"""Start SABNews and Selenium/Chromedriver, shared across the pytest session."""
# We only try Chrome for consistent results
driver_options = ChromeOptions()
@@ -171,7 +171,7 @@ def run_sabnews_and_selenium(request):
@pytest.fixture(scope="class")
def generate_fake_history(request):
""" Add fake entries to the history db """
"""Add fake entries to the history db"""
history_size = randint(42, 81)
try:
history_db = os.path.join(SAB_CACHE_DIR, DEF_ADMIN_DIR, DB_HISTORY_NAME)
@@ -189,7 +189,7 @@ def generate_fake_history(request):
@pytest.fixture(scope="function")
def update_history_specs(request):
""" Update the history size at the start of every test """
"""Update the history size at the start of every test"""
if request.function.__name__.startswith("test_"):
json = get_api_result(
"history",

View File

Binary file not shown.

View File

@@ -28,7 +28,7 @@ import sabnzbd.interface as interface
class TestApiInternals:
""" Test internal functions of the API """
"""Test internal functions of the API"""
def test_empty(self):
with pytest.raises(TypeError):
@@ -68,13 +68,13 @@ class TestApiInternals:
def set_remote_host_or_ip(hostname: str = "localhost", remote_ip: str = "127.0.0.1"):
""" Change CherryPy's "Host" and "remote.ip"-values """
"""Change CherryPy's "Host" and "remote.ip"-values"""
cherrypy.request.headers["Host"] = hostname
cherrypy.request.remote.ip = remote_ip
class TestSecuredExpose:
""" Test the security handling """
"""Test the security handling"""
main_page = sabnzbd.interface.MainPage()

View File

@@ -30,7 +30,7 @@ class TestValidators:
"""
def assert_allowed(inp_value):
""" Helper function to check for block """
"""Helper function to check for block"""
msg, value = config.clean_nice_ionice_parameters(inp_value)
assert msg is None
assert value == inp_value
@@ -62,10 +62,10 @@ class TestValidators:
assert_allowed("-t -n9 -c7")
def test_clean_nice_ionice_parameters_blocked(self):
""" Should all be blocked """
"""Should all be blocked"""
def assert_blocked(inp_value):
""" Helper function to check for block """
"""Helper function to check for block"""
msg, value = config.clean_nice_ionice_parameters(inp_value)
assert msg
assert msg.startswith("Incorrect parameter")

View File

@@ -252,6 +252,44 @@ class TestFileFolderNameSanitizer:
assert sanitizedname == name # no change
class TestSanitizeFiles(ffs.TestCase):
def setUp(self):
self.setUpPyfakefs()
self.fs.path_separator = "\\"
self.fs.is_windows_fs = True
def test_sanitize_files_input(self):
assert [] == filesystem.sanitize_files(folder=None)
assert [] == filesystem.sanitize_files(filelist=None)
assert [] == filesystem.sanitize_files(folder=None, filelist=None)
@set_platform("win32")
@set_config({"sanitize_safe": True})
def test_sanitize_files(self):
# The very specific tests of sanitize_filename() are above
# Here we just want to see that sanitize_files() works as expected
input_list = [r"c:\test\con.man", r"c:\test\foo:bar"]
output_list = [r"c:\test\_con.man", r"c:\test\foo-bar"]
# Test both the "folder" and "filelist" based calls
for kwargs in ({"folder": r"c:\test"}, {"filelist": input_list}):
# Create source files
for file in input_list:
self.fs.create_file(file)
assert output_list == filesystem.sanitize_files(**kwargs)
# Make sure the old ones are gone
for file in input_list:
assert not os.path.exists(file)
# Make sure the new ones are there
for file in output_list:
assert os.path.exists(file)
os.remove(file)
assert not os.path.exists(file)
class TestSameFile:
def test_nothing_in_common_win_paths(self):
assert 0 == filesystem.same_file("C:\\", "D:\\")
@@ -993,7 +1031,7 @@ class TestRenamer:
filename = os.path.join(dirname, "myfile.txt")
Path(filename).touch() # create file
newfilename = os.path.join(dirname, "newfile.txt")
filesystem.renamer(filename, newfilename) # rename() does not return a value ...
assert newfilename == filesystem.renamer(filename, newfilename)
assert not os.path.isfile(filename)
assert os.path.isfile(newfilename)
@@ -1003,7 +1041,7 @@ class TestRenamer:
sameleveldirname = os.path.join(SAB_DATA_DIR, "othertestdir" + str(random.randint(10000, 99999)))
os.mkdir(sameleveldirname)
newfilename = os.path.join(sameleveldirname, "newfile.txt")
filesystem.renamer(filename, newfilename)
assert newfilename == filesystem.renamer(filename, newfilename)
assert not os.path.isfile(filename)
assert os.path.isfile(newfilename)
shutil.rmtree(sameleveldirname)
@@ -1012,7 +1050,8 @@ class TestRenamer:
Path(filename).touch() # create file
newfilename = os.path.join(dirname, "nonexistingsubdir", "newfile.txt")
try:
filesystem.renamer(filename, newfilename) # rename() does not return a value ...
# Should fail
filesystem.renamer(filename, newfilename)
except:
pass
assert os.path.isfile(filename)
@@ -1085,7 +1124,11 @@ class TestUnwantedExtensions:
@set_config({"unwanted_extensions_mode": 1, "unwanted_extensions": test_extensions})
def test_has_unwanted_extension_whitelist_mode(self):
for filename, result in self.test_params:
assert filesystem.has_unwanted_extension(filename) is not result
if filesystem.get_ext(filename):
assert filesystem.has_unwanted_extension(filename) is not result
else:
# missing extension is never considered unwanted
assert filesystem.has_unwanted_extension(filename) is False
@set_config({"unwanted_extensions_mode": 0, "unwanted_extensions": ""})
def test_has_unwanted_extension_empty_blacklist(self):
@@ -1095,4 +1138,8 @@ class TestUnwantedExtensions:
@set_config({"unwanted_extensions_mode": 1, "unwanted_extensions": ""})
def test_has_unwanted_extension_empty_whitelist(self):
for filename, result in self.test_params:
assert filesystem.has_unwanted_extension(filename) is True
if filesystem.get_ext(filename):
assert filesystem.has_unwanted_extension(filename) is True
else:
# missing extension is never considered unwanted
assert filesystem.has_unwanted_extension(filename) is False

View File

@@ -172,7 +172,7 @@ class TestAddingNZBs:
assert VAR.SCRIPT_DIR in json["config"]["misc"]["script_dir"]
def _customize_pre_queue_script(self, priority, category):
""" Add a script that accepts the job and sets priority & category """
"""Add a script that accepts the job and sets priority & category"""
script_name = "SCRIPT%s.py" % SCRIPT_RANDOM
try:
script_path = os.path.join(VAR.SCRIPT_DIR, script_name)
@@ -255,7 +255,7 @@ class TestAddingNZBs:
return self._create_random_nzb(metadata={"category": cat_meta})
def _expected_results(self, STAGES, return_state=None):
""" Figure out what priority and state the job should end up with """
"""Figure out what priority and state the job should end up with"""
# Define a bunch of helpers
def sanitize_stages(hit_stage, STAGES):
# Fallback is always category-based, so nix any explicit priorities (stages 1, 3).
@@ -275,7 +275,7 @@ class TestAddingNZBs:
return STAGES
def handle_state_prio(hit_stage, STAGES, return_state):
""" Find the priority that should to be set after changing the job state """
"""Find the priority that should to be set after changing the job state"""
# Keep record of the priority that caused the initial hit (for verification of the job state later on)
if not return_state:
return_state = STAGES[hit_stage]
@@ -318,7 +318,7 @@ class TestAddingNZBs:
return self._expected_results(STAGES, return_state)
def handle_default_cat(hit_stage, STAGES, return_state):
""" Figure out the (category) default priority """
"""Figure out the (category) default priority"""
STAGES = sanitize_stages(hit_stage, STAGES)
# Strip the current -100 hit before recursing
@@ -474,7 +474,7 @@ class TestAddingNZBs:
nzb_basedir, nzb_basename = os.path.split(VAR.NZB_FILE)
nzb_size = os.stat(VAR.NZB_FILE).st_size
part_size = round(randint(20, 80) / 100 * nzb_size)
part_size = round(randint(40, 70) / 100 * nzb_size)
first_part = os.path.join(nzb_basedir, "part1_of_" + nzb_basename)
second_part = os.path.join(nzb_basedir, "part2_of_" + nzb_basename)
@@ -509,7 +509,7 @@ class TestAddingNZBs:
],
)
def test_adding_nzbs_malformed(self, keep_first, keep_last, strip_first, strip_last, should_work):
""" Test adding broken, empty, or otherwise malformed NZB file """
"""Test adding broken, empty, or otherwise malformed NZB file"""
if not VAR.NZB_FILE:
VAR.NZB_FILE = self._create_random_nzb()
@@ -549,7 +549,7 @@ class TestAddingNZBs:
@pytest.mark.parametrize("prio_def_cat", sample(VALID_DEFAULT_PRIORITIES, 1))
@pytest.mark.parametrize("prio_add", PRIO_OPTS_ADD)
def test_adding_nzbs_size_limit(self, prio_meta_cat, prio_def_cat, prio_add):
""" Verify state and priority of a job exceeding the size_limit """
"""Verify state and priority of a job exceeding the size_limit"""
# Set size limit
json = get_api_result(
mode="set_config", extra_arguments={"section": "misc", "keyword": "size_limit", "value": MIN_FILESIZE - 1}

View File

@@ -35,22 +35,22 @@ from tests.testhelper import *
class ApiTestFunctions:
""" Collection of (wrapper) functions for API testcases """
"""Collection of (wrapper) functions for API testcases"""
def _get_api_json(self, mode, extra_args={}):
""" Wrapper for API calls with json output """
"""Wrapper for API calls with json output"""
extra = {"output": "json", "apikey": SAB_APIKEY}
extra.update(extra_args)
return get_api_result(mode=mode, host=SAB_HOST, port=SAB_PORT, extra_arguments=extra)
def _get_api_text(self, mode, extra_args={}):
""" Wrapper for API calls with text output """
"""Wrapper for API calls with text output"""
extra = {"output": "text", "apikey": SAB_APIKEY}
extra.update(extra_args)
return get_api_result(mode=mode, host=SAB_HOST, port=SAB_PORT, extra_arguments=extra)
def _get_api_xml(self, mode, extra_args={}):
""" Wrapper for API calls with xml output """
"""Wrapper for API calls with xml output"""
extra = {"output": "xml", "apikey": SAB_APIKEY}
extra.update(extra_args)
return get_api_result(mode=mode, host=SAB_HOST, port=SAB_PORT, extra_arguments=extra)
@@ -84,14 +84,14 @@ class ApiTestFunctions:
self._get_api_json("set_config", extra_args=script_dir_extra)
def _record_slots(self, keys):
""" Return a list of dicts, storing queue info for the items in iterable 'keys' """
"""Return a list of dicts, storing queue info for the items in iterable 'keys'"""
record = []
for slot in self._get_api_json("queue")["queue"]["slots"]:
record.append({key: slot[key] for key in keys})
return record
def _run_tavern(self, test_name, extra_vars=None):
""" Run tavern tests in ${test_name}.yaml """
"""Run tavern tests in ${test_name}.yaml"""
vars = [
("SAB_HOST", SAB_HOST),
("SAB_PORT", SAB_PORT),
@@ -111,7 +111,7 @@ class ApiTestFunctions:
assert result is result.OK
def _get_api_history(self, extra={}):
""" Wrapper for history-related api calls """
"""Wrapper for history-related api calls"""
# Set a higher default limit; the default is 10 via cfg(history_limit)
if "limit" not in extra.keys() and "name" not in extra.keys():
# History calls that use 'name' don't need the limit parameter
@@ -172,21 +172,21 @@ class ApiTestFunctions:
warn("Failed to remove %s" % job_dir)
def _purge_queue(self, del_files=0):
""" Clear the entire queue """
"""Clear the entire queue"""
self._get_api_json("queue", extra_args={"name": "purge", "del_files": del_files})
assert len(self._get_api_json("queue")["queue"]["slots"]) == 0
@pytest.mark.usefixtures("run_sabnzbd")
class TestOtherApi(ApiTestFunctions):
""" Test API function not directly involving either history or queue """
"""Test API function not directly involving either history or queue"""
def test_api_version_testhelper(self):
""" Check the version, testhelper style """
"""Check the version, testhelper style"""
assert "version" in get_api_result("version", SAB_HOST, SAB_PORT)
def test_api_version_tavern(self):
""" Same same, tavern style """
"""Same same, tavern style"""
self._run_tavern("api_version")
def test_api_version_json(self):
@@ -199,7 +199,7 @@ class TestOtherApi(ApiTestFunctions):
assert self._get_api_xml("version")["version"] == sabnzbd.__version__
def test_api_server_stats(self):
""" Verify server stats format """
"""Verify server stats format"""
self._run_tavern("api_server_stats")
@pytest.mark.parametrize("extra_args", [{}, {"name": "change_complete_action", "value": ""}])
@@ -403,10 +403,10 @@ class TestOtherApi(ApiTestFunctions):
@pytest.mark.usefixtures("run_sabnzbd")
class TestQueueApi(ApiTestFunctions):
""" Test queue-related API responses """
"""Test queue-related API responses"""
def test_api_queue_empty_format(self):
""" Verify formatting, presence of fields for empty queue """
"""Verify formatting, presence of fields for empty queue"""
self._purge_queue()
self._run_tavern("api_queue_empty")
@@ -566,7 +566,7 @@ class TestQueueApi(ApiTestFunctions):
self._get_api_json("queue", extra_args={"name": "change_complete_action", "value": ""})
def test_api_queue_single_format(self):
""" Verify formatting, presence of fields for single queue entry """
"""Verify formatting, presence of fields for single queue entry"""
self._create_random_queue(minimum_size=1)
self._run_tavern("api_queue_format")
@@ -845,7 +845,7 @@ class TestQueueApi(ApiTestFunctions):
assert changed[row] == original[row]
def test_api_queue_get_files_format(self):
""" Verify formatting, presence of fields for mode=get_files """
"""Verify formatting, presence of fields for mode=get_files"""
self._create_random_queue(minimum_size=1)
nzo_id = self._get_api_json("queue")["queue"]["slots"][0]["nzo_id"]
# Pass the nzo_id this way rather than fetching it in a tavern stage, as
@@ -896,10 +896,10 @@ class TestQueueApi(ApiTestFunctions):
@pytest.mark.usefixtures("run_sabnzbd", "generate_fake_history", "update_history_specs")
class TestHistoryApi(ApiTestFunctions):
""" Test history-related API responses """
"""Test history-related API responses"""
def test_api_history_format(self):
""" Verify formatting, presence of expected history fields """
"""Verify formatting, presence of expected history fields"""
# Checks all output styles: json, text and xml
self._run_tavern("api_history_format")
@@ -974,7 +974,7 @@ class TestHistoryApi(ApiTestFunctions):
assert len(json["history"]["slots"]) == 0
def test_api_history_restrict_cat_and_search_and_limit(self):
""" Combine search, category and limits requirements into a single query """
"""Combine search, category and limits requirements into a single query"""
limit_sum = 0
slot_sum = 0
limits = [randint(1, ceil(self.history_size / 10)) for _ in range(0, len(self.history_distro_names))]
@@ -1111,6 +1111,6 @@ class TestHistoryApiPart2(ApiTestFunctions):
assert json["history"]["noofslots"] == 0
def test_api_history_empty_format(self):
""" Verify formatting, presence of fields for empty history """
"""Verify formatting, presence of fields for empty history"""
# Checks all output styles: json, text and xml
self._run_tavern("api_history_empty")

View File

@@ -30,7 +30,7 @@ from tests.testhelper import *
class TestShowLogging(SABnzbdBaseTest):
def test_showlog(self):
""" Test the output of the filtered-log button """
"""Test the output of the filtered-log button"""
# Basic URL-fetching, easier than Selenium file download
log_result = get_url_result("status/showlog")
@@ -92,7 +92,7 @@ class TestQueueRepair(SABnzbdBaseTest):
class TestSamplePostProc:
def test_sample_post_proc(self):
""" Make sure we don't break things """
"""Make sure we don't break things"""
# Set parameters
script_params = [
"somedir222",
@@ -127,7 +127,7 @@ class TestSamplePostProc:
class TestExtractPot:
def test_extract_pot(self):
""" Simple test if translation extraction still works """
"""Simple test if translation extraction still works"""
script_call = [sys.executable, "tools/extract_pot.py"]
# Run script and check output

View File

@@ -224,7 +224,7 @@ class TestMisc:
],
)
def test_list_to_cmd(self, test_input, expected_output):
""" Test to convert list to a cmd.exe-compatible command string """
"""Test to convert list to a cmd.exe-compatible command string"""
res = misc.list2cmdline(test_input)
# Make sure the output is cmd.exe-compatible
@@ -248,6 +248,7 @@ class TestMisc:
("", False),
("3.2.0", False),
(-42, False),
("::ffff:192.168.1.100", False),
],
)
def test_is_ipv4_addr(self, value, result):
@@ -276,6 +277,7 @@ class TestMisc:
("[1.2.3.4]", False),
("2001:1", False),
("2001::[2001::1]", False),
("::ffff:192.168.1.100", True),
],
)
def test_is_ipv6_addr(self, value, result):
@@ -305,6 +307,9 @@ class TestMisc:
("[127.6.6.6]", False),
("2001:1", False),
("2001::[2001::1]", False),
("::ffff:192.168.1.100", False),
("::ffff:1.1.1.1", False),
("::ffff:127.0.0.1", True),
],
)
def test_is_loopback_addr(self, value, result):
@@ -337,6 +342,9 @@ class TestMisc:
("[127.6.6.6]", False),
("2001:1", False),
("2001::[2001::1]", False),
("::ffff:192.168.1.100", False),
("::ffff:1.1.1.1", False),
("::ffff:127.0.0.1", True),
],
)
def test_is_localhost(self, value, result):
@@ -373,11 +381,97 @@ class TestMisc:
("[1.2.3.4]", False),
("2001:1", False),
("2001::[2001::1]", False),
("::ffff:192.168.1.100", True),
("::ffff:1.1.1.1", False),
("::ffff:127.0.0.1", False),
],
)
def test_is_lan_addr(self, value, result):
assert misc.is_lan_addr(value) is result
@pytest.mark.parametrize(
"ip, subnet, result",
[
("2001:c0f:fee::1", "2001:c0f:fee", True), # Old-style range setting
("2001:c0f:fee::1", "2001:c0f:FEE:", True),
("2001:c0f:fee::1", "2001:c0FF:ffee", False),
("2001:c0f:fee::1", "2001:c0ff:ffee:", False),
("2001:C0F:FEE::1", "2001:c0f:fee::/48", True),
("2001:c0f:fee::1", "2001:c0f:fee::/112", True),
("2001:c0f:fee::1", "::/0", True), # Subnet equals the entire IPv6 address space
("2001:c0f:fee::1", "2001:c0:ffee::/48", False),
("2001:c0f:fee::1", "2001:c0ff:ee::/112", False),
("2001:c0f:fEE::1", "2001:c0f:fee:eeee::/48", False), # Invalid subnet
("2001:c0f:Fee::1", "2001:c0f:fee:/64", False),
("2001:c0f:fee::1", "2001:c0f:fee:eeee:3:2:1:0/112", False),
("2001:c0f:fee::1", "2001:c0f:fee::1", True), # Single-IP subnet
("2001:c0f:fee::1", "2001:c0f:fee::1/128", True),
("2001:c0f:fee::1", "2001:c0f:fee::2", False),
("2001:c0f:fee::1", "2001:c0f:fee::2/128", False),
("::1", "::/127", True),
("::1", "2021::/64", False), # Localhost not in subnet
("192.168.43.21", "192.168.43", True), # Old-style subnet setting
("192.168.43.21", "192.168.43.", True),
("192.168.43.21", "192.168.4", False),
("192.168.43.21", "192.168.4.", False),
("10.11.12.13", "10", True), # Bad old-style setting (allowed 100.0.0.0/6, 104.0.0.0/6 and 108.0.0.0/7)
("10.11.12.13", "10.", True), # Correct version of the same (10.0.0.0/8 only)
("108.1.2.3", "10", False), # This used to be allowed with the bad setting!
("108.1.2.3", "10.", False),
("192.168.43.21", "192.168.0.0/16", True),
("192.168.43.21", "192.168.0.0/255.255.255.0", True),
("::ffff:192.168.43.21", "192.168.43.0/24", True), # IPv4-mapped IPv6 ("dual-stack") notation
("::FFff:192.168.43.21", "192.168.43.0/24", True),
("::ffff:192.168.12.34", "192.168.43.0/24", False),
("::ffFF:192.168.12.34", "192.168.43.0/24", False),
("192.168.43.21", "192.168.43.0/26", True),
("200.100.50.25", "0.0.0.0/0", True), # Subnet equals the entire IPv4 address space
("192.168.43.21", "10.0.0.0/8", False),
("192.168.43.21", "192.168.1.0/22", False),
("192.168.43.21", "192.168.43.21/24", False), # Invalid subnet
("192.168.43.21", "192.168.43/24", False),
("192.168.43.21", "192.168.43.0/16", False),
("192.168.43.21", "192.168.43.0/255.252.0.0", False),
("192.168.43.21", "192.168.43.21", True), # Single-IP subnet
("192.168.43.21", "192.168.43.21/32", True),
("192.168.43.21", "192.168.43.21/255.255.255.255", True),
("192.168.43.21", "192.168.43.12", False),
("192.168.43.21", "192.168.43.0/32", False),
("192.168.43.21", "43.21.168.192/255.255.255.255", False),
("127.0.0.1", "127.0.0.0/31", True),
("127.0.1.1", "127.0.0.0/24", False), # Localhost not in subnet
("111.222.33.44", "111:222:33::/96", False), # IPv4/IPv6 mixup
("111:222:33::44", "111.222.0.0/24", False),
("aaaa::1:2:3:4", "f:g:h:i:43:21::/112", False), # Invalid subnet
("4.3.2.1", "654.3.2.1.0/24", False),
(None, "1.2.3.4/32", False), # Missing input
("1:a:2:b::", None, False),
(None, None, False),
],
)
def test_ip_in_subnet(self, ip, subnet, result):
misc.ip_in_subnet(ip, subnet) is result
@pytest.mark.parametrize(
"ip, result",
[
("::ffff:127.0.0.1", "127.0.0.1"),
("::FFFF:127.0.0.1", "127.0.0.1"),
("::ffff:192.168.1.255", "192.168.1.255"),
("::ffff:8.8.8.8", "8.8.8.8"),
("2007::2021", "2007::2021"),
("::ffff:2007:2021", "::ffff:2007:2021"),
("2007::ffff:2021", "2007::ffff:2021"),
("12.34.56.78", "12.34.56.78"),
("foobar", "foobar"),
("0:0:0:0:0:ffff:8.8.4.4", "8.8.4.4"),
("0000:0000:0000:0000:0000:ffff:1.0.0.1", "1.0.0.1"),
("0000::0:ffff:1.1.1.1", "1.1.1.1"),
],
)
def test_strip_ipv4_mapped_notation(self, ip, result):
misc.strip_ipv4_mapped_notation(ip) == result
class TestBuildAndRunCommand:
# Path should exist

67
tests/test_par2file.py Normal file
View File

@@ -0,0 +1,67 @@
#!/usr/bin/python3 -OO
# Copyright 2007-2021 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Testing SABnzbd par2 parsing
"""
from sabnzbd.par2file import *
from tests.testhelper import *
# TODO: Add testing for edge cases, such as non-unique md5of16k or broken par files
class TestPar2Parsing:
def test_parse_par2_file(self, caplog):
# To capture the par2-creator, we need to capture the logging
with caplog.at_level(logging.DEBUG):
# These files are all <16k so the MD5 of the whole file is the same as the 16k one
assert {"random.bin": b"\xbf\xe0\xe4\x10\xa2#\xf5\xbeN\x7f2\xe5\x9e\xdd\t\x03"} == parse_par2_file(
os.path.join(SAB_DATA_DIR, "deobfuscate_filenames", "rename.par2"), {}
)
assert "Par2-creator of rename.par2 is: QuickPar 0.9" in caplog.text
caplog.clear()
assert {"frènch_german_demö.rar": b"C\t\x1d\xbd\xdf\x8c\xb5w \xcco\xbf~L)\xc2"} == parse_par2_file(
os.path.join(SAB_DATA_DIR, "test_win_unicode", "frènch_german_demö.rar.vol0+1.par2"), {}
)
assert "Par2-creator of frènch_german_demö.rar.vol0+1.par2 is: QuickPar 0.9" in caplog.text
caplog.clear()
assert {
"我喜欢编程.part5.rar": b"\x19\xe7\xb7\xb3\xbc\x17\xc4\xefo\x96*+x\x0c]M",
"我喜欢编程.part6.rar": b"M\x8c.{\xae\x15\xb7\xa1\x8c\xc7\x1f\x8a\xb3^`\xd9",
"我喜欢编程.part4.rar": b"\xb8D:r\xd8\x04\x98\xb3\xc2\x89\xed\xc1\x90\xe445",
"我喜欢编程.part2.rar": b"aN#\x04*\x86\xd96|PoDV\xa6S\xa8",
"我喜欢编程.part3.rar": b"\xc5\x1ep\xeb\x94\xa7\x12\xa1e\x8c\xc5\xda\xda\xae1 ",
"我喜欢编程.part1.rar": b'_tJ\x15\x1a3;1\xaao\xa9n\n"\xa5p',
"我喜欢编程.part7.rar": b"M\x1c\x14\x9b\xacY\x81\x8d\x82 VV\x81&\x8eH",
} == parse_par2_file(os.path.join(SAB_DATA_DIR, "unicode_rar", "我喜欢编程.par2"), {})
assert "Par2-creator of 我喜欢编程.par2 is: ParPar v0.3.2" in caplog.text
caplog.clear()
def test_parse_par2_file_16k(self, caplog):
# Capture logging of the par2-creator
with caplog.at_level(logging.DEBUG):
# This file is 18k, so it's md5 of the first 16k is actually different
md5of16k = {}
assert {"rss_feed_test.xml": b"\xf8\x8f\x88\x91\xae{\x03\xc8\xad\xcb\xb4Y\xa0+\x06\xf6"} == parse_par2_file(
os.path.join(SAB_DATA_DIR, "par2file", "basic_16k.par2"), md5of16k
)
assert md5of16k == {b"'ky\xd7\xd1\xd3wF\xed\x9c\xf7\x9b\x90\x93\x106": "rss_feed_test.xml"}
assert "Par2-creator of basic_16k.par2 is: QuickPar 0.9" in caplog.text
caplog.clear()

View File

@@ -29,7 +29,7 @@ import sabnzbd.config
class TestRSS:
@staticmethod
def setup_rss(feed_name, feed_url):
""" Setup the basic settings to get things going"""
"""Setup the basic settings to get things going"""
# Setup the config settings
sabnzbd.config.CFG = configobj.ConfigObj()
sabnzbd.config.ConfigRSS(feed_name, {"uri": feed_url})

View File

@@ -28,10 +28,10 @@ from tests.testhelper import SAB_CACHE_DIR
@pytest.mark.usefixtures("clean_cache_dir")
class TestDiskSpeed:
""" test sabnzbd.utils.diskspeed """
"""test sabnzbd.utils.diskspeed"""
def test_disk_speed(self):
""" Test the normal use case: writable directory"""
"""Test the normal use case: writable directory"""
speed = diskspeedmeasure(SAB_CACHE_DIR)
assert speed > 0.0
assert isinstance(speed, float)
@@ -40,7 +40,7 @@ class TestDiskSpeed:
assert not os.path.exists(os.path.join(SAB_CACHE_DIR, "outputTESTING.txt"))
def test_non_existing_dir(self):
""" testing a non-existing dir should result in 0"""
"""testing a non-existing dir should result in 0"""
speed = diskspeedmeasure("such_a_dir_does_not_exist")
assert speed == 0
@@ -54,7 +54,7 @@ class TestDiskSpeed:
assert speed == 0
def test_file_not_dir_specified(self):
""" testing a file should result in 0"""
"""testing a file should result in 0"""
with tempfile.NamedTemporaryFile() as temp_file:
speed = diskspeedmeasure(temp_file.name)
assert speed == 0

View File

@@ -24,7 +24,7 @@ from sabnzbd.utils.pystone import pystones
class TestPystone:
def test_pystone(self):
""" Tests for performance with various loop sizes """
"""Tests for performance with various loop sizes"""
loops = [10, 1000, 50000, 100000]
for loop in loops:
benchtime, stones = pystones(loop)

View File

@@ -45,5 +45,5 @@ class TestAPIReg:
assert not ar.get_connection_info(True)
def test_get_install_lng(self):
""" Not much to test yet.. """
"""Not much to test yet.."""
assert ar.get_install_lng() == "en"

View File

@@ -65,7 +65,7 @@ SAB_NEWSSERVER_PORT = 8888
def set_config(settings_dict):
""" Change config-values on the fly, per test"""
"""Change config-values on the fly, per test"""
def set_config_decorator(func):
def wrapper_func(*args, **kwargs):
@@ -87,7 +87,7 @@ def set_config(settings_dict):
def set_platform(platform):
""" Change config-values on the fly, per test"""
"""Change config-values on the fly, per test"""
def set_platform_decorator(func):
def wrapper_func(*args, **kwargs):
@@ -121,13 +121,13 @@ def set_platform(platform):
def get_url_result(url="", host=SAB_HOST, port=SAB_PORT):
""" Do basic request to web page """
"""Do basic request to web page"""
arguments = {"apikey": SAB_APIKEY}
return requests.get("http://%s:%s/%s/" % (host, port, url), params=arguments).text
def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}):
""" Build JSON request to SABnzbd """
"""Build JSON request to SABnzbd"""
arguments = {"apikey": SAB_APIKEY, "output": "json", "mode": mode}
arguments.update(extra_arguments)
r = requests.get("http://%s:%s/api" % (host, port), params=arguments)
@@ -139,13 +139,13 @@ def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}):
def create_nzb(nzb_dir, metadata=None):
""" Create NZB from directory using SABNews """
"""Create NZB from directory using SABNews"""
nzb_dir_full = os.path.join(SAB_DATA_DIR, nzb_dir)
return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full, metadata=metadata)
def create_and_read_nzb(nzbdir):
""" Create NZB, return data and delete file """
"""Create NZB, return data and delete file"""
# Create NZB-file to import
nzb_path = create_nzb(nzbdir)
with open(nzb_path, "r") as nzb_data_fp:
@@ -179,7 +179,7 @@ class FakeHistoryDB(db.HistoryDB):
super().__init__()
def add_fake_history_jobs(self, number_of_entries=1):
""" Generate a history db with any number of fake entries """
"""Generate a history db with any number of fake entries"""
for _ in range(0, number_of_entries):
nzo = mock.Mock()
@@ -246,7 +246,7 @@ class SABnzbdBaseTest:
@staticmethod
def selenium_wrapper(func, *args):
""" Wrapper with retries for more stable Selenium """
"""Wrapper with retries for more stable Selenium"""
for i in range(3):
try:
return func(*args)

View File

@@ -64,7 +64,7 @@ RE_CONTEXT = re.compile(r"#:\s*(.*)$")
def get_a_line(line_src, number):
""" Retrieve line 'number' from file 'src' with caching """
"""Retrieve line 'number' from file 'src' with caching"""
global FILE_CACHE
if line_src not in FILE_CACHE:
FILE_CACHE[line_src] = []
@@ -79,7 +79,7 @@ def get_a_line(line_src, number):
def get_context(ctx_line):
""" Read context info from source file and append to line. """
"""Read context info from source file and append to line."""
if not ctx_line.startswith("#:"):
return ctx_line
@@ -125,7 +125,7 @@ def get_context(ctx_line):
def add_tmpl_to_pot(prefix, dst_file):
""" Append english template to open POT file 'dst' """
"""Append english template to open POT file 'dst'"""
with open(EMAIL_DIR + "/%s-en.tmpl" % prefix, "r") as tmpl_src:
dst_file.write("#: email/%s.tmpl:1\n" % prefix)
dst_file.write('msgid ""\n')

View File

@@ -140,7 +140,7 @@ RE_LANG = re.compile(r'"Language-Description:\s([^"]+)\\n')
def run(cmd):
""" Run system command, returns exit-code and stdout """
"""Run system command, returns exit-code and stdout"""
try:
txt = subprocess.check_output(cmd, universal_newlines=True)
ret = 0
@@ -152,7 +152,7 @@ def run(cmd):
def process_po_folder(domain, folder, extra=""):
""" Process each PO file in folder """
"""Process each PO file in folder"""
result = True
for fname in glob.glob(os.path.join(folder, "*.po")):
basename = os.path.split(fname)[1]
@@ -180,7 +180,7 @@ def process_po_folder(domain, folder, extra=""):
def remove_mo_files():
""" Remove MO files in locale """
"""Remove MO files in locale"""
for root, dirs, files in os.walk(MO_DIR, topdown=False):
for f in files:
if not f.startswith(DOMAIN):
@@ -188,7 +188,7 @@ def remove_mo_files():
def translate_tmpl(prefix, lng):
""" Translate template 'prefix' into language 'lng' """
"""Translate template 'prefix' into language 'lng'"""
# Open the original file
with open(EMAIL_DIR + "/%s-en.tmpl" % prefix, "r", encoding="utf-8") as src:
data = src.read()
@@ -204,7 +204,7 @@ def translate_tmpl(prefix, lng):
def make_templates():
""" Create email templates """
"""Create email templates"""
if not os.path.exists("email"):
os.makedirs("email")
for path in glob.glob(os.path.join(MO_DIR, "*")):
@@ -224,7 +224,7 @@ def make_templates():
def patch_nsis():
""" Patch translation into the NSIS script """
"""Patch translation into the NSIS script"""
RE_NSIS = re.compile(r'^(\s*LangString\s+)(\w+)(\s+\$\{LANG_)(\w+)\}\s+(".*)', re.I)
languages = [os.path.split(path)[1] for path in glob.glob(os.path.join(MO_DIR, "*"))]

View File

@@ -53,7 +53,7 @@ def usage(code, msg=""):
def add(id, str, fuzzy):
""" Add a non-fuzzy translation to the dictionary. """
"""Add a non-fuzzy translation to the dictionary."""
global MESSAGES, nonewlines, RE_HTML
if not fuzzy and str:
if id.count(b"%s") == str.count(b"%s"):

View File

Binary file not shown.

View File

Binary file not shown.

View File

Binary file not shown.

View File

Binary file not shown.