Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-22 14:29:10 -05:00)

Compare commits: 3.3.0Beta3 ... 3.3.0 (177 commits)
Commits in this comparison (SHA1, newest first):

e8206fbdd9, 589f15a77b, 7bb443678a, 6390415101, 4abf192e11, 1fed37f9da, 8fdb259270, 98b0b46dda,
861fb9e3d5, 644bcee14e, 933d9e92d1, 9fb03a25f6, 0b1f7827fc, 49f21e2c9d, 990c0e07cf, 745459e69f,
115a6cf5d7, 39aafbbc61, 93ddc9ce99, 3d877eed13, 308d612c05, 9b75f0428d, e6858659fb, 815058ffcd,
915b540576, 5b06d6925c, ef875fa720, 994a7d044f, 80cd7f39b4, 93bf45cde6, b4adc064a0, 7e81d0bcbb,
33b59f091e, ea3dc1f2f4, 5d3e68a6a5, 64f2ec3ffe, c80014ec7d, 6515720d55, 605c5cbfd8, 77e97d1a89,
f17d959770, 22f1d2f642, 7d3907fa0e, 9588fe8d94, 3b3ffdb8d1, cdd7e6931a, 4c3df012a6, b0eaf93331,
55c03279ca, c4f0753f5a, a9bd25873e, 5ab6de8123, 75deb9d678, b5ce0e0766, a9d86a7447, 2abe4c3cef,
0542c25003, 1b8ee4e290, 51128cba55, 3612432581, deca000a1b, 39cccb5653, f6838dc985, 8cd4d92395,
3bf9906f45, 9f7daf96ef, 67de4df155, bc51a4bd1c, bb54616018, 6bcff5e014, 8970a03a9a, 3ad717ca35,
b14f72c67a, 45d036804f, 8f606db233, 3766ba5402, e851813cef, 4d49ad9141, 16618b3af2, 0e5c0f664f,
7be9281431, ee0327fac1, 9930de3e7f, e8503e89c6, 1d9ed419eb, 0207652e3e, 0f1e99c5cb, f134bc7efb,
dcd7c7180e, fbbfcd075b, f42d2e4140, 88882cebbc, 17a979675c, 4642850c79, e8d6eebb04, 864c5160c0,
99b5a00c12, 85ee1f07d7, e58b4394e0, 1e91a57bf1, 39cee52a7e, 72068f939d, 096d0d3cad, 2472ab0121,
00421717b8, ae96d93f94, 8522c40c8f, 23f86e95f1, eed2045189, 217785bf0f, 6aef50dc5d, 16b6e3caa7,
3de4c99a8a, 980aa19a75, fb4b57e056, 03638365ea, 157cb1c83d, e51f11c2b1, 1ad0961dd8, 46ff7dd4e2,
8b067df914, ef43b13272, e8e9974224, feebbb9f04, bc4f06dd1d, 971e4fc909, 51cc765949, 19c6a4fffa,
105ac32d2f, 57550675d2, e674abc5c0, f965c96f51, c76b8ed9e0, 4fbd0d8a7b, 2186c0fff6, 1adca9a9c1,
9408353f2b, 84f4d453d2, d10209f2a1, 3ae149c72f, 47385acc3b, 814eeaa900, 5f2ea13aad, 41ca217931,
b57d36e8dd, 9a4be70734, a8443595a6, fd0a70ac58, 8a8685c968, 9e6cb8da8e, 054ec54d51, 272ce773cb,
050b925f7b, 0087940898, e323c014f9, cc465c7554, 14cb37564f, 094db56c3b, aabb709b8b, 0833dd2db9,
cd3f912be4, 665c516db6, b670da9fa0, 80bee9bffe, d85a70e8ad, 8f21533e76, 89996482a1, 03c10dce91,
bd5331be05, 46e1645289, 4ce3965747, 9d4af19db3, 48e034f4be, f8959baa2f, 8ed5997eae, daf9f50ac8,
6b11013c1a
.github/workflows/build_release.yml (6 lines changed)

@@ -59,7 +59,7 @@ jobs:
           path: "*-win32-bin.zip"
           name: Windows standalone binary (32bit and legacy)
       - name: Prepare official release
-        if: env.AUTOMATION_GITHUB_TOKEN && !startsWith(github.ref, 'refs/tags/')
+        if: env.AUTOMATION_GITHUB_TOKEN && startsWith(github.ref, 'refs/tags/')
         run: python builder/package.py release

   build_macos:
@@ -73,7 +73,7 @@ jobs:
       # We need the official Python, because the GA ones only support newer macOS versions
       # The deployment target is picked up by the Python build tools automatically
       # If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
-      PYTHON_VERSION: 3.9.4
+      PYTHON_VERSION: 3.9.5
       MACOSX_DEPLOYMENT_TARGET: 10.9
     steps:
       - uses: actions/checkout@v2
@@ -110,5 +110,5 @@ jobs:
           path: "*-osx.dmg"
           name: macOS binary (not notarized)
       - name: Prepare official release
-        if: env.AUTOMATION_GITHUB_TOKEN && !startsWith(github.ref, 'refs/tags/')
+        if: env.AUTOMATION_GITHUB_TOKEN && startsWith(github.ref, 'refs/tags/')
         run: python3 builder/package.py release
.github/workflows/translations.yml (3 lines changed)

@@ -24,6 +24,9 @@ jobs:
           tx pull --all --force --parallel
         env:
           TX_TOKEN: ${{ secrets.TX_TOKEN }}
+      - name: Compile translations to validate them
+        run: |
+          python3 tools/make_mo.py
       - name: Push translatable and translated texts back to repo
         uses: stefanzweifel/git-auto-commit-action@v4.5.1
         with:
PKG-INFO (4 lines changed)

@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 3.3.0Beta3
-Summary: SABnzbd-3.3.0Beta3
+Version: 3.3.0
+Summary: SABnzbd-3.3.0
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
README.mkd (42 lines changed)

@@ -1,42 +1,46 @@
-Release Notes - SABnzbd 3.3.0 Beta 3
+Release Notes - SABnzbd 3.3.0
 =========================================================

-## Changes and bugfixes since 3.3.0 Beta 2
-- Failing articles could result in jobs being stuck at 99%.
-- Some NZB files would incorrectly be marked as empty.
-- CRC/yEnc errors would be counted twice as bad articles.
-- API-call `history` would not filter active post-processing `nzo_ids`.
-- RSS `Read All Feeds` button would result in a crash.
-- Support prefix and netmask for `local_ranges`.
-- Windows: `Deobfuscate final filenames` could fail to deobfuscate.
-
-## Changes and bugfixes since 3.3.0 Beta 1
-- Binaries would show an error when starting.
-
 ## Changes since 3.2.1
 - The `External internet access` will automatically detect local network
-  and no longer requires the ranges to be defined. Custom ranges can still
-  be defined through `local_ranges` in Special settings.
+  and no longer requires local network ranges to be defined. Custom ranges
+  can still be defined through `local_ranges` in Special settings.
+- Allow setting `inet_exposure` from the command line.
+- Support prefix and netmask for Special setting `local_ranges`.
 - The `Unwanted extensions` detection can be set to `Whitelist`-mode.
   This will block or pause all jobs with non-matching extensions.
 - Servers article statistics are shown in K, G, M-notation.
 - Resolution added as a pattern key (`%r`) for Sorting.
 - Optimized performance of par2 file parsing.
 - CPU usage optimizations in the download process.
 - Revised handling of categories, scripts, and priorities when adding NZB's.
 - Download statistics are also shown when no History is shown.
 - Confirm rename if Direct Unpack is active for the job.
 - Obfuscated-RAR detection will always be performed.
 - All requests will be logged, not just API calls.
 - Stability improvement to encrypted RAR-detection.
 - Allow missing extensions in `Unwanted extensions` detection.
 - Removed Special setting `max_art_opt`.
 - Add notification that Plush will be removed in 3.4.0.
 - Windows/macOS: Update UnRar to 6.0.1.
 - Windows: Update Multipar to 1.3.1.7 (adds faster verification).

-## Bugfixes since 3.1.1
+## Bugfixes since 3.2.1
 - Prevent failed post-processing if job name ends in multiple dots or spaces.
+- Failing articles could result in jobs being stuck at 99%.
 - Jobs could be stuck in the queue or duplicate if they had missing articles.
 - Prevent jobs getting stuck at 99% due to unreliable servers.
+- CRC/yEnc errors would be counted twice as bad articles.
+- Some NZB files would incorrectly be marked as empty.
+- API-call `history` would not filter active post-processing by `nzo_ids`.
 - Login page could be accessed even if `External internet access` was set
-  to `No access`. All other access would still be blocked.
+  to `No access`. Any other calls would still be blocked.
 - Ignore duplicate files inside messy NZB's.
-- macOS: disk space would be incorrect for very large disks.
+- Windows: `Deobfuscate final filenames` could fail to deobfuscate.
+- macOS: Disk space would be incorrect for very large disks.

 ## Upgrade notices
-- The download statistics file `totals10.sab` is updated in this
+- The download statistics file `totals10.sab` is updated in 3.2.x
   version. If you downgrade to 3.1.x or lower, detailed download
   statistics will be lost.
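The two `local_ranges` items above add prefix and netmask support when deciding whether a client address counts as local. As an illustration only (SABnzbd's own implementation may differ), Python's standard `ipaddress` module can express the same kind of check:

```python
import ipaddress

def in_local_ranges(client_ip: str, ranges: list) -> bool:
    """Return True if client_ip falls inside any configured range.

    Accepts plain addresses ("192.168.1.1"), prefix notation
    ("192.168.0.0/24") and netmask notation ("10.0.0.0/255.0.0.0");
    strict=False tolerates host bits being set in the configured value.
    """
    ip = ipaddress.ip_address(client_ip)
    return any(ip in ipaddress.ip_network(entry, strict=False) for entry in ranges)

print(in_local_ranges("192.168.1.42", ["192.168.0.0/16"]))           # True
print(in_local_ranges("10.1.2.3", ["10.0.0.0/255.0.0.0"]))           # True
print(in_local_ranges("8.8.8.8", ["192.168.0.0/24", "10.0.0.0/8"]))  # False
```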
SABnzbd.py (169 lines changed)

@@ -62,7 +62,6 @@ from sabnzbd.misc import (
     exit_sab,
     split_host,
     create_https_certificates,
-    windows_variant,
     ip_extract,
     set_serv_parms,
     get_serv_parms,
@@ -79,6 +78,7 @@ import sabnzbd.downloader
 import sabnzbd.notifier as notifier
 import sabnzbd.zconfig
 from sabnzbd.getipaddress import localipv4, publicipv4, ipv6
+from sabnzbd.utils.getperformance import getpystone, getcpu
 import sabnzbd.utils.ssdp as ssdp

 try:
@@ -89,9 +89,13 @@ try:
     import win32service
     import win32ts
     import pywintypes
+    import servicemanager
+    from win32com.shell import shell, shellcon
+
+    from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info
+    import sabnzbd.sabtray

     win32api.SetConsoleCtrlHandler(sabnzbd.sig_handler, True)
-    from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info
 except ImportError:
     if sabnzbd.WIN32:
         print("Sorry, requires Python module PyWin32.")
@@ -182,35 +186,36 @@ def print_help():
     print("Options marked [*] are stored in the config file")
     print()
     print("Options:")
     print("  -f  --config-file <ini>  Location of config file")
     print("  -s  --server <srv:port>  Listen on server:port [*]")
     print("  -t  --templates <templ>  Template directory [*]")
     print()
     print("  -l  --logging <-1..2>    Set logging level (-1=off, 0=least, 2=most) [*]")
     print("  -w  --weblogging         Enable cherrypy access logging")
     print()
     print("  -b  --browser <0..1>     Auto browser launch (0=off, 1=on) [*]")
     if sabnzbd.WIN32:
         print("  -d  --daemon             Use when run as a service")
     else:
         print("  -d  --daemon             Fork daemon process")
         print("      --pid <path>         Create a PID file in the given folder (full path)")
         print("      --pidfile <path>     Create a PID file with the given name (full path)")
     print()
     print("  -h  --help               Print this message")
     print("  -v  --version            Print version information")
     print("  -c  --clean              Remove queue, cache and logs")
     print("  -p  --pause              Start in paused mode")
     print("      --repair             Add orphaned jobs from the incomplete folder to the queue")
     print("      --repair-all         Try to reconstruct the queue from the incomplete folder")
     print("                           with full data reconstruction")
     print("      --https <port>       Port to use for HTTPS server")
     print("      --ipv6_hosting <0|1> Listen on IPv6 address [::1] [*]")
+    print("      --inet_exposure <0..5>  Set external internet access [*]")
     print("      --no-login           Start with username and password reset")
     print("      --log-all            Log all article handling (for developers)")
     print("      --disable-file-log   Logging is only written to console")
     print("      --console            Force logging to console")
     print("      --new                Run a new instance of SABnzbd")
     print()
     print("NZB (or related) file:")
     print("  NZB or compressed NZB file, with extension .nzb, .zip, .rar, .7z, .gz, or .bz2")
@@ -347,7 +352,7 @@ def fix_webname(name):
         return name


-def get_user_profile_paths(vista_plus):
+def get_user_profile_paths():
     """Get the default data locations on Windows"""
     if sabnzbd.DAEMON:
         # In daemon mode, do not try to access the user profile
@@ -363,22 +368,15 @@ def get_user_profile_paths():
         return
     elif sabnzbd.WIN32:
         try:
-            from win32com.shell import shell, shellcon
-
             path = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, None, 0)
             sabnzbd.DIR_LCLDATA = os.path.join(path, DEF_WORKDIR)
             sabnzbd.DIR_HOME = os.environ["USERPROFILE"]
         except:
             try:
-                if vista_plus:
-                    root = os.environ["AppData"]
-                    user = os.environ["USERPROFILE"]
-                    sabnzbd.DIR_LCLDATA = "%s\\%s" % (root.replace("\\Roaming", "\\Local"), DEF_WORKDIR)
-                    sabnzbd.DIR_HOME = user
-                else:
-                    root = os.environ["USERPROFILE"]
-                    sabnzbd.DIR_LCLDATA = "%s\\%s" % (root, DEF_WORKDIR)
-                    sabnzbd.DIR_HOME = root
+                root = os.environ["AppData"]
+                user = os.environ["USERPROFILE"]
+                sabnzbd.DIR_LCLDATA = "%s\\%s" % (root.replace("\\Roaming", "\\Local"), DEF_WORKDIR)
+                sabnzbd.DIR_HOME = user
             except:
                 pass
@@ -600,7 +598,7 @@ def get_webhost(cherryhost, cherryport, https_port):
             browserhost = localhost

     else:
-        # If on Vista and/or APIPA, use numerical IP, to help FireFoxers
+        # If on APIPA, use numerical IP, to help FireFoxers
         if ipv6 and ipv4:
             cherryhost = hostip
             browserhost = cherryhost
@@ -778,10 +776,9 @@ def commandline_handler():
             "server=",
             "templates",
             "ipv6_hosting=",
-            "template2",
+            "inet_exposure=",
             "browser=",
             "config-file=",
             "force",
             "disable-file-log",
             "version",
             "https=",
@@ -863,8 +860,6 @@ def main():
     console_logging = False
     no_file_log = False
     web_dir = None
-    vista_plus = False
-    win64 = False
     repair = 0
     no_login = False
     sabnzbd.RESTART_ARGS = [sys.argv[0]]
@@ -872,6 +867,7 @@ def main():
     pid_file = None
     new_instance = False
     ipv6_hosting = None
+    inet_exposure = None

     _service, sab_opts, _serv_opts, upload_nzbs = commandline_handler()

@@ -951,6 +947,8 @@ def main():
             new_instance = True
         elif opt == "--ipv6_hosting":
             ipv6_hosting = arg
+        elif opt == "--inet_exposure":
+            inet_exposure = arg

     sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME))
     sabnzbd.MY_NAME = os.path.basename(sabnzbd.MY_FULLNAME)
@@ -977,17 +975,18 @@ def main():
         logger.setLevel(logging.WARNING)
         logger.addHandler(gui_log)

-    # Detect Windows variant
+    # Detect CPU architecture and Windows variant
+    # Use .machine as .processor is not always filled
+    cpu_architecture = platform.uname().machine
     if sabnzbd.WIN32:
-        vista_plus, win64 = windows_variant()
-        sabnzbd.WIN64 = win64
+        sabnzbd.WIN64 = cpu_architecture == "AMD64"

     if inifile:
         # INI file given, simplest case
         inifile = evaluate_inipath(inifile)
     else:
         # No ini file given, need profile data
-        get_user_profile_paths(vista_plus)
+        get_user_profile_paths()
         # Find out where INI file is
         inifile = os.path.abspath(os.path.join(sabnzbd.DIR_LCLDATA, DEF_INI_FILE))
@@ -1169,24 +1168,19 @@ def main():
             ).strip()
         except:
             pass
-    logging.info("Commit: %s", sabnzbd.__baseline__)
+    logging.info("Commit = %s", sabnzbd.__baseline__)

     logging.info("Full executable path = %s", sabnzbd.MY_FULLNAME)
-    if sabnzbd.WIN32:
-        suffix = ""
-        if win64:
-            suffix = "(win64)"
-        try:
-            logging.info("Platform = %s %s", platform.platform(), suffix)
-        except:
-            logging.info("Platform = %s <unknown>", suffix)
-    else:
-        logging.info("Platform = %s", os.name)
-    logging.info("Python-version = %s", sys.version)
     logging.info("Arguments = %s", sabnzbd.CMDLINE)
-    if sabnzbd.DOCKER:
-        logging.info("Running inside a docker container")
-    else:
-        logging.info("Not inside a docker container")
+    logging.info("Python-version = %s", sys.version)
+    logging.info("Dockerized = %s", sabnzbd.DOCKER)
+    logging.info("CPU architecture = %s", cpu_architecture)
+
+    try:
+        logging.info("Platform = %s - %s", os.name, platform.platform())
+    except:
+        # Can fail on special platforms (like Snapcraft or embedded)
+        pass

     # Find encoding; relevant for external processing activities
     logging.info("Preferred encoding = %s", sabnzbd.encoding.CODEPAGE)
@@ -1210,8 +1204,8 @@ def main():
     try:
         os.environ["SSL_CERT_FILE"] = certifi.where()
-        logging.info("Certifi version: %s", certifi.__version__)
-        logging.info("Loaded additional certificates from: %s", os.environ["SSL_CERT_FILE"])
+        logging.info("Certifi version = %s", certifi.__version__)
+        logging.info("Loaded additional certificates from %s", os.environ["SSL_CERT_FILE"])
     except:
         # Sometimes the certificate file is blocked
         logging.warning(T("Could not load additional certificates from certifi package"))
@@ -1220,38 +1214,16 @@ def main():
     # Extra startup info
     if sabnzbd.cfg.log_level() > 1:
         # List the number of certificates available (can take up to 1.5 seconds)
-        ctx = ssl.create_default_context()
-        logging.debug("Available certificates: %s", repr(ctx.cert_store_stats()))
+        logging.debug("Available certificates = %s", repr(ssl.create_default_context().cert_store_stats()))

-        mylocalipv4 = localipv4()
-        if mylocalipv4:
-            logging.debug("My local IPv4 address = %s", mylocalipv4)
-        else:
-            logging.debug("Could not determine my local IPv4 address")
-
-        mypublicipv4 = publicipv4()
-        if mypublicipv4:
-            logging.debug("My public IPv4 address = %s", mypublicipv4)
-        else:
-            logging.debug("Could not determine my public IPv4 address")
-
-        myipv6 = ipv6()
-        if myipv6:
-            logging.debug("My IPv6 address = %s", myipv6)
-        else:
-            logging.debug("Could not determine my IPv6 address")
+        # List networking
+        logging.debug("Local IPv4 address = %s", localipv4())
+        logging.debug("Public IPv4 address = %s", publicipv4())
+        logging.debug("IPv6 address = %s", ipv6())

         # Measure and log system performance measured by pystone and - if possible - CPU model
-        from sabnzbd.utils.getperformance import getpystone, getcpu
-
-        pystoneperf = getpystone()
-        if pystoneperf:
-            logging.debug("CPU Pystone available performance = %s", pystoneperf)
-        else:
-            logging.debug("CPU Pystone available performance could not be calculated")
-        cpumodel = getcpu()  # Linux only
-        if cpumodel:
-            logging.debug("CPU model = %s", cpumodel)
+        logging.debug("CPU Pystone available performance = %s", getpystone())
+        logging.debug("CPU model = %s", getcpu())

     logging.info("Using INI file %s", inifile)
@@ -1272,8 +1244,6 @@ def main():
     # Handle the several tray icons
     if sabnzbd.cfg.win_menu() and not sabnzbd.DAEMON and not sabnzbd.WIN_SERVICE:
         if sabnzbd.WIN32:
-            import sabnzbd.sabtray
-
             sabnzbd.WINTRAY = sabnzbd.sabtray.SABTrayThread()
         elif sabnzbd.LINUX_POWER and os.environ.get("DISPLAY"):
             try:
@@ -1362,6 +1332,10 @@ def main():
         sabnzbd.cfg.username.set("")
         sabnzbd.cfg.password.set("")

+    # Overwrite inet_exposure from command-line for VPS-setups
+    if inet_exposure:
+        sabnzbd.cfg.inet_exposure.set(inet_exposure)
+
     mime_gzip = (
         "text/*",
         "application/javascript",
@@ -1632,7 +1606,6 @@ def main():


 if sabnzbd.WIN32:
-    import servicemanager
-
     class SABnzbd(win32serviceutil.ServiceFramework):
         """Win32 Service Handler"""
@@ -1699,7 +1672,7 @@ def handle_windows_service():
     Returns True when any service commands were detected or
     when we have started as a service.
     """
-    # Detect if running as Windows Service (only Vista and above!)
+    # Detect if running as Windows Service
     # Adapted from https://stackoverflow.com/a/55248281/5235502
     # Only works when run from the exe-files
     if hasattr(sys, "frozen") and win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
@@ -148,7 +148,7 @@ if __name__ == "__main__":
     patch_version_file(RELEASE_VERSION)

     # To draft a release or not to draft a release?
-    RELEASE_THIS = "draft release" in run_git_command(["log", "-1", "--pretty=format:%b"])
+    RELEASE_THIS = "refs/tags/" in os.environ.get("GITHUB_REF", "")

     # Rename release notes file
     safe_remove("README.txt")
@@ -339,7 +339,7 @@ if __name__ == "__main__":
             print("Approved! Stapling the result to the app")
             run_external_command(["xcrun", "stapler", "staple", "dist/SABnzbd.app"])
         elif notarization_user and notarization_pass:
-            print("Notarization skipped, add 'draft release' to the commit message trigger notarization!")
+            print("Notarization skipped, tag commit to trigger notarization!")
         else:
             print("Notarization skipped, NOTARIZATION_USER or NOTARIZATION_PASS missing.")
     else:
@@ -542,7 +542,7 @@ if __name__ == "__main__":
             head=RELEASE_VERSION,
         )
     else:
-        print("To push release to GitHub, add 'draft release' to the commit message.")
+        print("To push release to GitHub, first tag the commit.")
         print("Or missing the AUTOMATION_GITHUB_TOKEN, cannot push to GitHub without it.")

     # Reset!
@@ -264,13 +264,13 @@ function do_restart() {
     $.ajax({ url: '../../config/restart?apikey=' + sabSession,
         complete: function() {
-            // Keep counter of failures
-            var failureCounter = 0;
+            var loopCounter = 0;

             // Now we try until we can connect
-            var refreshInterval = setInterval(function() {
-                // We skip the first one
-                if(failureCounter == 0) {
-                    failureCounter = failureCounter+1;
+            setInterval(function() {
+                loopCounter = loopCounter+1;
+                // We skip the first one so we give it time to shutdown
+                if(loopCounter < 2) {
                     return
                 }
                 $.ajax({ url: urlTotal,
@@ -279,17 +279,16 @@ function do_restart() {
                         location.href = urlTotal;
                     },
                     error: function(status, text) {
-                        failureCounter = failureCounter+1;
-                        // Too many failuers and we give up
-                        if(failureCounter >= 6) {
+                        // Too many failures and we give up
+                        if(loopCounter >= 10) {
                             // If the port has changed 'Access-Control-Allow-Origin' header will not allow
-                            // us to check if the server is back up. So after 7 failures we redirect
+                            // us to check if the server is back up. So after 10 failures (20 sec) we redirect
                             // anyway in the hopes it works anyway..
                             location.href = urlTotal;
                         }
                     }
                 })
-            }, 4000)
+            }, 2000)

             // Exception if we go from HTTPS to HTTP
             // (this is not allowed by browsers and all of the above will be ignored)
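The restart logic above polls the server until it answers again, skipping the first tick so the old process has time to shut down and redirecting unconditionally after ten failed polls (20 seconds). A minimal Python sketch of the same pattern, with a hypothetical probe URL standing in for the skin's `urlTotal`:

```python
import time
import urllib.error
import urllib.request

def wait_for_restart(url: str, interval: float = 2.0, max_polls: int = 10) -> bool:
    """Poll `url` every `interval` seconds until it responds.

    Mirrors the template's loop: the first tick is skipped while the old
    process shuts down, and after `max_polls` attempts the caller should
    redirect anyway and hope for the best.
    """
    polls = 0
    while polls < max_polls:
        polls += 1
        time.sleep(interval)
        if polls < 2:
            continue  # skip the first tick: the server is still shutting down
        try:
            urllib.request.urlopen(url, timeout=interval)
            return True  # server is back up
        except (urllib.error.URLError, OSError):
            pass  # not up yet, keep polling
    return False

# wait_for_restart("http://127.0.0.1:8080/")  # example probe
```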
@@ -103,7 +103,7 @@
 <span id="warning_box"><b><a href="${path}status/#tabs-warnings" id="last_warning"><span id="have_warnings">$have_warnings</span> $T('warnings')</a></b></span>
 #if $pane=="Main"#
 #if $new_release#⋅ <a href="$new_rel_url" id="new_release" target="_blank">$T('Plush-updateAvailable').replace(' ',' ')</a>#end if#
-This skin is no longer actively maintained! <a href="${path}config/general/#web_dir"><strong>We recommend using the Glitter skin.</strong></a>
+<a href="${path}config/general/#web_dir"><strong style="color: red">This skin will be removed in SABnzbd 3.4.0! <br>We recommend using the Glitter skin.</strong></a>
 #end if#
 </div>
 </div>
@@ -91,40 +91,7 @@


 <div id="tabs-connections">
     <a href="refresh_conn?apikey=$apikey" class="juiButton">$T('Plush-button-refresh')</a>
     <a href="disconnect?apikey=$apikey" class="juiButton">$T('link-forceDisc')</a>
     <hr>
-    <!--#if $servers#-->
-    <!--#set $count=0#-->
-    <!--#for $server in $servers#-->
-    <!--#set $count=$count+1#-->
-    <p>$T('swtag-server'): <strong>$server[0]</strong></p>
-    <p>$T('Priority') = $server[7] <!--#if int($server[8]) != 0#-->$T('optional').capitalize()<!--#else#-->$T('enabled').capitalize()<!--#end if#--></p>
-    <p># $T('connections'): $server[2]</p>
-    <!--#if not $server[5]#-->
-    <a href="./unblock_server?server=$server[0]&apikey=$apikey" class="juiButton">$T('server-blocked')</a>
-    $server[6]
-    <!--#end if#-->
-    <!--#if $server[3]#-->
-    <table class="rssTable">
-        <tr>
-            <th>$T('article-id')</th>
-            <th>$T('filename')</th>
-            <th>$T('file-set')</th>
-        </tr>
-        <!--#set $odd = False#-->
-        <!--#for $thrd in $server[3]#-->
-        <!--#set $odd = not $odd#-->
-        <tr class="<!--#if $odd then "odd" else "even"#-->">
-            <td>$thrd[1]</td><td>$thrd[2]</td><td>$thrd[3]</td></tr>
-        <!--#end for#-->
-    </table>
-    <!--#end if#-->
-    <br/><hr/><br/>
-    <!--#end for#-->
-    <!--#else#-->
-    <p>$T('none')</p>
-    <!--#end if#-->
 </div>

 <div id="tabs-dashboard">
@@ -18,7 +18,7 @@ After=network-online.target

 [Service]
 Environment="PYTHONIOENCODING=utf-8"
-ExecStart=/opt/sabnzbd/SABnzbd.py --logging 1 --browser 0
+ExecStart=/opt/sabnzbd/SABnzbd.py --disable-file-log --logging 1 --browser 0
 User=%I
 Type=simple
 Restart=on-failure
osx/unrar/unrar (binary file not shown)
@@ -3,16 +3,17 @@
 # team@sabnzbd.org
 #
 # Translators:
 # Safihre <safihre@sabnzbd.org>, 2020
 # C E <githubce@eiselt.ch>, 2020
 # Nikolai Bohl <n.kay01@gmail.com>, 2020
 # reloxx13 <reloxx@interia.pl>, 2021
 # Ben Hecht <benjamin.hecht@me.com>, 2021
+# Safihre <safihre@sabnzbd.org>, 2021
 #
 msgid ""
 msgstr ""
 "Project-Id-Version: SABnzbd-3.3.0-develop\n"
 "PO-Revision-Date: 2020-06-27 15:49+0000\n"
-"Last-Translator: reloxx13 <reloxx@interia.pl>, 2021\n"
+"Last-Translator: Safihre <safihre@sabnzbd.org>, 2021\n"
 "Language-Team: German (https://www.transifex.com/sabnzbd/teams/111101/de/)\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -334,7 +335,7 @@ msgstr "Server-Adresse wird benötigt"

 #: sabnzbd/cfg.py
 msgid "%s is not a valid script"
-msgstr ""
+msgstr "%s ist kein gültiges Script"

 #. Warning message
 #: sabnzbd/config.py
@@ -516,12 +517,12 @@ msgstr "Wird beendet …"
 #. Warning message
 #: sabnzbd/downloader.py
 msgid "Server %s is expiring in %s day(s)"
-msgstr ""
+msgstr "Server %s läuft in %s tag(en) ab"

 #. Warning message
 #: sabnzbd/downloader.py
 msgid "Server %s has used the specified quota"
-msgstr ""
+msgstr "Server %s hat die angegebene Quote verbraucht"

 #: sabnzbd/emailer.py
 msgid "Failed to connect to mail server"
@@ -631,11 +632,11 @@ msgstr "Verschieben von %s nach %s fehlgeschlagen"
 #. Error message
 #: sabnzbd/filesystem.py
 msgid "Blocked attempt to create directory %s"
-msgstr ""
+msgstr "Versuch das Verzeichnis %s zu erstellen wurde blockiert"

 #: sabnzbd/interface.py
 msgid "Refused connection from:"
-msgstr ""
+msgstr "Abgelehnte Verbindung von:"

 #: sabnzbd/interface.py
 msgid "Refused connection with hostname \"%s\" from:"
@@ -3245,6 +3246,7 @@ msgstr "Externer Internetzugriff"
 #: sabnzbd/skintext.py
 msgid "You can set access rights for systems outside your local network."
 msgstr ""
+"Du kannst Zugriffsrechte für Systeme ausserhalb deines Netzwerkes setzen."

 #: sabnzbd/skintext.py
 msgid "No access"
@@ -3584,7 +3586,7 @@ msgstr "Aktion bei ungewollter Dateienendung"

 #: sabnzbd/skintext.py
 msgid "Action when an unwanted extension is detected"
-msgstr ""
+msgstr "Aktion bei ungewollter Dateienendung"

 #: sabnzbd/skintext.py
 msgid "Unwanted extensions"
@@ -3592,11 +3594,11 @@ msgstr "Ungewollte Dateiendungen"

 #: sabnzbd/skintext.py
 msgid "Blacklist"
-msgstr ""
+msgstr "Blacklist"

 #: sabnzbd/skintext.py
 msgid "Whitelist"
-msgstr ""
+msgstr "Whitelist"

 #: sabnzbd/skintext.py
 msgid ""
@@ -4175,12 +4177,12 @@ msgstr "Download erzwingen"
 #. Config->RSS edit button
 #: sabnzbd/skintext.py
 msgid "Edit"
-msgstr ""
+msgstr "Bearbeiten"

 #. Config->RSS when will be the next RSS scan
 #: sabnzbd/skintext.py
 msgid "Next scan at"
-msgstr ""
+msgstr "Nächster scan um"

 #. Config->RSS table column header
 #: sabnzbd/skintext.py
@@ -4,12 +4,13 @@
 #
 # Translators:
 # Safihre <safihre@sabnzbd.org>, 2020
+# Ben Hecht <benjamin.hecht@me.com>, 2021
 #
 msgid ""
 msgstr ""
 "Project-Id-Version: SABnzbd-3.3.0-develop\n"
 "PO-Revision-Date: 2020-06-27 15:56+0000\n"
-"Last-Translator: Safihre <safihre@sabnzbd.org>, 2020\n"
+"Last-Translator: Ben Hecht <benjamin.hecht@me.com>, 2021\n"
 "Language-Team: German (https://www.transifex.com/sabnzbd/teams/111101/de/)\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -51,6 +52,8 @@
 msgid ""
 "The installer only supports Windows 8.1 and above, use the standalone legacy"
 " version to run on older Windows version."
 msgstr ""
+"Der Installer unterstützt nur Windows 8.1 und höher. Benutze die Standalone "
+"Version für ältere Windows Versionen."

 #: builder/win/NSIS_Installer.nsi
 msgid "This will uninstall SABnzbd from your system"
@@ -1168,6 +1168,7 @@ def build_status(skip_dashboard=False, output=None):
     info["loglevel"] = str(cfg.log_level())
     info["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
     info["configfn"] = config.get_filename()
+    info["warnings"] = sabnzbd.GUIHANDLER.content()

     # Dashboard: Speed of System
     info["cpumodel"] = getcpu()
@@ -1197,42 +1198,22 @@ def build_status(skip_dashboard=False, output=None):
     info["dnslookup"] = None

     info["servers"] = []
-    servers = sorted(sabnzbd.Downloader.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))
-    for server in servers:
+    # Servers-list could be modified during iteration, so we need a copy
+    for server in sabnzbd.Downloader.servers[:]:
+        connected = sum(nw.connected for nw in server.idle_threads[:])
         serverconnections = []
-        connected = 0
-
-        for nw in server.idle_threads[:]:
-            if nw.connected:
-                connected += 1
-
         for nw in server.busy_threads[:]:
-            article = nw.article
-            art_name = ""
-            nzf_name = ""
-            nzo_name = ""
-
-            if article:
-                nzf = article.nzf
-                nzo = nzf.nzo
-
-                art_name = article.article
-                # filename field is not always present
-                try:
-                    nzf_name = nzf.filename
-                except:  # attribute error
-                    nzf_name = nzf.subject
-                nzo_name = nzo.final_name
-
-            # For the templates or for JSON
-            if output:
-                thread_info = {"thrdnum": nw.thrdnum, "art_name": art_name, "nzf_name": nzf_name, "nzo_name": nzo_name}
-                serverconnections.append(thread_info)
-            else:
-                serverconnections.append((nw.thrdnum, art_name, nzf_name, nzo_name))
-
-            if nw.connected:
-                connected += 1
+            if nw.connected:
+                connected += 1
+            if nw.article:
+                serverconnections.append(
+                    {
+                        "thrdnum": nw.thrdnum,
+                        "art_name": nw.article.article,
+                        "nzf_name": nw.article.nzf.filename,
+                        "nzo_name": nw.article.nzf.nzo.final_name,
+                    }
+                )

         if server.warning and not (connected or server.errormsg):
             connected = server.warning
@@ -1240,38 +1221,20 @@ def build_status(skip_dashboard=False, output=None):
         if server.request and not server.info:
             connected = T(" Resolving address").replace(" ", "")

-        # For the templates or for JSON
-        if output:
-            server_info = {
-                "servername": server.displayname,
-                "serveractiveconn": connected,
-                "servertotalconn": server.threads,
-                "serverconnections": serverconnections,
-                "serverssl": server.ssl,
-                "serversslinfo": server.ssl_info,
-                "serveractive": server.active,
-                "servererror": server.errormsg,
-                "serverpriority": server.priority,
-                "serveroptional": server.optional,
-                "serverbps": to_units(sabnzbd.BPSMeter.server_bps.get(server.id, 0)),
-            }
-            info["servers"].append(server_info)
-        else:
-            info["servers"].append(
-                (
-                    server.displayname,
-                    "",
-                    connected,
-                    serverconnections,
-                    server.ssl,
-                    server.active,
-                    server.errormsg,
-                    server.priority,
-                    server.optional,
-                )
-            )
-
-    info["warnings"] = sabnzbd.GUIHANDLER.content()
+        server_info = {
+            "servername": server.displayname,
+            "serveractiveconn": connected,
+            "servertotalconn": server.threads,
+            "serverconnections": serverconnections,
+            "serverssl": server.ssl,
+            "serversslinfo": server.ssl_info,
+            "serveractive": server.active,
+            "servererror": server.errormsg,
+            "serverpriority": server.priority,
+            "serveroptional": server.optional,
+            "serverbps": to_units(sabnzbd.BPSMeter.server_bps.get(server.id, 0)),
+        }
+        info["servers"].append(server_info)

     return info
@@ -1409,7 +1372,7 @@ def build_file_list(nzo_id: str):
     for nzf in finished_files:
         jobs.append(
             {
-                "filename": nzf.filename if nzf.filename else nzf.subject,
+                "filename": nzf.filename,
                 "mbleft": "%.2f" % (nzf.bytes_left / MEBI),
                 "mb": "%.2f" % (nzf.bytes / MEBI),
                 "bytes": "%.2f" % nzf.bytes,
@@ -1422,7 +1385,7 @@ def build_file_list(nzo_id: str):
     for nzf in active_files:
         jobs.append(
             {
-                "filename": nzf.filename if nzf.filename else nzf.subject,
+                "filename": nzf.filename,
                 "mbleft": "%.2f" % (nzf.bytes_left / MEBI),
                 "mb": "%.2f" % (nzf.bytes / MEBI),
                 "bytes": "%.2f" % nzf.bytes,
@@ -1435,7 +1398,7 @@ def build_file_list(nzo_id: str):
     for nzf in queued_files:
         jobs.append(
             {
-                "filename": nzf.filename if nzf.filename else nzf.subject,
+                "filename": nzf.filename,
                 "set": nzf.setname,
                 "mbleft": "%.2f" % (nzf.bytes_left / MEBI),
                 "mb": "%.2f" % (nzf.bytes / MEBI),
@@ -36,7 +36,6 @@ from sabnzbd.filesystem import (
     has_win_device,
     diskspace,
     get_filename,
     get_ext,
     has_unwanted_extension,
 )
 from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
@@ -354,7 +353,7 @@ def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[b
     except rarfile.RarCRCError as e:
         # CRC errors can be thrown for wrong password or
         # missing the next volume (with correct password)
-        if "cannot find volume" in str(e).lower():
+        if match_str(str(e), ("cannot find volume", "unexpected end of archive")):
             # We assume this one worked!
             password_hit = password
             break
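`match_str` above lets two different RAR error messages be treated as the same "missing volume, password probably fine" case. Its exact semantics are taken on trust from the diff; assuming it is a case-insensitive "does any of these substrings occur" test, a minimal sketch looks like this (the real helper lives in sabnzbd.misc and may differ in detail):

```python
def match_str(text: str, matches: tuple) -> bool:
    """Return True if any of the given substrings occurs in text (case-insensitive)."""
    text = text.lower()
    return any(m.lower() in text for m in matches)

errors = ("cannot find volume", "unexpected end of archive")
print(match_str("Cannot find volume archive.part2.rar", errors))  # True
print(match_str("Unexpected end of archive", errors))             # True
print(match_str("CRC failed in archive", errors))                 # False
```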
@@ -91,6 +91,38 @@ def next_month(t: float) -> float:


 class BPSMeter:
+    __slots__ = (
+        "start_time",
+        "log_time",
+        "speed_log_time",
+        "last_update",
+        "bps",
+        "bps_list",
+        "server_bps",
+        "cached_amount",
+        "sum_cached_amount",
+        "day_total",
+        "week_total",
+        "month_total",
+        "grand_total",
+        "timeline_total",
+        "article_stats_tried",
+        "article_stats_failed",
+        "day_label",
+        "end_of_day",
+        "end_of_week",
+        "end_of_month",
+        "q_day",
+        "q_period",
+        "quota",
+        "left",
+        "have_quota",
+        "q_time",
+        "q_hour",
+        "q_minute",
+        "quota_enabled",
+    )
+
     def __init__(self):
         t = time.time()
         self.start_time = t
@@ -200,71 +232,82 @@ class BPSMeter:
             self.defaults()
         return res

-    def update(self, server: Optional[str] = None, amount: int = 0, force_full_update: bool = True):
-        """Update counters for "server" with "amount" bytes"""
-        t = time.time()
+    def init_server_stats(self, server: str = None):
+        """Initialize counters for "server" """
+        if server not in self.cached_amount:
+            self.cached_amount[server] = 0
+            self.server_bps[server] = 0.0
+        if server not in self.day_total:
+            self.day_total[server] = 0
+        if server not in self.week_total:
+            self.week_total[server] = 0
+        if server not in self.month_total:
+            self.month_total[server] = 0
+        if server not in self.grand_total:
+            self.grand_total[server] = 0
+        if server not in self.timeline_total:
+            self.timeline_total[server] = {}
+        if self.day_label not in self.timeline_total[server]:
+            self.timeline_total[server][self.day_label] = 0
+        if server not in self.server_bps:
+            self.server_bps[server] = 0.0
+        if server not in self.article_stats_tried:
+            self.article_stats_tried[server] = {}
+            self.article_stats_failed[server] = {}
+        if self.day_label not in self.article_stats_tried[server]:
+            self.article_stats_tried[server][self.day_label] = 0
+            self.article_stats_failed[server][self.day_label] = 0
+
+    def update(self, server: Optional[str] = None, amount: int = 0):
+        """Update counters for "server" with "amount" bytes"""
+        # Add amount to temporary storage
         if server:
-            if server not in self.cached_amount:
-                self.cached_amount[server] = 0
-                self.server_bps[server] = 0.0
             self.cached_amount[server] += amount
             self.sum_cached_amount += amount

-        # Wait at least 0.05 seconds between each full update
-        if not force_full_update and t - self.last_update < 0.05:
-            return
-
-        if t > self.end_of_day:
-            # current day passed. get new end of day
-            self.day_label = time.strftime("%Y-%m-%d")
-            self.day_total = {}
-            self.end_of_day = tomorrow(t) - 1.0
+        t = time.time()
+
+        if t > self.end_of_day:
+            # Current day passed, get new end of day
+            self.day_label = time.strftime("%Y-%m-%d")
+            self.end_of_day = tomorrow(t) - 1.0
+            self.day_total = {}

             # Check end of week and end of month
             if t > self.end_of_week:
                 self.week_total = {}
                 self.end_of_week = next_week(t) - 1.0

             if t > self.end_of_month:
                 self.month_total = {}
                 self.end_of_month = next_month(t) - 1.0

+            # Need to reset all counters
+            for server in sabnzbd.Downloader.servers[:]:
+                self.init_server_stats(server.id)
+
         # Add amounts that have been stored temporarily to statistics
         for srv in self.cached_amount:
-            cached_amount = self.cached_amount[srv]
-            if cached_amount:
-                self.cached_amount[srv] = 0
-                if srv not in self.day_total:
-                    self.day_total[srv] = 0
-                self.day_total[srv] += cached_amount
-
-                if srv not in self.week_total:
-                    self.week_total[srv] = 0
-                self.week_total[srv] += cached_amount
-
-                if srv not in self.month_total:
-                    self.month_total[srv] = 0
-                self.month_total[srv] += cached_amount
-
-                if srv not in self.grand_total:
-                    self.grand_total[srv] = 0
-                self.grand_total[srv] += cached_amount
-
-                if srv not in self.timeline_total:
-                    self.timeline_total[srv] = {}
-                if self.day_label not in self.timeline_total[srv]:
-                    self.timeline_total[srv][self.day_label] = 0
-                self.timeline_total[srv][self.day_label] += cached_amount
+            if self.cached_amount[srv]:
+                self.day_total[srv] += self.cached_amount[srv]
+                self.week_total[srv] += self.cached_amount[srv]
+                self.month_total[srv] += self.cached_amount[srv]
+                self.grand_total[srv] += self.cached_amount[srv]
+                self.timeline_total[srv][self.day_label] += self.cached_amount[srv]

+                # Update server bps
                 try:
-                    # Update server bps
-                    self.server_bps[srv] = (self.server_bps[srv] * (self.last_update - self.start_time) + cached_amount) / (
-                        t - self.start_time
-                    )
-                except:
+                    self.server_bps[srv] = (
+                        self.server_bps[srv] * (self.last_update - self.start_time) + self.cached_amount[srv]
+                    ) / (t - self.start_time)
+                except ZeroDivisionError:
                     self.server_bps[srv] = 0.0

+                # Reset for next time
+                self.cached_amount[srv] = 0
+
         # Quota check
         if self.have_quota and self.quota_enabled:
             self.left -= self.sum_cached_amount
@@ -278,14 +321,13 @@ class BPSMeter:
             self.bps = (self.bps * (self.last_update - self.start_time) + self.sum_cached_amount) / (
                 t - self.start_time
             )
-        except:
+        except ZeroDivisionError:
             self.bps = 0.0
-            self.server_bps = {}

-        self.sum_cached_amount = 0
         self.last_update = t

         check_time = t - 5.0
+        self.sum_cached_amount = 0

         if self.start_time < check_time:
             self.start_time = check_time
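The `self.bps` assignment above maintains a time-weighted running average: the old rate is re-weighted by the span it covered, the newly cached bytes are added, and the sum is divided by the full window length, with the window start clamped to the last 5 seconds. A self-contained sketch of the same arithmetic (illustration only, not the full meter):

```python
import time

class RateMeter:
    """Minimal sketch of the time-weighted average used for self.bps."""

    def __init__(self):
        self.start_time = self.last_update = time.time()
        self.bps = 0.0

    def update(self, new_bytes: int):
        t = time.time()
        try:
            # Old average re-weighted by the time it covered, plus the new
            # bytes, spread over the whole window from start_time to now
            self.bps = (self.bps * (self.last_update - self.start_time) + new_bytes) / (t - self.start_time)
        except ZeroDivisionError:
            self.bps = 0.0
        self.last_update = t
        # Clamp the window to the last 5 seconds, as the meter does
        self.start_time = max(self.start_time, t - 5.0)

meter = RateMeter()
time.sleep(0.1)
meter.update(1024 * 1024)
print("%.0f bytes/sec" % meter.bps)
```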
@@ -304,20 +346,10 @@ class BPSMeter:

     def register_server_article_tried(self, server: str):
         """Keep track how many articles were tried for each server"""
-        if server not in self.article_stats_tried:
-            self.article_stats_tried[server] = {}
-            self.article_stats_failed[server] = {}
-        if self.day_label not in self.article_stats_tried[server]:
-            self.article_stats_tried[server][self.day_label] = 0
-            self.article_stats_failed[server][self.day_label] = 0
-
-        # Update the counters
         self.article_stats_tried[server][self.day_label] += 1

     def register_server_article_failed(self, server: str):
         """Keep track how many articles failed for each server"""
+        # This function is always called after the one above,
+        # so we can skip the check if the keys in the dict exist
         self.article_stats_failed[server][self.day_label] += 1

     def reset(self):
@@ -325,8 +357,11 @@ class BPSMeter:
         self.start_time = t
         self.log_time = t
         self.last_update = t

+        # Reset general BPS and the ones for all servers
         self.bps = 0.0
-        self.server_bps = {}
+        for server in self.server_bps:
+            self.server_bps[server] = 0.0

     def add_empty_time(self):
         # Extra zeros, but never more than the maximum!
@@ -375,6 +410,7 @@ class BPSMeter:
             del self.article_stats_tried[server]
         if server in self.article_stats_failed:
             del self.article_stats_failed[server]
+        self.init_server_stats(server)
         self.save()

     def get_bps_list(self):
@@ -526,11 +562,6 @@ class BPSMeter:
         if cfg.quota_resume() and sabnzbd.Downloader.paused:
             sabnzbd.Downloader.resume()

-    def midnight(self):
-        """Midnight action: dummy update for all servers"""
-        for server in self.day_total.keys():
-            self.update(server)
-

 def quota_handler():
     """To be called from scheduler"""
@@ -283,7 +283,6 @@ keep_awake = OptionBool("misc", "keep_awake", True)
 win_menu = OptionBool("misc", "win_menu", True)
 allow_incomplete_nzb = OptionBool("misc", "allow_incomplete_nzb", False)
 enable_broadcast = OptionBool("misc", "enable_broadcast", True)
-max_art_opt = OptionBool("misc", "max_art_opt", False)
 ipv6_hosting = OptionBool("misc", "ipv6_hosting", False)
 fixed_ports = OptionBool("misc", "fixed_ports", False)
 api_warnings = OptionBool("misc", "api_warnings", True, protect=True)
@@ -163,7 +163,7 @@ def deobfuscate_list(filelist, usefulname):
         if os.path.getsize(file) < MIN_FILE_SIZE:
             # too small to care
             continue
-        _, ext = os.path.splitext(file)
+        ext = get_ext(file)
         if ext in extcounter:
             extcounter[ext] += 1
         else:
@@ -208,5 +208,7 @@ def deobfuscate_list(filelist, usefulname):
                 logging.info("Deobfuscate renaming %s to %s", otherfile, new_name)
                 # Rename and make sure the new filename is unique
                 renamer(otherfile, new_name)
+            else:
+                logging.debug("%s excluded from deobfuscation based on size, extension or non-obfuscation", filename)
     else:
         logging.info("No qualifying files found to deobfuscate")
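`deobfuscate_list` above counts how often each extension occurs in a job so the dominant one can guide renaming. A hedged sketch of that counting idea, using the standard library rather than SABnzbd's own `get_ext`:

```python
import os
from collections import Counter

def dominant_extension(filelist: list) -> str:
    """Return the most common extension in a job's file list.

    Illustration only: SABnzbd's deobfuscation also filters tiny files
    and applies further heuristics before renaming anything.
    """
    counts = Counter(os.path.splitext(name)[1].lower() for name in filelist)
    ext, _ = counts.most_common(1)[0]
    return ext

print(dominant_extension(["a7f3.mkv", "b2c1.mkv", "readme.nfo"]))  # ".mkv"
```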
@@ -50,11 +50,50 @@ _PENALTY_PERM = 10  # Permanent error, like bad username/password
 _PENALTY_SHORT = 1  # Minimal penalty when no_penalties is set
 _PENALTY_VERYSHORT = 0.1  # Error 400 without cause clues

+# Wait this many seconds between checking idle servers for new articles or busy threads for timeout
+_SERVER_CHECK_DELAY = 0.5
+# Wait this many seconds between updates of the BPSMeter
+_BPSMETER_UPDATE_DELAY = 0.05
+
 TIMER_LOCK = RLock()


 class Server:
+    # Pre-define attributes to save memory and improve get/set performance
+    __slots__ = (
+        "id",
+        "newid",
+        "restart",
+        "displayname",
+        "host",
+        "port",
+        "timeout",
+        "threads",
+        "priority",
+        "ssl",
+        "ssl_verify",
+        "ssl_ciphers",
+        "optional",
+        "retention",
+        "send_group",
+        "username",
+        "password",
+        "busy_threads",
+        "next_busy_threads_check",
+        "idle_threads",
+        "next_article_search",
+        "active",
+        "bad_cons",
+        "errormsg",
+        "warning",
+        "info",
+        "ssl_info",
+        "request",
+        "have_body",
+        "have_stat",
+        "article_queue",
+    )
+
     def __init__(
         self,
         server_id,
@@ -94,6 +133,7 @@ class Server:
         self.password: Optional[str] = password

         self.busy_threads: List[NewsWrapper] = []
+        self.next_busy_threads_check: float = 0
         self.idle_threads: List[NewsWrapper] = []
         self.next_article_search: float = 0
         self.active: bool = True
@@ -105,10 +145,15 @@ class Server:
         self.request: bool = False  # True if a getaddrinfo() request is pending
         self.have_body: bool = True  # Assume server has "BODY", until proven otherwise
         self.have_stat: bool = True  # Assume server has "STAT", until proven otherwise
+        self.article_queue: List[sabnzbd.nzbstuff.Article] = []

         # Initialize threads
         for i in range(threads):
             self.idle_threads.append(NewsWrapper(self, i + 1))

+        # Tell the BPSMeter about this server
+        sabnzbd.BPSMeter.init_server_stats(self.id)
+
     @property
     def hostip(self) -> str:
         """In case a server still has active connections, we use the same IP again
@@ -146,6 +191,11 @@ class Server:
         logging.debug("%s: No successful IP connection was possible", self.host)
         return ip

+    def deactivate(self):
+        """Deactivate server and reset queued articles"""
+        self.active = False
+        self.reset_article_queue()
+
     def stop(self):
         """Remove all connections from server"""
         for nw in self.idle_threads:
@@ -162,6 +212,12 @@ class Server:
         self.request = True
         Thread(target=self._request_info_internal).start()

+    def reset_article_queue(self):
+        logging.debug("Resetting article queue for %s", self)
+        for article in self.article_queue:
+            sabnzbd.NzbQueue.reset_try_lists(article, remove_fetcher_from_trylist=False)
+        self.article_queue = []
+
     def _request_info_internal(self):
         """Async attempt to run getaddrinfo() for specified server"""
         logging.debug("Retrieving server address information for %s", self.host)
@@ -180,6 +236,24 @@
 class Downloader(Thread):
     """Singleton Downloader Thread"""

+    # Improves get/set performance, even though it's inherited from Thread
+    # Due to the huge number of get-calls in run(), it can actually make a difference
+    __slots__ = (
+        "paused",
+        "bandwidth_limit",
+        "bandwidth_perc",
+        "can_be_slowed",
+        "can_be_slowed_timer",
+        "sleep_time",
+        "paused_for_postproc",
+        "shutdown",
+        "server_restarts",
+        "force_disconnect",
+        "read_fds",
+        "servers",
+        "timers",
+    )
+
     def __init__(self, paused=False):
         super().__init__()
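Both `Server` and `Downloader` now pre-declare their attributes with `__slots__`. Beyond the memory saving, slotted attribute access skips the per-instance `__dict__` lookup, which is what the comment above points at for the hot `run()` loop. A quick, self-contained way to see the effect (numbers vary by machine and interpreter):

```python
import timeit

class WithDict:
    def __init__(self):
        self.value = 0

class WithSlots:
    __slots__ = ("value",)

    def __init__(self):
        self.value = 0

d, s = WithDict(), WithSlots()
# Attribute access in a tight loop, similar in spirit to Downloader.run()
print("dict :", timeit.timeit(lambda: d.value, number=2_000_000))
print("slots:", timeit.timeit(lambda: s.value, number=2_000_000))
```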
@@ -214,8 +288,6 @@ class Downloader(Thread):
         self.read_fds: Dict[int, NewsWrapper] = {}

         self.servers: List[Server] = []
-        self.server_dict: Dict[str, Server] = {}  # For faster lookups, but is not updated later!
-        self.server_nr: int = 0
         self.timers: Dict[str, List[float]] = {}

         for server in config.get_servers():
@@ -256,32 +328,33 @@ class Downloader(Thread):
                 create = False
                 server.newid = newserver
                 server.restart = True
+                server.reset_article_queue()
                 self.server_restarts += 1
                 break

         if create and enabled and host and port and threads:
-            server = Server(
-                newserver,
-                displayname,
-                host,
-                port,
-                timeout,
-                threads,
-                priority,
-                ssl,
-                ssl_verify,
-                ssl_ciphers,
-                send_group,
-                username,
-                password,
-                optional,
-                retention,
+            self.servers.append(
+                Server(
+                    newserver,
+                    displayname,
+                    host,
+                    port,
+                    timeout,
+                    threads,
+                    priority,
+                    ssl,
+                    ssl_verify,
+                    ssl_ciphers,
+                    send_group,
+                    username,
+                    password,
+                    optional,
+                    retention,
+                )
             )
-            self.servers.append(server)
-            self.server_dict[newserver] = server

-        # Update server-count
-        self.server_nr = len(self.servers)
+        # Sort the servers for performance
+        self.servers.sort(key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))

     def add_socket(self, fileno: int, nw: NewsWrapper):
         """Add a socket ready to be used to the list to be watched"""
@@ -407,22 +480,21 @@ class Downloader(Thread):

                 # Not fully the same as the code below for optional servers
                 server.bad_cons = 0
-                server.active = False
+                server.deactivate()
                 self.plan_server(server, _PENALTY_TIMEOUT)

             # Optional and active server had too many problems.
             # Disable it now and send a re-enable plan to the scheduler
             if server.optional and server.active and (server.bad_cons / server.threads) > 3:
-                # Deactivate server
                 server.bad_cons = 0
-                server.active = False
+                server.deactivate()
                 logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT)
                 self.plan_server(server, _PENALTY_TIMEOUT)

                 # Remove all connections to server
                 for nw in server.idle_threads + server.busy_threads:
-                    self.__reset_nw(
-                        nw, "forcing disconnect", warn=False, wait=False, count_article_try=False, send_quit=False
-                    )
+                    self.__reset_nw(nw, "forcing disconnect", warn=False, wait=False, retry_article=False, send_quit=False)

                 # Make sure server address resolution is refreshed
                 server.info = None
@@ -467,7 +539,9 @@ class Downloader(Thread):
         logging.debug("SSL verification test: %s", sabnzbd.CERTIFICATE_VALIDATION)

         # Kick BPS-Meter to check quota
-        sabnzbd.BPSMeter.update()
+        BPSMeter = sabnzbd.BPSMeter
+        BPSMeter.update()
+        next_bpsmeter_update = 0

         # Check server expiration dates
         check_server_expiration()
@@ -484,15 +558,17 @@ class Downloader(Thread):
                 if not server.busy_threads and server.next_article_search > now:
                     continue

-                for nw in server.busy_threads[:]:
-                    if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
-                        if nw.nntp and nw.nntp.error_msg:
-                            # Already showed error
-                            self.__reset_nw(nw)
-                        else:
-                            self.__reset_nw(nw, "timed out", warn=True)
-                        server.bad_cons += 1
-                        self.maybe_block_server(server)
+                if server.next_busy_threads_check < now:
+                    server.next_busy_threads_check = now + _SERVER_CHECK_DELAY
+                    for nw in server.busy_threads[:]:
+                        if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
+                            if nw.nntp and nw.nntp.error_msg:
+                                # Already showed error
+                                self.__reset_nw(nw)
+                            else:
+                                self.__reset_nw(nw, "timed out", warn=True)
+                            server.bad_cons += 1
+                            self.maybe_block_server(server)

                 if server.restart:
                     if not server.busy_threads:
@@ -510,7 +586,6 @@ class Downloader(Thread):

                 if (
                     not server.idle_threads
                     or server.restart
                     or self.is_paused()
                     or self.shutdown
                     or self.paused_for_postproc
@@ -532,20 +607,28 @@ class Downloader(Thread):
|
||||
server.request_info()
|
||||
break
|
||||
|
||||
article = sabnzbd.NzbQueue.get_article(server, self.servers)
|
||||
|
||||
if not article:
|
||||
# Skip this server for 0.5 second
|
||||
server.next_article_search = now + 0.5
|
||||
break
|
||||
|
||||
if server.retention and article.nzf.nzo.avg_stamp < now - server.retention:
|
||||
# Let's get rid of all the articles for this server at once
|
||||
logging.info("Job %s too old for %s, moving on", article.nzf.nzo.final_name, server.host)
|
||||
while article:
|
||||
self.decode(article, None)
|
||||
article = article.nzf.nzo.get_article(server, self.servers)
|
||||
break
|
||||
# Get article from pre-fetched ones or fetch new ones
|
||||
if server.article_queue:
|
||||
article = server.article_queue.pop(0)
|
||||
else:
|
||||
# Pre-fetch new articles
|
||||
server.article_queue = sabnzbd.NzbQueue.get_articles(
|
||||
server, self.servers, max(1, server.threads // 4)
|
||||
)
|
||||
if server.article_queue:
|
||||
article = server.article_queue.pop(0)
|
||||
# Mark expired articles as tried on this server
|
||||
if server.retention and article.nzf.nzo.avg_stamp < now - server.retention:
|
||||
self.decode(article, None)
|
||||
while server.article_queue:
|
||||
self.decode(server.article_queue.pop(), None)
|
||||
# Move to the next server, allowing the next server to already start
|
||||
# fetching the articles that were too old for this server
|
||||
break
|
||||
else:
|
||||
# Skip this server for a short time
|
||||
server.next_article_search = now + _SERVER_CHECK_DELAY
|
||||
break
|
||||
|
||||
server.idle_threads.remove(nw)
|
||||
server.busy_threads.append(nw)
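The article_queue introduced above trades lock traffic for batching: each server keeps a small local queue and only calls into the central queue when it runs dry, fetching roughly a quarter of its thread count at a time. A hedged sketch of the idea (the objects and get_articles call are stand-ins, not the real API):

    def next_article(server, queue, servers):
        # Refill the local per-server queue only when it is empty
        if not server.article_queue:
            server.article_queue = queue.get_articles(server, servers, max(1, server.threads // 4))
        return server.article_queue.pop(0) if server.article_queue else None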
@@ -573,18 +656,15 @@ class Downloader(Thread):

# Send goodbye if we have open socket
if nw.nntp:
    self.__reset_nw(
        nw,
        "forcing disconnect",
        wait=False,
        count_article_try=False,
        send_quit=True,
        nw, "forcing disconnect", wait=False, count_article_try=False, send_quit=True
    )
# Make sure server address resolution is refreshed
server.info = None
server.reset_article_queue()
self.force_disconnect = False

# Make sure we update the stats
sabnzbd.BPSMeter.update()
BPSMeter.update()

# Exit-point
if self.shutdown:

@@ -603,20 +683,20 @@ class Downloader(Thread):

# Need to initialize the check during first 20 seconds
if self.can_be_slowed is None or self.can_be_slowed_timer:
    # Wait for stable speed to start testing
    if not self.can_be_slowed_timer and sabnzbd.BPSMeter.get_stable_speed(timespan=10):
    if not self.can_be_slowed_timer and BPSMeter.get_stable_speed(timespan=10):
        self.can_be_slowed_timer = time.time()

    # Check 10 seconds after enabling slowdown
    if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10:
        # Now let's check if it was stable in the last 10 seconds
        self.can_be_slowed = sabnzbd.BPSMeter.get_stable_speed(timespan=10)
        self.can_be_slowed = BPSMeter.get_stable_speed(timespan=10)
        self.can_be_slowed_timer = 0
        logging.debug("Downloader-slowdown: %r", self.can_be_slowed)

else:
    read = []

    sabnzbd.BPSMeter.reset()
    BPSMeter.reset()

    time.sleep(1.0)

@@ -629,8 +709,11 @@ class Downloader(Thread):

):
    DOWNLOADER_CV.wait()

if now > next_bpsmeter_update:
    BPSMeter.update()
    next_bpsmeter_update = now + _BPSMETER_UPDATE_DELAY

if not read:
    sabnzbd.BPSMeter.update(force_full_update=False)
    continue

for selected in read:

@@ -644,7 +727,6 @@ class Downloader(Thread):

bytes_received, done, skip = (0, False, False)

if skip:
    sabnzbd.BPSMeter.update(force_full_update=False)
    continue

if bytes_received < 1:

@@ -653,22 +735,22 @@ class Downloader(Thread):

else:
    try:
        article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, bytes_received)
        article.nzf.nzo.update_download_stats(BPSMeter.bps, server.id, bytes_received)
    except AttributeError:
        # In case nzf has disappeared because the file was deleted before the update could happen
        pass

sabnzbd.BPSMeter.update(server.id, bytes_received, force_full_update=False)
if self.bandwidth_limit:
    if sabnzbd.BPSMeter.sum_cached_amount + sabnzbd.BPSMeter.bps > self.bandwidth_limit:
        sabnzbd.BPSMeter.update()
        while sabnzbd.BPSMeter.bps > self.bandwidth_limit:
            time.sleep(0.01)
            sabnzbd.BPSMeter.update()
BPSMeter.update(server.id, bytes_received)

if not done and nw.status_code != 222:
    if self.bandwidth_limit:
        if BPSMeter.bps + BPSMeter.sum_cached_amount > self.bandwidth_limit:
            BPSMeter.update()
            while BPSMeter.bps > self.bandwidth_limit:
                time.sleep(0.01)
                BPSMeter.update()
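The speed limit above is enforced cooperatively: when the measured rate plus the not-yet-flushed byte count exceeds the cap, the loop sleeps in 10 ms steps and re-measures until the rate drops under the limit again. A minimal standalone sketch, with bps_meter as a stand-in object:

    import time

    def throttle(bps_meter, bandwidth_limit):
        # Block in small steps until the measured speed is under the cap again
        while bps_meter.bps > bandwidth_limit:
            time.sleep(0.01)
            bps_meter.update()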
if nw.status_code != 222 and not done:
    if not nw.connected or nw.status_code == 480:
        done = False
        try:
            nw.finish_connect(nw.status_code)
            if sabnzbd.LOG_ALL:

@@ -693,7 +775,7 @@ class Downloader(Thread):

server.errormsg = errormsg
logging.warning(T("Too many connections to server %s"), server.host)
# Don't count this for the tries (max_art_tries) on this server
self.__reset_nw(nw, count_article_try=False, send_quit=True)
self.__reset_nw(nw, send_quit=True)
self.plan_server(server, _PENALTY_TOOMANY)
server.threads -= 1
elif ecode in (502, 481, 482) and clues_too_many_ip(msg):

@@ -744,11 +826,11 @@ class Downloader(Thread):

block = True
if block or (penalty and server.optional):
    if server.active:
        server.active = False
        server.deactivate()
        if penalty and (block or server.optional):
            self.plan_server(server, penalty)
    # Note that this will count towards the tries (max_art_tries) on this server!
    self.__reset_nw(nw, send_quit=True)
    # Note that the article is discarded for this server
    self.__reset_nw(nw, retry_article=False, send_quit=True)
    continue
except:
    logging.error(

@@ -758,7 +840,7 @@ class Downloader(Thread):

    nntp_to_msg(nw.data),
)
# No reset-warning needed, above logging is sufficient
self.__reset_nw(nw)
self.__reset_nw(nw, retry_article=False)

if nw.connected:
    logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)

@@ -769,7 +851,6 @@ class Downloader(Thread):

logging.debug("Article <%s> is present", article.article)

elif nw.status_code == 211:
    done = False
    logging.debug("group command ok -> %s", nntp_to_msg(nw.data))
    nw.group = nw.article.nzf.nzo.group
    nw.clear_data()

@@ -819,6 +900,7 @@ class Downloader(Thread):

warn: bool = False,
wait: bool = True,
count_article_try: bool = True,
retry_article: bool = True,
send_quit: bool = False,
):
# Some warnings are errors, and not added as server.warning

@@ -839,16 +921,23 @@ class Downloader(Thread):

if nw.article:
    # Only some errors should count towards the total tries for each server
    if (
        count_article_try
        and nw.article.tries > cfg.max_art_tries()
        and (nw.article.fetcher.optional or not cfg.max_art_opt())
    ):
    if count_article_try:
        nw.article.tries += 1

    # Do we discard, or try again for this server?
    if not retry_article or nw.article.tries > cfg.max_art_tries():
        # Too many tries on this server, consider article missing
        self.decode(nw.article, None)
        nw.article.tries = 0
    else:
        # Allow all servers to iterate over this nzo/nzf again
        sabnzbd.NzbQueue.reset_try_lists(nw.article)
        # Retry again with the same server
        logging.debug(
            "Re-adding article %s from %s to server %s",
            nw.article.article,
            nw.article.nzf.filename,
            nw.article.fetcher,
        )
        nw.article.fetcher.article_queue.append(nw.article)

# Reset connection object
nw.hard_reset(wait, send_quit=send_quit)
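The reworked bookkeeping above splits two decisions: count_article_try controls whether this failure adds to the per-server try counter, and retry_article controls whether the article goes back onto the same server's queue at all. A sketch of the resulting policy (max_tries assumed to be 3 for illustration):

    def handle_failed_article(article, count_article_try=True, retry_article=True, max_tries=3):
        if count_article_try:
            article.tries += 1
        if not retry_article or article.tries > max_tries:
            article.tries = 0
            return "give-up"   # corresponds to decode(article, None) above
        return "requeue"       # corresponds to fetcher.article_queue.append(...)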
@@ -73,7 +73,10 @@ def has_unwanted_extension(filename: str) -> bool:

    and extension not in sabnzbd.cfg.unwanted_extensions()
)
else:
    return bool(sabnzbd.cfg.unwanted_extensions_mode())
# Don't consider missing extensions unwanted to prevent indiscriminate blocking of
# obfuscated jobs in whitelist mode. If there is an extension but nothing listed as
# (un)wanted, the result only depends on the configured mode.
return bool(extension and sabnzbd.cfg.unwanted_extensions_mode())
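Spelled out, the decision table for has_unwanted_extension now is: in blacklist mode a listed extension is unwanted, in whitelist mode an unlisted extension is unwanted, and a missing extension is never unwanted. A standalone sketch, assuming mode 0 is blacklist and mode 1 is whitelist as in the tests further down:

    def is_unwanted(extension, mode, listed):
        if extension and listed:
            return (extension in listed) if mode == 0 else (extension not in listed)
        # No list configured: only whitelist mode flags files, and only those with an extension
        return bool(extension and mode == 1)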
def get_filename(path: str) -> str:

@@ -295,8 +298,10 @@ def sanitize_and_trim_path(path: str) -> str:

def sanitize_files(folder: Optional[str] = None, filelist: Optional[List[str]] = None) -> List[str]:
    """Sanitize each file in the folder or list of filepaths, return list of new names"""
    logging.info("Checking if any resulting filenames need to be sanitized")
    if not filelist:
    if folder:
        filelist = listdir_full(folder)
    else:
        filelist = filelist or []

    # Loop over all the files
    output_filelist = []

@@ -818,7 +818,7 @@ class NzoPage:

checked = True
active.append(
    {
        "filename": nzf.filename if nzf.filename else nzf.subject,
        "filename": nzf.filename,
        "mbleft": "%.2f" % (nzf.bytes_left / MEBI),
        "mb": "%.2f" % (nzf.bytes / MEBI),
        "size": to_units(nzf.bytes, "B"),

@@ -1365,7 +1365,6 @@ SPECIAL_BOOL_LIST = (

"empty_postproc",
"html_login",
"wait_for_dfolder",
"max_art_opt",
"enable_broadcast",
"warn_dupl_jobs",
"replace_illegal",

@@ -1853,7 +1852,7 @@ class ConfigRss:

cfg.rss_rate.set(kwargs.get("rss_rate"))
config.save_config()
sabnzbd.Scheduler.restart()
raise rssRaiser(self.__root, kwargs)
raise Raiser(self.__root)

@secured_expose(check_api_key=True, check_configlock=True)
def upd_rss_feed(self, **kwargs):

@@ -64,9 +64,6 @@ if sabnzbd.WIN32:

except ImportError:
    pass

if sabnzbd.DARWIN:
    from PyObjCTools import AppHelper


def time_format(fmt):
    """Return time-format string adjusted for 12/24 hour clock setting"""

@@ -263,37 +260,6 @@ def cat_convert(cat):

return None


def windows_variant():
    """Determine Windows variant
    Return vista_plus, x64
    """
    from win32api import GetVersionEx
    from win32con import VER_PLATFORM_WIN32_NT
    import winreg

    vista_plus = x64 = False
    maj, _minor, _buildno, plat, _csd = GetVersionEx()

    if plat == VER_PLATFORM_WIN32_NT:
        vista_plus = maj > 5
        if vista_plus:
            # Must be done the hard way, because the Python runtime lies to us.
            # This does *not* work:
            # return os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64'
            # because the Python runtime returns 'X86' even on an x64 system!
            key = winreg.OpenKey(
                winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment"
            )
            for n in range(winreg.QueryInfoKey(key)[1]):
                name, value, _val_type = winreg.EnumValue(key, n)
                if name == "PROCESSOR_ARCHITECTURE":
                    x64 = value.upper() == "AMD64"
                    break
            winreg.CloseKey(key)

    return vista_plus, x64


_SERVICE_KEY = "SYSTEM\\CurrentControlSet\\services\\"
_SERVICE_PARM = "CommandLine"

@@ -1009,8 +975,9 @@ def get_base_url(url: str) -> str:

def match_str(text: AnyStr, matches: Tuple[AnyStr, ...]) -> Optional[AnyStr]:
    """Return first matching element of list 'matches' in 'text', otherwise None"""
    text = text.lower()
    for match in matches:
        if match in text:
        if match.lower() in text:
            return match
    return None
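The fix here lowercases each candidate as well, so the comparison is case-insensitive even when the configured match strings contain capitals. A standalone copy with a usage example:

    def match_str(text, matches):
        text = text.lower()
        for match in matches:
            if match.lower() in text:
                return match
        return None

    match_str("Connection REFUSED by host", ("Refused", "timeout"))  # -> "Refused"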
@@ -55,6 +55,7 @@ class NewsWrapper:

"user_ok",
"pass_ok",
"force_login",
"status_code",
)

def __init__(self, server, thrdnum, block=False):

@@ -75,14 +76,7 @@ class NewsWrapper:

self.pass_ok: bool = False
self.force_login: bool = False
self.group: Optional[str] = None

@property
def status_code(self) -> Optional[int]:
    """Shorthand to get the code"""
    try:
        return int(self.data[0][:3])
    except:
        return None
self.status_code: Optional[int] = None

def init_connect(self):
    """Setup the connection in NNTP object"""

@@ -108,6 +102,7 @@ class NewsWrapper:

# Change to a sensible text
code = 481
self.data[0] = "%d %s" % (code, T("Authentication failed, check username/password."))
self.status_code = code
self.user_ok = True
self.pass_sent = True

@@ -124,7 +119,7 @@ class NewsWrapper:

elif not self.user_sent:
    command = utob("authinfo user %s\r\n" % self.server.username)
    self.nntp.sock.sendall(command)
    self.data = []
    self.clear_data()
    self.user_sent = True
elif not self.user_ok:
    if code == 381:

@@ -139,7 +134,7 @@ class NewsWrapper:

if self.user_ok and not self.pass_sent:
    command = utob("authinfo pass %s\r\n" % self.server.password)
    self.nntp.sock.sendall(command)
    self.data = []
    self.clear_data()
    self.pass_sent = True
elif self.user_ok and not self.pass_ok:
    if code != 281:

@@ -163,14 +158,14 @@ class NewsWrapper:

else:
    command = utob("ARTICLE <%s>\r\n" % self.article.article)
    self.nntp.sock.sendall(command)
    self.data = []
    self.clear_data()

def send_group(self, group: str):
    """Send the NNTP GROUP command"""
    self.timeout = time.time() + self.server.timeout
    command = utob("GROUP %s\r\n" % group)
    self.nntp.sock.sendall(command)
    self.data = []
    self.clear_data()

def recv_chunk(self, block: bool = False) -> Tuple[int, bool, bool]:
    """Receive data, return #bytes, done, skip"""

@@ -195,6 +190,12 @@ class NewsWrapper:

else:
    return 0, False, True

if not self.data:
    try:
        self.status_code = int(chunk[:3])
    except:
        self.status_code = None

# Append so we can do 1 join(), much faster than multiple!
self.data.append(chunk)
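Caching the status code this way replaces the old property, which re-parsed self.data[0] on every access. The first three bytes of the first chunk of an NNTP reply are the numeric status, so one parse per reply suffices. A standalone sketch:

    chunk = b"222 0 <some@article>\r\n"   # example NNTP reply
    try:
        status_code = int(chunk[:3])     # -> 222
    except ValueError:
        status_code = None               # malformed or empty reply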
@@ -221,6 +222,7 @@ class NewsWrapper:

def clear_data(self):
    """Clear the stored raw data"""
    self.data = []
    self.status_code = None

def hard_reset(self, wait: bool = True, send_quit: bool = True):
    """Destroy and restart"""

@@ -382,6 +384,7 @@ class NNTP:

msg = "Failed to connect: %s" % (str(error))
msg = "%s %s@%s:%s" % (msg, self.nw.thrdnum, self.host, self.nw.server.port)
self.error_msg = msg
self.nw.server.next_busy_threads_check = 0
logging.info(msg)
self.nw.server.warning = msg


@@ -690,6 +690,7 @@ class NzbQueue:

if remove_fetcher_from_trylist:
    article.remove_from_try_list(article.fetcher)
article.fetcher = None
article.tries = 0
article.nzf.reset_try_list()
article.nzf.nzo.reset_try_list()

@@ -702,7 +703,7 @@ class NzbQueue:

        return True
    return False

def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
    """Get next article for jobs in the queue
    Not locked for performance, since it only reads the queue
    """

@@ -718,12 +719,13 @@ class NzbQueue:

    or (nzo.avg_stamp + propagation_delay) < time.time()
):
    if not nzo.server_in_try_list(server):
        article = nzo.get_article(server, servers)
        if article:
            return article
        articles = nzo.get_articles(server, servers, fetch_limit)
        if articles:
            return articles
    # Stop after first job that wasn't paused/propagating/etc
    if self.__top_only:
        return
        return []
return []

def register_article(self, article: Article, success: bool = True):
    """Register the articles we tried

@@ -858,7 +860,10 @@ class NzbQueue:

def stop_idle_jobs(self):
    """Detect jobs that have zero files left and send them to post processing"""
    # Only check servers that are active
    nr_servers = len([server for server in sabnzbd.Downloader.servers[:] if server.active])
    empty = []

    for nzo in self.__nzo_list:
        if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
            logging.info("Found idle job %s", nzo.final_name)

@@ -866,10 +871,10 @@ class NzbQueue:

# Stall prevention by checking if all servers are in the trylist
# This is a CPU-cheaper alternative to prevent stalling
if len(nzo.try_list) == sabnzbd.Downloader.server_nr:
if len(nzo.try_list) >= nr_servers:
    # Maybe the NZF's need a reset too?
    for nzf in nzo.files:
        if len(nzf.try_list) == sabnzbd.Downloader.server_nr:
        if len(nzf.try_list) >= nr_servers:
            # We do not want to reset all article trylists, they are good
            logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
            nzf.reset_try_list()
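The stall check now counts only active servers: once servers get deactivated, a job's try-list can legitimately reach the historical server total, so comparing against the live count (and with >=) avoids jobs hanging forever. A sketch of the comparison, with plain lists standing in for the real objects:

    active_servers = [s for s in servers if s.active]
    if len(nzo_try_list) >= len(active_servers):
        # Every remaining server already refused this job: reset and retry
        nzo_try_list.clear()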
@@ -141,9 +141,9 @@ class TryList:

def __setstate__(self, servers_ids: List[str]):
    self.try_list = []
    for server_id in servers_ids:
        if server_id in sabnzbd.Downloader.server_dict:
            self.add_to_try_list(sabnzbd.Downloader.server_dict[server_id])
    for server in sabnzbd.Downloader.servers:
        if server.id in servers_ids:
            self.add_to_try_list(server)
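Restoring a pickled try-list has to map saved server ids back to live Server objects; both variants shown above do that, one through the server_dict lookup table registered during server setup and one by scanning the server list. A standalone sketch of the dict-based form (names assumed):

    server_dict = {server.id: server for server in servers}
    for server_id in saved_ids:
        if server_id in server_dict:
            try_list.append(server_dict[server_id])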
##############################################################################

@@ -251,7 +251,8 @@ class Article(TryList):

# Since we need a new server, this one can be listed as failed
sabnzbd.BPSMeter.register_server_article_failed(self.fetcher.id)
self.add_to_try_list(self.fetcher)
for server in sabnzbd.Downloader.servers:
# Servers-list could be modified during iteration, so we need a copy
for server in sabnzbd.Downloader.servers[:]:
    if server.active and not self.server_in_try_list(server):
        if server.priority >= self.fetcher.priority:
            self.tries = 0

@@ -303,7 +304,6 @@ class Article(TryList):

##############################################################################
NzbFileSaver = (
    "date",
    "subject",
    "filename",
    "filename_checked",
    "filepath",

@@ -337,7 +337,6 @@ class NzbFile(TryList):

super().__init__()

self.date: datetime.datetime = date
self.subject: str = subject
self.type: Optional[str] = None
self.filename: str = sanitize_filename(name_extractor(subject))
self.filename_checked = False

@@ -427,13 +426,17 @@ class NzbFile(TryList):

self.vol = vol
self.blocks = int_conv(blocks)

def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
    """Get next article to be downloaded"""
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
    """Get next articles to be downloaded"""
    articles = []
    for article in self.articles:
        article = article.get_article(server, servers)
        if article:
            return article
            articles.append(article)
            if len(articles) >= fetch_limit:
                return articles
    self.add_to_try_list(server)
    return articles
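The get_articles change threads a fetch_limit through the whole queue hierarchy, so one call can return a batch instead of a single article. The accumulation pattern is the same at every level; a minimal standalone sketch:

    def take_batch(items, can_use, fetch_limit):
        batch = []
        for item in items:
            if can_use(item):
                batch.append(item)
                if len(batch) >= fetch_limit:
                    break
        return batch

    take_batch(range(10), lambda n: n % 2 == 0, fetch_limit=3)  # -> [0, 2, 4]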
def reset_all_try_lists(self):
    """Clear all lists of visited servers"""

@@ -1012,10 +1015,9 @@ class NzbObject(TryList):

lparset = parset.lower()
for xnzf in self.files[:]:
    name = xnzf.filename or xnzf.subject
    # Move only when not current NZF and filename was extractable from subject
    if name:
        setname, vol, block = sabnzbd.par2file.analyse_par2(name)
    if xnzf.filename:
        setname, vol, block = sabnzbd.par2file.analyse_par2(xnzf.filename)
        # Don't postpone header-only-files, to extract all possible md5of16k
        if setname and block and matcher(lparset, setname.lower()):
            xnzf.set_par2(parset, vol, block)

@@ -1225,43 +1227,42 @@ class NzbObject(TryList):

fix_unix_encoding(wdir)

# Get a list of already present files, ignore folders
files = globber(wdir, "*.*")
existing_files = globber(wdir, "*.*")

# Substitute renamed files
renames = sabnzbd.load_data(RENAMES_FILE, self.admin_path, remove=True)
if renames:
    for name in renames:
        if name in files or renames[name] in files:
            if name in files:
                files.remove(name)
                files.append(renames[name])
        if name in existing_files or renames[name] in existing_files:
            if name in existing_files:
                existing_files.remove(name)
                existing_files.append(renames[name])
    self.renames = renames

# Looking for the longest name first minimizes the chance of a mismatch
files.sort(key=len)
existing_files.sort(key=len)

# The NZFs should be tried shortest first, to improve the chance of a proper match
nzfs = self.files[:]
nzfs.sort(key=lambda x: len(x.subject))
nzfs.sort(key=lambda x: len(x.filename))

# Flag files from NZB that already exist as finished
for filename in files[:]:
for existing_filename in existing_files[:]:
    for nzf in nzfs:
        subject = sanitize_filename(name_extractor(nzf.subject))
        if (nzf.filename == filename) or (subject == filename) or (filename in subject):
            logging.info("Existing file %s matched to file %s of %s", filename, nzf.filename, self.final_name)
            nzf.filename = filename
        if existing_filename in nzf.filename:
            logging.info("Matched file %s to %s of %s", existing_filename, nzf.filename, self.final_name)
            nzf.filename = existing_filename
            nzf.bytes_left = 0
            self.remove_nzf(nzf)
            nzfs.remove(nzf)
            files.remove(filename)
            existing_files.remove(existing_filename)

            # Set bytes correctly
            self.bytes_tried += nzf.bytes
            self.bytes_downloaded += nzf.bytes

            # Process par2 files
            filepath = os.path.join(wdir, filename)
            filepath = os.path.join(wdir, existing_filename)
            if sabnzbd.par2file.is_parfile(filepath):
                self.handle_par2(nzf, filepath)
                self.bytes_par2 += nzf.bytes

@@ -1269,16 +1270,16 @@ class NzbObject(TryList):

# Create an NZF for each remaining existing file
try:
    for filename in files:
    for existing_filename in existing_files:
        # Create NZO's using basic information
        filepath = os.path.join(wdir, filename)
        logging.info("Existing file %s added to %s", filename, self.final_name)
        filepath = os.path.join(wdir, existing_filename)
        logging.info("Existing file %s added to %s", existing_filename, self.final_name)
        tup = os.stat(filepath)
        tm = datetime.datetime.fromtimestamp(tup.st_mtime)
        nzf = NzbFile(tm, filename, [], tup.st_size, self)
        nzf = NzbFile(tm, existing_filename, [], tup.st_size, self)
        self.files.append(nzf)
        self.files_table[nzf.nzf_id] = nzf
        nzf.filename = filename
        nzf.filename = existing_filename
        self.remove_nzf(nzf)

        # Set bytes correctly

@@ -1528,8 +1529,7 @@ class NzbObject(TryList):

servers = config.get_servers()
server_names = sorted(
    servers,
    key=lambda svr: "%d%02d%s"
    % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()),
    key=lambda svr: "%02d%s" % (servers[svr].priority(), servers[svr].displayname().lower()),
)
msgs = [
    "%s=%sB" % (servers[server_name].displayname(), to_units(self.servercount[server_name]))

@@ -1581,22 +1581,25 @@ class NzbObject(TryList):

self.nzo_info[article_type] += 1
self.bad_articles += 1

def get_article(self, server: Server, servers: List[Server]) -> Optional[Article]:
    article = None
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
    articles = []
    nzf_remove_list = []

    # Did we go through all first-articles?
    if self.first_articles:
        for article_test in self.first_articles:
            article = article_test.get_article(server, servers)
            if article:
            if not article:
                break
            articles.append(article)
            if len(articles) >= fetch_limit:
                break

    # Move on to next ones
    if not article:
    if not articles:
        for nzf in self.files:
            if nzf.deleted:
                logging.debug("Skipping existing file %s", nzf.filename or nzf.subject)
                logging.debug("Skipping existing file %s", nzf.filename)
            else:
                # Don't try to get an article if server is in try_list of nzf
                if not nzf.server_in_try_list(server):

@@ -1614,8 +1617,8 @@ class NzbObject(TryList):

                else:
                    break

                article = nzf.get_article(server, servers)
                if article:
                articles = nzf.get_articles(server, servers, fetch_limit)
                if articles:
                    break

# Remove all files for which admin could not be read

@@ -1627,10 +1630,10 @@ class NzbObject(TryList):

if nzf_remove_list and not self.files:
    sabnzbd.NzbQueue.end_job(self)

if not article:
if not articles:
    # No articles for this server, block for next time
    self.add_to_try_list(server)
return article
return articles
@synchronized(NZO_LOCK)
def move_top_bulk(self, nzf_ids):

@@ -23,14 +23,17 @@ import logging

import os
import re
import struct
from typing import Dict, Optional, Tuple
from typing import Dict, Optional, Tuple, BinaryIO

from sabnzbd.constants import MEBI
from sabnzbd.encoding import correct_unknown_encoding

PROBABLY_PAR2_RE = re.compile(r"(.*)\.vol(\d*)[+\-](\d*)\.par2", re.I)
SCAN_LIMIT = 10 * MEBI
PAR_PKT_ID = b"PAR2\x00PKT"
PAR_MAIN_ID = b"PAR 2.0\x00Main\x00\x00\x00\x00"
PAR_FILE_ID = b"PAR 2.0\x00FileDesc"
PAR_CREATOR_ID = b"PAR 2.0\x00Creator"
PAR_CREATOR_ID = b"PAR 2.0\x00Creator\x00"
PAR_RECOVERY_ID = b"RecvSlic"


@@ -91,22 +94,34 @@ def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Dict[str, bytes]:

For a full description of the par2 specification, visit:
http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
"""
total_size = os.path.getsize(fname)
table = {}
duplicates16k = []
total_nr_files = None

try:
    with open(fname, "rb") as f:
        header = f.read(8)
        while header:
            name, filehash, hash16k = parse_par2_file_packet(f, header)
            if name:
                table[name] = filehash
                if hash16k not in md5of16k:
                    md5of16k[hash16k] = name
                elif md5of16k[hash16k] != name:
                    # Not unique and not already linked to this file
                    # Remove to avoid false-renames
                    duplicates16k.append(hash16k)
            if header == PAR_PKT_ID:
                name, filehash, hash16k, nr_files = parse_par2_packet(f)
                if name:
                    table[name] = filehash
                    if hash16k not in md5of16k:
                        md5of16k[hash16k] = name
                    elif md5of16k[hash16k] != name:
                        # Not unique and not already linked to this file
                        # Remove to avoid false-renames
                        duplicates16k.append(hash16k)

                # Store the number of files for later
                if nr_files:
                    total_nr_files = nr_files

                # On large files, we stop after seeing all the listings
                # On smaller files, we scan them fully to get the par2-creator
                if total_size > SCAN_LIMIT and len(table) == total_nr_files:
                    break

            header = f.read(8)

@@ -129,13 +144,18 @@ def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Dict[str, bytes]:

return table


def parse_par2_file_packet(f, header) -> Tuple[Optional[str], Optional[bytes], Optional[bytes]]:
    """Look up and analyze a FileDesc package"""
def parse_par2_packet(f: BinaryIO) -> Tuple[Optional[str], Optional[bytes], Optional[bytes], Optional[int]]:
    """Look up and analyze a PAR2 packet"""

    nothing = None, None, None
    filename, filehash, hash16k, nr_files = nothing = None, None, None, None

    if header != PAR_PKT_ID:
        return nothing
    # All packets start with a header before the body
    # 8  : PAR2\x00PKT
    # 8  : Length of the entire packet. Must be multiple of 4. (NB: Includes length of header.)
    # 16 : MD5 Hash of packet. Calculation starts at first byte of Recovery Set ID and ends at last byte of body.
    # 16 : Recovery Set ID.
    # 16 : Type of packet.
    # ?*4: Body of Packet. Must be a multiple of 4 bytes.

    # Length must be multiple of 4 and at least 20
    pack_len = struct.unpack("<Q", f.read(8))[0]

@@ -146,31 +166,37 @@ def parse_par2_file_packet(f, header) -> Tuple[Optional[str], Optional[bytes], O

md5sum = f.read(16)

# Read and check the data
# Subtract 32 because we already read these bytes of the header
data = f.read(pack_len - 32)
md5 = hashlib.md5()
md5.update(data)
if md5sum != md5.digest():
    return nothing

# The FileDesc packet looks like:
# 16 : "PAR 2.0\0FileDesc"
# 16 : FileId
# 16 : Hash for full file **
# 16 : Hash for first 16K
# 8  : File length
# xx : Name (multiple of 4, padded with \0 if needed) **
# See if it's any of the packets we care about
par2_packet_type = data[16:32]

# See if it's the right packet and get name + hash
for offset in range(0, pack_len, 8):
    if data[offset : offset + 16] == PAR_FILE_ID:
        filehash = data[offset + 32 : offset + 48]
        hash16k = data[offset + 48 : offset + 64]
        filename = correct_unknown_encoding(data[offset + 72 :].strip(b"\0"))
        return filename, filehash, hash16k
    elif data[offset : offset + 15] == PAR_CREATOR_ID:
        # From here until the end is the creator-text
        # Useful in case of bugs in the par2-creating software
        par2creator = data[offset + 16 :].strip(b"\0")  # Remove any trailing \0
        logging.debug("Par2-creator of %s is: %s", os.path.basename(f.name), correct_unknown_encoding(par2creator))
if par2_packet_type == PAR_FILE_ID:
    # The FileDesc packet looks like:
    # 16 : "PAR 2.0\0FileDesc"
    # 16 : FileId
    # 16 : Hash for full file
    # 16 : Hash for first 16K
    # 8  : File length
    # xx : Name (multiple of 4, padded with \0 if needed)
    filehash = data[48:64]
    hash16k = data[64:80]
    filename = correct_unknown_encoding(data[88:].strip(b"\0"))
elif par2_packet_type == PAR_CREATOR_ID:
    # From here until the end is the creator-text
    # Useful in case of bugs in the par2-creating software
    par2creator = data[32:].strip(b"\0")  # Remove any trailing \0
    logging.debug("Par2-creator of %s is: %s", os.path.basename(f.name), correct_unknown_encoding(par2creator))
elif par2_packet_type == PAR_MAIN_ID:
    # The Main packet looks like:
    # 16 : "PAR 2.0\0Main"
    # 8  : Slice size
    # 4  : Number of files in the recovery set
    nr_files = struct.unpack("<I", data[40:44])[0]

return nothing
return filename, filehash, hash16k, nr_files
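The rewritten parser reads each packet head-first: magic, packet length, packet hash, then the body whose bytes 16-32 name the packet type. That replaces the old byte-by-byte scan for known ids inside the body. A hedged sketch of just the framing, with field offsets as in the spec comments above and a hypothetical file name:

    import struct

    with open("example.par2", "rb") as f:              # hypothetical file
        magic = f.read(8)                              # b"PAR2\x00PKT"
        pack_len = struct.unpack("<Q", f.read(8))[0]   # total packet length, incl. header
        md5sum = f.read(16)                            # covers recovery-set id, type and body
        body = f.read(pack_len - 32)                   # the 32 header bytes were already read
        packet_type = body[16:32]                      # e.g. b"PAR 2.0\x00FileDesc"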
@@ -213,7 +213,7 @@ class Scheduler:

)

logging.info("Setting schedule for midnight BPS reset")
self.scheduler.add_daytime_task(sabnzbd.BPSMeter.midnight, "midnight_bps", DAILY_RANGE, None, (0, 0))
self.scheduler.add_daytime_task(sabnzbd.BPSMeter.update, "midnight_bps", DAILY_RANGE, None, (0, 0))

logging.info("Setting schedule for server expiration check")
self.scheduler.add_daytime_task(

@@ -38,8 +38,12 @@ def getcpu():

# OK, found. Remove unwanted spaces:
cputype = " ".join(cputype.split())
else:
    # Not found, so let's fall back to platform()
    cputype = platform.platform()
    try:
        # Not found, so let's fall back to platform()
        cputype = platform.platform()
    except:
        # Can fail on special platforms (like Snapcraft or embedded)
        pass

return cputype

@@ -5,5 +5,5 @@

# You MUST use double quotes (so " and not ')

__version__ = "3.3.0-develop"
__baseline__ = "unknown"
__version__ = "3.3.0"
__baseline__ = "7bb443678ac5c8394ead4ecdf76e7b57f4f4bd7a"

BIN tests/data/par2file/basic_16k.par2 (new file): binary file not shown.
@@ -258,6 +258,11 @@ class TestSanitizeFiles(ffs.TestCase):

self.fs.path_separator = "\\"
self.fs.is_windows_fs = True

def test_sanitize_files_input(self):
    assert [] == filesystem.sanitize_files(folder=None)
    assert [] == filesystem.sanitize_files(filelist=None)
    assert [] == filesystem.sanitize_files(folder=None, filelist=None)

@set_platform("win32")
@set_config({"sanitize_safe": True})
def test_sanitize_files(self):

@@ -1119,7 +1124,11 @@ class TestUnwantedExtensions:

@set_config({"unwanted_extensions_mode": 1, "unwanted_extensions": test_extensions})
def test_has_unwanted_extension_whitelist_mode(self):
    for filename, result in self.test_params:
        assert filesystem.has_unwanted_extension(filename) is not result
        if filesystem.get_ext(filename):
            assert filesystem.has_unwanted_extension(filename) is not result
        else:
            # missing extension is never considered unwanted
            assert filesystem.has_unwanted_extension(filename) is False

@set_config({"unwanted_extensions_mode": 0, "unwanted_extensions": ""})
def test_has_unwanted_extension_empty_blacklist(self):

@@ -1129,4 +1138,8 @@ class TestUnwantedExtensions:

@set_config({"unwanted_extensions_mode": 1, "unwanted_extensions": ""})
def test_has_unwanted_extension_empty_whitelist(self):
    for filename, result in self.test_params:
        assert filesystem.has_unwanted_extension(filename) is True
        if filesystem.get_ext(filename):
            assert filesystem.has_unwanted_extension(filename) is True
        else:
            # missing extension is never considered unwanted
            assert filesystem.has_unwanted_extension(filename) is False

@@ -474,7 +474,7 @@ class TestAddingNZBs:

nzb_basedir, nzb_basename = os.path.split(VAR.NZB_FILE)
nzb_size = os.stat(VAR.NZB_FILE).st_size
part_size = round(randint(20, 80) / 100 * nzb_size)
part_size = round(randint(40, 70) / 100 * nzb_size)
first_part = os.path.join(nzb_basedir, "part1_of_" + nzb_basename)
second_part = os.path.join(nzb_basedir, "part2_of_" + nzb_basename)

@@ -464,6 +464,9 @@ class TestMisc:

("2007::ffff:2021", "2007::ffff:2021"),
("12.34.56.78", "12.34.56.78"),
("foobar", "foobar"),
("0:0:0:0:0:ffff:8.8.4.4", "8.8.4.4"),
("0000:0000:0000:0000:0000:ffff:1.0.0.1", "1.0.0.1"),
("0000::0:ffff:1.1.1.1", "1.1.1.1"),
],
)
def test_strip_ipv4_mapped_notation(self, ip, result):

tests/test_par2file.py (new file, 67 lines)
@@ -0,0 +1,67 @@

#!/usr/bin/python3 -OO
# Copyright 2007-2021 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

"""
Testing SABnzbd par2 parsing
"""

from sabnzbd.par2file import *
from tests.testhelper import *

# TODO: Add testing for edge cases, such as non-unique md5of16k or broken par files


class TestPar2Parsing:
    def test_parse_par2_file(self, caplog):
        # To capture the par2-creator, we need to capture the logging
        with caplog.at_level(logging.DEBUG):
            # These files are all <16k, so the MD5 of the whole file is the same as the 16k one
            assert {"random.bin": b"\xbf\xe0\xe4\x10\xa2#\xf5\xbeN\x7f2\xe5\x9e\xdd\t\x03"} == parse_par2_file(
                os.path.join(SAB_DATA_DIR, "deobfuscate_filenames", "rename.par2"), {}
            )
            assert "Par2-creator of rename.par2 is: QuickPar 0.9" in caplog.text
            caplog.clear()

            assert {"frènch_german_demö.rar": b"C\t\x1d\xbd\xdf\x8c\xb5w \xcco\xbf~L)\xc2"} == parse_par2_file(
                os.path.join(SAB_DATA_DIR, "test_win_unicode", "frènch_german_demö.rar.vol0+1.par2"), {}
            )
            assert "Par2-creator of frènch_german_demö.rar.vol0+1.par2 is: QuickPar 0.9" in caplog.text
            caplog.clear()

            assert {
                "我喜欢编程.part5.rar": b"\x19\xe7\xb7\xb3\xbc\x17\xc4\xefo\x96*+x\x0c]M",
                "我喜欢编程.part6.rar": b"M\x8c.{\xae\x15\xb7\xa1\x8c\xc7\x1f\x8a\xb3^`\xd9",
                "我喜欢编程.part4.rar": b"\xb8D:r\xd8\x04\x98\xb3\xc2\x89\xed\xc1\x90\xe445",
                "我喜欢编程.part2.rar": b"aN#\x04*\x86\xd96|PoDV\xa6S\xa8",
                "我喜欢编程.part3.rar": b"\xc5\x1ep\xeb\x94\xa7\x12\xa1e\x8c\xc5\xda\xda\xae1 ",
                "我喜欢编程.part1.rar": b'_tJ\x15\x1a3;1\xaao\xa9n\n"\xa5p',
                "我喜欢编程.part7.rar": b"M\x1c\x14\x9b\xacY\x81\x8d\x82 VV\x81&\x8eH",
            } == parse_par2_file(os.path.join(SAB_DATA_DIR, "unicode_rar", "我喜欢编程.par2"), {})
            assert "Par2-creator of 我喜欢编程.par2 is: ParPar v0.3.2" in caplog.text
            caplog.clear()

    def test_parse_par2_file_16k(self, caplog):
        # Capture logging of the par2-creator
        with caplog.at_level(logging.DEBUG):
            # This file is 18k, so its MD5 of the first 16k is actually different
            md5of16k = {}
            assert {"rss_feed_test.xml": b"\xf8\x8f\x88\x91\xae{\x03\xc8\xad\xcb\xb4Y\xa0+\x06\xf6"} == parse_par2_file(
                os.path.join(SAB_DATA_DIR, "par2file", "basic_16k.par2"), md5of16k
            )
            assert md5of16k == {b"'ky\xd7\xd1\xd3wF\xed\x9c\xf7\x9b\x90\x93\x106": "rss_feed_test.xml"}
            assert "Par2-creator of basic_16k.par2 is: QuickPar 0.9" in caplog.text
            caplog.clear()