Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2025-12-24 08:08:37 -05:00)
Compare commits — 221 commits, bca9f3b753 through 6b11013c1a. The combined file changes follow.
.github/dependabot.yml (new file, 14 lines, vendored)
@@ -0,0 +1,14 @@
+version: 2
+updates:
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+  - package-ecosystem: "pip"
+    directory: "/builder"
+    schedule:
+      interval: "weekly"
+  - package-ecosystem: "pip"
+    directory: "/builder/osx"
+    schedule:
+      interval: "weekly"
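This new Dependabot configuration makes GitHub open weekly pull requests for outdated pip dependencies in the repository root, `builder/`, and `builder/osx/`. It pairs with the switch to exact `==` pins in the requirements files further down, which gives Dependabot concrete versions to bump.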
.github/workflows/build_release.yml (14 lines changed, vendored)
@@ -2,12 +2,14 @@ name: Build binaries and source distribution

 on: [push, pull_request]

+env:
+  AUTOMATION_GITHUB_TOKEN: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}

 jobs:
   build_windows:
     name: Build Windows binary
     runs-on: windows-latest
     env:
       AUTOMATION_GITHUB_TOKEN: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
     timeout-minutes: 15
     steps:
       - uses: actions/checkout@v2
       - name: Set up Python 3.10 (64bit)

@@ -65,15 +67,15 @@ jobs:
   build_macos:
     name: Build macOS binary
     runs-on: macos-11
     timeout-minutes: 15
     env:
       SIGNING_AUTH: ${{ secrets.SIGNING_AUTH }}
       NOTARIZATION_USER: ${{ secrets.NOTARIZATION_USER }}
       NOTARIZATION_PASS: ${{ secrets.NOTARIZATION_PASS }}
       AUTOMATION_GITHUB_TOKEN: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
       # We need the official Python, because the GA ones only support newer macOS versions
       # The deployment target is picked up by the Python build tools automatically
       # If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
-      PYTHON_VERSION: "3.10.1"
+      PYTHON_VERSION: "3.10.2"
       MACOSX_DEPLOYMENT_TARGET: "10.9"
       # We need to force compile for universal2 support
       CFLAGS: -arch arm64 -arch x86_64

@@ -108,8 +110,8 @@ jobs:
           pip3 install --upgrade -r requirements.txt --no-binary sabyenc3

           pip3 uninstall cryptography -y
-          pip3 download cryptography --platform macosx_10_10_universal2 --only-binary :all: --no-deps --dest .
-          pip3 install cryptography --no-cache-dir --no-index --find-links .
+          pip3 download -r builder/osx/requirements.txt --platform macosx_10_10_universal2 --only-binary :all: --no-deps --dest .
+          pip3 install -r builder/osx/requirements.txt --no-cache-dir --no-index --find-links .

           PYINSTALLER_COMPILE_BOOTLOADER=1 pip3 install --upgrade -r builder/requirements.txt --no-binary pyinstaller
       - name: Import macOS codesign certificates
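The macOS step above keeps the existing universal2 trick but generalizes it: instead of naming `cryptography` directly, it now reads `builder/osx/requirements.txt`. The two-step `pip3 download --platform macosx_10_10_universal2 --only-binary :all:` followed by `pip3 install --no-index --find-links .` forces pip to fetch a universal2 wheel and then install it offline, so it cannot fall back to an architecture-specific build.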
.github/workflows/integration_testing.yml (1 line changed, vendored)
@@ -13,6 +13,7 @@ jobs:
         python-version: ["3.7", "3.8", "3.9", "3.10"]
         os: [ubuntu-20.04]
         include:
+          # TODO: Update to 3.10 when all packages are available, currently lxml is missing
          - name: macOS
            os: macos-latest
            python-version: "3.10"
PKG-INFO (4 lines changed)
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 3.5.0RC1
-Summary: SABnzbd-3.5.0RC1
+Version: 3.5.3
+Summary: SABnzbd-3.5.3
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
README.mkd (29 lines changed)
@@ -1,6 +1,19 @@
-Release Notes - SABnzbd 3.5.0 Release Candidate 1
+Release Notes - SABnzbd 3.5.3
 =========================================================

+## Bugfix since 3.5.0
+- Prevent disk errors due to Direct Unpack being too aggressive.
+- URL's waiting to fetch get stuck indefinitely upon restart.
+
+## Changes and bugfixes since 3.5.0
+- Prevent permissions errors on systems that do not support them.
+- Small changes in file assembly and Direct Unpack processing.
+- Changes to the transition from download to active post-processing.
+- Malformed NZB files could result in a crash.
+- Prevent crash in Direct Unpack for obfuscated posts.
+- RSS feeds with HTML-characters in the name resulted in crashes.
+- macOS: failed to start on older macOS versions.
+
 ## Changes since 3.4.2
 - Removed Python 3.6 support.
 - SOCKS5 proxy support for all outgoing connections.

@@ -8,31 +21,33 @@ Release Notes - SABnzbd 3.5.0 Release Candidate 1
 - `Required` server option: in case of connection failures, the queue
   will be paused for a few minutes instead of skipping the server.
 - Added Special option to preserve paused state after a restart.
 - Show an estimated time-left indicator for repair and unpacking.
 - Require TLS version 1.2 or higher for SSL news server connections.
 - Setting custom ciphers forces the maximum TLS version to 1.2.
 - Print low-level Windows status error on `IOError`.
 - Reduced memory usage during and after parsing `.nzb` files.
 - Handle multiple passwords stored in NZB-file.
 - macOS/Linux: `Permissions` are only applied if any are set.
-- macOS/Windows: updated to Python 3.10.1.
+- macOS/Windows: updated to Python 3.10.2.
 - macOS: run native on M1 systems. However, included tools
   (`par2`, `unrar` and `7za`) still require Rosetta emulation.
 - Snap: updated to `core20` base and restore 7zip support.

 ## Bugfixes since 3.4.2
 - Global interface settings would not always be applied correctly.
 - Email notification setting was not shown correctly.
 - Improvements and fixes for `Defobfuscate final filenames`.
 - `Post-Process Only Verified Jobs` would not always work as intended.
 - Correctly detect too little disk space when unpacking 7zip's.
 - Improvements to handling of repair by MultiPar and par2cmdline.
 - HTML characters in configuration fields were shown incorrectly.
 - On Retry the number of downloaded bytes could exceed the total bytes.
 - `unrar` logging of Direct Unpack was not logged if it was aborted.
 - Windows: `portable.cmd` was not included in the release.
 - Windows: print low-level Windows error on `IOError`.

 ## Upgrade notices
 - The download statistics file `totals10.sab` is updated in 3.2.x
   version. If you downgrade to 3.1.x or lower, detailed download
   statistics will be lost.

 ## Known problems and solutions
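The two TLS bullets above ("Require TLS version 1.2 or higher" and "custom ciphers force the maximum TLS version to 1.2") correspond to standard-library `ssl` settings. A minimal sketch of that policy, not SABnzbd's actual code (the cipher string is a made-up example):

```python
import ssl

ctx = ssl.create_default_context()
# Refuse TLS 1.0/1.1 news servers outright
ctx.minimum_version = ssl.TLSVersion.TLSv1_2

custom_ciphers = "HIGH:!aNULL"  # hypothetical user-configured cipher list
if custom_ciphers:
    ctx.set_ciphers(custom_ciphers)
    # OpenSSL cipher strings do not cover TLS 1.3 suites, so a custom
    # cipher list only behaves predictably if the connection stays at 1.2
    ctx.maximum_version = ssl.TLSVersion.TLSv1_2
```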
builder/osx/requirements.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
+# Special requirements for macOS universal2 binary release
+# This way dependabot can auto-update them
+cryptography==36.0.1
builder/package.py
@@ -20,10 +20,14 @@ import platform
 import re
 import sys
 import os
 import tempfile
 import time
 import shutil
 import subprocess
 import tarfile
 import urllib.request
 import urllib.error
 import configobj
 import pkginfo
 import github

@@ -114,6 +118,62 @@ def patch_version_file(release_name):
         ver.write(version_file)


+def test_sab_binary(binary_path: str):
+    """Wrapper to have a simple start-up test for the binary"""
+    with tempfile.TemporaryDirectory() as config_dir:
+        sabnzbd_process = subprocess.Popen(
+            [binary_path, "--browser", "0", "--logging", "2", "--config", config_dir],
+            text=True,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        )
+
+        # Wait for SAB to respond
+        base_url = "http://127.0.0.1:8080/"
+        for _ in range(10):
+            try:
+                urllib.request.urlopen(base_url, timeout=1).read()
+                break
+            except urllib.error.URLError:
+                time.sleep(1)
+        else:
+            raise urllib.error.URLError("Could not connect to SABnzbd")
+
+        # Open a number of API calls and pages, to see if we are really up
+        pages_to_test = [
+            "",
+            "wizard",
+            "config",
+            "config/server",
+            "config/categories",
+            "config/scheduling",
+            "config/rss",
+            "config/general",
+            "config/folders",
+            "config/switches",
+            "config/sorting",
+            "config/notify",
+            "config/special",
+            "api?mode=version",
+        ]
+        for url in pages_to_test:
+            print("Testing: %s%s" % (base_url, url))
+            if b"500 Internal Server Error" in urllib.request.urlopen(base_url + url, timeout=1).read():
+                raise RuntimeError("Crash in %s" % url)
+
+        # Parse API-key so we can do a graceful shutdown
+        sab_config = configobj.ConfigObj(os.path.join(config_dir, "sabnzbd.ini"))
+        urllib.request.urlopen(base_url + "shutdown/?apikey=" + sab_config["misc"]["api_key"], timeout=10)
+        sabnzbd_process.wait()
+
+        # Print logs for verification
+        with open(os.path.join(config_dir, "logs", "sabnzbd.log"), "r") as log_file:
+            print(log_file.read())
+
+        # So we have time to print the file before the directory is removed
+        time.sleep(1)
+
+
 if __name__ == "__main__":
     # Was any option supplied?
     if len(sys.argv) < 2:
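The new `test_sab_binary()` helper is a smoke test for a freshly built binary: it launches it against a throw-away config directory, polls `http://127.0.0.1:8080/` for up to ten seconds, fetches every configuration page plus `api?mode=version` and fails on any `500 Internal Server Error`, then reads the generated `sabnzbd.ini` for the API key so it can request a graceful shutdown and print the log.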
@@ -226,11 +286,14 @@ if __name__ == "__main__":
|
||||
)
|
||||
|
||||
# Rename the folder
|
||||
os.rename("dist/SABnzbd", RELEASE_NAME)
|
||||
shutil.copytree("dist/SABnzbd", RELEASE_NAME)
|
||||
|
||||
# Create the archive
|
||||
run_external_command(["win/7zip/7za.exe", "a", RELEASE_BINARY, RELEASE_NAME])
|
||||
|
||||
# Test the release, as the very last step to not mess with any release code
|
||||
test_sab_binary("dist/SABnzbd/SABnzbd.exe")
|
||||
|
||||
if "app" in sys.argv:
|
||||
# Must be run on macOS
|
||||
if sys.platform != "darwin":
|
||||
@@ -296,50 +359,24 @@

         # Upload to Apple
         print("Sending zip to Apple notarization service")
-        upload_process = run_external_command(
+        upload_result = run_external_command(
             [
                 "xcrun",
-                "altool",
-                "--notarize-app",
-                "-t",
-                "osx",
-                "-f",
+                "notarytool",
+                "submit",
                 notarization_zip,
-                "--primary-bundle-id",
-                "org.sabnzbd.sabnzbd",
-                "-u",
+                "--apple-id",
                 notarization_user,
-                "-p",
+                "--team-id",
+                authority,
+                "--password",
                 notarization_pass,
+                "--wait",
             ],
         )

-        # Extract the notarization ID
-        m = re.match(".*RequestUUID = (.*?)\n", upload_process, re.S)
-        if not m:
-            raise RuntimeError("No UUID created")
-        uuid = m.group(1)
-
-        print("Checking notarization of UUID: %s (every 30 seconds)" % uuid)
-        notarization_in_progress = True
-        while notarization_in_progress:
-            time.sleep(30)
-            check_status = run_external_command(
-                [
-                    "xcrun",
-                    "altool",
-                    "--notarization-info",
-                    uuid,
-                    "-u",
-                    notarization_user,
-                    "-p",
-                    notarization_pass,
-                ],
-            )
-            notarization_in_progress = "Status: in progress" in check_status
-
-        # Check if success
-        if "Status: success" not in check_status:
+        if "status: accepted" not in upload_result.lower():
             raise RuntimeError("Failed to notarize..")

         # Staple the notarization!

@@ -352,6 +389,9 @@
     else:
         print("Signing skipped, missing SIGNING_AUTH.")

+    # Test the release, as the very last step to not mess with any release code
+    test_sab_binary("dist/SABnzbd.app/Contents/MacOS/SABnzbd")
+
 if "source" in sys.argv:
     # Prepare Source distribution package.
     # We assume the sources are freshly cloned from the repo
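The notarization rewrite drops the two-phase `altool` flow, where a `RequestUUID` had to be parsed out of the upload output and `--notarization-info` polled every 30 seconds, in favor of `notarytool submit --wait`, which blocks until Apple answers. Success checking collapses to a single case-insensitive test for `status: accepted` in the command output.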
builder/requirements.txt
@@ -1,9 +1,18 @@
 # Basic build requirements
-pyinstaller>=4.8
-setuptools
-pkginfo
+# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
+pyinstaller==4.8
+pyinstaller-hooks-contrib==2022.0
+altgraph==0.17.2
+wrapt==1.13.3
+setuptools==60.6.0
+pkginfo==1.8.2
+PyGithub==1.55
+charset-normalizer==2.0.11
 certifi
-pygithub

-# For the OSX build specific
-dmgbuild; sys_platform == 'darwin'
+# For the macOS build
+dmgbuild==1.5.2; sys_platform == 'darwin'
+mac-alias==2.2.0; sys_platform == 'darwin'
+macholib==1.15.2; sys_platform == 'darwin'
+ds-store==1.3.0; sys_platform == 'darwin'
+PyNaCl==1.5.0; sys_platform == 'darwin'
interfaces/Config/templates/config_rss.tmpl
@@ -53,7 +53,7 @@
             </td>
             <td class="title">
                 <a href="?feed=$rss[$feed_item]['link']" class="subscription-title path feed <!--#if int($rss[$feed_item]['enable']) != 0 then 'feed_enabled' else 'feed_disabled'#-->">
-                    $feed_item
+                    $feed_item_html
                 </a>
             </td>
             <td class="controls">

@@ -102,7 +102,7 @@
         </div>
         <!--#end if#-->
         <!--#if $active_feed#-->
-        <!--#set $feed = $active_feed#-->
+        <!--#set $feed = html.unescape($active_feed)#-->
         <div class="section rss-section">
             <div class="padTable">
                 <a class="main-helplink" href="$helpuri$help_uri" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a>

@@ -113,12 +113,12 @@
         <!--#if $error#-->
         <div class="alert alert-danger">
             <span class="glyphicon glyphicon-exclamation-sign"></span>
-            <!--#echo html.escape($error)#-->
+            $error
         </div>
         <!--#end if#-->
         <form action="upd_rss_feed" method="post">
             <input type="hidden" name="apikey" value="$apikey" />
-            <input type="hidden" name="feed" value="$feed" />
+            <input type="hidden" name="feed" value="$active_feed" />
             <input type="hidden" name="uri" value="$rss[$feed]['uris']" />
             <table class="catTable">
                 <thead>

@@ -210,7 +210,7 @@
         <form action="upd_rss_filter" method="post">
             <input type="hidden" name="apikey" value="$apikey" />
             <input type="hidden" name="index" value="$rss[$feed]['filtercount']" />
-            <input type="hidden" name="feed" value="$feed" />
+            <input type="hidden" name="feed" value="$active_feed" />
             <table class="catTable">
                 <tbody>
                     <tr>

@@ -286,7 +286,7 @@
         <form action="upd_rss_filter" method="post" autocomplete="off">
             <input type="hidden" name="apikey" value="$apikey" />
             <input type="hidden" name="index" value="$fnum" />
-            <input type="hidden" name="feed" value="$feed" />
+            <input type="hidden" name="feed" value="$active_feed" />
             <table class="catTable">
                 <tbody>
                     <tr class="<!--#if $odd then " alt " else " "#--> <!--#if $filter[3]!="A" and $filter[3]!="S" then 'disabled_options_rule' else ""#-->">

@@ -302,8 +302,8 @@
             <option value="M" <!--#if $filter[3]=="M" then 'selected="selected"' else ""#-->> $T('rss-must')</option>
             <option value="R" <!--#if $filter[3]=="R" then 'selected="selected"' else ""#-->> $T('rss-reject')</option>
             <option value="C" <!--#if $filter[3]=="C" then 'selected="selected"' else ""#-->> $T('rss-mustcat')</option>
-            <option value=">" <!--#if $filter[3]==">" then 'selected="selected"' else ""#-->> $T('rss-atleast')</option>
-            <option value="<" <!--#if $filter[3]=="<" then 'selected="selected"' else ""#-->> $T('rss-atmost')</option>
+            <option value=">" <!--#if $filter[3]==">" then 'selected="selected"' else ""#-->> $T('rss-atleast')</option>
+            <option value="<" <!--#if $filter[3]=="<" then 'selected="selected"' else ""#-->> $T('rss-atmost')</option>
             <option value="F" <!--#if $filter[3]=="F" then 'selected="selected"' else ""#-->> $T('rss-from')</option>
             <option value="S" <!--#if $filter[3]=="S" then 'selected="selected"' else ""#-->> $T('rss-from-show') ($T('rss-accept'))</option>
         </select>

@@ -363,13 +363,13 @@
         <!--#end for#-->
         <form action="download_rss_feed" method="post">
             <input type="hidden" name="apikey" value="$apikey" />
-            <input type="hidden" name="feed" value="$feed" />
+            <input type="hidden" name="feed" value="$active_feed" />
             <div class="padding">
-                <button type="button" class="btn btn-default testFeed" rel="$feed"><span class="glyphicon glyphicon-sort"></span> $T('button-preFeed')</button>
+                <button type="button" class="btn btn-default testFeed" rel="$active_feed"><span class="glyphicon glyphicon-sort"></span> $T('button-preFeed')</button>
                 <button type="submit" class="btn btn-default Save"><span class="glyphicon glyphicon-forward"></span> $T('button-forceFeed')</button>
                 <button type="button" class="btn btn-default cleanFeed"><span class="glyphicon glyphicon-trash"></span> $T('button-clear') $T('rss-done')</button>
                 <!--#if $evalButton#-->
-                <button type="button" class="btn btn-default evalFeed" rel="$feed"><span class="glyphicon glyphicon-ok-circle"></span> $T('button-evalFeed')</button>
+                <button type="button" class="btn btn-default evalFeed" rel="$active_feed"><span class="glyphicon glyphicon-ok-circle"></span> $T('button-evalFeed')</button>
                 <!--#end if#-->
             </div>
         </form>

@@ -402,7 +402,7 @@
         <tr class="infoTableSeperator">
             <td>
                 <form action="download" method="get">
-                    <input type="hidden" value="$feed" name="feed" />
+                    <input type="hidden" value="$active_feed" name="feed" />
                     <input type="hidden" name="apikey" value="$apikey" />
                     <input type="hidden" name="url" value="$job['url']" />
                     <input type="hidden" name="nzbname" value="$job['nzbname']" />

@@ -446,7 +446,7 @@
         <tr class="infoTableSeperator">
             <td>
                 <form action="download" method="get">
-                    <input type="hidden" value="$feed" name="feed" />
+                    <input type="hidden" value="$active_feed" name="feed" />
                     <input type="hidden" name="apikey" value="$apikey" />
                     <input type="hidden" name="url" value="$job['url']" />
                     <input type="hidden" name="nzbname" value="$job['nzbname']" />

@@ -475,7 +475,7 @@
     <div class="tab-pane padTable" id="rss-tab-done">
         <!--#if $downloaded#-->
         <form action="clean_rss_jobs" method="post">
-            <input type="hidden" value="$feed" name="feed" />
+            <input type="hidden" value="$active_feed" name="feed" />
             <input type="hidden" name="apikey" value="$apikey" />
             <table class="catTable">
                 <thead>
requirements.txt
@@ -1,22 +1,42 @@
-sabyenc3>=4.0.0
-cheetah3>=3.0.0
-cryptography
-feedparser>=6.0.0
-configobj
-cheroot
-cherrypy
-portend
-chardet
-notify2
-PySocks
-puremagic
-guessit>=3.1.0
+# Main requirements
+# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
+sabyenc3==4.0.2
+cheetah3==3.2.6
+cryptography==36.0.1
+cffi==1.15
+pycparser==2.21
+feedparser==6.0.8
+configobj==5.0.6
+cheroot==8.6.0
+six==1.16.0
+cherrypy==18.6.1
+jaraco.functools==3.5.0
+jaraco.collections==3.5.1
+jaraco.text==3.7.0
+jaraco.classes==3.2.1
+jaraco.context==4.1.1
+more-itertools==8.12.0
+zc.lockfile==2.0
+python-dateutil==2.8.2
+tempora==5.0.1
+pytz==2021.3
+sgmllib3k==1.0.0
+portend==3.1.0
+chardet==4.0.0
+PySocks==1.7.1
+puremagic==1.11
+guessit==3.4.3
+babelfish==0.6.0
+rebulk==3.1.0

 # Windows system integration
-pywin32>=227; sys_platform == 'win32'
+pywin32==303; sys_platform == 'win32'

 # macOS system calls
-pyobjc; sys_platform == 'darwin'
+pyobjc==8.1; sys_platform == 'darwin'

+# Linux notifications
+notify2==0.3.1; sys_platform != 'win32' and sys_platform != 'darwin'
+
 # Optional support for *nix tray icon.
 # Note that pygobject depends on pycairo, which requires pkg-config and cairo headers.
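The runtime requirements now carry exact `==` pins, including transitive packages the comment calls out as potential trouble (`cffi`, `six`, the `jaraco.*` family, `sgmllib3k`, and so on). That trades silent upgrades for reproducible builds; the Dependabot configuration added at the top of this changeset takes over proposing version bumps.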
sabnzbd/articlecache.py
@@ -85,9 +85,8 @@ class ArticleCache:
     def save_article(self, article: Article, data: bytes):
         """Save article in cache, either memory or disk"""
         nzo = article.nzf.nzo
-        if nzo.is_gone():
-            # Do not discard this article because the
-            # file might still be processed at this moment!!
+        # Skip if already post-processing or fully finished
+        if nzo.pp_or_finished:
             return

         # Register article for bookkeeping in case the job is deleted

@@ -162,11 +161,8 @@
     @staticmethod
     def __flush_article_to_disk(article: Article, data):
-        nzo = article.nzf.nzo
-        if nzo.is_gone():
-            # Don't store deleted jobs
-            return
-
         # Save data, but don't complain when destination folder is missing
         # because this flush may come after completion of the NZO.
-        sabnzbd.filesystem.save_data(data, article.get_art_id(), nzo.admin_path, do_pickle=False, silent=True)
+        sabnzbd.filesystem.save_data(
+            data, article.get_art_id(), article.nzf.nzo.admin_path, do_pickle=False, silent=True
+        )
sabnzbd/assembler.py
@@ -105,101 +105,106 @@ class Assembler(Thread):
             filepath = nzf.prepare_filepath()

             if filepath:
-                logging.debug("Decoding part of %s", filepath)
                 try:
+                    logging.debug("Decoding part of %s", filepath)
                     self.assemble(nzf, file_done)
-
-                    # Continue after partly written data
-                    if not file_done:
-                        continue
-
-                    # Clean-up admin data
-                    logging.info("Decoding finished %s", filepath)
-                    nzf.remove_admin()
-
-                    # Do rar-related processing
-                    if rarfile.is_rarfile(filepath):
-                        # Encryption and unwanted extension detection
-                        rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
-                        if rar_encrypted:
-                            if cfg.pause_on_pwrar() == 1:
-                                logging.warning(
-                                    T(
-                                        'Paused job "%s" because of encrypted RAR file (if supplied, all passwords were tried)'
-                                    ),
-                                    nzo.final_name,
-                                )
-                                nzo.pause()
-                            else:
-                                logging.warning(
-                                    T(
-                                        'Aborted job "%s" because of encrypted RAR file (if supplied, all passwords were tried)'
-                                    ),
-                                    nzo.final_name,
-                                )
-                                nzo.fail_msg = T("Aborted, encryption detected")
-                                sabnzbd.NzbQueue.end_job(nzo)
-
-                        if unwanted_file:
-                            # Don't repeat the warning after a user override of an unwanted extension pause
-                            if nzo.unwanted_ext == 0:
-                                logging.warning(
-                                    T('In "%s" unwanted extension in RAR file. Unwanted file is %s '),
-                                    nzo.final_name,
-                                    unwanted_file,
-                                )
-                            logging.debug(T("Unwanted extension is in rar file %s"), filepath)
-                            if cfg.action_on_unwanted_extensions() == 1 and nzo.unwanted_ext == 0:
-                                logging.debug("Unwanted extension ... pausing")
-                                nzo.unwanted_ext = 1
-                                nzo.pause()
-                            if cfg.action_on_unwanted_extensions() == 2:
-                                logging.debug("Unwanted extension ... aborting")
-                                nzo.fail_msg = T("Aborted, unwanted extension detected")
-                                sabnzbd.NzbQueue.end_job(nzo)
-
-                        # Add to direct unpack
-                        nzo.add_to_direct_unpacker(nzf)
-
-                    elif par2file.is_parfile(filepath):
-                        # Parse par2 files, cloaked or not
-                        nzo.handle_par2(nzf, filepath)
-
-                    filter_output, reason = nzo_filtered_by_rating(nzo)
-                    if filter_output == 1:
-                        logging.warning(
-                            T('Paused job "%s" because of rating (%s)'),
-                            nzo.final_name,
-                            reason,
-                        )
-                        nzo.pause()
-                    elif filter_output == 2:
-                        logging.warning(
-                            T('Aborted job "%s" because of rating (%s)'),
-                            nzo.final_name,
-                            reason,
-                        )
-                        nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason
-                        sabnzbd.NzbQueue.end_job(nzo)
-
                 except IOError as err:
-                    # If job was deleted or in active post-processing, ignore error
-                    if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active:
+                    # If job was deleted/finished or in active post-processing, ignore error
+                    if not nzo.pp_or_finished:
                         # 28 == disk full => pause downloader
                         if err.errno == 28:
                             logging.error(T("Disk full! Forcing Pause"))
                         else:
                             logging.error(T("Disk error on creating file %s"), clip_path(filepath))
-                        # Log traceback
-                        logging.info("Traceback: ", exc_info=True)
                         if sabnzbd.WIN32:
-                            logging.info("Winerror: %s", hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2 ** 32))
+                            logging.info(
+                                "Winerror: %s - %s",
+                                err.winerror,
+                                hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2**32),
+                            )
+                        logging.info("Traceback: ", exc_info=True)
                        # Pause without saving
                        sabnzbd.Downloader.pause()
                        continue
+                    else:
+                        logging.debug("Ignoring error %s for %s, already finished or in post-proc", err, filepath)
                 except:
                     logging.error(T("Fatal error in Assembler"), exc_info=True)
                     break

+                # Continue after partly written data
+                if not file_done:
+                    continue
+
+                # Clean-up admin data
+                logging.info("Decoding finished %s", filepath)
+                nzf.remove_admin()
+
+                # Do rar-related processing
+                if rarfile.is_rarfile(filepath):
+                    # Encryption and unwanted extension detection
+                    rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
+                    if rar_encrypted:
+                        if cfg.pause_on_pwrar() == 1:
+                            logging.warning(
+                                T(
+                                    'Paused job "%s" because of encrypted RAR file (if supplied, all passwords were tried)'
+                                ),
+                                nzo.final_name,
+                            )
+                            nzo.pause()
+                        else:
+                            logging.warning(
+                                T(
+                                    'Aborted job "%s" because of encrypted RAR file (if supplied, all passwords were tried)'
+                                ),
+                                nzo.final_name,
+                            )
+                            nzo.fail_msg = T("Aborted, encryption detected")
+                            sabnzbd.NzbQueue.end_job(nzo)
+
+                    if unwanted_file:
+                        # Don't repeat the warning after a user override of an unwanted extension pause
+                        if nzo.unwanted_ext == 0:
+                            logging.warning(
+                                T('In "%s" unwanted extension in RAR file. Unwanted file is %s '),
+                                nzo.final_name,
+                                unwanted_file,
+                            )
+                        logging.debug(T("Unwanted extension is in rar file %s"), filepath)
+                        if cfg.action_on_unwanted_extensions() == 1 and nzo.unwanted_ext == 0:
+                            logging.debug("Unwanted extension ... pausing")
+                            nzo.unwanted_ext = 1
+                            nzo.pause()
+                        if cfg.action_on_unwanted_extensions() == 2:
+                            logging.debug("Unwanted extension ... aborting")
+                            nzo.fail_msg = T("Aborted, unwanted extension detected")
+                            sabnzbd.NzbQueue.end_job(nzo)
+
+                    # Add to direct unpack
+                    nzo.add_to_direct_unpacker(nzf)
+
+                elif par2file.is_parfile(filepath):
+                    # Parse par2 files, cloaked or not
+                    nzo.handle_par2(nzf, filepath)
+
+                filter_output, reason = nzo_filtered_by_rating(nzo)
+                if filter_output == 1:
+                    logging.warning(
+                        T('Paused job "%s" because of rating (%s)'),
+                        nzo.final_name,
+                        reason,
+                    )
+                    nzo.pause()
+                elif filter_output == 2:
+                    logging.warning(
+                        T('Aborted job "%s" because of rating (%s)'),
+                        nzo.final_name,
+                        reason,
+                    )
+                    nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason
+                    sabnzbd.NzbQueue.end_job(nzo)
+
             else:
                 sabnzbd.NzbQueue.remove(nzo.nzo_id, cleanup=False)
                 sabnzbd.PostProcessor.process(nzo)

@@ -214,7 +219,8 @@ class Assembler(Thread):
         if not nzf.md5:
             nzf.md5 = hashlib.md5()

-        with open(nzf.filepath, "ab") as fout:
+        # We write large article-sized chunks, so we can safely skip the buffering of Python
+        with open(nzf.filepath, "ab", buffering=0) as fout:
             for article in nzf.decodetable:
                 # Break if deleted during writing
                 if nzf.nzo.status is Status.DELETED:
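The `buffering=0` change leans on a standard-library detail: a binary file opened unbuffered hands each `write()` straight to the OS, skipping Python's internal buffer copy, which only pays off when writes are large, exactly as the new comment says. A small illustration, with a made-up file name and payload:

```python
# Unbuffered binary append: no Python-level buffer, one syscall per write().
# Worthwhile when each write is a large article-sized chunk.
with open("example.part", "ab", buffering=0) as fout:  # hypothetical file
    fout.write(b"\x00" * 500_000)  # stand-in for one decoded article
```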
sabnzbd/config.py
@@ -401,7 +401,7 @@ class ConfigServer:
         self.displayname = OptionStr(name, "displayname", add=False)
         self.host = OptionStr(name, "host", add=False)
-        self.port = OptionNumber(name, "port", 119, 0, 2 ** 16 - 1, add=False)
+        self.port = OptionNumber(name, "port", 119, 0, 2**16 - 1, add=False)
         self.timeout = OptionNumber(name, "timeout", 60, 20, 240, add=False)
         self.username = OptionStr(name, "username", add=False)
         self.password = OptionPassword(name, "password", add=False)
sabnzbd/constants.py
@@ -41,9 +41,9 @@ ANFO = namedtuple("ANFO", "article_sum cache_size cache_limit")
 DEF_FOLDER_MAX = 256 - 10
 DEF_FILE_MAX = 255 - 10  # max filename length on modern filesystems, minus some room for extra chars later on

-GIGI = float(2 ** 30)
-MEBI = float(2 ** 20)
-KIBI = float(2 ** 10)
+GIGI = float(2**30)
+MEBI = float(2**20)
+KIBI = float(2**10)

 BYTES_FILE_NAME_OLD = "totals9.sab"
 BYTES_FILE_NAME = "totals10.sab"

@@ -120,7 +120,7 @@ VALID_NZB_FILES = (".nzb", ".gz", ".bz2")

 CHEETAH_DIRECTIVES = {"directiveStartToken": "<!--#", "directiveEndToken": "#-->", "prioritizeSearchListOverSelf": True}

-IGNORED_FOLDERS = ("@eaDir", ".appleDouble")
+IGNORED_FILES_AND_FOLDERS = ("@eaDir", ".appleDouble", ".DS_Store")
 IGNORED_MOVIE_FOLDERS = ("video_ts", "audio_ts", "bdmv")

 EXCLUDED_GUESSIT_PROPERTIES = [
sabnzbd/deobfuscate_filenames.py (mode changed: Normal file → Executable file; 0 lines changed)
sabnzbd/directunpacker.py
@@ -54,20 +54,20 @@ class DirectUnpacker(threading.Thread):
         self.nzo: NzbObject = nzo
         self.active_instance: Optional[subprocess.Popen] = None
-        self.killed = False
+        self.killed: bool = False
         self.next_file_lock = threading.Condition(threading.RLock())

         self.unpack_dir_info = None
         self.rarfile_nzf: Optional[NzbFile] = None
-        self.cur_setname = None
-        self.cur_volume = 0
-        self.total_volumes = {}
-        self.unpack_time = 0.0
+        self.cur_setname: Optional[str] = None
+        self.cur_volume: int = 0
+        self.total_volumes: Dict[str, int] = {}
+        self.unpack_time: float = 0.0

         self.success_sets: Dict[str, Tuple[List[str], List[str]]] = {}
-        self.next_sets = []
+        self.next_sets: List[NzbFile] = []

-        self.duplicate_lines = 0
+        self.duplicate_lines: int = 0

         nzo.direct_unpacker = self

@@ -108,6 +108,7 @@ class DirectUnpacker(threading.Thread):
     def set_volumes_for_nzo(self):
         """Loop over all files to detect the names"""
+        logging.debug("Parsing setname and volume information for %s" % self.nzo.final_name)
         none_counter = 0
         found_counter = 0
         for nzf in self.nzo.files + self.nzo.finished_files:

@@ -292,9 +293,13 @@ class DirectUnpacker(threading.Thread):
         # Possible that the instance was deleted while locked
         if not self.killed:
+            # Sometimes the assembler is still working on the file, resulting in "Unexpected end of archive".
+            # So we delay a tiny bit before we continue. This is not the cleanest solution, but it works.
+            time.sleep(0.1)
+
             # If unrar stopped or is killed somehow, writing will cause a crash
             try:
-                # Give unrar some time to do it's thing
+                # Give unrar some time to do its thing
                 self.active_instance.stdin.write(b"C\n")
                 start_time = time.time()
                 time.sleep(0.1)

@@ -306,11 +311,7 @@ class DirectUnpacker(threading.Thread):
                 if not last_volume_linebuf or last_volume_linebuf != linebuf:
                     # Next volume
                     self.cur_volume += 1
-                    perc = (self.cur_volume / self.total_volumes[self.cur_setname]) * 100
-                    self.nzo.set_action_line(
-                        T("Direct Unpack"),
-                        "%s %s" % (self.get_formatted_stats(), add_time_left(perc, time_used=self.unpack_time)),
-                    )
+                    self.nzo.set_action_line(T("Direct Unpack"), self.get_formatted_stats(include_time_left=True))
                     logging.info("DirectUnpacked volume %s for %s", self.cur_volume, self.cur_setname)

                 # If lines did not change and we don't have the next volume, this download is missing files!

@@ -351,10 +352,10 @@ class DirectUnpacker(threading.Thread):
             return False

     def wait_for_next_volume(self):
-        """Wait for the correct volume to appear
-        But stop if it was killed or the NZB is done
+        """Wait for the correct volume to appear but stop if it was killed
+        or the NZB is in post-processing and no new files will be downloaded.
         """
-        while not self.have_next_volume() and not self.killed and self.nzo.files:
+        while not self.have_next_volume() and not self.killed and not self.nzo.pp_active:
             with self.next_file_lock:
                 self.next_file_lock.wait()

@@ -389,7 +390,7 @@ class DirectUnpacker(threading.Thread):
             # The first NZF
             self.rarfile_nzf = self.have_next_volume()

-            # Ignore if maybe this set is not there any more
+            # Ignore if maybe this set is not there anymore
             # This can happen due to race/timing issues when creating the sets
             if not self.rarfile_nzf:
                 return

@@ -505,12 +506,17 @@ class DirectUnpacker(threading.Thread):
         # Reset settings
         self.reset_active()

-    def get_formatted_stats(self):
+    def get_formatted_stats(self, include_time_left: bool = False):
         """Get percentage or number of rar's done"""
         if self.cur_setname and self.cur_setname in self.total_volumes:
             # This won't work on obfuscated posts
             if self.total_volumes[self.cur_setname] >= self.cur_volume and self.cur_volume:
-                return "%02d/%02d" % (self.cur_volume, self.total_volumes[self.cur_setname])
+                formatted_stats = "%02d/%02d" % (self.cur_volume, self.total_volumes[self.cur_setname])
+                if include_time_left:
+                    formatted_stats += add_time_left(
+                        (self.cur_volume / self.total_volumes[self.cur_setname]) * 100, time_used=self.unpack_time
+                    )
+                return formatted_stats
         return self.cur_volume
sabnzbd/downloader.py
@@ -952,7 +952,7 @@ class Downloader(Thread):
             # Make sure it is not in the readable sockets
             self.remove_socket(nw)

-            if nw.article:
+            if nw.article and not nw.article.nzf.nzo.removed_from_queue:
                 # Only some errors should count towards the total tries for each server
                 if count_article_try:
                     nw.article.tries += 1
sabnzbd/filesystem.py
@@ -41,7 +41,7 @@ except ImportError:
 import sabnzbd
 from sabnzbd.decorators import synchronized
-from sabnzbd.constants import FUTURE_Q_FOLDER, JOB_ADMIN, GIGI, DEF_FILE_MAX
+from sabnzbd.constants import FUTURE_Q_FOLDER, JOB_ADMIN, GIGI, DEF_FILE_MAX, IGNORED_FILES_AND_FOLDERS
 from sabnzbd.encoding import correct_unknown_encoding, utob, ubtou
 from sabnzbd.utils import rarfile

@@ -600,16 +600,17 @@ def get_admin_path(name: str, future: bool):
     return os.path.join(os.path.join(sabnzbd.cfg.download_dir.get_path(), name), JOB_ADMIN)


-def set_chmod(path: str, permissions: int):
+def set_chmod(path: str, permissions: int, allow_failures: bool = False):
     """Set 'permissions' on 'path'"""
     try:
         logging.debug("Applying permissions %s (octal) to %s", oct(permissions), path)
         os.chmod(path, permissions)
     except:
-        lpath = path.lower()
-        if ".appledouble" not in lpath and ".ds_store" not in lpath:
+        if not allow_failures and not sabnzbd.misc.match_str(path, IGNORED_FILES_AND_FOLDERS):
             logging.error(T("Cannot change permissions of %s"), clip_path(path))
             logging.info("Traceback: ", exc_info=True)
+        else:
+            logging.debug("Could not change permissions of %s", path)


 def set_permissions(path: str, recursive: bool = True):

@@ -642,12 +643,15 @@ def removexbits(path: str, custom_permissions: int = None):
     if os.path.isfile(path):
         # Use custom permissions as base
         current_permissions = custom_permissions
+        allow_failures = False
         if not custom_permissions:
             current_permissions = os.stat(path).st_mode
+            # Allow failures if no custom permissions are set, changing permissions might not be supported
+            allow_failures = True
         # Check if the file has any x-bits, no need to remove them otherwise
         if custom_permissions or current_permissions & UNWANTED_FILE_PERMISSIONS:
             # Mask out the X-bits
-            set_chmod(path, current_permissions & ~UNWANTED_FILE_PERMISSIONS)
+            set_chmod(path, current_permissions & ~UNWANTED_FILE_PERMISSIONS, allow_failures)


 def userxbit(path: str) -> bool:

@@ -762,9 +766,8 @@ def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
     filelist = []
     for root, dirs, files in os.walk(input_dir):
         for file in files:
-            if ".AppleDouble" not in root and ".DS_Store" not in root:
-                p = os.path.join(root, file)
-                filelist.append(p)
+            if not sabnzbd.misc.match_str(root, IGNORED_FILES_AND_FOLDERS):
+                filelist.append(os.path.join(root, file))
         if not recursive:
             break
     return filelist

@@ -890,7 +893,7 @@ def renamer(old: str, new: str, create_local_directories: bool = False) -> str:
                     time.sleep(2)
                 else:
                     raise
-        raise OSError("Failed to rename (Winerr %s)" % hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2 ** 32))
+        raise OSError("Failed to rename (Winerr %s)" % hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2**32))
     else:
         shutil.move(old, new)
     return new
sabnzbd/interface.py
@@ -30,6 +30,7 @@ import hashlib
 import socket
 import ssl
 import functools
+import copy
 from random import randint
 from xml.sax.saxutils import escape
 from Cheetah.Template import Template

@@ -367,8 +368,11 @@ def check_apikey(kwargs):

 def template_filtered_response(file: str, search_list: Dict[str, Any]):
     """Wrapper for Cheetah response"""
-    recursive_html_escape(search_list, exclude_items=("webdir",))
-    return Template(file=file, searchList=[search_list], compilerSettings=CHEETAH_DIRECTIVES).respond()
+    # We need a copy, because otherwise source-dicts might be modified
+    search_list_copy = copy.deepcopy(search_list)
+    # 'filters' is excluded because the RSS-filters are listed twice
+    recursive_html_escape(search_list_copy, exclude_items=("webdir", "filters"))
+    return Template(file=file, searchList=[search_list_copy], compilerSettings=CHEETAH_DIRECTIVES).respond()


 def log_warning_and_ip(txt):

@@ -1445,7 +1449,7 @@ class ConfigRss:
         if filt:
             feed_cfg.filters.update(
-                int(kwargs.get("index", 0)), (cat, pp, script, kwargs.get("filter_type"), filt, prio, enabled)
+                int(kwargs.get("index", 0)), [cat, pp, script, kwargs.get("filter_type"), filt, prio, enabled]
             )

         # Move filter if requested
sabnzbd/misc.py
@@ -733,7 +733,7 @@ def loadavg():
     return p


-def format_time_string(seconds):
+def format_time_string(seconds: float) -> str:
     """Return a formatted and translated time string"""

     def unit(single, n):

@@ -1080,11 +1080,13 @@ def recursive_html_escape(input_dict_or_list: Union[Dict[str, Any], List], exclu
             iterator = enumerate(input_dict_or_list)

         for key, value in iterator:
-            # We ignore any other than str and those on the exclude_items-list
-            if isinstance(value, str) and key not in exclude_items:
-                input_dict_or_list[key] = html.escape(value, quote=True)
-            if isinstance(value, (dict, list)):
-                recursive_html_escape(value)
+            # Ignore any keys that are not safe to convert
+            if key not in exclude_items:
+                # We ignore any other than str
+                if isinstance(value, str):
+                    input_dict_or_list[key] = html.escape(value, quote=True)
+                if isinstance(value, (dict, list)):
+                    recursive_html_escape(value, exclude_items=exclude_items)
     else:
         raise ValueError("Expected dict or str, got %s" % type(input_dict_or_list))
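Judging from the new body, the `recursive_html_escape()` fix changes two behaviors: excluded keys now skip both the escaping and the recursive descent, and `exclude_items` is forwarded on recursion instead of being dropped after the first level. A usage sketch with made-up data:

```python
search_list = {
    "error": "<b>feed & filters</b>",
    "webdir": "<keep/verbatim>",     # excluded key: left untouched
    "jobs": [{"name": "a<b"}],       # nested values are escaped too
}
recursive_html_escape(search_list, exclude_items=("webdir", "filters"))
# search_list["error"]  -> "&lt;b&gt;feed &amp; filters&lt;/b&gt;"
# search_list["webdir"] -> unchanged
```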
sabnzbd/newsunpack.py
@@ -970,6 +970,8 @@ def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: Lis
         logging.info("Starting extract on 7zip set/file: %s ", seven_set)
         nzo.set_action_line(T("Unpacking"), setname_from_path(seven_set))

+        # Sort, so that x.001 is the first one
+        seven_sets[seven_set].sort()
         seven_path = seven_sets[seven_set][0]

         if workdir_complete and seven_path.startswith(nzo.download_path):

@@ -978,7 +980,9 @@
             extraction_path = os.path.split(seven_path)[0]

         res, new_files_set = seven_extract(nzo, seven_path, seven_set, extraction_path, one_folder)
-        if not res and nzo.delete:
+        if res:
+            unseven_failed = True
+        elif nzo.delete:
             for seven in seven_sets[seven_set]:
                 try:
                     remove_file(seven)

@@ -1284,7 +1288,7 @@ def par2cmdline_verify(
         if line == "":
             continue

-        if not line.startswith(("Repairing:", "Scanning:", "Loading:")):
+        if not line.startswith(("Repairing:", "Scanning:", "Loading:", "Solving:", "Constructing:")):
             lines.append(line)

         if line.startswith(("Invalid option specified", "Invalid thread option", "Cannot specify recovery file count")):

@@ -2046,12 +2050,14 @@ def unrar_check(rar: str) -> Tuple[int, bool]:

 def sevenzip_check(sevenzip: str) -> str:
     """Return version of 7zip, currently as a string"""
-    try:
-        seven_command_output = run_command([sevenzip])
-        # Example: 7-Zip (z) 21.06 (x64) : Copyright (c) 1999-2021 Igor Pavlov : 2021-11-24
-        return re.search(r"(\d+\.\d+).*Copyright", seven_command_output).group(1)
-    except:
-        return ""
+    if sevenzip:
+        try:
+            seven_command_output = run_command([sevenzip])
+            # Example: 7-Zip (z) 21.06 (x64) : Copyright (c) 1999-2021 Igor Pavlov : 2021-11-24
+            return re.search(r"(\d+\.\d+).*Copyright", seven_command_output).group(1)
+        except:
+            pass
+    return ""


 def par2_mt_check(par2_path: str) -> bool:
sabnzbd/nzbparser.py
@@ -34,7 +34,15 @@ from typing import Optional, Dict, Any, Union, List, Tuple
 import sabnzbd
 from sabnzbd import nzbstuff
 from sabnzbd.encoding import utob, correct_unknown_encoding
-from sabnzbd.filesystem import get_filename, is_valid_script, get_ext, setname_from_path, clip_path, remove_file
+from sabnzbd.filesystem import (
+    get_filename,
+    is_valid_script,
+    get_ext,
+    setname_from_path,
+    clip_path,
+    remove_file,
+    remove_data,
+)
 from sabnzbd.misc import name_to_cat
 from sabnzbd.constants import DEFAULT_PRIORITY, VALID_ARCHIVES
 from sabnzbd.utils import rarfile

@@ -393,7 +401,8 @@ def nzbfile_parser(full_nzb_path: str, nzo):
         # Parse the files
         if element.tag.lower() == "file":
             # Get subject and date
-            file_name = ""
+            # Don't fail, if subject is missing
+            file_name = "unknown"
             if element.attrib.get("subject"):
                 file_name = element.attrib.get("subject")

@@ -435,7 +444,7 @@ def nzbfile_parser(full_nzb_path: str, nzo):
                     nzo.increase_bad_articles_counter("duplicate_articles")
                 else:
                     logging.info("Skipping duplicate article (%s)", article_id)
-            elif segment_size <= 0 or segment_size >= 2 ** 23:
+            elif segment_size <= 0 or segment_size >= 2**23:
                 # Perform sanity check (not negative, 0 or larger than 8MB) on article size
                 # We use this value later to allocate memory in cache and sabyenc
                 logging.info("Skipping article %s due to strange size (%s)", article_id, segment_size)

@@ -469,7 +478,7 @@ def nzbfile_parser(full_nzb_path: str, nzo):
             else:
                 logging.info("Error importing %s, skipping", file_name)
                 if nzf.nzf_id:
-                    sabnzbd.remove_data(nzf.nzf_id, nzo.admin_path)
+                    remove_data(nzf.nzf_id, nzo.admin_path)
                 skipped_files += 1
             element.clear()
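The segment-size guard is unchanged, only reformatted by Black: `2**23` bytes is 8,388,608, the "larger than 8MB" bound named in the comment, which matters because the value is later used to size allocations in the article cache and sabyenc.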
sabnzbd/nzbqueue.py
@@ -48,7 +48,7 @@ from sabnzbd.constants import (
     STOP_PRIORITY,
     VERIFIED_FILE,
     Status,
-    IGNORED_FOLDERS,
+    IGNORED_FILES_AND_FOLDERS,
     QNFO,
     DIRECT_WRITE_TRIGGER,
 )

@@ -154,7 +154,7 @@ class NzbQueue:
         # Repair unregistered folders
         for folder in globber_full(cfg.download_dir.get_path()):
             name = os.path.basename(folder)
-            if os.path.isdir(folder) and name not in registered and name not in IGNORED_FOLDERS:
+            if os.path.isdir(folder) and name not in registered and name not in IGNORED_FILES_AND_FOLDERS:
                 if action:
                     logging.info("Repairing job %s", folder)
                     self.repair_job(folder)

@@ -239,7 +239,7 @@ class NzbQueue:
         nzo_ids = []
         # Aggregate nzo_ids and save each nzo
         for nzo in self.__nzo_list[:]:
-            if not nzo.is_gone():
+            if not nzo.removed_from_queue:
                 nzo_ids.append(os.path.join(nzo.work_name, nzo.nzo_id))
                 if save_nzo is None or nzo is save_nzo:
                     if not nzo.futuretype:

@@ -338,7 +338,7 @@ class NzbQueue:
         # Reset try_lists, markers and evaluate the scheduling settings
         nzo.reset_try_list()
-        nzo.deleted = False
+        nzo.removed_from_queue = False
         priority = nzo.priority
         if sabnzbd.Scheduler.analyse(False, priority):
             nzo.status = Status.PAUSED

@@ -395,12 +395,10 @@ class NzbQueue:
         logging.info("[%s] Removing job %s", caller_name(), nzo.final_name)

         # Set statuses
-        nzo.deleted = True
-        if cleanup and not nzo.is_gone():
-            nzo.status = Status.DELETED
+        nzo.removed_from_queue = True
         self.__nzo_list.remove(nzo)

         if cleanup:
+            nzo.status = Status.DELETED
             nzo.purge_data(delete_all_data=delete_all_data)
         self.save(False)
         return nzo_id

@@ -737,47 +735,49 @@ class NzbQueue:
         nzf = article.nzf
         nzo = nzf.nzo

-        if nzf.deleted:
-            logging.debug("Discarding article %s, no longer in queue", article.article)
+        if nzo.pp_or_finished or nzf.deleted:
+            logging.debug("Discarding article for file %s: deleted or already post-processing", nzf.filename)
+            # If this file is needed later (par2 file added back to queue), it would be damaged because
+            # we discard this article. So we reset it to be picked up again if needed.
+            # But not reset all articles, as it could cause problems for articles still attached to a server.
+            article.reset_try_list()
+            nzf.reset_try_list()
             return

         articles_left, file_done, post_done = nzo.remove_article(article, success)

-        if nzo.is_gone():
-            logging.debug("Discarding article for file %s, no longer in queue", nzf.filename)
-        else:
-            # Write data if file is done or at trigger time
-            if file_done or (articles_left and (articles_left % DIRECT_WRITE_TRIGGER) == 0):
-                if not nzo.precheck:
-                    # Only start decoding if we have a filename and type
-                    # The type is only set if sabyenc could decode the article
-                    if nzf.filename and nzf.type:
-                        sabnzbd.Assembler.process(nzo, nzf, file_done)
-                    elif nzf.filename.lower().endswith(".par2"):
-                        # Broken par2 file, try to get another one
-                        nzo.promote_par2(nzf)
-
-            # Save bookkeeping in case of crash
-            if file_done and (nzo.next_save is None or time.time() > nzo.next_save):
-                nzo.save_to_disk()
-                sabnzbd.BPSMeter.save()
-                if nzo.save_timeout is None:
-                    nzo.next_save = None
-                else:
-                    nzo.next_save = time.time() + nzo.save_timeout
-
-            # Remove post from Queue
-            if post_done:
-                nzo.set_download_report()
-                self.end_job(nzo)
+        # Write data if file is done or at trigger time
+        if file_done or (articles_left and (articles_left % DIRECT_WRITE_TRIGGER) == 0):
+            if not nzo.precheck:
+                # Only start decoding if we have a filename and type
+                # The type is only set if sabyenc could decode the article
+                if nzf.filename and nzf.type:
+                    sabnzbd.Assembler.process(nzo, nzf, file_done)
+                elif nzf.filename.lower().endswith(".par2"):
+                    # Broken par2 file, try to get another one
+                    nzo.promote_par2(nzf)
+
+        # Save bookkeeping in case of crash
+        if file_done and (nzo.next_save is None or time.time() > nzo.next_save):
+            nzo.save_to_disk()
+            sabnzbd.BPSMeter.save()
+            if nzo.save_timeout is None:
+                nzo.next_save = None
+            else:
+                nzo.next_save = time.time() + nzo.save_timeout
+
+        # Remove post from Queue
+        if post_done:
+            nzo.set_download_report()
+            self.end_job(nzo)

     @NzbQueueLocker
     def end_job(self, nzo: NzbObject):
         """Send NZO to the post-processing queue"""
         # Notify assembler to call postprocessor
-        if not nzo.deleted:
+        if not nzo.removed_from_queue:
             logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
-            nzo.deleted = True
+            nzo.removed_from_queue = True
             if nzo.precheck:
                 nzo.save_to_disk()
                 # Check result
@@ -546,7 +546,7 @@ NzbObjectSaver = (
|
||||
"saved_articles",
|
||||
"nzo_id",
|
||||
"futuretype",
|
||||
"deleted",
|
||||
"removed_from_queue",
|
||||
"parsed",
|
||||
"action_line",
|
||||
"unpack_info",
|
||||
@@ -687,7 +687,7 @@ class NzbObject(TryList):
|
||||
self.nzo_id: Optional[str] = None
|
||||
|
||||
self.futuretype = futuretype
|
||||
self.deleted = False
|
||||
self.removed_from_queue = False
|
||||
self.to_be_removed = False
|
||||
self.parsed = False
|
||||
self.duplicate = False
|
||||
@@ -719,7 +719,7 @@ class NzbObject(TryList):
|
||||
self.encrypted = 0
|
||||
self.url_wait: Optional[float] = None
|
||||
self.url_tries = 0
|
||||
self.pp_active = False # Signals active post-processing (not saved)
|
||||
self.pp_active = False
|
||||
self.md5sum: Optional[str] = None
|
||||
|
||||
# Path is empty in case of a future NZB
|
||||
@@ -1404,10 +1404,15 @@ class NzbObject(TryList):
|
||||
self.final_name = sanitize_foldername(name)
|
||||
self.save_to_disk()
|
||||
|
||||
@property
|
||||
def pp_or_finished(self):
|
||||
"""We don't want any more articles if we are post-processing or in the final state"""
|
||||
return self.pp_active or self.status in (Status.COMPLETED, Status.DELETED, Status.FAILED)
|
||||
|
||||
def pause(self):
|
||||
self.status = Status.PAUSED
|
||||
# Prevent loss of paused state when terminated
|
||||
if self.nzo_id and not self.is_gone():
|
||||
if self.nzo_id and not self.removed_from_queue:
|
||||
self.save_to_disk()
|
||||
|
||||
def resume(self):
|
||||
@@ -1608,7 +1613,7 @@ class NzbObject(TryList):
|
||||
if sabnzbd.Downloader.highest_server(server):
|
||||
nzf.finish_import()
|
||||
# Still not finished? Something went wrong...
|
||||
if not nzf.import_finished and not self.is_gone():
|
||||
if not nzf.import_finished and not self.removed_from_queue:
|
||||
logging.error(T("Error importing %s"), nzf)
|
||||
nzf_remove_list.append(nzf)
|
||||
nzf.nzo.status = Status.PAUSED
|
||||
@@ -1741,7 +1746,7 @@ class NzbObject(TryList):
     @synchronized(NZO_LOCK)
     def verify_all_filenames_and_resort(self):
         """Verify all filenames based on par2 info and then re-sort files.
-        Locked so all files are verified at once without interuptions.
+        Locked so all files are verified at once without interruptions.
         """
         logging.info("Checking all filenames for %s", self.final_name)
         for nzf_verify in self.files:
@@ -1749,6 +1754,10 @@ class NzbObject(TryList):
         logging.info("Re-sorting %s after getting filename information", self.final_name)
         self.sort_nzfs()
 
+        # Also trigger it again for Direct Unpack, if it's active
+        if self.direct_unpacker:
+            self.direct_unpacker.set_volumes_for_nzo()
+
     @synchronized(NZO_LOCK)
     def renamed_file(self, name_set, old_name=None):
         """Save renames at various stages (Download/PP)
@@ -1906,7 +1915,7 @@ class NzbObject(TryList):
     def save_to_disk(self):
         """Save job's admin to disk"""
         self.save_attribs()
-        if self.nzo_id and not self.is_gone():
+        if self.nzo_id and not self.removed_from_queue:
             sabnzbd.filesystem.save_data(self, self.nzo_id, self.admin_path)
 
     def save_attribs(self):
@@ -1922,7 +1931,7 @@ class NzbObject(TryList):
         attribs = sabnzbd.filesystem.load_data(ATTRIB_FILE, self.admin_path, remove=False)
         logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
 
-        # If attributes file somehow does not exists
+        # If attributes file somehow does not exist
         if not attribs:
             return None, None, None
 
@@ -1936,7 +1945,7 @@ class NzbObject(TryList):
         return attribs["cat"], attribs["pp"], attribs["script"]
 
     @synchronized(NZO_LOCK)
-    def build_pos_nzf_table(self, nzf_ids):
+    def build_pos_nzf_table(self, nzf_ids: List[str]) -> Dict[int, NzbFile]:
         pos_nzf_table = {}
         for nzf_id in nzf_ids:
             if nzf_id in self.files_table:
@@ -1947,7 +1956,7 @@ class NzbObject(TryList):
         return pos_nzf_table
 
     @synchronized(NZO_LOCK)
-    def cleanup_nzf_ids(self, nzf_ids):
+    def cleanup_nzf_ids(self, nzf_ids: List[str]):
         for nzf_id in nzf_ids[:]:
             if nzf_id in self.files_table:
                 if self.files_table[nzf_id] not in self.files:
@@ -2003,10 +2012,6 @@ class NzbObject(TryList):
 
         return res, series
 
-    def is_gone(self):
-        """Is this job still going somehow?"""
-        return self.status in (Status.COMPLETED, Status.DELETED, Status.FAILED)
-
     def __getstate__(self):
         """Save to pickle file, selecting attributes"""
         dict_ = {}
@@ -228,7 +228,7 @@ class PostProcessor(Thread):
                     % complete_dir
                 )
             else:
-                logging.info("Completed Download Folder %s is not on FAT", complete_dir)
+                logging.debug("Completed Download Folder %s is not on FAT", complete_dir)
 
         # Start looping
         check_eoq = False
@@ -681,7 +681,7 @@ def _get_link(entry):
 
     # GUID usually has URL to result on page
     infourl = None
-    if entry.get("id") and entry.id != link and entry.id.startswith("http"):
+    if entry.get("id") and entry.id != link and entry.id.lower().startswith("http"):
         infourl = entry.id
 
     if size == 0:
@@ -716,7 +716,7 @@ def _get_link(entry):
     except (KeyError, IndexError):
         season = episode = 0
 
-    if link and "http" in link.lower():
+    if link and link.lower().startswith("http"):
         try:
             category = entry.cattext
         except AttributeError:
@@ -63,8 +63,6 @@ class URLGrabber(Thread):
     def __init__(self):
         super().__init__()
         self.queue: queue.Queue[Tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
-        for url_nzo_tup in sabnzbd.NzbQueue.get_urls():
-            self.queue.put(url_nzo_tup)
         self.shutdown = False
 
     def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
@@ -87,7 +85,11 @@ class URLGrabber(Thread):
         self.queue.put((None, None))
 
     def run(self):
         self.shutdown = False
+        # Read all URL's to grab from the queue
+        for url_nzo_tup in sabnzbd.NzbQueue.get_urls():
+            self.queue.put(url_nzo_tup)
+
         # Start fetching
         while not self.shutdown:
             # Set NzbObject object to None so reference from this thread
             # does not keep the object alive in the future (see #1628)
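Editor's note: the two URLGrabber hunks move the seeding of the work queue from __init__() to run(), so saved URLs are only read back once the grabber thread actually starts, presumably after the rest of the application is up. A generic sketch of that pattern (Grabber and restore_saved_urls are invented for illustration; the real seeding call is sabnzbd.NzbQueue.get_urls()):

import queue
import threading
import time

class Grabber(threading.Thread):
    """Sketch only: seed the work queue in run(), not in __init__()."""

    def __init__(self):
        super().__init__()
        self.queue = queue.Queue()

    def restore_saved_urls(self):
        # Placeholder for NzbQueue.get_urls()
        return ["https://example.com/a.nzb"]

    def run(self):
        # Deferred to the worker thread: by now the application is fully
        # initialized, which is not guaranteed while __init__() runs
        for url in self.restore_saved_urls():
            self.queue.put(url)
        while True:
            url = self.queue.get()
            if url is None:  # sentinel, like the (None, None) tuple above
                break
            print("fetching", url)

grabber = Grabber()
grabber.start()
time.sleep(0.1)  # let the worker drain the restored URL first
grabber.queue.put(None)
grabber.join()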
@@ -116,7 +118,7 @@ class URLGrabber(Thread):
             if future_nzo:
                 # If nzo entry deleted, give up
                 try:
-                    deleted = future_nzo.deleted
+                    deleted = future_nzo.removed_from_queue
                 except AttributeError:
                     deleted = True
                 if deleted:
@@ -403,7 +405,7 @@ def add_url(
     password: Optional[str] = None,
 ):
     """Add NZB based on a URL, attributes optional"""
-    if "http" not in url:
+    if not url.lower().startswith("http"):
         return
     if not pp or pp == "-1":
         pp = None
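Editor's note: this change, like the _get_link() hunks above, replaces a substring test with a prefix test. "http" in url accepts any string that merely mentions http anywhere; url.lower().startswith("http") only accepts strings that begin with a scheme. A small illustration (looks_like_web_url is an invented name; a stricter check could parse the scheme with urllib.parse):

def looks_like_web_url(url: str) -> bool:
    # Prefix test, as in the new code: the string must begin with a scheme
    return url.lower().startswith("http")

assert looks_like_web_url("https://example.com/file.nzb")
assert looks_like_web_url("HTTP://example.com/file.nzb")  # case-insensitive
assert not looks_like_web_url("my http downloads.nzb")    # the old substring test accepted this
assert "http" in "my http downloads.nzb"                  # the check being replaced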
@@ -34,7 +34,7 @@ def measure_speed_from_url(url: str) -> float:
     logging.debug("Downloaded bytes: %d", downloaded_bytes)
     logging.debug("Duration in seconds: %f", duration)
 
-    return downloaded_bytes / 1024 ** 2 / duration
+    return downloaded_bytes / 1024**2 / duration
 
 
 def bytes_to_bits(megabytes_per_second: float) -> float:
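Editor's note: the 1024 ** 2 to 1024**2 rewrites here and in the test hunks below are formatting-only; Black 22 hugs the power operator when both operands are simple. For reference, a sketch of the unit arithmetic around measure_speed_from_url and bytes_to_bits (only the return expression and the function names come from the diff; the rest is an assumption):

def megabytes_per_second(downloaded_bytes: int, duration: float) -> float:
    # Same arithmetic as the return statement above: bytes -> MB, then per second
    return downloaded_bytes / 1024**2 / duration

def bytes_to_bits(megabytes_per_second: float) -> float:
    # Plausible body for the helper named in the diff: MB/s -> Mbit/s
    return megabytes_per_second * 8

assert megabytes_per_second(10 * 1024**2, 2.0) == 5.0  # 10 MB in 2 s = 5 MB/s
assert bytes_to_bits(5.0) == 40.0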
@@ -5,5 +5,5 @@
 
 # You MUST use double quotes (so " and not ')
 
-__version__ = "develop"
-__baseline__ = "unknown"
+__version__ = "3.5.3"
+__baseline__ = "cad8a9a5d3b2bc1eadfcda4cff84ecad1f352d7e"
@@ -7,6 +7,7 @@ pytest-httpbin
 pytest-httpserver
 flaky
 xmltodict
-tavern
+tavern<1.16.2; python_version == '3.6'
+tavern; python_version > '3.6'
 tavalidate
 lxml>=4.5.0 # needed by tavalidate
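Editor's note: the split tavern lines use PEP 508 environment markers, so pip installs the pinned version only on Python 3.6 and an unpinned one everywhere else. To check how such a marker evaluates on a given interpreter, the third-party packaging library (an assumption, it is not part of this requirements file) can evaluate it directly:

from packaging.markers import Marker

print(Marker("python_version == '3.6'").evaluate())  # False on any newer interpreter
print(Marker("python_version > '3.6'").evaluate())   # True on any newer interpreter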
@@ -264,7 +264,7 @@ class TestOtherApi(ApiTestFunctions):
             if round(limit_pct / 100 * linespeed_value) > 20:
                 speed_abs = str(round(limit_pct / 100 * linespeed_value)) + "M"
             else:
-                speed_abs = str(round(limit_pct * 2 ** 10 * linespeed_value / 100)) + "K"
+                speed_abs = str(round(limit_pct * 2**10 * linespeed_value / 100)) + "K"
         else:
             speed_abs = str(round(limit_pct / 100 * from_units(linespeed)))
         assert self._get_api_json("config", extra_args={"name": "speedlimit", "value": speed_abs})["status"] is True
@@ -615,7 +615,7 @@ class TestQueueApi(ApiTestFunctions):
         def size_in_bytes(size):
             # Helper function for list.sort() to deal with B/KB/MB in size values
             if size.endswith(" MB"):
-                return float(size.strip(" MB")) * 1024 ** 2
+                return float(size.strip(" MB")) * 1024**2
             if size.endswith(" KB"):
                 return float(size.strip(" KB")) * 1024
             if size.endswith(" B"):
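Editor's note: the change here is formatting-only, but the helper deserves a caution: str.strip(" MB") strips any of the characters " ", "M", "B" from both ends of the string, not the literal suffix. It happens to work for these values; str.removesuffix (Python 3.9+) says what it means:

size = "1.5 MB"
assert size.strip(" MB") == "1.5"         # works, but strips a *set* of characters
assert "MB 1.5 MB".strip(" MB") == "1.5"  # ...from both ends, which can surprise
assert size.removesuffix(" MB") == "1.5"  # removes the exact suffix only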
@@ -92,10 +92,10 @@ class TestDownloadFlow(SABnzbdBaseTest):
                     '//div[@id="history-tab"]//tr[td/div/span[contains(text(), "%s")]]/td[contains(@class, "status")]'
                     % test_job_name
                 ).text
+                # Always sleep to give it some time
+                time.sleep(1)
                 if status_text == "Completed":
                     break
-                else:
-                    time.sleep(1)
             except WebDriverException:
                 time.sleep(1)
         else:
@@ -124,12 +124,12 @@ class TestMisc:
         assert "10.0 M" == misc.to_units(1024 * 1024 * 10)
         assert "100.0 M" == misc.to_units(1024 * 1024 * 100)
         assert "9.8 G" == misc.to_units(1024 * 1024 * 10000)
-        assert "1024.0 P" == misc.to_units(1024 ** 6)
+        assert "1024.0 P" == misc.to_units(1024**6)
 
     def test_unit_back_and_forth(self):
         assert 100 == misc.from_units(misc.to_units(100))
         assert 1024 == misc.from_units(misc.to_units(1024))
-        assert 1024 ** 3 == misc.from_units(misc.to_units(1024 ** 3))
+        assert 1024**3 == misc.from_units(misc.to_units(1024**3))
 
     def test_caller_name(self):
         @set_config({"log_level": 0})
@@ -208,21 +208,21 @@ class FakeHistoryDB(db.HistoryDB):
             nzo.status = choice([Status.COMPLETED, choice(self.status_options)])
             nzo.fail_msg = "¡Fracaso absoluto!" if nzo.status == Status.FAILED else ""
             nzo.nzo_id = "SABnzbd_nzo_%s" % ("".join(choice(ascii_lowercase + digits) for i in range(8)))
-            nzo.bytes_downloaded = randint(1024, 1024 ** 4)
+            nzo.bytes_downloaded = randint(1024, 1024**4)
             nzo.md5sum = "".join(choice("abcdef" + digits) for i in range(32))
             nzo.repair, nzo.unpack, nzo.delete = pp_to_opts(choice(list(db._PP_LOOKUP.keys()))) # for "pp"
-            nzo.nzo_info = {"download_time": randint(1, 10 ** 4)}
+            nzo.nzo_info = {"download_time": randint(1, 10**4)}
             nzo.unpack_info = {"unpack_info": "placeholder unpack_info line\r\n" * 3}
             nzo.futuretype = False # for "report", only True when fetching an URL
             nzo.download_path = os.path.join(os.path.dirname(db.HistoryDB.db_path), "placeholder_downpath")
 
             # Mock time when calling add_history_db() to randomize completion times
-            almost_time = mock.Mock(return_value=time.time() - randint(0, 10 ** 8))
+            almost_time = mock.Mock(return_value=time.time() - randint(0, 10**8))
             with mock.patch("time.time", almost_time):
                 self.add_history_db(
                     nzo,
                     storage=os.path.join(os.path.dirname(db.HistoryDB.db_path), "placeholder_workdir"),
-                    postproc_time=randint(1, 10 ** 3),
+                    postproc_time=randint(1, 10**3),
                     script_output="",
                     script_line="",
                 )