Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-02 04:28:02 -05:00)

Compare commits: 4.6.0Alpha...master (423 commits)
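The same comparison can be reproduced locally with plain git. A minimal sketch, assuming the mirror URL above and that the two refs 4.6.0Alpha and master both exist in it:

    # Clone the mirror and make sure tags are present
    git clone https://github.com/sabnzbd/sabnzbd.git
    cd sabnzbd
    git fetch --tags origin

    # Commits reachable from master but not from 4.6.0Alpha
    git log --oneline 4.6.0Alpha..master

    # Combined diff; the three-dot form diffs from the merge base,
    # matching the usual "compare" view
    git diff 4.6.0Alpha...master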
Commit SHA1s:

11ba9ae12a a61a5539a7 77f7490aea a7198b6a81 977dbc805f abcca19820 52a7b5dcff 9518714885 4a89fcf8ea d11e757c6e
4f9ed7803f 95bc069af9 d1d9bab65a e2560bf214 825322baa4 7a5ca5b226 cb4f022d17 913e4ea02e aa0d44a60b 8b5f29df8f
82954f5930 67f1858315 55bb81ceef 6864810ace bae55636a8 f4778abd1f 2cb716ce26 3246e9c6d4 ae6d5f54bd bd95c29866
074eed16e1 3b72a005fd afb9a4758f 35c180216b f0c6fe5786 bd8c245b83 effc7265d4 bd9a8e5c33 c55d662e1f dbfabc1d80
d897936da5 f81a8c97c4 e93e01dd59 79b504ff93 52dafd4ab8 0cc538ac5a 4b99d04454 708fad33f3 c6dc25c9c2 07be38cd01
0121e0ae16 f24b3ced28 157dfc928d d10639542d c0f0b7eb31 d6d70325db 46954165d2 58e7d520bf a4f8040324 8d5cc9a3e6
4592ce4d55 b62b38b5af 14b1d4630c 8a42abd1e7 41e5dfdf18 41de13388c 1f16f13169 ef23d40972 b07b43496c 2ba04f1a6a
e7e06dea41 ce32504a81 7cd6c94482 fcb3d01194 af0b53990c e3861954ba 006dd8dc77 dbff203c62 f45eb891cd 77b58240cf
97ae1ff10e 8734a4f24b 480fce55a8 d4136fadd2 308bc375bd 3bbcf6a41e 3d5d10a4c1 0e979c14f0 70f49114ac f730607414
0172ee25c9 699d75bb9f 95822704c8 76e5f69e67 abd31d0249 9ae7ee6e2d 18f4cc25f3 b755192600 045140cfbc 4e7e44e25f
5c4dfa4cc6 b7e3401e8e 90cee7fb31 8e0e3cf35e 7f72584537 8f0d606892 9fafe64cff 94b42e0597 b2c1960d93 9d24b4cc35
3d675b033c 0d2d9be8b3 6e9b6dab97 44a1717f6d 4f51c74297 87c64a8c5d b6c6635f22 5a7abcb07c 65232d134b d7b4bdefe5
6d9174bea1 921edfd4c5 786d5b0667 e846c71f20 0108e2ef5a 9a81277ff6 06cc2ff316 7cdf4cb48c c34c547f1f 9507294db7
ae7dd62d9f 52e309cb09 b580373982 ec7bde5bb2 3516eeec5b 52351192e6 3a6f04496d 47f2df2112 363a26b8a1 7e50a00f55
a7d6a80e82 e7da95b2ac 74fca23d59 0a12fa1253 1263068140 916c191b18 d8c0220353 4ab425d15c 74e5633d1c 89d36bbc61
1877ac18a5 5e42e25617 c27c9564cf c4b0da335d fab36ec008 8a2b875779 efaffb8298 e004eb3f00 43e8f6dc81 f5bff8fe7c
fad8484b93 7664b54f89 21cbc353dd 8d66306ec4 479daf0e76 bf0fbb7b10 d3c91f1585 ca165b328a fa2ffeea92 0d00965ac3
7d7bec1f80 b6fd915365 fecae72267 7bffd91e3f f859521a7e a869386fac 8bc7885b7a 78be46738d 6fce73855c fa844a6223
906379dd09 37cded612f 73e8fade61 758cc7afab d74b7b06d2 39009f2f71 9fdc1c6813 c5568fe830 bad81f84b9 2ac08dd0e6
408ffc4539 eb958327c5 e157d77a1e e961c9ea8f 258c4f769d b31fedd857 eafe69500b ae09990c43 cf54b65c32 7974421fa1
847a098d4e eb4de0ae0f bca9f3b753 cad8a9a5d3 f5f36d21e8 c51435c114 2a7f1780b4 98a44e40fb 65cf6fa9a1 b2e32d1720
f0bfedbe8e fd4e059c13 a53575e154 4a73484603 03b380f90b a2bd3b2dfe 56fe140ebf 4fafcce740 02352c4ae6 4b74aab335
2d67ac189d 8ece62e23d 56c2bdd77d 1f555f1930 8496432c14 1672ffa670 6aab199f12 46d0c379a4 99240f145a 3c9079d73c
0eb98b9a6c 76bfd98b77 3348640c88 d81c64fd2b 8b4c919617 76c58953df 4ddc5caa49 694663bd95 62aba5844e d0d60cef05
3d293fdcb0 96e9528046 4ea24b3203 a756eea25a 210020e489 e586ead024 14c80bf1dc bdd56e794a a544548934 e06c1d61fb
600c5209c6 bee90366ee e9bc4e9417 f01ff15761 356ada159d cc831e16d8 b8dc46ad01 d8ab19087d ec8a79eedd f1e2a8e9d8
4042a5fe5d a4752751ed e23ecf46d1 70a8c597a6 fa639bdb53 233bdd5b1d a0ab6d35c7 bd29680ce7 7139e92554 897df53466
58281711f6 b524383aa3 75a16e3588 1453032ad6 824ab4afad 73dd41c67f 59ee77355d 5c758773ad 46de49df06 d1c54a9a74
e7527c45cd 7d5207aa67 654302e691 ee673b57fd 2be374b841 906e1eda89 ece02cc4fa 876ad60ddf 862da354ac 8fd477b979
2d7005655c 7322f8348a e3e3a12e73 77cdd057a4 e8206fbdd9 589f15a77b 7bb443678a 6390415101 4abf192e11 1fed37f9da
a9d86a7447 2abe4c3cef 0542c25003 1b8ee4e290 51128cba55 3612432581 deca000a1b 39cccb5653 f6838dc985 8cd4d92395
3bf9906f45 9f7daf96ef 67de4df155 bc51a4bd1c bb54616018 6bcff5e014 8970a03a9a 3ad717ca35 b14f72c67a 45d036804f
8f606db233 3766ba5402 e851813cef 4d49ad9141 16618b3af2 0e5c0f664f 7be9281431 ee0327fac1 9930de3e7f e8503e89c6
1d9ed419eb 0207652e3e 0f1e99c5cb f134bc7efb dcd7c7180e fbbfcd075b f42d2e4140 88882cebbc 17a979675c 4642850c79
e8d6eebb04 864c5160c0 99b5a00c12 85ee1f07d7 e58b4394e0 1e91a57bf1 39cee52a7e 72068f939d 096d0d3cad 2472ab0121
00421717b8 ae96d93f94 8522c40c8f 23f86e95f1 eed2045189 217785bf0f 6aef50dc5d 16b6e3caa7 3de4c99a8a 980aa19a75
fb4b57e056 03638365ea 157cb1c83d e51f11c2b1 1ad0961dd8 46ff7dd4e2 8b067df914 ef43b13272 e8e9974224 feebbb9f04
bc4f06dd1d 971e4fc909 51cc765949 19c6a4fffa 105ac32d2f 57550675d2 e674abc5c0 f965c96f51 c76b8ed9e0 4fbd0d8a7b
2186c0fff6 1adca9a9c1 9408353f2b 84f4d453d2 d10209f2a1 3ae149c72f 47385acc3b 814eeaa900 5f2ea13aad 41ca217931
b57d36e8dd 9a4be70734 a8443595a6 fd0a70ac58 8a8685c968 9e6cb8da8e 054ec54d51 272ce773cb 050b925f7b 0087940898
e323c014f9 cc465c7554 14cb37564f 094db56c3b aabb709b8b 0833dd2db9 cd3f912be4 665c516db6 b670da9fa0 80bee9bffe
d85a70e8ad 8f21533e76 89996482a1 03c10dce91 bd5331be05 46e1645289 4ce3965747 9d4af19db3 48e034f4be f8959baa2f
8ed5997eae daf9f50ac8 6b11013c1a
.github/renovate.json (vendored, 3 changes)

@@ -23,8 +23,7 @@
 "jaraco.collections",
 "sabctools",
 "paho-mqtt",
-"werkzeug",
-"tavern"
+"werkzeug"
 ],
 "packageRules": [
 {
.github/workflows/build_release.yml (vendored, 38 changes)

@@ -10,9 +10,9 @@ jobs:
   build_windows:
     name: Build Windows binary
     runs-on: windows-2022
-    timeout-minutes: 15
+    timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
       - name: Set up Python
         uses: actions/setup-python@v6
         with:

@@ -31,13 +31,13 @@
         id: windows_binary
         run: python builder/package.py binary
       - name: Upload Windows standalone binary (unsigned)
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         id: upload-unsigned-binary
         with:
           path: "*-win64-bin.zip"
           name: Windows standalone binary
       - name: Sign Windows standalone binary
-        uses: signpath/github-action-submit-signing-request@v2
+        uses: signpath/github-action-submit-signing-request@v1
         if: contains(github.ref, 'refs/tags/')
         with:
           api-token: ${{ secrets.SIGNPATH_API_TOKEN }}

@@ -49,7 +49,7 @@
           wait-for-completion: true
           output-artifact-directory: "signed"
       - name: Upload Windows standalone binary (signed)
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         if: contains(github.ref, 'refs/tags/')
         with:
           name: Windows standalone binary (signed)

@@ -57,13 +57,13 @@
       - name: Build Windows installer
         run: python builder/package.py installer
       - name: Upload Windows installer
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         id: upload-unsigned-installer
         with:
           path: "*-win-setup.exe"
           name: Windows installer
       - name: Sign Windows installer
-        uses: signpath/github-action-submit-signing-request@v2
+        uses: signpath/github-action-submit-signing-request@v1
         if: contains(github.ref, 'refs/tags/')
         with:
           api-token: ${{ secrets.SIGNPATH_API_TOKEN }}

@@ -76,7 +76,7 @@
           output-artifact-directory: "signed"
       - name: Upload Windows installer (signed)
         if: contains(github.ref, 'refs/tags/')
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: Windows installer (signed)
           path: "signed/*-win-setup.exe"

@@ -84,18 +84,18 @@
   build_macos:
     name: Build macOS binary
     runs-on: macos-14
-    timeout-minutes: 15
+    timeout-minutes: 30
     env:
       # We need the official Python, because the GA ones only support newer macOS versions
       # The deployment target is picked up by the Python build tools automatically
      # If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
-      PYTHON_VERSION: "3.14.2"
+      PYTHON_VERSION: "3.14.0"
       MACOSX_DEPLOYMENT_TARGET: "10.15"
       # We need to force compile for universal2 support
       CFLAGS: -arch x86_64 -arch arm64
       ARCHFLAGS: -arch x86_64 -arch arm64
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
       - name: Set up Python
         # Only use this for the caching of pip packages!
         uses: actions/setup-python@v6

@@ -140,7 +140,7 @@
         # Run this on macOS so the line endings are correct by default
         run: python builder/package.py source
       - name: Upload source distribution
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           path: "*-src.tar.gz"
           name: Source distribution

@@ -153,7 +153,7 @@
           python3 builder/package.py app
           python3 builder/make_dmg.py
       - name: Upload macOS binary
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           path: "*-macos.dmg"
           name: macOS binary

@@ -172,7 +172,7 @@
             linux_arch: arm64

     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
       - name: Cache par2cmdline-turbo tarball
         uses: actions/cache@v4
         id: cache-par2cmdline

@@ -196,7 +196,7 @@
           timeout 10s snap run sabnzbd --help || true
           sudo snap remove sabnzbd
       - name: Upload snap
-        uses: actions/upload-artifact@v5
+        uses: actions/upload-artifact@v4
         with:
           name: Snap package (${{ matrix.linux_arch }})
           path: ${{ steps.snapcraft.outputs.snap }}

@@ -215,7 +215,7 @@
     runs-on: ubuntu-latest
     needs: [build_windows, build_macos]
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
       - name: Set up Python
         uses: actions/setup-python@v6
         with:

@@ -223,15 +223,15 @@
           cache: pip
           cache-dependency-path: "builder/release-requirements.txt"
       - name: Download Source distribution artifact
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v5
         with:
           name: Source distribution
       - name: Download macOS artifact
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v5
         with:
           name: macOS binary
       - name: Download Windows artifacts
-        uses: actions/download-artifact@v6
+        uses: actions/download-artifact@v5
         with:
           pattern: ${{ (contains(github.ref, 'refs/tags/')) && '*signed*' || '*Windows*' }}
           merge-multiple: true
.github/workflows/integration_testing.yml (vendored, 10 changes)

@@ -7,7 +7,7 @@ jobs:
     name: Black Code Formatter
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
      - name: Black Code Formatter
         uses: lgeiger/black-action@master
         with:

@@ -20,7 +20,7 @@
             builder/SABnzbd.spec
             tests
             --line-length=120
-            --target-version=py39
+            --target-version=py38
             --check
             --diff

@@ -31,19 +31,19 @@
     strategy:
       fail-fast: false
       matrix:
-        python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13", "3.14" ]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
         name: ["Linux"]
         os: [ubuntu-latest]
         include:
           - name: macOS
-            os: macos-latest
+            os: macos-13
             python-version: "3.14"
           - name: Windows
             os: windows-2022
             python-version: "3.14"

     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v6
         with:

.github/workflows/translations.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
     env:
       TX_TOKEN: ${{ secrets.TX_TOKEN }}
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v5
         with:
           token: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
       - name: Generate translatable texts
@@ -52,7 +52,7 @@ Specific guides to install from source are available for Windows and macOS:
 https://sabnzbd.org/wiki/installation/install-macos
 https://sabnzbd.org/wiki/installation/install-from-source-windows

-Only Python 3.9 and above is supported.
+Only Python 3.8 and above is supported.

 On Linux systems you need to install:
 par2 unrar python3-setuptools python3-pip
@@ -16,7 +16,7 @@ If you want to know more you can head over to our website: https://sabnzbd.org.

 SABnzbd has a few dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:

-- `python` (Python 3.9 and above, often called `python3`)
+- `python` (Python 3.8 and above, often called `python3`)
 - Python modules listed in `requirements.txt`. Install with `python3 -m pip install -r requirements.txt -U`
 - `par2` (Multi-threaded par2 installation guide can be found [here](https://sabnzbd.org/wiki/installation/multicore-par2))
 - `unrar` (make sure you get the "official" non-free version of unrar)
README.mkd (105 changes)

@@ -1,36 +1,95 @@
-Release Notes - SABnzbd 4.6.0 Alpha 2
+Release Notes - SABnzbd 4.5.5
 =========================================================

-This is the second test release of version 4.6.
+## Bug fixes and changes in 4.5.5

-## New features in 4.6.0
+* macOS: Failed to start on versions of macOS older than 11.
+  Python 3.14 dropped support for macOS 10.13 and 10.14.
+  Because of that macOS 10.15 is required to run 4.5.5.

-* Added default support for NNTP Pipelining which eliminates idle waiting
-  between requests, significantly improving speeds on high-latency connections.
-  Read more here: https://sabnzbd.org/wiki/advanced/nntp-pipelining
-* Dynamically increase Assembler limits on faster connections.
-* Improved disk speed measurement in Status window.
-* Enable `verify_xff_header` by default.
-* Reduce delays between jobs during post-processing.
-* If a download only has `.nzb` files inside, the new downloads
-  will include the name of the original download.
-* Dropped support for Python 3.8.
+## Bug fixes and changes in 4.5.4

-## Bug fixes since 4.5.0
+### New Features
+* History details now includes option to mark job as `Completed`.
+* `Quota` notifications available for all notification services.
+  - Sends alerts at 75%, 90%, and 100% quota usage.
+* Multi-Operations now supports Move to Top/Bottom.
+* New `outgoing_nntp_ip` option to bind outgoing NNTP connections to specific IP address.

-* `Check before download` could get stuck or fail to reject.
-* Windows: Tray icon disappears after Explorer restart.
-* Correct mobile layout if `Full Width` is enabled.
-* Aborted Direct Unpack could result in no files being unpacked.
-* macOS: Slow to start on some network setups.
+### Improvements
+* Setup wizard now requires successful Server Test before proceeding.
+* Anime episode notation `S04 - 10` now supported for Sorting and Duplicate Detection.
+* Multi-Operations: Play/Resume button unselects on second click for better usability.
+* Unrar now handles renaming of invalid characters on Windows filesystem.
+* Switched from vendored `sabnzbd.rarfile` module to `rarfile>=4.2`.
+* Warning displayed when removing all Orphaned jobs (clears Temporary Download folder).
+
+### Bug Fixes
+* Active connections counter in Status window now updates correctly.
+* Job setting changes during URL-grabbing no longer ignored.
+* Incomplete `.par2` file parsing no longer leaves files behind.
+* `Local IPv4 address` now detectable when using Socks5 proxy.
+* Server configuration changes no longer show `Failure` message during page reload.
+
+### Platform-Specific
+* Linux: `Make Windows compatible` automatically enabled when needed.
+* Windows: Executables are now signed using SignPath Foundation certificate.
+* Windows: Can now start SABnzbd directly from installer.
+* Windows and macOS: Binaries now use Python 3.14.
+
+## Bug fixes and changes in 4.5.3
+
+* Remember if `Permanently delete` was previously checked.
+* All available IP-addresses will be included when selecting the fastest.
+* Pre-queue script rejected NZBs were sometimes reported as `URL Fetching failed`.
+* RSS `Next scan` time was not adjusted after manual `Read All Feeds Now`.
+* Prevent renaming of `.cbr` files during verification.
+* If `--disable-file-log` was enabled, `Show Logging` would crash.
+* API: Added `time_added`, timestamp of when the job was added to the queue.
+* API: History output could contain duplicate items.
+* Snap: Updated packages and changed build process for reliability.
+* macOS: Repair would fail on macOS 10.13 High Sierra.
+* Windows: Unable to start on Windows 8.
+* Windows: Updated Unrar to 7.13, which resolves CVE-2025-8088.
+
+## Bug fixes and changes in 4.5.2
+
+* Added Tab and Shift+Tab navigation to move between rename fields in queue.
+* Invalid cookies of other services could result in errors.
+* Internet Bandwidth test could be stuck in infinite loop.
+* RSS readout did not ignore torrent alternatives.
+* Prowl and Pushover settings did not load correctly.
+* Renamed `osx` to `macos` internally.
+* API: Removed `B` post-fix from `quota` and `left_quota` fields in `queue`.
+* Windows: Support more languages in the installer.
+* Windows and macOS: Updated par2cmdline-turbo to 1.3.0 and Unrar to 7.12.
+
+## Bug fixes and changes in 4.5.1
+
+* Correct platform detection on Linux.
+* The `From SxxEyy` RSS filters did not always work.
+* Windows and macOS: Update Unrar to 7.11.
+
+## New features in 4.5.0
+
+* Improved failure detection by downloading additional par2 files right away.
+* Added more diagnostic information about the system.
+* Use XFF headers for login validation if `verify_xff_header` is enabled.
+* Added Turkish translation (by @cardpuncher).
+* Added `unrar_parameters` option to supply custom Unrar parameters.
+* Windows: Removed MultiPar support.
+* Windows and macOS: Updated Python to 3.13.2, 7zip to 24.09,
+  Unrar to 7.10 and par2cmdline-turbo to 1.2.0.
+
+## Bug fixes since 4.4.0
+
+* Handle filenames that exceed maximum filesystem lengths.
+* Directly decompress gzip responses when retrieving NZB's.

 ## Upgrade notices

-* You can directly upgrade from version 3.0.0 and newer.
-* Upgrading from older versions will require performing a `Queue repair`.
-* Downgrading from version 4.2.0 or newer to 3.7.2 or older will require
-  performing a `Queue repair` due to changes in the internal data format.
+* Direct upgrade supported from version 3.0.0 and newer.
+* Older versions require performing a `Queue repair` after upgrading.

 ## Known problems and solutions
SABnzbd.py (28 changes)

@@ -19,8 +19,8 @@ import sys

 # Trick to show a better message on older Python
 # releases that don't support walrus operator
-if Python_39_is_required_to_run_SABnzbd := sys.hexversion < 0x03090000:
-    print("Sorry, requires Python 3.9 or above")
+if Python_38_is_required_to_run_SABnzbd := sys.hexversion < 0x03080000:
+    print("Sorry, requires Python 3.8 or above")
     print("You can read more at: https://sabnzbd.org/wiki/installation/install-off-modules")
     sys.exit(1)

@@ -40,7 +40,7 @@ import re
 import gc
 import threading
 import http.cookies
-from typing import Any
+from typing import List, Dict, Any

 try:
     import sabctools

@@ -142,7 +142,7 @@ class GUIHandler(logging.Handler):
         """Initializes the handler"""
         logging.Handler.__init__(self)
         self._size: int = size
-        self.store: list[dict[str, Any]] = []
+        self.store: List[Dict[str, Any]] = []

     def emit(self, record: logging.LogRecord):
         """Emit a record by adding it to our private queue"""

@@ -540,19 +540,21 @@ def get_webhost(web_host, web_port, https_port):
     # If only APIPA's or IPV6 are found, fall back to localhost
     ipv4 = ipv6 = False
     localhost = hostip = "localhost"

     try:
-        # Valid user defined name?
-        info = socket.getaddrinfo(web_host, None)
+        info = socket.getaddrinfo(socket.gethostname(), None)
     except socket.error:
-        if not is_localhost(web_host):
-            web_host = "0.0.0.0"
-        try:
-            info = socket.getaddrinfo(localhost, None)
-        except socket.error:
-            info = socket.getaddrinfo("127.0.0.1", None)
-            localhost = "127.0.0.1"
+        # Hostname does not resolve
+        # Valid user defined name?
+        info = socket.getaddrinfo(web_host, None)
+    if not is_localhost(web_host):
+        web_host = "0.0.0.0"
+    try:
+        info = socket.getaddrinfo(localhost, None)
+    except socket.error:
+        info = socket.getaddrinfo("127.0.0.1", None)
+        localhost = "127.0.0.1"

     for item in info:
         ip = str(item[4][0])
         if ip.startswith("169.254."):
@@ -28,6 +28,7 @@ import urllib.request
 import urllib.error
 import configobj
 import packaging.version
+from typing import List

 from constants import (
     RELEASE_VERSION,

@@ -69,7 +70,7 @@ def delete_files_glob(glob_pattern: str, allow_no_matches: bool = False):
         raise FileNotFoundError(f"No files found that match '{glob_pattern}'")


-def run_external_command(command: list[str], print_output: bool = True, **kwargs):
+def run_external_command(command: List[str], print_output: bool = True, **kwargs):
     """Wrapper to ease the use of calling external programs"""
     process = subprocess.Popen(command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
     output, _ = process.communicate()

@@ -112,7 +112,7 @@ if RELEASE_THIS and gh_token:
         print("Removing existing asset %s " % gh_asset.name)
         gh_asset.delete_asset()
         # Upload the new one
-        print("Uploading %s to release %s" % (file_to_check, gh_release.name))
+        print("Uploading %s to release %s" % (file_to_check, gh_release.title))
         gh_release.upload_asset(file_to_check)

         # Check if we now have all files
@@ -1,10 +1,10 @@
 # Basic build requirements
 # Note that not all sub-dependencies are listed, but only ones we know could cause trouble
-pyinstaller==6.17.0
+pyinstaller==6.16.0
 packaging==25.0
-pyinstaller-hooks-contrib==2025.10
-altgraph==0.17.5
-wrapt==2.0.1
+pyinstaller-hooks-contrib==2025.9
+altgraph==0.17.4
+wrapt==2.0.0
 setuptools==80.9.0

 # For the Windows build

@@ -12,8 +12,8 @@ pefile==2024.8.26; sys_platform == 'win32'
 pywin32-ctypes==0.2.3; sys_platform == 'win32'

 # For the macOS build
-dmgbuild==1.6.6; sys_platform == 'darwin'
-mac-alias==2.2.3; sys_platform == 'darwin'
-macholib==1.16.4; sys_platform == 'darwin'
-ds-store==1.3.2; sys_platform == 'darwin'
-PyNaCl==1.6.1; sys_platform == 'darwin'
+dmgbuild==1.6.5; sys_platform == 'darwin'
+mac-alias==2.2.2; sys_platform == 'darwin'
+macholib==1.16.3; sys_platform == 'darwin'
+ds-store==1.3.1; sys_platform == 'darwin'
+PyNaCl==1.6.0; sys_platform == 'darwin'
@@ -187,7 +187,7 @@
         <td><label for="apprise_enable"> $T('opt-apprise_enable')</label></td>
     </tr>
 </table>
-<p>$T('explain-apprise_enable')</p>
+<em>$T('explain-apprise_enable')</em><br>
 <p>$T('version'): ${apprise.__version__}</p>

 $show_cat_box('apprise')

@@ -197,7 +197,7 @@
 <div class="field-pair">
     <label class="config" for="apprise_urls">$T('opt-apprise_urls')</label>
     <input type="text" name="apprise_urls" id="apprise_urls" value="$apprise_urls" />
-    <span class="desc">$T('explain-apprise_urls')</span>
+    <span class="desc">$T('explain-apprise_urls'). <br>$T('readwiki')</span>
 </div>
 <div class="field-pair">
     <span class="desc">$T('explain-apprise_extra_urls')</span>
@@ -7,10 +7,6 @@
     padding-right: 8px;
 }

-.container-full-width .container {
-    width: 100%;
-}
-
 .main-navbar {
     margin-top: 0;
     padding: 0;
@@ -30,7 +30,6 @@
     <url type="faq">https://sabnzbd.org/wiki/faq</url>
     <url type="contact">https://sabnzbd.org/live-chat.html</url>
     <releases>
-        <release version="4.6.0" date="2025-12-24" type="stable"/>
         <release version="4.5.5" date="2025-10-24" type="stable"/>
         <release version="4.5.4" date="2025-10-22" type="stable"/>
         <release version="4.5.3" date="2025-08-25" type="stable"/>
@@ -508,6 +508,11 @@ msgstr ""
 msgid "Fatal error in Downloader"
 msgstr ""

+#. Warning message
+#: sabnzbd/downloader.py
+msgid "%s@%s: Received unknown status code %s for article %s"
+msgstr ""
+
 #: sabnzbd/downloader.py
 msgid "Too many connections to server %s [%s]"
 msgstr ""

@@ -525,6 +530,11 @@ msgstr ""
 msgid "Connecting %s@%s failed, message=%s"
 msgstr ""

+#. Error message
+#: sabnzbd/downloader.py
+msgid "Suspect error in downloader"
+msgstr ""
+
 #: sabnzbd/downloader.py, sabnzbd/skintext.py
 msgid "Shutting down"
 msgstr ""

@@ -1117,16 +1127,6 @@ msgstr ""
 msgid "left"
 msgstr ""

-#. Warning message
-#: sabnzbd/newswrapper.py
-msgid "%s@%s: Received unknown status code %s for article %s"
-msgstr ""
-
-#. Error message
-#: sabnzbd/newswrapper.py
-msgid "Suspect error in downloader"
-msgstr ""
-
 #: sabnzbd/newswrapper.py
 msgid "This server does not allow SSL on this port"
 msgstr ""

@@ -3127,7 +3127,7 @@ msgid "Enable SFV-based checks"
 msgstr ""

 #: sabnzbd/skintext.py
-msgid "If no par2 files are available, use sfv files (if present) to verify files"
+msgid "Do an extra verification based on SFV files."
 msgstr ""

 #: sabnzbd/skintext.py

@@ -3885,16 +3885,17 @@ msgid "Enable Apprise notifications"
 msgstr ""

 #: sabnzbd/skintext.py
-msgid "Send notifications directly to any notification service you use.<br>For example: Slack, Discord, Telegram, or any service from over 100 supported services!"
+msgid "Send notifications using Apprise to almost any notification service"
 msgstr ""

 #. Apprise settings
 #: sabnzbd/skintext.py
-msgid "Use default Apprise URLs"
+msgid "Default Apprise URLs"
 msgstr ""

 #. Apprise settings
 #: sabnzbd/skintext.py
-msgid "Apprise defines service connection information using URLs.<br>Read the Apprise wiki how to define the URL for each service.<br>Use a comma and/or space to identify more than one URL."
+msgid "Use a comma and/or space to identify more than one URL."
 msgstr ""

 #: sabnzbd/skintext.py
@@ -560,6 +560,11 @@ msgstr ""
 msgid "Fatal error in Downloader"
 msgstr ""

+#. Warning message
+#: sabnzbd/downloader.py
+msgid "%s@%s: Received unknown status code %s for article %s"
+msgstr ""
+
 #: sabnzbd/downloader.py
 msgid "Too many connections to server %s [%s]"
 msgstr "Příliš mnoho spojení k serveru %s [%s]"

@@ -579,6 +584,11 @@ msgstr "Přihlášení k serveru %s se nezdařilo [%s]"
 msgid "Connecting %s@%s failed, message=%s"
 msgstr ""

+#. Error message
+#: sabnzbd/downloader.py
+msgid "Suspect error in downloader"
+msgstr "Nejspíše chyba downloaderu"
+
 #: sabnzbd/downloader.py, sabnzbd/skintext.py
 msgid "Shutting down"
 msgstr "Vypínání"

@@ -1197,16 +1207,6 @@ msgstr "Zkouším SFV ověření"
 msgid "left"
 msgstr ""

-#. Warning message
-#: sabnzbd/newswrapper.py
-msgid "%s@%s: Received unknown status code %s for article %s"
-msgstr ""
-
-#. Error message
-#: sabnzbd/newswrapper.py
-msgid "Suspect error in downloader"
-msgstr "Nejspíše chyba downloaderu"
-
 #: sabnzbd/newswrapper.py
 msgid "This server does not allow SSL on this port"
 msgstr "Tento server nepovoluje SSL na tomto portu"

@@ -3287,8 +3287,7 @@ msgid "Enable SFV-based checks"
 msgstr ""

 #: sabnzbd/skintext.py
-msgid ""
-"If no par2 files are available, use sfv files (if present) to verify files"
+msgid "Do an extra verification based on SFV files."
 msgstr ""

 #: sabnzbd/skintext.py

@@ -4081,22 +4080,17 @@ msgid "Enable Apprise notifications"
 msgstr ""

 #: sabnzbd/skintext.py
-msgid ""
-"Send notifications directly to any notification service you use.<br>For "
-"example: Slack, Discord, Telegram, or any service from over 100 supported "
-"services!"
+msgid "Send notifications using Apprise to almost any notification service"
 msgstr ""

 #. Apprise settings
 #: sabnzbd/skintext.py
-msgid "Use default Apprise URLs"
+msgid "Default Apprise URLs"
 msgstr ""

 #. Apprise settings
 #: sabnzbd/skintext.py
-msgid ""
-"Apprise defines service connection information using URLs.<br>Read the "
-"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
-"space to identify more than one URL."
+msgid "Use a comma and/or space to identify more than one URL."
 msgstr ""

 #: sabnzbd/skintext.py
253
po/main/da.po
253
po/main/da.po
@@ -360,11 +360,11 @@ msgstr "Kvota"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Advarsel om kvotegrænse (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Download genoptaget efter nulstilling af kvote"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -585,6 +585,11 @@ msgstr "Det lykkedes ikke at initialisere %s@%s med begrundelse %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Alvorlig fejl i Downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Modtog ukendt statuskode %s for artikel %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Alt for mange forbindelser til serveren %s [%s]"
|
||||
@@ -606,6 +611,11 @@ msgstr "Det lykkedes ikke at logge på serveren %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Forbindelse %s@%s mislykkedes, besked %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Suspect fejl i downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Påbegynder lukning af SABnzbd"
|
||||
@@ -1238,16 +1248,6 @@ msgstr "Forsøger SFV verifikation"
|
||||
msgid "left"
|
||||
msgstr "tilbage"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Modtog ukendt statuskode %s for artikel %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Suspect fejl i downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Denne server tillader ikke SSL på denne port"
|
||||
@@ -1461,7 +1461,7 @@ msgstr "Før-kø script job markeret som mislykkedet"
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Uønsket filtype i fil %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
@@ -1478,7 +1478,7 @@ msgstr "DUPLIKERE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATIV"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
@@ -1717,7 +1717,7 @@ msgstr "Efterbehandling mislykkedes for %s (%s)"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Post-processing was aborted"
|
||||
msgstr "Efterbehandling blev afbrudt"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Download Failed"
|
||||
@@ -1771,12 +1771,12 @@ msgstr "RAR filer kunne ikke bekræfte"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Trying RAR renamer"
|
||||
msgstr "Forsøger RAR-omdøbning"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "No matching earlier rar file for %s"
|
||||
msgstr "Ingen matchende tidligere rar-fil for %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/postproc.py
|
||||
@@ -1801,7 +1801,7 @@ msgstr "Fejl ved lukning af system"
|
||||
#. Error message
|
||||
#: sabnzbd/powersup.py
|
||||
msgid "Received a DBus exception %s"
|
||||
msgstr "Modtog en DBus-undtagelse %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/rss.py
|
||||
@@ -2177,7 +2177,7 @@ msgstr "Denne måned"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Selected date range"
|
||||
msgstr "Valgt datointerval"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Today"
|
||||
@@ -2272,7 +2272,7 @@ msgstr "Forum"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Live Chat"
|
||||
msgstr "Live chat"
|
||||
msgstr ""
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2421,7 +2421,7 @@ msgstr "Forsøg igen"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "Markér som fuldført og fjern midlertidige filer"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2436,7 +2436,7 @@ msgstr "Fjern alt fra køen?"
|
||||
#. Delete confirmation popup
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Are you sure you want to remove these jobs?"
|
||||
msgstr "Er du sikker på, at du vil fjerne disse jobs?"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page button
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2461,7 +2461,7 @@ msgstr "Fjern NZB & slet filer"
|
||||
#. Checkbox if job should be added to Archive
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Permanently delete (skip archive)"
|
||||
msgstr "Slet permanent (spring arkiv over)"
|
||||
msgstr ""
|
||||
|
||||
#. Caption for missing articles in Queue
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2484,7 +2484,7 @@ msgstr "Nulstil kvota nu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Archive"
|
||||
msgstr "Arkiv"
|
||||
msgstr ""
|
||||
|
||||
#. Button/link hiding History job details
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2509,7 +2509,7 @@ msgstr "Vis Alt"
|
||||
#. Button showing all archived jobs
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Show Archive"
|
||||
msgstr "Vis arkiv"
|
||||
msgstr ""
|
||||
|
||||
#. History table header - Size of the download quota
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2560,8 +2560,6 @@ msgid ""
|
||||
"Disconnect all active connections to usenet servers. Connections will be "
|
||||
"reopened after a few seconds if there are items in the queue."
|
||||
msgstr ""
|
||||
"Afbryd alle aktive forbindelser til usenet-servere. Forbindelser genåbnes "
|
||||
"efter få sekunder, hvis der er elementer i køen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "This will send a test email to your account."
|
||||
@@ -2752,8 +2750,6 @@ msgid ""
|
||||
"Speed up repairs by installing par2cmdline-turbo, it is available for many "
|
||||
"platforms."
|
||||
msgstr ""
|
||||
"Sæt fart på reparationer ved at installere par2cmdline-turbo, det er "
|
||||
"tilgængeligt for mange platforme."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Version"
|
||||
@@ -2829,8 +2825,6 @@ msgid ""
|
||||
"If the SABnzbd Host or Port is exposed to the internet, your current "
|
||||
"settings allow full external access to the SABnzbd interface."
|
||||
msgstr ""
|
||||
"Hvis SABnzbd-værten eller porten er eksponeret på internettet, tillader dine"
|
||||
" nuværende indstillinger fuld ekstern adgang til SABnzbd-grænsefladen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Security"
|
||||
@@ -2941,10 +2935,6 @@ msgid ""
|
||||
"the Completed Download Folder.<br>Recurring backups can be configured on the"
|
||||
" Scheduling page."
|
||||
msgstr ""
|
||||
"Opret en sikkerhedskopi af konfigurationsfilen og databaser i "
|
||||
"sikkerhedskopimappen.<br>Hvis sikkerhedskopimappen ikke er indstillet, "
|
||||
"oprettes sikkerhedskopien i den fuldførte downloadmappe.<br>Tilbagevendende "
|
||||
"sikkerhedskopier kan konfigureres på planlægningssiden."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Cleanup List"
|
||||
@@ -3059,8 +3049,6 @@ msgstr "Eksterne internetadgang"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "You can set access rights for systems outside your local network."
|
||||
msgstr ""
|
||||
"Du kan indstille adgangsrettigheder for systemer uden for dit lokale "
|
||||
"netværk."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "No access"
|
||||
@@ -3164,9 +3152,6 @@ msgid ""
|
||||
" again.<br />Applies to both the Temporary and Complete Download Folder.<br "
|
||||
"/>Checked every few minutes."
|
||||
msgstr ""
|
||||
"Download genoptages automatisk, hvis den minimale ledige plads er "
|
||||
"tilgængelig igen.<br />Gælder for både den midlertidige og den fuldførte "
|
||||
"downloadmappe.<br />Kontrolleres hvert par minutter."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Permissions for completed downloads"
|
||||
@@ -3252,9 +3237,6 @@ msgid ""
|
||||
"stored.<br />If left empty, the backup will be created in the Completed "
|
||||
"Download Folder."
|
||||
msgstr ""
|
||||
"Placering, hvor sikkerhedskopier af konfigurationsfilen og databaser "
|
||||
"gemmes.<br />Hvis den efterlades tom, oprettes sikkerhedskopien i den "
|
||||
"fuldførte downloadmappe."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "<i>Data will <b>not</b> be moved. Requires SABnzbd restart!</i>"
|
||||
@@ -3272,7 +3254,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Purge Logs"
|
||||
msgstr "Ryd logfiler"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ".nzb Backup Folder"
|
||||
@@ -3336,8 +3318,6 @@ msgid ""
|
||||
"turned off, all jobs will be marked as Completed even if they are "
|
||||
"incomplete."
|
||||
msgstr ""
|
||||
"Udpak kun og kør scripts på jobs, der bestod verifikationsstadiet. Hvis "
|
||||
"slået fra, markeres alle jobs som fuldført, selvom de er ufuldstændige."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when encrypted RAR is downloaded"
|
||||
@@ -3350,19 +3330,19 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Identical download detection"
|
||||
msgstr "Identisk downloaddetektering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Detect identical downloads based on name or NZB contents."
|
||||
msgstr "Detektér identiske downloads baseret på navn eller NZB-indhold."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Smart duplicate detection"
|
||||
msgstr "Smart dubletdetektering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Detect duplicates based on analysis of the filename."
|
||||
msgstr "Detektér dubletter baseret på analyse af filnavnet."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Allow proper releases"
|
||||
@@ -3373,8 +3353,6 @@ msgid ""
|
||||
"Bypass smart duplicate detection if PROPER, REAL or REPACK is detected in "
|
||||
"the download name."
|
||||
msgstr ""
|
||||
"Spring smart dubletdetektering over, hvis PROPER, REAL eller REPACK "
|
||||
"registreres i downloadnavnet."
|
||||
|
||||
#. Four way switch for duplicates
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3393,7 +3371,7 @@ msgstr "Mislykkes job (flyt til historik)"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Abort post-processing"
|
||||
msgstr "Afbryd efterbehandling"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when unwanted extension detected"
|
||||
@@ -3401,7 +3379,7 @@ msgstr "Aktion når uønsket extension er fundet"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when an unwanted extension is detected"
|
||||
msgstr "Handling når en uønsket filtype registreres"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Unwanted extensions"
|
||||
@@ -3409,28 +3387,25 @@ msgstr "Uønsket extension"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Blacklist"
|
||||
msgstr "Sortliste"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Whitelist"
|
||||
msgstr "Hvidliste"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Select a mode and list all (un)wanted extensions. For example: <b>exe</b> or"
|
||||
" <b>exe, com</b>"
|
||||
msgstr ""
|
||||
"Vælg en tilstand og angiv alle (u)ønskede filtypeendelser. For eksempel: "
|
||||
"<b>exe</b> eller <b>exe, com</b>"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Enable SFV-based checks"
|
||||
msgstr "Aktiver SFV-baseret kontrol"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Udfør en ekstra kontrol baseret på SFV-filer."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3498,15 +3473,15 @@ msgstr "Afbryd fra usenet-serverne når køen er tom eller sat på pause."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Automatically sort queue"
|
||||
msgstr "Sortér kø automatisk"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Automatically sort jobs in the queue when a new job is added."
|
||||
msgstr "Sortér automatisk jobs i køen, når et nyt job tilføjes."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "The queue will resort every 30 seconds if % downloaded is selected."
|
||||
msgstr "Køen vil sortere hver 30. sekund, hvis % downloadet er valgt."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Propagation delay"
|
||||
@@ -3539,11 +3514,11 @@ msgstr "Erstat mellemrum med understreg i mappenavn."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace underscores in folder name"
|
||||
msgstr "Erstat understreger i mappenavn"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace underscores with dots in folder names."
|
||||
msgstr "Erstat understreger med punktummer i mappenavne."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace dots in Foldername"
|
||||
@@ -3595,23 +3570,19 @@ msgstr "Fjern efter download"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Deobfuscate final filenames"
|
||||
msgstr "Afslør endelige filnavne"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If filenames of (large) files in the final folder look obfuscated or "
|
||||
"meaningless they will be renamed to the job name."
|
||||
msgstr ""
|
||||
"Hvis filnavne på (store) filer i den endelige mappe ser slørede eller "
|
||||
"meningsløse ud, omdøbes de til jobnavnet."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Additionally, attempts to set the correct file extension based on the file "
|
||||
"signature if the extension is not present or meaningless."
|
||||
msgstr ""
|
||||
"Forsøger derudover at indstille den korrekte filendelse baseret på "
|
||||
"filsignaturen, hvis endelsen ikke er til stede eller meningsløs."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "HTTPS certificate verification"
|
||||
@@ -3626,11 +3597,11 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr "SOCKS5-proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr "Brug den angivne SOCKS5-proxy til alle udgående forbindelser."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
@@ -3743,11 +3714,11 @@ msgstr "Tidsudløb"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Account expiration date"
|
||||
msgstr "Kontoudløbsdato"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Warn 5 days in advance of account expiration date."
|
||||
msgstr "Advar 5 dage før kontoudløbsdato."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -3755,9 +3726,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"Kvote for denne server, talt fra det tidspunkt, den indstilles. I bytes, "
|
||||
"efterfulgt eventuelt af K,M,G.<br />Kontrolleres hvert par minutter. Besked "
|
||||
"sendes, når kvoten er brugt."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3788,13 +3756,6 @@ msgid ""
|
||||
"used. - Disabled: no certification verification. This is not secure at all, "
|
||||
"anyone could intercept your connection. "
|
||||
msgstr ""
|
||||
"Når SSL er aktiveret: - Streng: gennemtving fuld certifikatverifikation. "
|
||||
"Dette er den mest sikre indstilling. - Medium: verificér at certifikatet er "
|
||||
"gyldigt og matcher serveradressen, men tillad lokalt injicerede certifikater"
|
||||
" (f.eks. af firewall eller virusscanner). - Minimal: verificér at "
|
||||
"certifikatet er gyldigt. Dette er ikke sikkert, ethvert gyldigt certifikat "
|
||||
"kan bruges. - Deaktiveret: ingen certifikatverifikation. Dette er slet ikke "
|
||||
"sikkert, enhver kan opfange din forbindelse."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Disabled"
|
||||
@@ -3806,7 +3767,7 @@ msgstr "Minimal"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Medium"
|
||||
msgstr "Medium"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Strict"
|
||||
@@ -3820,15 +3781,13 @@ msgstr "0 er højeste prioritet, 100 er den laveste prioritet"
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "Påkrævet"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"I tilfælde af forbindelsesfejl vil downloadkøen blive sat på pause i et par "
|
||||
"minutter i stedet for at springe denne server over"
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3874,11 +3833,11 @@ msgstr "Personlige notater"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Article availability"
|
||||
msgstr "Artikeltilgængelighed"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "%f% available of %d requested articles"
|
||||
msgstr "%f% tilgængelige af %d anmodede artikler"
|
||||
msgstr ""
|
||||
|
||||
#. Config->Scheduling
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3939,12 +3898,12 @@ msgstr "Anvend filtre"
|
||||
#. Config->RSS edit button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Edit"
|
||||
msgstr "Redigér"
|
||||
msgstr ""
|
||||
|
||||
#. Config->RSS when will be the next RSS scan
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Next scan at"
|
||||
msgstr "Næste scanning kl."
|
||||
msgstr ""
|
||||
|
||||
#. Config->RSS table column header
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4026,8 +3985,6 @@ msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
"Hvis kun kategorien <em>Standard</em> er valgt, er beskeder aktiveret for "
|
||||
"jobs i alle kategorier."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
@@ -4204,20 +4161,20 @@ msgstr "Enhed(er) som meddelelse skal sendes til"
#. Pushover settings
#: sabnzbd/skintext.py
msgid "Emergency retry"
msgstr "Nødforsøg"
msgstr ""

#: sabnzbd/skintext.py
msgid "How often (in seconds) the same notification will be sent"
msgstr "Hvor ofte (i sekunder) samme besked vil blive sendt"
msgstr ""

#. Pushover settings
#: sabnzbd/skintext.py
msgid "Emergency expire"
msgstr "Nødudløb"
msgstr ""

#: sabnzbd/skintext.py
msgid "How many seconds your notification will continue to be retried"
msgstr "Hvor mange sekunder din besked fortsætter med at blive forsøgt"
msgstr ""

#. Header for Pushbullet notification section
#: sabnzbd/skintext.py
@@ -4260,30 +4217,19 @@ msgid "Enable Apprise notifications"
msgstr "Aktiver Apprise-notifikationer"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Send beskeder direkte til enhver beskedtjeneste, du bruger.<br>For eksempel:"
" Slack, Discord, Telegram eller enhver tjeneste fra over 100 understøttede "
"tjenester!"
"Send notifikationer via Apprise til næsten enhver notifikationstjeneste"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Brug standard Apprise-URL'er"
msgid "Default Apprise URLs"
msgstr "Standard Apprise-URL'er"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise definerer tjenesteforbindelsesoplysninger ved hjælp af "
"URL'er.<br>Læs Apprise-wikien om, hvordan man definerer URL'en for hver "
"tjeneste.<br>Brug komma og/eller mellemrum til at identificere mere end én "
"URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Brug komma og/eller mellemrum for at angive flere URL'er."

#: sabnzbd/skintext.py
msgid ""
@@ -4462,15 +4408,15 @@ msgstr "Sorteringsstreng"

#: sabnzbd/skintext.py
msgid "Multi-part Label"
msgstr "Fler-dels-etiket"
msgstr ""

#: sabnzbd/skintext.py
msgid "Show folder"
msgstr "Vis mappe"
msgstr ""

#: sabnzbd/skintext.py
msgid "Season folder"
msgstr "Sæsonmappe"
msgstr ""

#: sabnzbd/skintext.py
msgid "In folders"
@@ -4486,7 +4432,7 @@ msgstr "Job Navn som Filnavn"

#: sabnzbd/skintext.py
msgid "Series"
msgstr "Serier"
msgstr ""

#. Note for title expression in Sorting that does case adjustment
#: sabnzbd/skintext.py
@@ -4499,31 +4445,31 @@ msgstr "Forarbejdede resultat"

#: sabnzbd/skintext.py
msgid "Any property"
msgstr "Enhver egenskab"
msgstr ""

#: sabnzbd/skintext.py
msgid "property"
msgstr "egenskab"
msgstr ""

#: sabnzbd/skintext.py
msgid "GuessIt Property"
msgstr "GuessIt-egenskab"
msgstr ""

#: sabnzbd/skintext.py
msgid "GuessIt.Property"
msgstr "GuessIt.Egenskab"
msgstr ""

#: sabnzbd/skintext.py
msgid "GuessIt_Property"
msgstr "GuessIt_Egenskab"
msgstr ""

#: sabnzbd/skintext.py
msgid "Minimum Filesize"
msgstr "Minimum filstørrelse"
msgstr ""

#: sabnzbd/skintext.py
msgid "Affected Job Types"
msgstr "Berørte jobtyper"
msgstr ""

#: sabnzbd/skintext.py
msgid "All"
@@ -4531,15 +4477,15 @@ msgstr "Alle"

#: sabnzbd/skintext.py
msgid "Series with air dates"
msgstr "Serier med sendetidspunkter"
msgstr ""

#: sabnzbd/skintext.py
msgid "Movies"
msgstr "Film"
msgstr ""

#: sabnzbd/skintext.py
msgid "Other / Unknown"
msgstr "Andet / Ukendt"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
@@ -4551,43 +4497,34 @@ msgid ""
"applied.</p><p>More options are available when Advanced Settings is "
"checked.<br/>Detailed information can be found on the Wiki.</p>"
msgstr ""
"<p>Brug sorteringsværktøjer til automatisk at organisere dine fuldførte "
"downloads. For eksempel, placer alle episoder fra en serie i en "
"sæsonspecifik mappe. Eller placer film i en mappe opkaldt efter "
"filmen.</p><p>Sorteringsværktøjer afprøves i den rækkefølge, de vises, og "
"kan omarrangeres ved at trække og slippe.<br/>Den første aktive sortering, "
"der matcher både den berørte kategori og jobtype, anvendes.</p><p>Flere "
"muligheder er tilgængelige, når Avancerede indstillinger er "
"markeret.<br/>Detaljeret information kan findes på Wiki'en.</p>"

#: sabnzbd/skintext.py
msgid "Add Sorter"
msgstr "Tilføj sortering"
msgstr ""

#: sabnzbd/skintext.py
msgid "Remove Sorter"
msgstr "Fjern sortering"
msgstr ""

#: sabnzbd/skintext.py
msgid "Test Data"
msgstr "Testdata"
msgstr ""

#: sabnzbd/skintext.py
msgid "Quick start"
msgstr "Hurtig start"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Move and rename all episodes in the \"tv\" category to a show-specific "
"folder"
msgstr ""
"Flyt og omdøb alle episoder i kategorien \"tv\" til en programspecifik mappe"

#: sabnzbd/skintext.py
msgid ""
"Move and rename all movies in the \"movies\" category to a movie-specific "
"folder"
msgstr "Flyt og omdøb alle film i kategorien \"movies\" til en filmspecifik mappe"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
@@ -4698,11 +4635,11 @@ msgstr "Datoformat"

#: sabnzbd/skintext.py
msgid "Extra queue columns"
msgstr "Ekstra køkolonner"
msgstr ""

#: sabnzbd/skintext.py
msgid "Extra history columns"
msgstr "Ekstra historikkolonner"
msgstr ""

#: sabnzbd/skintext.py
msgid "page"
@@ -4753,8 +4690,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"Er du sikker på, at du vil slette alle mapper i din midlertidige "
"downloadmappe? Dette kan ikke fortrydes!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -4793,8 +4728,6 @@ msgid ""
"When you Retry a job, 'Duplicate Detection' and 'Abort jobs that cannot be "
"completed' are disabled."
msgstr ""
"Når du genforsøger et job, er 'Dubletdetektering' og 'Afbryd jobs, der ikke "
"kan fuldføres' deaktiveret."

#: sabnzbd/skintext.py
msgid "View Script Log"
@@ -4802,7 +4735,7 @@ msgstr "Vis scriptlog"

#: sabnzbd/skintext.py
msgid "Renaming the job will abort Direct Unpack."
msgstr "Omdøbning af jobbet vil afbryde direkte udpakning."
msgstr ""

#: sabnzbd/skintext.py
msgid ""
@@ -4826,7 +4759,7 @@ msgstr "Kompakt layout"

#: sabnzbd/skintext.py
msgid "Always use full screen width"
msgstr "Brug altid fuld skærmbredde"
msgstr ""

#: sabnzbd/skintext.py
msgid "Tabbed layout <br/>(separate queue and history)"
@@ -4846,11 +4779,11 @@ msgstr "Bekræft Historik-fjernelse"

#: sabnzbd/skintext.py
msgid "Keyboard shortcuts"
msgstr "Tastaturgenveje"
msgstr ""

#: sabnzbd/skintext.py
msgid "Shift+Arrow key: Browse Queue and History pages"
msgstr "Shift+piletast: Gennemse Kø- og Historiksider"
msgstr ""

#: sabnzbd/skintext.py
msgid "How long or untill when do you want to pause? (in English!)"
@@ -4873,12 +4806,10 @@ msgid ""
"All usernames, passwords and API-keys are automatically removed from the log"
" and the included copy of your settings."
msgstr ""
"Alle brugernavne, adgangskoder og API-nøgler fjernes automatisk fra loggen "
"og den inkluderede kopi af dine indstillinger."

#: sabnzbd/skintext.py
msgid "Sort by % downloaded <small>Most→Least</small>"
msgstr "Sortér efter % downloadet <small>Mest→Mindst</small>"
msgstr ""

#: sabnzbd/skintext.py
msgid "Sort by Age <small>Oldest→Newest</small>"
@@ -5013,11 +4944,11 @@ msgstr "Start guide"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Klik på Test server før du fortsætter"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"
msgstr "Gendan sikkerhedskopi"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
@@ -5034,7 +4965,7 @@ msgstr ""
#. Error message
#: sabnzbd/sorting.py
msgid "Failed to rename %s to %s"
msgstr "Kunne ikke omdøbe %s til %s"
msgstr ""

#. Error message
#: sabnzbd/sorting.py

@@ -15,14 +15,14 @@
# Stefan Rodriguez Galeano, 2024
# M Z, 2024
# Gjelbrim Haskaj, 2024
# Safihre <safihre@sabnzbd.org>, 2024
# Media Cat, 2025
# Safihre <safihre@sabnzbd.org>, 2025
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.6.0\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2025\n"
"Last-Translator: Media Cat, 2025\n"
"Language-Team: German (https://app.transifex.com/sabnzbd/teams/111101/de/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -58,8 +58,6 @@ msgid ""
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
"used."
msgstr ""
"OpenSSL kann nicht verknüpft werden, optimierte SSL-Verbindungsfunktionen "
"werden nicht verwendet."

#. Error message
#: SABnzbd.py
@@ -386,11 +384,11 @@ msgstr "Kontingent"

#: sabnzbd/bpsmeter.py
msgid "Quota limit warning (%d%%)"
msgstr "Warnung zur Kontingentgrenze (%d%%)"
msgstr ""

#: sabnzbd/bpsmeter.py
msgid "Downloading resumed after quota reset"
msgstr "Download nach Kontingentzurücksetzung fortgesetzt"
msgstr ""

#: sabnzbd/cfg.py, sabnzbd/interface.py
msgid "Incorrect parameter"
@@ -617,6 +615,11 @@ msgstr "Fehler %s@%s zu initialisieren, aus folgendem Grund: %s"
msgid "Fatal error in Downloader"
msgstr "Schwerer Fehler im Downloader"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s:Unbekannter Statuscode%s für Artikel erhalten %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Zu viele Verbindungen zu Server %s [%s]"
@@ -638,6 +641,11 @@ msgstr "Anmelden beim Server fehlgeschlagen. %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Fehler beim Verbinden mit %s@%s, Meldung = %s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Vermute Fehler im Downloader"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Wird beendet …"
@@ -1283,16 +1291,6 @@ msgstr "Versuche SFV-Überprüfung"
msgid "left"
msgstr "rest"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s:Unbekannter Statuscode%s für Artikel erhalten %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Vermute Fehler im Downloader"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Dieser Server erlaubt kein SSL auf diesem Port"
@@ -2480,7 +2478,7 @@ msgstr "Erneut versuchen"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "Als abgeschlossen markieren und temporäre Dateien entfernen"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -3519,9 +3517,8 @@ msgid "Enable SFV-based checks"
msgstr "SFV-basierte Überprüfung aktivieren"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Zusätzliche Überprüfung mittels SFV-Dateien durchführen"

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3864,9 +3861,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"Kontingent für diesen Server, gezählt ab dem Zeitpunkt der Festlegung. In "
"Bytes, optional gefolgt von K,M,G.<br />Wird alle paar Minuten überprüft. "
"Benachrichtigung wird gesendet, wenn das Kontingent aufgebraucht ist."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -4373,30 +4367,22 @@ msgid "Enable Apprise notifications"
msgstr "Aktivieren Sie Info-Benachrichtigungen"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Senden Sie Benachrichtigungen direkt an jeden von Ihnen genutzten "
"Benachrichtigungsdienst.<br>Zum Beispiel: Slack, Discord, Telegram oder "
"jeden anderen Dienst aus über 100 unterstützten Diensten!"
"Senden Sie Benachrichtigungen mit Anfragen an fast jeden "
"Benachrichtigungsdienst"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Standard-Apprise-URLs verwenden"
msgid "Default Apprise URLs"
msgstr "Standard Apprise URLs"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""
"Apprise definiert Dienstverbindungsinformationen über URLs.<br>Lesen Sie das"
" Apprise-Wiki, um zu erfahren, wie Sie die URL für jeden Dienst "
"definieren.<br>Verwenden Sie ein Komma und/oder Leerzeichen, um mehr als "
"eine URL anzugeben."
"Verwenden Sie ein Komma und/oder ein Leerzeichen, um mehr als eine URL zu "
"kennzeichnen."

#: sabnzbd/skintext.py
msgid ""
@@ -4870,8 +4856,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"Sind Sie sicher, dass Sie alle Ordner in Ihrem temporären Download-Ordner "
"löschen möchten? Dies kann nicht rückgängig gemacht werden!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -5131,7 +5115,7 @@ msgstr "Assistenten starten"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Klicken Sie auf \"Server testen\", bevor Sie fortfahren"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"

@@ -373,11 +373,11 @@ msgstr "Cuota"

#: sabnzbd/bpsmeter.py
msgid "Quota limit warning (%d%%)"
msgstr "Advertencia de límite de cuota (%d%%)"
msgstr ""

#: sabnzbd/bpsmeter.py
msgid "Downloading resumed after quota reset"
msgstr "Descarga reanudada después de reiniciar la cuota"
msgstr ""

#: sabnzbd/cfg.py, sabnzbd/interface.py
msgid "Incorrect parameter"
@@ -602,6 +602,12 @@ msgstr "Error al inicializar %s@%s con la razón: %s"
msgid "Fatal error in Downloader"
msgstr "Error grave en el descargador"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""
"%s@%s: Se recibió un código de estado desconocido %s para el artículo %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Demasiadas conexiones con el servidor %s [%s]"
@@ -623,6 +629,11 @@ msgstr "Registraccion fallo para servidor %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Ha fallado la conexión a %s@%s, el mensaje=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Error sospechoso en downloader"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Apagando"
@@ -1271,17 +1282,6 @@ msgstr "Intentando verificación por SFV"
msgid "left"
msgstr "Restante"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""
"%s@%s: Se recibió un código de estado desconocido %s para el artículo %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Error sospechoso en downloader"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Este servidor no permite SSL en este puerto"
@@ -2469,7 +2469,7 @@ msgstr "Reintentar"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "Marcar como completado y eliminar archivos temporales"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -3495,9 +3495,8 @@ msgid "Enable SFV-based checks"
msgstr "Habilitar verificacion basada en SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Realiza una verificación extra basada en ficheros SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3832,9 +3831,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"Cuota para este servidor, contada desde el momento en que se establece. En "
"bytes, opcionalmente seguido de K,M,G.<br />Comprobado cada pocos minutos. "
"Se envía una notificación cuando se agota la cuota."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -4342,29 +4338,20 @@ msgid "Enable Apprise notifications"
msgstr "Habilitar notificaciones Apprise"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Envíe notificaciones directamente a cualquier servicio de notificaciones que"
" utilice.<br>Por ejemplo: Slack, Discord, Telegram o cualquier servicio de "
"más de 100 servicios compatibles."
"Enviar notificaciones usando Apprise a casi cualquier servicio de "
"notificación"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Usar URLs de Apprise predeterminadas"
msgid "Default Apprise URLs"
msgstr "URLs predeterminadas de Apprise"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise define la información de conexión del servicio mediante URLs.<br>Lea"
" el wiki de Apprise para saber cómo definir la URL de cada servicio.<br>Use "
"una coma y/o espacio para identificar más de una URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Use una coma y/o espacio para identificar más de una URL."

#: sabnzbd/skintext.py
msgid ""
@@ -4841,8 +4828,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"¿Está seguro de que desea eliminar todas las carpetas en su carpeta de "
"descargas temporales? ¡Esto no se puede deshacer!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -5102,7 +5087,7 @@ msgstr "Iniciar Asistente"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Haga clic en Probar servidor antes de continuar"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"

@@ -556,6 +556,11 @@ msgstr "Alustaminen epäonnistui kohteessa %s@%s syy: %s"
msgid "Fatal error in Downloader"
msgstr ""

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Liikaa yhteyksiä palvelimelle %s [%s]"
@@ -575,6 +580,11 @@ msgstr "Kirjautuminen palvelimelle %s epäonnistui [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Yhdistäminen %s@%s epäonnistui, viesti=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Mahdollinen virhe lataajassa"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Sammutetaan"
@@ -1198,16 +1208,6 @@ msgstr "Yritetään SFV varmennusta"
msgid "left"
msgstr "jäljellä"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Mahdollinen virhe lataajassa"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Tämä palvelin ei salli SSL yhteyksiä tähän porttiin"
@@ -3363,9 +3363,8 @@ msgid "Enable SFV-based checks"
msgstr "SFV-pohjaiset tarkistukset käytössä"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Suorittaa ylimääräisen varmennuksen SFV tiedostojen avulla."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -4176,22 +4175,17 @@ msgid "Enable Apprise notifications"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgid "Default Apprise URLs"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""

#: sabnzbd/skintext.py

@@ -606,6 +606,11 @@ msgstr "Échec d'initialisation de %s@%s pour la raison suivante : %s"
msgid "Fatal error in Downloader"
msgstr "Erreur fatale dans le Téléchargeur"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s a reçu le code d'état inconnu %s pour l'article %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Trop de connexions au serveur %s [%s]"
@@ -627,6 +632,11 @@ msgstr "Échec de la connexion au serveur %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "La connexion à %s@%s a échoué, message=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Erreur suspecte dans le téléchargeur"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Arrêt en cours..."
@@ -1272,16 +1282,6 @@ msgstr "Essai vérification SFV"
msgid "left"
msgstr "restant"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s a reçu le code d'état inconnu %s pour l'article %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Erreur suspecte dans le téléchargeur"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Ce serveur n'authorise pas de connexion SSL sur ce port"
@@ -3502,9 +3502,8 @@ msgid "Enable SFV-based checks"
msgstr "Activer les contrôles SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Fait une vérification supplémentaire basée sur les fichiers SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -4354,30 +4353,20 @@ msgid "Enable Apprise notifications"
msgstr "Activer les notifications Apprise"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Envoyez des notifications directement vers n'importe quel service de "
"notification que vous utilisez.<br>Par exemple : Slack, Discord, Telegram ou"
" tout autre service parmi plus de 100 services pris en charge !"
"Envoyer des notifications en utilisant Apprise vers presque n'importe quel "
"service de notification"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Utiliser les URLs Apprise par défaut"
msgid "Default Apprise URLs"
msgstr "URLs par défaut d'Apprise"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise définit les informations de connexion au service à l'aide "
"d'URL.<br>Consultez le wiki Apprise pour savoir comment définir l'URL de "
"chaque service.<br>Utilisez une virgule et/ou un espace pour identifier "
"plusieurs URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Utilisez une virgule et/ou un espace pour identifier plusieurs URL."

#: sabnzbd/skintext.py
msgid ""

@@ -2,14 +2,14 @@
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
#
# Translators:
# Safihre <safihre@sabnzbd.org>, 2023
# ION, 2025
# Safihre <safihre@sabnzbd.org>, 2025
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.6.0\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2025\n"
"Last-Translator: ION, 2025\n"
"Language-Team: Hebrew (https://app.transifex.com/sabnzbd/teams/111101/he/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -42,7 +42,7 @@ msgstr "לא ניתן למצוא תבניות רשת: %s, מנסה תבנית ת
msgid ""
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
"used."
msgstr "לא ניתן לקשר ל-OpenSSL, פונקציות חיבור SSL מותאמות לא יהיו בשימוש."
msgstr ""

#. Error message
#: SABnzbd.py
@@ -210,16 +210,12 @@ msgid ""
"Could not connect to %s on port %s. Use the default usenet settings: port "
"563 and SSL turned on"
msgstr ""
"לא ניתן להתחבר ל-%s בפורט %s. השתמש בהגדרות ברירת המחדל של usenet: פורט 563 "
"ו-SSL מופעל"

#: sabnzbd/api.py
msgid ""
"Could not connect to %s on port %s. Use the default usenet settings: port "
"119 and SSL turned off"
msgstr ""
"לא ניתן להתחבר ל-%s בפורט %s. השתמש בהגדרות ברירת המחדל של usenet: פורט 119 "
"ו-SSL כבוי"

#: sabnzbd/api.py, sabnzbd/interface.py
msgid "Server address \"%s:%s\" is not valid."
@@ -347,11 +343,11 @@ msgstr "מכסה"

#: sabnzbd/bpsmeter.py
msgid "Quota limit warning (%d%%)"
msgstr "אזהרת מגבלת מכסה (%d%%)"
msgstr ""

#: sabnzbd/bpsmeter.py
msgid "Downloading resumed after quota reset"
msgstr "ההורדה התחדשה לאחר איפוס מכסה"
msgstr ""

#: sabnzbd/cfg.py, sabnzbd/interface.py
msgid "Incorrect parameter"
@@ -415,7 +411,7 @@ msgstr ""
#: sabnzbd/cfg.py
msgid ""
"The par2 application was switched, any custom par2 parameters were removed"
msgstr "יישום par2 הוחלף, כל פרמטרי par2 מותאמים אישית הוסרו"
msgstr ""

#. Warning message
#: sabnzbd/config.py
@@ -491,7 +487,7 @@ msgstr "אי־האפלה שינתה שם של %d קבצים"

#: sabnzbd/deobfuscate_filenames.py
msgid "Deobfuscate renamed %d subtitle file(s)"
msgstr "בוצע ביטול ערפול של %d קבצי כתוביות ששמם שונה"
msgstr ""

#: sabnzbd/directunpacker.py, sabnzbd/skintext.py
msgid "Direct Unpack"
@@ -567,6 +563,11 @@ msgstr "כישלון באתחול %s@%s עם סיבה: %s"
msgid "Fatal error in Downloader"
msgstr "שגיאה גורלית במורידן"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: קוד בלתי ידוע של מעמד התקבל %s עבור מאמר %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "יותר מדי חיבורים לשרת %s [%s]"
@@ -588,6 +589,11 @@ msgstr "כניסה נכשלה עבור שרת %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "התחברות אל %s@%s נכשלה, הודעה=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "הורדה חשודה במורידן"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "מכבה"
@@ -1211,16 +1217,6 @@ msgstr "מנסה וידוא SFV"
msgid "left"
msgstr "נותר"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: קוד בלתי ידוע של מעמד התקבל %s עבור מאמר %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "הורדה חשודה במורידן"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "שרת זה אינו מתיר SSL על פתחה זו"
@@ -1239,8 +1235,6 @@ msgid ""
" locally injected certificate (for example by firewall or virus scanner). "
"Try setting Certificate verification to Medium."
msgstr ""
"לא ניתן לאמת את האישור. זה יכול להיות בעיית שרת או בגלל אישור מוזרק מקומית "
"(לדוגמה על ידי חומת אש או סורק וירוסים). נסה להגדיר את אימות האישור לבינוני."

#: sabnzbd/newswrapper.py
msgid "Server %s uses an untrusted certificate [%s]"
@@ -1321,7 +1315,7 @@ msgstr "כישלון בשליחת הודעת Prowl"
#. Warning message
#: sabnzbd/notifier.py
msgid "Failed to send Apprise message - no URLs defined"
msgstr "שליחת הודעת Apprise נכשלה - לא הוגדרו כתובות URL"
msgstr ""

#. Warning message
#: sabnzbd/notifier.py
@@ -2393,7 +2387,7 @@ msgstr "נסה שוב"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "סמן כהושלם והסר קבצים זמניים"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -2939,7 +2933,7 @@ msgstr "העבר עבודות אל הארכיון אם ההיסטוריה חור
#: sabnzbd/skintext.py
msgid ""
"Delete jobs if the history and archive exceeds specified number of jobs"
msgstr "מחק עבודות אם ההיסטוריה והארכיון עוברים את מספר העבודות שצוין"
msgstr ""

#: sabnzbd/skintext.py
msgid "Move jobs to the archive after specified number of days"
@@ -2948,7 +2942,7 @@ msgstr "העבר עבודות אל הארכיון לאחר מספר מצוין
#: sabnzbd/skintext.py
msgid ""
"Delete jobs from the history and archive after specified number of days"
msgstr "מחק עבודות מההיסטוריה והארכיון לאחר מספר הימים שצוין"
msgstr ""

#: sabnzbd/skintext.py
msgid "Move all completed jobs to archive"
@@ -3377,9 +3371,8 @@ msgid "Enable SFV-based checks"
msgstr "אפשר בדיקות מבוססות SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "בצע וידוא נוסף שמבוסס על קבצי SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3695,8 +3688,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"מכסה לשרת זה, נספרת מהרגע שהיא נקבעה. בבייטים, באופן אופציונלי ניתן להוסיף "
"K,M,G.<br />נבדקת כל כמה דקות. הודעה נשלחת כאשר המכסה מוצתה."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -3727,11 +3718,6 @@ msgid ""
"used. - Disabled: no certification verification. This is not secure at all, "
"anyone could intercept your connection. "
msgstr ""
"כאשר SSL מופעל: - מחמיר: אכוף אימות אישור מלא. זוהי ההגדרה המאובטחת ביותר. -"
" בינוני: אמת שהאישור תקף ותואם לכתובת השרת, אך אפשר אישורים המוזרקים מקומית "
"(למשל על ידי חומת אש או סורק וירוסים). - מינימלי: אמת שהאישור תקף. זה לא "
"מאובטח, כל אישור תקף יכול לשמש. - מושבת: ללא אימות אישור. זה לא מאובטח כלל, "
"כל אחד יכול ליירט את החיבור שלך."

#: sabnzbd/skintext.py
msgid "Disabled"
@@ -3743,7 +3729,7 @@ msgstr "מזערי"

#: sabnzbd/skintext.py
msgid "Medium"
msgstr "בינוני"
msgstr ""

#: sabnzbd/skintext.py
msgid "Strict"
@@ -4195,28 +4181,18 @@ msgid "Enable Apprise notifications"
msgstr "אפשר התראות Apprise"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgstr ""
"שלח הודעות ישירות לכל שירות הודעות שאתה משתמש בו.<br>לדוגמה: Slack, Discord,"
" Telegram או כל שירות מתוך למעלה מ-100 שירותים נתמכים!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr "שלח התראות ע״י שימוש בשירות Apprise אל כמעט כל שירות התראות"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "השתמש בכתובות URL של Apprise המוגדרות כברירת מחדל"
msgid "Default Apprise URLs"
msgstr "כתובות Apprise ברירות מחדל"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise מגדיר מידע על חיבור שירות באמצעות כתובות URL.<br>קרא את הוויקי של "
"Apprise כדי ללמוד כיצד להגדיר את כתובת ה-URL עבור כל שירות.<br>השתמש בפסיק "
"ו/או רווח כדי לזהות יותר מכתובת URL אחת."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "השתמש בפסיק, ברווח או בשניהם כדי לזהות יותר מכתובת אחת."

#: sabnzbd/skintext.py
msgid ""
@@ -4679,8 +4655,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"האם אתה בטוח שברצונך למחוק את כל התיקיות בתיקיית ההורדות הזמנית שלך? לא ניתן"
" לבטל פעולה זו!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -4939,7 +4913,7 @@ msgstr "התחל אשף"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "לחץ על בדיקת שרת לפני המשך"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"

@@ -42,8 +42,6 @@ msgid ""
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
"used."
msgstr ""
"Impossibile collegarsi a OpenSSL, le funzioni di connessione SSL ottimizzate"
" non verranno utilizzate."

#. Error message
#: SABnzbd.py
@@ -371,11 +369,11 @@ msgstr "Quota"

#: sabnzbd/bpsmeter.py
msgid "Quota limit warning (%d%%)"
msgstr "Avviso limite quota (%d%%)"
msgstr ""

#: sabnzbd/bpsmeter.py
msgid "Downloading resumed after quota reset"
msgstr "Download ripreso dopo il ripristino della quota"
msgstr ""

#: sabnzbd/cfg.py, sabnzbd/interface.py
msgid "Incorrect parameter"
@@ -599,6 +597,11 @@ msgstr "Inizializzazione di %s@%s fallita con motivo: %s"
msgid "Fatal error in Downloader"
msgstr "Errore fatale nel Downloader"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: Ricevuto codice di stato sconosciuto %s per l'articolo %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Troppe connessioni al server %s [%s]"
@@ -620,6 +623,11 @@ msgstr "Accesso fallito per il server %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Connessione a %s@%s fallita, messaggio=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Sospetto errore nel downloader"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Spegnimento in corso"
@@ -1259,16 +1267,6 @@ msgstr "Tentativo di verifica SFV"
msgid "left"
msgstr "rimanente"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: Ricevuto codice di stato sconosciuto %s per l'articolo %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Sospetto errore nel downloader"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Questo server non permette SSL su questa porta"
@@ -2444,7 +2442,7 @@ msgstr "Riprova"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "Segna come completato e rimuovi i file temporanei"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -3471,9 +3469,8 @@ msgid "Enable SFV-based checks"
msgstr "Abilita controlli basati su SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Esegui una verifica extra basata sui file SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3803,9 +3800,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"Quota per questo server, contata dal momento in cui viene impostata. In "
"byte, opzionalmente seguito da K,M,G.<br />Controllato ogni pochi minuti. La"
" notifica viene inviata quando la quota è esaurita."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -4310,29 +4304,18 @@ msgid "Enable Apprise notifications"
msgstr "Abilita notifiche Apprise"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgstr ""
"Invia notifiche direttamente a qualsiasi servizio di notifica che "
"utilizzi.<br>Ad esempio: Slack, Discord, Telegram o qualsiasi servizio tra "
"oltre 100 servizi supportati!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr "Invia notifiche usando Apprise a quasi tutti i servizi di notifica"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Usa URL Apprise predefiniti"
msgid "Default Apprise URLs"
msgstr "URL predefiniti di Apprise"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise definisce le informazioni di connessione del servizio utilizzando "
"URL.<br>Leggi il wiki di Apprise per sapere come definire l'URL per ogni "
"servizio.<br>Usa una virgola e/o uno spazio per identificare più di un URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Usa una virgola e/o uno spazio per identificare più di un URL."

#: sabnzbd/skintext.py
msgid ""
@@ -4809,8 +4792,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"Sei sicuro di voler eliminare tutte le cartelle nella tua cartella di "
"download temporanei? Questo non può essere annullato!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -5071,7 +5052,7 @@ msgstr "Avvia procedura guidata"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Fai clic su Prova server prima di continuare"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"

@@ -553,6 +553,11 @@ msgstr "Feilet å starte %s@%s grunnet: %s"
msgid "Fatal error in Downloader"
msgstr ""

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "For mange tilkoblinger til server %s [%s]"
@@ -572,6 +577,11 @@ msgstr "Kunne ikke logge inn på server %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Kontaker %s@%s feilet, feilmelding=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Mistenker feil i nedlaster"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Starter avslutning av SABnzbd.."
@@ -1196,16 +1206,6 @@ msgstr "Prøver SFV-verifisering"
msgid "left"
msgstr "gjenstår"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Mistenker feil i nedlaster"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Denne serveren tillater ikke SSL på denne porten"
@@ -3346,9 +3346,8 @@ msgid "Enable SFV-based checks"
msgstr "Aktiver SFV-baserte sjekker"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Utfør ekstra verifisering basert på SFV filer"

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -4155,22 +4154,17 @@ msgid "Enable Apprise notifications"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgid "Default Apprise URLs"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""

#: sabnzbd/skintext.py

@@ -44,8 +44,6 @@ msgid ""
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
"used."
msgstr ""
"Kan niet koppelen aan OpenSSL, geoptimaliseerde SSL-verbindingsfuncties "
"worden niet gebruikt."

#. Error message
#: SABnzbd.py
@@ -368,11 +366,11 @@ msgstr "Quotum"

#: sabnzbd/bpsmeter.py
msgid "Quota limit warning (%d%%)"
msgstr "Waarschuwing quotumlimiet (%d%%)"
msgstr ""

#: sabnzbd/bpsmeter.py
msgid "Downloading resumed after quota reset"
msgstr "Downloaden hervat na quotumreset"
msgstr ""

#: sabnzbd/cfg.py, sabnzbd/interface.py
msgid "Incorrect parameter"
@@ -600,6 +598,11 @@ msgstr "Initialisatie van %s@%s mislukt, vanwege: %s"
msgid "Fatal error in Downloader"
msgstr "Onherstelbare fout in de Downloader"

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: Onbekende statuscode %s ontvangen voor artikel %s"

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Te veel verbindingen met server %s [%s]"
@@ -621,6 +624,11 @@ msgstr "Aanmelden bij server %s mislukt [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Verbinding %s@%s mislukt, bericht=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Vedachte fout in downloader"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Afsluiten"
@@ -1262,16 +1270,6 @@ msgstr "Probeer SFV-verificatie"
msgid "left"
msgstr "over"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr "%s@%s: Onbekende statuscode %s ontvangen voor artikel %s"

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Vedachte fout in downloader"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "De server staat geen SSL toe op deze poort"
@@ -2447,7 +2445,7 @@ msgstr "Opnieuw"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "Markeer als voltooid en verwijder tijdelijke bestanden"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -3469,9 +3467,8 @@ msgid "Enable SFV-based checks"
msgstr "Voer SFV-gebaseerde controles uit"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Doe een extra verificatie m.b.v. SFV-bestanden"

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3805,9 +3802,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"Quotum voor deze server, geteld vanaf het moment dat het is ingesteld. In "
"bytes, optioneel gevolgd door K,M,G.<br />Wordt om de paar minuten "
"gecontroleerd. Melding wordt verzonden wanneer het quotum is opgebruikt."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -4312,30 +4306,19 @@ msgid "Enable Apprise notifications"
msgstr "Apprise-meldingen activeren"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Stuur meldingen rechtstreeks naar elke meldingsservice die u "
"gebruikt.<br>Bijvoorbeeld: Slack, Discord, Telegram of elke andere service "
"uit meer dan 100 ondersteunde services!"
"Stuur meldingen met behulp van Apprise naar bijna elke bestaande service."

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Gebruik standaard Apprise-URL's"
msgid "Default Apprise URLs"
msgstr "Standaard Apprise-URL's"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise definieert serviceverbindingsinformatie met behulp van "
"URL's.<br>Lees de Apprise-wiki om te leren hoe u de URL voor elke service "
"definieert.<br>Gebruik een komma en/of spatie om meer dan één URL te "
"identificeren."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Gebruik een komma en/of spatie om meer dan één URL op te geven."

#: sabnzbd/skintext.py
msgid ""
@@ -4806,8 +4789,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"Weet u zeker dat u alle mappen in uw tijdelijke downloadmap wilt "
"verwijderen? Dit kan niet ongedaan worden gemaakt!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -5067,7 +5048,7 @@ msgstr "Wizard starten"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Klik op Test server voordat u doorgaat"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"

@@ -554,6 +554,11 @@ msgstr "Błąd podczas inicjalizacji %s@%s: %s"
msgid "Fatal error in Downloader"
msgstr ""

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Zbyt wiele połączeń do serwera %s [%s]"
@@ -573,6 +578,11 @@ msgstr "Błąd logowania do serwera %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "Błąd połączenia %s@%s, komunikat=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Nieobsługiwany błąd w module pobierania"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Wyłączanie"
@@ -1201,16 +1211,6 @@ msgstr "Próba weryfikacji SFV"
msgid "left"
msgstr "pozostało"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Nieobsługiwany błąd w module pobierania"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Serwer nie obsługuje SSL na tym porcie"
@@ -3357,9 +3357,8 @@ msgid "Enable SFV-based checks"
msgstr "Włącz sprawdzanie przy użyciu SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Wykonuj dodatkową weryfikację na podstawie plików SFV"

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -4167,22 +4166,17 @@ msgid "Enable Apprise notifications"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgid "Default Apprise URLs"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""

#: sabnzbd/skintext.py

@@ -568,6 +568,11 @@ msgstr "Falha ao iniciar %s@%s devido as seguintes razões: %s"
msgid "Fatal error in Downloader"
msgstr ""

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Excesso de conexões ao servidor %s [%s]"
@@ -587,6 +592,11 @@ msgstr "Falha de logon ao servidor %s [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr "A conexão a %s@%s falhou. Mensagem=%s"

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Erro suspeito no downloader"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Encerrando"
@@ -1210,16 +1220,6 @@ msgstr "Tentando verificação SFV"
msgid "left"
msgstr "restantes"

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Erro suspeito no downloader"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Este servidor não permite SSL nesta porta"
@@ -3367,9 +3367,8 @@ msgid "Enable SFV-based checks"
msgstr "Habilitar verificações baseadas em SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Fazer uma verificação extra baseada em arquivos SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -4178,22 +4177,17 @@ msgid "Enable Apprise notifications"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgid "Default Apprise URLs"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""

#: sabnzbd/skintext.py

@@ -576,6 +576,11 @@ msgstr "Nu am putu inițializa %s@%s din cauza următorului motiv: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Prea multe conexiuni la serverul %s [%s]"
|
||||
@@ -595,6 +600,11 @@ msgstr "Autentificare nereuşită la serverul %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Conectare %s@%s eșuată, mesaj=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Eroare suspectă în sistemul de descprcare"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Închidere"
|
||||
@@ -1226,16 +1236,6 @@ msgstr "Încerc verificare SFV"
|
||||
msgid "left"
|
||||
msgstr "rămas"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Eroare suspectă în sistemul de descprcare"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Acest server nu permite SSL pe acest port"
|
||||
@@ -3385,9 +3385,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Activează verficări SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Fă o verificare extra bazată pe fişiere SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -4199,22 +4198,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
|
||||
@@ -552,6 +552,11 @@ msgstr ""
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr ""
|
||||
@@ -571,6 +576,11 @@ msgstr "Ошибка входа на сервер %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Завершение работы"
|
||||
@@ -1196,16 +1206,6 @@ msgstr "Проверка SFV-суммы"
|
||||
msgid "left"
|
||||
msgstr "осталось"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr ""
|
||||
@@ -3349,9 +3349,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Использовать проверку по SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Выполнять дополнительную проверку по SFV-файлам."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -4163,22 +4162,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
|
||||
@@ -550,6 +550,11 @@ msgstr "Neuspešna inicijalizacija %s@%s iz razloga: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Previše konekcija ka serveru %s [%s]"
|
||||
@@ -569,6 +574,11 @@ msgstr "Неуспешно пријављивање на сервер %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Povezivanje na %s@%s neuspešno, poruka=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sumnja u grešku u programu za download"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Гашење"
|
||||
@@ -1191,16 +1201,6 @@ msgstr "Pokušaj SFV provere"
|
||||
msgid "left"
|
||||
msgstr "остало"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sumnja u grešku u programu za download"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Ovaj server ne dozvoljava SSL na ovom portu"
|
||||
@@ -3335,9 +3335,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Упали SFV провере"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Уради још једну проверу базирану на SFV датотеке."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -4141,22 +4140,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
|
||||
@@ -550,6 +550,11 @@ msgstr "Misslyckades att initiera %s@%s med orsak %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "För många anslutningar till servern %s [%s]"
|
||||
@@ -569,6 +574,11 @@ msgstr "Det gick inte att logga in på server %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Anslutning %s@%s misslyckades, meddelande=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Misstänker fel i nedladdare"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Påbörjar nedstängning av SABnzbd.."
|
||||
@@ -1195,16 +1205,6 @@ msgstr "Försöker verifiera SFV"
|
||||
msgid "left"
|
||||
msgstr "kvar"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Misstänker fel i nedladdare"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Den här servern tillåter in SSL på denna port"
|
||||
@@ -3345,9 +3345,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Använd SFV-baserade kontroller"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Gör en extra kontroll med SFV filer"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -4154,22 +4153,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
|
||||
@@ -4,13 +4,12 @@
|
||||
# Translators:
|
||||
# Taylan Tatlı, 2025
|
||||
# mauron, 2025
|
||||
# Safihre <safihre@sabnzbd.org>, 2025
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: SABnzbd-4.6.0\n"
|
||||
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
|
||||
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2025\n"
|
||||
"Last-Translator: mauron, 2025\n"
|
||||
"Language-Team: Turkish (https://app.transifex.com/sabnzbd/teams/111101/tr/)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
@@ -597,6 +596,11 @@ msgstr "%s@%s başlatması şu sebepten dolayı başarısız oldu: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "İndirici'de ölümcül hata"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: bilinmeyen durum kodu %s, şu makale için alındı: %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "%s [%s] sunucusuna çok fazla bağlantı"
|
||||
@@ -618,6 +622,11 @@ msgstr "%s [%s] sunucusunda oturum açılışı başarısız oldu"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "%s@%s bağlantısı başarısız oldu, mesaj=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "İndiricide şüpheli hata"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Kapatılıyor"
|
||||
@@ -1250,16 +1259,6 @@ msgstr "SFV doğrulaması deneniyor"
|
||||
msgid "left"
|
||||
msgstr "kaldı"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: bilinmeyen durum kodu %s, şu makale için alındı: %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "İndiricide şüpheli hata"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Bu sunucu, bu bağlantı noktasında SSL kullanımına izin vermiyor"
|
||||
@@ -3458,9 +3457,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "SFV temelli kontrolleri etkinleştir"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "SFV dosyalarına dayalı ilave bir doğrulama yap."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -4297,29 +4295,20 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Apprise bildirimlerini etkinleştir"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Bildirimleri kullandığınız herhangi bir bildirim hizmetine doğrudan "
|
||||
"gönderin.<br>Örneğin: Slack, Discord, Telegram veya 100'den fazla "
|
||||
"desteklenen hizmetten herhangi biri!"
|
||||
"Apprise kullanarak neredeyse tüm bildirim hizmetlerine bildirim gönderin"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Varsayılan Apprise URL'lerini kullan"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "Varsayılan Apprise URL'leri"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise, hizmet bağlantı bilgilerini URL'ler kullanarak tanımlar.<br>Her "
|
||||
"hizmet için URL'nin nasıl tanımlanacağını öğrenmek için Apprise wiki'sini "
|
||||
"okuyun.<br>Birden fazla URL tanımlamak için virgül ve/veya boşluk kullanın."
|
||||
"Birden fazla URL (adres) tanımlamak için virgül ve/veya boşluk kullanın."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
|
||||
417 po/main/zh_CN.po (file diff suppressed because it is too large)
@@ -1,13 +1,13 @@
# Main requirements
# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
apprise==1.9.6
sabctools==9.1.0
CT3==3.4.0.post5
apprise==1.9.5
sabctools==8.2.6
CT3==3.4.0
cffi==2.0.0
pycparser==2.23
feedparser==6.0.12
configobj==5.0.9
cheroot==11.1.2
cheroot==11.0.0
six==1.17.0
cherrypy==18.10.0
jaraco.functools==4.3.0
@@ -37,7 +37,7 @@ cryptography==46.0.3
# We recommend using "orjson" as it is 2x as fast as "ujson". However, it requires
# Rust so SABnzbd works just as well with "ujson" or the Python built in "json" module
ujson==5.11.0
orjson==3.11.5
orjson==3.11.3

# Windows system integration
pywin32==311; sys_platform == 'win32'
@@ -50,8 +50,8 @@ winrt-Windows.UI.Notifications==3.2.1; sys_platform == 'win32'
typing_extensions==4.15.0; sys_platform == 'win32'

# macOS system calls
pyobjc-core==12.1; sys_platform == 'darwin'
pyobjc-framework-Cocoa==12.1; sys_platform == 'darwin'
pyobjc-core==12.0; sys_platform == 'darwin'
pyobjc-framework-Cocoa==12.0; sys_platform == 'darwin'

# Linux notifications
notify2==0.3.1; sys_platform != 'win32' and sys_platform != 'darwin'
@@ -60,15 +60,14 @@ notify2==0.3.1; sys_platform != 'win32' and sys_platform != 'darwin'
requests==2.32.5
requests-oauthlib==2.0.0
PyYAML==6.0.3
markdown # Version-less for Python 3.9 and below
markdown==3.10; python_version > '3.9'
markdown==3.9
paho-mqtt==1.6.1 # Pinned, newer versions don't work with AppRise yet

# Requests Requirements
charset_normalizer==3.4.4
idna==3.11
urllib3==2.6.0
certifi==2025.11.12
urllib3==2.5.0
certifi==2025.10.5
oauthlib==3.3.1
PyJWT==2.10.1
blinker==1.9.0

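The "; sys_platform == ..." and "; python_version > ..." suffixes in the requirements above are PEP 508 environment markers: pip evaluates them at install time and skips any pin whose marker is false on the current platform. A minimal sketch of evaluating such a marker with the packaging library (illustrative only, not part of this diff):

    # Evaluate a PEP 508 environment marker the same way pip does at install time.
    from packaging.markers import Marker

    marker = Marker("sys_platform == 'win32'")
    print(marker.evaluate())  # True on Windows, False elsewhere
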
@@ -269,7 +269,6 @@ def initialize(pause_downloader=False, clean_up=False, repair=0):
cfg.language.callback(cfg.guard_language)
cfg.enable_https_verification.callback(cfg.guard_https_ver)
cfg.guard_https_ver()
cfg.pipelining_requests.callback(cfg.guard_restart)

# Set language files
lang.set_locale_info("SABnzbd", DIR_LANGUAGE)

254 sabnzbd/api.py
@@ -28,9 +28,7 @@ import time
import getpass
import cherrypy
from threading import Thread
from typing import Optional, Any, Union

import sabctools
from typing import Tuple, Optional, List, Dict, Any, Union

# For json.dumps, orjson is magnitudes faster than ujson, but it is harder to
# compile due to Rust dependency. Since the output is the same, we support all modules.
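The comment above says all three JSON modules are supported interchangeably. A minimal sketch of such an import fallback (illustrative only; the actual selection logic in sabnzbd/api.py may differ):

    # Prefer orjson, fall back to ujson, then the stdlib json module.
    # Note: orjson.dumps() returns bytes while json.dumps() returns str,
    # so callers have to normalize the output type themselves.
    try:
        import orjson as json_module
    except ImportError:
        try:
            import ujson as json_module
        except ImportError:
            import json as json_module
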
@@ -105,7 +103,7 @@ _MSG_NO_SUCH_CONFIG = "Config item does not exist"
|
||||
_MSG_CONFIG_LOCKED = "Configuration locked"
|
||||
|
||||
|
||||
def api_handler(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def api_handler(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API Dispatcher"""
|
||||
# Clean-up the arguments
|
||||
for vr in ("mode", "name", "value", "value2", "value3", "start", "limit", "search"):
|
||||
@@ -119,13 +117,13 @@ def api_handler(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return response
|
||||
|
||||
|
||||
def _api_get_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
_, data = config.get_dconfig(kwargs.get("section"), kwargs.get("keyword"))
|
||||
return report(keyword="config", data=data)
|
||||
|
||||
|
||||
def _api_set_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_set_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -146,7 +144,7 @@ def _api_set_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(keyword="config", data=data)
|
||||
|
||||
|
||||
def _api_set_config_default(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_set_config_default(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Reset requested config variables back to defaults. Currently only for misc-section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -161,7 +159,7 @@ def _api_set_config_default(name: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report()
|
||||
|
||||
|
||||
def _api_del_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_del_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -171,13 +169,13 @@ def _api_del_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_queue(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=queue"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_queue_table.get(name, (_api_queue_default, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_queue_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_delete(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value"""
|
||||
if value.lower() == "all":
|
||||
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
|
||||
@@ -190,7 +188,7 @@ def _api_queue_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_queue_delete_nzf(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_delete_nzf(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=nzf_ids)"""
|
||||
nzf_ids = clean_comma_separated_list(kwargs.get("value2"))
|
||||
if value and nzf_ids:
|
||||
@@ -200,7 +198,7 @@ def _api_queue_delete_nzf(value: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_rename(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_rename(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=old name), value2(=new name), value3(=password)"""
|
||||
value2 = kwargs.get("value2")
|
||||
value3 = kwargs.get("value3")
|
||||
@@ -211,18 +209,18 @@ def _api_queue_rename(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_change_complete_action(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_change_complete_action(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=action)"""
|
||||
change_queue_complete_action(value)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_queue_purge(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_purge(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
|
||||
return report(keyword="", data={"status": bool(removed), "nzo_ids": removed})
|
||||
|
||||
|
||||
def _api_queue_pause(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_pause(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=list of nzo_id)"""
|
||||
if items := clean_comma_separated_list(value):
|
||||
handled = sabnzbd.NzbQueue.pause_multiple_nzo(items)
|
||||
@@ -231,7 +229,7 @@ def _api_queue_pause(value: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(keyword="", data={"status": bool(handled), "nzo_ids": handled})
|
||||
|
||||
|
||||
def _api_queue_resume(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_resume(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=list of nzo_id)"""
|
||||
if items := clean_comma_separated_list(value):
|
||||
handled = sabnzbd.NzbQueue.resume_multiple_nzo(items)
|
||||
@@ -240,7 +238,7 @@ def _api_queue_resume(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(keyword="", data={"status": bool(handled), "nzo_ids": handled})
|
||||
|
||||
|
||||
def _api_queue_priority(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_priority(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=priority)"""
|
||||
nzo_ids = clean_comma_separated_list(value)
|
||||
priority = kwargs.get("value2")
|
||||
@@ -259,7 +257,7 @@ def _api_queue_priority(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_sort(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_sort(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts sort, dir"""
|
||||
sort = kwargs.get("sort", "")
|
||||
direction = kwargs.get("dir", "")
|
||||
@@ -270,7 +268,7 @@ def _api_queue_sort(value: str, kwargs: dict[str, Union[str, list[str]]]) -> byt
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_default(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts sort, dir, start, limit and search terms"""
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
@@ -298,12 +296,12 @@ def _api_queue_default(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
)
|
||||
|
||||
|
||||
def _api_translate(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_translate(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=acronym)"""
|
||||
return report(keyword="value", data=T(kwargs.get("value", "")))
|
||||
|
||||
|
||||
def _api_addfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addfile(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, pp, script, cat, priority, nzbname"""
|
||||
# Normal upload will send the nzb in a kw arg called name or nzbfile
|
||||
if not name or isinstance(name, str):
|
||||
@@ -324,7 +322,7 @@ def _api_addfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_retry(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_retry(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, value(=nzo_id), nzbfile(=optional NZB), password (optional)"""
|
||||
value = kwargs.get("value")
|
||||
# Normal upload will send the nzb in a kw arg called nzbfile
|
||||
@@ -339,7 +337,7 @@ def _api_retry(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_cancel_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_cancel_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, value(=nzo_ids)"""
|
||||
if nzo_ids := clean_comma_separated_list(kwargs.get("value")):
|
||||
if sabnzbd.PostProcessor.cancel_pp(nzo_ids):
|
||||
@@ -347,7 +345,7 @@ def _api_cancel_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_addlocalfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addlocalfile(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, pp, script, cat, priority, nzbname"""
|
||||
if name:
|
||||
if os.path.exists(name):
|
||||
@@ -374,7 +372,7 @@ def _api_addlocalfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_switch(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_switch(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=first id), value2(=second id)"""
|
||||
value = kwargs.get("value")
|
||||
value2 = kwargs.get("value2")
|
||||
@@ -386,7 +384,7 @@ def _api_switch(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_change_cat(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_cat(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=category)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
cat = kwargs.get("value2")
|
||||
@@ -399,7 +397,7 @@ def _api_change_cat(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_change_script(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_script(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=script)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
script = kwargs.get("value2")
|
||||
@@ -412,7 +410,7 @@ def _api_change_script(name: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_change_opts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_opts(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=pp)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
pp = kwargs.get("value2")
|
||||
@@ -422,7 +420,7 @@ def _api_change_opts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byt
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_fullstatus(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_fullstatus(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: full history status"""
|
||||
status = build_status(
|
||||
calculate_performance=bool_conv(kwargs.get("calculate_performance")),
|
||||
@@ -431,19 +429,19 @@ def _api_fullstatus(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(keyword="status", data=status)
|
||||
|
||||
|
||||
def _api_status(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_status(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=status, passing on the value"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_status_table.get(name, (_api_fullstatus, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_unblock_server(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_unblock_server(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Unblock a blocked server"""
|
||||
sabnzbd.Downloader.unblock(value)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_delete_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_delete_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Remove orphaned job"""
|
||||
if value:
|
||||
path = os.path.join(cfg.download_dir.get_path(), value)
|
||||
@@ -454,7 +452,7 @@ def _api_delete_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_delete_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_delete_all_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Remove all orphaned jobs"""
|
||||
paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
|
||||
for path in paths:
|
||||
@@ -462,7 +460,7 @@ def _api_delete_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report()
|
||||
|
||||
|
||||
def _api_add_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]):
|
||||
def _api_add_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]):
|
||||
"""Add orphaned job"""
|
||||
if value:
|
||||
path = os.path.join(cfg.download_dir.get_path(), value)
|
||||
@@ -473,7 +471,7 @@ def _api_add_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]):
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_add_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_add_all_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Add all orphaned jobs"""
|
||||
paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
|
||||
for path in paths:
|
||||
@@ -481,13 +479,13 @@ def _api_add_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report()
|
||||
|
||||
|
||||
def _api_history(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=history"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_history_table.get(name, (_api_history_default, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_history_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_delete(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id or special), search, archive, del_files"""
|
||||
search = kwargs.get("search")
|
||||
archive = True
|
||||
@@ -533,7 +531,7 @@ def _api_history_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_history_mark_as_completed(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_mark_as_completed(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id)"""
|
||||
if value:
|
||||
history_db = sabnzbd.get_db_connection()
|
||||
@@ -552,7 +550,7 @@ def _api_history_mark_as_completed(value: str, kwargs: dict[str, Union[str, list
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_history_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_default(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts start, limit, search, failed_only, archive, cat, status, nzo_ids"""
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
@@ -597,7 +595,7 @@ def _api_history_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -
|
||||
return report(keyword="history", data=history)
|
||||
|
||||
|
||||
def _api_get_files(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_files(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id)"""
|
||||
value = kwargs.get("value")
|
||||
if value:
|
||||
@@ -606,7 +604,7 @@ def _api_get_files(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_move_nzf_bulk(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_move_nzf_bulk(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name(=top/up/down/bottom), value=(=nzo_id), nzf_ids, size (optional)"""
|
||||
nzo_id = kwargs.get("value")
|
||||
nzf_ids = clean_comma_separated_list(kwargs.get("nzf_ids"))
|
||||
@@ -632,7 +630,7 @@ def _api_move_nzf_bulk(name: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_addurl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addurl(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, output, pp, script, cat, priority, nzbname"""
|
||||
pp = kwargs.get("pp")
|
||||
script = kwargs.get("script")
|
||||
@@ -650,24 +648,24 @@ def _api_addurl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_pause(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_pause(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Scheduler.plan_resume(0)
|
||||
sabnzbd.Downloader.pause()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_resume(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_resume(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Scheduler.plan_resume(0)
|
||||
sabnzbd.downloader.unpause_all()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_shutdown(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_shutdown(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.shutdown_program()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_warnings(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_warnings(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, output"""
|
||||
if name == "clear":
|
||||
return report(keyword="warnings", data=sabnzbd.GUIHANDLER.clear())
|
||||
@@ -687,7 +685,7 @@ LOG_INI_HIDE_RE = re.compile(
|
||||
LOG_HASH_RE = re.compile(rb"([a-zA-Z\d]{25})", re.I)
|
||||
|
||||
|
||||
def _api_showlog(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_showlog(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Fetch the INI and the log-data and add a message at the top"""
|
||||
log_data = b"--------------------------------\n\n"
|
||||
log_data += b"The log includes a copy of your sabnzbd.ini with\nall usernames, passwords and API-keys removed."
|
||||
@@ -720,19 +718,19 @@ def _api_showlog(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return log_data
|
||||
|
||||
|
||||
def _api_get_cats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_cats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="categories", data=list_cats(False))
|
||||
|
||||
|
||||
def _api_get_scripts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_scripts(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="scripts", data=list_scripts())
|
||||
|
||||
|
||||
def _api_version(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_version(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="version", data=sabnzbd.__version__)
|
||||
|
||||
|
||||
def _api_auth(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_auth(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
key = kwargs.get("key", "")
|
||||
if not key:
|
||||
auth = "apikey"
|
||||
@@ -745,14 +743,14 @@ def _api_auth(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(keyword="auth", data=auth)
|
||||
|
||||
|
||||
def _api_restart(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_restart(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
logging.info("Restart requested by API")
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_restart_repair(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_restart_repair(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
logging.info("Queue repair requested by API")
|
||||
request_repair()
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
@@ -760,12 +758,12 @@ def _api_restart_repair(name: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report()
|
||||
|
||||
|
||||
def _api_disconnect(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_disconnect(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Downloader.disconnect()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_eval_sort(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_eval_sort(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: evaluate sorting expression"""
|
||||
sort_string = kwargs.get("sort_string", "")
|
||||
job_name = kwargs.get("job_name", "")
|
||||
@@ -777,28 +775,28 @@ def _api_eval_sort(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(keyword="result", data=path)
|
||||
|
||||
|
||||
def _api_watched_now(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_watched_now(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.DirScanner.scan()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_resume_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.resume()
|
||||
def _api_resume_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.paused = False
|
||||
return report()
|
||||
|
||||
|
||||
def _api_pause_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.pause()
|
||||
def _api_pause_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.paused = True
|
||||
return report()
|
||||
|
||||
|
||||
def _api_rss_now(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_rss_now(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
# Run RSS scan async, because it can take a long time
|
||||
sabnzbd.Scheduler.force_rss()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_retry_all(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_retry_all(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Retry all failed items in History"""
|
||||
items = sabnzbd.api.build_history()[0]
|
||||
nzo_ids = []
|
||||
@@ -808,13 +806,13 @@ def _api_retry_all(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(keyword="status", data=nzo_ids)
|
||||
|
||||
|
||||
def _api_reset_quota(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_reset_quota(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Reset quota left"""
|
||||
sabnzbd.BPSMeter.reset_quota(force=True)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_test_email(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_email(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test email, return result"""
|
||||
logging.info("Sending test email")
|
||||
pack = {"download": ["action 1", "action 2"], "unpack": ["action 1", "action 2"]}
|
||||
@@ -836,67 +834,67 @@ def _api_test_email(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_windows(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_windows(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test to Windows, return result"""
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_windows("SABnzbd", T("Test Notification"), "other")
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_notif(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_notif(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test to Notification Center, return result"""
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_notification_center("SABnzbd", T("Test Notification"), "other")
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_osd(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_osd(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test OSD notification, return result"""
|
||||
logging.info("Sending OSD notification")
|
||||
res = sabnzbd.notifier.send_notify_osd("SABnzbd", T("Test Notification"))
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_prowl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_prowl(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Prowl notification, return result"""
|
||||
logging.info("Sending Prowl notification")
|
||||
res = sabnzbd.notifier.send_prowl("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_pushover(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_pushover(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Pushover notification, return result"""
|
||||
logging.info("Sending Pushover notification")
|
||||
res = sabnzbd.notifier.send_pushover("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_pushbullet(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_pushbullet(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Pushbullet notification, return result"""
|
||||
logging.info("Sending Pushbullet notification")
|
||||
res = sabnzbd.notifier.send_pushbullet("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_apprise(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_apprise(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Apprise notification, return result"""
|
||||
logging.info("Sending Apprise notification")
|
||||
res = sabnzbd.notifier.send_apprise("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_nscript(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_nscript(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: execute a test notification script, return result"""
|
||||
logging.info("Executing notification script")
|
||||
res = sabnzbd.notifier.send_nscript("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_undefined(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_undefined(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_browse(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_browse(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Return tree of local path"""
|
||||
compact = bool_conv(kwargs.get("compact"))
|
||||
show_files = bool_conv(kwargs.get("show_files"))
|
||||
@@ -913,14 +911,14 @@ def _api_browse(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(keyword="paths", data=paths)
|
||||
|
||||
|
||||
def _api_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for "config" """
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
return _api_config_table.get(name, (_api_config_undefined, 2))[0](kwargs)
|
||||
|
||||
|
||||
def _api_config_speedlimit(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_speedlimit(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=speed)"""
|
||||
value = kwargs.get("value")
|
||||
if not value:
|
||||
@@ -929,26 +927,26 @@ def _api_config_speedlimit(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_set_pause(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_pause(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=pause interval)"""
|
||||
value = kwargs.get("value")
|
||||
sabnzbd.Scheduler.plan_resume(int_conv(value))
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_set_apikey(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_apikey(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
cfg.api_key.set(config.create_api_key())
|
||||
config.save_config()
|
||||
return report(keyword="apikey", data=cfg.api_key())
|
||||
|
||||
|
||||
def _api_config_set_nzbkey(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_nzbkey(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
cfg.nzb_key.set(config.create_api_key())
|
||||
config.save_config()
|
||||
return report(keyword="nzbkey", data=cfg.nzb_key())
|
||||
|
||||
|
||||
def _api_config_regenerate_certs(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_regenerate_certs(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
# Make sure we only over-write default locations
|
||||
result = False
|
||||
if (
|
||||
@@ -962,27 +960,27 @@ def _api_config_regenerate_certs(kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(data=result)
|
||||
|
||||
|
||||
def _api_config_test_server(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_test_server(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts server-params"""
|
||||
result, msg = test_nntp_server_dict(kwargs)
|
||||
return report(data={"result": result, "message": msg})
|
||||
|
||||
|
||||
def _api_config_create_backup(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_create_backup(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
backup_file = config.create_config_backup()
|
||||
return report(data={"result": bool(backup_file), "message": backup_file})
|
||||
|
||||
|
||||
def _api_config_purge_log_files(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_purge_log_files(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
purge_log_files()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_undefined(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_undefined(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_server_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_server_stats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sum_t, sum_m, sum_w, sum_d = sabnzbd.BPSMeter.get_sums()
|
||||
stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}}
|
||||
|
||||
@@ -1001,7 +999,7 @@ def _api_server_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(keyword="", data=stats)
|
||||
|
||||
|
||||
def _api_gc_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_gc_stats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Function only intended for internal testing of the memory handling"""
|
||||
# Collect before we check
|
||||
gc.collect()
|
||||
@@ -1212,7 +1210,7 @@ class XmlOutputFactory:
|
||||
return text
|
||||
|
||||
|
||||
def handle_server_api(kwargs: dict[str, Union[str, list[str]]]) -> str:
|
||||
def handle_server_api(kwargs: Dict[str, Union[str, List[str]]]) -> str:
|
    """Special handler for API-call 'set_config' [servers]"""
    name = kwargs.get("keyword")
    if not name:
@@ -1230,7 +1228,7 @@ def handle_server_api(kwargs: dict[str, Union[str, list[str]]]) -> str:
    return name


def handle_sorter_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_sorter_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
    """Special handler for API-call 'set_config' [sorters]"""
    name = kwargs.get("keyword")
    if not name:
@@ -1246,7 +1244,7 @@ def handle_sorter_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]
    return name


def handle_rss_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_rss_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
    """Special handler for API-call 'set_config' [rss]"""
    name = kwargs.get("keyword")
    if not name:
@@ -1280,7 +1278,7 @@ def handle_rss_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
    return name


def handle_cat_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_cat_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
    """Special handler for API-call 'set_config' [categories]"""
    name = kwargs.get("keyword")
    if not name:
@@ -1297,7 +1295,7 @@ def handle_cat_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
    return name


def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[bool, str]:
def test_nntp_server_dict(kwargs: Dict[str, Union[str, List[str]]]) -> Tuple[bool, str]:
    """Will connect (blocking) to the NNTP server and report back any errors"""
    host = kwargs.get("host", "").strip()
    port = int_conv(kwargs.get("port", 0))
@@ -1389,20 +1387,12 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
        # Sorry, no clever analysis:
        return False, T('Server address "%s:%s" is not valid.') % (host, port)

    nw = NewsWrapper(server=test_server, thrdnum=-1, block=True)
    nntp_code: int = 0
    nntp_message: str = ""

    def on_response(code: int, message: str):
        nonlocal nntp_code, nntp_message
        nntp_code = code
        nntp_message = message

    try:
        nw = NewsWrapper(server=test_server, thrdnum=-1, block=True)
        nw.init_connect()
        while not nw.connected:
            nw.write()
            nw.read(on_response=on_response)
            nw.recv_chunk()
        nw.finish_connect(nw.status_code)

    except socket.timeout:
        if port != 119 and not ssl:
@@ -1424,37 +1414,37 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
        return False, str(err)

    if not username or not password:
        nw.queue_command(b"ARTICLE <test@home>\r\n")
        nw.nntp.sock.sendall(b"ARTICLE <test@home>\r\n")
        try:
            nw.write()
            nw.read(on_response=on_response)
            nw.reset_data_buffer()
            nw.recv_chunk()
        except Exception as err:
            # Some internal error, not always safe to close connection
            return False, str(err)

    # Parse result
    return_status = ()
    if nntp_code:
        if nntp_code == 480:
    if nw.status_code:
        if nw.status_code == 480:
            return_status = (False, T("Server requires username and password."))
        elif nntp_code < 300 or nntp_code in (411, 423, 430):
        elif nw.status_code < 300 or nw.status_code in (411, 423, 430):
            # If no username/password set and we requested fake-article, it will return 430 Not Found
            return_status = (True, T("Connection Successful!"))
        elif nntp_code == 502 or sabnzbd.downloader.clues_login(nntp_message):
        elif nw.status_code == 502 or sabnzbd.downloader.clues_login(nw.nntp_msg):
            return_status = (False, T("Authentication failed, check username/password."))
        elif sabnzbd.downloader.clues_too_many(nntp_message):
        elif sabnzbd.downloader.clues_too_many(nw.nntp_msg):
            return_status = (False, T("Too many connections, please pause downloading or try again later"))

    # Fallback in case no data was received or unknown status
    if not return_status:
        return_status = (False, T("Could not determine connection result (%s)") % nntp_message)
        return_status = (False, T("Could not determine connection result (%s)") % nw.nntp_msg)

    # Close the connection and return result
    nw.hard_reset()
    return return_status


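Condensed, the result parsing above is a small mapping from NNTP status codes to user-facing outcomes. As a minimal standalone sketch of that mapping (hypothetical helper, plain strings instead of the T() translations):

def interpret_nntp_test(status_code: int) -> tuple[bool, str]:
    # 480 means the server wants credentials before serving articles
    if status_code == 480:
        return False, "Server requires username and password."
    # 2xx, or "article not found" style codes (411/423/430), still prove the connection works
    if status_code and (status_code < 300 or status_code in (411, 423, 430)):
        return True, "Connection Successful!"
    # 502 is the classic authentication-failure response
    if status_code == 502:
        return False, "Authentication failed, check username/password."
    return False, "Could not determine connection result (%s)" % status_code
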
def build_status(calculate_performance: bool = False, skip_dashboard: bool = False) -> dict[str, Any]:
def build_status(calculate_performance: bool = False, skip_dashboard: bool = False) -> Dict[str, Any]:
    # build up header full of basic information
    info = build_header(trans_functions=False)

@@ -1512,13 +1502,13 @@ def build_status(calculate_performance: bool = False, skip_dashboard: bool = Fal
        for nw in server.busy_threads.copy():
            if nw.connected:
                activeconn += 1
            if article := nw.article:
            if nw.article:
                serverconnections.append(
                    {
                        "thrdnum": nw.thrdnum,
                        "art_name": article.article,
                        "nzf_name": article.nzf.filename,
                        "nzo_name": article.nzf.nzo.final_name,
                        "art_name": nw.article.article,
                        "nzf_name": nw.article.nzf.filename,
                        "nzo_name": nw.article.nzf.nzo.final_name,
                    }
                )

@@ -1556,11 +1546,11 @@ def build_queue(
    start: int = 0,
    limit: int = 0,
    search: Optional[str] = None,
    categories: Optional[list[str]] = None,
    priorities: Optional[list[str]] = None,
    statuses: Optional[list[str]] = None,
    nzo_ids: Optional[list[str]] = None,
) -> dict[str, Any]:
    categories: Optional[List[str]] = None,
    priorities: Optional[List[str]] = None,
    statuses: Optional[List[str]] = None,
    nzo_ids: Optional[List[str]] = None,
) -> Dict[str, Any]:
    info = build_header(for_template=False)
    (
        queue_bytes_total,
@@ -1669,7 +1659,7 @@ def build_queue(
    return info


def fast_queue() -> tuple[bool, int, float, str]:
def fast_queue() -> Tuple[bool, int, float, str]:
    """Return paused, bytes_left, bpsnow, time_left"""
    bytes_left = sabnzbd.sabnzbd.NzbQueue.remaining()
    paused = sabnzbd.Downloader.paused
@@ -1678,7 +1668,7 @@ def fast_queue() -> tuple[bool, int, float, str]:
    return paused, bytes_left, bpsnow, time_left


def build_file_list(nzo_id: str) -> list[dict[str, Any]]:
def build_file_list(nzo_id: str) -> List[Dict[str, Any]]:
    """Build file lists for specified job"""
    jobs = []
    nzo = sabnzbd.sabnzbd.NzbQueue.get_nzo(nzo_id)
@@ -1752,7 +1742,7 @@ def retry_job(
    return None


def del_job_files(job_paths: list[str]):
def del_job_files(job_paths: List[str]):
    """Remove files of each path in the list"""
    for path in job_paths:
        if path and clip_path(path).lower().startswith(cfg.download_dir.get_clipped_path().lower()):
@@ -1795,7 +1785,7 @@ def clear_trans_cache():
    sabnzbd.WEBUI_READY = True


def build_header(webdir: str = "", for_template: bool = True, trans_functions: bool = True) -> dict[str, Any]:
def build_header(webdir: str = "", for_template: bool = True, trans_functions: bool = True) -> Dict[str, Any]:
    """Build the basic header"""
    header = {}

@@ -1862,10 +1852,10 @@ def build_history(
    limit: int = 1000000,
    archive: bool = False,
    search: Optional[str] = None,
    categories: Optional[list[str]] = None,
    statuses: Optional[list[str]] = None,
    nzo_ids: Optional[list[str]] = None,
) -> tuple[list[dict[str, Any]], int, int]:
    categories: Optional[List[str]] = None,
    statuses: Optional[List[str]] = None,
    nzo_ids: Optional[List[str]] = None,
) -> Tuple[List[Dict[str, Any]], int, int]:
    """Combine the jobs still in post-processing and the database history"""
    if not archive:
        # Grab any items that are active or queued in postproc
@@ -1941,7 +1931,7 @@ def build_history(
    return items, postproc_queue_size, total_items


def add_active_history(postproc_queue: list[NzbObject], items: list[dict[str, Any]]):
def add_active_history(postproc_queue: List[NzbObject], items: List[Dict[str, Any]]):
    """Get the active history queue and add it to the existing items list"""
    nzo_ids = set([nzo["nzo_id"] for nzo in items])

@@ -2000,7 +1990,7 @@ def calc_timeleft(bytesleft: float, bps: float) -> str:
    return format_time_left(int(bytesleft / bps))


def list_cats(default: bool = True) -> list[str]:
def list_cats(default: bool = True) -> List[str]:
    """Return list of (ordered) categories,
    when default==False use '*' for Default category
    """
@@ -2029,7 +2019,7 @@ def plural_to_single(kw, def_kw=""):
    return def_kw


def del_from_section(kwargs: dict[str, Union[str, list[str]]]) -> bool:
def del_from_section(kwargs: Dict[str, Union[str, List[str]]]) -> bool:
    """Remove keyword in section"""
    section = kwargs.get("section", "")
    if section in ("sorters", "servers", "rss", "categories"):

@@ -22,7 +22,7 @@ sabnzbd.articlecache - Article cache handling
import logging
import threading
import struct
from typing import Collection
from typing import Dict, Collection

import sabnzbd
from sabnzbd.decorators import synchronized
@@ -39,7 +39,7 @@ class ArticleCache:
        self.__cache_limit_org = 0
        self.__cache_limit = 0
        self.__cache_size = 0
        self.__article_table: dict[Article, bytes] = {}  # Dict of buffered articles
        self.__article_table: Dict[Article, bytes] = {}  # Dict of buffered articles

        self.assembler_write_trigger: int = 1


@@ -25,7 +25,7 @@ import logging
import re
from threading import Thread
import ctypes
from typing import Optional
from typing import Tuple, Optional, List
import rarfile

import sabnzbd
@@ -39,7 +39,7 @@ from sabnzbd.filesystem import (
    has_unwanted_extension,
    get_basename,
)
from sabnzbd.constants import Status, GIGI
from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
import sabnzbd.cfg as cfg
from sabnzbd.nzbstuff import NzbObject, NzbFile
import sabnzbd.par2file as par2file
@@ -48,8 +48,7 @@ import sabnzbd.par2file as par2file
class Assembler(Thread):
    def __init__(self):
        super().__init__()
        self.max_queue_size: int = cfg.assembler_max_queue_size()
        self.queue: queue.Queue[tuple[Optional[NzbObject], Optional[NzbFile], Optional[bool]]] = queue.Queue()
        self.queue: queue.Queue[Tuple[Optional[NzbObject], Optional[NzbFile], Optional[bool]]] = queue.Queue()

    def stop(self):
        self.queue.put((None, None, None))
@@ -58,7 +57,7 @@ class Assembler(Thread):
        self.queue.put((nzo, nzf, file_done))

    def queue_level(self) -> float:
        return self.queue.qsize() / self.max_queue_size
        return self.queue.qsize() / MAX_ASSEMBLER_QUEUE

    def run(self):
        while 1:
@@ -250,7 +249,7 @@ RE_SUBS = re.compile(r"\W+sub|subs|subpack|subtitle|subtitles(?![a-z])", re.I)
SAFE_EXTS = (".mkv", ".mp4", ".avi", ".wmv", ".mpg", ".webm")


def is_cloaked(nzo: NzbObject, path: str, names: list[str]) -> bool:
def is_cloaked(nzo: NzbObject, path: str, names: List[str]) -> bool:
    """Return True if this is likely to be a cloaked encrypted post"""
    fname = get_basename(get_filename(path.lower()))
    for name in names:
@@ -279,7 +278,7 @@ def is_cloaked(nzo: NzbObject, path: str, names: list[str]) -> bool:
    return False


def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> tuple[bool, Optional[str]]:
def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[bool, Optional[str]]:
    """Combines check for unwanted and encrypted files to save on CPU and IO"""
    encrypted = False
    unwanted = None

@@ -22,7 +22,7 @@ sabnzbd.bpsmeter - bpsmeter
import time
import logging
import re
from typing import Optional
from typing import List, Dict, Optional

import sabnzbd
from sabnzbd.constants import BYTES_FILE_NAME, KIBI
@@ -132,20 +132,20 @@ class BPSMeter:
        self.speed_log_time = t
        self.last_update = t
        self.bps = 0.0
        self.bps_list: list[int] = []
        self.bps_list: List[int] = []

        self.server_bps: dict[str, float] = {}
        self.cached_amount: dict[str, int] = {}
        self.server_bps: Dict[str, float] = {}
        self.cached_amount: Dict[str, int] = {}
        self.sum_cached_amount: int = 0
        self.day_total: dict[str, int] = {}
        self.week_total: dict[str, int] = {}
        self.month_total: dict[str, int] = {}
        self.grand_total: dict[str, int] = {}
        self.day_total: Dict[str, int] = {}
        self.week_total: Dict[str, int] = {}
        self.month_total: Dict[str, int] = {}
        self.grand_total: Dict[str, int] = {}

        self.timeline_total: dict[str, dict[str, int]] = {}
        self.timeline_total: Dict[str, Dict[str, int]] = {}

        self.article_stats_tried: dict[str, dict[str, int]] = {}
        self.article_stats_failed: dict[str, dict[str, int]] = {}
        self.article_stats_tried: Dict[str, Dict[str, int]] = {}
        self.article_stats_failed: Dict[str, Dict[str, int]] = {}

        self.delayed_assembler: int = 0

@@ -254,6 +254,8 @@ class BPSMeter:
            self.week_total[server] = 0
        if server not in self.month_total:
            self.month_total[server] = 0
        if server not in self.month_total:
            self.month_total[server] = 0
        if server not in self.grand_total:
            self.grand_total[server] = 0
        if server not in self.timeline_total:
@@ -300,51 +302,45 @@ class BPSMeter:
        for server in sabnzbd.Downloader.servers[:]:
            self.init_server_stats(server.id)

        # Cache dict references for faster access
        day_total = self.day_total
        week_total = self.week_total
        month_total = self.month_total
        grand_total = self.grand_total
        timeline_total = self.timeline_total
        cached_amount = self.cached_amount
        server_bps = self.server_bps

        start_time = self.start_time
        last_update = self.last_update
        # Minimum epsilon to avoid division by zero
        dt_total = max(t - start_time, 1e-6)
        dt_last = max(last_update - start_time, 1e-6)

        # Add amounts that have been stored temporarily to statistics
        for srv in self.cached_amount:
            if cached := self.cached_amount[srv]:
                day_total[srv] += cached
                week_total[srv] += cached
                month_total[srv] += cached
                grand_total[srv] += cached
                timeline_total[srv][self.day_label] += cached

                # Reset for next time
                cached_amount[srv] = 0
            if self.cached_amount[srv]:
                self.day_total[srv] += self.cached_amount[srv]
                self.week_total[srv] += self.cached_amount[srv]
                self.month_total[srv] += self.cached_amount[srv]
                self.grand_total[srv] += self.cached_amount[srv]
                self.timeline_total[srv][self.day_label] += self.cached_amount[srv]

            # Update server bps
                server_bps[srv] = (server_bps[srv] * dt_last + cached) / dt_total
            try:
                self.server_bps[srv] = (
                    self.server_bps[srv] * (self.last_update - self.start_time) + self.cached_amount[srv]
                ) / (t - self.start_time)
            except ZeroDivisionError:
                self.server_bps[srv] = 0.0

            # Reset for next time
            self.cached_amount[srv] = 0

        # Quota check
        total_cached = self.sum_cached_amount
        if self.have_quota and self.quota_enabled:
            self.left -= total_cached
            self.left -= self.sum_cached_amount
            self.check_quota()

        # Speedometer
        self.bps = (self.bps * dt_last + total_cached) / dt_total
        try:
            self.bps = (self.bps * (self.last_update - self.start_time) + self.sum_cached_amount) / (
                t - self.start_time
            )
        except ZeroDivisionError:
            self.bps = 0.0

        self.sum_cached_amount = 0
        self.last_update = t

        check_time = t - 5.0

        if start_time < check_time:
        if self.start_time < check_time:
            self.start_time = check_time

        if self.bps < 0.01:
@@ -386,7 +382,7 @@ class BPSMeter:

        # Always trim the list to the max-length
        if len(self.bps_list) > BPS_LIST_MAX:
            self.bps_list = self.bps_list[-BPS_LIST_MAX:]
            self.bps_list = self.bps_list[len(self.bps_list) - BPS_LIST_MAX :]

    def get_sums(self):
        """return tuple of grand, month, week, day totals"""

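The speedometer update in the hunk above is a time-weighted running average: the bytes represented by the old average are rescaled by the time elapsed at the last update, the newly cached bytes are added, and the total is divided by the full elapsed time. A minimal sketch of just that arithmetic (names hypothetical):

def updated_bps(bps: float, new_bytes: int, start_time: float, last_update: float, now: float) -> float:
    # bps * (last_update - start_time) is the byte count the old average stands for
    try:
        return (bps * (last_update - start_time) + new_bytes) / (now - start_time)
    except ZeroDivisionError:
        return 0.0

# Example: 1000 B/s averaged over 10 s plus 5000 new bytes at t=12 s gives 1250 B/s
assert updated_bps(1000.0, 5000, 0.0, 10.0, 12.0) == 1250.0
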
@@ -25,7 +25,7 @@ import re
import argparse
import socket
import ipaddress
from typing import Union
from typing import List, Tuple, Union

import sabnzbd
from sabnzbd.config import (
@@ -52,14 +52,12 @@ from sabnzbd.constants import (
    DEF_STD_WEB_COLOR,
    DEF_HTTPS_CERT_FILE,
    DEF_HTTPS_KEY_FILE,
    DEF_MAX_ASSEMBLER_QUEUE,
    DEF_PIPELINING_REQUESTS,
)
from sabnzbd.filesystem import same_directory, real_path, is_valid_script, is_network_path

# Validators currently only are made for string/list-of-strings
# and return those on success or an error message.
ValidateResult = Union[tuple[None, str], tuple[None, list[str]], tuple[str, None]]
ValidateResult = Union[Tuple[None, str], Tuple[None, List[str]], Tuple[str, None]]


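As the comment above notes, validators take a string (or list of strings) and answer with an (error, value) pair: (None, value) on success, (message, None) on failure. A minimal custom validator in that convention (illustrative only, not part of the module):

def validate_port(value: str) -> ValidateResult:
    # Accept a TCP port number passed as a string
    if value.isdigit() and 0 < int(value) <= 65535:
        return None, value
    return "Port must be a number between 1 and 65535", None
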
##############################################################################
@@ -124,21 +122,21 @@ def supported_unrar_parameters(value: str) -> ValidateResult:
    return None, value


def all_lowercase(value: Union[str, list]) -> tuple[None, Union[str, list]]:
def all_lowercase(value: Union[str, List]) -> Tuple[None, Union[str, List]]:
    """Lowercase and strip everything!"""
    if isinstance(value, list):
        return None, [item.lower().strip() for item in value]
    return None, value.lower().strip()


def lower_case_ext(value: Union[str, list]) -> tuple[None, Union[str, list]]:
def lower_case_ext(value: Union[str, List]) -> Tuple[None, Union[str, List]]:
    """Generate lower case extension(s), without dot"""
    if isinstance(value, list):
        return None, [item.lower().strip(" .") for item in value]
    return None, value.lower().strip(" .")


def validate_single_tag(value: list[str]) -> tuple[None, list[str]]:
def validate_single_tag(value: List[str]) -> Tuple[None, List[str]]:
    """Don't split single indexer tags like "TV > HD"
    into ['TV', '>', 'HD']
    """
@@ -148,7 +146,7 @@ def validate_single_tag(value: list[str]) -> tuple[None, list[str]]:
    return None, value


def validate_url_base(value: str) -> tuple[None, str]:
def validate_url_base(value: str) -> Tuple[None, str]:
    """Strips the right slash and adds starting slash, if not present"""
    if value and isinstance(value, str):
        if not value.startswith("/"):
@@ -160,7 +158,7 @@ def validate_url_base(value: str) -> tuple[None, str]:
RE_VAL = re.compile(r"[^@ ]+@[^.@ ]+\.[^.@ ]")


def validate_email(value: Union[list, str]) -> ValidateResult:
def validate_email(value: Union[List, str]) -> ValidateResult:
    if email_endjob() or email_full() or email_rss():
        if isinstance(value, list):
            values = value
@@ -287,7 +285,7 @@ def validate_download_vs_complete_dir(root: str, value: str, default: str):
    return validate_safedir(root, value, default)


def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> tuple[None, str]:
def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> Tuple[None, str]:
    """Warn users to not use the Program Files folder for their scripts"""
    # Need to add separator so /mnt/sabnzbd and /mnt/sabnzbd-data are not detected as equal
    if value and same_directory(sabnzbd.DIR_PROG, os.path.join(root, value)):
@@ -300,7 +298,7 @@ def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> tuple[
    return None, value


def validate_default_if_empty(root: str, value: str, default: str) -> tuple[None, str]:
def validate_default_if_empty(root: str, value: str, default: str) -> Tuple[None, str]:
    """If value is empty, return default"""
    if value:
        return None, value
@@ -507,7 +505,7 @@ no_penalties = OptionBool("misc", "no_penalties", False)
x_frame_options = OptionBool("misc", "x_frame_options", True)
allow_old_ssl_tls = OptionBool("misc", "allow_old_ssl_tls", False)
enable_season_sorting = OptionBool("misc", "enable_season_sorting", True)
verify_xff_header = OptionBool("misc", "verify_xff_header", True)
verify_xff_header = OptionBool("misc", "verify_xff_header", False)

# Text values
rss_odd_titles = OptionList("misc", "rss_odd_titles", ["nzbindex.nl/", "nzbindex.com/", "nzbclub.com/"])
@@ -529,13 +527,11 @@ local_ranges = OptionList("misc", "local_ranges", protect=True)
max_url_retries = OptionNumber("misc", "max_url_retries", 10, minval=1)
downloader_sleep_time = OptionNumber("misc", "downloader_sleep_time", 10, minval=0)
receive_threads = OptionNumber("misc", "receive_threads", 2, minval=1)
assembler_max_queue_size = OptionNumber("misc", "assembler_max_queue_size", DEF_MAX_ASSEMBLER_QUEUE, minval=1)
switchinterval = OptionNumber("misc", "switchinterval", 0.005, minval=0.001)
ssdp_broadcast_interval = OptionNumber("misc", "ssdp_broadcast_interval", 15, minval=1, maxval=600)
ext_rename_ignore = OptionList("misc", "ext_rename_ignore", validation=lower_case_ext)
unrar_parameters = OptionStr("misc", "unrar_parameters", validation=supported_unrar_parameters)
outgoing_nntp_ip = OptionStr("misc", "outgoing_nntp_ip")
pipelining_requests = OptionNumber("misc", "pipelining_requests", DEF_PIPELINING_REQUESTS, minval=1, maxval=10)


##############################################################################

@@ -28,7 +28,7 @@ import time
import uuid
import io
import zipfile
from typing import Any, Callable, Optional, Union
from typing import List, Dict, Any, Callable, Optional, Union, Tuple
from urllib.parse import urlparse

import configobj
@@ -101,14 +101,14 @@ class Option:
    def get_string(self) -> str:
        return str(self.get())

    def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
        """Return value as a dictionary.
        Will not show non-public options if needed for the API"""
        if not self.__public and for_public_api:
            return {}
        return {self.__keyword: self.get()}

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Set value based on dictionary"""
        if not self.__protect:
            try:
@@ -307,7 +307,7 @@ class OptionList(Option):
        self,
        section: str,
        keyword: str,
        default_val: Union[str, list, None] = None,
        default_val: Union[str, List, None] = None,
        validation: Optional[Callable] = None,
        add: bool = True,
        public: bool = True,
@@ -318,7 +318,7 @@ class OptionList(Option):
            default_val = []
        super().__init__(section, keyword, default_val, add=add, public=public, protect=protect)

    def set(self, value: Union[str, list]) -> Optional[str]:
    def set(self, value: Union[str, List]) -> Optional[str]:
        """Set the list given a comma-separated string or a list"""
        error = None
        if value is not None:
@@ -341,7 +341,7 @@ class OptionList(Option):
        """Return the default list as a comma-separated string"""
        return ", ".join(self.default)

    def __call__(self) -> list[str]:
    def __call__(self) -> List[str]:
        """get() replacement"""
        return self.get()

@@ -406,7 +406,7 @@ class OptionPassword(Option):
            return "*" * 10
        return ""

    def get_dict(self, for_public_api: bool = False) -> dict[str, str]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, str]:
        """Return value a dictionary"""
        if for_public_api:
            return {self.keyword: self.get_stars()}
@@ -454,7 +454,7 @@ class ConfigServer:
        self.set_dict(values)
        add_to_database("servers", self.__name, self)

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Set one or more fields, passed as dictionary"""
        # Replace usage_at_start value with most recent statistics if the user changes the quota value
        # Only when we are updating it from the Config
@@ -491,7 +491,7 @@ class ConfigServer:
        if not self.displayname():
            self.displayname.set(self.__name)

    def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
        """Return a dictionary with all attributes"""
        output_dict = {}
        output_dict["name"] = self.__name
@@ -531,7 +531,7 @@ class ConfigServer:
class ConfigCat:
    """Class defining a single category"""

    def __init__(self, name: str, values: dict[str, Any]):
    def __init__(self, name: str, values: Dict[str, Any]):
        self.__name = clean_section_name(name)
        name = "categories," + self.__name

@@ -545,7 +545,7 @@ class ConfigCat:
        self.set_dict(values)
        add_to_database("categories", self.__name, self)

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Set one or more fields, passed as dictionary"""
        for kw in ("order", "pp", "script", "dir", "newzbin", "priority"):
            try:
@@ -554,7 +554,7 @@ class ConfigCat:
            except KeyError:
                continue

    def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
        """Return a dictionary with all attributes"""
        output_dict = {}
        output_dict["name"] = self.__name
@@ -589,7 +589,7 @@ class ConfigSorter:
        self.set_dict(values)
        add_to_database("sorters", self.__name, self)

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Set one or more fields, passed as dictionary"""
        for kw in ("order", "min_size", "multipart_label", "sort_string", "sort_cats", "sort_type", "is_active"):
            try:
@@ -598,7 +598,7 @@ class ConfigSorter:
            except KeyError:
                continue

    def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
        """Return a dictionary with all attributes"""
        output_dict = {}
        output_dict["name"] = self.__name
@@ -639,7 +639,7 @@ class OptionFilters(Option):
            return
        self.set(lst)

    def update(self, pos: int, value: tuple):
    def update(self, pos: int, value: Tuple):
        """Update filter 'pos' definition, value is a list
        Append if 'pos' outside list
        """
@@ -659,14 +659,14 @@ class OptionFilters(Option):
            return
        self.set(lst)

    def get_dict(self, for_public_api: bool = False) -> dict[str, str]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, str]:
        """Return filter list as a dictionary with keys 'filter[0-9]+'"""
        output_dict = {}
        for n, rss_filter in enumerate(self.get()):
            output_dict[f"filter{n}"] = rss_filter
        return output_dict

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Create filter list from dictionary with keys 'filter[0-9]+'"""
        filters = []
        # We don't know how many filters there are, so just assume all values are filters
@@ -677,7 +677,7 @@ class OptionFilters(Option):
        if filters:
            self.set(filters)

    def __call__(self) -> list[list[str]]:
    def __call__(self) -> List[List[str]]:
        """get() replacement"""
        return self.get()

@@ -701,7 +701,7 @@ class ConfigRSS:
        self.set_dict(values)
        add_to_database("rss", self.__name, self)

    def set_dict(self, values: dict[str, Any]):
    def set_dict(self, values: Dict[str, Any]):
        """Set one or more fields, passed as dictionary"""
        for kw in ("uri", "cat", "pp", "script", "priority", "enable"):
            try:
@@ -711,7 +711,7 @@ class ConfigRSS:
                continue
        self.filters.set_dict(values)

    def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
    def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
        """Return a dictionary with all attributes"""
        output_dict = {}
        output_dict["name"] = self.__name
@@ -755,7 +755,7 @@ AllConfigTypes = Union[
    ConfigRSS,
    ConfigServer,
]
CFG_DATABASE: dict[str, dict[str, AllConfigTypes]] = {}
CFG_DATABASE: Dict[str, Dict[str, AllConfigTypes]] = {}


@synchronized(CONFIG_LOCK)
@@ -1103,7 +1103,7 @@ def restore_config_backup(config_backup_data: bytes):


@synchronized(CONFIG_LOCK)
def get_servers() -> dict[str, ConfigServer]:
def get_servers() -> Dict[str, ConfigServer]:
    global CFG_DATABASE
    try:
        return CFG_DATABASE["servers"]
@@ -1112,7 +1112,7 @@ def get_servers() -> dict[str, ConfigServer]:


@synchronized(CONFIG_LOCK)
def get_sorters() -> dict[str, ConfigSorter]:
def get_sorters() -> Dict[str, ConfigSorter]:
    global CFG_DATABASE
    try:
        return CFG_DATABASE["sorters"]
@@ -1120,7 +1120,7 @@ def get_sorters() -> dict[str, ConfigSorter]:
        return {}


def get_ordered_sorters() -> list[dict]:
def get_ordered_sorters() -> List[Dict]:
    """Return sorters as an ordered list"""
    database_sorters = get_sorters()

@@ -1131,7 +1131,7 @@ def get_ordered_sorters() -> list[dict]:


@synchronized(CONFIG_LOCK)
def get_categories() -> dict[str, ConfigCat]:
def get_categories() -> Dict[str, ConfigCat]:
    """Return link to categories section.
    This section will always contain special category '*'
    """
@@ -1163,7 +1163,7 @@ def get_category(cat: str = "*") -> ConfigCat:
    return cats["*"]


def get_ordered_categories() -> list[dict]:
def get_ordered_categories() -> List[Dict]:
    """Return list-copy of categories section that's ordered
    by user's ordering including Default-category
    """
@@ -1183,7 +1183,7 @@ def get_ordered_categories() -> list[dict]:


@synchronized(CONFIG_LOCK)
def get_rss() -> dict[str, ConfigRSS]:
def get_rss() -> Dict[str, ConfigRSS]:
    global CFG_DATABASE
    try:
    # We have to remove non-separator commas by detecting if they are valid URL's

@@ -50,7 +50,7 @@ RENAMES_FILE = "__renames__"
ATTRIB_FILE = "SABnzbd_attrib"
REPAIR_REQUEST = "repair-all.sab"

SABCTOOLS_VERSION_REQUIRED = "9.1.0"
SABCTOOLS_VERSION_REQUIRED = "8.2.6"

DB_HISTORY_VERSION = 1
DB_HISTORY_NAME = "history%s.db" % DB_HISTORY_VERSION
@@ -97,13 +97,12 @@ CONFIG_BACKUP_HTTPS = {  # "basename": "associated setting"
}

# Constants affecting download performance
DEF_MAX_ASSEMBLER_QUEUE = 12
SOFT_ASSEMBLER_QUEUE_LIMIT = 0.5
MAX_ASSEMBLER_QUEUE = 12
SOFT_QUEUE_LIMIT = 0.5
# Percentage of cache to use before adding file to assembler
ASSEMBLER_WRITE_THRESHOLD = 5
NNTP_BUFFER_SIZE = int(256 * KIBI)
NNTP_BUFFER_SIZE = int(800 * KIBI)
NTTP_MAX_BUFFER_SIZE = int(10 * MEBI)
DEF_PIPELINING_REQUESTS = 2

REPAIR_PRIORITY = 3
FORCE_PRIORITY = 2

@@ -27,7 +27,7 @@ import sys
import threading
import sqlite3
from sqlite3 import Connection, Cursor
from typing import Optional, Sequence, Any
from typing import Optional, List, Sequence, Dict, Any, Tuple, Union

import sabnzbd
import sabnzbd.cfg
@@ -237,7 +237,7 @@ class HistoryDB:
        self.execute("""UPDATE history SET status = ? WHERE nzo_id = ?""", (Status.COMPLETED, job))
        logging.info("[%s] Marked job %s as completed", caller_name(), job)

    def get_failed_paths(self, search: Optional[str] = None) -> list[str]:
    def get_failed_paths(self, search: Optional[str] = None) -> List[str]:
        """Return list of all storage paths of failed jobs (may contain non-existing or empty paths)"""
        search = convert_search(search)
        fetch_ok = self.execute(
@@ -315,10 +315,10 @@ class HistoryDB:
        limit: Optional[int] = None,
        archive: Optional[bool] = None,
        search: Optional[str] = None,
        categories: Optional[list[str]] = None,
        statuses: Optional[list[str]] = None,
        nzo_ids: Optional[list[str]] = None,
    ) -> tuple[list[dict[str, Any]], int]:
        categories: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        nzo_ids: Optional[List[str]] = None,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Return records for specified jobs"""
        command_args = [convert_search(search)]

@@ -397,7 +397,7 @@ class HistoryDB:
        total = self.cursor.fetchone()["COUNT(*)"]
        return total > 0

    def get_history_size(self) -> tuple[int, int, int]:
    def get_history_size(self) -> Tuple[int, int, int]:
        """Returns the total size of the history and
        amounts downloaded in the last month and week
        """
@@ -457,7 +457,7 @@ class HistoryDB:
            return path
        return path

    def get_other(self, nzo_id: str) -> tuple[str, str, str, str, str]:
    def get_other(self, nzo_id: str) -> Tuple[str, str, str, str, str]:
        """Return additional data for job `nzo_id`"""
        if self.execute("""SELECT * FROM history WHERE nzo_id = ?""", (nzo_id,)):
            try:
@@ -554,7 +554,7 @@ def build_history_info(nzo, workdir_complete: str, postproc_time: int, script_ou
    )


def unpack_history_info(item: sqlite3.Row) -> dict[str, Any]:
def unpack_history_info(item: sqlite3.Row) -> Dict[str, Any]:
    """Expands the single line stage_log from the DB
    into a python dictionary for use in the history display
    """

@@ -21,10 +21,13 @@ sabnzbd.decoder - article decoder

import logging
import hashlib
from typing import Optional
import binascii
from io import BytesIO
from zlib import crc32

import sabnzbd
from sabnzbd.constants import SABCTOOLS_VERSION_REQUIRED
from sabnzbd.encoding import ubtou
from sabnzbd.nzbstuff import Article
from sabnzbd.misc import match_str

@@ -47,7 +50,7 @@ except Exception:


class BadData(Exception):
    def __init__(self, data: bytearray):
    def __init__(self, data: bytes):
        super().__init__()
        self.data = data

@@ -60,8 +63,8 @@ class BadUu(Exception):
    pass


def decode(article: Article, decoder: sabctools.NNTPResponse):
    decoded_data: Optional[bytearray] = None
def decode(article: Article, data_view: memoryview):
    decoded_data = None
    nzo = article.nzf.nzo
    art_id = article.article

@@ -75,10 +78,10 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):
        if sabnzbd.LOG_ALL:
            logging.debug("Decoding %s", art_id)

        if decoder.format is sabctools.EncodingFormat.UU:
            decoded_data = decode_uu(article, decoder)
        if article.nzf.type == "uu":
            decoded_data = decode_uu(article, bytes(data_view))
        else:
            decoded_data = decode_yenc(article, decoder)
            decoded_data = decode_yenc(article, data_view)

        article_success = True

@@ -109,18 +112,28 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):

    except (BadYenc, ValueError):
        # Handles precheck and badly formed articles
        if nzo.precheck and decoder.status_code == 223:
        if nzo.precheck and data_view and data_view[:4] == b"223 ":
            # STAT was used, so we only get a status code
            article_success = True
        else:
            # Examine the headers (for precheck) or body (for download).
            if lines := decoder.lines:
                for line in lines:
            # Try uu-decoding
            if not nzo.precheck and article.nzf.type != "yenc":
                try:
                    decoded_data = decode_uu(article, bytes(data_view))
                    logging.debug("Found uu-encoded article %s in job %s", art_id, nzo.final_name)
                    article_success = True
                except Exception:
                    pass
            # Only bother with further checks if uu-decoding didn't work out
            if not article_success:
                # Convert the first 2000 bytes of raw socket data to article lines,
                # and examine the headers (for precheck) or body (for download).
                for line in bytes(data_view[:2000]).split(b"\r\n"):
                    lline = line.lower()
                    if lline.startswith("message-id:"):
                    if lline.startswith(b"message-id:"):
                        article_success = True
                    # Look for DMCA clues (while skipping "X-" headers)
                    if not lline.startswith("x-") and match_str(lline, ("dmca", "removed", "cancel", "blocked")):
                    if not lline.startswith(b"x-") and match_str(lline, (b"dmca", b"removed", b"cancel", b"blocked")):
                        article_success = False
                        logging.info("Article removed from server (%s)", art_id)
                        break
@@ -157,63 +170,164 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):
    sabnzbd.NzbQueue.register_article(article, article_success)


def decode_yenc(article: Article, response: sabctools.NNTPResponse) -> bytearray:
def decode_yenc(article: Article, data_view: memoryview) -> bytearray:
    # Let SABCTools do all the heavy lifting
    decoded_data = response.data
    article.file_size = response.file_size
    article.data_begin = response.part_begin
    article.data_size = response.part_size
    (
        decoded_data,
        yenc_filename,
        article.file_size,
        article.data_begin,
        article.data_size,
        crc_correct,
    ) = sabctools.yenc_decode(data_view)

    nzf = article.nzf
    # Assume it is yenc
    nzf.type = "yenc"

    # Only set the name if it was found and not obfuscated
    if not nzf.filename_checked and (file_name := response.file_name):
    if not nzf.filename_checked and yenc_filename:
        # Set the md5-of-16k if this is the first article
        if article.lowest_partnum:
            nzf.md5of16k = hashlib.md5(memoryview(decoded_data)[:16384]).digest()
            nzf.md5of16k = hashlib.md5(decoded_data[:16384]).digest()

        # Try the rename, even if it's not the first article
        # For example when the first article was missing
        nzf.nzo.verify_nzf_filename(nzf, file_name)
        nzf.nzo.verify_nzf_filename(nzf, yenc_filename)

    # CRC check
    if (crc := response.crc) is None:
    if crc_correct is None:
        logging.info("CRC Error in %s", article.article)
        raise BadData(decoded_data)

    article.crc32 = crc
    article.crc32 = crc_correct

    return decoded_data


def decode_uu(article: Article, response: sabctools.NNTPResponse) -> bytearray:
    """Process a uu-decoded response"""
    if not response.bytes_decoded:
def decode_uu(article: Article, raw_data: bytes) -> bytes:
    """Try to uu-decode an article. The raw_data may or may not contain headers.
    If there are headers, they will be separated from the body by at least one
    empty line. In case of no headers, the first line seems to always be the nntp
    response code (220/222) directly followed by the msg body."""
    if not raw_data:
        logging.debug("No data to decode")
        raise BadUu

    if response.baddata:
        raise BadData(response.data)
    # Line up the raw_data
    raw_data = raw_data.split(b"\r\n")

    decoded_data = response.data
    nzf = article.nzf
    nzf.type = "uu"
    # Index of the uu payload start in raw_data
    uu_start = 0

    # Limit the number of lines to check for the onset of uu data
    limit = min(len(raw_data), 32) - 1
    if limit < 3:
        logging.debug("Article too short to contain valid uu-encoded data")
        raise BadUu

    # Try to find an empty line separating the body from headers or response
    # code and set the expected payload start to the next line.
    try:
        uu_start = raw_data[:limit].index(b"") + 1
    except ValueError:
        # No empty line, look for a response code instead
        if raw_data[0].startswith(b"220 ") or raw_data[0].startswith(b"222 "):
            uu_start = 1
        else:
            # Invalid data?
            logging.debug("Failed to locate start of uu payload")
            raise BadUu

    def is_uu_junk(line: bytes) -> bool:
        """Determine if the line is empty or contains known junk data"""
        return (not line) or line == b"-- " or line.startswith(b"Posted via ")

    # Check the uu 'begin' line
    if article.lowest_partnum:
        try:
            # Make sure the line after the uu_start one isn't empty as well or
            # detection of the 'begin' line won't work. For articles other than
            # lowest_partnum, filtering out empty lines (and other junk) can
            # wait until the actual decoding step.
            for index in range(uu_start, limit):
                if is_uu_junk(raw_data[index]):
                    uu_start = index + 1
                else:
                    # Bingo
                    break
            else:
                # Search reached the limit
                raise IndexError

            uu_begin_data = raw_data[uu_start].split(b" ")
            # Filename may contain spaces
            uu_filename = ubtou(b" ".join(uu_begin_data[2:]).strip())

            # Sanity check the 'begin' line
            if (
                len(uu_begin_data) < 3
                or uu_begin_data[0].lower() != b"begin"
                or (not int(uu_begin_data[1], 8))
                or (not uu_filename)
            ):
                raise ValueError

            # Consider this enough proof to set the type, avoiding further
            # futile attempts at decoding articles in this nzf as yenc.
            article.nzf.type = "uu"

            # Bump the pointer for the payload to the next line
            uu_start += 1
        except Exception:
            logging.debug("Missing or invalid uu 'begin' line: %s", raw_data[uu_start] if uu_start < limit else None)
            raise BadUu

    # Do the actual decoding
    with BytesIO() as decoded_data:
        for line in raw_data[uu_start:]:
            # Ignore junk
            if is_uu_junk(line):
                continue

            # End of the article
            if line in (b"`", b"end", b"."):
                break

            # Remove dot stuffing
            if line.startswith(b".."):
                line = line[1:]

            try:
                decoded_line = binascii.a2b_uu(line)
            except binascii.Error as msg:
                try:
                    # Workaround for broken uuencoders by Fredrik Lundh
                    nbytes = (((line[0] - 32) & 63) * 4 + 5) // 3
                    decoded_line = binascii.a2b_uu(line[:nbytes])
                except Exception as msg2:
                    logging.info(
                        "Error while uu-decoding %s: %s (line: %s; workaround: %s)", article.article, msg, line, msg2
                    )
                    raise BadData(decoded_data.getvalue())

            # Store the decoded data
            decoded_data.write(decoded_line)

        # Set the type to uu; the latter is still needed in
        # case the lowest_partnum article was damaged or slow to download.
        article.nzf.type = "uu"

        # Only set the name if it was found and not obfuscated
    if not nzf.filename_checked and (file_name := response.file_name):
        # Set the md5-of-16k if this is the first article
        if article.lowest_partnum:
            nzf.md5of16k = hashlib.md5(memoryview(decoded_data)[:16384]).digest()
            decoded_data.seek(0)
            article.nzf.md5of16k = hashlib.md5(decoded_data.read(16384)).digest()
        # Handle the filename
        if not article.nzf.filename_checked and uu_filename:
            article.nzf.nzo.verify_nzf_filename(article.nzf, uu_filename)

        # Try the rename, even if it's not the first article
        # For example when the first article was missing
        nzf.nzo.verify_nzf_filename(nzf, file_name)

    article.crc32 = response.crc

    return decoded_data
        data = decoded_data.getvalue()
        article.crc32 = crc32(data)
        return data


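The per-line transform that decode_uu leans on is the standard binascii uu codec; each uu line carries a length character followed by the encoded payload. A tiny standalone round-trip (not project code):

import binascii

data = b"Hello, usenet!"          # uu lines hold at most 45 raw bytes each
line = binascii.b2a_uu(data)      # length char + encoded payload + newline
assert binascii.a2b_uu(line) == data
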
def search_new_server(article: Article) -> bool:

@@ -70,7 +70,7 @@ def conditional_cache(cache_time: int):
    Empty results (None, empty collections, empty strings, False, 0) are not cached.
    If a keyword argument of `force=True` is used, the cache is skipped.

    Unhashable types (such as list) can not be used as an input to the wrapped function in the current implementation!
    Unhashable types (such as List) can not be used as an input to the wrapped function in the current implementation!

    :param cache_time: Time in seconds to cache non-empty results
    """

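The docstring above spells out the decorator's contract; a minimal sketch of a decorator honoring it (a hypothetical re-implementation, not the module's actual code) could look like:

import time
import functools

def conditional_cache(cache_time: int):
    def decorator(func):
        cache = {}  # argument tuple -> (expiry, result)

        @functools.wraps(func)
        def wrapper(*args, force: bool = False, **kwargs):
            # Unhashable arguments (such as a list) raise TypeError here, matching the noted limitation
            key = (args, tuple(sorted(kwargs.items())))
            now = time.monotonic()
            if not force and key in cache and cache[key][0] > now:
                return cache[key][1]
            result = func(*args, **kwargs)
            if result:  # empty results (None, [], "", False, 0) are never cached
                cache[key] = (now + cache_time, result)
            return result

        return wrapper

    return decorator
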
11
sabnzbd/deobfuscate_filenames.py
Normal file → Executable file
@@ -38,13 +38,14 @@ from sabnzbd.par2file import is_par2_file, parse_par2_file
import sabnzbd.utils.file_extension as file_extension
from sabnzbd.misc import match_str
from sabnzbd.constants import IGNORED_MOVIE_FOLDERS
from typing import List

# Files to exclude and minimal file size for renaming
EXCLUDED_FILE_EXTS = (".vob", ".rar", ".par2", ".mts", ".m2ts", ".cpi", ".clpi", ".mpl", ".mpls", ".bdm", ".bdmv")
MIN_FILE_SIZE = 10 * 1024 * 1024


def decode_par2(parfile: str) -> list[str]:
def decode_par2(parfile: str) -> List[str]:
    """Parse a par2 file and rename files listed in the par2 to their real name. Return list of generated files"""
    # Check if really a par2 file
    if not is_par2_file(parfile):
@@ -76,7 +77,7 @@ def decode_par2(parfile: str) -> list[str]:
    return new_files


def recover_par2_names(filelist: list[str]) -> list[str]:
def recover_par2_names(filelist: List[str]) -> List[str]:
    """Find par2 files and use them for renaming"""
    # Check that files exists
    filelist = [f for f in filelist if os.path.isfile(f)]
@@ -167,7 +168,7 @@ def is_probably_obfuscated(myinputfilename: str) -> bool:
    return True  # default is obfuscated


def get_biggest_file(filelist: list[str]) -> str:
def get_biggest_file(filelist: List[str]) -> str:
    """Returns biggest file if that file is much bigger than the other files
    If only one file exists, return that. If no file, return None
    Note: the files in filelist must exist, because their sizes on disk are checked"""
@@ -189,7 +190,7 @@ def get_biggest_file(filelist: list[str]) -> str:
    return None


def deobfuscate(nzo, filelist: list[str], usefulname: str) -> list[str]:
def deobfuscate(nzo, filelist: List[str], usefulname: str) -> List[str]:
    """
    For files in filelist:
    1. if a file has no meaningful extension, add it (for example ".txt" or ".png")
@@ -320,7 +321,7 @@ def without_extension(fullpathfilename: str) -> str:
    return os.path.splitext(fullpathfilename)[0]


def deobfuscate_subtitles(nzo, filelist: list[str]):
def deobfuscate_subtitles(nzo, filelist: List[str]):
    """
    input:
        nzo, so we can update result via set_unpack_info()

@@ -25,7 +25,7 @@ import subprocess
import time
import threading
import logging
from typing import Optional
from typing import Optional, Dict, List, Tuple

import sabnzbd
import sabnzbd.cfg as cfg
@@ -37,6 +37,7 @@ from sabnzbd.decorators import synchronized
from sabnzbd.newsunpack import RAR_EXTRACTFROM_RE, RAR_EXTRACTED_RE, rar_volumelist, add_time_left
from sabnzbd.postproc import prepare_extraction_path
from sabnzbd.misc import SABRarFile
import rarfile
from sabnzbd.utils.diskspeed import diskspeedmeasure

# Need a lock to make sure start and stop is handled correctly
@@ -61,11 +62,11 @@ class DirectUnpacker(threading.Thread):
        self.rarfile_nzf: Optional[NzbFile] = None
        self.cur_setname: Optional[str] = None
        self.cur_volume: int = 0
        self.total_volumes: dict[str, int] = {}
        self.total_volumes: Dict[str, int] = {}
        self.unpack_time: float = 0.0

        self.success_sets: dict[str, tuple[list[str], list[str]]] = {}
        self.next_sets: list[NzbFile] = []
        self.success_sets: Dict[str, Tuple[List[str], List[str]]] = {}
        self.next_sets: List[NzbFile] = []

        self.duplicate_lines: int = 0


@@ -23,7 +23,7 @@ import asyncio
import os
import logging
import threading
from typing import Generator, Optional
from typing import Generator, Set, Optional, Tuple

import sabnzbd
from sabnzbd.constants import SCAN_FILE_NAME, VALID_ARCHIVES, VALID_NZB_FILES, AddNzbFileResult
@@ -128,7 +128,7 @@ class DirScanner(threading.Thread):

    def get_suspected_files(
        self, folder: str, catdir: Optional[str] = None
    ) -> Generator[tuple[str, Optional[str], Optional[os.stat_result]], None, None]:
    ) -> Generator[Tuple[str, Optional[str], Optional[os.stat_result]], None, None]:
        """Generator listing possible paths to NZB files"""

        if catdir is None:
@@ -222,15 +222,17 @@ class DirScanner(threading.Thread):

    async def scan_async(self, dirscan_dir: str):
        """Do one scan of the watched folder"""
        with DIR_SCANNER_LOCK:
            self.lock = asyncio.Lock()
        # On Python 3.8 we first need an event loop before we can create a asyncio.Lock
        if not self.lock:
            with DIR_SCANNER_LOCK:
                self.lock = asyncio.Lock()

        async with self.lock:
            if sabnzbd.PAUSED_ALL:
                return

            files: set[str] = set()
            futures: set[asyncio.Task] = set()
            files: Set[str] = set()
            futures: Set[asyncio.Task] = set()

            for path, catdir, stat_tuple in self.get_suspected_files(dirscan_dir):
                files.add(path)

@@ -19,18 +19,15 @@
sabnzbd.downloader - download engine
"""

import select
import logging
import selectors
from collections import deque
from threading import Thread, RLock, current_thread
import socket
import sys
import ssl
import time
from datetime import date
from typing import Optional, Union, Deque

import sabctools
from typing import List, Dict, Optional, Union, Set

import sabnzbd
from sabnzbd.decorators import synchronized, NzbQueueLocker, DOWNLOADER_CV, DOWNLOADER_LOCK
@@ -39,7 +36,7 @@ import sabnzbd.config as config
import sabnzbd.cfg as cfg
from sabnzbd.misc import from_units, helpful_warning, int_conv, MultiAddQueue
from sabnzbd.get_addrinfo import get_fastest_addrinfo, AddrInfo
from sabnzbd.constants import SOFT_ASSEMBLER_QUEUE_LIMIT
from sabnzbd.constants import SOFT_QUEUE_LIMIT


# Timeout penalty in minutes for each cause
@@ -138,9 +135,9 @@ class Server:
        self.username: Optional[str] = username
        self.password: Optional[str] = password

        self.busy_threads: set[NewsWrapper] = set()
        self.busy_threads: Set[NewsWrapper] = set()
        self.next_busy_threads_check: float = 0
        self.idle_threads: set[NewsWrapper] = set()
        self.idle_threads: Set[NewsWrapper] = set()
        self.next_article_search: float = 0
        self.active: bool = True
        self.bad_cons: int = 0
@@ -151,7 +148,7 @@ class Server:
        self.request: bool = False  # True if a getaddrinfo() request is pending
        self.have_body: bool = True  # Assume server has "BODY", until proven otherwise
        self.have_stat: bool = True  # Assume server has "STAT", until proven otherwise
        self.article_queue: Deque[sabnzbd.nzbstuff.Article] = deque()
        self.article_queue: List[sabnzbd.nzbstuff.Article] = []

        # Skip during server testing
        if threads:
@@ -176,19 +173,19 @@ class Server:
        self.reset_article_queue()

    @synchronized(DOWNLOADER_LOCK)
    def get_article(self, peek: bool = False):
    def get_article(self):
        """Get article from pre-fetched and pre-fetch new ones if necessary.
        Articles that are too old for this server are immediately marked as tried"""
        if self.article_queue:
            return self.article_queue[0] if peek else self.article_queue.popleft()
            return self.article_queue.pop(0)

        if self.next_article_search < time.time():
            # Pre-fetch new articles
            sabnzbd.NzbQueue.get_articles(self, sabnzbd.Downloader.servers, _ARTICLE_PREFETCH)
            self.article_queue = sabnzbd.NzbQueue.get_articles(self, sabnzbd.Downloader.servers, _ARTICLE_PREFETCH)
            if self.article_queue:
                article = self.article_queue[0] if peek else self.article_queue.popleft()
                article = self.article_queue.pop(0)
                # Mark expired articles as tried on this server
                if not peek and self.retention and article.nzf.nzo.avg_stamp < time.time() - self.retention:
                if self.retention and article.nzf.nzo.avg_stamp < time.time() - self.retention:
                    sabnzbd.Downloader.decode(article)
                    while self.article_queue:
                        sabnzbd.Downloader.decode(self.article_queue.pop())
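The peek flag added to get_article works because a deque allows O(1) inspection of its head without consuming it; a tiny standalone illustration (not project code):

from collections import deque

articles = deque(["art1", "art2"])
peeked = articles[0]          # peek: look at the head, queue unchanged
popped = articles.popleft()   # pop: remove the head
assert peeked == popped == "art1"
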
@@ -204,12 +201,9 @@ class Server:
        """Reset articles queued for the Server. Locked to prevent
        articles getting stuck in the Server when enabled/disabled"""
        logging.debug("Resetting article queue for %s (%s)", self, self.article_queue)
        while self.article_queue:
            try:
                article = self.article_queue.popleft()
                article.allow_new_fetcher()
            except IndexError:
                pass
        for article in self.article_queue:
            article.allow_new_fetcher()
        self.article_queue = []

    def request_addrinfo(self):
        """Launch async request to resolve server address and select the fastest.
@@ -256,7 +250,7 @@ class Downloader(Thread):
        "shutdown",
        "server_restarts",
        "force_disconnect",
        "selector",
        "read_fds",
        "servers",
        "timers",
        "last_max_chunk_size",
@@ -296,10 +290,10 @@ class Downloader(Thread):

        self.force_disconnect: bool = False

        self.selector: selectors.DefaultSelector = selectors.DefaultSelector()
        self.read_fds: Dict[int, NewsWrapper] = {}

        self.servers: list[Server] = []
        self.timers: dict[str, list[float]] = {}
        self.servers: List[Server] = []
        self.timers: Dict[str, List[float]] = {}

        for server in config.get_servers():
            self.init_server(None, server)
@@ -367,34 +361,15 @@ class Downloader(Thread):
        self.servers.sort(key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))

    @synchronized(DOWNLOADER_LOCK)
    def add_socket(self, nw: NewsWrapper):
        """Add a socket to be watched for read or write availability"""
        if nw.nntp:
            try:
                self.selector.register(nw.nntp.fileno, selectors.EVENT_READ | selectors.EVENT_WRITE, nw)
                nw.selector_events = selectors.EVENT_READ | selectors.EVENT_WRITE
            except KeyError:
                pass

    @synchronized(DOWNLOADER_LOCK)
    def modify_socket(self, nw: NewsWrapper, events: int):
        """Modify the events socket are watched for"""
        if nw.nntp and nw.selector_events != events:
            try:
                self.selector.modify(nw.nntp.fileno, events, nw)
                nw.selector_events = events
            except KeyError:
                pass
    def add_socket(self, fileno: int, nw: NewsWrapper):
        """Add a socket ready to be used to the list to be watched"""
        self.read_fds[fileno] = nw

    @synchronized(DOWNLOADER_LOCK)
    def remove_socket(self, nw: NewsWrapper):
        """Remove a socket to be watched"""
        if nw.nntp:
            try:
                self.selector.unregister(nw.nntp.fileno)
                nw.selector_events = 0
            except KeyError:
                pass
            self.read_fds.pop(nw.nntp.fileno, None)

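The add_socket/modify_socket/remove_socket trio above is a thin wrapper around the standard selectors module: register a socket with an interest set, modify the set as the connection moves between writing and reading, and unregister when the connection is reset. In isolation the pattern looks roughly like this (standalone sketch; a socketpair stands in for an NNTP connection):

import selectors
import socket

sel = selectors.DefaultSelector()
a, b = socket.socketpair()

# Register for both read and write readiness, attaching our own object as data
sel.register(a.fileno(), selectors.EVENT_READ | selectors.EVENT_WRITE, data="conn-a")
# Later, narrow the interest set to reads only
sel.modify(a.fileno(), selectors.EVENT_READ, data="conn-a")

b.sendall(b"ping")
for key, events in sel.select(timeout=1):
    if events & selectors.EVENT_READ:
        print(key.data, a.recv(16))  # -> conn-a b'ping'

sel.unregister(a.fileno())
a.close()
b.close()
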
@NzbQueueLocker
def set_paused_state(self, state: bool):
@@ -434,9 +409,8 @@ class Downloader(Thread):

@NzbQueueLocker
def resume_from_postproc(self):
if self.paused_for_postproc:
logging.info("Post-processing finished, resuming download")
self.paused_for_postproc = False
logging.info("Post-processing finished, resuming download")
self.paused_for_postproc = False

@NzbQueueLocker
def disconnect(self):
@@ -477,15 +451,6 @@ class Downloader(Thread):
self.bandwidth_perc = 0
self.bandwidth_limit = 0

# Increase limits for faster connections
if limit > from_units("150M"):
if cfg.receive_threads() == cfg.receive_threads.default:
cfg.receive_threads.set(4)
logging.info("Receive threads set to 4")
if cfg.assembler_max_queue_size() == cfg.assembler_max_queue_size.default:
cfg.assembler_max_queue_size.set(30)
logging.info("Assembler max_queue_size set to 30")

def sleep_time_set(self):
self.sleep_time = cfg.downloader_sleep_time() * 0.0001
logging.debug("Sleep time: %f seconds", self.sleep_time)
@@ -534,30 +499,26 @@ class Downloader(Thread):

# Remove all connections to server
for nw in server.idle_threads | server.busy_threads:
self.reset_nw(nw, "Forcing disconnect", warn=False, wait=False, retry_article=False)
self.__reset_nw(nw, "Forcing disconnect", warn=False, wait=False, retry_article=False)

# Make sure server address resolution is refreshed
server.addrinfo = None

@staticmethod
def decode(article: "sabnzbd.nzbstuff.Article", response: Optional[sabctools.NNTPResponse] = None):
def decode(article, data_view: Optional[memoryview] = None):
"""Decode article"""
# Need a better way of draining requests
if article.nzf.nzo.removed_from_queue:
return

# Article was requested and fetched, update article stats for the server
sabnzbd.BPSMeter.register_server_article_tried(article.fetcher.id)

# Handle broken articles directly
if not response or not response.bytes_decoded and not article.nzf.nzo.precheck:
if not data_view:
if not article.search_new_server():
article.nzf.nzo.increase_bad_articles_counter("missing_articles")
sabnzbd.NzbQueue.register_article(article, success=False)
return

# Decode and send to article cache
sabnzbd.decoder.decode(article, response)
sabnzbd.decoder.decode(article, data_view)

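The decode hunk hands the decoder a memoryview slice instead of a response object. The point of a memoryview slice is that it exposes part of the receive buffer without copying it; a small self-contained illustration (the buffer contents are made up):

# Why a memoryview slice is passed along: slicing a memoryview is zero-copy.
data = bytearray(b"220 0 <id>\r\nbody bytes...\r\n.\r\n")
data_view = memoryview(data)
data_position = len(data)

chunk = data_view[:data_position]   # zero-copy view, like nw.data_view[: nw.data_position]
assert bytes(chunk) == bytes(data)  # bytes are only materialized when explicitly asked for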
def run(self):
# Warn if there are servers defined, but none are valid
@@ -577,7 +538,7 @@ class Downloader(Thread):
for _ in range(cfg.receive_threads()):
# Started as daemon, so we don't need any shutdown logic in the worker
# The Downloader code will make sure shutdown is handled gracefully
Thread(target=self.process_nw_worker, args=(process_nw_queue,), daemon=True).start()
Thread(target=self.process_nw_worker, args=(self.read_fds, process_nw_queue), daemon=True).start()

# Catch all errors, just in case
try:
@@ -599,9 +560,9 @@ class Downloader(Thread):
if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
if nw.nntp and nw.nntp.error_msg:
# Already showed error
self.reset_nw(nw)
self.__reset_nw(nw)
else:
self.reset_nw(nw, "Timed out", warn=True)
self.__reset_nw(nw, "Timed out", warn=True)
server.bad_cons += 1
self.maybe_block_server(server)

@@ -641,14 +602,15 @@ class Downloader(Thread):
server.request_addrinfo()
break

if not server.get_article(peek=True):
nw.article = server.get_article()
if not nw.article:
break

server.idle_threads.remove(nw)
server.busy_threads.add(nw)

if nw.connected:
self.add_socket(nw)
self.__request_article(nw)
else:
try:
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.host)
@@ -660,14 +622,14 @@ class Downloader(Thread):
server.host,
sys.exc_info()[1],
)
self.reset_nw(nw, "Failed to initialize", warn=True)
self.__reset_nw(nw, "Failed to initialize", warn=True)

if self.force_disconnect or self.shutdown:
for server in self.servers:
for nw in server.idle_threads | server.busy_threads:
# Send goodbye if we have open socket
if nw.nntp:
self.reset_nw(nw, "Forcing disconnect", wait=False, count_article_try=False)
self.__reset_nw(nw, "Forcing disconnect", wait=False, count_article_try=False)
# Make sure server address resolution is refreshed
server.addrinfo = None
server.reset_article_queue()
@@ -691,12 +653,10 @@ class Downloader(Thread):
self.last_max_chunk_size = 0

# Use select to find sockets ready for reading/writing
if self.selector.get_map():
if events := self.selector.select(timeout=1.0):
for key, ev in events:
process_nw_queue.put((key.data, ev))
if readkeys := self.read_fds.keys():
read, _, _ = select.select(readkeys, (), (), 1.0)
else:
events = []
read = []
BPSMeter.reset()
time.sleep(0.1)
self.max_chunk_size = _DEFAULT_CHUNK_SIZE
@@ -715,65 +675,58 @@ class Downloader(Thread):
next_bpsmeter_update = now + _BPSMETER_UPDATE_DELAY
self.check_assembler_levels()

if not events:
if not read:
continue

# Wait for socket operation completion
# Submit all readable sockets to be processed and wait for completion
process_nw_queue.put_multiple(read)
process_nw_queue.join()

except Exception:
logging.error(T("Fatal error in Downloader"), exc_info=True)

def process_nw_worker(self, nw_queue: MultiAddQueue):
def process_nw_worker(self, read_fds: Dict[int, NewsWrapper], nw_queue: MultiAddQueue):
"""Worker for the daemon thread to process results.
Wrapped in try/except because in case of an exception, logging
might get lost and the queue.join() would block forever."""
try:
logging.debug("Starting Downloader receive thread: %s", current_thread().name)
while True:
self.process_nw(*nw_queue.get())
# The read_fds is passed by reference, so we can access its items!
self.process_nw(read_fds[nw_queue.get()])
nw_queue.task_done()
except Exception:
# We cannot break out of the Downloader from here, so just pause
logging.error(T("Fatal error in Downloader"), exc_info=True)
self.pause()

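Taken together, the run-loop and worker hunks above implement a select-then-dispatch pattern: the main loop selects readable filenos and pushes them onto a joinable queue, while daemon workers look each wrapper up in the shared read_fds dict. A stand-alone sketch under the assumption that the standard-library Queue can stand in for SABnzbd's MultiAddQueue (put_multiple is just a loop of put() here):

import queue
import select
import threading

read_fds = {}  # fileno -> connection state, shared with the workers by reference
work = queue.Queue()

def process(conn):
    pass  # placeholder for process_nw(): read and handle the response

def worker():
    while True:
        fileno = work.get()
        process(read_fds[fileno])
        work.task_done()

for _ in range(4):  # cfg.receive_threads() in the real code
    threading.Thread(target=worker, daemon=True).start()

def one_pass():
    if read_fds:
        readable, _, _ = select.select(read_fds.keys(), (), (), 1.0)
        for fileno in readable:
            work.put(fileno)
        work.join()  # block until every readable socket has been processed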
def process_nw(self, nw: NewsWrapper, event: int):
def process_nw(self, nw: NewsWrapper):
"""Receive data from a NewsWrapper and handle the response"""
if event & selectors.EVENT_READ:
self.process_nw_read(nw)
if event & selectors.EVENT_WRITE:
nw.write()

def process_nw_read(self, nw: NewsWrapper) -> None:
bytes_received: int = 0
bytes_pending: int = 0

while True:
try:
n, bytes_pending = nw.read(nbytes=bytes_pending)
bytes_received += n
except ssl.SSLWantReadError:
return
except (ConnectionError, ConnectionAbortedError):
# The ConnectionAbortedError is also thrown by sabctools in case of fatal SSL-layer problems
self.reset_nw(nw, "Server closed connection", wait=False)
return
except BufferError:
# The BufferError is thrown when exceeding maximum buffer size
# Make sure to discard the article
self.reset_nw(nw, "Maximum data buffer size exceeded", wait=False, retry_article=False)
return

if not bytes_pending:
break
try:
bytes_received, end_of_line, article_done = nw.recv_chunk()
except ssl.SSLWantReadError:
return
except (ConnectionError, ConnectionAbortedError):
# The ConnectionAbortedError is also thrown by sabctools in case of fatal SSL-layer problems
self.__reset_nw(nw, "Server closed connection", wait=False)
return
except BufferError:
# The BufferError is thrown when exceeding maximum buffer size
# Make sure to discard the article
self.__reset_nw(nw, "Maximum data buffer size exceeded", wait=False, retry_article=False)
return

article = nw.article
server = nw.server

with DOWNLOADER_LOCK:
sabnzbd.BPSMeter.update(server.id, bytes_received)
if bytes_received > self.last_max_chunk_size:
self.last_max_chunk_size = bytes_received
# Update statistics only when we fetched a whole article
# The side effect is that we don't count things like article-not-available messages
if article_done:
article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, nw.data_position)
# Check speedlimit
if (
self.bandwidth_limit
@@ -784,10 +737,97 @@ class Downloader(Thread):
time.sleep(0.01)
sabnzbd.BPSMeter.update()

# If we are not at the end of a line, more data will follow
if not end_of_line:
return

# Response code depends on request command:
# 220 = ARTICLE, 222 = BODY
if nw.status_code not in (220, 222) and not article_done:
if not nw.connected or nw.status_code == 480:
if not self.__finish_connect_nw(nw):
return
if nw.connected:
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)
self.__request_article(nw)

elif nw.status_code == 223:
article_done = True
logging.debug("Article <%s> is present", article.article)

elif nw.status_code in (411, 423, 430, 451):
article_done = True
logging.debug(
"Thread %s@%s: Article %s missing (error=%s)",
nw.thrdnum,
nw.server.host,
article.article,
nw.status_code,
)
nw.reset_data_buffer()

elif nw.status_code == 500:
if article.nzf.nzo.precheck:
# Assume "STAT" command is not supported
server.have_stat = False
logging.debug("Server %s does not support STAT", server.host)
else:
# Assume "BODY" command is not supported
server.have_body = False
logging.debug("Server %s does not support BODY", server.host)
nw.reset_data_buffer()
self.__request_article(nw)

else:
# Don't warn for (internal) server errors during downloading
if nw.status_code not in (400, 502, 503):
logging.warning(
T("%s@%s: Received unknown status code %s for article %s"),
nw.thrdnum,
nw.server.host,
nw.status_code,
article.article,
)

# Ditch this thread, we don't know what data we got now so the buffer can be bad
self.__reset_nw(nw, f"Server error or unknown status code: {nw.status_code}", wait=False)
return

if article_done:
# Successful data, clear "bad" counter
server.bad_cons = 0
server.errormsg = server.warning = ""

# Decode
self.decode(article, nw.data_view[: nw.data_position])

if sabnzbd.LOG_ALL:
logging.debug("Thread %s@%s: %s done", nw.thrdnum, server.host, article.article)

# Reset connection for new activity
nw.soft_reset()

# Request a new article immediately if possible
if (
nw.connected
and server.active
and not server.restart
and not (self.paused or self.shutdown or self.paused_for_postproc)
):
nw.article = server.get_article()
if nw.article:
self.__request_article(nw)
return

# Make socket available again
server.busy_threads.discard(nw)
server.idle_threads.add(nw)
self.remove_socket(nw)

def check_assembler_levels(self):
"""Check the Assembler queue to see if we need to delay, depending on queue size"""
if (assembler_level := sabnzbd.Assembler.queue_level()) > SOFT_ASSEMBLER_QUEUE_LIMIT:
time.sleep(min((assembler_level - SOFT_ASSEMBLER_QUEUE_LIMIT) / 4, 0.15))
if (assembler_level := sabnzbd.Assembler.queue_level()) > SOFT_QUEUE_LIMIT:
time.sleep(min((assembler_level - SOFT_QUEUE_LIMIT) / 4, 0.15))
sabnzbd.BPSMeter.delayed_assembler += 1
logged_counter = 0

@@ -809,12 +849,13 @@ class Downloader(Thread):
logged_counter += 1

@synchronized(DOWNLOADER_LOCK)
def finish_connect_nw(self, nw: NewsWrapper, response: sabctools.NNTPResponse) -> bool:
def __finish_connect_nw(self, nw: NewsWrapper) -> bool:
server = nw.server
try:
nw.finish_connect(response.status_code, response.message)
nw.finish_connect(nw.status_code)
if sabnzbd.LOG_ALL:
logging.debug("%s@%s last message -> %d", nw.thrdnum, server.host, response.status_code)
logging.debug("%s@%s last message -> %s", nw.thrdnum, server.host, nw.nntp_msg)
nw.reset_data_buffer()
except NNTPPermanentError as error:
# Handle login problems
block = False
@@ -827,7 +868,7 @@ class Downloader(Thread):
errormsg = T("Too many connections to server %s [%s]") % (server.host, error.msg)
if server.active:
# Don't count this for the tries (max_art_tries) on this server
self.reset_nw(nw)
self.__reset_nw(nw)
self.plan_server(server, _PENALTY_TOOMANY)
elif error.code in (502, 481, 482) and clues_too_many_ip(error.msg):
# Login from (too many) different IP addresses
@@ -877,7 +918,7 @@ class Downloader(Thread):
if penalty and (block or server.optional):
self.plan_server(server, penalty)
# Note that the article is discarded for this server if the server is not required
self.reset_nw(nw, retry_article=retry_article)
self.__reset_nw(nw, retry_article=retry_article)
return False
except Exception as err:
logging.error(
@@ -888,11 +929,11 @@ class Downloader(Thread):
)
logging.info("Traceback: ", exc_info=True)
# No reset-warning needed, above logging is sufficient
self.reset_nw(nw, retry_article=False)
self.__reset_nw(nw, retry_article=False)
return True

@synchronized(DOWNLOADER_LOCK)
def reset_nw(
def __reset_nw(
self,
nw: NewsWrapper,
reset_msg: Optional[str] = None,
@@ -900,7 +941,6 @@ class Downloader(Thread):
wait: bool = True,
count_article_try: bool = True,
retry_article: bool = True,
article: Optional["sabnzbd.nzbstuff.Article"] = None,
):
# Some warnings are errors, and not added as server.warning
if warn and reset_msg:
@@ -916,8 +956,20 @@ class Downloader(Thread):
# Make sure it is not in the readable sockets
self.remove_socket(nw)

# Discard the article request which failed
nw.discard(article, count_article_try=count_article_try, retry_article=retry_article)
if nw.article and not nw.article.nzf.nzo.removed_from_queue:
# Only some errors should count towards the total tries for each server
if count_article_try:
nw.article.tries += 1

# Do we discard, or try again for this server
if not retry_article or (not nw.server.required and nw.article.tries > cfg.max_art_tries()):
# Too many tries on this server, consider article missing
self.decode(nw.article)
nw.article.tries = 0
else:
# Allow all servers again for this article
# Do not use the article_queue, as the server could already have been disabled when we get here!
nw.article.allow_new_fetcher()

# Reset connection object
nw.hard_reset(wait)
@@ -925,6 +977,21 @@ class Downloader(Thread):
# Empty SSL info, it might change on next connect
nw.server.ssl_info = ""

def __request_article(self, nw: NewsWrapper):
try:
if sabnzbd.LOG_ALL:
logging.debug("Thread %s@%s: BODY %s", nw.thrdnum, nw.server.host, nw.article.article)
nw.body()
# Mark as ready to be read
self.add_socket(nw.nntp.fileno, nw)
except socket.error as err:
logging.info("Looks like server closed connection: %s", err)
self.__reset_nw(nw, "Server broke off connection", warn=True)
except Exception:
logging.error(T("Suspect error in downloader"))
logging.info("Traceback: ", exc_info=True)
self.__reset_nw(nw, "Server broke off connection", warn=True)

# ------------------------------------------------------------------------------
# Timed restart of servers admin.
# For each server all planned events are kept in a list.

@@ -33,7 +33,7 @@ import fnmatch
import stat
import ctypes
import random
from typing import Union, Any, Optional, BinaryIO
from typing import Union, List, Tuple, Any, Dict, Optional, BinaryIO

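The typing-import churn in this hunk repeats across most files in the compare: `list[str]`, `dict[...]` and `tuple[...]` annotations on one side become `List[str]`, `Dict[...]` and `Tuple[...]` on the other. Both spellings denote the same annotation; the typing names are simply the pre-PEP 585 aliases of the builtins. A minimal illustration with hypothetical functions:

from typing import List

def newer(names: list[str]) -> list[str]:  # builtin generics, Python 3.9+
    return sorted(names)

def older(names: List[str]) -> List[str]:  # typing aliases, also valid on older Pythons
    return sorted(names)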
try:
import win32api
@@ -295,10 +295,10 @@ def sanitize_and_trim_path(path: str) -> str:
if sabnzbd.WINDOWS:
if path.startswith("\\\\?\\UNC\\"):
new_path = "\\\\?\\UNC\\"
path = path.removeprefix("\\\\?\\UNC\\")
path = path[8:]
elif path.startswith("\\\\?\\"):
new_path = "\\\\?\\"
path = path.removeprefix("\\\\?\\")
path = path[4:]

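The hunk above trades str.removeprefix() for a fixed-width slice. For a prefix that the preceding startswith() check has already confirmed, the two are equivalent, which a short self-contained check shows (the sample path is made up):

path = "\\\\?\\UNC\\server\\share"
prefix = "\\\\?\\UNC\\"  # 8 characters once the escapes are resolved
assert path.removeprefix(prefix) == path[8:] == "server\\share"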
path = path.replace("\\", "/")
parts = path.split("/")
@@ -314,7 +314,7 @@ def sanitize_and_trim_path(path: str) -> str:
return os.path.abspath(os.path.normpath(new_path))


def sanitize_files(folder: Optional[str] = None, filelist: Optional[list[str]] = None) -> list[str]:
def sanitize_files(folder: Optional[str] = None, filelist: Optional[List[str]] = None) -> List[str]:
"""Sanitize each file in the folder or list of filepaths, return list of new names"""
logging.info("Checking if any resulting filenames need to be sanitized")
if folder:
@@ -330,7 +330,7 @@ def sanitize_files(folder: Optional[str] = None, filelist: Optional[list[str]] =
return output_filelist


def strip_extensions(name: str, ext_to_remove: tuple[str, ...] = (".nzb", ".par", ".par2")) -> str:
def strip_extensions(name: str, ext_to_remove: Tuple[str, ...] = (".nzb", ".par", ".par2")):
"""Strip extensions from a filename, without sanitizing the filename"""
name_base, ext = os.path.splitext(name)
while ext.lower() in ext_to_remove:
@@ -378,7 +378,7 @@ def real_path(loc: str, path: str) -> str:

def create_real_path(
name: str, loc: str, path: str, apply_permissions: bool = False, writable: bool = True
) -> tuple[bool, str, Optional[str]]:
) -> Tuple[bool, str, Optional[str]]:
"""When 'path' is relative, create join of 'loc' and 'path'
When 'path' is absolute, create normalized path
'name' is used for logging.
@@ -484,7 +484,7 @@ TS_RE = re.compile(r"\.(\d+)\.(ts$)", re.I)

def build_filelists(
workdir: Optional[str], workdir_complete: Optional[str] = None, check_both: bool = False, check_rar: bool = True
) -> tuple[list[str], list[str], list[str], list[str]]:
) -> Tuple[List[str], List[str], List[str], List[str]]:
"""Build filelists, if workdir_complete has files, ignore workdir.
Optionally scan both directories.
Optionally test content to establish RAR-ness
@@ -535,7 +535,7 @@ def safe_fnmatch(f: str, pattern: str) -> bool:
return False


def globber(path: str, pattern: str = "*") -> list[str]:
def globber(path: str, pattern: str = "*") -> List[str]:
"""Return matching base file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
@@ -543,7 +543,7 @@ def globber(path: str, pattern: str = "*") -> list[str]:
return []


def globber_full(path: str, pattern: str = "*") -> list[str]:
def globber_full(path: str, pattern: str = "*") -> List[str]:
"""Return matching full file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
@@ -572,7 +572,7 @@ def is_valid_script(basename: str) -> bool:
return basename in list_scripts(default=False, none=False)


def list_scripts(default: bool = False, none: bool = True) -> list[str]:
def list_scripts(default: bool = False, none: bool = True) -> List[str]:
"""Return a list of script names, optionally with 'Default' added"""
lst = []
path = sabnzbd.cfg.script_dir.get_path()
@@ -613,7 +613,7 @@ def make_script_path(script: str) -> Optional[str]:
return script_path


def get_admin_path(name: str, future: bool) -> str:
def get_admin_path(name: str, future: bool):
"""Return news-style full path to job-admin folder of names job
or else the old cache path
"""
@@ -660,7 +660,7 @@ def set_permissions(path: str, recursive: bool = True):
UNWANTED_FILE_PERMISSIONS = stat.S_ISUID | stat.S_ISGID | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH


def removexbits(path: str, custom_permissions: Optional[int] = None):
def removexbits(path: str, custom_permissions: int = None):
"""Remove all the x-bits from files, respecting current or custom permissions"""
if os.path.isfile(path):
# Use custom permissions as base
@@ -783,7 +783,7 @@ def get_unique_filename(path: str) -> str:


@synchronized(DIR_LOCK)
def listdir_full(input_dir: str, recursive: bool = True) -> list[str]:
def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
"""List all files in dirs and sub-dirs"""
filelist = []
for root, dirs, files in os.walk(input_dir):
@@ -797,7 +797,7 @@ def listdir_full(input_dir: str, recursive: bool = True) -> list[str]:


@synchronized(DIR_LOCK)
def move_to_path(path: str, new_path: str) -> tuple[bool, Optional[str]]:
def move_to_path(path: str, new_path: str) -> Tuple[bool, Optional[str]]:
"""Move a file to a new path, optionally give unique filename
Return (ok, new_path)
"""
@@ -990,7 +990,7 @@ def remove_all(path: str, pattern: str = "*", keep_folder: bool = False, recursi
##############################################################################
# Diskfree
##############################################################################
def diskspace_base(dir_to_check: str) -> tuple[float, float]:
def diskspace_base(dir_to_check: str) -> Tuple[float, float]:
"""Return amount of free and used diskspace in GBytes"""
# Find first folder level that exists in the path
x = "x"
@@ -1024,7 +1024,7 @@ def diskspace_base(dir_to_check: str) -> tuple[float, float]:


@conditional_cache(cache_time=10)
def diskspace(force: bool = False) -> dict[str, tuple[float, float]]:
def diskspace(force: bool = False) -> Dict[str, Tuple[float, float]]:
"""Wrapper to keep results cached by conditional_cache
If called with force=True, the wrapper will clear the results"""
return {
@@ -1033,7 +1033,7 @@ def diskspace(force: bool = False) -> dict[str, tuple[float, float]]:
}


def get_new_id(prefix: str, folder: str, check_list: Optional[list] = None) -> str:
def get_new_id(prefix, folder, check_list=None):
"""Return unique prefixed admin identifier within folder
optionally making sure that id is not in the check_list.
"""
@@ -1054,7 +1054,7 @@ def get_new_id(prefix: str, folder: str, check_list: Optional[list] = None) -> s
raise IOError


def save_data(data: Any, _id: str, path: str, do_pickle: bool = True, silent: bool = False):
def save_data(data, _id, path, do_pickle=True, silent=False):
"""Save data to a diskfile"""
if not silent:
logging.debug("[%s] Saving data for %s in %s", sabnzbd.misc.caller_name(), _id, path)
@@ -1081,7 +1081,7 @@ def save_data(data: Any, _id: str, path: str, do_pickle: bool = True, silent: bo
time.sleep(0.1)


def load_data(data_id: str, path: str, remove: bool = True, do_pickle: bool = True, silent: bool = False) -> Any:
def load_data(data_id, path, remove=True, do_pickle=True, silent=False):
"""Read data from disk file"""
path = os.path.join(path, data_id)

@@ -1129,7 +1129,7 @@ def save_admin(data: Any, data_id: str):
save_data(data, data_id, sabnzbd.cfg.admin_dir.get_path())


def load_admin(data_id: str, remove: bool = False, silent: bool = False) -> Any:
def load_admin(data_id: str, remove=False, silent=False) -> Any:
"""Read data in admin folder in specified format"""
logging.debug("[%s] Loading data for %s", sabnzbd.misc.caller_name(), data_id)
return load_data(data_id, sabnzbd.cfg.admin_dir.get_path(), remove=remove, silent=silent)
@@ -1196,7 +1196,7 @@ def purge_log_files():
logging.debug("Finished puring log files")
|
||||


def directory_is_writable_with_file(mydir: str, myfilename: str) -> bool:
def directory_is_writable_with_file(mydir, myfilename):
filename = os.path.join(mydir, myfilename)
if os.path.exists(filename):
try:
@@ -1253,7 +1253,7 @@ def check_filesystem_capabilities(test_dir: str) -> bool:
return allgood


def get_win_drives() -> list[str]:
def get_win_drives() -> List[str]:
"""Return list of detected drives, adapted from:
http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490
"""
@@ -1281,7 +1281,7 @@ PATHBROWSER_JUNKFOLDERS = (
)


def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False) -> list[dict[str, str]]:
def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False) -> List[Dict[str, str]]:
"""Returns a list of dictionaries with the folders and folders contained at the given path
Give the empty string as the path to list the contents of the root path
under Unix this means "/", on Windows this will be a list of drive letters

@@ -23,9 +23,10 @@ import socket
import threading
import time
import logging
import functools
from dataclasses import dataclass
from more_itertools import roundrobin
from typing import Union, Optional
from typing import Tuple, Union, Optional

import sabnzbd.cfg as cfg
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT
@@ -60,7 +61,7 @@ class AddrInfo:
type: socket.SocketKind
proto: int
canonname: str
sockaddr: Union[tuple[str, int], tuple[str, int, int, int]]
sockaddr: Union[Tuple[str, int], Tuple[str, int, int, int]]
ipaddress: str = ""
port: int = 0
connection_time: float = 0.0

@@ -34,7 +34,7 @@ import copy
from random import randint
from xml.sax.saxutils import escape
from Cheetah.Template import Template
from typing import Optional, Callable, Union, Any
from typing import Optional, Callable, Union, Any, Dict, List
from guessit.api import properties as guessit_properties

import sabnzbd
@@ -264,7 +264,7 @@ def check_hostname():
COOKIE_SECRET = str(randint(1000, 100000) * os.getpid())


def remote_ip_from_xff(xff_ips: list[str]) -> str:
def remote_ip_from_xff(xff_ips: List[str]) -> str:
# Per MDN docs, the first non-local/non-trusted IP (rtl) is our "client"
# However, it's possible that all IPs are local/trusted, so we may also
# return the first ip in the list as it "should" be the client
@@ -399,7 +399,7 @@ def check_apikey(kwargs):
return _MSG_APIKEY_INCORRECT


def template_filtered_response(file: str, search_list: dict[str, Any]):
def template_filtered_response(file: str, search_list: Dict[str, Any]):
"""Wrapper for Cheetah response"""
# We need a copy, because otherwise source-dicts might be modified
search_list_copy = copy.deepcopy(search_list)
@@ -558,7 +558,7 @@ class Wizard:
info["password"] = ""
info["connections"] = ""
info["ssl"] = 1
info["ssl_verify"] = 3
info["ssl_verify"] = 2
else:
# Sort servers to get the first enabled one
server_names = sorted(
@@ -906,14 +906,12 @@ SPECIAL_VALUE_LIST = (
"max_foldername_length",
"url_base",
"receive_threads",
"assembler_max_queue_size",
"switchinterval",
"direct_unpack_threads",
"selftest_host",
"ssdp_broadcast_interval",
"unrar_parameters",
"outgoing_nntp_ip",
"pipelining_requests",
)
SPECIAL_LIST_LIST = (
"rss_odd_titles",

@@ -26,6 +26,7 @@ import socket
import ssl
import time
import threading
from typing import Dict

import sabctools
import sabnzbd
@@ -43,7 +44,7 @@ NR_CONNECTIONS = 5
TIME_LIMIT = 3


def internetspeed_worker(secure_sock: ssl.SSLSocket, socket_speed: dict[ssl.SSLSocket, float]):
def internetspeed_worker(secure_sock: ssl.SSLSocket, socket_speed: Dict[ssl.SSLSocket, float]):
"""Worker to perform the requests in parallel"""
secure_sock.sendall(TEST_REQUEST.encode())
empty_buffer = memoryview(sabctools.bytearray_malloc(BUFFER_SIZE))

@@ -41,7 +41,7 @@ import math
import rarfile
from threading import Thread
from collections.abc import Iterable
from typing import Union, Any, AnyStr, Optional, Collection
from typing import Union, Tuple, Any, AnyStr, Optional, List, Dict, Collection

import sabnzbd
import sabnzbd.getipaddress
@@ -178,7 +178,7 @@ def is_none(inp: Any) -> bool:
return not inp or (isinstance(inp, str) and inp.lower() == "none")


def clean_comma_separated_list(inp: Any) -> list[str]:
def clean_comma_separated_list(inp: Any) -> List[str]:
"""Return a list of stripped values from a string or list, empty ones removed"""
result_ids = []
if isinstance(inp, str):
@@ -190,7 +190,7 @@ def clean_comma_separated_list(inp: Any) -> list[str]:
return result_ids


def cmp(x: Any, y: Any) -> int:
def cmp(x, y):
"""
Replacement for built-in function cmp that was removed in Python 3

@@ -217,7 +217,7 @@ def cat_pp_script_sanitizer(
cat: Optional[str] = None,
pp: Optional[Union[int, str]] = None,
script: Optional[str] = None,
) -> tuple[Optional[Union[int, str]], Optional[str], Optional[str]]:
) -> Tuple[Optional[Union[int, str]], Optional[str], Optional[str]]:
"""Basic sanitizer from outside input to a bit more predictable values"""
# * and Default are valid values
if safe_lower(cat) in ("", "none"):
@@ -234,7 +234,7 @@ def cat_pp_script_sanitizer(
return cat, pp, script


def name_to_cat(fname: str, cat: Optional[str] = None) -> tuple[str, Optional[str]]:
def name_to_cat(fname, cat=None):
"""Retrieve category from file name, but only if "cat" is None."""
if cat is None and fname.startswith("{{"):
n = fname.find("}}")
@@ -246,9 +246,7 @@ def name_to_cat(fname: str, cat: Optional[str] = None) -> tuple[str, Optional[st
return fname, cat


def cat_to_opts(
cat: Optional[str], pp: Optional[int] = None, script: Optional[str] = None, priority: Optional[int] = None
) -> tuple[str, int, str, int]:
def cat_to_opts(cat, pp=None, script=None, priority=None) -> Tuple[str, int, str, int]:
"""Derive options from category, if options not already defined.
Specified options have priority over category-options.
If no valid category is given, special category '*' will supply default values
@@ -281,7 +279,7 @@ def cat_to_opts(
return cat, pp, script, priority


def pp_to_opts(pp: Optional[int]) -> tuple[bool, bool, bool]:
def pp_to_opts(pp: Optional[int]) -> Tuple[bool, bool, bool]:
"""Convert numeric processing options to (repair, unpack, delete)"""
# Convert the pp to an int
pp = int_conv(pp)
@@ -333,12 +331,12 @@ _wildcard_to_regex = {
}


def wildcard_to_re(text: str) -> str:
def wildcard_to_re(text):
"""Convert plain wildcard string (with '*' and '?') to regex."""
return "".join([_wildcard_to_regex.get(ch, ch) for ch in text])


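wildcard_to_re maps each character through the _wildcard_to_regex table to turn plain '*'/'?' wildcards into a regex. As an illustrative usage of the same idea, fnmatch.translate from the standard library performs the equivalent conversion (the sample filenames are made up):

import fnmatch
import re

# fnmatch.translate is the stdlib analogue of the character-mapping above
pattern = re.compile(fnmatch.translate("report-??.nzb"))
assert pattern.match("report-01.nzb")       # '??' matches exactly two characters
assert not pattern.match("report-1.nzb")    # one character is not enough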
def convert_filter(text: str) -> Optional[re.Pattern]:
def convert_filter(text):
"""Return compiled regex.
If string starts with re: it's a real regex
else quote all regex specials, replace '*' by '.*'
@@ -355,7 +353,7 @@ def convert_filter(text: str) -> Optional[re.Pattern]:
return None


def cat_convert(cat: Optional[str]) -> Optional[str]:
def cat_convert(cat):
"""Convert indexer's category/group-name to user categories.
If no match found, but indexer-cat equals user-cat, then return user-cat
If no match found, but the indexer-cat starts with the user-cat, return user-cat
@@ -399,7 +397,7 @@ _SERVICE_KEY = "SYSTEM\\CurrentControlSet\\services\\"
_SERVICE_PARM = "CommandLine"


def get_serv_parms(service: str) -> list[str]:
def get_serv_parms(service):
"""Get the service command line parameters from Registry"""
service_parms = []
try:
@@ -418,7 +416,7 @@ def get_serv_parms(service: str) -> list[str]:
return service_parms


def set_serv_parms(service: str, args: list) -> bool:
def set_serv_parms(service, args):
"""Set the service command line parameters in Registry"""
serv = []
for arg in args:
@@ -446,7 +444,7 @@ def get_from_url(url: str) -> Optional[str]:
return None


def convert_version(text: str) -> tuple[int, bool]:
def convert_version(text):
"""Convert version string to numerical value and a testversion indicator"""
version = 0
test = True
@@ -553,7 +551,7 @@ def check_latest_version():
)


def upload_file_to_sabnzbd(url: str, fp: str):
def upload_file_to_sabnzbd(url, fp):
"""Function for uploading nzbs to a running SABnzbd instance"""
try:
fp = urllib.parse.quote_plus(fp)
@@ -646,7 +644,7 @@ def to_units(val: Union[int, float], postfix="") -> str:
return f"{sign}{val:.{decimals}f}{units}"


def caller_name(skip: int = 2) -> str:
def caller_name(skip=2):
"""Get a name of a caller in the format module.method
Originally used: https://gist.github.com/techtonik/2151727
Adapted for speed by using sys calls directly
@@ -684,7 +682,7 @@ def exit_sab(value: int):
os._exit(value)


def split_host(srv: Optional[str]) -> tuple[Optional[str], Optional[int]]:
def split_host(srv):
"""Split host:port notation, allowing for IPV6"""
if not srv:
return None, None
@@ -706,7 +704,7 @@ def split_host(srv: Optional[str]) -> tuple[Optional[str], Optional[int]]:
return out[0], port


def get_cache_limit() -> str:
def get_cache_limit():
"""Depending on OS, calculate cache limits.
In ArticleCache it will make sure we stay
within system limits for 32/64 bit
@@ -744,7 +742,7 @@ def get_cache_limit() -> str:
return ""


def get_windows_memory() -> int:
def get_windows_memory():
"""Use ctypes to extract available memory"""

class MEMORYSTATUSEX(ctypes.Structure):
@@ -770,14 +768,14 @@ def get_windows_memory() -> int:
return stat.ullTotalPhys


def get_macos_memory() -> float:
def get_macos_memory():
"""Use system-call to extract total memory on macOS"""
system_output = run_command(["sysctl", "hw.memsize"])
return float(system_output.split()[1])


@conditional_cache(cache_time=3600)
def get_cpu_name() -> Optional[str]:
def get_cpu_name():
"""Find the CPU name (which needs a different method per OS), and return it
If none found, return platform.platform()"""

@@ -877,7 +875,7 @@ def on_cleanup_list(filename: str, skip_nzb: bool = False) -> bool:
return False


def memory_usage() -> Optional[str]:
def memory_usage():
try:
# Probably only works on Linux because it uses /proc/<pid>/statm
with open("/proc/%d/statm" % os.getpid()) as t:
@@ -899,7 +897,7 @@ except Exception:
_HAVE_STATM = _PAGE_SIZE and memory_usage()


def loadavg() -> str:
def loadavg():
"""Return 1, 5 and 15 minute load average of host or "" if not supported"""
p = ""
if not sabnzbd.WINDOWS and not sabnzbd.MACOS:
@@ -974,7 +972,7 @@ def bool_conv(value: Any) -> bool:
return bool(int_conv(value))


def create_https_certificates(ssl_cert: str, ssl_key: str) -> bool:
def create_https_certificates(ssl_cert, ssl_key):
"""Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'"""
try:
from sabnzbd.utils.certgen import generate_key, generate_local_cert
@@ -990,7 +988,7 @@ def create_https_certificates(ssl_cert: str, ssl_key: str) -> bool:
return True


def get_all_passwords(nzo) -> list[str]:
def get_all_passwords(nzo) -> List[str]:
"""Get all passwords, from the NZB, meta and password file. In case a working password is
already known, try it first."""
passwords = []
@@ -1053,7 +1051,7 @@ def is_sample(filename: str) -> bool:
return bool(re.search(RE_SAMPLE, filename))


def find_on_path(targets: Union[str, tuple[str, ...]]) -> Optional[str]:
def find_on_path(targets):
"""Search the PATH for a program and return full path"""
if sabnzbd.WINDOWS:
paths = os.getenv("PATH").split(";")
@@ -1172,7 +1170,7 @@ def is_local_addr(ip: str) -> bool:
return is_lan_addr(ip)


def ip_extract() -> list[str]:
def ip_extract() -> List[str]:
"""Return list of IP addresses of this system"""
ips = []
program = find_on_path("ip")
@@ -1217,7 +1215,7 @@ def get_base_url(url: str) -> str:
return ""


def match_str(text: AnyStr, matches: tuple[AnyStr, ...]) -> Optional[AnyStr]:
def match_str(text: AnyStr, matches: Tuple[AnyStr, ...]) -> Optional[AnyStr]:
"""Return first matching element of list 'matches' in 'text', otherwise None"""
text = text.lower()
for match in matches:
@@ -1226,7 +1224,7 @@ def match_str(text: AnyStr, matches: tuple[AnyStr, ...]) -> Optional[AnyStr]:
return None


def recursive_html_escape(input_dict_or_list: Union[dict[str, Any], list], exclude_items: tuple[str, ...] = ()):
def recursive_html_escape(input_dict_or_list: Union[Dict[str, Any], List], exclude_items: Tuple[str, ...] = ()):
"""Recursively update the input_dict in-place with html-safe values"""
if isinstance(input_dict_or_list, (dict, list)):
if isinstance(input_dict_or_list, dict):
@@ -1247,7 +1245,7 @@ def recursive_html_escape(input_dict_or_list: Union[dict[str, Any], list], exclu
raise ValueError("Expected dict or str, got %s" % type(input_dict_or_list))


def list2cmdline_unrar(lst: list[str]) -> str:
def list2cmdline_unrar(lst: List[str]) -> str:
"""convert list to a unrar.exe-compatible command string
Unrar uses "" instead of \" to escape the double quote"""
nlst = []
@@ -1261,9 +1259,7 @@ def list2cmdline_unrar(lst: list[str]) -> str:
return " ".join(nlst)


def build_and_run_command(
command: list[str], windows_unrar_command: bool = False, text_mode: bool = True, **kwargs
) -> subprocess.Popen:
def build_and_run_command(command: List[str], windows_unrar_command: bool = False, text_mode: bool = True, **kwargs):
"""Builds and then runs command with necessary flags and optional
IONice and Nice commands. Optional Popen arguments can be supplied.
On Windows we need to run our own list2cmdline for Unrar.
@@ -1330,7 +1326,7 @@ def build_and_run_command(
return subprocess.Popen(command, **popen_kwargs)


def run_command(cmd: list[str], **kwargs) -> str:
def run_command(cmd: List[str], **kwargs):
"""Run simple external command and return output as a string."""
with build_and_run_command(cmd, **kwargs) as p:
txt = p.stdout.read()
@@ -1363,7 +1359,7 @@ def set_socks5_proxy():
socket.socket = socks.socksocket


def set_https_verification(value: bool) -> bool:
def set_https_verification(value):
"""Set HTTPS-verification state while returning current setting
False = disable verification
"""
@@ -1385,7 +1381,7 @@ def request_repair():
pass


def check_repair_request() -> bool:
def check_repair_request():
"""Return True if repair request found, remove afterwards"""
path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
if os.path.exists(path):
@@ -1518,8 +1514,8 @@ def convert_sorter_settings():
min_size: Union[str|int] = "50M"
multipart_label: Optional[str] = ""
sort_string: str
sort_cats: list[str]
sort_type: list[int]
sort_cats: List[str]
sort_type: List[int]
is_active: bool = 1
}

@@ -1579,7 +1575,7 @@ def convert_sorter_settings():
def convert_history_retention():
"""Convert single-option to the split history retention setting"""
if "d" in cfg.history_retention():
days_to_keep = int_conv(cfg.history_retention().strip().removesuffix("d"))
days_to_keep = int_conv(cfg.history_retention().strip()[:-1])
cfg.history_retention_option.set("days-delete")
cfg.history_retention_number.set(days_to_keep)
else:
@@ -1619,7 +1615,7 @@ class SABRarFile(rarfile.RarFile):
self._file_parser._info_list.append(rar_obj)
self._file_parser._info_map[rar_obj.filename.rstrip("/")] = rar_obj

def filelist(self) -> list[str]:
def filelist(self):
"""Return list of filenames in archive."""
return [f.filename for f in self.infolist() if not f.isdir()]


@@ -29,7 +29,7 @@ import io
import shutil
import functools
import rarfile
from typing import BinaryIO, Optional, Any, Union
from typing import Tuple, List, BinaryIO, Optional, Dict, Any, Union, Set

import sabnzbd
from sabnzbd.encoding import correct_unknown_encoding, ubtou
@@ -64,7 +64,6 @@ from sabnzbd.filesystem import (
SEVENMULTI_RE,
is_size,
get_basename,
create_all_dirs,
)
from sabnzbd.nzbstuff import NzbObject
import sabnzbd.cfg as cfg
@@ -201,7 +200,7 @@ ENV_NZO_FIELDS = [

def external_processing(
extern_proc: str, nzo: NzbObject, complete_dir: str, nicename: str, status: int
) -> tuple[str, int]:
) -> Tuple[str, int]:
"""Run a user postproc script, return console output and exit value"""
failure_url = nzo.nzo_info.get("failure", "")
# Items can be bool or null, causing POpen to fail
@@ -263,12 +262,12 @@ def unpacker(
nzo: NzbObject,
workdir_complete: str,
one_folder: bool,
joinables: list[str] = [],
rars: list[str] = [],
sevens: list[str] = [],
ts: list[str] = [],
joinables: List[str] = [],
rars: List[str] = [],
sevens: List[str] = [],
ts: List[str] = [],
depth: int = 0,
) -> tuple[Union[int, bool], list[str]]:
) -> Tuple[Union[int, bool], List[str]]:
"""Do a recursive unpack from all archives in 'download_path' to 'workdir_complete'"""
if depth > 2:
# Prevent going to deep down the rabbit-hole
@@ -360,7 +359,7 @@ def unpacker(
##############################################################################
# Filejoin Functions
##############################################################################
def match_ts(file: str) -> tuple[str, int]:
def match_ts(file: str) -> Tuple[str, int]:
"""Return True if file is a joinable TS file"""
match = TS_RE.search(file)
if not match:
@@ -375,7 +374,7 @@ def match_ts(file: str) -> tuple[str, int]:
return setname, num


def clean_up_joinables(names: list[str]):
def clean_up_joinables(names: List[str]):
"""Remove joinable files and their .1 backups"""
for name in names:
if os.path.exists(name):
@@ -404,7 +403,7 @@ def get_seq_number(name: str) -> int:
return 0


def file_join(nzo: NzbObject, workdir_complete: str, joinables: list[str]) -> tuple[bool, list[str]]:
def file_join(nzo: NzbObject, workdir_complete: str, joinables: List[str]) -> Tuple[bool, List[str]]:
"""Join and joinable files in 'workdir' to 'workdir_complete' and
when successful, delete originals
"""
@@ -495,7 +494,7 @@ def file_join(nzo: NzbObject, workdir_complete: str, joinables: list[str]) -> tu
##############################################################################
# (Un)Rar Functions
##############################################################################
def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: list[str]) -> tuple[int, list[str]]:
def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: List[str]) -> Tuple[int, List[str]]:
"""Unpack multiple sets 'rars' of RAR files from 'download_path' to 'workdir_complete.
When 'delete' is set, originals will be deleted.
When 'one_folder' is set, all files will be in a single folder
@@ -617,7 +616,7 @@ def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: li

def rar_extract(
rarfile_path: str, numrars: int, one_folder: bool, nzo: NzbObject, setname: str, extraction_path: str
) -> tuple[int, list[str], list[str]]:
) -> Tuple[int, List[str], List[str]]:
"""Unpack single rar set 'rarfile' to 'extraction_path',
with password tries
Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
@@ -627,12 +626,6 @@ def rar_extract(
rars = []
passwords = get_all_passwords(nzo)

# Sanity check, does the folder exist? Could be removed by aborted Direct Unpack
if not os.path.exists(extraction_path):
# Similar to prepare_extraction_path
extraction_path = create_all_dirs(extraction_path, apply_permissions=True)
logging.info("Extraction path (re)created because it was missing: %s", extraction_path)

for password in passwords:
if password:
logging.debug('Trying unrar with password "%s"', password)
@@ -649,14 +642,14 @@ def rar_extract(

def rar_extract_core(
rarfile_path: str, numrars: int, one_folder: bool, nzo: NzbObject, setname: str, extraction_path: str, password: str
) -> tuple[int, list[str], list[str]]:
) -> Tuple[int, List[str], List[str]]:
"""Unpack single rar set 'rarfile_path' to 'extraction_path'
Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
"""
start = time.time()

logging.debug("Extraction path: %s", extraction_path)
logging.debug("Found rar version: %s", rarfile.get_rar_version(rarfile_path))
logging.debug("Found rar version: %s", rarfile.is_rarfile(rarfile_path))

if password:
password_command = "-p%s" % password
@@ -873,7 +866,7 @@ def rar_extract_core(
##############################################################################
# 7Zip Functions
##############################################################################
def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: list[str]) -> tuple[bool, list[str]]:
def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: List[str]):
"""Unpack multiple sets '7z' of 7Zip files from 'download_path' to 'workdir_complete.
When 'delete' is set, originals will be deleted.
"""
@@ -921,7 +914,7 @@ def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: lis

def seven_extract(
nzo: NzbObject, seven_path: str, seven_set: str, extraction_path: str, one_folder: bool
) -> tuple[int, list[str]]:
) -> Tuple[int, List[str]]:
"""Unpack single set 'sevenset' to 'extraction_path', with password tries
Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, sevens
"""
@@ -945,7 +938,7 @@ def seven_extract(

def seven_extract_core(
nzo: NzbObject, seven_path: str, extraction_path: str, seven_set: str, one_folder: bool, password: str
) -> tuple[int, list[str]]:
) -> Tuple[int, List[str]]:
"""Unpack single 7Z set 'sevenset' to 'extraction_path'
Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, message
"""
@@ -1011,7 +1004,7 @@ def seven_extract_core(
##############################################################################
# PAR2 Functions
##############################################################################
def par2_repair(nzo: NzbObject, setname: str) -> tuple[bool, bool]:
def par2_repair(nzo: NzbObject, setname: str) -> Tuple[bool, bool]:
"""Try to repair a set, return readd and correctness"""
# Check which of the files exists
for new_par in nzo.extrapars[setname]:
@@ -1124,8 +1117,8 @@ def par2_repair(nzo: NzbObject, setname: str) -> tuple[bool, bool]:


def par2cmdline_verify(
parfile: str, nzo: NzbObject, setname: str, joinables: list[str]
) -> tuple[bool, bool, list[str], list[str]]:
parfile: str, nzo: NzbObject, setname: str, joinables: List[str]
) -> Tuple[bool, bool, List[str], List[str]]:
"""Run par2 on par-set"""
used_joinables = []
used_for_repair = []
@@ -1410,7 +1403,7 @@ def par2cmdline_verify(
return finished, readd, used_joinables, used_for_repair


def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: dict[str, Any] = {}) -> Optional[dict[str, Any]]:
def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: Dict[str, Any] = {}) -> Optional[Dict[str, Any]]:
"""Modify the environment for pp-scripts with extra information
macOS: Return copy of environment without PYTHONPATH and PYTHONHOME
other: return None
@@ -1467,7 +1460,7 @@ def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: dict[str, Any]
return env


def rar_volumelist(rarfile_path: str, password: str, known_volumes: list[str]) -> list[str]:
def rar_volumelist(rarfile_path: str, password: str, known_volumes: List[str]) -> List[str]:
"""List volumes that are part of this rarset
and merge them with parsed paths list, removing duplicates.
We assume RarFile is right and use parsed paths as backup.
@@ -1523,7 +1516,7 @@ def quick_check_set(setname: str, nzo: NzbObject) -> bool:
result = True
nzf_list = nzo.finished_files
renames = {}
found_paths: set[str] = set()
found_paths: Set[str] = set()

# Files to ignore
ignore_ext = cfg.quick_check_ext_ignore()
@@ -1597,7 +1590,7 @@ def quick_check_set(setname: str, nzo: NzbObject) -> bool:
return result


def unrar_check(rar: str) -> tuple[int, bool]:
def unrar_check(rar: str) -> Tuple[int, bool]:
"""Return version number of unrar, where "5.01" returns 501
Also return whether an original version is found
(version, original)
@@ -1685,7 +1678,7 @@ def is_sfv_file(myfile: str) -> bool:
return sfv_info_line_counter >= 1


def sfv_check(sfvs: list[str], nzo: NzbObject) -> bool:
def sfv_check(sfvs: List[str], nzo: NzbObject) -> bool:
"""Verify files using SFV files"""
# Update status
nzo.status = Status.VERIFYING
@@ -1769,7 +1762,7 @@ def sfv_check(sfvs: list[str], nzo: NzbObject) -> bool:
return result


def parse_sfv(sfv_filename: str) -> dict[str, bytes]:
def parse_sfv(sfv_filename):
"""Parse SFV file and return dictionary of crc32's and filenames"""
results = {}
with open(sfv_filename, mode="rb") as sfv_list:
@@ -1794,12 +1787,12 @@ def add_time_left(perc: float, start_time: Optional[float] = None, time_used: Op
return ""


def pre_queue(nzo: NzbObject, pp: str, cat: str) -> list[Any]:
def pre_queue(nzo: NzbObject, pp, cat):
"""Run pre-queue script (if any) and process results.
pp and cat are supplied separate since they can change.
"""

def fix(p: Any) -> str:
def fix(p):
# If added via API, some items can still be "None" (as a string)
if is_none(p):
return ""
@@ -1893,7 +1886,7 @@ class SevenZip:
if not is_sevenfile(self.path):
raise TypeError("File is not a 7zip file")

def namelist(self) -> list[str]:
def namelist(self) -> List[str]:
"""Return list of names in 7Zip"""
names = []
command = [SEVENZIP_COMMAND, "l", "-p", "-y", "-slt", "-sccUTF-8", self.path]
@@ -1916,6 +1909,6 @@ class SevenZip:
p.wait()
return data

def close(self) -> None:
def close(self):
"""Close file"""
pass

@@ -21,22 +21,20 @@ sabnzbd.newswrapper

import errno
import socket
import threading
from collections import deque
from selectors import EVENT_READ, EVENT_WRITE
from threading import Thread
import time
import logging
import ssl
from typing import Optional, Tuple, Union, Callable

import sabctools
from typing import Optional, Tuple, Union

import sabnzbd
import sabnzbd.cfg
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT, NNTP_BUFFER_SIZE, Status, FORCE_PRIORITY
from sabnzbd.encoding import utob
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT, NNTP_BUFFER_SIZE, NTTP_MAX_BUFFER_SIZE
from sabnzbd.encoding import utob, ubtou
from sabnzbd.get_addrinfo import AddrInfo
from sabnzbd.decorators import synchronized, DOWNLOADER_LOCK
from sabnzbd.misc import int_conv

# Set pre-defined socket timeout
socket.setdefaulttimeout(DEF_NETWORKING_TIMEOUT)
@@ -59,8 +57,10 @@ class NewsWrapper:
"thrdnum",
"blocking",
"timeout",
"decoder",
"send_buffer",
"article",
"data",
"data_view",
"data_position",
"nntp",
"connected",
"user_sent",
@@ -69,11 +69,6 @@ class NewsWrapper:
"user_ok",
"pass_ok",
"force_login",
"next_request",
"concurrent_requests",
"_response_queue",
"selector_events",
"lock",
)

def __init__(self, server, thrdnum, block=False):
@@ -82,9 +77,11 @@ class NewsWrapper:
self.blocking: bool = block

self.timeout: Optional[float] = None
self.article: Optional[sabnzbd.nzbstuff.Article] = None

self.decoder: Optional[sabctools.Decoder] = None
self.send_buffer = b""
self.data: Optional[bytearray] = None
self.data_view: Optional[memoryview] = None
self.data_position: int = 0

self.nntp: Optional[NNTP] = None

@@ -96,22 +93,14 @@ class NewsWrapper:
self.force_login: bool = False
self.group: Optional[str] = None

# Command queue and concurrency
self.next_request: Optional[tuple[bytes, Optional["sabnzbd.nzbstuff.Article"]]] = None
self.concurrent_requests: threading.BoundedSemaphore = threading.BoundedSemaphore(
sabnzbd.cfg.pipelining_requests()
)
self._response_queue: deque[Optional[sabnzbd.nzbstuff.Article]] = deque()
self.selector_events = 0
self.lock: threading.Lock = threading.Lock()
@property
def status_code(self) -> Optional[int]:
if self.data_position >= 3:
return int_conv(self.data[:3])

@property
def article(self) -> Optional["sabnzbd.nzbstuff.Article"]:
"""The article currently being downloaded"""
with self.lock:
if self._response_queue:
return self._response_queue[0]
return None
def nntp_msg(self) -> str:
return ubtou(self.data[: self.data_position]).strip()

def init_connect(self):
|
||||
"""Setup the connection in NNTP object"""
|
||||
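The alpha-side code bounds in-flight NNTP requests with a BoundedSemaphore and pairs responses to requests through a FIFO deque, relying on NNTP answering commands in order. A minimal sketch of that pairing; PIPELINING_REQUESTS stands in for the sabnzbd.cfg.pipelining_requests() value shown above:

import threading
from collections import deque

PIPELINING_REQUESTS = 3  # stand-in for sabnzbd.cfg.pipelining_requests()

in_flight = threading.BoundedSemaphore(PIPELINING_REQUESTS)
response_queue: deque = deque()


def send_request(payload):
    # Blocks once PIPELINING_REQUESTS commands are unanswered
    in_flight.acquire()
    response_queue.append(payload)  # remember what we asked for, in order


def on_response():
    # NNTP is strictly ordered: the oldest request owns this response
    request = response_queue.popleft()
    in_flight.release()
    return request


send_request("BODY <a@example>")
send_request("BODY <b@example>")
assert on_response() == "BODY <a@example>"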
@@ -120,15 +109,13 @@ class NewsWrapper:
            raise socket.error(errno.EADDRNOTAVAIL, T("Invalid server address."))

        # Construct buffer and NNTP object
        self.decoder = sabctools.Decoder(NNTP_BUFFER_SIZE)
        self.data = sabctools.bytearray_malloc(NNTP_BUFFER_SIZE)
        self.data_view = memoryview(self.data)
        self.reset_data_buffer()
        self.nntp = NNTP(self, self.server.addrinfo)
        self.timeout = time.time() + self.server.timeout

        # On connect the first "response" will be 200 Welcome
        self._response_queue.append(None)
        self.concurrent_requests.acquire()

    def finish_connect(self, code: int, message: str) -> None:
    def finish_connect(self, code: int):
        """Perform login options"""
        if not (self.server.username or self.server.password or self.force_login):
            self.connected = True
@@ -146,10 +133,11 @@ class NewsWrapper:
            self.pass_ok = False

            if code in (400, 500, 502):
                raise NNTPPermanentError(message, code)
                raise NNTPPermanentError(self.nntp_msg, code)
            elif not self.user_sent:
                command = utob("authinfo user %s\r\n" % self.server.username)
                self.queue_command(command)
                self.nntp.sock.sendall(command)
                self.reset_data_buffer()
                self.user_sent = True
            elif not self.user_ok:
                if code == 381:
@@ -163,254 +151,98 @@ class NewsWrapper:

        if self.user_ok and not self.pass_sent:
            command = utob("authinfo pass %s\r\n" % self.server.password)
            self.queue_command(command)
            self.nntp.sock.sendall(command)
            self.reset_data_buffer()
            self.pass_sent = True
        elif self.user_ok and not self.pass_ok:
            if code != 281:
                # Assume that login failed (code 481 or other)
                raise NNTPPermanentError(message, code)
                raise NNTPPermanentError(self.nntp_msg, code)
            else:
                self.connected = True

        self.timeout = time.time() + self.server.timeout

    def queue_command(
        self,
        command: bytes,
        article: Optional["sabnzbd.nzbstuff.Article"] = None,
    ) -> None:
        """Add a command to the command queue"""
        self.next_request = command, article

    def body(self, article: "sabnzbd.nzbstuff.Article") -> tuple[bytes, "sabnzbd.nzbstuff.Article"]:
    def body(self):
        """Request the body of the article"""
        self.timeout = time.time() + self.server.timeout
        if article.nzf.nzo.precheck:
        if self.article.nzf.nzo.precheck:
            if self.server.have_stat:
                command = utob("STAT <%s>\r\n" % article.article)
                command = utob("STAT <%s>\r\n" % self.article.article)
            else:
                command = utob("HEAD <%s>\r\n" % article.article)
                command = utob("HEAD <%s>\r\n" % self.article.article)
        elif self.server.have_body:
            command = utob("BODY <%s>\r\n" % article.article)
            command = utob("BODY <%s>\r\n" % self.article.article)
        else:
            command = utob("ARTICLE <%s>\r\n" % article.article)
        return command, article
            command = utob("ARTICLE <%s>\r\n" % self.article.article)
        self.nntp.sock.sendall(command)
        self.reset_data_buffer()
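body() picks the cheapest NNTP verb the server is known to support: STAT or HEAD for prechecks (existence only), BODY normally, and ARTICLE as the universal fallback. The same selection in isolation, as a sketch:

def build_command(message_id: str, precheck: bool, have_stat: bool, have_body: bool) -> bytes:
    # Prechecks only need to confirm existence, so prefer the cheapest verbs
    if precheck:
        verb = "STAT" if have_stat else "HEAD"
    elif have_body:
        verb = "BODY"
    else:
        # ARTICLE also returns headers we don't need, but always works
        verb = "ARTICLE"
    return ("%s <%s>\r\n" % (verb, message_id)).encode()


assert build_command("part1@example", True, True, True) == b"STAT <part1@example>\r\n"
assert build_command("part1@example", False, True, False) == b"ARTICLE <part1@example>\r\n"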
    def on_response(self, response: sabctools.NNTPResponse, article: Optional["sabnzbd.nzbstuff.Article"]) -> None:
        """A response to a NNTP request is received"""
        self.concurrent_requests.release()
        sabnzbd.Downloader.modify_socket(self, EVENT_READ | EVENT_WRITE)
        server = self.server
        article_done = response.status_code in (220, 222) and article

    def recv_chunk(self) -> Tuple[int, bool, bool]:
        """Receive data, return #bytes, end-of-line, end-of-article"""
        # Resize the buffer in the extremely unlikely case that it got full
        if self.data_position == len(self.data):
            self.nntp.nw.increase_data_buffer()

        if article_done:
            with DOWNLOADER_LOCK:
                # Update statistics only when we fetched a whole article
                # The side effect is that we don't count things like article-not-available messages
                article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, response.bytes_read)

        # Response code depends on request command:
        # 220 = ARTICLE, 222 = BODY
        if not article_done:
            if not self.connected or not article or response.status_code in (281, 381, 480, 481, 482):
                self.discard(article, count_article_try=False)
                if not sabnzbd.Downloader.finish_connect_nw(self, response):
                    return
                if self.connected:
                    logging.info("Connecting %s@%s finished", self.thrdnum, server.host)

            elif response.status_code == 223:
                article_done = True
                logging.debug("Article <%s> is present on %s", article.article, server.host)

            elif response.status_code in (411, 423, 430, 451):
                article_done = True
                logging.debug(
                    "Thread %s@%s: Article %s missing (error=%s)",
                    self.thrdnum,
                    server.host,
                    article.article,
                    response.status_code,
                )

            elif response.status_code == 500:
                if article.nzf.nzo.precheck:
                    # Did we try "STAT" already?
                    if not server.have_stat:
                        # Hopeless server, just discard
                        logging.info("Server %s does not support STAT or HEAD, precheck not possible", server.host)
                        article_done = True
                    else:
                        # Assume "STAT" command is not supported
                        server.have_stat = False
                        logging.debug("Server %s does not support STAT, trying HEAD", server.host)
                else:
                    # Assume "BODY" command is not supported
                    server.have_body = False
                    logging.debug("Server %s does not support BODY", server.host)
                self.discard(article, count_article_try=False)

            else:
                # Don't warn for (internal) server errors during downloading
                if response.status_code not in (400, 502, 503):
                    logging.warning(
                        T("%s@%s: Received unknown status code %s for article %s"),
                        self.thrdnum,
                        server.host,
                        response.status_code,
                        article.article,
                    )

                # Ditch this thread, we don't know what data we got now so the buffer can be bad
                sabnzbd.Downloader.reset_nw(
                    self, f"Server error or unknown status code: {response.status_code}", wait=False, article=article
                )
                return

        if article_done:
            # Successful data, clear "bad" counter
            server.bad_cons = 0
            server.errormsg = server.warning = ""

            # Decode
            sabnzbd.Downloader.decode(article, response)

            if sabnzbd.LOG_ALL:
                logging.debug("Thread %s@%s: %s done", self.thrdnum, server.host, article.article)
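on_response() sorts server replies into a few buckets: success (220/222, plus 223 for STAT), article-missing (411/423/430/451), command-unsupported (500), and everything else forces a connection reset. The same dispatch condensed into one function, as a sketch of the branch structure rather than the real handler:

def classify_status(code: int) -> str:
    # Buckets mirror the branches in on_response() above
    if code in (220, 222, 223):
        return "article-done"
    if code in (411, 423, 430, 451):
        return "article-missing"
    if code == 500:
        return "command-unsupported"
    if code in (400, 502, 503):
        return "server-busy"  # reset quietly, no warning
    return "unknown"  # warn, then reset the connection


assert classify_status(430) == "article-missing"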
    def read(
        self,
        nbytes: int = 0,
        on_response: Optional[Callable[[int, str], None]] = None,
    ) -> Tuple[int, Optional[int]]:
        """Receive data, return #bytes, #pendingbytes
        :param nbytes: maximum number of bytes to read
        :param on_response: callback for each complete response received
        :return: #bytes, #pendingbytes
        """
        # Receive data into the decoder pre-allocated buffer
        if not nbytes and self.nntp.nw.server.ssl and not self.nntp.nw.blocking and sabctools.openssl_linked:
        # Receive data into the pre-allocated buffer
        if self.nntp.nw.server.ssl and not self.nntp.nw.blocking and sabctools.openssl_linked:
            # Use patched version when downloading
            bytes_recv = sabctools.unlocked_ssl_recv_into(self.nntp.sock, self.decoder)
            bytes_recv = sabctools.unlocked_ssl_recv_into(self.nntp.sock, self.data_view[self.data_position :])
        else:
            bytes_recv = self.nntp.sock.recv_into(self.decoder, nbytes=nbytes)
            bytes_recv = self.nntp.sock.recv_into(self.data_view[self.data_position :])

        # No data received
        if bytes_recv == 0:
            raise ConnectionError("Server closed connection")

        # Success, move timeout
        # Success, move timeout and internal data position
        self.timeout = time.time() + self.server.timeout

        self.decoder.process(bytes_recv)
        for response in self.decoder:
            with self.lock:
                article = self._response_queue.popleft()
            if on_response:
                on_response(response.status_code, response.message)
            self.on_response(response, article)
        self.data_position += bytes_recv

        # The SSL-layer might still contain data even though the socket does not. Another Downloader-loop would
        # not identify this socket anymore as it is not returned by select(). So, we have to forcefully trigger
        # another recv_chunk so the buffer is increased and the data from the SSL-layer is read. See #2752.
        if self.server.ssl and self.nntp and (pending := self.nntp.sock.pending()):
            return bytes_recv, pending
        return bytes_recv, None
        if self.nntp.nw.server.ssl and self.data_position == len(self.data) and self.nntp.sock.pending() > 0:
            # We do not perform error-handling, as we know there is data available to read
            additional_bytes_recv, additional_end_of_line, additional_end_of_article = self.recv_chunk()
            return bytes_recv + additional_bytes_recv, additional_end_of_line, additional_end_of_article
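Both variants of read() have to deal with the same TLS subtlety: ssl.SSLSocket.pending() reports decrypted bytes still buffered inside OpenSSL, which select() cannot see, so the caller must read again without waiting. A minimal illustration of the drain, assuming an established TLS socket and a buffer large enough for the data in flight:

import ssl


def read_all_available(tls_sock: ssl.SSLSocket, buffer: bytearray) -> int:
    # One recv_into() may leave decrypted bytes queued inside the TLS layer;
    # select()/poll() only watch the raw socket, so drain explicitly.
    total = tls_sock.recv_into(buffer)
    while tls_sock.pending():
        total += tls_sock.recv_into(memoryview(buffer)[total:])
    return total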
    def write(self):
        """Send data to server"""
        server = self.server
        # Check for end of line
        # Using the data directly seems faster than the memoryview
        if self.data[self.data_position - 2 : self.data_position] == b"\r\n":
            # Official end-of-article is "\r\n.\r\n"
            if self.data[self.data_position - 5 : self.data_position] == b"\r\n.\r\n":
                return bytes_recv, True, True
            return bytes_recv, True, False

        try:
            # First, try to flush any remaining data
            if self.send_buffer:
                sent = self.nntp.sock.send(self.send_buffer)
                self.send_buffer = self.send_buffer[sent:]
                if self.send_buffer:
                    # Still unsent data, wait for next EVENT_WRITE
                    return
        # Still in middle of data, so continue!
        return bytes_recv, False, False

            if self.connected:
                if (
                    server.active
                    and not server.restart
                    and not (
                        sabnzbd.Downloader.paused
                        or sabnzbd.Downloader.shutdown
                        or sabnzbd.Downloader.paused_for_postproc
                    )
                ):
                    # Prepare the next request
                    if not self.next_request and (article := server.get_article()):
                        self.next_request = self.body(article)
                    elif self.next_request and self.next_request[1]:
                        # Discard the next request
                        self.discard(self.next_request[1], count_article_try=False, retry_article=True)
                        self.next_request = None

    def soft_reset(self):
        """Reset for the next article"""
        self.timeout = None
        self.article = None
        self.reset_data_buffer()

                # If no pending buffer, try to send new command
                if not self.send_buffer and self.next_request:
                    if self.concurrent_requests.acquire(blocking=False):
                        command, article = self.next_request
                        self.next_request = None
                        if article:
                            nzo = article.nzf.nzo
                            if nzo.removed_from_queue or nzo.status is Status.PAUSED and nzo.priority is not FORCE_PRIORITY:
                                self.discard(article, count_article_try=False, retry_article=True)
                                self.concurrent_requests.release()
                                return
                            self._response_queue.append(article)
                        if sabnzbd.LOG_ALL:
                            logging.debug("Thread %s@%s: %s", self.thrdnum, server.host, command)
                        try:
                            sent = self.nntp.sock.send(command)
                            if sent < len(command):
                                # Partial send, store remainder
                                self.send_buffer = command[sent:]
                        except (BlockingIOError, ssl.SSLWantWriteError):
                            # Can't send now, store full command
                            self.send_buffer = command
                    else:
                        # Concurrency limit reached
                        sabnzbd.Downloader.modify_socket(self, EVENT_READ)
            else:
                # Is it safe to shut down this socket?
                if (
                    not self.send_buffer
                    and not self.next_request
                    and not self._response_queue
                    and (not server.active or server.restart or time.time() > self.timeout)
                ):
                    # Make socket available again
                    server.busy_threads.discard(self)
                    server.idle_threads.add(self)
                    sabnzbd.Downloader.remove_socket(self)

    def reset_data_buffer(self):
        """Reset the data position"""
        self.data_position = 0

        except (BlockingIOError, ssl.SSLWantWriteError):
            # Socket not currently writable, just try again later
            return
        except socket.error as err:
            logging.info("Looks like server closed connection: %s", err)
            sabnzbd.Downloader.reset_nw(self, "Server broke off connection", warn=True)
        except Exception:
            logging.error(T("Suspect error in downloader"))
            logging.info("Traceback: ", exc_info=True)
            sabnzbd.Downloader.reset_nw(self, "Server broke off connection", warn=True)

    def increase_data_buffer(self):
        """Resize the buffer in the extremely unlikely case that it overflows"""
        # Sanity check before we go any further
        if len(self.data) > NTTP_MAX_BUFFER_SIZE:
            raise BufferError("Maximum data buffer size exceeded")

        # Input needs to be an integer, floats don't work
        new_buffer = sabctools.bytearray_malloc(len(self.data) + NNTP_BUFFER_SIZE // 2)
        new_buffer[: len(self.data)] = self.data
        logging.info("Increased buffer from %d to %d for %s", len(self.data), len(new_buffer), str(self))
        self.data = new_buffer
        self.data_view = memoryview(self.data)

    def hard_reset(self, wait: bool = True):
        """Destroy and restart"""
        with self.lock:
            # Drain unsent requests
            if self.next_request:
                _, article = self.next_request
                if article:
                    self.discard(article, count_article_try=False, retry_article=True)
                self.next_request = None
            # Drain responses
            while self._response_queue:
                if article := self._response_queue.popleft():
                    self.discard(article, count_article_try=False, retry_article=True)

        if self.nntp:
            self.nntp.close(send_quit=self.connected)
            self.nntp = None
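The write() path above keeps a send_buffer for partial sends: on a non-blocking socket, send() may accept only part of the command, while BlockingIOError or SSLWantWriteError means none of it was taken. A sketch of that pattern on its own, with the socket passed in by the caller:

import ssl


class Sender:
    def __init__(self, sock):
        self.sock = sock
        self.send_buffer = b""

    def send(self, command: bytes) -> None:
        try:
            sent = self.sock.send(command)
            if sent < len(command):
                # Kernel took only part; keep the rest for the next EVENT_WRITE
                self.send_buffer = command[sent:]
        except (BlockingIOError, ssl.SSLWantWriteError):
            # Socket not writable at all right now; keep everything
            self.send_buffer = command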
@@ -426,28 +258,6 @@ class NewsWrapper:
            # Reset for internal reasons, just wait 5 sec
            self.timeout = time.time() + 5

    def discard(
        self,
        article: Optional["sabnzbd.nzbstuff.Article"],
        count_article_try: bool = True,
        retry_article: bool = True,
    ) -> None:
        """Discard an article back to the queue"""
        if article and not article.nzf.nzo.removed_from_queue:
            # Only some errors should count towards the total tries for each server
            if count_article_try:
                article.tries += 1

            # Do we discard, or try again for this server
            if not retry_article or (not self.server.required and article.tries > sabnzbd.cfg.max_art_tries()):
                # Too many tries on this server, consider article missing
                sabnzbd.Downloader.decode(article)
                article.tries = 0
            else:
                # Allow all servers again for this article
                # Do not use the article_queue, as the server could already have been disabled when we get here!
                article.allow_new_fetcher()

    def __repr__(self):
        return "<NewsWrapper: server=%s:%s, thread=%s, connected=%s>" % (
            self.server.host,
@@ -569,7 +379,7 @@ class NNTP:
            # Locked, so it can't interleave with any of the Downloader "__nw" actions
            with DOWNLOADER_LOCK:
                if not self.closed:
                    sabnzbd.Downloader.add_socket(self.nw)
                    sabnzbd.Downloader.add_socket(self.fileno, self.nw)
        except OSError as e:
            self.error(e)
@@ -31,7 +31,7 @@ import http.client
import json
import apprise
from threading import Thread
from typing import Optional, Union
from typing import Optional, Dict, Union

import sabnzbd
import sabnzbd.cfg
@@ -160,7 +160,7 @@ def send_notification(
    msg: str,
    notification_type: str,
    job_cat: Optional[str] = None,
    actions: Optional[dict[str, str]] = None,
    actions: Optional[Dict[str, str]] = None,
):
    """Send Notification message"""
    logging.info("Sending notification: %s - %s (type=%s, job_cat=%s)", title, msg, notification_type, job_cat)
@@ -243,7 +243,7 @@ def send_notify_osd(title, message):
    return error


def send_notification_center(title: str, msg: str, notification_type: str, actions: Optional[dict[str, str]] = None):
def send_notification_center(title: str, msg: str, notification_type: str, actions: Optional[Dict[str, str]] = None):
    """Send message to macOS Notification Center.
    Only 1 button is possible on macOS!"""
    logging.debug("Sending macOS notification")
@@ -531,7 +531,7 @@ def send_nscript(title, msg, notification_type, force=False, test=None):
    return ""


def send_windows(title: str, msg: str, notification_type: str, actions: Optional[dict[str, str]] = None):
def send_windows(title: str, msg: str, notification_type: str, actions: Optional[Dict[str, str]] = None):
    """Send Windows notifications, either fancy with buttons (Windows 10+) or basic ones"""
    # Skip any notifications if ran as a Windows Service, it can result in crashes
    if sabnzbd.WIN_SERVICE:
@@ -30,7 +30,7 @@ import zipfile
import tempfile

import cherrypy._cpreqbody
from typing import Optional, Any, Union
from typing import Optional, Dict, Any, Union, List, Tuple

import sabnzbd
from sabnzbd import nzbstuff
@@ -152,12 +152,12 @@ def process_nzb_archive_file(
    priority: Optional[Union[int, str]] = None,
    nzbname: Optional[str] = None,
    reuse: Optional[str] = None,
    nzo_info: Optional[dict[str, Any]] = None,
    nzo_info: Optional[Dict[str, Any]] = None,
    url: Optional[str] = None,
    password: Optional[str] = None,
    nzo_id: Optional[str] = None,
    dup_check: bool = True,
) -> tuple[AddNzbFileResult, list[str]]:
) -> Tuple[AddNzbFileResult, List[str]]:
    """Analyse archive and create job(s).
    Accepts archive files with ONLY nzb/nfo/folder files in it.
    """
@@ -271,12 +271,12 @@ def process_single_nzb(
    priority: Optional[Union[int, str]] = None,
    nzbname: Optional[str] = None,
    reuse: Optional[str] = None,
    nzo_info: Optional[dict[str, Any]] = None,
    nzo_info: Optional[Dict[str, Any]] = None,
    url: Optional[str] = None,
    password: Optional[str] = None,
    nzo_id: Optional[str] = None,
    dup_check: bool = True,
) -> tuple[AddNzbFileResult, list[str]]:
) -> Tuple[AddNzbFileResult, List[str]]:
    """Analyze file and create a job from it
    Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
    """
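The "GZ.NZB-in-disguise" case in that docstring refers to gzip files whose extension lies about their content; the robust way to handle it is to sniff the two gzip magic bytes instead of trusting the name. A hedged sketch of that check, not the actual SABnzbd implementation:

import gzip

GZIP_MAGIC = b"\x1f\x8b"


def read_maybe_gzipped_nzb(path: str) -> bytes:
    # Trust the file header, not the extension
    with open(path, "rb") as f:
        magic = f.read(2)
    if magic == GZIP_MAGIC:
        with gzip.open(path, "rb") as f:
            return f.read()
    with open(path, "rb") as f:
        return f.read()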
@@ -23,7 +23,7 @@ import os
import logging
import time
import cherrypy._cpreqbody
from typing import Union, Optional
from typing import List, Dict, Union, Tuple, Optional

import sabnzbd
from sabnzbd.nzbstuff import NzbObject, Article
@@ -57,8 +57,8 @@ class NzbQueue:

    def __init__(self):
        self.__top_only: bool = cfg.top_only()
        self.__nzo_list: list[NzbObject] = []
        self.__nzo_table: dict[str, NzbObject] = {}
        self.__nzo_list: List[NzbObject] = []
        self.__nzo_table: Dict[str, NzbObject] = {}

    def read_queue(self, repair: int):
        """Read queue from disk, supporting repair modes
@@ -121,7 +121,7 @@ class NzbQueue:
        pass

    @NzbQueueLocker
    def scan_jobs(self, all_jobs: bool = False, action: bool = True) -> list[str]:
    def scan_jobs(self, all_jobs: bool = False, action: bool = True) -> List[str]:
        """Scan "incomplete" for missing folders,
        'all' is True: Include active folders
        'action' is True, do the recovery action
@@ -247,7 +247,7 @@ class NzbQueue:
        self.__top_only = value

    @NzbQueueLocker
    def change_opts(self, nzo_ids: list[str], pp: int) -> int:
    def change_opts(self, nzo_ids: List[str], pp: int) -> int:
        """Locked so changes during URLGrabbing are correctly passed to new job"""
        result = 0
        for nzo_id in nzo_ids:
@@ -257,7 +257,7 @@ class NzbQueue:
        return result

    @NzbQueueLocker
    def change_script(self, nzo_ids: list[str], script: str) -> int:
    def change_script(self, nzo_ids: List[str], script: str) -> int:
        """Locked so changes during URLGrabbing are correctly passed to new job"""
        result = 0
        if (script is None) or is_valid_script(script):
@@ -269,7 +269,7 @@ class NzbQueue:
        return result

    @NzbQueueLocker
    def change_cat(self, nzo_ids: list[str], cat: str) -> int:
    def change_cat(self, nzo_ids: List[str], cat: str) -> int:
        """Locked so changes during URLGrabbing are correctly passed to new job"""
        result = 0
        for nzo_id in nzo_ids:
@@ -387,7 +387,7 @@ class NzbQueue:
        return nzo

    @NzbQueueLocker
    def remove_multiple(self, nzo_ids: list[str], delete_all_data=True) -> list[str]:
    def remove_multiple(self, nzo_ids: List[str], delete_all_data=True) -> List[str]:
        """Remove multiple jobs from the queue. Also triggers duplicate handling
        and downloader-disconnect, so intended for external use only!"""
        removed = []
@@ -405,7 +405,7 @@ class NzbQueue:
        return removed

    @NzbQueueLocker
    def remove_all(self, search: Optional[str] = None) -> list[str]:
    def remove_all(self, search: Optional[str] = None) -> List[str]:
        """Remove NZO's that match the search-pattern"""
        nzo_ids = []
        search = safe_lower(search)
@@ -414,7 +414,7 @@ class NzbQueue:
            nzo_ids.append(nzo_id)
        return self.remove_multiple(nzo_ids)

    def remove_nzfs(self, nzo_id: str, nzf_ids: list[str]) -> list[str]:
    def remove_nzfs(self, nzo_id: str, nzf_ids: List[str]) -> List[str]:
        removed = []
        if nzo_id in self.__nzo_table:
            nzo = self.__nzo_table[nzo_id]
@@ -441,7 +441,7 @@ class NzbQueue:
            logging.info("Removed NZFs %s from job %s", removed, nzo.final_name)
        return removed

    def pause_multiple_nzo(self, nzo_ids: list[str]) -> list[str]:
    def pause_multiple_nzo(self, nzo_ids: List[str]) -> List[str]:
        handled = []
        for nzo_id in nzo_ids:
            self.pause_nzo(nzo_id)
@@ -449,7 +449,7 @@ class NzbQueue:
        return handled

    @NzbQueueLocker
    def pause_nzo(self, nzo_id: str) -> list[str]:
    def pause_nzo(self, nzo_id: str) -> List[str]:
        """Locked so changes during URLGrabbing are correctly passed to new job"""
        handled = []
        if nzo_id in self.__nzo_table:
@@ -459,7 +459,7 @@ class NzbQueue:
            handled.append(nzo_id)
        return handled

    def resume_multiple_nzo(self, nzo_ids: list[str]) -> list[str]:
    def resume_multiple_nzo(self, nzo_ids: List[str]) -> List[str]:
        handled = []
        for nzo_id in nzo_ids:
            self.resume_nzo(nzo_id)
@@ -467,7 +467,7 @@ class NzbQueue:
        return handled

    @NzbQueueLocker
    def resume_nzo(self, nzo_id: str) -> list[str]:
    def resume_nzo(self, nzo_id: str) -> List[str]:
        handled = []
        if nzo_id in self.__nzo_table:
            nzo = self.__nzo_table[nzo_id]
@@ -477,7 +477,7 @@ class NzbQueue:
        return handled

    @NzbQueueLocker
    def switch(self, item_id_1: str, item_id_2: str) -> tuple[int, int]:
    def switch(self, item_id_1: str, item_id_2: str) -> Tuple[int, int]:
        try:
            # Allow an index as second parameter, easier for some skins
            i = int(item_id_2)
@@ -532,24 +532,24 @@ class NzbQueue:
            return -1, nzo1.priority

    @NzbQueueLocker
    def move_nzf_up_bulk(self, nzo_id: str, nzf_ids: list[str], size: int):
    def move_nzf_up_bulk(self, nzo_id: str, nzf_ids: List[str], size: int):
        if nzo_id in self.__nzo_table:
            for _ in range(size):
                self.__nzo_table[nzo_id].move_up_bulk(nzf_ids)

    @NzbQueueLocker
    def move_nzf_top_bulk(self, nzo_id: str, nzf_ids: list[str]):
    def move_nzf_top_bulk(self, nzo_id: str, nzf_ids: List[str]):
        if nzo_id in self.__nzo_table:
            self.__nzo_table[nzo_id].move_top_bulk(nzf_ids)

    @NzbQueueLocker
    def move_nzf_down_bulk(self, nzo_id: str, nzf_ids: list[str], size: int):
    def move_nzf_down_bulk(self, nzo_id: str, nzf_ids: List[str], size: int):
        if nzo_id in self.__nzo_table:
            for _ in range(size):
                self.__nzo_table[nzo_id].move_down_bulk(nzf_ids)

    @NzbQueueLocker
    def move_nzf_bottom_bulk(self, nzo_id: str, nzf_ids: list[str]):
    def move_nzf_bottom_bulk(self, nzo_id: str, nzf_ids: List[str]):
        if nzo_id in self.__nzo_table:
            self.__nzo_table[nzo_id].move_bottom_bulk(nzf_ids)

@@ -670,7 +670,7 @@ class NzbQueue:
        return -1

    @NzbQueueLocker
    def set_priority(self, nzo_ids: list[str], priority: int) -> int:
    def set_priority(self, nzo_ids: List[str], priority: int) -> int:
        try:
            n = -1
            for nzo_id in nzo_ids:
@@ -692,7 +692,7 @@ class NzbQueue:
                return False
        return False

    def get_articles(self, server: Server, servers: list[Server], fetch_limit: int) -> None:
    def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
        """Get next article for jobs in the queue
        Not locked for performance, since it only reads the queue
        """
@@ -705,12 +705,12 @@ class NzbQueue:
                and not nzo.propagation_delay_left
            ) or nzo.priority == FORCE_PRIORITY:
                if not nzo.server_in_try_list(server):
                    nzo.get_articles(server, servers, fetch_limit)
                    if server.article_queue:
                        break
                    if articles := nzo.get_articles(server, servers, fetch_limit):
                        return articles
                # Stop after first job that wasn't paused/propagating/etc
                if self.__top_only:
                    break
        return []
        return []

    def register_article(self, article: Article, success: bool = True):
        """Register the articles we tried
@@ -768,9 +768,10 @@ class NzbQueue:
        nzo.removed_from_queue = True
        if nzo.precheck:
            nzo.save_to_disk()
            # If not enough data is present, fail flag will be set (also used by postproc)
            if not nzo.fail_msg:
                # Send back for real download
            # Check result
            enough, _ = nzo.check_availability_ratio()
            if enough:
                # Enough data present, do real download
                self.send_back(nzo)
                return
        else:
@@ -801,13 +802,13 @@ class NzbQueue:
    def queue_info(
        self,
        search: Optional[str] = None,
        categories: Optional[list[str]] = None,
        priorities: Optional[list[str]] = None,
        statuses: Optional[list[str]] = None,
        nzo_ids: Optional[list[str]] = None,
        categories: Optional[List[str]] = None,
        priorities: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        nzo_ids: Optional[List[str]] = None,
        start: int = 0,
        limit: int = 0,
    ) -> tuple[int, int, int, list[NzbObject], int, int]:
    ) -> Tuple[int, int, int, List[NzbObject], int, int]:
        """Return list of queued jobs, optionally filtered and limited by start and limit.
        Not locked for performance, only reads the queue
        """
@@ -893,14 +894,11 @@ class NzbQueue:

                if nzf.all_servers_in_try_list(active_servers):
                    # Check for articles where all active servers have already been tried
                    with nzf:
                        for article in nzf.articles:
                            if article.all_servers_in_try_list(active_servers):
                                logging.debug(
                                    "Removing article %s with bad trylist in file %s", article, nzf.filename
                                )
                                nzo.increase_bad_articles_counter("missing_articles")
                                sabnzbd.NzbQueue.register_article(article, success=False)
                    for article in nzf.articles[:]:
                        if article.all_servers_in_try_list(active_servers):
                            logging.debug("Removing article %s with bad trylist in file %s", article, nzf.filename)
                            nzo.increase_bad_articles_counter("missing_articles")
                            sabnzbd.NzbQueue.register_article(article, success=False)

                    logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
                    nzf.reset_try_list()
@@ -936,7 +934,7 @@ class NzbQueue:
            # Don't use nzo.resume() to avoid resetting job warning flags
            nzo.status = Status.QUEUED

    def get_urls(self) -> list[tuple[str, NzbObject]]:
    def get_urls(self) -> List[Tuple[str, NzbObject]]:
        """Return list of future-types needing URL"""
        lst = []
        for nzo_id in self.__nzo_table:
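The master-side get_articles() threads the fetched batch back through return values, using the walrus operator to assign and test in one expression; the alpha side instead pushes results into server.article_queue. The returning style in isolation, with a toy Source class standing in for NzbObject:

from typing import List


class Source:
    def __init__(self, items):
        self.items = items

    def get_articles(self, fetch_limit: int) -> List[str]:
        return self.items[:fetch_limit]


def get_from_first_source(sources, fetch_limit: int) -> List[str]:
    for source in sources:
        # Assign and test in one step; skip sources that yield nothing
        if articles := source.get_articles(fetch_limit):
            return articles
    return []


assert get_from_first_source([Source([]), Source(["a", "b", "c"])], 2) == ["a", "b"]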
@@ -26,7 +26,7 @@ import datetime
import threading
import functools
import difflib
from typing import Any, Optional, Union, BinaryIO, Deque
from typing import List, Dict, Any, Tuple, Optional, Union, BinaryIO, Set

# SABnzbd modules
import sabnzbd
@@ -122,14 +122,14 @@ class TryList:

    def __init__(self):
        # Sets are faster than lists
        self.try_list: set[Server] = set()
        self.try_list: Set[Server] = set()

    def server_in_try_list(self, server: Server) -> bool:
        """Return whether specified server has been tried"""
        with TRYLIST_LOCK:
            return server in self.try_list

    def all_servers_in_try_list(self, all_servers: set[Server]) -> bool:
    def all_servers_in_try_list(self, all_servers: Set[Server]) -> bool:
        """Check if all servers have been tried"""
        with TRYLIST_LOCK:
            return all_servers.issubset(self.try_list)
@@ -155,7 +155,7 @@ class TryList:
        """Save the servers"""
        return set(server.id for server in self.try_list)

    def __setstate__(self, servers_ids: list[str]):
    def __setstate__(self, servers_ids: List[str]):
        self.try_list = set()
        for server in sabnzbd.Downloader.servers:
            if server.id in servers_ids:
@@ -222,7 +222,7 @@ class Article(TryList):
            self.nzf.reset_try_list()
            self.nzf.nzo.reset_try_list()

    def get_article(self, server: Server, servers: list[Server]):
    def get_article(self, server: Server, servers: List[Server]):
        """Return article when appropriate for specified server"""
        if self.fetcher or self.server_in_try_list(server):
            return None
@@ -328,12 +328,11 @@ class NzbFile(TryList):
    """Representation of one file consisting of multiple articles"""

    # Pre-define attributes to save memory
    __slots__ = NzbFileSaver + ("lock",)
    __slots__ = NzbFileSaver

    def __init__(self, date, subject, raw_article_db, file_bytes, nzo):
        """Setup object"""
        super().__init__()
        self.lock = threading.RLock()

        self.date: datetime.datetime = date
        self.type: Optional[str] = None
@@ -348,8 +347,8 @@ class NzbFile(TryList):
        self.setname: Optional[str] = None

        # Articles are removed from "articles" after being fetched
        self.articles: dict[Article, Article] = {}
        self.decodetable: list[Article] = []
        self.articles: List[Article] = []
        self.decodetable: List[Article] = []

        self.bytes: int = file_bytes
        self.bytes_left: int = file_bytes
@@ -403,18 +402,17 @@ class NzbFile(TryList):
    def add_article(self, article_info):
        """Add article to object database and return article object"""
        article = Article(article_info[0], article_info[1], self)
        with self.lock:
            self.articles[article] = article
            self.decodetable.append(article)
        self.articles.append(article)
        self.decodetable.append(article)
        return article

    def remove_article(self, article: Article, success: bool) -> int:
        """Handle completed article, possibly end of file"""
        with self.lock:
            if self.articles.pop(article, None) is not None:
                if success:
                    self.bytes_left -= article.bytes
            return len(self.articles)
        if article in self.articles:
            self.articles.remove(article)
        if success:
            self.bytes_left -= article.bytes
        return len(self.articles)

    def set_par2(self, setname, vol, blocks):
        """Designate this file as a par2 file"""
@@ -429,45 +427,29 @@ class NzbFile(TryList):
        else:
            self.crc32 = sabctools.crc32_combine(self.crc32, crc32, length)

    def get_articles(self, server: Server, servers: list[Server], fetch_limit: int):
    def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
        """Get next articles to be downloaded"""
        articles = server.article_queue
        with self.lock:
            for article in self.articles:
                if article := article.get_article(server, servers):
                    articles.append(article)
                    if len(articles) >= fetch_limit:
                        return
        articles = []
        for article in self.articles:
            if article := article.get_article(server, servers):
                articles.append(article)
                if len(articles) >= fetch_limit:
                    return articles
        self.add_to_try_list(server)
        return articles

    @synchronized(TRYLIST_LOCK)
    def reset_all_try_lists(self):
        """Reset all try lists. Locked so reset is performed
        for all items at the same time without chance of another
        thread changing any of the items while we are resetting"""
        with self.lock:
            for art in self.articles:
                art.reset_try_list()
        for art in self.articles:
            art.reset_try_list()
        self.reset_try_list()

    def first_article_processed(self) -> bool:
        """Check if the first article has been processed.
        This ensures we have attempted to extract md5of16k and filename information
        before creating the filepath.
        """
        # The first article of decodetable is always the lowest
        first_article = self.decodetable[0]
        # If it's still in nzo.first_articles, it hasn't been processed yet
        return first_article not in self.nzo.first_articles

    def prepare_filepath(self):
        """Do all checks before making the final path"""
        if not self.filepath:
            # Wait for the first article to be processed so we can get md5of16k
            # and proper filename before creating the filepath
            if not self.first_article_processed():
                return None

            self.nzo.verify_nzf_filename(self)
            filename = sanitize_filename(self.filename)
            self.filepath = get_unique_filename(os.path.join(self.nzo.download_path, filename))
@@ -477,10 +459,7 @@ class NzbFile(TryList):
    @property
    def completed(self):
        """Is this file completed?"""
        if not self.import_finished:
            return False
        with self.lock:
            return not self.articles
        return self.import_finished and not bool(self.articles)

    def remove_admin(self):
        """Remove article database from disk (sabnzbd_nzf_<id>)"""
@@ -490,12 +469,6 @@ class NzbFile(TryList):
        except Exception:
            pass

    def __enter__(self):
        self.lock.acquire()

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.lock.release()

    def __getstate__(self):
        """Save to pickle file, selecting attributes"""
        dict_ = {}
@@ -513,10 +486,6 @@ class NzbFile(TryList):
        # Handle new attributes
        setattr(self, item, None)
        super().__setstate__(dict_.get("try_list", []))
        self.lock = threading.RLock()
        if isinstance(self.articles, list):
            # Converted from list to dict
            self.articles = {x: x for x in self.articles}
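The alpha-side NzbFile gains __enter__/__exit__ so "with nzf:" takes its RLock, and stores articles as a dict keyed on itself so removal is an O(1) pop instead of an O(n) list scan. A sketch of both ideas combined in one small class:

import threading


class LockedBag:
    def __init__(self):
        self.lock = threading.RLock()
        # dict used as an ordered set: pop() is O(1), list.remove() is O(n)
        self.items = {}

    def __enter__(self):
        self.lock.acquire()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.lock.release()

    def add(self, item):
        with self.lock:
            self.items[item] = item

    def remove(self, item) -> bool:
        with self.lock:
            return self.items.pop(item, None) is not None


bag = LockedBag()
bag.add("article-1")
with bag:  # same pattern as "with nzf:" in the queue code above
    assert bag.remove("article-1")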
    def __eq__(self, other: "NzbFile"):
        """Assume it's the same file if the number of bytes and first article
@@ -645,7 +614,7 @@ class NzbObject(TryList):
        password: Optional[str] = None,
        nzbname: Optional[str] = None,
        status: str = Status.QUEUED,
        nzo_info: Optional[dict[str, Any]] = None,
        nzo_info: Optional[Dict[str, Any]] = None,
        reuse: Optional[str] = None,
        nzo_id: Optional[str] = None,
        dup_check: bool = True,
@@ -708,7 +677,7 @@ class NzbObject(TryList):

        # Bookkeeping values
        self.meta = {}
        self.servercount: dict[str, int] = {}  # Dict to keep bytes per server
        self.servercount: Dict[str, int] = {}  # Dict to keep bytes per server
        self.direct_unpacker: Optional[sabnzbd.directunpacker.DirectUnpacker] = None  # The DirectUnpacker instance
        self.bytes: int = 0  # Original bytesize
        self.bytes_par2: int = 0  # Bytes available for repair
@@ -717,15 +686,15 @@ class NzbObject(TryList):
        self.bytes_missing: int = 0  # Bytes missing
        self.bad_articles: int = 0  # How many bad (non-recoverable) articles

        self.extrapars: dict[str, list[NzbFile]] = {}  # Holds the extra parfile names for all sets
        self.par2packs: dict[str, dict[str, FilePar2Info]] = {}  # Holds the par2info for each file in each set
        self.md5of16k: dict[bytes, str] = {}  # Holds the md5s of the first-16k of all files in the NZB (hash: name)
        self.extrapars: Dict[str, List[NzbFile]] = {}  # Holds the extra parfile names for all sets
        self.par2packs: Dict[str, Dict[str, FilePar2Info]] = {}  # Holds the par2info for each file in each set
        self.md5of16k: Dict[bytes, str] = {}  # Holds the md5s of the first-16k of all files in the NZB (hash: name)

        self.files: list[NzbFile] = []  # List of all NZFs
        self.files_table: dict[str, NzbFile] = {}  # Dictionary of NZFs indexed using NZF_ID
        self.renames: dict[str, str] = {}  # Dictionary of all renamed files
        self.files: List[NzbFile] = []  # List of all NZFs
        self.files_table: Dict[str, NzbFile] = {}  # Dictionary of NZFs indexed using NZF_ID
        self.renames: Dict[str, str] = {}  # Dictionary of all renamed files

        self.finished_files: list[NzbFile] = []  # List of all finished NZFs
        self.finished_files: List[NzbFile] = []  # List of all finished NZFs

        # The current status of the nzo eg:
        # Queued, Downloading, Repairing, Unpacking, Failed, Complete
@@ -734,9 +703,9 @@ class NzbObject(TryList):
        self.avg_bps_freq = 0
        self.avg_bps_total = 0

        self.first_articles: list[Article] = []
        self.first_articles: List[Article] = []
        self.first_articles_count = 0
        self.saved_articles: set[Article] = set()
        self.saved_articles: Set[Article] = set()
        self.nzo_id: Optional[str] = None

        self.duplicate: Optional[str] = None
@@ -758,11 +727,11 @@ class NzbObject(TryList):
        # Store one line responses for filejoin/par2/unrar here for history display
        self.action_line = ""
        # Store the results from various filejoin/par2/unrar stages
        self.unpack_info: dict[str, list[str]] = {}
        self.unpack_info: Dict[str, List[str]] = {}
        # Stores one line containing the last failure
        self.fail_msg = ""
        # Stores various info about the nzo to be
        self.nzo_info: dict[str, Any] = nzo_info or {}
        self.nzo_info: Dict[str, Any] = nzo_info or {}

        self.next_save = None
        self.save_timeout = None
@@ -1553,7 +1522,7 @@ class NzbObject(TryList):
        if hasattr(self, "direct_unpacker") and self.direct_unpacker:
            self.direct_unpacker.abort()

    def check_availability_ratio(self) -> tuple[bool, float]:
    def check_availability_ratio(self) -> Tuple[bool, float]:
        """Determine if we are still meeting the required ratio"""
        availability_ratio = req_ratio = cfg.req_completion_rate()

@@ -1656,9 +1625,8 @@ class NzbObject(TryList):
        self.nzo_info[bad_article_type] += 1
        self.bad_articles += 1

    def get_articles(self, server: Server, servers: list[Server], fetch_limit: int):
        """Assign articles to the server up to the fetch_limit"""
        articles: Deque[Article] = server.article_queue
    def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
        articles = []
        nzf_remove_list = []

        # Did we go through all first-articles?
@@ -1693,8 +1661,7 @@ class NzbObject(TryList):
            else:
                break

            nzf.get_articles(server, servers, fetch_limit)
            if articles:
            if articles := nzf.get_articles(server, servers, fetch_limit):
                break

        # Remove all files for which admin could not be read
@@ -1709,9 +1676,10 @@ class NzbObject(TryList):
        if not articles:
            # No articles for this server, block for next time
            self.add_to_try_list(server)
        return articles

    @synchronized(NZO_LOCK)
    def move_top_bulk(self, nzf_ids: list[str]):
    def move_top_bulk(self, nzf_ids: List[str]):
        self.cleanup_nzf_ids(nzf_ids)
        if nzf_ids:
            target = list(range(len(nzf_ids)))
@@ -1931,7 +1899,7 @@ class NzbObject(TryList):
        logging.debug("Saving attributes %s for %s", attribs, self.final_name)
        save_data(attribs, ATTRIB_FILE, self.admin_path, silent=True)

    def load_attribs(self) -> tuple[Optional[str], Optional[int], Optional[str]]:
    def load_attribs(self) -> Tuple[Optional[str], Optional[int], Optional[str]]:
        """Load saved attributes and return them to be parsed"""
        attribs = load_data(ATTRIB_FILE, self.admin_path, remove=False)
        logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
@@ -1954,7 +1922,7 @@ class NzbObject(TryList):
        return attribs["cat"], attribs["pp"], attribs["script"]

    @synchronized(NZO_LOCK)
    def build_pos_nzf_table(self, nzf_ids: list[str]) -> dict[int, NzbFile]:
    def build_pos_nzf_table(self, nzf_ids: List[str]) -> Dict[int, NzbFile]:
        pos_nzf_table = {}
        for nzf_id in nzf_ids:
            if nzf_id in self.files_table:
@@ -1965,7 +1933,7 @@ class NzbObject(TryList):
        return pos_nzf_table

    @synchronized(NZO_LOCK)
    def cleanup_nzf_ids(self, nzf_ids: list[str]):
    def cleanup_nzf_ids(self, nzf_ids: List[str]):
        for nzf_id in nzf_ids[:]:
            if nzf_id in self.files_table:
                if self.files_table[nzf_id] not in self.files:
@@ -2182,7 +2150,7 @@ def create_work_name(name: str) -> str:
    return name.strip()


def scan_password(name: str) -> tuple[str, Optional[str]]:
def scan_password(name: str) -> Tuple[str, Optional[str]]:
    """Get password (if any) from the title"""
    if "http://" in name or "https://" in name:
        return name, None
|
||||
import struct
|
||||
import sabctools
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
from sabnzbd.constants import MEBI
|
||||
from sabnzbd.encoding import correct_unknown_encoding
|
||||
from sabnzbd.filesystem import get_basename
|
||||
from sabnzbd.filesystem import get_basename, get_ext
|
||||
|
||||
PROBABLY_PAR2_RE = re.compile(r"(.*)\.vol(\d*)[+\-](\d*)\.par2", re.I)
|
||||
SCAN_LIMIT = 10 * MEBI
|
||||
@@ -71,7 +71,7 @@ def is_par2_file(filepath: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def analyse_par2(name: str, filepath: Optional[str] = None) -> tuple[str, int, int]:
|
||||
def analyse_par2(name: str, filepath: Optional[str] = None) -> Tuple[str, int, int]:
|
||||
"""Check if file is a par2-file and determine vol/block
|
||||
return setname, vol, block
|
||||
setname is empty when not a par2 file
|
||||
@@ -103,7 +103,7 @@ def analyse_par2(name: str, filepath: Optional[str] = None) -> tuple[str, int, i
|
||||
return setname, vol, block
|
||||
|
||||
|
||||
def parse_par2_file(fname: str, md5of16k: dict[bytes, str]) -> tuple[str, dict[str, FilePar2Info]]:
|
||||
def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Tuple[str, Dict[str, FilePar2Info]]:
|
||||
"""Get the hash table and the first-16k hash table from a PAR2 file
|
||||
Return as dictionary, indexed on names or hashes for the first-16 table
|
||||
The input md5of16k is modified in place and thus not returned!
|
||||
|
||||
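PROBABLY_PAR2_RE above decodes names like "set.vol003+05.par2" into setname, volume and block count. Applying that exact pattern directly:

import re

PROBABLY_PAR2_RE = re.compile(r"(.*)\.vol(\d*)[+\-](\d*)\.par2", re.I)


def vol_block(name: str):
    # Returns (setname, vol, block); (name, 0, 0) when not a vol-par2
    if m := PROBABLY_PAR2_RE.search(name):
        return m.group(1), int(m.group(2)), int(m.group(3))
    return name, 0, 0


assert vol_block("show.vol003+05.par2") == ("show", 3, 5)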
@@ -27,7 +27,7 @@ import re
import gc
import queue
import rarfile
from typing import Optional
from typing import List, Optional, Tuple

import sabnzbd
from sabnzbd.newsunpack import (
@@ -39,7 +39,7 @@ from sabnzbd.newsunpack import (
    rar_sort,
    is_sfv_file,
)
from threading import Thread, Event
from threading import Thread
from sabnzbd.misc import (
    on_cleanup_list,
    is_sample,
@@ -107,7 +107,7 @@ class PostProcessor(Thread):
        super().__init__()

        # This history queue is simply used to log what active items to display in the web_ui
        self.history_queue: list[NzbObject] = []
        self.history_queue: List[NzbObject] = []
        self.load()

        # Fast-queue for jobs already finished by DirectUnpack
@@ -116,9 +116,6 @@ class PostProcessor(Thread):
        # Regular queue for jobs that might need more attention
        self.slow_queue: queue.Queue[Optional[NzbObject]] = queue.Queue()

        # Event to signal when work is available or state changes
        self.work_available = Event()

        # Load all old jobs
        for nzo in self.history_queue:
            self.process(nzo)
@@ -183,9 +180,6 @@ class PostProcessor(Thread):
        self.save()
        history_updated()

        # Signal that work is available
        self.work_available.set()

    def remove(self, nzo: NzbObject):
        """Remove given nzo from the queue"""
        try:
@@ -198,22 +192,10 @@ class PostProcessor(Thread):
    def stop(self):
        """Stop thread after finishing running job"""
        self.__stop = True
        # Wake up the processor thread to check stop flag
        self.work_available.set()
        self.slow_queue.put(None)
        self.fast_queue.put(None)

    def pause(self):
        """Pause post-processing"""
        self.paused = True
        logging.info("Pausing post-processing")

    def resume(self):
        """Resume post-processing"""
        self.paused = False
        logging.info("Resuming post-processing")
        # Wake up the processor thread
        self.work_available.set()

    def cancel_pp(self, nzo_ids: list[str]) -> Optional[bool]:
    def cancel_pp(self, nzo_ids: List[str]) -> Optional[bool]:
        """Abort Direct Unpack and change the status, so that the PP is canceled"""
        result = None
        for nzo in self.history_queue:
@@ -238,10 +220,10 @@ class PostProcessor(Thread):
    def get_queue(
        self,
        search: Optional[str] = None,
        categories: Optional[list[str]] = None,
        statuses: Optional[list[str]] = None,
        nzo_ids: Optional[list[str]] = None,
    ) -> list[NzbObject]:
        categories: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        nzo_ids: Optional[List[str]] = None,
    ) -> List[NzbObject]:
        """Return list of NZOs that still need to be processed.
        Optionally filtered by the search terms"""
        re_search = None
@@ -283,40 +265,27 @@ class PostProcessor(Thread):
        while not self.__stop:
            self.__busy = False

            if self.paused:
                time.sleep(5)
                continue

            # Set NzbObject object to None so references from this thread do not keep the
            # object alive until the next job is added to post-processing (see #1628)
            nzo = None

            # Wait for work to be available (no timeout!)
            self.work_available.wait()

            # Check if we should stop
            if self.__stop:
                break

            # If paused, clear event and wait for resume
            if self.paused:
                self.work_available.clear()
                continue

            # If queues are empty (spurious wake or race condition), clear and loop back
            if self.slow_queue.empty() and self.fast_queue.empty():
                self.work_available.clear()
                continue

            # Something in the fast queue?
            try:
                # Every few fast-jobs we should allow a
                # Every few fast-jobs we should check allow a
                # slow job so that they don't wait forever
                if self.__fast_job_count >= MAX_FAST_JOB_COUNT and self.slow_queue.qsize():
                    raise queue.Empty

                nzo = self.fast_queue.get_nowait()
                nzo = self.fast_queue.get(timeout=2)
                self.__fast_job_count += 1
            except queue.Empty:
                # Try the slow queue
                try:
                    nzo = self.slow_queue.get_nowait()
                    nzo = self.slow_queue.get(timeout=2)
                    # Reset fast-counter
                    self.__fast_job_count = 0
                except queue.Empty:
@@ -327,6 +296,10 @@ class PostProcessor(Thread):
                    # No fast or slow jobs, better luck next loop!
                    continue

            # Stop job
            if not nzo:
                continue

            # Job was already deleted.
            if not nzo.work_name:
                check_eoq = True
@@ -355,7 +328,7 @@ class PostProcessor(Thread):
            self.external_process = None
            check_eoq = True

            # Allow download to proceed if it was paused for post-processing
            # Allow download to proceed
            sabnzbd.Downloader.resume_from_postproc()
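The alpha-side run() replaces the 2-second polling gets with a threading.Event: producers set() it whenever they enqueue work or change state, and the worker wait()s, then clears the flag once both queues turn out to be empty. The shape of that loop in a self-contained sketch:

import queue
import threading

work_available = threading.Event()
jobs: "queue.Queue[str]" = queue.Queue()
stop = False


def submit(job: str) -> None:
    jobs.put(job)
    work_available.set()  # wake the worker immediately


def worker() -> None:
    while not stop:
        work_available.wait()  # sleep until producers signal, no polling
        if stop:
            break
        try:
            job = jobs.get_nowait()
        except queue.Empty:
            # Spurious wakeup or race: clear the flag and go back to sleep
            work_available.clear()
            continue
        print("processing", job)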
@@ -419,13 +392,14 @@ def process_job(nzo: NzbObject) -> bool:
        par_error = True
        unpack_error = 1

    script = nzo.script
    logging.info(
        "Starting Post-Processing on %s => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s",
        filename,
        flag_repair,
        flag_unpack,
        nzo.delete,
        nzo.script,
        script,
        nzo.cat,
    )
@@ -518,10 +492,10 @@ def process_job(nzo: NzbObject) -> bool:

        # Check if this is an NZB-only download, if so redirect to queue
        # except when PP was Download-only
        nzb_list = None
        if flag_repair:
            nzb_list = process_nzb_only_download(tmp_workdir_complete, nzo)

            nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, nzo.priority)
        else:
            nzb_list = None
        if nzb_list:
            nzo.set_unpack_info("Download", T("Sent %s to queue") % nzb_list)
            cleanup_empty_directories(tmp_workdir_complete)
@@ -529,10 +503,9 @@ def process_job(nzo: NzbObject) -> bool:
            # Full cleanup including nzb's
            cleanup_list(tmp_workdir_complete, skip_nzb=False)

        # No further processing for NZB-only downloads
        script_ret = 0
        script_error = False
        if not nzb_list:
            script_ret = 0
            script_error = False
            # Give destination its final name
            if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
                if not all_ok:
@@ -584,11 +557,11 @@ def process_job(nzo: NzbObject) -> bool:
            deobfuscate.deobfuscate_subtitles(nzo, newfiles)

            # Run the user script
            if script_path := make_script_path(nzo.script):
            if script_path := make_script_path(script):
                # Set the current nzo status to "Ext Script...". Used in History
                nzo.status = Status.RUNNING
                nzo.set_action_line(T("Running script"), nzo.script)
                nzo.set_unpack_info("Script", T("Running user script %s") % nzo.script, unique=True)
                nzo.set_action_line(T("Running script"), script)
                nzo.set_unpack_info("Script", T("Running user script %s") % script, unique=True)
                script_log, script_ret = external_processing(
                    script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result
                )
@@ -601,7 +574,7 @@ def process_job(nzo: NzbObject) -> bool:
            else:
                script_line = T("Script exit code is %s") % script_ret
        elif not script_line:
            script_line = T("Ran %s") % nzo.script
            script_line = T("Ran %s") % script
        nzo.set_unpack_info("Script", script_line, unique=True)

        # Maybe bad script result should fail job
@@ -610,30 +583,30 @@ def process_job(nzo: NzbObject) -> bool:
            all_ok = False
            nzo.fail_msg = script_line

        # Email the results
        if cfg.email_endjob():
            if cfg.email_endjob() == 1 or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(
                    nzo.final_name,
                    nzo.cat,
                    all_ok,
                    workdir_complete,
                    nzo.bytes_downloaded,
                    nzo.fail_msg,
                    nzo.unpack_info,
                    nzo.script,
                    script_log,
                    script_ret,
                )

            if script_log and len(script_log.rstrip().split("\n")) > 1:
                # Can do this only now, otherwise it would show up in the email
                nzo.set_unpack_info(
                    "Script",
                    '%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_line, nzo.nzo_id, T("More")),
                    unique=True,
        # Email the results
        if not nzb_list and cfg.email_endjob():
            if cfg.email_endjob() == 1 or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
                emailer.endjob(
                    nzo.final_name,
                    nzo.cat,
                    all_ok,
                    workdir_complete,
                    nzo.bytes_downloaded,
                    nzo.fail_msg,
                    nzo.unpack_info,
                    script,
                    script_log,
                    script_ret,
                )

            if script_log and len(script_log.rstrip().split("\n")) > 1:
                # Can do this only now, otherwise it would show up in the email
                nzo.set_unpack_info(
                    "Script",
                    '%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_line, nzo.nzo_id, T("More")),
                    unique=True,
                )

        # Cleanup again, including NZB files
        if all_ok and os.path.isdir(workdir_complete):
            cleanup_list(workdir_complete, False)
|
||||
return True
|
||||
|
||||
|
||||
def prepare_extraction_path(nzo: NzbObject) -> tuple[str, str, Sorter, bool, Optional[str]]:
|
||||
def prepare_extraction_path(nzo: NzbObject) -> Tuple[str, str, Sorter, bool, Optional[str]]:
|
||||
"""Based on the information that we have, generate
|
||||
the extraction path and create the directory.
|
||||
Separated so it can be called from DirectUnpacker
|
||||
@@ -784,7 +757,7 @@ def prepare_extraction_path(nzo: NzbObject) -> tuple[str, str, Sorter, bool, Opt
|
||||
return tmp_workdir_complete, workdir_complete, file_sorter, not create_job_dir, marker_file
|
||||
|
||||
|
||||
def parring(nzo: NzbObject) -> tuple[bool, bool]:
|
||||
def parring(nzo: NzbObject) -> Tuple[bool, bool]:
|
||||
"""Perform par processing. Returns: (par_error, re_add)"""
|
||||
logging.info("Starting verification and repair of %s", nzo.final_name)
|
||||
par_error = False
|
||||
@@ -903,7 +876,7 @@ def try_sfv_check(nzo: NzbObject) -> Optional[bool]:
     return True


-def try_rar_check(nzo: NzbObject, rars: list[str]) -> bool:
+def try_rar_check(nzo: NzbObject, rars: List[str]) -> bool:
     """Attempt to verify set using the RARs
     Return True if verified, False when failed
     When setname is '', all RAR files will be used, otherwise only the matching one
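For context on `try_sfv_check`: it verifies a download against .sfv files when no par2 set is present. An SFV file is just lines of `filename crc32hex` (comments start with `;`). The sketch below is a generic, hedged illustration of that kind of check, not SABnzbd's implementation — both helper names are made up:

```python
import binascii
import os


def crc32_of_file(path: str) -> int:
    """Compute the CRC32 of a file in 1 MiB chunks."""
    crc = 0
    with open(path, "rb") as fp:
        while chunk := fp.read(1024 * 1024):
            crc = binascii.crc32(chunk, crc)
    return crc & 0xFFFFFFFF


def verify_against_sfv(sfv_path: str) -> bool:
    """Check every 'filename crc32hex' entry of an SFV file against disk."""
    base_dir = os.path.dirname(sfv_path)
    with open(sfv_path, "rt", errors="replace") as fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith(";"):
                continue
            filename, _, crc_hex = line.rpartition(" ")
            target = os.path.join(base_dir, filename.strip())
            if not os.path.exists(target) or crc32_of_file(target) != int(crc_hex, 16):
                return False
    return True
```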
@@ -1159,36 +1132,34 @@ def prefix(path: str, pre: str) -> str:
     return os.path.join(p, pre + d)


-def process_nzb_only_download(workdir: str, nzo: NzbObject) -> Optional[list[str]]:
+def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
     """Check if this job contains only NZB files,
     if so send to queue and remove if on clean-up list
     Returns list of processed NZB's
     """
-    if files := listdir_full(workdir):
-        for nzb_file in files:
-            if get_ext(nzb_file) != ".nzb":
-                return None
+    files = listdir_full(wdir)

-        # Process all NZB files
-        new_nzbname = nzo.final_name
-        for nzb_file in files:
-            # Determine name based on number of files
-            nzb_filename = get_filename(nzb_file)
-            if len(files) > 1:
-                new_nzbname = f"{nzo.final_name} - {nzb_filename}"
+    for nzb_file in files:
+        if get_ext(nzb_file) != ".nzb":
+            return None

-            process_single_nzb(
-                nzb_filename,
-                nzb_file,
-                pp=nzo.pp,
-                script=nzo.script,
-                cat=nzo.cat,
-                url=nzo.url,
-                priority=nzo.priority,
-                nzbname=new_nzbname,
-                dup_check=False,
-            )
-        return files
+    # For multiple NZBs, cannot use the current job name
+    if len(files) != 1:
+        nzbname = None
+
+    # Process all NZB files
+    for nzb_file in files:
+        process_single_nzb(
+            get_filename(nzb_file),
+            nzb_file,
+            pp=pp,
+            script=script,
+            cat=cat,
+            priority=priority,
+            dup_check=False,
+            nzbname=nzbname,
+        )
+    return files


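The behavioural difference in this hunk: the alpha branch kept the job name and suffixed each file name onto it (`"Job - file.nzb"`), while master reuses the job name only when there is exactly one NZB and otherwise passes `nzbname=None` so every file falls back to its own name. A hedged mini-demo of the master rule — `pick_nzbname` is a made-up helper, not SABnzbd code:

```python
from typing import List, Optional


def pick_nzbname(job_name: str, files: List[str]) -> Optional[str]:
    # Master-branch rule: a shared job name only makes sense for a single NZB
    return job_name if len(files) == 1 else None


assert pick_nzbname("MyJob", ["a.nzb"]) == "MyJob"
assert pick_nzbname("MyJob", ["a.nzb", "b.nzb"]) is None
```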
 def one_file_or_folder(folder: str) -> str:
@@ -1250,7 +1221,7 @@ def remove_samples(path: str):
         logging.info("Skipping sample-removal, false-positive")


-def rename_and_collapse_folder(oldpath: str, newpath: str, files: list[str]) -> list[str]:
+def rename_and_collapse_folder(oldpath: str, newpath: str, files: List[str]) -> List[str]:
     """Rename folder, collapsing when there's just a single subfolder
     oldpath --> newpath OR oldpath/subfolder --> newpath
     Modify list of filenames accordingly
@@ -1302,7 +1273,7 @@ def del_marker(path: str):
         logging.info("Traceback: ", exc_info=True)


-def remove_from_list(name: Optional[str], lst: list[str]):
+def remove_from_list(name: Optional[str], lst: List[str]):
     if name:
         for n in range(len(lst)):
             if lst[n].endswith(name):
@@ -337,11 +337,7 @@ class Scheduler:
             sabnzbd.downloader.unpause_all()
         sabnzbd.Downloader.set_paused_state(paused or paused_all)

-        # Handle pause_post state with proper notification
-        if pause_post and not sabnzbd.PostProcessor.paused:
-            sabnzbd.PostProcessor.pause()
-        elif not pause_post and sabnzbd.PostProcessor.paused:
-            sabnzbd.PostProcessor.resume()
+        sabnzbd.PostProcessor.paused = pause_post
         if speedlimit is not None:
             sabnzbd.Downloader.limit_speed(speedlimit)

@@ -510,11 +506,11 @@ def sort_schedules(all_events, now=None):


 def pp_pause():
-    sabnzbd.PostProcessor.pause()
+    sabnzbd.PostProcessor.paused = True


 def pp_resume():
-    sabnzbd.PostProcessor.resume()
+    sabnzbd.PostProcessor.paused = False


 def enable_server(server):

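Worth noting in the Scheduler hunks: master restores the post-processor state by assigning the `paused` flag directly, which is idempotent, whereas the alpha branch only invoked `pause()`/`resume()` on actual transitions so that notification hooks fire exactly once. A hedged sketch contrasting the two styles with a stand-in class — not the real `sabnzbd.PostProcessor`:

```python
class PostProcessorStub:
    """Hypothetical stand-in; the real PostProcessor is a worker thread."""

    def __init__(self):
        self.paused = False

    def pause(self):
        self.paused = True
        print("notify: paused")  # a transition hook fires only on the method path

    def resume(self):
        self.paused = False
        print("notify: resumed")


pp = PostProcessorStub()

# Master style: restore the flag directly; safe to repeat, no notifications.
pp.paused = True

# Alpha style: call the transition method only when the state actually changes.
desired = False
if desired and not pp.paused:
    pp.pause()
elif not desired and pp.paused:
    pp.resume()
```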
@@ -442,7 +442,7 @@ SKIN_TEXT = {
         "Select a mode and list all (un)wanted extensions. For example: <b>exe</b> or <b>exe, com</b>"
     ),
     "opt-sfv_check": TT("Enable SFV-based checks"),
-    "explain-sfv_check": TT("If no par2 files are available, use sfv files (if present) to verify files"),
+    "explain-sfv_check": TT("Do an extra verification based on SFV files."),
     "opt-script_can_fail": TT("User script can flag job as failed"),
     "explain-script_can_fail": TT(
         "When the user script returns a non-zero exit code, the job will be flagged as failed."
@@ -686,15 +686,10 @@ SKIN_TEXT = {
     "explain-pushbullet_device": TT("Device to which message should be sent"),  #: Pushbullet settings
     "opt-apprise_enable": TT("Enable Apprise notifications"),  #: Apprise settings
     "explain-apprise_enable": TT(
-        "Send notifications directly to any notification service you use.<br>"
-        "For example: Slack, Discord, Telegram, or any service from over 100 supported services!"
-    ),  #: Apprise settings
-    "opt-apprise_urls": TT("Use default Apprise URLs"),  #: Apprise settings
-    "explain-apprise_urls": TT(
-        "Apprise defines service connection information using URLs.<br>"
-        "Read the Apprise wiki how to define the URL for each service.<br>"
-        "Use a comma and/or space to identify more than one URL."
+        "Send notifications using Apprise to almost any notification service"
     ),  #: Apprise settings
+    "opt-apprise_urls": TT("Default Apprise URLs"),  #: Apprise settings
+    "explain-apprise_urls": TT("Use a comma and/or space to identify more than one URL."),  #: Apprise settings
     "explain-apprise_extra_urls": TT(
         "Override the default URLs for specific notification types below, if desired."
     ),  #: Apprise settings

@@ -25,7 +25,7 @@ import re
 import guessit
 from rebulk.match import MatchesDict
 from string import whitespace, punctuation
-from typing import Optional, Union
+from typing import Optional, Union, List, Tuple, Dict

 import sabnzbd
 from sabnzbd.filesystem import (
@@ -179,7 +179,7 @@ class Sorter:
         self.get_showdescriptions()
         self.get_date()

-    def format_series_numbers(self, numbers: Union[int, list[int]], info_name: str):
+    def format_series_numbers(self, numbers: Union[int, List[int]], info_name: str):
         """Format the numbers in both plain and alternative (zero-padded) format and set as showinfo"""
         # Guessit returns multiple episodes or seasons as a list of integers, single values as int
         if isinstance(numbers, int):
@@ -283,7 +283,7 @@ class Sorter:
         if ends_in_file(sort_string):
             extension = True
             if sort_string.endswith(".%ext"):
-                sort_string = sort_string.removesuffix(".%ext")  # Strip '.%ext' off the end; other %ext may remain
+                sort_string = sort_string[:-5]  # Strip '.%ext' off the end; other %ext may remain in sort_string
         if self.is_season_pack:
             # Create a record of the filename part of the sort_string
             _, self.season_pack_setname = os.path.split(sort_string)
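The `removesuffix`/slice pair above is another Python-version concession: `str.removesuffix` arrived in Python 3.9, while the `[:-5]` slice works on any version and is safe only because of the preceding `endswith(".%ext")` guard. The two forms are equivalent here:

```python
# Representative sort string; the exact placeholders don't matter for the point.
sort_string = "%sn/Season %s/%sn - S%0sE%0e.%ext"

if sort_string.endswith(".%ext"):
    modern = sort_string.removesuffix(".%ext")  # Python 3.9+
    portable = sort_string[:-5]                 # any version; guarded by endswith()
    assert modern == portable
```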
@@ -417,7 +417,7 @@ class Sorter:
         # The normpath function translates "" to "." which results in an incorrect path
         return os.path.normpath(path) if path else path

-    def _rename_season_pack(self, files: list[str], base_path: str, all_job_files: list[str] = []) -> bool:
+    def _rename_season_pack(self, files: List[str], base_path: str, all_job_files: List[str] = []) -> bool:
         success = False
         for f in files:
             f_name, f_ext = os.path.splitext(os.path.basename(f))
@@ -476,7 +476,7 @@ class Sorter:
         )
         return success

-    def _rename_sequential(self, sequential_files: dict[str, str], base_path: str) -> bool:
+    def _rename_sequential(self, sequential_files: Dict[str, str], base_path: str) -> bool:
         success = False
         for index, f in sequential_files.items():
             filepath = self._to_filepath(f, base_path)
@@ -515,7 +515,7 @@ class Sorter:
             and os.stat(filepath).st_size >= self.rename_limit
         )

-    def rename(self, files: list[str], base_path: str) -> tuple[str, bool]:
+    def rename(self, files: List[str], base_path: str) -> Tuple[str, bool]:
         if not self.rename_files:
             return move_to_parent_directory(base_path)

@@ -607,7 +607,7 @@ def ends_in_file(path: str) -> bool:
     return bool(RE_ENDEXT.search(path) or RE_ENDFN.search(path))


-def move_to_parent_directory(workdir: str) -> tuple[str, bool]:
+def move_to_parent_directory(workdir: str) -> Tuple[str, bool]:
     """Move all files under 'workdir' into 'workdir/..'"""
     # Determine 'folder'/..
     workdir = os.path.abspath(os.path.normpath(workdir))
@@ -658,7 +658,7 @@ def guess_what(name: str) -> MatchesDict:

     if digit_fix:
         # Unfix the title
-        guess["title"] = guess.get("title", "").removeprefix(digit_fix)
+        guess["title"] = guess.get("title", "")[len(digit_fix) :]

     # Handle weird anime episode notation, that results in the episode number ending up as the episode title
     if (
@@ -696,7 +696,7 @@ def guess_what(name: str) -> MatchesDict:
     return guess


-def path_subst(path: str, mapping: list[tuple[str, str]]) -> str:
+def path_subst(path: str, mapping: List[Tuple[str, str]]) -> str:
     """Replace the sort string elements in the path with the real values provided by the mapping;
     non-elements are copied verbatim."""
     # Added ugly hack to prevent %ext from being masked by %e
@@ -719,7 +719,7 @@ def path_subst(path: str, mapping: list[tuple[str, str]]) -> str:

 def get_titles(
     nzo: Optional[NzbObject], guess: Optional[MatchesDict], jobname: str, titleing: bool = False
-) -> tuple[str, str, str]:
+) -> Tuple[str, str, str]:
     """Get the title from NZB metadata or jobname, and return it in various formats. Formatting
     mostly deals with working around quirks of Python's str.title(). NZB metadata is used as-is,
     further processing done only for info obtained from guessit or the jobname."""
@@ -779,7 +779,7 @@ def replace_word(word_input: str, one: str, two: str) -> str:
     return word_input


-def get_descriptions(nzo: Optional[NzbObject], guess: Optional[MatchesDict]) -> tuple[str, str, str]:
+def get_descriptions(nzo: Optional[NzbObject], guess: Optional[MatchesDict]) -> Tuple[str, str, str]:
     """Try to get an episode title or similar description from the NZB metadata or jobname, e.g.
     'Download This' in Show.S01E23.Download.This.1080p.HDTV.x264 and return multiple formats"""
     ep_name = None
@@ -836,7 +836,7 @@ def strip_path_elements(path: str) -> str:
     return "\\\\" + path if is_unc else path


-def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: Optional[list[str]] = None):
+def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: Optional[List[str]] = None):
     """Rename all other files in the 'folder' hierarchy after 'name'
     and move them to the root of 'folder'.
     Files having extension 'skip_ext' will be moved, but not renamed.
@@ -921,7 +921,7 @@ def eval_sort(sort_string: str, job_name: str, multipart_label: str = "") -> Opt
     return sorted_path


-def check_for_multiple(files: list[str]) -> Optional[dict[str, str]]:
+def check_for_multiple(files: List[str]) -> Optional[Dict[str, str]]:
     """Return a dictionary of a single set of files that look like parts of
     a multi-part post. Takes a limited set of indicators from guessit into
     consideration and only accepts numerical sequences. The files argument

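For context on the `guess_what` hunks: `guessit` parses release names into structured matches, which `Sorter` then post-processes. A hedged usage example — the exact keys returned depend on the guessit version:

```python
import guessit

guess = guessit.guessit("Show.S01E23.Download.This.1080p.HDTV.x264")
# Typical keys include: title, season, episode, episode_title, screen_size, ...
print(guess.get("title"), guess.get("season"), guess.get("episode"))
```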
@@ -32,7 +32,7 @@ from http.client import IncompleteRead, HTTPResponse
 from mailbox import Message
 from threading import Thread
 import base64
-from typing import Optional, Union, Any
+from typing import Tuple, Optional, Union, List, Dict, Any

 import sabnzbd
 from sabnzbd.constants import (
@@ -57,7 +57,7 @@ from sabnzbd.nzbstuff import NzbObject, NzbRejected, NzbRejectToHistory
 class URLGrabber(Thread):
     def __init__(self):
         super().__init__()
-        self.queue: queue.Queue[tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
+        self.queue: queue.Queue[Tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
         self.shutdown = False

     def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
@@ -417,9 +417,9 @@ def add_url(
     priority: Optional[Union[int, str]] = None,
     nzbname: Optional[str] = None,
     password: Optional[str] = None,
-    nzo_info: Optional[dict[str, Any]] = None,
+    nzo_info: Optional[Dict[str, Any]] = None,
     dup_check: bool = True,
-) -> tuple[AddNzbFileResult, list[str]]:
+) -> Tuple[AddNzbFileResult, List[str]]:
     """Add NZB based on a URL, attributes optional"""
     if not url.lower().startswith("http"):
         return AddNzbFileResult.NO_FILES_FOUND, []

@@ -7,9 +7,10 @@ Functions to check if the path filesystem uses FAT
 import sys
 import os
 import subprocess
+from typing import List


-def getcmdoutput(cmd: list[str]) -> list[str]:
+def getcmdoutput(cmd: List[str]) -> List[str]:
     """execute cmd, and return a list of output lines"""
     subprocess_kwargs = {
         "bufsize": 0,
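A hedged sketch of what a `getcmdoutput` helper along these lines does; the real function assembles a `subprocess_kwargs` dict (the `"bufsize": 0` above) and may add platform-specific flags:

```python
import subprocess
from typing import List


def getcmdoutput_sketch(cmd: List[str]) -> List[str]:
    """Run cmd and return its stdout as a list of lines (assumption-laden sketch)."""
    result = subprocess.run(cmd, capture_output=True, text=True)
    return result.stdout.splitlines()
```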
@@ -7,7 +7,7 @@ import sys
 import logging
 import time

-BUFFERSIZE = 16 * 1024 * 1024
+_DUMP_DATA_SIZE = 10 * 1024 * 1024


 def diskspeedmeasure(dirname: str) -> float:
@@ -16,57 +16,39 @@ def diskspeedmeasure(dirname: str) -> float:
     Then divide bytes written by time passed
     In case of problems (ie non-writable dir or file), return 0.0
     """
-    maxtime = 1  # sec
+    dump_data = os.urandom(_DUMP_DATA_SIZE)
+    start = time.time()
+    maxtime = 0.5  # sec
     total_written = 0
     filename = os.path.join(dirname, "outputTESTING.txt")

-    # Prepare the whole buffer now for better write performance later
-    # This is done before timing starts to exclude buffer creation from measurement
-    buffer = os.urandom(BUFFERSIZE)
-
     try:
         # Use low-level I/O
-        fp_testfile = os.open(
-            filename,
-            os.O_CREAT | os.O_WRONLY | getattr(os, "O_BINARY", 0) | getattr(os, "O_SYNC", 0),
-            0o777,
-        )
-
-        overall_start = time.perf_counter()
-        maxtime = overall_start + 1
-        total_time = 0.0
+        try:
+            fp_testfile = os.open(filename, os.O_CREAT | os.O_WRONLY | os.O_BINARY, 0o777)
+        except AttributeError:
+            fp_testfile = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777)

-        # Start looping
-        for i in range(1, 5):
-            # Stop writing next buffer block, if time exceeds limit
-            if time.perf_counter() >= maxtime:
-                break
-            # Prepare the data chunk outside of timing
-            data_chunk = buffer * (i**2)
-
-            # Only measure the actual write and sync operations
-            write_start = time.perf_counter()
-            total_written += os.write(fp_testfile, data_chunk)
+        total_time = 0.0
+        while total_time < maxtime:
+            start = time.time()
+            os.write(fp_testfile, dump_data)
             os.fsync(fp_testfile)
-            total_time += time.perf_counter() - write_start
+            total_time += time.time() - start
+            total_written += _DUMP_DATA_SIZE

         # Have to use low-level close
         os.close(fp_testfile)
         # Remove the file
         os.remove(filename)

     except OSError:
         # Could not write, so ... report 0.0
         logging.debug("Failed to measure disk speed on %s", dirname)
         return 0.0

     megabyte_per_second = round(total_written / total_time / 1024 / 1024, 1)
-    logging.debug(
-        "Disk speed of %s = %.2f MB/s (in %.2f seconds)",
-        dirname,
-        megabyte_per_second,
-        time.perf_counter() - overall_start,
-    )
+    logging.debug("Disk speed of %s = %.2f MB/s (in %.2f seconds)", dirname, megabyte_per_second, time.time() - start)
     return megabyte_per_second


@@ -86,7 +68,7 @@ if __name__ == "__main__":
     try:
         SPEED = max(diskspeedmeasure(DIRNAME), diskspeedmeasure(DIRNAME))
         if SPEED:
-            print("Disk writing speed: %.2f MBytes per second" % SPEED)
+            print("Disk writing speed: %.2f Mbytes per second" % SPEED)
         else:
            print("No measurement possible. Check that directory is writable.")
     except Exception:

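The master-branch approach above is simple: keep writing a fixed 10 MiB random block with an `fsync` after each write until half a second of write time accumulates, then divide bytes by seconds. A self-contained hedged sketch of that strategy (names are made up; chunk size and cutoff mirror the diff):

```python
import os
import time

CHUNK = os.urandom(10 * 1024 * 1024)  # 10 MiB, prepared outside the timed loop
MAXTIME = 0.5  # seconds of accumulated write time


def measure_write_speed(dirname: str) -> float:
    filename = os.path.join(dirname, "speedtest.tmp")
    total_written = 0
    total_time = 0.0
    fd = os.open(filename, os.O_CREAT | os.O_WRONLY)
    try:
        while total_time < MAXTIME:
            start = time.time()
            os.write(fd, CHUNK)
            os.fsync(fd)  # force data to disk so the device is measured, not the cache
            total_time += time.time() - start
            total_written += len(CHUNK)
    finally:
        os.close(fd)
        os.remove(filename)
    return total_written / total_time / 1024 / 1024  # MB/s
```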
@@ -8,6 +8,7 @@ Note: extension always contains a leading dot
 import puremagic
 import os
 import sys
+from typing import List, Tuple
 from sabnzbd.filesystem import get_ext, RAR_RE
 import sabnzbd.cfg as cfg

@@ -259,7 +260,7 @@ ALL_EXT = tuple(set(POPULAR_EXT + DOWNLOAD_EXT))
 ALL_EXT = tuple(["." + i for i in ALL_EXT])


-def all_extensions() -> tuple[str, ...]:
+def all_extensions() -> Tuple[str, ...]:
     """returns tuple with ALL (standard + userdef) extensions (including leading dot in extension)"""
     user_defined_extensions = tuple(["." + i for i in cfg.ext_rename_ignore()])
     return ALL_EXT + user_defined_extensions
@@ -271,7 +272,7 @@ def has_popular_extension(file_path: str) -> bool:
     return file_extension in all_extensions() or RAR_RE.match(file_extension)


-def all_possible_extensions(file_path: str) -> list[str]:
+def all_possible_extensions(file_path: str) -> List[str]:
     """returns a list with all possible extensions (with leading dot) for given file_path as reported by puremagic"""
     extension_list = []
     for i in puremagic.magic_file(file_path):
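`all_possible_extensions` builds on puremagic's signature-based detection. A hedged usage example — `puremagic.magic_file()` returns match objects whose `extension` attribute may or may not carry a leading dot, hence the normalization; this is an illustration, not SABnzbd's loop body:

```python
import puremagic
from typing import List


def possible_extensions(path: str) -> List[str]:
    extensions = []
    for match in puremagic.magic_file(path):
        ext = match.extension
        if ext:
            extensions.append(ext if ext.startswith(".") else "." + ext)
    return extensions
```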
@@ -19,7 +19,9 @@
 """
 sabnzbd.utils.rarvolinfo - Find out volume number and/or original extension of a rar file. Useful with obfuscated files
 """
+import logging
+import os

 import rarfile

@@ -161,7 +161,6 @@ class SysTrayIconThread(Thread):
             pass

     def restart(self, hwnd, msg, wparam, lparam):
         self.notify_id = None
         self.refresh_icon()
         return True

@@ -6,5 +6,5 @@
 # You MUST use double quotes (so " and not ')
 # Do not forget to update the appdata file for every major release!

-__version__ = "4.6.0Alpha2"
-__baseline__ = "unknown"
+__version__ = "4.5.5"
+__baseline__ = "a61a5539a7e0e0dc1f9ae140222436ba8f9fe679"

@@ -27,6 +27,7 @@ from selenium import webdriver
 from selenium.webdriver.chrome.options import Options as ChromeOptions
 from warnings import warn

+from sabnzbd.constants import DEF_INI_FILE
 from tests.testhelper import *


@@ -45,38 +45,32 @@ ARTICLE_INFO = re.compile(
 YENC_ESCAPE = [0x00, 0x0A, 0x0D, ord("="), ord(".")]


-class NewsServerSession:
-    def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
-        self.reader = reader
-        self.writer = writer
+class NewsServerProtocol(asyncio.Protocol):
+    def __init__(self):
+        self.transport = None
+        self.connected = False
+        self.in_article = False
+        super().__init__()

-    async def run(self):
-        self.writer.write(b"200 Welcome (SABNews)\r\n")
-        await self.writer.drain()
+    def connection_made(self, transport):
+        logging.info("Connection from %s", transport.get_extra_info("peername"))
+        self.transport = transport
+        self.connected = True
+        self.transport.write(b"200 Welcome (SABNews)\r\n")

-        try:
-            while not self.reader.at_eof():
-                message = await self.reader.readuntil(b"\r\n")
-                logging.debug("Data received: %s", message.strip())
-                await self.handle_command(message)
-        except (ConnectionResetError, asyncio.IncompleteReadError):
-            logging.debug("Client closed connection")
+    def data_received(self, message):
+        logging.debug("Data received: %s", message.strip())

-    async def handle_command(self, message: bytes):
-        """Handle basic NNTP commands, \r\n is already stripped."""
+        # Handle basic commands
         if message.startswith(b"QUIT"):
-            await self.close_connection()
-            return
-
-        if message.startswith((b"ARTICLE", b"BODY")):
+            self.close_connection()
+        elif message.startswith((b"ARTICLE", b"BODY")):
             parsed_message = ARTICLE_INFO.search(message)
-            await self.serve_article(parsed_message)
-            return
+            self.serve_article(parsed_message)

-        self.writer.write(b"500 Unknown command\r\n")
-        await self.writer.drain()
+        # self.transport.write(data)

-    async def serve_article(self, parsed_message):
+    def serve_article(self, parsed_message):
         # Check if we parsed everything
         try:
             message_id = parsed_message.group("message_id")
@@ -87,37 +81,34 @@ class NewsServerSession:
             size = int(parsed_message.group("size"))
         except (AttributeError, ValueError):
             logging.warning("Can't parse article information")
-            self.writer.write(b"430 No Such Article Found (bad message-id)\r\n")
-            await self.writer.drain()
+            self.transport.write(b"430 No Such Article Found (bad message-id)\r\n")
             return

         # Check if file exists
         if not os.path.exists(file):
             logging.warning("File not found: %s", file)
-            self.writer.write(b"430 No Such Article Found (no file on disk)\r\n")
-            await self.writer.drain()
+            self.transport.write(b"430 No Such Article Found (no file on disk)\r\n")
             return

         # Check if sizes are valid
         file_size = os.path.getsize(file)
         if start + size > file_size:
             logging.warning("Invalid start/size attributes")
-            self.writer.write(b"430 No Such Article Found (invalid start/size attributes)\r\n")
-            await self.writer.drain()
+            self.transport.write(b"430 No Such Article Found (invalid start/size attributes)\r\n")
             return

         logging.debug("Serving %s" % message_id)

         # File is found, send headers
-        self.writer.write(b"222 0 %s\r\n" % message_id)
-        self.writer.write(b"Message-ID: %s\r\n" % message_id)
-        self.writer.write(b'Subject: "%s"\r\n\r\n' % file_base.encode("utf-8"))
+        self.transport.write(b"222 0 %s\r\n" % message_id)
+        self.transport.write(b"Message-ID: %s\r\n" % message_id)
+        self.transport.write(b'Subject: "%s"\r\n\r\n' % file_base.encode("utf-8"))

         # Write yEnc headers
-        self.writer.write(
+        self.transport.write(
             b"=ybegin part=%d line=128 size=%d name=%s\r\n" % (part, file_size, file_base.encode("utf-8"))
         )
-        self.writer.write(b"=ypart begin=%d end=%d\r\n" % (start + 1, start + size))
+        self.transport.write(b"=ypart begin=%d end=%d\r\n" % (start + 1, start + size))

         with open(file, "rb") as inp_file:
             inp_file.seek(start)
@@ -125,31 +116,24 @@ class NewsServerSession:

         # Encode data
         output_string, crc = sabctools.yenc_encode(inp_buffer)
-        self.writer.write(output_string)
+        self.transport.write(output_string)

         # Write footer
-        self.writer.write(b"\r\n=yend size=%d part=%d pcrc32=%08x\r\n" % (size, part, crc))
-        self.writer.write(b".\r\n")
-        await self.writer.drain()
+        self.transport.write(b"\r\n=yend size=%d part=%d pcrc32=%08x\r\n" % (size, part, crc))
+        self.transport.write(b".\r\n")

-    async def close_connection(self):
+    def close_connection(self):
         logging.debug("Closing connection")
-        self.writer.write(b"205 Connection closing\r\n")
-        await self.writer.drain()
-        self.writer.close()
-        await self.writer.wait_closed()
-
-
-async def connection_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
-    session = NewsServerSession(reader, writer)
-    await session.run()
+        self.transport.write(b"205 Connection closing\r\n")
+        self.transport.close()


 async def serve_sabnews(hostname, port):
     # Start server
     logging.info("Starting SABNews on %s:%d", hostname, port)

-    server = await asyncio.start_server(connection_handler, hostname, port)
+    loop = asyncio.get_running_loop()
+    server = await loop.create_server(lambda: NewsServerProtocol(), hostname, port)
     async with server:
         await server.serve_forever()

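This hunk moves the test news server from asyncio's high-level streams API (`StreamReader`/`StreamWriter` plus `asyncio.start_server`) back to the callback-based `asyncio.Protocol` with `loop.create_server`. A minimal, self-contained hedged sketch of the Protocol style — not SABNews itself, and the port is arbitrary:

```python
import asyncio


class GreeterProtocol(asyncio.Protocol):
    def connection_made(self, transport):
        # Called once per connection; transport.write() is non-blocking
        self.transport = transport
        transport.write(b"200 Welcome\r\n")

    def data_received(self, data):
        # Called whenever bytes arrive; no explicit read loop needed
        if data.startswith(b"QUIT"):
            self.transport.write(b"205 Bye\r\n")
            self.transport.close()


async def main():
    loop = asyncio.get_running_loop()
    server = await loop.create_server(GreeterProtocol, "127.0.0.1", 11190)
    async with server:
        await server.serve_forever()


# asyncio.run(main())  # left commented so the sketch stays importable
```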
@@ -23,6 +23,7 @@ from tests.testhelper import *
 import shutil
 import zipfile
 import os
+from typing import List

 import sabnzbd.cfg
 from sabnzbd.constants import (
@@ -86,7 +87,7 @@ class TestConfig:
         return zip_buffer.getvalue()

     @staticmethod
-    def create_and_verify_backup(admin_dir: str, must_haves: list[str]):
+    def create_and_verify_backup(admin_dir: str, must_haves: List[str]):
         # Create the backup
         config_backup_path = config.create_config_backup()
         assert os.path.exists(config_backup_path)

@@ -68,10 +68,7 @@ class TestWiki:
         config_diff = {}
         for url in ("general", "switches", "special"):
             config_tree = lxml.html.fromstring(
-                requests.get(
-                    "http://%s:%s/config/%s/" % (SAB_HOST, SAB_PORT, url),
-                    headers={"User-Agent": "SABnzbd/%s" % sabnzbd.__version__},
-                ).content
+                requests.get("http://%s:%s/config/%s/" % (SAB_HOST, SAB_PORT, url)).content
             )
             # Have to remove some decorating stuff and empty values
             config_labels = [
@@ -82,10 +79,7 @@ class TestWiki:
         # Parse the version info to get the right Wiki version
         version = re.search(r"(\d+\.\d+)\.(\d+)([a-zA-Z]*)(\d*)", sabnzbd.__version__).group(1)
         wiki_tree = lxml.html.fromstring(
-            requests.get(
-                "https://sabnzbd.org/wiki/configuration/%s/%s" % (version, url),
-                headers={"User-Agent": "SABnzbd/%s" % sabnzbd.__version__},
-            ).content
+            requests.get("https://sabnzbd.org/wiki/configuration/%s/%s" % (version, url)).content
         )

         # Special-page needs different label locator
@@ -21,12 +21,10 @@ tests.test_decoder- Testing functions in decoder.py
 import binascii
 import os
 import pytest
-from io import BytesIO

 from random import randint
 from unittest import mock

-import sabctools
 import sabnzbd.decoder as decoder
 from sabnzbd.nzbstuff import Article

@@ -113,7 +111,7 @@ class TestUuDecoder:
             result.append(END_DATA)

         # Signal the end of the message with a dot on a line of its own
-        data.append(b".\r\n")
+        data.append(b".")

         # Join the data with \r\n line endings, just like we get from socket reads
         data = b"\r\n".join(data)
@@ -122,26 +120,22 @@ class TestUuDecoder:

         return article, bytearray(data), result

-    @staticmethod
-    def _response(raw_data: bytes) -> sabctools.NNTPResponse:
-        dec = sabctools.Decoder(len(raw_data))
-        reader = BytesIO(raw_data)
-        reader.readinto(dec)
-        dec.process(len(raw_data))
-        return next(dec)
-
     def test_no_data(self):
         with pytest.raises(decoder.BadUu):
             assert decoder.decode_uu(None, None)

     @pytest.mark.parametrize(
         "raw_data",
         [
-            b"222 0 <foo@bar>\r\n.\r\n",
-            b"222 0 <foo@bar>\r\n\r\n.\r\n",
-            b"222 0 <foo@bar>\r\nfoobar\r\n.\r\n",  # Plenty of list items, but (too) few actual lines
-            b"222 0 <foo@bar>\r\nX-Too-Short: yup\r\n.\r\n",
+            b"",
+            b"\r\n\r\n",
+            b"foobar\r\n",  # Plenty of list items, but (too) few actual lines
+            b"222 0 <artid@woteva>\r\nX-Too-Short: yup\r\n",
         ],
     )
     def test_short_data(self, raw_data):
         with pytest.raises(decoder.BadUu):
-            assert decoder.decode_uu(Article("foo@bar", 4321, None), self._response(raw_data))
+            assert decoder.decode_uu(None, bytearray(raw_data))

     @pytest.mark.parametrize(
         "raw_data",
@@ -164,8 +158,7 @@ class TestUuDecoder:
         with pytest.raises(decoder.BadUu):
             raw_data = bytearray(raw_data)
             raw_data.extend(filler)
-            raw_data.extend(b".\r\n")
-            assert decoder.decode_uu(article, self._response(raw_data))
+            assert decoder.decode_uu(article, raw_data)

     @pytest.mark.parametrize("insert_empty_line", [True, False])
     @pytest.mark.parametrize("insert_excess_empty_lines", [True, False])
@@ -201,7 +194,7 @@ class TestUuDecoder:
             insert_dot_stuffing_line,
             begin_line,
         )
-        assert decoder.decode_uu(article, self._response(raw_data)) == expected_result
+        assert decoder.decode_uu(article, raw_data) == expected_result
         assert article.nzf.filename_checked

     @pytest.mark.parametrize("insert_empty_line", [True, False])
@@ -212,7 +205,7 @@ class TestUuDecoder:
         decoded_data = expected_data = b""
         for part in ("begin", "middle", "middle", "end"):
             article, data, result = self._generate_msg_part(part, insert_empty_line, False, False, True)
-            decoded_data += decoder.decode_uu(article, self._response(data))
+            decoded_data += decoder.decode_uu(article, data)
             expected_data += result

         # Verify results
@@ -230,6 +223,4 @@ class TestUuDecoder:
         article.lowest_partnum = False
         filler = b"\r\n".join(VALID_UU_LINES[:4]) + b"\r\n"
         with pytest.raises(decoder.BadData):
-            assert decoder.decode_uu(
-                article, self._response(bytearray(b"222 0 <foo@bar>\r\n" + filler + bad_data + b"\r\n.\r\n"))
-            )
+            assert decoder.decode_uu(article, bytearray(b"222 0 <foo@bar>\r\n" + filler + bad_data + b"\r\n"))

@@ -25,6 +25,7 @@ import sys

 from math import ceil
 from random import sample
+from typing import List

 from tavern.core import run
 from warnings import warn
@@ -171,7 +172,7 @@ class ApiTestFunctions:
         self._get_api_json("queue", extra_args={"name": "purge", "del_files": del_files})
         assert len(self._get_api_json("queue")["queue"]["slots"]) == 0

-    def _get_files(self, nzo_id: str) -> list[str]:
+    def _get_files(self, nzo_id: str) -> List[str]:
         files_json = self._get_api_json("get_files", extra_args={"value": nzo_id})
         assert "files" in files_json
         return [file["nzf_id"] for file in files_json["files"]]

@@ -76,7 +76,7 @@ def get_local_ip(protocol_version: IPProtocolVersion) -> Optional[str]:
     sending any traffic but already prefills what would be the sender ip address.
     """
     s: Optional[socket.socket] = None
-    address_to_connect_to: Optional[tuple[str, int]] = None
+    address_to_connect_to: Optional[Tuple[str, int]] = None
     if protocol_version == IPProtocolVersion.IPV4:
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
         # Google DNS IPv4
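The docstring above describes the UDP "connect" trick: connecting a datagram socket sends no packets, but makes the OS choose an outgoing interface and source address, which `getsockname()` then reveals. A minimal hedged sketch of the idea — 8.8.8.8 is just a well-known routable address, and the helper name is made up:

```python
import socket
from typing import Optional


def local_ipv4() -> Optional[str]:
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 53))  # no traffic is actually sent for UDP connect
        return s.getsockname()[0]
    except OSError:
        return None
    finally:
        s.close()
```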
@@ -257,137 +257,3 @@ class TestPostProc:
         assert tmp_workdir_complete == workdir_complete

         _func()
-
-
-class TestNzbOnlyDownload:
-    @mock.patch("sabnzbd.postproc.process_single_nzb")
-    @mock.patch("sabnzbd.postproc.listdir_full")
-    def test_process_nzb_only_download_single_nzb(self, mock_listdir, mock_process_single_nzb):
-        """Test process_nzb_only_download with a single NZB file"""
-        # Setup mock NZO
-        fake_nzo = mock.Mock()
-        fake_nzo.final_name = "TestDownload"
-        fake_nzo.pp = 3
-        fake_nzo.script = "test_script.py"
-        fake_nzo.cat = "movies"
-        fake_nzo.url = "http://example.com/test.nzb"
-        fake_nzo.priority = 0
-
-        # Mock single NZB file
-        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
-        nzb_file = os.path.join(workdir, "test.nzb")
-        mock_listdir.return_value = [nzb_file]
-
-        # Call the function
-        result = postproc.process_nzb_only_download(workdir, fake_nzo)
-
-        # Verify result
-        assert result == [nzb_file]
-
-        # Verify process_single_nzb was called with correct arguments
-        mock_process_single_nzb.assert_called_once_with(
-            "test.nzb",
-            nzb_file,
-            pp=3,
-            script="test_script.py",
-            cat="movies",
-            url="http://example.com/test.nzb",
-            priority=0,
-            nzbname="TestDownload",
-            dup_check=False,
-        )
-
-    @mock.patch("sabnzbd.postproc.process_single_nzb")
-    @mock.patch("sabnzbd.postproc.listdir_full")
-    def test_process_nzb_only_download_multiple_nzbs(self, mock_listdir, mock_process_single_nzb):
-        """Test process_nzb_only_download with multiple NZB files"""
-        # Setup mock NZO
-        fake_nzo = mock.Mock()
-        fake_nzo.final_name = "TestDownload"
-        fake_nzo.pp = 2
-        fake_nzo.script = None
-        fake_nzo.cat = "tv"
-        fake_nzo.url = "http://example.com/test.nzb"
-        fake_nzo.priority = 1
-
-        # Mock multiple NZB files
-        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
-        first_nzb = os.path.join(workdir, "first.nzb")
-        second_nzb = os.path.join(workdir, "second.nzb")
-        mock_listdir.return_value = [first_nzb, second_nzb]
-
-        # Call the function
-        result = postproc.process_nzb_only_download(workdir, fake_nzo)
-
-        # Verify result
-        assert result == [first_nzb, second_nzb]
-
-        # Verify process_single_nzb was called twice with correct arguments
-        assert mock_process_single_nzb.call_count == 2
-        mock_process_single_nzb.assert_any_call(
-            "first.nzb",
-            first_nzb,
-            pp=2,
-            script=None,
-            cat="tv",
-            url="http://example.com/test.nzb",
-            priority=1,
-            nzbname="TestDownload - first.nzb",
-            dup_check=False,
-        )
-        mock_process_single_nzb.assert_any_call(
-            "second.nzb",
-            second_nzb,
-            pp=2,
-            script=None,
-            cat="tv",
-            url="http://example.com/test.nzb",
-            priority=1,
-            nzbname="TestDownload - second.nzb",
-            dup_check=False,
-        )
-
-    @mock.patch("sabnzbd.postproc.process_single_nzb")
-    @mock.patch("sabnzbd.postproc.listdir_full")
-    def test_process_nzb_only_download_mixed_files(self, mock_listdir, mock_process_single_nzb):
-        """Test process_nzb_only_download with mixed file types returns None"""
-        # Setup mock NZO
-        fake_nzo = mock.Mock()
-        fake_nzo.final_name = "TestDownload"
-
-        # Mock mixed files (NZB and non-NZB)
-        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
-        mock_listdir.return_value = [
-            os.path.join(workdir, "test.nzb"),
-            os.path.join(workdir, "readme.txt"),
-        ]
-
-        # Call the function
-        result = postproc.process_nzb_only_download(workdir, fake_nzo)
-
-        # Verify result is None (not NZB-only)
-        assert result is None
-
-        # Verify process_single_nzb was NOT called
-        mock_process_single_nzb.assert_not_called()
-
-    @mock.patch("sabnzbd.postproc.process_single_nzb")
-    @mock.patch("sabnzbd.postproc.listdir_full")
-    def test_process_nzb_only_download_empty_directory(self, mock_listdir, mock_process_single_nzb):
-        """Test process_nzb_only_download with empty directory returns None"""
-        # Setup mock NZO
-        fake_nzo = mock.Mock()
-        fake_nzo.final_name = "TestDownload"
-
-        # Mock empty directory
-        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
-        mock_listdir.return_value = []
-
-        # Call the function
-        result = postproc.process_nzb_only_download(workdir, fake_nzo)
-
-        # Verify result is None (no files)
-        assert result is None
-
-        # Verify process_single_nzb was NOT called
-        mock_process_single_nzb.assert_not_called()

@@ -22,7 +22,7 @@ import io
 import os
 import time
 from http.client import RemoteDisconnected
-from typing import BinaryIO, Optional
+from typing import BinaryIO, Optional, Dict, List

 import pytest
 from random import choice, randint
@@ -149,13 +149,13 @@ def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}):
     return r.text


-def create_nzb(nzb_dir: str, metadata: Optional[dict[str, str]] = None) -> str:
+def create_nzb(nzb_dir: str, metadata: Optional[Dict[str, str]] = None) -> str:
     """Create NZB from directory using SABNews"""
     nzb_dir_full = os.path.join(SAB_DATA_DIR, nzb_dir)
     return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full, metadata=metadata)


-def create_and_read_nzb_fp(nzbdir: str, metadata: Optional[dict[str, str]] = None) -> BinaryIO:
+def create_and_read_nzb_fp(nzbdir: str, metadata: Optional[Dict[str, str]] = None) -> BinaryIO:
     """Create NZB, return data and delete file"""
     # Create NZB-file to import
     nzb_path = create_nzb(nzbdir, metadata)
@@ -332,7 +332,7 @@ class DownloadFlowBasics(SABnzbdBaseTest):
         self.selenium_wrapper(self.driver.find_element, By.CSS_SELECTOR, ".btn.btn-success").click()
         self.no_page_crash()

-    def download_nzb(self, nzb_dir: str, file_output: list[str], dir_name_as_job_name: bool = False):
+    def download_nzb(self, nzb_dir: str, file_output: List[str], dir_name_as_job_name: bool = False):
         # Verify if the server was setup before we start
         self.is_server_configured()
