Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2025-12-24 16:19:31 -05:00)

Compare commits: 423 commits
.github/renovate.json (vendored, 3 changed lines)

@@ -23,8 +23,7 @@
"jaraco.collections",
"sabctools",
"paho-mqtt",
"werkzeug",
"tavern"
"werkzeug"
],
"packageRules": [
{
.github/workflows/build_release.yml (vendored, 76 changed lines)

@@ -8,26 +8,16 @@ env:
jobs:
build_windows:
name: Build Windows binary (${{ matrix.architecture }})
strategy:
fail-fast: false
matrix:
include:
- architecture: x64
runs-on: windows-2022
- architecture: arm64
runs-on: windows-11-arm
runs-on: ${{ matrix.runs-on }}
timeout-minutes: 15
name: Build Windows binary
runs-on: windows-2022
timeout-minutes: 30
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: "3.14"
architecture: ${{ matrix.architecture }}
architecture: "x64"
cache: pip
cache-dependency-path: "**/requirements.txt"
- name: Install Python dependencies

@@ -41,13 +31,13 @@ jobs:
id: windows_binary
run: python builder/package.py binary
- name: Upload Windows standalone binary (unsigned)
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
id: upload-unsigned-binary
with:
path: "*-win*-bin.zip"
name: Windows standalone binary (${{ matrix.architecture }})
path: "*-win64-bin.zip"
name: Windows standalone binary
- name: Sign Windows standalone binary
uses: signpath/github-action-submit-signing-request@v2
uses: signpath/github-action-submit-signing-request@v1
if: contains(github.ref, 'refs/tags/')
with:
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}

@@ -59,24 +49,22 @@ jobs:
wait-for-completion: true
output-artifact-directory: "signed"
- name: Upload Windows standalone binary (signed)
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
if: contains(github.ref, 'refs/tags/')
with:
name: Windows standalone binary (${{ matrix.architecture }}, signed)
name: Windows standalone binary (signed)
path: "signed"
- name: Build Windows installer
if: matrix.architecture == 'x64'
run: python builder/package.py installer
- name: Upload Windows installer
if: matrix.architecture == 'x64'
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
id: upload-unsigned-installer
with:
path: "*-win-setup.exe"
name: Windows installer (${{ matrix.architecture }})
name: Windows installer
- name: Sign Windows installer
if: matrix.architecture == 'x64' && contains(github.ref, 'refs/tags/')
uses: signpath/github-action-submit-signing-request@v2
uses: signpath/github-action-submit-signing-request@v1
if: contains(github.ref, 'refs/tags/')
with:
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
organization-id: ${{ secrets.SIGNPATH_ORG_ID }}

@@ -87,27 +75,27 @@ jobs:
wait-for-completion: true
output-artifact-directory: "signed"
- name: Upload Windows installer (signed)
if: matrix.architecture == 'x64' && contains(github.ref, 'refs/tags/')
uses: actions/upload-artifact@v6
if: contains(github.ref, 'refs/tags/')
uses: actions/upload-artifact@v4
with:
name: Windows installer (${{ matrix.architecture }}, signed)
name: Windows installer (signed)
path: "signed/*-win-setup.exe"

build_macos:
name: Build macOS binary
runs-on: macos-14
timeout-minutes: 15
timeout-minutes: 30
env:
# We need the official Python, because the GA ones only support newer macOS versions
# The deployment target is picked up by the Python build tools automatically
# If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
PYTHON_VERSION: "3.14.2"
PYTHON_VERSION: "3.14.0"
MACOSX_DEPLOYMENT_TARGET: "10.15"
# We need to force compile for universal2 support
CFLAGS: -arch x86_64 -arch arm64
ARCHFLAGS: -arch x86_64 -arch arm64
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Set up Python
# Only use this for the caching of pip packages!
uses: actions/setup-python@v6

@@ -117,7 +105,7 @@ jobs:
cache-dependency-path: "**/requirements.txt"
- name: Cache Python download
id: cache-python-download
uses: actions/cache@v5
uses: actions/cache@v4
with:
path: ~/python.pkg
key: cache-macOS-Python-${{ env.PYTHON_VERSION }}

@@ -152,7 +140,7 @@ jobs:
# Run this on macOS so the line endings are correct by default
run: python builder/package.py source
- name: Upload source distribution
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
with:
path: "*-src.tar.gz"
name: Source distribution

@@ -165,7 +153,7 @@ jobs:
python3 builder/package.py app
python3 builder/make_dmg.py
- name: Upload macOS binary
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
with:
path: "*-macos.dmg"
name: macOS binary

@@ -179,14 +167,14 @@ jobs:
matrix:
include:
- os: ubuntu-latest
linux_arch: x64
linux_arch: amd64
- os: ubuntu-24.04-arm
linux_arch: arm64
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Cache par2cmdline-turbo tarball
uses: actions/cache@v5
uses: actions/cache@v4
id: cache-par2cmdline
# Clearing the cache in case of new version requires manual clearing in GitHub!
with:

@@ -208,7 +196,7 @@ jobs:
timeout 10s snap run sabnzbd --help || true
sudo snap remove sabnzbd
- name: Upload snap
uses: actions/upload-artifact@v6
uses: actions/upload-artifact@v4
with:
name: Snap package (${{ matrix.linux_arch }})
path: ${{ steps.snapcraft.outputs.snap }}

@@ -227,7 +215,7 @@ jobs:
runs-on: ubuntu-latest
needs: [build_windows, build_macos]
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:

@@ -235,15 +223,15 @@ jobs:
cache: pip
cache-dependency-path: "builder/release-requirements.txt"
- name: Download Source distribution artifact
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: Source distribution
- name: Download macOS artifact
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
name: macOS binary
- name: Download Windows artifacts
uses: actions/download-artifact@v7
uses: actions/download-artifact@v5
with:
pattern: ${{ (contains(github.ref, 'refs/tags/')) && '*signed*' || '*Windows*' }}
merge-multiple: true
.github/workflows/integration_testing.yml (vendored, 10 changed lines)

@@ -7,7 +7,7 @@ jobs:
name: Black Code Formatter
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Black Code Formatter
uses: lgeiger/black-action@master
with:

@@ -20,7 +20,7 @@ jobs:
builder/SABnzbd.spec
tests
--line-length=120
--target-version=py39
--target-version=py38
--check
--diff

@@ -31,19 +31,19 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13", "3.14" ]
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
name: ["Linux"]
os: [ubuntu-latest]
include:
- name: macOS
os: macos-latest
os: macos-13
python-version: "3.14"
- name: Windows
os: windows-2022
python-version: "3.14"
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v6
with:
.github/workflows/stale.yml (vendored, 2 changed lines)

@@ -26,7 +26,7 @@ jobs:
if: github.repository_owner == 'sabnzbd'
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v6
- uses: dessant/lock-threads@v5
with:
log-output: true
issue-inactive-days: 60
.github/workflows/translations.yml (vendored, 4 changed lines)

@@ -12,7 +12,7 @@ jobs:
env:
TX_TOKEN: ${{ secrets.TX_TOKEN }}
steps:
- uses: actions/checkout@v6
- uses: actions/checkout@v5
with:
token: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
- name: Generate translatable texts

@@ -30,7 +30,7 @@ jobs:
run: |
python3 tools/make_mo.py
- name: Push translatable and translated texts back to repo
uses: stefanzweifel/git-auto-commit-action@v7.1.0
uses: stefanzweifel/git-auto-commit-action@v7.0.0
if: env.TX_TOKEN
with:
commit_message: |

@@ -52,7 +52,7 @@ Specific guides to install from source are available for Windows and macOS:
https://sabnzbd.org/wiki/installation/install-macos
https://sabnzbd.org/wiki/installation/install-from-source-windows

Only Python 3.9 and above is supported.
Only Python 3.8 and above is supported.

On Linux systems you need to install:
par2 unrar python3-setuptools python3-pip

@@ -16,7 +16,7 @@ If you want to know more you can head over to our website: https://sabnzbd.org.
SABnzbd has a few dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:

- `python` (Python 3.9 and above, often called `python3`)
- `python` (Python 3.8 and above, often called `python3`)
- Python modules listed in `requirements.txt`. Install with `python3 -m pip install -r requirements.txt -U`
- `par2` (Multi-threaded par2 installation guide can be found [here](https://sabnzbd.org/wiki/installation/multicore-par2))
- `unrar` (make sure you get the "official" non-free version of unrar)
README.mkd (107 changed lines)

@@ -1,38 +1,95 @@
Release Notes - SABnzbd 4.6.0 Beta 2
Release Notes - SABnzbd 4.5.5
=========================================================

This is the second beta release of version 4.6.
## Bug fixes and changes in 4.5.5

## New features in 4.6.0
* macOS: Failed to start on versions of macOS older than 11.
  Python 3.14 dropped support for macOS 10.13 and 10.14.
  Because of that macOS 10.15 is required to run 4.5.5.

* Added support for NNTP Pipelining which eliminates idle waiting between
  requests, significantly improving speeds on high-latency connections.
  Read more here: https://sabnzbd.org/wiki/advanced/nntp-pipelining (a rough sketch follows after this list)
* Dynamically increase Assembler limits on faster connections.
* Improved disk speed measurement in Status window.
* Enable `verify_xff_header` by default.
* Reduce delays between jobs during post-processing.
* If a download only has `.nzb` files inside, the new downloads
  will include the name of the original download.
* Dropped support for Python 3.8.
* Windows: Added Windows ARM (portable) release.
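The pipelining feature above is only described at a high level in these notes. As a rough, hedged sketch of the general idea (not SABnzbd's downloader code; the host and message-ids are placeholders), a client can send several NNTP `BODY` commands back-to-back and only then read the responses, so a high-latency link is never left idle between articles:

```python
import socket

# Minimal illustration of NNTP pipelining, assuming a plain (non-TLS) server
# and ignoring dot-unstuffing; not SABnzbd's actual implementation.
def fetch_articles_pipelined(host: str, message_ids: list, port: int = 119) -> list:
    with socket.create_connection((host, port)) as sock:
        reader = sock.makefile("rb")
        reader.readline()  # server greeting, e.g. "200 ..."

        # Pipelining: queue all requests up front instead of one request per round-trip
        sock.sendall(b"".join(b"BODY <%s>\r\n" % mid.encode() for mid in message_ids))

        bodies = []
        for _ in message_ids:
            status = reader.readline()  # e.g. "222 0 <id> body follows"
            body = bytearray()
            if status.startswith(b"222"):
                for line in reader:
                    if line == b".\r\n":  # multi-line response terminator
                        break
                    body += line
            bodies.append(bytes(body))
        return bodies
```

Because NNTP responses come back in the same order the commands were sent, each response can be matched to its request without extra bookkeeping, which is what makes a simple scheme like this work and where the speed-up on high-latency connections comes from.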
## Bug fixes and changes in 4.5.4

## Bug fixes since 4.5.0
### New Features
* History details now includes option to mark job as `Completed`.
* `Quota` notifications available for all notification services.
  - Sends alerts at 75%, 90%, and 100% quota usage.
* Multi-Operations now supports Move to Top/Bottom.
* New `outgoing_nntp_ip` option to bind outgoing NNTP connections to specific IP address (sketched below).
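For the `outgoing_nntp_ip` item in the list above, the underlying mechanism is simply binding the socket to a chosen local address before connecting. The snippet below is a generic sketch with placeholder names (`open_nntp_connection`, `outgoing_ip`), not the option handling SABnzbd actually ships:

```python
import socket

def open_nntp_connection(server: str, port: int = 563, outgoing_ip: str = "") -> socket.socket:
    """Connect to a news server, optionally from a fixed local IP (illustrative only)."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if outgoing_ip:
        # Port 0 lets the OS pick any free source port on the chosen local address
        sock.bind((outgoing_ip, 0))
    sock.connect((server, port))
    return sock
```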
* `Check before download` could get stuck or fail to reject.
* No error was shown in case NZB upload failed.
* Correct mobile layout if `Full Width` is enabled.
* Aborted Direct Unpack could result in no files being unpacked.
* Windows: Tray icon disappears after Explorer restart.
* macOS: Slow to start on some network setups.
### Improvements
* Setup wizard now requires successful Server Test before proceeding.
* Anime episode notation `S04 - 10` now supported for Sorting and Duplicate Detection (see the sketch after this list).
* Multi-Operations: Play/Resume button unselects on second click for better usability.
* Unrar now handles renaming of invalid characters on Windows filesystem.
* Switched from vendored `sabnzbd.rarfile` module to `rarfile>=4.2`.
* Warning displayed when removing all Orphaned jobs (clears Temporary Download folder).
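The `S04 - 10` notation from the Improvements list above can be recognized with a small regular expression; the pattern and helper below are illustrative only and are not the parser used by SABnzbd's Sorting or Duplicate Detection:

```python
import re

# Matches both the common "S04E10" style and the anime "S04 - 10" style (hypothetical pattern)
EPISODE_RE = re.compile(r"S(?P<season>\d{1,2})(?:E|\s*-\s*)(?P<episode>\d{1,3})", re.IGNORECASE)

def parse_episode(name: str):
    """Return (season, episode) if the name contains a recognizable episode marker."""
    if match := EPISODE_RE.search(name):
        return int(match.group("season")), int(match.group("episode"))
    return None

assert parse_episode("Show.Name.S04 - 10.1080p") == (4, 10)
assert parse_episode("Show.Name.S04E10.1080p") == (4, 10)
```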
### Bug Fixes
* Active connections counter in Status window now updates correctly.
* Job setting changes during URL-grabbing no longer ignored.
* Incomplete `.par2` file parsing no longer leaves files behind.
* `Local IPv4 address` now detectable when using Socks5 proxy.
* Server configuration changes no longer show `Failure` message during page reload.

### Platform-Specific
* Linux: `Make Windows compatible` automatically enabled when needed.
* Windows: Executables are now signed using SignPath Foundation certificate.
* Windows: Can now start SABnzbd directly from installer.
* Windows and macOS: Binaries now use Python 3.14.

## Bug fixes and changes in 4.5.3

* Remember if `Permanently delete` was previously checked.
* All available IP-addresses will be included when selecting the fastest.
* Pre-queue script rejected NZBs were sometimes reported as `URL Fetching failed`.
* RSS `Next scan` time was not adjusted after manual `Read All Feeds Now`.
* Prevent renaming of `.cbr` files during verification.
* If `--disable-file-log` was enabled, `Show Logging` would crash.
* API: Added `time_added`, timestamp of when the job was added to the queue.
* API: History output could contain duplicate items.
* Snap: Updated packages and changed build process for reliability.
* macOS: Repair would fail on macOS 10.13 High Sierra.
* Windows: Unable to start on Windows 8.
* Windows: Updated Unrar to 7.13, which resolves CVE-2025-8088.

## Bug fixes and changes in 4.5.2

* Added Tab and Shift+Tab navigation to move between rename fields in queue.
* Invalid cookies of other services could result in errors.
* Internet Bandwidth test could be stuck in infinite loop.
* RSS readout did not ignore torrent alternatives.
* Prowl and Pushover settings did not load correctly.
* Renamed `osx` to `macos` internally.
* API: Removed `B` post-fix from `quota` and `left_quota` fields in `queue`.
* Windows: Support more languages in the installer.
* Windows and macOS: Updated par2cmdline-turbo to 1.3.0 and Unrar to 7.12.

## Bug fixes and changes in 4.5.1

* Correct platform detection on Linux.
* The `From SxxEyy` RSS filters did not always work.
* Windows and macOS: Update Unrar to 7.11.

## New features in 4.5.0

* Improved failure detection by downloading additional par2 files right away.
* Added more diagnostic information about the system.
* Use XFF headers for login validation if `verify_xff_header` is enabled.
* Added Turkish translation (by @cardpuncher).
* Added `unrar_parameters` option to supply custom Unrar parameters.
* Windows: Removed MultiPar support.
* Windows and macOS: Updated Python to 3.13.2, 7zip to 24.09,
  Unrar to 7.10 and par2cmdline-turbo to 1.2.0.

## Bug fixes since 4.4.0

* Handle filenames that exceed maximum filesystem lengths.
* Directly decompress gzip responses when retrieving NZB's.

## Upgrade notices

* You can directly upgrade from version 3.0.0 and newer.
* Upgrading from older versions will require performing a `Queue repair`.
* Downgrading from version 4.2.0 or newer to 3.7.2 or older will require
  performing a `Queue repair` due to changes in the internal data format.
* Direct upgrade supported from version 3.0.0 and newer.
* Older versions require performing a `Queue repair` after upgrading.

## Known problems and solutions
SABnzbd.py (28 changed lines)

@@ -19,8 +19,8 @@ import sys

# Trick to show a better message on older Python
# releases that don't support walrus operator
if Python_39_is_required_to_run_SABnzbd := sys.hexversion < 0x03090000:
print("Sorry, requires Python 3.9 or above")
if Python_38_is_required_to_run_SABnzbd := sys.hexversion < 0x03080000:
print("Sorry, requires Python 3.8 or above")
print("You can read more at: https://sabnzbd.org/wiki/installation/install-off-modules")
sys.exit(1)

@@ -40,7 +40,7 @@ import re
import gc
import threading
import http.cookies
from typing import Any
from typing import List, Dict, Any

try:
import sabctools

@@ -142,7 +142,7 @@ class GUIHandler(logging.Handler):
"""Initializes the handler"""
logging.Handler.__init__(self)
self._size: int = size
self.store: list[dict[str, Any]] = []
self.store: List[Dict[str, Any]] = []

def emit(self, record: logging.LogRecord):
"""Emit a record by adding it to our private queue"""

@@ -540,19 +540,21 @@ def get_webhost(web_host, web_port, https_port):
# If only APIPA's or IPV6 are found, fall back to localhost
ipv4 = ipv6 = False
localhost = hostip = "localhost"

try:
# Valid user defined name?
info = socket.getaddrinfo(web_host, None)
info = socket.getaddrinfo(socket.gethostname(), None)
except socket.error:
if not is_localhost(web_host):
web_host = "0.0.0.0"
# Hostname does not resolve
try:
info = socket.getaddrinfo(localhost, None)
# Valid user defined name?
info = socket.getaddrinfo(web_host, None)
except socket.error:
info = socket.getaddrinfo("127.0.0.1", None)
localhost = "127.0.0.1"

if not is_localhost(web_host):
web_host = "0.0.0.0"
try:
info = socket.getaddrinfo(localhost, None)
except socket.error:
info = socket.getaddrinfo("127.0.0.1", None)
localhost = "127.0.0.1"
for item in info:
ip = str(item[4][0])
if ip.startswith("169.254."):
@@ -16,7 +16,6 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import os
import platform
import re

# Constants

@@ -44,17 +43,11 @@ RELEASE_VERSION_BASE = f"{RELEASE_VERSION_TUPLE[0]}.{RELEASE_VERSION_TUPLE[1]}.{
RELEASE_NAME = "SABnzbd-%s" % RELEASE_VERSION
RELEASE_TITLE = "SABnzbd %s" % RELEASE_VERSION
RELEASE_SRC = RELEASE_NAME + "-src.tar.gz"
RELEASE_WIN_BIN_X64 = RELEASE_NAME + "-win64-bin.zip"
RELEASE_WIN_BIN_ARM64 = RELEASE_NAME + "-win-arm64-bin.zip"
RELEASE_WIN_INSTALLER = RELEASE_NAME + "-win-setup.exe"
RELEASE_BINARY = RELEASE_NAME + "-win64-bin.zip"
RELEASE_INSTALLER = RELEASE_NAME + "-win-setup.exe"
RELEASE_MACOS = RELEASE_NAME + "-macos.dmg"
RELEASE_README = "README.mkd"

# Detect architecture
RELEASE_WIN_BIN = RELEASE_WIN_BIN_X64
if platform.machine() == "ARM64":
RELEASE_WIN_BIN = RELEASE_WIN_BIN_ARM64

# Used in package.py and SABnzbd.spec
EXTRA_FILES = [
RELEASE_README,
@@ -28,6 +28,7 @@ import urllib.request
import urllib.error
import configobj
import packaging.version
from typing import List

from constants import (
RELEASE_VERSION,

@@ -35,8 +36,8 @@ from constants import (
VERSION_FILE,
RELEASE_README,
RELEASE_NAME,
RELEASE_WIN_BIN,
RELEASE_WIN_INSTALLER,
RELEASE_BINARY,
RELEASE_INSTALLER,
ON_GITHUB_ACTIONS,
RELEASE_THIS,
RELEASE_SRC,

@@ -69,7 +70,7 @@ def delete_files_glob(glob_pattern: str, allow_no_matches: bool = False):
raise FileNotFoundError(f"No files found that match '{glob_pattern}'")

def run_external_command(command: list[str], print_output: bool = True, **kwargs):
def run_external_command(command: List[str], print_output: bool = True, **kwargs):
"""Wrapper to ease the use of calling external programs"""
process = subprocess.Popen(command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs)
output, _ = process.communicate()

@@ -257,7 +258,7 @@ if __name__ == "__main__":

# Remove any leftovers
safe_remove(RELEASE_NAME)
safe_remove(RELEASE_WIN_BIN)
safe_remove(RELEASE_BINARY)

# Run PyInstaller and check output
shutil.copyfile("builder/SABnzbd.spec", "SABnzbd.spec")

@@ -275,8 +276,8 @@ if __name__ == "__main__":
test_sab_binary("dist/SABnzbd/SABnzbd.exe")

# Create the archive
run_external_command(["win/7zip/7za.exe", "a", RELEASE_WIN_BIN, "SABnzbd"], cwd="dist")
shutil.move(f"dist/{RELEASE_WIN_BIN}", RELEASE_WIN_BIN)
run_external_command(["win/7zip/7za.exe", "a", RELEASE_BINARY, "SABnzbd"], cwd="dist")
shutil.move(f"dist/{RELEASE_BINARY}", RELEASE_BINARY)

if "installer" in sys.argv:
# Check if we have the dist folder

@@ -284,10 +285,10 @@ if __name__ == "__main__":
raise FileNotFoundError("SABnzbd executable not found, run binary creation first")

# Check if we have a signed version
if os.path.exists(f"signed/{RELEASE_WIN_BIN}"):
if os.path.exists(f"signed/{RELEASE_BINARY}"):
print("Using signed version of SABnzbd binaries")
safe_remove("dist/SABnzbd")
run_external_command(["win/7zip/7za.exe", "x", "-odist", f"signed/{RELEASE_WIN_BIN}"])
run_external_command(["win/7zip/7za.exe", "x", "-odist", f"signed/{RELEASE_BINARY}"])

# Make sure it exists
if not os.path.exists("dist/SABnzbd/SABnzbd.exe"):

@@ -310,7 +311,7 @@ if __name__ == "__main__":
"/V3",
"/DSAB_VERSION=%s" % RELEASE_VERSION,
"/DSAB_VERSIONKEY=%s" % ".".join(map(str, RELEASE_VERSION_TUPLE)),
"/DSAB_FILE=%s" % RELEASE_WIN_INSTALLER,
"/DSAB_FILE=%s" % RELEASE_INSTALLER,
"NSIS_Installer.nsi.tmp",
]
)
@@ -29,9 +29,8 @@ from constants import (
RELEASE_VERSION_BASE,
PRERELEASE,
RELEASE_SRC,
RELEASE_WIN_BIN_X64,
RELEASE_WIN_BIN_ARM64,
RELEASE_WIN_INSTALLER,
RELEASE_BINARY,
RELEASE_INSTALLER,
RELEASE_MACOS,
RELEASE_README,
RELEASE_THIS,

@@ -43,9 +42,8 @@ from constants import (
# Verify we have all assets
files_to_check = (
RELEASE_SRC,
RELEASE_WIN_BIN_X64,
RELEASE_WIN_BIN_ARM64,
RELEASE_WIN_INSTALLER,
RELEASE_BINARY,
RELEASE_INSTALLER,
RELEASE_MACOS,
RELEASE_README,
)

@@ -114,7 +112,7 @@ if RELEASE_THIS and gh_token:
print("Removing existing asset %s " % gh_asset.name)
gh_asset.delete_asset()
# Upload the new one
print("Uploading %s to release %s" % (file_to_check, gh_release.name))
print("Uploading %s to release %s" % (file_to_check, gh_release.title))
gh_release.upload_asset(file_to_check)

# Check if we now have all files
@@ -1,10 +1,10 @@
# Basic build requirements
# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
pyinstaller==6.17.0
pyinstaller==6.16.0
packaging==25.0
pyinstaller-hooks-contrib==2025.10
altgraph==0.17.5
wrapt==2.0.1
pyinstaller-hooks-contrib==2025.9
altgraph==0.17.4
wrapt==2.0.0
setuptools==80.9.0

# For the Windows build

@@ -12,8 +12,8 @@ pefile==2024.8.26; sys_platform == 'win32'
pywin32-ctypes==0.2.3; sys_platform == 'win32'

# For the macOS build
dmgbuild==1.6.6; sys_platform == 'darwin'
mac-alias==2.2.3; sys_platform == 'darwin'
macholib==1.16.4; sys_platform == 'darwin'
ds-store==1.3.2; sys_platform == 'darwin'
PyNaCl==1.6.1; sys_platform == 'darwin'
dmgbuild==1.6.5; sys_platform == 'darwin'
mac-alias==2.2.2; sys_platform == 'darwin'
macholib==1.16.3; sys_platform == 'darwin'
ds-store==1.3.1; sys_platform == 'darwin'
PyNaCl==1.6.0; sys_platform == 'darwin'
@@ -187,7 +187,7 @@
<td><label for="apprise_enable"> $T('opt-apprise_enable')</label></td>
</tr>
</table>
<p>$T('explain-apprise_enable')</p>
<em>$T('explain-apprise_enable')</em><br>
<p>$T('version'): ${apprise.__version__}</p>

$show_cat_box('apprise')

@@ -197,7 +197,7 @@
<div class="field-pair">
<label class="config" for="apprise_urls">$T('opt-apprise_urls')</label>
<input type="text" name="apprise_urls" id="apprise_urls" value="$apprise_urls" />
<span class="desc">$T('explain-apprise_urls')</span>
<span class="desc">$T('explain-apprise_urls'). <br>$T('readwiki')</span>
</div>
<div class="field-pair">
<span class="desc">$T('explain-apprise_extra_urls')</span>

@@ -117,12 +117,6 @@
<input type="checkbox" name="optional" id="optional" value="1" />
<span class="desc">$T('explain-optional')</span>
</div>
<div class="field-pair advanced-settings">
<label class="config" for="pipelining_requests">$T('srv-pipelining_requests')</label>
<input type="number" name="pipelining_requests" id="pipelining_requests" min="1" max="20" value="1" />
<span class="desc">$T('explain-pipelining_requests')<br>$T('readwiki')
<a href="https://sabnzbd.org/wiki/advanced/nntp-pipelining" target="_blank">https://sabnzbd.org/wiki/advanced/nntp-pipelining</a></span>
</div>
<div class="field-pair advanced-settings">
<label class="config" for="expire_date">$T('srv-expire_date')</label>
<input type="date" name="expire_date" id="expire_date" />

@@ -254,12 +248,6 @@
<input type="checkbox" name="optional" id="optional$cur" value="1" <!--#if int($server['optional']) != 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-optional')</span>
</div>
<div class="field-pair advanced-settings">
<label class="config" for="pipelining_requests$cur">$T('srv-pipelining_requests')</label>
<input type="number" name="pipelining_requests" id="pipelining_requests$cur" value="$server['pipelining_requests']" min="1" max="20" required />
<span class="desc">$T('explain-pipelining_requests')<br>$T('readwiki')
<a href="https://sabnzbd.org/wiki/advanced/nntp-pipelining" target="_blank">https://sabnzbd.org/wiki/advanced/nntp-pipelining</a></span>
</div>
<div class="field-pair advanced-settings">
<label class="config" for="expire_date$cur">$T('srv-expire_date')</label>
<input type="date" name="expire_date" id="expire_date$cur" value="$server['expire_date']" />
@@ -6,12 +6,8 @@
<span class="glyphicon glyphicon-open"></span> $T('Glitter-notification-uploading') <span class="main-notification-box-file-count"></span>
</div>

<div class="main-notification-box-uploading-failed">
<span class="glyphicon glyphicon-exclamation-sign"></span> $T('Glitter-notification-upload-failed').replace('%s', '') <span class="main-notification-box-file-count"></span>
</div>

<div class="main-notification-box-queue-repair">
<span class="glyphicon glyphicon-wrench"></span> $T('Glitter-repairQueue')
<span class="glyphicon glyphicon glyphicon-wrench"></span> $T('Glitter-repairQueue')
</div>

<div class="main-notification-box-disconnect">

@@ -726,9 +726,6 @@ function ViewModel() {
$('#nzbname').val('')
$('.btn-file em').html(glitterTranslate.chooseFile + '…')
}
}).fail(function(xhr, status, error) {
// Update the uploading notification text to show error
showNotification('.main-notification-box-uploading-failed', 0, error)
});
}

@@ -69,10 +69,6 @@ legend,
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.2);
}

.main-notification-box-uploading-failed {
color: #F95151;
}

.container,
.modal-body,
.modal-footer {

@@ -7,10 +7,6 @@
padding-right: 8px;
}

.container-full-width .container {
width: 100%;
}

.main-navbar {
margin-top: 0;
padding: 0;
@@ -5,10 +5,6 @@
|
||||
<metadata_license>MIT</metadata_license>
|
||||
<name>SABnzbd</name>
|
||||
<summary>Free and easy binary newsreader</summary>
|
||||
<branding>
|
||||
<color type="primary" scheme_preference="light">#e7e7e7</color>
|
||||
<color type="primary" scheme_preference="dark">#444444</color>
|
||||
</branding>
|
||||
<description>
|
||||
<p>
|
||||
SABnzbd is a free and Open Source web-based binary newsreader,
|
||||
@@ -21,13 +17,6 @@
|
||||
and services that help automate the download process.
|
||||
</p>
|
||||
</description>
|
||||
<keywords>
|
||||
<keyword>usenet</keyword>
|
||||
<keyword>nzb</keyword>
|
||||
<keyword>download</keyword>
|
||||
<keyword>newsreader</keyword>
|
||||
<keyword>binary</keyword>
|
||||
</keywords>
|
||||
<categories>
|
||||
<category>Network</category>
|
||||
<category>FileTransfer</category>
|
||||
@@ -35,49 +24,33 @@
|
||||
<url type="homepage">https://sabnzbd.org</url>
|
||||
<url type="bugtracker">https://github.com/sabnzbd/sabnzbd/issues</url>
|
||||
<url type="vcs-browser">https://github.com/sabnzbd/sabnzbd</url>
|
||||
<url type="contribute">https://github.com/sabnzbd/sabnzbd</url>
|
||||
<url type="translate">https://sabnzbd.org/wiki/translate</url>
|
||||
<url type="donation">https://sabnzbd.org/donate</url>
|
||||
<url type="help">https://sabnzbd.org/wiki/</url>
|
||||
<url type="faq">https://sabnzbd.org/wiki/faq</url>
|
||||
<url type="contact">https://sabnzbd.org/live-chat.html</url>
|
||||
<releases>
|
||||
<release version="4.6.0" date="2025-12-24" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.6.0</url>
|
||||
</release>
|
||||
<release version="4.5.5" date="2025-10-24" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.5</url>
|
||||
</release>
|
||||
<release version="4.5.4" date="2025-10-22" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.4</url>
|
||||
</release>
|
||||
<release version="4.5.3" date="2025-08-25" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.3</url>
|
||||
</release>
|
||||
<release version="4.5.2" date="2025-07-09" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.2</url>
|
||||
</release>
|
||||
<release version="4.5.1" date="2025-04-11" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.1</url>
|
||||
</release>
|
||||
<release version="4.5.0" date="2025-04-01" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.5.0</url>
|
||||
</release>
|
||||
<release version="4.4.1" date="2024-12-23" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.4.1</url>
|
||||
</release>
|
||||
<release version="4.4.0" date="2024-12-09" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.4.0</url>
|
||||
</release>
|
||||
<release version="4.3.3" date="2024-08-01" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.3.3</url>
|
||||
</release>
|
||||
<release version="4.3.2" date="2024-05-30" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.3.2</url>
|
||||
</release>
|
||||
<release version="4.3.1" date="2024-05-03" type="stable">
|
||||
<url type="details">https://github.com/sabnzbd/sabnzbd/releases/tag/4.3.1</url>
|
||||
</release>
|
||||
<release version="4.5.5" date="2025-10-24" type="stable"/>
|
||||
<release version="4.5.4" date="2025-10-22" type="stable"/>
|
||||
<release version="4.5.3" date="2025-08-25" type="stable"/>
|
||||
<release version="4.5.2" date="2025-07-09" type="stable"/>
|
||||
<release version="4.5.1" date="2025-04-11" type="stable"/>
|
||||
<release version="4.5.0" date="2025-04-01" type="stable"/>
|
||||
<release version="4.4.1" date="2024-12-23" type="stable"/>
|
||||
<release version="4.4.0" date="2024-12-09" type="stable"/>
|
||||
<release version="4.3.3" date="2024-08-01" type="stable"/>
|
||||
<release version="4.3.2" date="2024-05-30" type="stable"/>
|
||||
<release version="4.3.1" date="2024-05-03" type="stable"/>
|
||||
<release version="4.3.0" date="2024-05-01" type="stable"/>
|
||||
<release version="4.2.2" date="2024-02-01" type="stable"/>
|
||||
<release version="4.2.1" date="2024-01-05" type="stable"/>
|
||||
<release version="4.2.0" date="2024-01-03" type="stable"/>
|
||||
<release version="4.1.0" date="2023-09-26" type="stable"/>
|
||||
<release version="4.0.3" date="2023-06-16" type="stable"/>
|
||||
<release version="4.0.2" date="2023-06-09" type="stable"/>
|
||||
<release version="4.0.1" date="2023-05-01" type="stable"/>
|
||||
<release version="4.0.0" date="2023-04-28" type="stable"/>
|
||||
<release version="3.7.2" date="2023-02-05" type="stable"/>
|
||||
</releases>
|
||||
<launchable type="desktop-id">sabnzbd.desktop</launchable>
|
||||
<provides>
|
||||
@@ -100,59 +73,11 @@
|
||||
<screenshots>
|
||||
<screenshot type="default">
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/interface.png</image>
|
||||
<caption>Intuitive interface</caption>
|
||||
<caption>Web interface</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/night-mode.png</image>
|
||||
<caption>Also comes in Night-mode</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/add-nzb.png</image>
|
||||
<caption>Add NZB's or use drag-and-drop!</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/phone-interface.png</image>
|
||||
<caption>Scales to any screen size</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/history-details.png</image>
|
||||
<caption>Easy overview of all history details</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/phone-extra.png</image>
|
||||
<caption>Every option, on every screen size</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/file-lists.png</image>
|
||||
<caption>Manage a job's individual files</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/set-speedlimit.png</image>
|
||||
<caption>Easy speed limiting</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/set-options.png</image>
|
||||
<caption>Quickly change settings</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/dashboard.png</image>
|
||||
<caption>Easy system check</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/connections-overview.png</image>
|
||||
<caption>See active connections</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/skin-settings.png</image>
|
||||
<caption>Customize the interface</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/tabbed.png</image>
|
||||
<caption>Tabbed-mode</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/set-custom-pause.png</image>
|
||||
<caption>Specify any pause duration</caption>
|
||||
<caption>Night mode</caption>
|
||||
</screenshot>
|
||||
<screenshot>
|
||||
<image>https://sabnzbd.org/images/landing/screenshots/config.png</image>
|
||||
|
||||
@@ -125,11 +125,6 @@ msgstr ""
|
||||
msgid "Current umask (%o) might deny SABnzbd access to the files and folders it creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Completed Download Folder %s is on FAT file system, limiting maximum file size to 4GB"
|
||||
@@ -284,7 +279,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr ""
|
||||
|
||||
@@ -513,6 +508,11 @@ msgstr ""
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr ""
|
||||
@@ -530,6 +530,11 @@ msgstr ""
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr ""
|
||||
@@ -889,7 +894,7 @@ msgid "Update Available!"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1122,16 +1127,6 @@ msgstr ""
|
||||
msgid "left"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr ""
|
||||
@@ -1305,18 +1300,103 @@ msgstr ""
|
||||
msgid "NZB added to queue"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr ""
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr ""
|
||||
@@ -3047,7 +3127,7 @@ msgid "Enable SFV-based checks"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3402,14 +3482,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Request multiple articles per connection without waiting for each response first.<br />This can improve download speeds, especially on connections with higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -3813,16 +3885,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Send notifications directly to any notification service you use.<br>For example: Slack, Discord, Telegram, or any service from over 100 supported services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Apprise defines service connection information using URLs.<br>Read the Apprise wiki how to define the URL for each service.<br>Use a comma and/or space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4117,11 +4190,6 @@ msgstr ""
|
||||
msgid "Filename"
|
||||
msgstr ""
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr ""
|
||||
@@ -4531,10 +4599,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
156
po/main/cs.po
156
po/main/cs.po
@@ -144,11 +144,6 @@ msgid ""
"creates."
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid ""
@@ -321,7 +316,7 @@ msgstr ""
msgid "Unwanted extension is in rar file %s"
msgstr "Neočekávaná přípona v rar souboru %s"

#: sabnzbd/assembler.py
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
msgid "Aborted, unwanted extension detected"
msgstr "Přerušeno, nalezena neočekávaná připona"

@@ -565,6 +560,11 @@ msgstr ""
msgid "Fatal error in Downloader"
msgstr ""

#. Warning message
#: sabnzbd/downloader.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#: sabnzbd/downloader.py
msgid "Too many connections to server %s [%s]"
msgstr "Příliš mnoho spojení k serveru %s [%s]"
@@ -584,6 +584,11 @@ msgstr "Přihlášení k serveru %s se nezdařilo [%s]"
msgid "Connecting %s@%s failed, message=%s"
msgstr ""

#. Error message
#: sabnzbd/downloader.py
msgid "Suspect error in downloader"
msgstr "Nejspíše chyba downloaderu"

#: sabnzbd/downloader.py, sabnzbd/skintext.py
msgid "Shutting down"
msgstr "Vypínání"
@@ -960,7 +965,7 @@ msgid "Update Available!"
msgstr "Dostupná aktualizace!"

#. Error message
#: sabnzbd/misc.py, sabnzbd/skintext.py
#: sabnzbd/misc.py
msgid "Failed to upload file: %s"
msgstr "Nezdařilo se nahrát soubor: %s"

@@ -1202,16 +1207,6 @@ msgstr "Zkouším SFV ověření"
msgid "left"
msgstr ""

#. Warning message
#: sabnzbd/newswrapper.py
msgid "%s@%s: Received unknown status code %s for article %s"
msgstr ""

#. Error message
#: sabnzbd/newswrapper.py
msgid "Suspect error in downloader"
msgstr "Nejspíše chyba downloaderu"

#: sabnzbd/newswrapper.py
msgid "This server does not allow SSL on this port"
msgstr "Tento server nepovoluje SSL na tomto portu"
@@ -1393,18 +1388,103 @@ msgstr "Nelze nahrát %s, detekován porušený soubor"
msgid "NZB added to queue"
msgstr "NZB přidáno do fronty"

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Ignoring duplicate NZB \"%s\""
msgstr "Ignoruji duplikátní NZB \"%s\""

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Failing duplicate NZB \"%s\""
msgstr "Nezdařilo se duplikovat NZB \"%s\""

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Duplicate NZB"
msgstr "Duplikátní NZB"

#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (error: %s)"
msgstr ""

#. Warning message
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
msgid "Empty NZB file %s"
msgstr "Prázdný NZB soubor %s"

#: sabnzbd/nzbstuff.py
msgid "Pre-queue script marked job as failed"
msgstr ""

#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Unwanted Extension in file %s (%s)"
msgstr "Nechtěná přípona v souboru %s (%s)"

#: sabnzbd/nzbstuff.py
msgid "Aborted, cannot be completed"
msgstr "Zrušeno, nelze dokončit"

#. Error message
#: sabnzbd/nzbstuff.py
msgid "Error importing %s"
msgstr "Chyba při importu %s"

#: sabnzbd/nzbstuff.py
msgid "DUPLICATE"
msgstr "DUPLIKÁT"

#: sabnzbd/nzbstuff.py
msgid "ALTERNATIVE"
msgstr ""

#: sabnzbd/nzbstuff.py
msgid "ENCRYPTED"
msgstr "ŠIFROVANÉ"

#: sabnzbd/nzbstuff.py
msgid "TOO LARGE"
msgstr "PŘÍLIŠ VELKÝ"

#: sabnzbd/nzbstuff.py
msgid "INCOMPLETE"
msgstr "NEKOMPLETNÍ"

#: sabnzbd/nzbstuff.py
msgid "UNWANTED"
msgstr "NECHTĚNÝ"

#: sabnzbd/nzbstuff.py
msgid "WAIT %s sec"
msgstr "ČEKÁNÍ %s s"

#: sabnzbd/nzbstuff.py
msgid "PROPAGATING %s min"
msgstr "PROPAGUJI %s min"

#: sabnzbd/nzbstuff.py
msgid "Downloaded in %s at an average of %sB/s"
msgstr "Staženo do %s s průměrnou rychlostí %s B/s"

#. Job details page, file age column header
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
msgid "Age"
msgstr "Stáří"

#: sabnzbd/nzbstuff.py
msgid "%s articles were malformed"
msgstr ""

#: sabnzbd/nzbstuff.py
msgid "%s articles were missing"
msgstr ""

#: sabnzbd/nzbstuff.py
msgid "%s articles had non-matching duplicates"
msgstr ""

#: sabnzbd/nzbstuff.py
msgid "Pausing duplicate NZB \"%s\""
msgstr "Pozastavuji duplikátní NZB \"%s\""

#: sabnzbd/panic.py
msgid "Problem with"
msgstr "Problém s"
@@ -3207,8 +3287,7 @@ msgid "Enable SFV-based checks"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgid "Do an extra verification based on SFV files."
msgstr ""

#: sabnzbd/skintext.py
@@ -3592,17 +3671,6 @@ msgstr ""
msgid "Enable"
msgstr ""

#: sabnzbd/skintext.py
msgid "Articles per request"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Request multiple articles per connection without waiting for each response "
"first.<br />This can improve download speeds, especially on connections with"
" higher latency."
msgstr ""

#. Button: Remove server
#: sabnzbd/skintext.py
msgid "Remove Server"
@@ -4012,22 +4080,17 @@ msgid "Enable Apprise notifications"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgid "Default Apprise URLs"
msgstr ""

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr ""

#: sabnzbd/skintext.py
@@ -4344,11 +4407,6 @@ msgstr ""
msgid "Filename"
msgstr ""

#. Job details page, file age column header
#: sabnzbd/skintext.py
msgid "Age"
msgstr "Stáří"

#: sabnzbd/skintext.py
msgid "Free Space"
msgstr ""
@@ -4768,10 +4826,6 @@ msgstr ""
msgid "Server could not complete request"
msgstr ""

#: sabnzbd/urlgrabber.py
msgid "Empty NZB file %s"
msgstr "Prázdný NZB soubor %s"

#. Error message
#: sabnzbd/urlgrabber.py
msgid "URLGRABBER CRASHED"

369
po/main/da.po
@@ -147,11 +147,6 @@ msgid ""
|
||||
msgstr ""
|
||||
"Aktuel umask (%o) kan nægte SABnzbd adgang til filer og mapper den opretter."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -336,7 +331,7 @@ msgstr "I \"%s\" uønsket extension i RAR fil. Uønsket fil er \"%s\" "
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Uønsket extension i rar fil %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Afbrudt, uønsket extension fundet"
|
||||
|
||||
@@ -365,11 +360,11 @@ msgstr "Kvota"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Advarsel om kvotegrænse (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Download genoptaget efter nulstilling af kvote"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -590,6 +585,11 @@ msgstr "Det lykkedes ikke at initialisere %s@%s med begrundelse %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Alvorlig fejl i Downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Modtog ukendt statuskode %s for artikel %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Alt for mange forbindelser til serveren %s [%s]"
|
||||
@@ -611,6 +611,11 @@ msgstr "Det lykkedes ikke at logge på serveren %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Forbindelse %s@%s mislykkedes, besked %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Suspect fejl i downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Påbegynder lukning af SABnzbd"
|
||||
@@ -1001,7 +1006,7 @@ msgid "Update Available!"
|
||||
msgstr "Opdatering tilgængelig!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Kunne ikke uploade fil: %s"
|
||||
|
||||
@@ -1243,16 +1248,6 @@ msgstr "Forsøger SFV verifikation"
|
||||
msgid "left"
|
||||
msgstr "tilbage"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Modtog ukendt statuskode %s for artikel %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Suspect fejl i downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Denne server tillader ikke SSL på denne port"
|
||||
@@ -1437,18 +1432,103 @@ msgstr "Downloadnings fejl %s, ødelagt fil fundet"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB tilføjet i køen"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorerer identiske NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "Fejler dublet NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "Dublet NZB"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Ødelagt NZB fil %s, springer over (årsag=%s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tom NZB fil %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Før-kø script job markeret som mislykkedet"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Afbrudt, kan ikke afsluttes"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Det lykkedes ikke at importere %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLIKERE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "KRYPTEREDE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "FOR STOR"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "UFULDSTÆNDIG"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "UØNSKET"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "VENT %s sekunder"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "PROPAGATING %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Hentede i %s med et gennemsnit på %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Alder"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artikler misdannede"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artikler manglede"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artikler havde ikke-matchende dubletter"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Pause duplikeret NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problem med"
|
||||
@@ -1637,7 +1717,7 @@ msgstr "Efterbehandling mislykkedes for %s (%s)"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Post-processing was aborted"
|
||||
msgstr "Efterbehandling blev afbrudt"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Download Failed"
|
||||
@@ -1691,12 +1771,12 @@ msgstr "RAR filer kunne ikke bekræfte"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Trying RAR renamer"
|
||||
msgstr "Forsøger RAR-omdøbning"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "No matching earlier rar file for %s"
|
||||
msgstr "Ingen matchende tidligere rar-fil for %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/postproc.py
|
||||
@@ -1721,7 +1801,7 @@ msgstr "Fejl ved lukning af system"
|
||||
#. Error message
|
||||
#: sabnzbd/powersup.py
|
||||
msgid "Received a DBus exception %s"
|
||||
msgstr "Modtog en DBus-undtagelse %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/rss.py
|
||||
@@ -2097,7 +2177,7 @@ msgstr "Denne måned"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Selected date range"
|
||||
msgstr "Valgt datointerval"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Today"
|
||||
@@ -2192,7 +2272,7 @@ msgstr "Forum"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Live Chat"
|
||||
msgstr "Live chat"
|
||||
msgstr ""
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2341,7 +2421,7 @@ msgstr "Forsøg igen"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "Markér som fuldført og fjern midlertidige filer"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2356,7 +2436,7 @@ msgstr "Fjern alt fra køen?"
|
||||
#. Delete confirmation popup
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Are you sure you want to remove these jobs?"
|
||||
msgstr "Er du sikker på, at du vil fjerne disse jobs?"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page button
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2381,7 +2461,7 @@ msgstr "Fjern NZB & slet filer"
|
||||
#. Checkbox if job should be added to Archive
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Permanently delete (skip archive)"
|
||||
msgstr "Slet permanent (spring arkiv over)"
|
||||
msgstr ""
|
||||
|
||||
#. Caption for missing articles in Queue
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2404,7 +2484,7 @@ msgstr "Nulstil kvota nu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Archive"
|
||||
msgstr "Arkiv"
|
||||
msgstr ""
|
||||
|
||||
#. Button/link hiding History job details
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2429,7 +2509,7 @@ msgstr "Vis Alt"
|
||||
#. Button showing all archived jobs
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Show Archive"
|
||||
msgstr "Vis arkiv"
|
||||
msgstr ""
|
||||
|
||||
#. History table header - Size of the download quota
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2480,8 +2560,6 @@ msgid ""
|
||||
"Disconnect all active connections to usenet servers. Connections will be "
|
||||
"reopened after a few seconds if there are items in the queue."
|
||||
msgstr ""
|
||||
"Afbryd alle aktive forbindelser til usenet-servere. Forbindelser genåbnes "
|
||||
"efter få sekunder, hvis der er elementer i køen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "This will send a test email to your account."
|
||||
@@ -2672,8 +2750,6 @@ msgid ""
|
||||
"Speed up repairs by installing par2cmdline-turbo, it is available for many "
|
||||
"platforms."
|
||||
msgstr ""
|
||||
"Sæt fart på reparationer ved at installere par2cmdline-turbo, det er "
|
||||
"tilgængeligt for mange platforme."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Version"
|
||||
@@ -2749,8 +2825,6 @@ msgid ""
|
||||
"If the SABnzbd Host or Port is exposed to the internet, your current "
|
||||
"settings allow full external access to the SABnzbd interface."
|
||||
msgstr ""
|
||||
"Hvis SABnzbd-værten eller porten er eksponeret på internettet, tillader dine"
|
||||
" nuværende indstillinger fuld ekstern adgang til SABnzbd-grænsefladen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Security"
|
||||
@@ -2861,10 +2935,6 @@ msgid ""
|
||||
"the Completed Download Folder.<br>Recurring backups can be configured on the"
|
||||
" Scheduling page."
|
||||
msgstr ""
|
||||
"Opret en sikkerhedskopi af konfigurationsfilen og databaser i "
|
||||
"sikkerhedskopimappen.<br>Hvis sikkerhedskopimappen ikke er indstillet, "
|
||||
"oprettes sikkerhedskopien i den fuldførte downloadmappe.<br>Tilbagevendende "
|
||||
"sikkerhedskopier kan konfigureres på planlægningssiden."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Cleanup List"
|
||||
@@ -2979,8 +3049,6 @@ msgstr "Eksterne internetadgang"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "You can set access rights for systems outside your local network."
|
||||
msgstr ""
|
||||
"Du kan indstille adgangsrettigheder for systemer uden for dit lokale "
|
||||
"netværk."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "No access"
|
||||
@@ -3084,9 +3152,6 @@ msgid ""
|
||||
" again.<br />Applies to both the Temporary and Complete Download Folder.<br "
|
||||
"/>Checked every few minutes."
|
||||
msgstr ""
|
||||
"Download genoptages automatisk, hvis den minimale ledige plads er "
|
||||
"tilgængelig igen.<br />Gælder for både den midlertidige og den fuldførte "
|
||||
"downloadmappe.<br />Kontrolleres hvert par minutter."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Permissions for completed downloads"
|
||||
@@ -3172,9 +3237,6 @@ msgid ""
|
||||
"stored.<br />If left empty, the backup will be created in the Completed "
|
||||
"Download Folder."
|
||||
msgstr ""
|
||||
"Placering, hvor sikkerhedskopier af konfigurationsfilen og databaser "
|
||||
"gemmes.<br />Hvis den efterlades tom, oprettes sikkerhedskopien i den "
|
||||
"fuldførte downloadmappe."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "<i>Data will <b>not</b> be moved. Requires SABnzbd restart!</i>"
|
||||
@@ -3192,7 +3254,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Purge Logs"
|
||||
msgstr "Ryd logfiler"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ".nzb Backup Folder"
|
||||
@@ -3256,8 +3318,6 @@ msgid ""
|
||||
"turned off, all jobs will be marked as Completed even if they are "
|
||||
"incomplete."
|
||||
msgstr ""
|
||||
"Udpak kun og kør scripts på jobs, der bestod verifikationsstadiet. Hvis "
|
||||
"slået fra, markeres alle jobs som fuldført, selvom de er ufuldstændige."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when encrypted RAR is downloaded"
|
||||
@@ -3270,19 +3330,19 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Identical download detection"
|
||||
msgstr "Identisk downloaddetektering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Detect identical downloads based on name or NZB contents."
|
||||
msgstr "Detektér identiske downloads baseret på navn eller NZB-indhold."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Smart duplicate detection"
|
||||
msgstr "Smart dubletdetektering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Detect duplicates based on analysis of the filename."
|
||||
msgstr "Detektér dubletter baseret på analyse af filnavnet."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Allow proper releases"
|
||||
@@ -3293,8 +3353,6 @@ msgid ""
|
||||
"Bypass smart duplicate detection if PROPER, REAL or REPACK is detected in "
|
||||
"the download name."
|
||||
msgstr ""
|
||||
"Spring smart dubletdetektering over, hvis PROPER, REAL eller REPACK "
|
||||
"registreres i downloadnavnet."
|
||||
|
||||
#. Four way switch for duplicates
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3313,7 +3371,7 @@ msgstr "Mislykkes job (flyt til historik)"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Abort post-processing"
|
||||
msgstr "Afbryd efterbehandling"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when unwanted extension detected"
|
||||
@@ -3321,7 +3379,7 @@ msgstr "Aktion når uønsket extension er fundet"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Action when an unwanted extension is detected"
|
||||
msgstr "Handling når en uønsket filtype registreres"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Unwanted extensions"
|
||||
@@ -3329,28 +3387,25 @@ msgstr "Uønsket extension"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Blacklist"
|
||||
msgstr "Sortliste"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Whitelist"
|
||||
msgstr "Hvidliste"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Select a mode and list all (un)wanted extensions. For example: <b>exe</b> or"
|
||||
" <b>exe, com</b>"
|
||||
msgstr ""
|
||||
"Vælg en tilstand og angiv alle (u)ønskede filtypeendelser. For eksempel: "
|
||||
"<b>exe</b> eller <b>exe, com</b>"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Enable SFV-based checks"
|
||||
msgstr "Aktiver SFV-baseret kontrol"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Udfør en ekstra kontrol baseret på SFV-filer."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3418,15 +3473,15 @@ msgstr "Afbryd fra usenet-serverne når køen er tom eller sat på pause."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Automatically sort queue"
|
||||
msgstr "Sortér kø automatisk"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Automatically sort jobs in the queue when a new job is added."
|
||||
msgstr "Sortér automatisk jobs i køen, når et nyt job tilføjes."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "The queue will resort every 30 seconds if % downloaded is selected."
|
||||
msgstr "Køen vil sortere hver 30. sekund, hvis % downloadet er valgt."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Propagation delay"
|
||||
@@ -3459,11 +3514,11 @@ msgstr "Erstat mellemrum med understreg i mappenavn."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace underscores in folder name"
|
||||
msgstr "Erstat understreger i mappenavn"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace underscores with dots in folder names."
|
||||
msgstr "Erstat understreger med punktummer i mappenavne."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Replace dots in Foldername"
|
||||
@@ -3515,23 +3570,19 @@ msgstr "Fjern efter download"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Deobfuscate final filenames"
|
||||
msgstr "Afslør endelige filnavne"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If filenames of (large) files in the final folder look obfuscated or "
|
||||
"meaningless they will be renamed to the job name."
|
||||
msgstr ""
|
||||
"Hvis filnavne på (store) filer i den endelige mappe ser slørede eller "
|
||||
"meningsløse ud, omdøbes de til jobnavnet."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Additionally, attempts to set the correct file extension based on the file "
|
||||
"signature if the extension is not present or meaningless."
|
||||
msgstr ""
|
||||
"Forsøger derudover at indstille den korrekte filendelse baseret på "
|
||||
"filsignaturen, hvis endelsen ikke er til stede eller meningsløs."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "HTTPS certificate verification"
|
||||
@@ -3546,11 +3597,11 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr "SOCKS5-proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr "Brug den angivne SOCKS5-proxy til alle udgående forbindelser."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
@@ -3663,11 +3714,11 @@ msgstr "Tidsudløb"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Account expiration date"
|
||||
msgstr "Kontoudløbsdato"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Warn 5 days in advance of account expiration date."
|
||||
msgstr "Advar 5 dage før kontoudløbsdato."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -3675,9 +3726,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"Kvote for denne server, talt fra det tidspunkt, den indstilles. I bytes, "
|
||||
"efterfulgt eventuelt af K,M,G.<br />Kontrolleres hvert par minutter. Besked "
|
||||
"sendes, når kvoten er brugt."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3708,13 +3756,6 @@ msgid ""
|
||||
"used. - Disabled: no certification verification. This is not secure at all, "
|
||||
"anyone could intercept your connection. "
|
||||
msgstr ""
|
||||
"Når SSL er aktiveret: - Streng: gennemtving fuld certifikatverifikation. "
|
||||
"Dette er den mest sikre indstilling. - Medium: verificér at certifikatet er "
|
||||
"gyldigt og matcher serveradressen, men tillad lokalt injicerede certifikater"
|
||||
" (f.eks. af firewall eller virusscanner). - Minimal: verificér at "
|
||||
"certifikatet er gyldigt. Dette er ikke sikkert, ethvert gyldigt certifikat "
|
||||
"kan bruges. - Deaktiveret: ingen certifikatverifikation. Dette er slet ikke "
|
||||
"sikkert, enhver kan opfange din forbindelse."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Disabled"
|
||||
@@ -3726,7 +3767,7 @@ msgstr "Minimal"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Medium"
|
||||
msgstr "Medium"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Strict"
|
||||
@@ -3740,15 +3781,13 @@ msgstr "0 er højeste prioritet, 100 er den laveste prioritet"
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "Påkrævet"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"I tilfælde af forbindelsesfejl vil downloadkøen blive sat på pause i et par "
|
||||
"minutter i stedet for at springe denne server over"
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3765,17 +3804,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Aktivere"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -3805,11 +3833,11 @@ msgstr "Personlige notater"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Article availability"
|
||||
msgstr "Artikeltilgængelighed"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "%f% available of %d requested articles"
|
||||
msgstr "%f% tilgængelige af %d anmodede artikler"
|
||||
msgstr ""
|
||||
|
||||
#. Config->Scheduling
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3870,12 +3898,12 @@ msgstr "Anvend filtre"
|
||||
#. Config->RSS edit button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Edit"
|
||||
msgstr "Redigér"
|
||||
msgstr ""
|
||||
|
||||
#. Config->RSS when will be the next RSS scan
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Next scan at"
|
||||
msgstr "Næste scanning kl."
|
||||
msgstr ""
|
||||
|
||||
#. Config->RSS table column header
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3957,8 +3985,6 @@ msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
"Hvis kun kategorien <em>Standard</em> er valgt, er beskeder aktiveret for "
|
||||
"jobs i alle kategorier."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
@@ -4135,20 +4161,20 @@ msgstr "Enhed(er) som meddelelse skal sendes til"
|
||||
#. Pushover settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Emergency retry"
|
||||
msgstr "Nødforsøg"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "How often (in seconds) the same notification will be sent"
|
||||
msgstr "Hvor ofte (i sekunder) samme besked vil blive sendt"
|
||||
msgstr ""
|
||||
|
||||
#. Pushover settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Emergency expire"
|
||||
msgstr "Nødudløb"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "How many seconds your notification will continue to be retried"
|
||||
msgstr "Hvor mange sekunder din besked fortsætter med at blive forsøgt"
|
||||
msgstr ""
|
||||
|
||||
#. Header for Pushbullet notification section
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4191,30 +4217,19 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Aktiver Apprise-notifikationer"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Send beskeder direkte til enhver beskedtjeneste, du bruger.<br>For eksempel:"
|
||||
" Slack, Discord, Telegram eller enhver tjeneste fra over 100 understøttede "
|
||||
"tjenester!"
|
||||
"Send notifikationer via Apprise til næsten enhver notifikationstjeneste"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Brug standard Apprise-URL'er"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "Standard Apprise-URL'er"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise definerer tjenesteforbindelsesoplysninger ved hjælp af "
|
||||
"URL'er.<br>Læs Apprise-wikien om, hvordan man definerer URL'en for hver "
|
||||
"tjeneste.<br>Brug komma og/eller mellemrum til at identificere mere end én "
|
||||
"URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr "Brug komma og/eller mellemrum for at angive flere URL'er."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4393,15 +4408,15 @@ msgstr "Sorteringsstreng"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Multi-part Label"
|
||||
msgstr "Fler-dels-etiket"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Show folder"
|
||||
msgstr "Vis mappe"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Season folder"
|
||||
msgstr "Sæsonmappe"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "In folders"
|
||||
@@ -4417,7 +4432,7 @@ msgstr "Job Navn som Filnavn"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Series"
|
||||
msgstr "Serier"
|
||||
msgstr ""
|
||||
|
||||
#. Note for title expression in Sorting that does case adjustment
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4430,31 +4445,31 @@ msgstr "Forarbejdede resultat"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Any property"
|
||||
msgstr "Enhver egenskab"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "property"
|
||||
msgstr "egenskab"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "GuessIt Property"
|
||||
msgstr "GuessIt-egenskab"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "GuessIt.Property"
|
||||
msgstr "GuessIt.Egenskab"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "GuessIt_Property"
|
||||
msgstr "GuessIt_Egenskab"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Minimum Filesize"
|
||||
msgstr "Minimum filstørrelse"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Affected Job Types"
|
||||
msgstr "Berørte jobtyper"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "All"
|
||||
@@ -4462,15 +4477,15 @@ msgstr "Alle"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Series with air dates"
|
||||
msgstr "Serier med sendetidspunkter"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Movies"
|
||||
msgstr "Film"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Other / Unknown"
|
||||
msgstr "Andet / Ukendt"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4482,43 +4497,34 @@ msgid ""
|
||||
"applied.</p><p>More options are available when Advanced Settings is "
|
||||
"checked.<br/>Detailed information can be found on the Wiki.</p>"
|
||||
msgstr ""
|
||||
"<p>Brug sorteringsværktøjer til automatisk at organisere dine fuldførte "
|
||||
"downloads. For eksempel, placer alle episoder fra en serie i en "
|
||||
"sæsonspecifik mappe. Eller placer film i en mappe opkaldt efter "
|
||||
"filmen.</p><p>Sorteringsværktøjer afprøves i den rækkefølge, de vises, og "
|
||||
"kan omarrangeres ved at trække og slippe.<br/>Den første aktive sortering, "
|
||||
"der matcher både den berørte kategori og jobtype, anvendes.</p><p>Flere "
|
||||
"muligheder er tilgængelige, når Avancerede indstillinger er "
|
||||
"markeret.<br/>Detaljeret information kan findes på Wiki'en.</p>"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Add Sorter"
|
||||
msgstr "Tilføj sortering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Sorter"
|
||||
msgstr "Fjern sortering"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Test Data"
|
||||
msgstr "Testdata"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Quick start"
|
||||
msgstr "Hurtig start"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Move and rename all episodes in the \"tv\" category to a show-specific "
|
||||
"folder"
|
||||
msgstr ""
|
||||
"Flyt og omdøb alle episoder i kategorien \"tv\" til en programspecifik mappe"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Move and rename all movies in the \"movies\" category to a movie-specific "
|
||||
"folder"
|
||||
msgstr "Flyt og omdøb alle film i kategorien \"movies\" til en filmspecifik mappe"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4551,11 +4557,6 @@ msgstr "Slet"
|
||||
msgid "Filename"
|
||||
msgstr "Filnavn"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Alder"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Ledig diskplads"
|
||||
@@ -4634,11 +4635,11 @@ msgstr "Datoformat"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Extra queue columns"
|
||||
msgstr "Ekstra køkolonner"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Extra history columns"
|
||||
msgstr "Ekstra historikkolonner"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "page"
|
||||
@@ -4689,8 +4690,6 @@ msgid ""
|
||||
"Are you sure you want to delete all folders in your Temporary Download "
|
||||
"Folder? This cannot be undone!"
|
||||
msgstr ""
|
||||
"Er du sikker på, at du vil slette alle mapper i din midlertidige "
|
||||
"downloadmappe? Dette kan ikke fortrydes!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetch NZB from URL"
|
||||
@@ -4729,8 +4728,6 @@ msgid ""
|
||||
"When you Retry a job, 'Duplicate Detection' and 'Abort jobs that cannot be "
|
||||
"completed' are disabled."
|
||||
msgstr ""
|
||||
"Når du genforsøger et job, er 'Dubletdetektering' og 'Afbryd jobs, der ikke "
|
||||
"kan fuldføres' deaktiveret."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "View Script Log"
|
||||
@@ -4738,7 +4735,7 @@ msgstr "Vis scriptlog"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Renaming the job will abort Direct Unpack."
|
||||
msgstr "Omdøbning af jobbet vil afbryde direkte udpakning."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4762,7 +4759,7 @@ msgstr "Kompakt layout"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Always use full screen width"
|
||||
msgstr "Brug altid fuld skærmbredde"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Tabbed layout <br/>(separate queue and history)"
|
||||
@@ -4782,11 +4779,11 @@ msgstr "Bekræft Historik-fjernelse"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Keyboard shortcuts"
|
||||
msgstr "Tastaturgenveje"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Shift+Arrow key: Browse Queue and History pages"
|
||||
msgstr "Shift+piletast: Gennemse Kø- og Historiksider"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "How long or untill when do you want to pause? (in English!)"
|
||||
@@ -4809,12 +4806,10 @@ msgid ""
|
||||
"All usernames, passwords and API-keys are automatically removed from the log"
|
||||
" and the included copy of your settings."
|
||||
msgstr ""
|
||||
"Alle brugernavne, adgangskoder og API-nøgler fjernes automatisk fra loggen "
|
||||
"og den inkluderede kopi af dine indstillinger."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Sort by % downloaded <small>Most→Least</small>"
|
||||
msgstr "Sortér efter % downloadet <small>Mest→Mindst</small>"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Sort by Age <small>Oldest→Newest</small>"
|
||||
@@ -4949,11 +4944,11 @@ msgstr "Start guide"
|
||||
#. Tooltip for disabled Next button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Click on Test Server before continuing"
|
||||
msgstr "Klik på Test server før du fortsætter"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restore backup"
|
||||
msgstr "Gendan sikkerhedskopi"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4970,7 +4965,7 @@ msgstr ""
|
||||
#. Error message
|
||||
#: sabnzbd/sorting.py
|
||||
msgid "Failed to rename %s to %s"
|
||||
msgstr "Kunne ikke omdøbe %s til %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/sorting.py
|
||||
@@ -4989,10 +4984,6 @@ msgstr "Fil ikke på server"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "Serveren kunne ikke fuldføre anmodningen"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tom NZB fil %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
193
po/main/de.po
@@ -15,14 +15,14 @@
|
||||
# Stefan Rodriguez Galeano, 2024
|
||||
# M Z, 2024
|
||||
# Gjelbrim Haskaj, 2024
|
||||
# Safihre <safihre@sabnzbd.org>, 2024
|
||||
# Media Cat, 2025
|
||||
# Safihre <safihre@sabnzbd.org>, 2025
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: SABnzbd-4.6.0\n"
|
||||
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
|
||||
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2025\n"
|
||||
"Last-Translator: Media Cat, 2025\n"
|
||||
"Language-Team: German (https://app.transifex.com/sabnzbd/teams/111101/de/)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
@@ -58,8 +58,6 @@ msgid ""
|
||||
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
|
||||
"used."
|
||||
msgstr ""
|
||||
"OpenSSL kann nicht verknüpft werden, optimierte SSL-Verbindungsfunktionen "
|
||||
"werden nicht verwendet."
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -167,11 +165,6 @@ msgstr ""
|
||||
"Die aktuellen Zugriffseinstellungen (%o) könnte SABnzbd den Zugriff auf die "
|
||||
"erstellten Dateien und Ordner von SABnzbd verweigern."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -362,7 +355,7 @@ msgstr "Unerwünschter Typ \"%s\" in RAR Datei. Unerwünschte Datei ist %s "
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Unerwünschter Dateityp im RAR-Archiv %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Abgebrochen, unerwünschte Dateieindung gefunden"
|
||||
|
||||
@@ -391,11 +384,11 @@ msgstr "Kontingent"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Warnung zur Kontingentgrenze (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Download nach Kontingentzurücksetzung fortgesetzt"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -622,6 +615,11 @@ msgstr "Fehler %s@%s zu initialisieren, aus folgendem Grund: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Schwerer Fehler im Downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s:Unbekannter Statuscode%s für Artikel erhalten %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Zu viele Verbindungen zu Server %s [%s]"
|
||||
@@ -643,6 +641,11 @@ msgstr "Anmelden beim Server fehlgeschlagen. %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Fehler beim Verbinden mit %s@%s, Meldung = %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Vermute Fehler im Downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Wird beendet …"
|
||||
@@ -1041,7 +1044,7 @@ msgid "Update Available!"
|
||||
msgstr "Neue Version verfügbar!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Hochladen fehlgeschlagen: %s"
|
||||
|
||||
@@ -1288,16 +1291,6 @@ msgstr "Versuche SFV-Überprüfung"
|
||||
msgid "left"
|
||||
msgstr "rest"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s:Unbekannter Statuscode%s für Artikel erhalten %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Vermute Fehler im Downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Dieser Server erlaubt kein SSL auf diesem Port"
|
||||
@@ -1484,18 +1477,106 @@ msgstr "Fehler beim Laden von %s. Beschädigte Datei gefunden."
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB zur Warteschlange hinzugefügt"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Doppelte NZB \"%s\" wird ignoriert"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "kopieren der NZB \"%s\" fehlgeschlagen"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "Doppelte NZB"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Ungültige NZB-Datei %s wird übersprungen (Fehler: %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Leere NZB-Datei %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
"Das Vorwarteschlangen (pre-queue) Skript hat die Downloadaufgabe als "
|
||||
"gescheitert markiert"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Ungewollte Dateiendung in der Datei %s (%s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Abgebrochen, kann nicht fertiggestellt werden"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Fehler beim Importieren von %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLIKAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATIVE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "VERSCHLÜSSELT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "ZU GROSS"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "UNVOLLSTÄNDIG"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "UNERWÜNSCHT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "WARTE %s Sek"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "AUSBREITUNG %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr ""
|
||||
"Heruntergeladen in %s mit einer Durchschnittsgeschwindigkeit von %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Alter"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s Artikel hatten ein ungültiges Format"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s Artikel fehlten"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s Artikel hatten nicht übereinstimmende Duplikate"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Doppelt vorhandene NZB \"%s\" angehalten"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problem mit"
|
||||
@@ -2397,7 +2478,7 @@ msgstr "Erneut versuchen"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "Als abgeschlossen markieren und temporäre Dateien entfernen"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3436,9 +3517,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "SFV-basierte Überprüfung aktivieren"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Zusätzliche Überprüfung mittels SFV-Dateien durchführen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3781,9 +3861,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"Kontingent für diesen Server, gezählt ab dem Zeitpunkt der Festlegung. In "
|
||||
"Bytes, optional gefolgt von K,M,G.<br />Wird alle paar Minuten überprüft. "
|
||||
"Benachrichtigung wird gesendet, wenn das Kontingent aufgebraucht ist."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3871,17 +3948,6 @@ msgstr "Für unzuverlässige Server, wird bei Fehlern länger ignoriert"
|
||||
msgid "Enable"
|
||||
msgstr "Aktivieren"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4301,30 +4367,22 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Aktivieren Sie Info-Benachrichtigungen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Senden Sie Benachrichtigungen direkt an jeden von Ihnen genutzten "
|
||||
"Benachrichtigungsdienst.<br>Zum Beispiel: Slack, Discord, Telegram oder "
|
||||
"jeden anderen Dienst aus über 100 unterstützten Diensten!"
|
||||
"Senden Sie Benachrichtigungen mit Anfragen an fast jeden "
|
||||
"Benachrichtigungsdienst"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Standard-Apprise-URLs verwenden"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "Standard Apprise URLs"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise definiert Dienstverbindungsinformationen über URLs.<br>Lesen Sie das"
|
||||
" Apprise-Wiki, um zu erfahren, wie Sie die URL für jeden Dienst "
|
||||
"definieren.<br>Verwenden Sie ein Komma und/oder Leerzeichen, um mehr als "
|
||||
"eine URL anzugeben."
|
||||
"Verwenden Sie ein Komma und/oder ein Leerzeichen, um mehr als eine URL zu "
|
||||
"kennzeichnen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4665,11 +4723,6 @@ msgstr "Löschen"
|
||||
msgid "Filename"
|
||||
msgstr "Dateiname"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Alter"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Freier Speicherplatz"
|
||||
@@ -4803,8 +4856,6 @@ msgid ""
|
||||
"Are you sure you want to delete all folders in your Temporary Download "
|
||||
"Folder? This cannot be undone!"
|
||||
msgstr ""
|
||||
"Sind Sie sicher, dass Sie alle Ordner in Ihrem temporären Download-Ordner "
|
||||
"löschen möchten? Dies kann nicht rückgängig gemacht werden!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetch NZB from URL"
|
||||
@@ -5064,7 +5115,7 @@ msgstr "Assistenten starten"
|
||||
#. Tooltip for disabled Next button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Click on Test Server before continuing"
|
||||
msgstr "Klicken Sie auf \"Server testen\", bevor Sie fortfahren"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restore backup"
|
||||
@@ -5104,10 +5155,6 @@ msgstr "Datei nicht auf dem Server"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "Server konnte nicht vollständig antworten"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Leere NZB-Datei %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
187
po/main/es.po
@@ -156,11 +156,6 @@ msgstr ""
|
||||
"La umask actual (%o) podría denegarle acceso a SABnzbd a los archivos y "
|
||||
"carpetas que este crea."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -347,7 +342,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Se ha encontrado una extensión desconocida en el fichero rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Se interrumpió la acción porque se detectó una extensión no deseada"
|
||||
|
||||
@@ -378,11 +373,11 @@ msgstr "Cuota"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Advertencia de límite de cuota (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Descarga reanudada después de reiniciar la cuota"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -607,6 +602,12 @@ msgstr "Error al inicializar %s@%s con la razón: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Error grave en el descargador"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
"%s@%s: Se recibió un código de estado desconocido %s para el artículo %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Demasiadas conexiones con el servidor %s [%s]"
|
||||
@@ -628,6 +629,11 @@ msgstr "Registraccion fallo para servidor %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Ha fallado la conexión a %s@%s, el mensaje=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Error sospechoso en downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Apagando"
|
||||
@@ -1025,7 +1031,7 @@ msgid "Update Available!"
|
||||
msgstr "¡Actualización Disponible!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Error al subir archivo: %s"
|
||||
|
||||
@@ -1276,17 +1282,6 @@ msgstr "Intentando verificación por SFV"
|
||||
msgid "left"
|
||||
msgstr "Restante"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
"%s@%s: Se recibió un código de estado desconocido %s para el artículo %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Error sospechoso en downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Este servidor no permite SSL en este puerto"
|
||||
@@ -1476,18 +1471,105 @@ msgstr "Error al cargar %s, archivo corrupto"
msgid "NZB added to queue"
msgstr "NZB añadido a la cola"

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Ignoring duplicate NZB \"%s\""
msgstr "Ignorando NZB Duplicado \"%s\""

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Failing duplicate NZB \"%s\""
msgstr "Fallo al duplicar NZB \"%s\""

#: sabnzbd/nzbqueue.py
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
msgid "Duplicate NZB"
msgstr "Duplicar NZB"

#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Invalid NZB file %s, skipping (error: %s)"
msgstr "Fichero NBZ inválido: %s, omitiendo (razón=%s)"

#. Warning message
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
msgid "Empty NZB file %s"
msgstr "Fichero NZB vacío: %s"

#: sabnzbd/nzbstuff.py
msgid "Pre-queue script marked job as failed"
msgstr ""
"La secuencia de comandos de la cola preestablecida ha marcado la tarea como "
"fallida"

#. Warning message
#: sabnzbd/nzbstuff.py
msgid "Unwanted Extension in file %s (%s)"
msgstr "Extensión no deseada en el archivo %s (%s)"

#: sabnzbd/nzbstuff.py
msgid "Aborted, cannot be completed"
msgstr "Abortado, No puede ser completado"

#. Error message
#: sabnzbd/nzbstuff.py
msgid "Error importing %s"
msgstr "Error importando %s"

#: sabnzbd/nzbstuff.py
msgid "DUPLICATE"
msgstr "DUPLICADO"

#: sabnzbd/nzbstuff.py
msgid "ALTERNATIVE"
msgstr "ALTERNATIVO"

#: sabnzbd/nzbstuff.py
msgid "ENCRYPTED"
msgstr "ENCRIPTADO"

#: sabnzbd/nzbstuff.py
msgid "TOO LARGE"
msgstr "DEMASIADO GRANDE"

#: sabnzbd/nzbstuff.py
msgid "INCOMPLETE"
msgstr "INCOMPLETO"

#: sabnzbd/nzbstuff.py
msgid "UNWANTED"
msgstr "NO DESEADO"

#: sabnzbd/nzbstuff.py
msgid "WAIT %s sec"
msgstr "ESPERAR %s seg"

#: sabnzbd/nzbstuff.py
msgid "PROPAGATING %s min"
msgstr "PROPAGANDO %s min"

#: sabnzbd/nzbstuff.py
msgid "Downloaded in %s at an average of %sB/s"
msgstr "Descargado en %s a una media de %sB/s"

#. Job details page, file age column header
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
msgid "Age"
msgstr "Edad"

#: sabnzbd/nzbstuff.py
msgid "%s articles were malformed"
msgstr "%s artículos estaban mal formados."

#: sabnzbd/nzbstuff.py
msgid "%s articles were missing"
msgstr "%s artículos no encontrados"

#: sabnzbd/nzbstuff.py
msgid "%s articles had non-matching duplicates"
msgstr "%s artículos contenían duplicados inconexos"

#: sabnzbd/nzbstuff.py
msgid "Pausing duplicate NZB \"%s\""
msgstr "Pausando NZB duplicados \"%s\""

#: sabnzbd/panic.py
msgid "Problem with"
msgstr "Problema con"
@@ -2387,7 +2469,7 @@ msgstr "Reintentar"
#. History page button
#: sabnzbd/skintext.py
msgid "Mark as Completed & Remove Temporary Files"
msgstr "Marcar como completado y eliminar archivos temporales"
msgstr ""

#. Queue page table, script selection menu
#: sabnzbd/skintext.py
@@ -3413,9 +3495,8 @@ msgid "Enable SFV-based checks"
msgstr "Habilitar verificacion basada en SFV"

#: sabnzbd/skintext.py
msgid ""
"If no par2 files are available, use sfv files (if present) to verify files"
msgstr ""
msgid "Do an extra verification based on SFV files."
msgstr "Realiza una verificación extra basada en ficheros SFV."

#: sabnzbd/skintext.py
msgid "User script can flag job as failed"
@@ -3750,9 +3831,6 @@ msgid ""
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
"when quota is spent."
msgstr ""
"Cuota para este servidor, contada desde el momento en que se establece. En "
"bytes, opcionalmente seguido de K,M,G.<br />Comprobado cada pocos minutos. "
"Se envía una notificación cuando se agota la cuota."

#. Server's retention time in days
#: sabnzbd/skintext.py
@@ -3842,17 +3920,6 @@ msgstr ""
msgid "Enable"
msgstr "Habilitar"

#: sabnzbd/skintext.py
msgid "Articles per request"
msgstr ""

#: sabnzbd/skintext.py
msgid ""
"Request multiple articles per connection without waiting for each response "
"first.<br />This can improve download speeds, especially on connections with"
" higher latency."
msgstr ""

#. Button: Remove server
#: sabnzbd/skintext.py
msgid "Remove Server"
@@ -4271,29 +4338,20 @@ msgid "Enable Apprise notifications"
msgstr "Habilitar notificaciones Apprise"

#: sabnzbd/skintext.py
msgid ""
"Send notifications directly to any notification service you use.<br>For "
"example: Slack, Discord, Telegram, or any service from over 100 supported "
"services!"
msgid "Send notifications using Apprise to almost any notification service"
msgstr ""
"Envíe notificaciones directamente a cualquier servicio de notificaciones que"
" utilice.<br>Por ejemplo: Slack, Discord, Telegram o cualquier servicio de "
"más de 100 servicios compatibles."
"Enviar notificaciones usando Apprise a casi cualquier servicio de "
"notificación"

#. Apprise settings
#: sabnzbd/skintext.py
msgid "Use default Apprise URLs"
msgstr "Usar URLs de Apprise predeterminadas"
msgid "Default Apprise URLs"
msgstr "URLs predeterminadas de Apprise"

#. Apprise settings
#: sabnzbd/skintext.py
msgid ""
"Apprise defines service connection information using URLs.<br>Read the "
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
"space to identify more than one URL."
msgstr ""
"Apprise define la información de conexión del servicio mediante URLs.<br>Lea"
" el wiki de Apprise para saber cómo definir la URL de cada servicio.<br>Use "
"una coma y/o espacio para identificar más de una URL."
msgid "Use a comma and/or space to identify more than one URL."
msgstr "Use una coma y/o espacio para identificar más de una URL."

#: sabnzbd/skintext.py
msgid ""
@@ -4635,11 +4693,6 @@ msgstr "Eliminar"
msgid "Filename"
msgstr "Nombre de archivo"

#. Job details page, file age column header
#: sabnzbd/skintext.py
msgid "Age"
msgstr "Edad"

#: sabnzbd/skintext.py
msgid "Free Space"
msgstr "Espacio libre"
@@ -4775,8 +4828,6 @@ msgid ""
"Are you sure you want to delete all folders in your Temporary Download "
"Folder? This cannot be undone!"
msgstr ""
"¿Está seguro de que desea eliminar todas las carpetas en su carpeta de "
"descargas temporales? ¡Esto no se puede deshacer!"

#: sabnzbd/skintext.py
msgid "Fetch NZB from URL"
@@ -5036,7 +5087,7 @@ msgstr "Iniciar Asistente"
#. Tooltip for disabled Next button
#: sabnzbd/skintext.py
msgid "Click on Test Server before continuing"
msgstr "Haga clic en Probar servidor antes de continuar"
msgstr ""

#: sabnzbd/skintext.py
msgid "Restore backup"
@@ -5076,10 +5127,6 @@ msgstr "El fichero no se encuentra en el servidor"
msgid "Server could not complete request"
msgstr "El servidor no ha podido completar la solicitud"

#: sabnzbd/urlgrabber.py
msgid "Empty NZB file %s"
msgstr "Fichero NZB vacío: %s"

#. Error message
#: sabnzbd/urlgrabber.py
msgid "URLGRABBER CRASHED"
158 po/main/fi.po
@@ -146,11 +146,6 @@ msgid ""
"creates."
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid ""
@@ -320,7 +315,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Ei toivottu tiedostopääte on rar arkistossa %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Peruutettu, ei toivottu tiedostopääte havaittu"
|
||||
|
||||
@@ -561,6 +556,11 @@ msgstr "Alustaminen epäonnistui kohteessa %s@%s syy: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Liikaa yhteyksiä palvelimelle %s [%s]"
|
||||
@@ -580,6 +580,11 @@ msgstr "Kirjautuminen palvelimelle %s epäonnistui [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Yhdistäminen %s@%s epäonnistui, viesti=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Mahdollinen virhe lataajassa"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Sammutetaan"
|
||||
@@ -966,7 +971,7 @@ msgid "Update Available!"
|
||||
msgstr "Päivitys saatavilla!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1203,16 +1208,6 @@ msgstr "Yritetään SFV varmennusta"
|
||||
msgid "left"
|
||||
msgstr "jäljellä"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Mahdollinen virhe lataajassa"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Tämä palvelin ei salli SSL yhteyksiä tähän porttiin"
|
||||
@@ -1392,18 +1387,103 @@ msgstr "Virhe ladattaessa %s, korruptoitunut tiedosto havaittu"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB lisätty jonoon"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ohitetaan kaksoiskappale NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tyhjä NZB tiedosto %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Peruutettu, ei voi valmistua"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Virhe tuotaessa %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "KAKSOISKAPPALE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "SALATTU"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "LIIAN SUURI"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "KESKENERÄINEN"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "EI TOIVOTTU"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "ODOTA %s sekuntia"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "LEVITETÄÄN %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Ladattiin ajassa %s keskilatausnopeudella %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Ikä"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artikkelia oli väärin muotoiltuja"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artikkelia puuttui"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artikkelissa oli ei-vastaavia kaksoiskappaleita"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Keskeytetään kaksoiskappale NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Ongelma"
|
||||
@@ -3283,9 +3363,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "SFV-pohjaiset tarkistukset käytössä"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Suorittaa ylimääräisen varmennuksen SFV tiedostojen avulla."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3683,17 +3762,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Ota käyttöön"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4107,22 +4175,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4450,11 +4513,6 @@ msgstr "Poista"
|
||||
msgid "Filename"
|
||||
msgstr "Tiedostonimi"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Ikä"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Vapaa tila"
|
||||
@@ -4885,10 +4943,6 @@ msgstr "Tiedostoa ei ole palvelimella"
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tyhjä NZB tiedosto %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
173 po/main/fr.po
@@ -157,11 +157,6 @@ msgstr ""
"L'umask actuel (%o) pourrait refuser à SABnzbd l'accès aux fichiers et "
"dossiers qu'il crée."

#. Warning message
#: sabnzbd/__init__.py
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid ""
@@ -352,7 +347,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "L'extension indésirable est dans le fichier rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Interrompu, extension indésirable détectée"
|
||||
|
||||
@@ -611,6 +606,11 @@ msgstr "Échec d'initialisation de %s@%s pour la raison suivante : %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Erreur fatale dans le Téléchargeur"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s a reçu le code d'état inconnu %s pour l'article %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Trop de connexions au serveur %s [%s]"
|
||||
@@ -632,6 +632,11 @@ msgstr "Échec de la connexion au serveur %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "La connexion à %s@%s a échoué, message=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Erreur suspecte dans le téléchargeur"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Arrêt en cours..."
|
||||
@@ -1030,7 +1035,7 @@ msgid "Update Available!"
|
||||
msgstr "Mise à Jour disponible!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Échec de l'upload du fichier : %s"
|
||||
|
||||
@@ -1277,16 +1282,6 @@ msgstr "Essai vérification SFV"
|
||||
msgid "left"
|
||||
msgstr "restant"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s a reçu le code d'état inconnu %s pour l'article %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Erreur suspecte dans le téléchargeur"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Ce serveur n'authorise pas de connexion SSL sur ce port"
|
||||
@@ -1471,18 +1466,103 @@ msgstr "Erreur lors du chargement de %s, fichier corrompu détecté"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB ajouté à la file d'attente"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Doublon NZB ignoré \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "Échec de duplication du NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "Dupliquer NZB"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Fichier NZB %s invalide, sera ignoré (erreur : %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Fichier NZB %s vide"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Le script de pré-file d'attente a marqué la tâche comme échouée"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Extension non souhaitée dans le fichier %s (%s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Interrompu, ne peut être achevé"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Erreur lors de l'importation de %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DOUBLON"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATIVE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "CHIFFRÉ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "TROP VOLUMINEUX"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "INCOMPLET"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "INDÉSIRABLE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "PATIENTER %s sec"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "PROPAGATION %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Téléchargé en %s à %sB/s de moyenne"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Âge"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s articles malformés"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s articles manquants"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s articles avec doublons sans correspondance"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Mise en pause du doublon NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problème avec"
|
||||
@@ -3422,11 +3502,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Activer les contrôles SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
"Si aucun fichier par2 n'est disponible, utiliser les fichiers sfv (si "
|
||||
"présents) pour vérifier les fichiers"
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Fait une vérification supplémentaire basée sur les fichiers SFV."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3857,17 +3934,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Activer"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4287,30 +4353,20 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Activer les notifications Apprise"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Envoyez des notifications directement vers n'importe quel service de "
|
||||
"notification que vous utilisez.<br>Par exemple : Slack, Discord, Telegram ou"
|
||||
" tout autre service parmi plus de 100 services pris en charge !"
|
||||
"Envoyer des notifications en utilisant Apprise vers presque n'importe quel "
|
||||
"service de notification"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Utiliser les URLs Apprise par défaut"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "URLs par défaut d'Apprise"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise définit les informations de connexion au service à l'aide "
|
||||
"d'URL.<br>Consultez le wiki Apprise pour savoir comment définir l'URL de "
|
||||
"chaque service.<br>Utilisez une virgule et/ou un espace pour identifier "
|
||||
"plusieurs URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr "Utilisez une virgule et/ou un espace pour identifier plusieurs URL."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4653,11 +4709,6 @@ msgstr "Supprimer"
|
||||
msgid "Filename"
|
||||
msgstr "Nom de fichier"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Âge"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Espace libre"
|
||||
@@ -5096,10 +5147,6 @@ msgstr "Fichier introuvable sur le serveur"
msgid "Server could not complete request"
msgstr "Le serveur n'a pas pu terminer la requête"

#: sabnzbd/urlgrabber.py
msgid "Empty NZB file %s"
msgstr "Fichier NZB %s vide"

#. Error message
#: sabnzbd/urlgrabber.py
msgid "URLGRABBER CRASHED"
210 po/main/he.po
@@ -2,14 +2,14 @@
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
#
# Translators:
# Safihre <safihre@sabnzbd.org>, 2023
# ION, 2025
# Safihre <safihre@sabnzbd.org>, 2025
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.6.0\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2025\n"
"Last-Translator: ION, 2025\n"
"Language-Team: Hebrew (https://app.transifex.com/sabnzbd/teams/111101/he/)\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
@@ -42,7 +42,7 @@ msgstr "לא ניתן למצוא תבניות רשת: %s, מנסה תבנית ת
|
||||
msgid ""
|
||||
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
|
||||
"used."
|
||||
msgstr "לא ניתן לקשר ל-OpenSSL, פונקציות חיבור SSL מותאמות לא יהיו בשימוש."
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -143,11 +143,6 @@ msgstr ""
|
||||
"פקודת umask נוכחית (%o) עשויה לדחות גישה מן SABnzbd אל הקבצים והתיקיות שהוא "
|
||||
"יוצר."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -215,16 +210,12 @@ msgid ""
|
||||
"Could not connect to %s on port %s. Use the default usenet settings: port "
|
||||
"563 and SSL turned on"
|
||||
msgstr ""
|
||||
"לא ניתן להתחבר ל-%s בפורט %s. השתמש בהגדרות ברירת המחדל של usenet: פורט 563 "
|
||||
"ו-SSL מופעל"
|
||||
|
||||
#: sabnzbd/api.py
|
||||
msgid ""
|
||||
"Could not connect to %s on port %s. Use the default usenet settings: port "
|
||||
"119 and SSL turned off"
|
||||
msgstr ""
|
||||
"לא ניתן להתחבר ל-%s בפורט %s. השתמש בהגדרות ברירת המחדל של usenet: פורט 119 "
|
||||
"ו-SSL כבוי"
|
||||
|
||||
#: sabnzbd/api.py, sabnzbd/interface.py
|
||||
msgid "Server address \"%s:%s\" is not valid."
|
||||
@@ -325,7 +316,7 @@ msgstr "בעבודה \"%s\" יש סיומת בלתי רצויה בתוך קוב
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "סיומת בלתי רצויה בקובץ rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "בוטל, סיומת בלתי רצויה התגלתה"
|
||||
|
||||
@@ -352,11 +343,11 @@ msgstr "מכסה"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "אזהרת מגבלת מכסה (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "ההורדה התחדשה לאחר איפוס מכסה"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -420,7 +411,7 @@ msgstr ""
|
||||
#: sabnzbd/cfg.py
|
||||
msgid ""
|
||||
"The par2 application was switched, any custom par2 parameters were removed"
|
||||
msgstr "יישום par2 הוחלף, כל פרמטרי par2 מותאמים אישית הוסרו"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/config.py
|
||||
@@ -496,7 +487,7 @@ msgstr "אי־האפלה שינתה שם של %d קבצים"
|
||||
|
||||
#: sabnzbd/deobfuscate_filenames.py
|
||||
msgid "Deobfuscate renamed %d subtitle file(s)"
|
||||
msgstr "בוצע ביטול ערפול של %d קבצי כתוביות ששמם שונה"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/directunpacker.py, sabnzbd/skintext.py
|
||||
msgid "Direct Unpack"
|
||||
@@ -572,6 +563,11 @@ msgstr "כישלון באתחול %s@%s עם סיבה: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "שגיאה גורלית במורידן"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: קוד בלתי ידוע של מעמד התקבל %s עבור מאמר %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "יותר מדי חיבורים לשרת %s [%s]"
|
||||
@@ -593,6 +589,11 @@ msgstr "כניסה נכשלה עבור שרת %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "התחברות אל %s@%s נכשלה, הודעה=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "הורדה חשודה במורידן"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "מכבה"
|
||||
@@ -978,7 +979,7 @@ msgid "Update Available!"
|
||||
msgstr "עדכון זמין!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "כישלון בהעלאת קובץ: %s"
|
||||
|
||||
@@ -1216,16 +1217,6 @@ msgstr "מנסה וידוא SFV"
|
||||
msgid "left"
|
||||
msgstr "נותר"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: קוד בלתי ידוע של מעמד התקבל %s עבור מאמר %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "הורדה חשודה במורידן"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "שרת זה אינו מתיר SSL על פתחה זו"
|
||||
@@ -1244,8 +1235,6 @@ msgid ""
|
||||
" locally injected certificate (for example by firewall or virus scanner). "
|
||||
"Try setting Certificate verification to Medium."
|
||||
msgstr ""
|
||||
"לא ניתן לאמת את האישור. זה יכול להיות בעיית שרת או בגלל אישור מוזרק מקומית "
|
||||
"(לדוגמה על ידי חומת אש או סורק וירוסים). נסה להגדיר את אימות האישור לבינוני."
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Server %s uses an untrusted certificate [%s]"
|
||||
@@ -1326,7 +1315,7 @@ msgstr "כישלון בשליחת הודעת Prowl"
|
||||
#. Warning message
|
||||
#: sabnzbd/notifier.py
|
||||
msgid "Failed to send Apprise message - no URLs defined"
|
||||
msgstr "שליחת הודעת Apprise נכשלה - לא הוגדרו כתובות URL"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/notifier.py
|
||||
@@ -1409,18 +1398,103 @@ msgstr "שגיאה בטעינת %s, קובץ פגום התגלה"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB התווסף לתור"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "מתעלם מן NZB כפול \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "מכשיל NZB כפול \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "NZB כפול"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "קובץ NZB בלתי תקף %s, מדלג (שגיאה: %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "קובץ NZB ריק %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "תסריט קדם־תור סומן כנכשל"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "סיומת בלתי רצויה בקובץ %s (%s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "בוטל, לא יכול להיות שלם"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "שגיאה ביבוא %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "כפול"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "חלופה"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "מוצפן"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "גדול מדי"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "בלתי שלם"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "בלתי רצוי"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "המתן %s שניות"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "מפיץ %s דקות"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "ירד תוך %s בממוצע של %s ב/ש"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "גיל"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s מאמרים עוותו"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s מאמרים היו חסרים"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "אל %s מאמרים יש כפילויות בלתי תואמות"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "משהה NZB כפול \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "בעיה עם"
|
||||
@@ -2313,7 +2387,7 @@ msgstr "נסה שוב"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "סמן כהושלם והסר קבצים זמניים"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2859,7 +2933,7 @@ msgstr "העבר עבודות אל הארכיון אם ההיסטוריה חור
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Delete jobs if the history and archive exceeds specified number of jobs"
|
||||
msgstr "מחק עבודות אם ההיסטוריה והארכיון עוברים את מספר העבודות שצוין"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Move jobs to the archive after specified number of days"
|
||||
@@ -2868,7 +2942,7 @@ msgstr "העבר עבודות אל הארכיון לאחר מספר מצוין
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Delete jobs from the history and archive after specified number of days"
|
||||
msgstr "מחק עבודות מההיסטוריה והארכיון לאחר מספר הימים שצוין"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Move all completed jobs to archive"
|
||||
@@ -3297,9 +3371,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "אפשר בדיקות מבוססות SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "בצע וידוא נוסף שמבוסס על קבצי SFV."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3615,8 +3688,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"מכסה לשרת זה, נספרת מהרגע שהיא נקבעה. בבייטים, באופן אופציונלי ניתן להוסיף "
|
||||
"K,M,G.<br />נבדקת כל כמה דקות. הודעה נשלחת כאשר המכסה מוצתה."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3647,11 +3718,6 @@ msgid ""
|
||||
"used. - Disabled: no certification verification. This is not secure at all, "
|
||||
"anyone could intercept your connection. "
|
||||
msgstr ""
|
||||
"כאשר SSL מופעל: - מחמיר: אכוף אימות אישור מלא. זוהי ההגדרה המאובטחת ביותר. -"
|
||||
" בינוני: אמת שהאישור תקף ותואם לכתובת השרת, אך אפשר אישורים המוזרקים מקומית "
|
||||
"(למשל על ידי חומת אש או סורק וירוסים). - מינימלי: אמת שהאישור תקף. זה לא "
|
||||
"מאובטח, כל אישור תקף יכול לשמש. - מושבת: ללא אימות אישור. זה לא מאובטח כלל, "
|
||||
"כל אחד יכול ליירט את החיבור שלך."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Disabled"
|
||||
@@ -3663,7 +3729,7 @@ msgstr "מזערי"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Medium"
|
||||
msgstr "בינוני"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Strict"
|
||||
@@ -3701,17 +3767,6 @@ msgstr "עבור שרתים בלתי מהימנים, ייתקל בהתעלמות
|
||||
msgid "Enable"
|
||||
msgstr "אפשר"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4126,28 +4181,18 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "אפשר התראות Apprise"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgstr ""
|
||||
"שלח הודעות ישירות לכל שירות הודעות שאתה משתמש בו.<br>לדוגמה: Slack, Discord,"
|
||||
" Telegram או כל שירות מתוך למעלה מ-100 שירותים נתמכים!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr "שלח התראות ע״י שימוש בשירות Apprise אל כמעט כל שירות התראות"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "השתמש בכתובות URL של Apprise המוגדרות כברירת מחדל"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "כתובות Apprise ברירות מחדל"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise מגדיר מידע על חיבור שירות באמצעות כתובות URL.<br>קרא את הוויקי של "
|
||||
"Apprise כדי ללמוד כיצד להגדיר את כתובת ה-URL עבור כל שירות.<br>השתמש בפסיק "
|
||||
"ו/או רווח כדי לזהות יותר מכתובת URL אחת."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr "השתמש בפסיק, ברווח או בשניהם כדי לזהות יותר מכתובת אחת."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4477,11 +4522,6 @@ msgstr "מחק"
|
||||
msgid "Filename"
|
||||
msgstr "שם קובץ"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "גיל"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "שטח פנוי"
|
||||
@@ -4615,8 +4655,6 @@ msgid ""
|
||||
"Are you sure you want to delete all folders in your Temporary Download "
|
||||
"Folder? This cannot be undone!"
|
||||
msgstr ""
|
||||
"האם אתה בטוח שברצונך למחוק את כל התיקיות בתיקיית ההורדות הזמנית שלך? לא ניתן"
|
||||
" לבטל פעולה זו!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetch NZB from URL"
|
||||
@@ -4875,7 +4913,7 @@ msgstr "התחל אשף"
|
||||
#. Tooltip for disabled Next button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Click on Test Server before continuing"
|
||||
msgstr "לחץ על בדיקת שרת לפני המשך"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restore backup"
|
||||
@@ -4915,10 +4953,6 @@ msgstr "קובץ לא על השרת"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "השרת לא היה יכול להשלים בקשה"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "קובץ NZB ריק %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
185 po/main/it.po
@@ -42,8 +42,6 @@ msgid ""
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
"used."
msgstr ""
"Impossibile collegarsi a OpenSSL, le funzioni di connessione SSL ottimizzate"
" non verranno utilizzate."

#. Error message
#: SABnzbd.py
@@ -150,11 +148,6 @@ msgstr ""
|
||||
"L'umask corrente (%o) potrebbe negare a SABnzbd l'accesso ai file e alle "
|
||||
"cartelle che crea."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -345,7 +338,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "L'estensione non desiderata è nel file rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Annullato, rilevata estensione non desiderata"
|
||||
|
||||
@@ -376,11 +369,11 @@ msgstr "Quota"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Avviso limite quota (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Download ripreso dopo il ripristino della quota"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -604,6 +597,11 @@ msgstr "Inizializzazione di %s@%s fallita con motivo: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Errore fatale nel Downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Ricevuto codice di stato sconosciuto %s per l'articolo %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Troppe connessioni al server %s [%s]"
|
||||
@@ -625,6 +623,11 @@ msgstr "Accesso fallito per il server %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Connessione a %s@%s fallita, messaggio=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sospetto errore nel downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Spegnimento in corso"
|
||||
@@ -1021,7 +1024,7 @@ msgid "Update Available!"
|
||||
msgstr "Aggiornamento disponibile!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Caricamento del file %s fallito"
|
||||
|
||||
@@ -1264,16 +1267,6 @@ msgstr "Tentativo di verifica SFV"
|
||||
msgid "left"
|
||||
msgstr "rimanente"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Ricevuto codice di stato sconosciuto %s per l'articolo %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sospetto errore nel downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Questo server non permette SSL su questa porta"
|
||||
@@ -1459,18 +1452,103 @@ msgstr "Errore durante il caricamento di %s, rilevato file corrotto"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB aggiunto alla coda"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorando NZB duplicato \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "Fallimento NZB duplicato \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "NZB duplicato"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "File NZB non valido %s, saltato (errore: %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "File NZB vuoto %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Lo script pre-coda ha contrassegnato il processo come fallito"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Estensione non desiderata nel file %s (%s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Annullato, non può essere completato"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Errore durante l'importazione di %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLICATO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATIVO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "CRITTOGRAFATO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "TROPPO GRANDE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "INCOMPLETO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "NON DESIDERATO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "ATTENDI %s sec"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "PROPAGAZIONE %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Scaricato in %s a una media di %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Età"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s articoli erano malformati"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s articoli erano mancanti"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s articoli avevano duplicati non corrispondenti"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Messa in pausa NZB duplicato \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problema con"
|
||||
@@ -2364,7 +2442,7 @@ msgstr "Riprova"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "Segna come completato e rimuovi i file temporanei"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3391,9 +3469,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Abilita controlli basati su SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Esegui una verifica extra basata sui file SFV."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3723,9 +3800,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"Quota per questo server, contata dal momento in cui viene impostata. In "
|
||||
"byte, opzionalmente seguito da K,M,G.<br />Controllato ogni pochi minuti. La"
|
||||
" notifica viene inviata quando la quota è esaurita."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3814,17 +3888,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Abilita"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4241,29 +4304,18 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Abilita notifiche Apprise"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgstr ""
|
||||
"Invia notifiche direttamente a qualsiasi servizio di notifica che "
|
||||
"utilizzi.<br>Ad esempio: Slack, Discord, Telegram o qualsiasi servizio tra "
|
||||
"oltre 100 servizi supportati!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr "Invia notifiche usando Apprise a quasi tutti i servizi di notifica"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Usa URL Apprise predefiniti"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "URL predefiniti di Apprise"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise definisce le informazioni di connessione del servizio utilizzando "
|
||||
"URL.<br>Leggi il wiki di Apprise per sapere come definire l'URL per ogni "
|
||||
"servizio.<br>Usa una virgola e/o uno spazio per identificare più di un URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr "Usa una virgola e/o uno spazio per identificare più di un URL."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4606,11 +4658,6 @@ msgstr "Elimina"
|
||||
msgid "Filename"
|
||||
msgstr "Nome file"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Età"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Spazio libero"
|
||||
@@ -4745,8 +4792,6 @@ msgid ""
|
||||
"Are you sure you want to delete all folders in your Temporary Download "
|
||||
"Folder? This cannot be undone!"
|
||||
msgstr ""
|
||||
"Sei sicuro di voler eliminare tutte le cartelle nella tua cartella di "
|
||||
"download temporanei? Questo non può essere annullato!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetch NZB from URL"
|
||||
@@ -5007,7 +5052,7 @@ msgstr "Avvia procedura guidata"
|
||||
#. Tooltip for disabled Next button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Click on Test Server before continuing"
|
||||
msgstr "Fai clic su Prova server prima di continuare"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restore backup"
|
||||
@@ -5047,10 +5092,6 @@ msgstr "File non presente sul server"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "Il server non ha potuto completare la richiesta"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "File NZB vuoto %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
158 po/main/nb.po
@@ -142,11 +142,6 @@ msgid ""
"creates."
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
msgstr ""

#. Warning message
#: sabnzbd/__init__.py
msgid ""
@@ -318,7 +313,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Uønsket forlenging finnes i rar fil %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Avbryt, uønsket forlenging oppdaget"
|
||||
|
||||
@@ -558,6 +553,11 @@ msgstr "Feilet å starte %s@%s grunnet: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "For mange tilkoblinger til server %s [%s]"
|
||||
@@ -577,6 +577,11 @@ msgstr "Kunne ikke logge inn på server %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Kontaker %s@%s feilet, feilmelding=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Mistenker feil i nedlaster"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Starter avslutning av SABnzbd.."
|
||||
@@ -963,7 +968,7 @@ msgid "Update Available!"
|
||||
msgstr "Oppdatering tilgjengelig"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1201,16 +1206,6 @@ msgstr "Prøver SFV-verifisering"
|
||||
msgid "left"
|
||||
msgstr "gjenstår"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Mistenker feil i nedlaster"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Denne serveren tillater ikke SSL på denne porten"
|
||||
@@ -1390,18 +1385,103 @@ msgstr "Lastingsfeil %s, feilaktig fil oppdaget"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB er lagt til i køen"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorerer duplikatfil \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tom NZB-fil %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Avbrutt, kan ikke fullføres"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Kunne ikke importere %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLIKAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "KRYPTERT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "FOR STOR"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "UFULLSTENDIG"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "UØNSKET"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "VENT %s sek"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Hentet filer på %s med gjenomsnitts hastighet på %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Tid"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artikler var korrupte"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artikler manglet"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artikler hadde ulike duplikater"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Stanser duplikatfil \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problem med"
|
||||
@@ -3266,9 +3346,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Aktiver SFV-baserte sjekker"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Utfør ekstra verifisering basert på SFV filer"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3662,17 +3741,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Aktivere"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4086,22 +4154,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4424,11 +4487,6 @@ msgstr "Fjern"
|
||||
msgid "Filename"
|
||||
msgstr "Filnavn"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Tid"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Ledig plass"
|
||||
@@ -4857,10 +4915,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Tom NZB-fil %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
185 po/main/nl.po
@@ -44,8 +44,6 @@ msgid ""
|
||||
"Unable to link to OpenSSL, optimized SSL connection functions will not be "
|
||||
"used."
|
||||
msgstr ""
|
||||
"Kan niet koppelen aan OpenSSL, geoptimaliseerde SSL-verbindingsfuncties "
|
||||
"worden niet gebruikt."
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -152,11 +150,6 @@ msgstr ""
|
||||
"Huidige umask (%o) zou kunnen beletten dat SABnzbd toegang heeft tot de "
|
||||
"aangemaakte bestanden en mappen."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -342,7 +335,7 @@ msgstr "Ongewenste extensie ontdekt in \"%s\". Het ongewenste bestand is \"%s\"
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "De ongewenste extensie zit in RAR-bestand %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Afgebroken, ongewenste extensie ontdekt"
|
||||
|
||||
@@ -373,11 +366,11 @@ msgstr "Quotum"
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Quota limit warning (%d%%)"
|
||||
msgstr "Waarschuwing quotumlimiet (%d%%)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/bpsmeter.py
|
||||
msgid "Downloading resumed after quota reset"
|
||||
msgstr "Downloaden hervat na quotumreset"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/cfg.py, sabnzbd/interface.py
|
||||
msgid "Incorrect parameter"
|
||||
@@ -605,6 +598,11 @@ msgstr "Initialisatie van %s@%s mislukt, vanwege: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "Onherstelbare fout in de Downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Onbekende statuscode %s ontvangen voor artikel %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Te veel verbindingen met server %s [%s]"
|
||||
@@ -626,6 +624,11 @@ msgstr "Aanmelden bij server %s mislukt [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Verbinding %s@%s mislukt, bericht=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Vedachte fout in downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Afsluiten"
|
||||
@@ -1024,7 +1027,7 @@ msgid "Update Available!"
|
||||
msgstr "Update beschikbaar!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Kon het volgende bestand niet uploaden: %s"
|
||||
|
||||
@@ -1267,16 +1270,6 @@ msgstr "Probeer SFV-verificatie"
|
||||
msgid "left"
|
||||
msgstr "over"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: Onbekende statuscode %s ontvangen voor artikel %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Vedachte fout in downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "De server staat geen SSL toe op deze poort"
|
||||
@@ -1462,18 +1455,103 @@ msgstr "Fout bij inladen van %s, corrupt bestand gevonden"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "Download aan wachtrij toegevoegd"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Dubbele download \"%s\" overgeslagen"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "Download '%s' geweigerd omdat het een dubbele is"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "Dubbele download"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Corrupte NZB %s wordt overgeslagen (foutmelding: %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "NZB-bestand %s is leeg"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Wachtrij filter script heeft de download afgekeurd"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Ongewenste extensie gevonden in %s (%s) "
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Afgebroken, kan niet voltooid worden"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Fout bij importeren van %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUBBEL"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATIEF"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "VERSLEUTELD"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "TE GROOT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "ONVOLLEDIG"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "ONGEWENST"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "WACHT %s sec"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "VERSPREIDINGSWACHTTIJD %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Gedownload in %s met een gemiddelde snelheid van %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Leeftijd"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artikelen zijn misvormd"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artikelen ontbreken"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artikelen hadden afwijkende duplicaten"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Dubbele download \"%s\" gepauzeerd"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Probleem met"
|
||||
@@ -2367,7 +2445,7 @@ msgstr "Opnieuw"
|
||||
#. History page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Mark as Completed & Remove Temporary Files"
|
||||
msgstr "Markeer als voltooid en verwijder tijdelijke bestanden"
|
||||
msgstr ""
|
||||
|
||||
#. Queue page table, script selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3389,9 +3467,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Voer SFV-gebaseerde controles uit"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Doe een extra verificatie m.b.v. SFV-bestanden"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3725,9 +3802,6 @@ msgid ""
|
||||
" follow with K,M,G.<br />Checked every few minutes. Notification is sent "
|
||||
"when quota is spent."
|
||||
msgstr ""
|
||||
"Quotum voor deze server, geteld vanaf het moment dat het is ingesteld. In "
|
||||
"bytes, optioneel gevolgd door K,M,G.<br />Wordt om de paar minuten "
|
||||
"gecontroleerd. Melding wordt verzonden wanneer het quotum is opgebruikt."
|
||||
|
||||
#. Server's retention time in days
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -3814,17 +3888,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Inschakelen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4243,30 +4306,19 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Apprise-meldingen activeren"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Stuur meldingen rechtstreeks naar elke meldingsservice die u "
|
||||
"gebruikt.<br>Bijvoorbeeld: Slack, Discord, Telegram of elke andere service "
|
||||
"uit meer dan 100 ondersteunde services!"
|
||||
"Stuur meldingen met behulp van Apprise naar bijna elke bestaande service."
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Gebruik standaard Apprise-URL's"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "Standaard Apprise-URL's"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise definieert serviceverbindingsinformatie met behulp van "
|
||||
"URL's.<br>Lees de Apprise-wiki om te leren hoe u de URL voor elke service "
|
||||
"definieert.<br>Gebruik een komma en/of spatie om meer dan één URL te "
|
||||
"identificeren."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr "Gebruik een komma en/of spatie om meer dan één URL op te geven."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4604,11 +4656,6 @@ msgstr "Verwijder"
|
||||
msgid "Filename"
|
||||
msgstr "Bestandsnaam"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Leeftijd"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Vrije ruimte"
|
||||
@@ -4742,8 +4789,6 @@ msgid ""
|
||||
"Are you sure you want to delete all folders in your Temporary Download "
|
||||
"Folder? This cannot be undone!"
|
||||
msgstr ""
|
||||
"Weet u zeker dat u alle mappen in uw tijdelijke downloadmap wilt "
|
||||
"verwijderen? Dit kan niet ongedaan worden gemaakt!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetch NZB from URL"
|
||||
@@ -5003,7 +5048,7 @@ msgstr "Wizard starten"
|
||||
#. Tooltip for disabled Next button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Click on Test Server before continuing"
|
||||
msgstr "Klik op Test server voordat u doorgaat"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restore backup"
|
||||
@@ -5043,10 +5088,6 @@ msgstr "Bestand bestaat niet op de server"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "De server kon de opdracht niet uitvoeren"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "NZB-bestand %s is leeg"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/pl.po
@@ -138,11 +138,6 @@ msgid ""
|
||||
"creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -317,7 +312,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Niepożądane rozszerzenie w pliku RAR %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Przerwano, wykryto niepożądane rozszerzenie"
|
||||
|
||||
@@ -559,6 +554,11 @@ msgstr "Błąd podczas inicjalizacji %s@%s: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Zbyt wiele połączeń do serwera %s [%s]"
|
||||
@@ -578,6 +578,11 @@ msgstr "Błąd logowania do serwera %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Błąd połączenia %s@%s, komunikat=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Nieobsługiwany błąd w module pobierania"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Wyłączanie"
|
||||
@@ -966,7 +971,7 @@ msgid "Update Available!"
|
||||
msgstr "Dostępna aktualizacja!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1206,16 +1211,6 @@ msgstr "Próba weryfikacji SFV"
|
||||
msgid "left"
|
||||
msgstr "pozostało"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Nieobsługiwany błąd w module pobierania"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Serwer nie obsługuje SSL na tym porcie"
|
||||
@@ -1395,18 +1390,103 @@ msgstr "Błąd ładowania %s, wykryto uszkodzony plik"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB dodany do kolejki"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignoruję zduplikowany NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Pusty plik NZB %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Przerwano, nie można ukończyć"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Błąd importu %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLIKAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "ZASZYFROWANY"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "ZA DUŻY"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "NIEKOMPLETNY"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "NIEPOŻĄDANY"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "CZEKAM %s s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Pobrano w %s ze średnią %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Wiek"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artykułów było uszkodzonych"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "Brakowało %s artykułów"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artykułów posiadało niepasujące duplikaty"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Wstrzymuję zduplikowany NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problem z"
|
||||
@@ -3277,9 +3357,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Włącz sprawdzanie przy użyciu SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Wykonuj dodatkową weryfikację na podstawie plików SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3674,17 +3753,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Włączony"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4098,22 +4166,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4436,11 +4499,6 @@ msgstr "Usuń"
|
||||
msgid "Filename"
|
||||
msgstr "Nazwa pliku"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Wiek"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Wolne miejsce"
|
||||
@@ -4867,10 +4925,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Pusty plik NZB %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/pt_BR.po
@@ -147,11 +147,6 @@ msgstr ""
|
||||
"Mascara atual (%o) pode negar ao SABnzbd acesso aos arquivos e diretórios "
|
||||
"criados."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -329,7 +324,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "A extensão indesejada está no arquivo rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Cancelado, extensão indesejada detectada"
|
||||
|
||||
@@ -573,6 +568,11 @@ msgstr "Falha ao iniciar %s@%s devido as seguintes razões: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Excesso de conexões ao servidor %s [%s]"
|
||||
@@ -592,6 +592,11 @@ msgstr "Falha de logon ao servidor %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "A conexão a %s@%s falhou. Mensagem=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Erro suspeito no downloader"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Encerrando"
|
||||
@@ -978,7 +983,7 @@ msgid "Update Available!"
|
||||
msgstr "Atualização Disponível!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1215,16 +1220,6 @@ msgstr "Tentando verificação SFV"
|
||||
msgid "left"
|
||||
msgstr "restantes"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Erro suspeito no downloader"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Este servidor não permite SSL nesta porta"
|
||||
@@ -1404,18 +1399,103 @@ msgstr "Erro ao carregar %s. Arquivo corrompido detectado"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB adicionado à fila"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorando NZB duplicado \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Arquivo NZB %s vazio"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Cancelado, não é possível concluir"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Erro ao importar %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLICADO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "CRIPTOGRAFADO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "MUITO GRANDE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "INCOMPLETO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "INDESEJADO"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "Espere %s segundo(s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Baixado em %s a uma média de %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Idade"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artigos estavam malformados"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artigos estavam faltando"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artigos tinham duplicatas não-correspondentes"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Pausando NZB duplicado \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problema com"
|
||||
@@ -3287,9 +3367,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Habilitar verificações baseadas em SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Fazer uma verificação extra baseada em arquivos SFV."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3685,17 +3764,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Habilitar"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4109,22 +4177,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4447,11 +4510,6 @@ msgstr "Eliminar"
|
||||
msgid "Filename"
|
||||
msgstr "Nome do arquivo"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Idade"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Espaço Disponível"
|
||||
@@ -4878,10 +4936,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Arquivo NZB %s vazio"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/ro.po
@@ -147,11 +147,6 @@ msgid ""
|
||||
"creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -331,7 +326,7 @@ msgstr "Extensie nedorită în fișierul RAR al „%s”. Fișierul nedorit este
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Extensii fișier nedorite în fișierul rar %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Oprit, extensii nedorite detectate"
|
||||
|
||||
@@ -581,6 +576,11 @@ msgstr "Nu am putu inițializa %s@%s din cauza următorului motiv: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Prea multe conexiuni la serverul %s [%s]"
|
||||
@@ -600,6 +600,11 @@ msgstr "Autentificare nereuşită la serverul %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Conectare %s@%s eșuată, mesaj=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Eroare suspectă în sistemul de descprcare"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Închidere"
|
||||
@@ -988,7 +993,7 @@ msgid "Update Available!"
|
||||
msgstr "Actualizare Disponibilă!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Eșuare la încărcarea fișierului: %s"
|
||||
|
||||
@@ -1231,16 +1236,6 @@ msgstr "Încerc verificare SFV"
|
||||
msgid "left"
|
||||
msgstr "rămas"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Eroare suspectă în sistemul de descprcare"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Acest server nu permite SSL pe acest port"
|
||||
@@ -1422,18 +1417,103 @@ msgstr "Eroare încărcare %s, fişier corupt detectat"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB adăugat în coadă"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorăm duplicat NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "Eșuare duplicat NZB „%s”"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "NZB duplicat"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Fişier NZB gol %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Scriptul pre-coadă a marcat sarcina ca nereușită"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "Extensie nedorită în fișierul %s (%s)"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Anulat nu poate fi finalizat"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Eroare importare %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUPLICAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "ENCRIPTAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "PREA MARE"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "INCOMPLET"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "NEDORIT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "AŞTEAPTĂ %s sec"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "SE PROPAGHEAZĂ %s min"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Descărcat în %s cu o medie de %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Vârsta"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s articolele au fost incorecte"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s articolele au fost lipsă"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s articolele au avut duplicate diferite"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Întrerupem duplicat NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problemă cu"
|
||||
@@ -3305,9 +3385,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Activează verficări SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Fă o verificare extra bazată pe fişiere SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3706,17 +3785,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Activează"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4130,22 +4198,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4467,11 +4530,6 @@ msgstr "Şterge"
|
||||
msgid "Filename"
|
||||
msgstr "Nume de fișier"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Vârsta"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Spațiu liber"
|
||||
@@ -4900,10 +4958,6 @@ msgstr "Fișierul nu este pe server"
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Fişier NZB gol %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/ru.po
@@ -142,11 +142,6 @@ msgid ""
|
||||
"creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -317,7 +312,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr ""
|
||||
|
||||
@@ -557,6 +552,11 @@ msgstr ""
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr ""
|
||||
@@ -576,6 +576,11 @@ msgstr "Ошибка входа на сервер %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Завершение работы"
|
||||
@@ -962,7 +967,7 @@ msgid "Update Available!"
|
||||
msgstr "Доступно обновление!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1201,16 +1206,6 @@ msgstr "Проверка SFV-суммы"
|
||||
msgid "left"
|
||||
msgstr "осталось"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr ""
|
||||
@@ -1390,18 +1385,103 @@ msgstr "Ошибка загрузки %s: обнаружен повреждён
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB-файл добавлен в очередь"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Пропущен повторяющийся NZB-файл «%s»"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Пустой NZB-файл %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Ошибка импорта %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "ПОВТОР"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "ЗАШИФРОВАН"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "СЛИШКОМ БОЛЬШОЙ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "НЕПОЛНЫЙ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "ОЖИДАНИЕ %s с"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Загружено за %s со средней скоростью %sБ/с"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Возраст"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s статей с ошибками"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s статей отсутствует"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s статей содержат несовпадающие повторы"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Приостановлен повторяющийся NZB-файл «%s»"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Проблема с"
|
||||
@@ -3269,9 +3349,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Использовать проверку по SFV"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Выполнять дополнительную проверку по SFV-файлам."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3663,17 +3742,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Включить"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4094,22 +4162,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4431,11 +4494,6 @@ msgstr "Удалить"
|
||||
msgid "Filename"
|
||||
msgstr "Название файла"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Возраст"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "свободно на диске"
|
||||
@@ -4863,10 +4921,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Пустой NZB-файл %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/sr.po
@@ -140,11 +140,6 @@ msgid ""
|
||||
"creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -314,7 +309,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Neželjena ekstenzija je u rar datoteci %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Prekinuto, detektovana neželjena ekstenzija"
|
||||
|
||||
@@ -555,6 +550,11 @@ msgstr "Neuspešna inicijalizacija %s@%s iz razloga: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "Previše konekcija ka serveru %s [%s]"
|
||||
@@ -574,6 +574,11 @@ msgstr "Неуспешно пријављивање на сервер %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Povezivanje na %s@%s neuspešno, poruka=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sumnja u grešku u programu za download"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Гашење"
|
||||
@@ -958,7 +963,7 @@ msgid "Update Available!"
|
||||
msgstr "Нова верзија доступна!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1196,16 +1201,6 @@ msgstr "Pokušaj SFV provere"
|
||||
msgid "left"
|
||||
msgstr "остало"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Sumnja u grešku u programu za download"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Ovaj server ne dozvoljava SSL na ovom portu"
|
||||
@@ -1385,18 +1380,103 @@ msgstr "Грешка учитавање %s, покварена датотека
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB додат у ред"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Игнорисање дуплог NZB-а \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Празан NZB %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Поништено, не може да се заврши"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Грешка увоза %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "ДУПЛИКАТ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "ШИФРИРАНО"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "ПРЕВЕЛИКО"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "НЕПОТПУНО"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "NEŽELJENI"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "Чекање %s сек"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Преузето за %s на просек од %sБ/с"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Старост"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s артикла нису добро формирани"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s артикла недостају"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s артикла нису дупликате"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Паузирам због дуплог NZB-а \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Проблем са"
|
||||
@@ -3255,9 +3335,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Упали SFV провере"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Уради још једну проверу базирану на SFV датотеке."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3649,17 +3728,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Омогући"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4072,22 +4140,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4409,11 +4472,6 @@ msgstr "Обриши"
|
||||
msgid "Filename"
|
||||
msgstr "Име датотеке"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Старост"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Слободан простор"
|
||||
@@ -4840,10 +4898,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Празан NZB %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
158 po/main/sv.po
@@ -140,11 +140,6 @@ msgid ""
|
||||
"creates."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -314,7 +309,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "Oönskad filändelse i RAR-fil %s"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "Avbruten, oönskad filändelse detekterad"
|
||||
|
||||
@@ -555,6 +550,11 @@ msgstr "Misslyckades att initiera %s@%s med orsak %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "För många anslutningar till servern %s [%s]"
|
||||
@@ -574,6 +574,11 @@ msgstr "Det gick inte att logga in på server %s [%s]"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "Anslutning %s@%s misslyckades, meddelande=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Misstänker fel i nedladdare"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Påbörjar nedstängning av SABnzbd.."
|
||||
@@ -960,7 +965,7 @@ msgid "Update Available!"
|
||||
msgstr "Uppdatering tillgänglig"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr ""
|
||||
|
||||
@@ -1200,16 +1205,6 @@ msgstr "Försöker verifiera SFV"
|
||||
msgid "left"
|
||||
msgstr "kvar"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr ""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "Misstänker fel i nedladdare"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Den här servern tillåter in SSL på denna port"
|
||||
@@ -1389,18 +1384,103 @@ msgstr "Laddningsfel %s, felaktig fil detekterad"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB tillagd i kön"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Ignorerar dubblett för NZB \"%s\""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "NZB filen %s är tom"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "Avbrutet, kan inte slutföras"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "Det gick inte att importera %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "DUBLETT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "KRYPTERAT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "FÖR STOR"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "INKOMPLETT"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "OÖNSKAD"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "VÄNTA %s SEKUNDER"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "Hämtade i %s vid ett genomsnitt på %sB/s"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Ålder"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s artiklar var felaktiga"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s artiklar saknades"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s artiklar hade icke-matchande dubletter"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Pausar dubblett för NZB \"%s\""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Problem med"
|
||||
@@ -3265,9 +3345,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "Använd SFV-baserade kontroller"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "Gör en extra kontroll med SFV filer"
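An aside for context, not part of the diff above: an .sfv file is simply a list of "filename CRC32" pairs, which is what the SFV-based check described in this string verifies. A rough, hypothetical sketch of such a check (not SABnzbd's actual implementation):

import zlib

def crc32_of_file(path: str) -> str:
    """Return the file's CRC32 as an 8-digit uppercase hex string."""
    crc = 0
    with open(path, "rb") as f:
        while chunk := f.read(1 << 20):
            crc = zlib.crc32(chunk, crc)
    return f"{crc & 0xFFFFFFFF:08X}"

def verify_sfv(sfv_path: str) -> bool:
    """Check every 'filename CRC32' line in an .sfv file; ';' starts a comment."""
    with open(sfv_path, "r", encoding="utf-8", errors="ignore") as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith(";"):
                continue
            name, expected = line.rsplit(None, 1)
            if crc32_of_file(name) != expected.upper():
                return False
    return True
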
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3661,17 +3740,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Aktivera"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
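An aside for context, not part of the diff above: the claim in this string (pipelining helps most on high-latency connections) is easy to see with rough numbers. A back-of-the-envelope sketch with made-up values:

rtt = 0.120       # assumed round-trip time to the news server, in seconds
transfer = 0.010  # assumed time to transmit one article, in seconds
articles = 50

# One request at a time: every article pays the full round trip
sequential = articles * (rtt + transfer)

# Pipelined: requests are queued up front, so the round trip is paid roughly once
pipelined = rtt + articles * transfer

print(f"sequential ~ {sequential:.1f}s, pipelined ~ {pipelined:.1f}s")
# sequential ~ 6.5s, pipelined ~ 0.6s
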
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4085,22 +4153,17 @@ msgid "Enable Apprise notifications"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr ""
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4422,11 +4485,6 @@ msgstr "Ta bort"
|
||||
msgid "Filename"
|
||||
msgstr "Filnamn"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Ålder"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Ledigt diskutrymme"
|
||||
@@ -4854,10 +4912,6 @@ msgstr ""
|
||||
msgid "Server could not complete request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "NZB filen %s är tom"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
171
po/main/tr.po
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# Translators:
|
||||
# Taylan Tatlı, 2025
|
||||
# Safihre <safihre@sabnzbd.org>, 2025
|
||||
# mauron, 2025
|
||||
#
|
||||
msgid ""
|
||||
@@ -151,11 +150,6 @@ msgstr ""
|
||||
"Güncel umask (%o), SABnzbd'nin oluşturduğu dosya ve dizinlere erişimini "
|
||||
"reddedebilir."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Windows ARM version of SABnzbd is available from our Downloads page!"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid ""
|
||||
@@ -347,7 +341,7 @@ msgstr ""
|
||||
msgid "Unwanted extension is in rar file %s"
|
||||
msgstr "İstenmeyen uzantı %s rar dosyasındadır"
|
||||
|
||||
#: sabnzbd/assembler.py
|
||||
#: sabnzbd/assembler.py, sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, unwanted extension detected"
|
||||
msgstr "İptal edildi, istenmeyen uzantı tespit edildi"
|
||||
|
||||
@@ -602,6 +596,11 @@ msgstr "%s@%s başlatması şu sebepten dolayı başarısız oldu: %s"
|
||||
msgid "Fatal error in Downloader"
|
||||
msgstr "İndirici'de ölümcül hata"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: bilinmeyen durum kodu %s, şu makale için alındı: %s"
|
||||
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Too many connections to server %s [%s]"
|
||||
msgstr "%s [%s] sunucusuna çok fazla bağlantı"
|
||||
@@ -623,6 +622,11 @@ msgstr "%s [%s] sunucusunda oturum açılışı başarısız oldu"
|
||||
msgid "Connecting %s@%s failed, message=%s"
|
||||
msgstr "%s@%s bağlantısı başarısız oldu, mesaj=%s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "İndiricide şüpheli hata"
|
||||
|
||||
#: sabnzbd/downloader.py, sabnzbd/skintext.py
|
||||
msgid "Shutting down"
|
||||
msgstr "Kapatılıyor"
|
||||
@@ -1013,7 +1017,7 @@ msgid "Update Available!"
|
||||
msgstr "Güncelleme Mevcut!"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py, sabnzbd/skintext.py
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Failed to upload file: %s"
|
||||
msgstr "Dosyanın gönderilmesi başarısız oldu: %s"
|
||||
|
||||
@@ -1255,16 +1259,6 @@ msgstr "SFV doğrulaması deneniyor"
|
||||
msgid "left"
|
||||
msgstr "kaldı"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "%s@%s: Received unknown status code %s for article %s"
|
||||
msgstr "%s@%s: bilinmeyen durum kodu %s, şu makale için alındı: %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "Suspect error in downloader"
|
||||
msgstr "İndiricide şüpheli hata"
|
||||
|
||||
#: sabnzbd/newswrapper.py
|
||||
msgid "This server does not allow SSL on this port"
|
||||
msgstr "Bu sunucu, bu bağlantı noktasında SSL kullanımına izin vermiyor"
|
||||
@@ -1450,18 +1444,103 @@ msgstr "%s yüklenirken hata, bozuk dosya tespit edildi"
|
||||
msgid "NZB added to queue"
|
||||
msgstr "NZB kuyruğa ilave edildi"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Ignoring duplicate NZB \"%s\""
|
||||
msgstr "Yinelenmiş NZB \"%s\" dikkate alınmıyor"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Failing duplicate NZB \"%s\""
|
||||
msgstr "\"%s\" NSB dosyasının yinelenmesi başarısız"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
#: sabnzbd/nzbqueue.py, sabnzbd/nzbstuff.py
|
||||
msgid "Duplicate NZB"
|
||||
msgstr "Yinelenmiş NZB"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Geçersiz NZB dosyası %s, atlanıyor (hata: %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Boş NZB dosyası %s"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pre-queue script marked job as failed"
|
||||
msgstr "Kuyruk öncesi betiği işi başarısız oldu olarak işaretlemiş"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Unwanted Extension in file %s (%s)"
|
||||
msgstr "%s (%s) dosyasında İstenmeyen Uzantı"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Aborted, cannot be completed"
|
||||
msgstr "İptal edildi, tamamlanamıyor"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Error importing %s"
|
||||
msgstr "%s unsurunun içe aktarılmasında hata"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "DUPLICATE"
|
||||
msgstr "YİNELENMİŞ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ALTERNATIVE"
|
||||
msgstr "ALTERNATİF"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "ENCRYPTED"
|
||||
msgstr "ŞİFRELENMİŞ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "TOO LARGE"
|
||||
msgstr "ÇOK BÜYÜK"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "INCOMPLETE"
|
||||
msgstr "TAMAMLANMAMIŞ"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "UNWANTED"
|
||||
msgstr "İSTENMEYEN"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "WAIT %s sec"
|
||||
msgstr "%s saniye BEKLEYİN"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "PROPAGATING %s min"
|
||||
msgstr "YAYINLANIYOR %s dakika"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Downloaded in %s at an average of %sB/s"
|
||||
msgstr "%s içinde ortalama %sB/s hızında indirildi"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Yaş"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were malformed"
|
||||
msgstr "%s makale yanlış şekillendirilmişti"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles were missing"
|
||||
msgstr "%s makale eksikti"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "%s articles had non-matching duplicates"
|
||||
msgstr "%s makale eşleşmeyen yinelenmişler bulunduruyordu"
|
||||
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Pausing duplicate NZB \"%s\""
|
||||
msgstr "Yinelenmiş NZB \"%s\" duraklatılıyor"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr "Şununla sorun"
|
||||
@@ -3378,11 +3457,8 @@ msgid "Enable SFV-based checks"
|
||||
msgstr "SFV temelli kontrolleri etkinleştir"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If no par2 files are available, use sfv files (if present) to verify files"
|
||||
msgstr ""
|
||||
"Eğer hiçbir par2 dosyası mevcut değilse, dosyaları kontrol etmek için "
|
||||
"(mevcutsa) sfv dosyalarını kullan"
|
||||
msgid "Do an extra verification based on SFV files."
|
||||
msgstr "SFV dosyalarına dayalı ilave bir doğrulama yap."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "User script can flag job as failed"
|
||||
@@ -3803,17 +3879,6 @@ msgstr ""
|
||||
msgid "Enable"
|
||||
msgstr "Etkinleştir"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Articles per request"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Request multiple articles per connection without waiting for each response "
|
||||
"first.<br />This can improve download speeds, especially on connections with"
|
||||
" higher latency."
|
||||
msgstr ""
|
||||
|
||||
#. Button: Remove server
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove Server"
|
||||
@@ -4230,29 +4295,20 @@ msgid "Enable Apprise notifications"
|
||||
msgstr "Apprise bildirimlerini etkinleştir"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Send notifications directly to any notification service you use.<br>For "
|
||||
"example: Slack, Discord, Telegram, or any service from over 100 supported "
|
||||
"services!"
|
||||
msgid "Send notifications using Apprise to almost any notification service"
|
||||
msgstr ""
|
||||
"Bildirimleri kullandığınız herhangi bir bildirim hizmetine doğrudan "
|
||||
"gönderin.<br>Örneğin: Slack, Discord, Telegram veya 100'den fazla "
|
||||
"desteklenen hizmetten herhangi biri!"
|
||||
"Apprise kullanarak neredeyse tüm bildirim hizmetlerine bildirim gönderin"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use default Apprise URLs"
|
||||
msgstr "Varsayılan Apprise URL'lerini kullan"
|
||||
msgid "Default Apprise URLs"
|
||||
msgstr "Varsayılan Apprise URL'leri"
|
||||
|
||||
#. Apprise settings
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"Apprise defines service connection information using URLs.<br>Read the "
|
||||
"Apprise wiki how to define the URL for each service.<br>Use a comma and/or "
|
||||
"space to identify more than one URL."
|
||||
msgid "Use a comma and/or space to identify more than one URL."
|
||||
msgstr ""
|
||||
"Apprise, hizmet bağlantı bilgilerini URL'ler kullanarak tanımlar.<br>Her "
|
||||
"hizmet için URL'nin nasıl tanımlanacağını öğrenmek için Apprise wiki'sini "
|
||||
"okuyun.<br>Birden fazla URL tanımlamak için virgül ve/veya boşluk kullanın."
|
||||
"Birden fazla URL (adres) tanımlamak için virgül ve/veya boşluk kullanın."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -4594,11 +4650,6 @@ msgstr "Sil"
|
||||
msgid "Filename"
|
||||
msgstr "Dosya ismi"
|
||||
|
||||
#. Job details page, file age column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Age"
|
||||
msgstr "Yaş"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Free Space"
|
||||
msgstr "Boş alan"
|
||||
@@ -5038,10 +5089,6 @@ msgstr "Dosya sunucuda yok"
|
||||
msgid "Server could not complete request"
|
||||
msgstr "Sunucu talebi tamamlayamadı"
|
||||
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "Empty NZB file %s"
|
||||
msgstr "Boş NZB dosyası %s"
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/urlgrabber.py
|
||||
msgid "URLGRABBER CRASHED"
|
||||
|
||||
529
po/main/zh_CN.po
File diff suppressed because it is too large
@@ -1,16 +1,16 @@
|
||||
# Main requirements
|
||||
# Note that not all sub-dependencies are listed, but only ones we know could cause trouble
|
||||
apprise==1.9.6
|
||||
sabctools==9.1.0
|
||||
CT3==3.4.0.post5
|
||||
apprise==1.9.5
|
||||
sabctools==8.2.6
|
||||
CT3==3.4.0
|
||||
cffi==2.0.0
|
||||
pycparser==2.23
|
||||
feedparser==6.0.12
|
||||
configobj==5.0.9
|
||||
cheroot==11.1.2
|
||||
cheroot==11.0.0
|
||||
six==1.17.0
|
||||
cherrypy==18.10.0
|
||||
jaraco.functools==4.4.0
|
||||
jaraco.functools==4.3.0
|
||||
jaraco.collections==5.0.0
|
||||
jaraco.text==3.8.1 # Newer version introduces irrelevant extra dependencies
|
||||
jaraco.classes==3.4.0
|
||||
@@ -37,7 +37,7 @@ cryptography==46.0.3
|
||||
# We recommend using "orjson" as it is 2x as fast as "ujson". However, it requires
|
||||
# Rust so SABnzbd works just as well with "ujson" or the Python built in "json" module
|
||||
ujson==5.11.0
|
||||
orjson==3.11.5
|
||||
orjson==3.11.3
|
||||
|
||||
# Windows system integration
|
||||
pywin32==311; sys_platform == 'win32'
|
||||
@@ -50,8 +50,8 @@ winrt-Windows.UI.Notifications==3.2.1; sys_platform == 'win32'
|
||||
typing_extensions==4.15.0; sys_platform == 'win32'
|
||||
|
||||
# macOS system calls
|
||||
pyobjc-core==12.1; sys_platform == 'darwin'
|
||||
pyobjc-framework-Cocoa==12.1; sys_platform == 'darwin'
|
||||
pyobjc-core==12.0; sys_platform == 'darwin'
|
||||
pyobjc-framework-Cocoa==12.0; sys_platform == 'darwin'
|
||||
|
||||
# Linux notifications
|
||||
notify2==0.3.1; sys_platform != 'win32' and sys_platform != 'darwin'
|
||||
@@ -60,15 +60,14 @@ notify2==0.3.1; sys_platform != 'win32' and sys_platform != 'darwin'
|
||||
requests==2.32.5
|
||||
requests-oauthlib==2.0.0
|
||||
PyYAML==6.0.3
|
||||
markdown # Version-less for Python 3.9 and below
|
||||
markdown==3.10; python_version > '3.9'
|
||||
markdown==3.9
|
||||
paho-mqtt==1.6.1 # Pinned, newer versions don't work with AppRise yet
|
||||
|
||||
# Requests Requirements
|
||||
charset_normalizer==3.4.4
|
||||
idna==3.11
|
||||
urllib3==2.6.2
|
||||
certifi==2025.11.12
|
||||
urllib3==2.5.0
|
||||
certifi==2025.10.5
|
||||
oauthlib==3.3.1
|
||||
PyJWT==2.10.1
|
||||
blinker==1.9.0
|
||||
|
||||
@@ -32,12 +32,11 @@ from threading import Lock, Condition
|
||||
# Determine platform flags
|
||||
##############################################################################
|
||||
|
||||
WINDOWS = WINDOWSARM64 = MACOS = MACOSARM64 = FOUNDATION = False
|
||||
WINDOWS = MACOS = MACOSARM64 = FOUNDATION = False
|
||||
KERNEL32 = LIBC = MACOSLIBC = PLATFORM = None
|
||||
|
||||
if os.name == "nt":
|
||||
WINDOWS = True
|
||||
WINDOWSARM64 = platform.uname().machine == "ARM64"
|
||||
|
||||
if platform.uname().machine not in ["AMD64", "ARM64"]:
|
||||
print("SABnzbd only supports 64-bit Windows")
|
||||
@@ -83,15 +82,15 @@ from sabnzbd.version import __version__, __baseline__
|
||||
import sabnzbd.misc as misc
|
||||
import sabnzbd.filesystem as filesystem
|
||||
import sabnzbd.powersup as powersup
|
||||
import sabnzbd.rss as rss
|
||||
import sabnzbd.emailer as emailer
|
||||
import sabnzbd.encoding as encoding
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
import sabnzbd.database
|
||||
import sabnzbd.lang as lang
|
||||
import sabnzbd.nzb
|
||||
import sabnzbd.nzbparser as nzbparser
|
||||
import sabnzbd.rss as rss
|
||||
import sabnzbd.emailer as emailer
|
||||
import sabnzbd.nzbstuff
|
||||
import sabnzbd.getipaddress
|
||||
import sabnzbd.newsunpack
|
||||
import sabnzbd.par2file
|
||||
@@ -482,10 +481,6 @@ def delayed_startup_actions():
|
||||
sabnzbd.ORG_UMASK,
|
||||
)
|
||||
|
||||
# Check if maybe we are running x64 version on ARM hardware
|
||||
if sabnzbd.WINDOWSARM64 and "AMD64" in sys.version:
|
||||
misc.helpful_warning(T("Windows ARM version of SABnzbd is available from our Downloads page!"))
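An aside for context, not part of the diff above: a standalone sketch of the check added here (the helper name is hypothetical). Windows reports the hardware as ARM64 via platform.uname(), while a non-ARM CPython build embeds "AMD64" in sys.version:

import platform
import sys

def x64_python_on_windows_arm() -> bool:
    """True when an AMD64 Python build is running on Windows ARM64 hardware."""
    on_arm_windows = sys.platform == "win32" and platform.uname().machine == "ARM64"
    return on_arm_windows and "AMD64" in sys.version

if x64_python_on_windows_arm():
    print("Windows ARM version of SABnzbd is available from our Downloads page!")
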
|
||||
|
||||
# List the number of certificates available (can take up to 1.5 seconds)
|
||||
if cfg.log_level() > 1:
|
||||
logging.debug("Available certificates = %s", repr(ssl.create_default_context().cert_store_stats()))
|
||||
|
||||
261
sabnzbd/api.py
261
sabnzbd/api.py
@@ -28,9 +28,7 @@ import time
|
||||
import getpass
|
||||
import cherrypy
|
||||
from threading import Thread
|
||||
from typing import Optional, Any, Union
|
||||
|
||||
import sabctools
|
||||
from typing import Tuple, Optional, List, Dict, Any, Union
|
||||
|
||||
# For json.dumps, orjson is magnitudes faster than ujson, but it is harder to
|
||||
# compile due to Rust dependency. Since the output is the same, we support all modules.
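An aside for context, not part of the diff above: the comment explains why three JSON backends are supported. A minimal sketch of that fallback idea (simplified, not the exact SABnzbd code):

try:
    import orjson

    def json_dumps(data) -> bytes:
        return orjson.dumps(data)  # orjson returns bytes directly

except ImportError:
    try:
        import ujson

        def json_dumps(data) -> bytes:
            return ujson.dumps(data).encode("utf-8")

    except ImportError:
        import json

        def json_dumps(data) -> bytes:
            return json.dumps(data).encode("utf-8")
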
|
||||
@@ -57,7 +55,6 @@ from sabnzbd.constants import (
|
||||
PP_LOOKUP,
|
||||
STAGES,
|
||||
DEF_NETWORKING_TEST_TIMEOUT,
|
||||
DEF_PIPELINING_REQUESTS,
|
||||
)
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
@@ -86,7 +83,7 @@ from sabnzbd.encoding import xml_name, utob
|
||||
from sabnzbd.getipaddress import local_ipv4, public_ipv4, public_ipv6, dnslookup, active_socks5_proxy
|
||||
from sabnzbd.database import HistoryDB
|
||||
from sabnzbd.lang import is_rtl
|
||||
from sabnzbd.nzb import TryList, NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject
|
||||
from sabnzbd.newswrapper import NewsWrapper, NNTPPermanentError
|
||||
import sabnzbd.emailer
|
||||
import sabnzbd.sorting
|
||||
@@ -106,7 +103,7 @@ _MSG_NO_SUCH_CONFIG = "Config item does not exist"
|
||||
_MSG_CONFIG_LOCKED = "Configuration locked"
|
||||
|
||||
|
||||
def api_handler(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def api_handler(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API Dispatcher"""
|
||||
# Clean-up the arguments
|
||||
for vr in ("mode", "name", "value", "value2", "value3", "start", "limit", "search"):
|
||||
@@ -120,13 +117,13 @@ def api_handler(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return response
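An aside for context, not part of the diff above: the signature changes throughout this file swap the typing-module aliases (Dict, List, Tuple) for the built-in generics accepted since Python 3.9 (PEP 585). Both spellings describe the same type; a minimal illustration:

from typing import Dict, List, Union

# Pre-PEP 585 spelling, requires the typing aliases
def old_style(kwargs: Dict[str, Union[str, List[str]]]) -> bytes: ...

# Python 3.9+ spelling, uses the built-in containers directly
def new_style(kwargs: dict[str, Union[str, list[str]]]) -> bytes: ...
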
|
||||
|
||||
|
||||
def _api_get_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
_, data = config.get_dconfig(kwargs.get("section"), kwargs.get("keyword"))
|
||||
return report(keyword="config", data=data)
|
||||
|
||||
|
||||
def _api_set_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_set_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -147,7 +144,7 @@ def _api_set_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(keyword="config", data=data)
|
||||
|
||||
|
||||
def _api_set_config_default(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_set_config_default(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Reset requested config variables back to defaults. Currently only for misc-section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -162,7 +159,7 @@ def _api_set_config_default(name: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report()
|
||||
|
||||
|
||||
def _api_del_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_del_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts keyword, section"""
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
@@ -172,13 +169,13 @@ def _api_del_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_queue(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=queue"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_queue_table.get(name, (_api_queue_default, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_queue_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_delete(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value"""
|
||||
if value.lower() == "all":
|
||||
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
|
||||
@@ -191,7 +188,7 @@ def _api_queue_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_queue_delete_nzf(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_delete_nzf(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=nzf_ids)"""
|
||||
nzf_ids = clean_comma_separated_list(kwargs.get("value2"))
|
||||
if value and nzf_ids:
|
||||
@@ -201,7 +198,7 @@ def _api_queue_delete_nzf(value: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_rename(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_rename(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=old name), value2(=new name), value3(=password)"""
|
||||
value2 = kwargs.get("value2")
|
||||
value3 = kwargs.get("value3")
|
||||
@@ -212,18 +209,18 @@ def _api_queue_rename(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_change_complete_action(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_change_complete_action(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=action)"""
|
||||
change_queue_complete_action(value)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_queue_purge(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_purge(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
|
||||
return report(keyword="", data={"status": bool(removed), "nzo_ids": removed})
|
||||
|
||||
|
||||
def _api_queue_pause(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_pause(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=list of nzo_id)"""
|
||||
if items := clean_comma_separated_list(value):
|
||||
handled = sabnzbd.NzbQueue.pause_multiple_nzo(items)
|
||||
@@ -232,7 +229,7 @@ def _api_queue_pause(value: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(keyword="", data={"status": bool(handled), "nzo_ids": handled})
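An aside for context, not part of the diff above: a hypothetical client-side call to the queue pause/resume handlers shown here (mode=queue, name=pause, value=comma-separated nzo_ids; the URL, API key, and job ids are placeholders):

import requests

resp = requests.get(
    "http://localhost:8080/api",  # assumed local SABnzbd address
    params={
        "mode": "queue",
        "name": "pause",
        "value": "SABnzbd_nzo_abc123,SABnzbd_nzo_def456",  # example nzo_ids
        "apikey": "your-api-key",
        "output": "json",
    },
    timeout=10,
)
print(resp.json())  # {"status": true, "nzo_ids": [...]} on success
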
|
||||
|
||||
|
||||
def _api_queue_resume(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_resume(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=list of nzo_id)"""
|
||||
if items := clean_comma_separated_list(value):
|
||||
handled = sabnzbd.NzbQueue.resume_multiple_nzo(items)
|
||||
@@ -241,7 +238,7 @@ def _api_queue_resume(value: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(keyword="", data={"status": bool(handled), "nzo_ids": handled})
|
||||
|
||||
|
||||
def _api_queue_priority(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_priority(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=priority)"""
|
||||
nzo_ids = clean_comma_separated_list(value)
|
||||
priority = kwargs.get("value2")
|
||||
@@ -260,7 +257,7 @@ def _api_queue_priority(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_sort(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_sort(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts sort, dir"""
|
||||
sort = kwargs.get("sort", "")
|
||||
direction = kwargs.get("dir", "")
|
||||
@@ -271,7 +268,7 @@ def _api_queue_sort(value: str, kwargs: dict[str, Union[str, list[str]]]) -> byt
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_queue_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_queue_default(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts sort, dir, start, limit and search terms"""
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
@@ -299,12 +296,12 @@ def _api_queue_default(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
)
|
||||
|
||||
|
||||
def _api_translate(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_translate(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=acronym)"""
|
||||
return report(keyword="value", data=T(kwargs.get("value", "")))
|
||||
|
||||
|
||||
def _api_addfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addfile(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, pp, script, cat, priority, nzbname"""
|
||||
# Normal upload will send the nzb in a kw arg called name or nzbfile
|
||||
if not name or isinstance(name, str):
|
||||
@@ -325,7 +322,7 @@ def _api_addfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_retry(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_retry(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, value(=nzo_id), nzbfile(=optional NZB), password (optional)"""
|
||||
value = kwargs.get("value")
|
||||
# Normal upload will send the nzb in a kw arg called nzbfile
|
||||
@@ -340,7 +337,7 @@ def _api_retry(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_cancel_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_cancel_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, value(=nzo_ids)"""
|
||||
if nzo_ids := clean_comma_separated_list(kwargs.get("value")):
|
||||
if sabnzbd.PostProcessor.cancel_pp(nzo_ids):
|
||||
@@ -348,7 +345,7 @@ def _api_cancel_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_addlocalfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addlocalfile(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, pp, script, cat, priority, nzbname"""
|
||||
if name:
|
||||
if os.path.exists(name):
|
||||
@@ -375,7 +372,7 @@ def _api_addlocalfile(name: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_switch(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_switch(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=first id), value2(=second id)"""
|
||||
value = kwargs.get("value")
|
||||
value2 = kwargs.get("value2")
|
||||
@@ -387,7 +384,7 @@ def _api_switch(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE2)
|
||||
|
||||
|
||||
def _api_change_cat(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_cat(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=category)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
cat = kwargs.get("value2")
|
||||
@@ -400,7 +397,7 @@ def _api_change_cat(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_change_script(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_script(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=script)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
script = kwargs.get("value2")
|
||||
@@ -413,7 +410,7 @@ def _api_change_script(name: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_change_opts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_change_opts(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id), value2(=pp)"""
|
||||
nzo_ids = clean_comma_separated_list(kwargs.get("value"))
|
||||
pp = kwargs.get("value2")
|
||||
@@ -423,7 +420,7 @@ def _api_change_opts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byt
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_fullstatus(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_fullstatus(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: full history status"""
|
||||
status = build_status(
|
||||
calculate_performance=bool_conv(kwargs.get("calculate_performance")),
|
||||
@@ -432,19 +429,19 @@ def _api_fullstatus(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(keyword="status", data=status)
|
||||
|
||||
|
||||
def _api_status(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_status(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=status, passing on the value"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_status_table.get(name, (_api_fullstatus, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_unblock_server(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_unblock_server(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Unblock a blocked server"""
|
||||
sabnzbd.Downloader.unblock(value)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_delete_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_delete_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Remove orphaned job"""
|
||||
if value:
|
||||
path = os.path.join(cfg.download_dir.get_path(), value)
|
||||
@@ -455,7 +452,7 @@ def _api_delete_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_delete_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_delete_all_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Remove all orphaned jobs"""
|
||||
paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
|
||||
for path in paths:
|
||||
@@ -463,7 +460,7 @@ def _api_delete_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]])
|
||||
return report()
|
||||
|
||||
|
||||
def _api_add_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]):
|
||||
def _api_add_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]):
|
||||
"""Add orphaned job"""
|
||||
if value:
|
||||
path = os.path.join(cfg.download_dir.get_path(), value)
|
||||
@@ -474,7 +471,7 @@ def _api_add_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]):
|
||||
return report(_MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_add_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_add_all_orphan(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Add all orphaned jobs"""
|
||||
paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
|
||||
for path in paths:
|
||||
@@ -482,13 +479,13 @@ def _api_add_all_orphan(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report()
|
||||
|
||||
|
||||
def _api_history(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for mode=history"""
|
||||
value = kwargs.get("value", "")
|
||||
return _api_history_table.get(name, (_api_history_default, 2))[0](value, kwargs)
|
||||
|
||||
|
||||
def _api_history_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_delete(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id or special), search, archive, del_files"""
|
||||
search = kwargs.get("search")
|
||||
archive = True
|
||||
@@ -534,7 +531,7 @@ def _api_history_delete(value: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_history_mark_as_completed(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_mark_as_completed(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id)"""
|
||||
if value:
|
||||
history_db = sabnzbd.get_db_connection()
|
||||
@@ -553,7 +550,7 @@ def _api_history_mark_as_completed(value: str, kwargs: dict[str, Union[str, list
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_history_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_history_default(value: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts start, limit, search, failed_only, archive, cat, status, nzo_ids"""
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
@@ -598,7 +595,7 @@ def _api_history_default(value: str, kwargs: dict[str, Union[str, list[str]]]) -
|
||||
return report(keyword="history", data=history)
|
||||
|
||||
|
||||
def _api_get_files(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_files(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=nzo_id)"""
|
||||
value = kwargs.get("value")
|
||||
if value:
|
||||
@@ -607,7 +604,7 @@ def _api_get_files(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_move_nzf_bulk(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_move_nzf_bulk(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name(=top/up/down/bottom), value=(=nzo_id), nzf_ids, size (optional)"""
|
||||
nzo_id = kwargs.get("value")
|
||||
nzf_ids = clean_comma_separated_list(kwargs.get("nzf_ids"))
|
||||
@@ -633,7 +630,7 @@ def _api_move_nzf_bulk(name: str, kwargs: dict[str, Union[str, list[str]]]) -> b
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_addurl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_addurl(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, output, pp, script, cat, priority, nzbname"""
|
||||
pp = kwargs.get("pp")
|
||||
script = kwargs.get("script")
|
||||
@@ -651,24 +648,24 @@ def _api_addurl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(_MSG_NO_VALUE)
|
||||
|
||||
|
||||
def _api_pause(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_pause(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Scheduler.plan_resume(0)
|
||||
sabnzbd.Downloader.pause()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_resume(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_resume(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Scheduler.plan_resume(0)
|
||||
sabnzbd.downloader.unpause_all()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_shutdown(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_shutdown(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.shutdown_program()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_warnings(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_warnings(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts name, output"""
|
||||
if name == "clear":
|
||||
return report(keyword="warnings", data=sabnzbd.GUIHANDLER.clear())
|
||||
@@ -688,7 +685,7 @@ LOG_INI_HIDE_RE = re.compile(
|
||||
LOG_HASH_RE = re.compile(rb"([a-zA-Z\d]{25})", re.I)
|
||||
|
||||
|
||||
def _api_showlog(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_showlog(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Fetch the INI and the log-data and add a message at the top"""
|
||||
log_data = b"--------------------------------\n\n"
|
||||
log_data += b"The log includes a copy of your sabnzbd.ini with\nall usernames, passwords and API-keys removed."
|
||||
@@ -721,19 +718,19 @@ def _api_showlog(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return log_data
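An aside for context, not part of the diff above: a simplified illustration of the scrubbing _api_showlog performs before returning the log. The 25-character pattern mirrors LOG_HASH_RE above; the sample line is made up:

import re

LOG_HASH_RE = re.compile(rb"([a-zA-Z\d]{25})")

def scrub(log_data: bytes) -> bytes:
    # Blank out anything that looks like a 25-character API key or NZB key
    return LOG_HASH_RE.sub(b"<APIKEY>", log_data)

print(scrub(b"api?mode=queue&apikey=abcde12345fghij67890klmno"))
# b'api?mode=queue&apikey=<APIKEY>'
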
|
||||
|
||||
|
||||
def _api_get_cats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_cats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="categories", data=list_cats(False))
|
||||
|
||||
|
||||
def _api_get_scripts(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_get_scripts(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="scripts", data=list_scripts())
|
||||
|
||||
|
||||
def _api_version(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_version(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(keyword="version", data=sabnzbd.__version__)
|
||||
|
||||
|
||||
def _api_auth(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_auth(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
key = kwargs.get("key", "")
|
||||
if not key:
|
||||
auth = "apikey"
|
||||
@@ -746,14 +743,14 @@ def _api_auth(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(keyword="auth", data=auth)
|
||||
|
||||
|
||||
def _api_restart(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_restart(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
logging.info("Restart requested by API")
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_restart_repair(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_restart_repair(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
logging.info("Queue repair requested by API")
|
||||
request_repair()
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
@@ -761,12 +758,12 @@ def _api_restart_repair(name: str, kwargs: dict[str, Union[str, list[str]]]) ->
|
||||
return report()
|
||||
|
||||
|
||||
def _api_disconnect(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_disconnect(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.Downloader.disconnect()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_eval_sort(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_eval_sort(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: evaluate sorting expression"""
|
||||
sort_string = kwargs.get("sort_string", "")
|
||||
job_name = kwargs.get("job_name", "")
|
||||
@@ -778,28 +775,28 @@ def _api_eval_sort(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(keyword="result", data=path)
|
||||
|
||||
|
||||
def _api_watched_now(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_watched_now(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.DirScanner.scan()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_resume_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.resume()
|
||||
def _api_resume_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.paused = False
|
||||
return report()
|
||||
|
||||
|
||||
def _api_pause_pp(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.pause()
|
||||
def _api_pause_pp(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sabnzbd.PostProcessor.paused = True
|
||||
return report()
|
||||
|
||||
|
||||
def _api_rss_now(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_rss_now(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
# Run RSS scan async, because it can take a long time
|
||||
sabnzbd.Scheduler.force_rss()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_retry_all(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_retry_all(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Retry all failed items in History"""
|
||||
items = sabnzbd.api.build_history()[0]
|
||||
nzo_ids = []
|
||||
@@ -809,13 +806,13 @@ def _api_retry_all(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes
|
||||
return report(keyword="status", data=nzo_ids)
|
||||
|
||||
|
||||
def _api_reset_quota(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_reset_quota(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Reset quota left"""
|
||||
sabnzbd.BPSMeter.reset_quota(force=True)
|
||||
return report()
|
||||
|
||||
|
||||
def _api_test_email(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_email(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test email, return result"""
|
||||
logging.info("Sending test email")
|
||||
pack = {"download": ["action 1", "action 2"], "unpack": ["action 1", "action 2"]}
|
||||
@@ -837,67 +834,67 @@ def _api_test_email(name: str, kwargs: dict[str, Union[str, list[str]]]) -> byte
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_windows(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_windows(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test to Windows, return result"""
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_windows("SABnzbd", T("Test Notification"), "other")
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_notif(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_notif(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test to Notification Center, return result"""
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_notification_center("SABnzbd", T("Test Notification"), "other")
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_osd(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_osd(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test OSD notification, return result"""
|
||||
logging.info("Sending OSD notification")
|
||||
res = sabnzbd.notifier.send_notify_osd("SABnzbd", T("Test Notification"))
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_prowl(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_prowl(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Prowl notification, return result"""
|
||||
logging.info("Sending Prowl notification")
|
||||
res = sabnzbd.notifier.send_prowl("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_pushover(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_pushover(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Pushover notification, return result"""
|
||||
logging.info("Sending Pushover notification")
|
||||
res = sabnzbd.notifier.send_pushover("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_pushbullet(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_pushbullet(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Pushbullet notification, return result"""
|
||||
logging.info("Sending Pushbullet notification")
|
||||
res = sabnzbd.notifier.send_pushbullet("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_apprise(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_apprise(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: send a test Apprise notification, return result"""
|
||||
logging.info("Sending Apprise notification")
|
||||
res = sabnzbd.notifier.send_apprise("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_test_nscript(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_test_nscript(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: execute a test notification script, return result"""
|
||||
logging.info("Executing notification script")
|
||||
res = sabnzbd.notifier.send_nscript("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs)
|
||||
return report(error=res)
|
||||
|
||||
|
||||
def _api_undefined(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_undefined(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_browse(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_browse(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Return tree of local path"""
|
||||
compact = bool_conv(kwargs.get("compact"))
|
||||
show_files = bool_conv(kwargs.get("show_files"))
|
||||
@@ -914,14 +911,14 @@ def _api_browse(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report(keyword="paths", data=paths)
|
||||
|
||||
|
||||
def _api_config(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: Dispatcher for "config" """
|
||||
if cfg.configlock():
|
||||
return report(_MSG_CONFIG_LOCKED)
|
||||
return _api_config_table.get(name, (_api_config_undefined, 2))[0](kwargs)
|
||||
|
||||
|
||||
def _api_config_speedlimit(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_speedlimit(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=speed)"""
|
||||
value = kwargs.get("value")
|
||||
if not value:
|
||||
@@ -930,26 +927,26 @@ def _api_config_speedlimit(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_set_pause(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_pause(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts value(=pause interval)"""
|
||||
value = kwargs.get("value")
|
||||
sabnzbd.Scheduler.plan_resume(int_conv(value))
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_set_apikey(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_apikey(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
cfg.api_key.set(config.create_api_key())
|
||||
config.save_config()
|
||||
return report(keyword="apikey", data=cfg.api_key())
|
||||
|
||||
|
||||
def _api_config_set_nzbkey(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_set_nzbkey(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
cfg.nzb_key.set(config.create_api_key())
|
||||
config.save_config()
|
||||
return report(keyword="nzbkey", data=cfg.nzb_key())
|
||||
|
||||
|
||||
def _api_config_regenerate_certs(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_regenerate_certs(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
# Make sure we only over-write default locations
|
||||
result = False
|
||||
if (
|
||||
@@ -963,27 +960,27 @@ def _api_config_regenerate_certs(kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(data=result)
|
||||
|
||||
|
||||
def _api_config_test_server(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_test_server(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""API: accepts server-params"""
|
||||
result, msg = test_nntp_server_dict(kwargs)
|
||||
return report(data={"result": result, "message": msg})
|
||||
|
||||
|
||||
def _api_config_create_backup(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_create_backup(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
backup_file = config.create_config_backup()
|
||||
return report(data={"result": bool(backup_file), "message": backup_file})
|
||||
|
||||
|
||||
def _api_config_purge_log_files(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_purge_log_files(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
purge_log_files()
|
||||
return report()
|
||||
|
||||
|
||||
def _api_config_undefined(kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_config_undefined(kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
return report(_MSG_NOT_IMPLEMENTED)
|
||||
|
||||
|
||||
def _api_server_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_server_stats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
sum_t, sum_m, sum_w, sum_d = sabnzbd.BPSMeter.get_sums()
|
||||
stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}}
|
||||
|
||||
@@ -1002,12 +999,12 @@ def _api_server_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> by
|
||||
return report(keyword="", data=stats)
|
||||
|
||||
|
||||
def _api_gc_stats(name: str, kwargs: dict[str, Union[str, list[str]]]) -> bytes:
|
||||
def _api_gc_stats(name: str, kwargs: Dict[str, Union[str, List[str]]]) -> bytes:
|
||||
"""Function only intended for internal testing of the memory handling"""
|
||||
# Collect before we check
|
||||
gc.collect()
|
||||
# We cannot create any lists/dicts, as they would create a reference
|
||||
return report(data=[str(obj) for obj in gc.get_objects() if isinstance(obj, TryList)])
|
||||
return report(data=[str(obj) for obj in gc.get_objects() if isinstance(obj, sabnzbd.nzbstuff.TryList)])
|
||||
|
||||
|
||||
##############################################################################
|
||||
@@ -1213,7 +1210,7 @@ class XmlOutputFactory:
return text


def handle_server_api(kwargs: dict[str, Union[str, list[str]]]) -> str:
def handle_server_api(kwargs: Dict[str, Union[str, List[str]]]) -> str:
"""Special handler for API-call 'set_config' [servers]"""
name = kwargs.get("keyword")
if not name:
@@ -1231,7 +1228,7 @@ def handle_server_api(kwargs: dict[str, Union[str, list[str]]]) -> str:
return name


def handle_sorter_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_sorter_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
"""Special handler for API-call 'set_config' [sorters]"""
name = kwargs.get("keyword")
if not name:
@@ -1247,7 +1244,7 @@ def handle_sorter_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]
return name


def handle_rss_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_rss_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
"""Special handler for API-call 'set_config' [rss]"""
name = kwargs.get("keyword")
if not name:
@@ -1281,7 +1278,7 @@ def handle_rss_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
return name


def handle_cat_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
def handle_cat_api(kwargs: Dict[str, Union[str, List[str]]]) -> Optional[str]:
"""Special handler for API-call 'set_config' [categories]"""
name = kwargs.get("keyword")
if not name:
@@ -1298,7 +1295,7 @@ def handle_cat_api(kwargs: dict[str, Union[str, list[str]]]) -> Optional[str]:
return name


def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[bool, str]:
def test_nntp_server_dict(kwargs: Dict[str, Union[str, List[str]]]) -> Tuple[bool, str]:
"""Will connect (blocking) to the NNTP server and report back any errors"""
host = kwargs.get("host", "").strip()
port = int_conv(kwargs.get("port", 0))
@@ -1310,7 +1307,6 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
ssl = int_conv(kwargs.get("ssl", 0))
ssl_verify = int_conv(kwargs.get("ssl_verify", 3))
ssl_ciphers = kwargs.get("ssl_ciphers", "").strip()
pipelining_requests = int_conv(kwargs.get("pipelining_requests", DEF_PIPELINING_REQUESTS))

if not host:
return False, T("The hostname is not set.")
@@ -1347,7 +1343,6 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
use_ssl=ssl,
ssl_verify=ssl_verify,
ssl_ciphers=ssl_ciphers,
pipelining_requests=lambda: pipelining_requests,
username=username,
password=password,
)
@@ -1392,20 +1387,12 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
# Sorry, no clever analysis:
return False, T('Server address "%s:%s" is not valid.') % (host, port)

nw = NewsWrapper(server=test_server, thrdnum=-1, block=True)
nntp_code: int = 0
nntp_message: str = ""

def on_response(code: int, message: str):
nonlocal nntp_code, nntp_message
nntp_code = code
nntp_message = message

try:
nw = NewsWrapper(server=test_server, thrdnum=-1, block=True)
nw.init_connect()
while not nw.connected:
nw.write()
nw.read(on_response=on_response)
nw.recv_chunk()
nw.finish_connect(nw.status_code)

except socket.timeout:
if port != 119 and not ssl:
@@ -1427,37 +1414,37 @@ def test_nntp_server_dict(kwargs: dict[str, Union[str, list[str]]]) -> tuple[boo
return False, str(err)

if not username or not password:
nw.queue_command(b"ARTICLE <test@home>\r\n")
nw.nntp.sock.sendall(b"ARTICLE <test@home>\r\n")
try:
nw.write()
nw.read(on_response=on_response)
nw.reset_data_buffer()
nw.recv_chunk()
except Exception as err:
# Some internal error, not always safe to close connection
return False, str(err)

# Parse result
return_status = ()
if nntp_code:
if nntp_code == 480:
if nw.status_code:
if nw.status_code == 480:
return_status = (False, T("Server requires username and password."))
elif nntp_code < 300 or nntp_code in (411, 423, 430):
elif nw.status_code < 300 or nw.status_code in (411, 423, 430):
# If no username/password set and we requested fake-article, it will return 430 Not Found
return_status = (True, T("Connection Successful!"))
elif nntp_code == 502 or sabnzbd.downloader.clues_login(nntp_message):
elif nw.status_code == 502 or sabnzbd.downloader.clues_login(nw.nntp_msg):
return_status = (False, T("Authentication failed, check username/password."))
elif sabnzbd.downloader.clues_too_many(nntp_message):
elif sabnzbd.downloader.clues_too_many(nw.nntp_msg):
return_status = (False, T("Too many connections, please pause downloading or try again later"))

# Fallback in case no data was received or unknown status
if not return_status:
return_status = (False, T("Could not determine connection result (%s)") % nntp_message)
return_status = (False, T("Could not determine connection result (%s)") % nw.nntp_msg)

# Close the connection and return result
nw.hard_reset()
return return_status

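One side of the connection-test hunk above captures the NNTP reply through an on_response callback that writes into enclosing variables via nonlocal, instead of reading nw.status_code / nw.nntp_msg after the fact. Stripped of the NewsWrapper specifics, the capture pattern is just the following (read_one_reply is an illustrative placeholder for whatever parses a single server reply, not a SABnzbd function):

def read_status(read_one_reply) -> tuple:
    nntp_code = 0
    nntp_message = ""

    def on_response(code: int, message: str):
        # Store the parsed reply in the enclosing scope
        nonlocal nntp_code, nntp_message
        nntp_code = code
        nntp_message = message

    # The reader calls the callback once it has parsed a full status line
    read_one_reply(on_response)
    return nntp_code, nntp_message

For example, read_status(lambda cb: cb(200, "server ready")) returns (200, "server ready").
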
def build_status(calculate_performance: bool = False, skip_dashboard: bool = False) -> dict[str, Any]:
|
||||
def build_status(calculate_performance: bool = False, skip_dashboard: bool = False) -> Dict[str, Any]:
|
||||
# build up header full of basic information
|
||||
info = build_header(trans_functions=False)
|
||||
|
||||
@@ -1515,13 +1502,13 @@ def build_status(calculate_performance: bool = False, skip_dashboard: bool = Fal
|
||||
for nw in server.busy_threads.copy():
|
||||
if nw.connected:
|
||||
activeconn += 1
|
||||
if article := nw.article:
|
||||
if nw.article:
|
||||
serverconnections.append(
|
||||
{
|
||||
"thrdnum": nw.thrdnum,
|
||||
"art_name": article.article,
|
||||
"nzf_name": article.nzf.filename,
|
||||
"nzo_name": article.nzf.nzo.final_name,
|
||||
"art_name": nw.article.article,
|
||||
"nzf_name": nw.article.nzf.filename,
|
||||
"nzo_name": nw.article.nzf.nzo.final_name,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -1559,11 +1546,11 @@ def build_queue(
|
||||
start: int = 0,
|
||||
limit: int = 0,
|
||||
search: Optional[str] = None,
|
||||
categories: Optional[list[str]] = None,
|
||||
priorities: Optional[list[str]] = None,
|
||||
statuses: Optional[list[str]] = None,
|
||||
nzo_ids: Optional[list[str]] = None,
|
||||
) -> dict[str, Any]:
|
||||
categories: Optional[List[str]] = None,
|
||||
priorities: Optional[List[str]] = None,
|
||||
statuses: Optional[List[str]] = None,
|
||||
nzo_ids: Optional[List[str]] = None,
|
||||
) -> Dict[str, Any]:
|
||||
info = build_header(for_template=False)
|
||||
(
|
||||
queue_bytes_total,
|
||||
@@ -1672,7 +1659,7 @@ def build_queue(
|
||||
return info
|
||||
|
||||
|
||||
def fast_queue() -> tuple[bool, int, float, str]:
|
||||
def fast_queue() -> Tuple[bool, int, float, str]:
|
||||
"""Return paused, bytes_left, bpsnow, time_left"""
|
||||
bytes_left = sabnzbd.sabnzbd.NzbQueue.remaining()
|
||||
paused = sabnzbd.Downloader.paused
|
||||
@@ -1681,7 +1668,7 @@ def fast_queue() -> tuple[bool, int, float, str]:
|
||||
return paused, bytes_left, bpsnow, time_left
|
||||
|
||||
|
||||
def build_file_list(nzo_id: str) -> list[dict[str, Any]]:
|
||||
def build_file_list(nzo_id: str) -> List[Dict[str, Any]]:
|
||||
"""Build file lists for specified job"""
|
||||
jobs = []
|
||||
nzo = sabnzbd.sabnzbd.NzbQueue.get_nzo(nzo_id)
|
||||
@@ -1755,7 +1742,7 @@ def retry_job(
|
||||
return None
|
||||
|
||||
|
||||
def del_job_files(job_paths: list[str]):
|
||||
def del_job_files(job_paths: List[str]):
|
||||
"""Remove files of each path in the list"""
|
||||
for path in job_paths:
|
||||
if path and clip_path(path).lower().startswith(cfg.download_dir.get_clipped_path().lower()):
|
||||
@@ -1798,7 +1785,7 @@ def clear_trans_cache():
|
||||
sabnzbd.WEBUI_READY = True
|
||||
|
||||
|
||||
def build_header(webdir: str = "", for_template: bool = True, trans_functions: bool = True) -> dict[str, Any]:
|
||||
def build_header(webdir: str = "", for_template: bool = True, trans_functions: bool = True) -> Dict[str, Any]:
|
||||
"""Build the basic header"""
|
||||
header = {}
|
||||
|
||||
@@ -1865,10 +1852,10 @@ def build_history(
|
||||
limit: int = 1000000,
|
||||
archive: bool = False,
|
||||
search: Optional[str] = None,
|
||||
categories: Optional[list[str]] = None,
|
||||
statuses: Optional[list[str]] = None,
|
||||
nzo_ids: Optional[list[str]] = None,
|
||||
) -> tuple[list[dict[str, Any]], int, int]:
|
||||
categories: Optional[List[str]] = None,
|
||||
statuses: Optional[List[str]] = None,
|
||||
nzo_ids: Optional[List[str]] = None,
|
||||
) -> Tuple[List[Dict[str, Any]], int, int]:
|
||||
"""Combine the jobs still in post-processing and the database history"""
|
||||
if not archive:
|
||||
# Grab any items that are active or queued in postproc
|
||||
@@ -1944,7 +1931,7 @@ def build_history(
|
||||
return items, postproc_queue_size, total_items
|
||||
|
||||
|
||||
def add_active_history(postproc_queue: list[NzbObject], items: list[dict[str, Any]]):
|
||||
def add_active_history(postproc_queue: List[NzbObject], items: List[Dict[str, Any]]):
|
||||
"""Get the active history queue and add it to the existing items list"""
|
||||
nzo_ids = set([nzo["nzo_id"] for nzo in items])
|
||||
|
||||
@@ -2003,7 +1990,7 @@ def calc_timeleft(bytesleft: float, bps: float) -> str:
|
||||
return format_time_left(int(bytesleft / bps))
|
||||
|
||||
|
||||
def list_cats(default: bool = True) -> list[str]:
|
||||
def list_cats(default: bool = True) -> List[str]:
|
||||
"""Return list of (ordered) categories,
|
||||
when default==False use '*' for Default category
|
||||
"""
|
||||
@@ -2032,7 +2019,7 @@ def plural_to_single(kw, def_kw=""):
|
||||
return def_kw
|
||||
|
||||
|
||||
def del_from_section(kwargs: dict[str, Union[str, list[str]]]) -> bool:
|
||||
def del_from_section(kwargs: Dict[str, Union[str, List[str]]]) -> bool:
|
||||
"""Remove keyword in section"""
|
||||
section = kwargs.get("section", "")
|
||||
if section in ("sorters", "servers", "rss", "categories"):
|
||||
|
||||
@@ -22,12 +22,12 @@ sabnzbd.articlecache - Article cache handling
|
||||
import logging
|
||||
import threading
|
||||
import struct
|
||||
from typing import Collection
|
||||
from typing import Dict, Collection
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.constants import GIGI, ANFO, ASSEMBLER_WRITE_THRESHOLD
|
||||
from sabnzbd.nzb import Article
|
||||
from sabnzbd.nzbstuff import Article
|
||||
|
||||
# Operations on the article table are handled via try/except.
|
||||
# The counters need to be made atomic to ensure consistency.
|
||||
@@ -39,7 +39,7 @@ class ArticleCache:
|
||||
self.__cache_limit_org = 0
|
||||
self.__cache_limit = 0
|
||||
self.__cache_size = 0
|
||||
self.__article_table: dict[Article, bytes] = {} # Dict of buffered articles
|
||||
self.__article_table: Dict[Article, bytes] = {} # Dict of buffered articles
|
||||
|
||||
self.assembler_write_trigger: int = 1
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ import logging
|
||||
import re
|
||||
from threading import Thread
|
||||
import ctypes
|
||||
from typing import Optional
|
||||
from typing import Tuple, Optional, List
|
||||
import rarfile
|
||||
|
||||
import sabnzbd
|
||||
@@ -39,17 +39,16 @@ from sabnzbd.filesystem import (
|
||||
has_unwanted_extension,
|
||||
get_basename,
|
||||
)
|
||||
from sabnzbd.constants import Status, GIGI
|
||||
from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.nzb import NzbFile, NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject, NzbFile
|
||||
import sabnzbd.par2file as par2file
|
||||
|
||||
|
||||
class Assembler(Thread):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.max_queue_size: int = cfg.assembler_max_queue_size()
|
||||
self.queue: queue.Queue[tuple[Optional[NzbObject], Optional[NzbFile], Optional[bool]]] = queue.Queue()
|
||||
self.queue: queue.Queue[Tuple[Optional[NzbObject], Optional[NzbFile], Optional[bool]]] = queue.Queue()
|
||||
|
||||
def stop(self):
|
||||
self.queue.put((None, None, None))
|
||||
@@ -58,7 +57,7 @@ class Assembler(Thread):
|
||||
self.queue.put((nzo, nzf, file_done))
|
||||
|
||||
def queue_level(self) -> float:
|
||||
return self.queue.qsize() / self.max_queue_size
|
||||
return self.queue.qsize() / MAX_ASSEMBLER_QUEUE
|
||||
|
||||
def run(self):
|
||||
while 1:
|
||||
@@ -250,7 +249,7 @@ RE_SUBS = re.compile(r"\W+sub|subs|subpack|subtitle|subtitles(?![a-z])", re.I)
|
||||
SAFE_EXTS = (".mkv", ".mp4", ".avi", ".wmv", ".mpg", ".webm")
|
||||
|
||||
|
||||
def is_cloaked(nzo: NzbObject, path: str, names: list[str]) -> bool:
|
||||
def is_cloaked(nzo: NzbObject, path: str, names: List[str]) -> bool:
|
||||
"""Return True if this is likely to be a cloaked encrypted post"""
|
||||
fname = get_basename(get_filename(path.lower()))
|
||||
for name in names:
|
||||
@@ -279,7 +278,7 @@ def is_cloaked(nzo: NzbObject, path: str, names: list[str]) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> tuple[bool, Optional[str]]:
|
||||
def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[bool, Optional[str]]:
|
||||
"""Combines check for unwanted and encrypted files to save on CPU and IO"""
|
||||
encrypted = False
|
||||
unwanted = None
|
||||
|
||||
@@ -22,7 +22,7 @@ sabnzbd.bpsmeter - bpsmeter
|
||||
import time
|
||||
import logging
|
||||
import re
|
||||
from typing import Optional
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import BYTES_FILE_NAME, KIBI
|
||||
@@ -132,20 +132,20 @@ class BPSMeter:
|
||||
self.speed_log_time = t
|
||||
self.last_update = t
|
||||
self.bps = 0.0
|
||||
self.bps_list: list[int] = []
|
||||
self.bps_list: List[int] = []
|
||||
|
||||
self.server_bps: dict[str, float] = {}
|
||||
self.cached_amount: dict[str, int] = {}
|
||||
self.server_bps: Dict[str, float] = {}
|
||||
self.cached_amount: Dict[str, int] = {}
|
||||
self.sum_cached_amount: int = 0
|
||||
self.day_total: dict[str, int] = {}
|
||||
self.week_total: dict[str, int] = {}
|
||||
self.month_total: dict[str, int] = {}
|
||||
self.grand_total: dict[str, int] = {}
|
||||
self.day_total: Dict[str, int] = {}
|
||||
self.week_total: Dict[str, int] = {}
|
||||
self.month_total: Dict[str, int] = {}
|
||||
self.grand_total: Dict[str, int] = {}
|
||||
|
||||
self.timeline_total: dict[str, dict[str, int]] = {}
|
||||
self.timeline_total: Dict[str, Dict[str, int]] = {}
|
||||
|
||||
self.article_stats_tried: dict[str, dict[str, int]] = {}
|
||||
self.article_stats_failed: dict[str, dict[str, int]] = {}
|
||||
self.article_stats_tried: Dict[str, Dict[str, int]] = {}
|
||||
self.article_stats_failed: Dict[str, Dict[str, int]] = {}
|
||||
|
||||
self.delayed_assembler: int = 0
|
||||
|
||||
@@ -254,6 +254,8 @@ class BPSMeter:
|
||||
self.week_total[server] = 0
|
||||
if server not in self.month_total:
|
||||
self.month_total[server] = 0
|
||||
if server not in self.month_total:
|
||||
self.month_total[server] = 0
|
||||
if server not in self.grand_total:
|
||||
self.grand_total[server] = 0
|
||||
if server not in self.timeline_total:
@@ -300,51 +302,45 @@ class BPSMeter:
for server in sabnzbd.Downloader.servers[:]:
self.init_server_stats(server.id)

# Cache dict references for faster access
day_total = self.day_total
week_total = self.week_total
month_total = self.month_total
grand_total = self.grand_total
timeline_total = self.timeline_total
cached_amount = self.cached_amount
server_bps = self.server_bps

start_time = self.start_time
last_update = self.last_update
# Minimum epsilon to avoid division by zero
dt_total = max(t - start_time, 1e-6)
dt_last = max(last_update - start_time, 1e-6)

# Add amounts that have been stored temporarily to statistics
for srv in self.cached_amount:
if cached := self.cached_amount[srv]:
day_total[srv] += cached
week_total[srv] += cached
month_total[srv] += cached
grand_total[srv] += cached
timeline_total[srv][self.day_label] += cached

# Reset for next time
cached_amount[srv] = 0
if self.cached_amount[srv]:
self.day_total[srv] += self.cached_amount[srv]
self.week_total[srv] += self.cached_amount[srv]
self.month_total[srv] += self.cached_amount[srv]
self.grand_total[srv] += self.cached_amount[srv]
self.timeline_total[srv][self.day_label] += self.cached_amount[srv]

# Update server bps
server_bps[srv] = (server_bps[srv] * dt_last + cached) / dt_total
try:
self.server_bps[srv] = (
self.server_bps[srv] * (self.last_update - self.start_time) + self.cached_amount[srv]
) / (t - self.start_time)
except ZeroDivisionError:
self.server_bps[srv] = 0.0

# Reset for next time
self.cached_amount[srv] = 0

# Quota check
total_cached = self.sum_cached_amount
if self.have_quota and self.quota_enabled:
self.left -= total_cached
self.left -= self.sum_cached_amount
self.check_quota()

# Speedometer
self.bps = (self.bps * dt_last + total_cached) / dt_total
try:
self.bps = (self.bps * (self.last_update - self.start_time) + self.sum_cached_amount) / (
t - self.start_time
)
except ZeroDivisionError:
self.bps = 0.0

self.sum_cached_amount = 0
self.last_update = t

check_time = t - 5.0

if start_time < check_time:
if self.start_time < check_time:
self.start_time = check_time

if self.bps < 0.01:
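The speed math on both sides of this hunk is the same weighted running average; the two versions differ only in how a zero time span is avoided (an epsilon floor on the deltas versus catching ZeroDivisionError). Read in isolation, the update folds the newly cached byte count into the average measured since start_time. A small sketch of that formula, detached from the BPSMeter class:

def update_bps(bps: float, cached_bytes: int, start_time: float, last_update: float, now: float) -> float:
    # The old average accounts for (last_update - start_time) seconds of traffic;
    # the cached bytes arrived since then. Dividing by the full elapsed span gives
    # the new average in bytes per second. The 1e-6 floor avoids division by zero.
    dt_total = max(now - start_time, 1e-6)
    dt_last = max(last_update - start_time, 1e-6)
    return (bps * dt_last + cached_bytes) / dt_total

For example, update_bps(1_000_000, 2_500_000, 0.0, 10.0, 12.5) gives (10_000_000 + 2_500_000) / 12.5 = 1_000_000 bytes/s: 2.5 MB arriving over 2.5 s keeps a 1 MB/s average unchanged.
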
@@ -386,7 +382,7 @@ class BPSMeter:

# Always trim the list to the max-length
if len(self.bps_list) > BPS_LIST_MAX:
self.bps_list = self.bps_list[-BPS_LIST_MAX:]
self.bps_list = self.bps_list[len(self.bps_list) - BPS_LIST_MAX :]

def get_sums(self):
"""return tuple of grand, month, week, day totals"""

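The two slice forms in the trim hunk above are equivalent; the negative index simply states the intent ("keep the last BPS_LIST_MAX entries") more directly. A quick check:

BPS_LIST_MAX = 4
bps_list = [10, 20, 30, 40, 50, 60]
assert bps_list[-BPS_LIST_MAX:] == bps_list[len(bps_list) - BPS_LIST_MAX :] == [30, 40, 50, 60]
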
@@ -25,7 +25,7 @@ import re
|
||||
import argparse
|
||||
import socket
|
||||
import ipaddress
|
||||
from typing import Union
|
||||
from typing import List, Tuple, Union
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.config import (
|
||||
@@ -52,14 +52,12 @@ from sabnzbd.constants import (
|
||||
DEF_STD_WEB_COLOR,
|
||||
DEF_HTTPS_CERT_FILE,
|
||||
DEF_HTTPS_KEY_FILE,
|
||||
DEF_MAX_ASSEMBLER_QUEUE,
|
||||
DEF_PIPELINING_REQUESTS,
|
||||
)
|
||||
from sabnzbd.filesystem import same_directory, real_path, is_valid_script, is_network_path
|
||||
|
||||
# Validators currently only are made for string/list-of-strings
|
||||
# and return those on success or an error message.
|
||||
ValidateResult = Union[tuple[None, str], tuple[None, list[str]], tuple[str, None]]
|
||||
ValidateResult = Union[Tuple[None, str], Tuple[None, List[str]], Tuple[str, None]]
|
||||
|
||||
|
||||
##############################################################################
|
||||
@@ -124,21 +122,21 @@ def supported_unrar_parameters(value: str) -> ValidateResult:
|
||||
return None, value
|
||||
|
||||
|
||||
def all_lowercase(value: Union[str, list]) -> tuple[None, Union[str, list]]:
|
||||
def all_lowercase(value: Union[str, List]) -> Tuple[None, Union[str, List]]:
|
||||
"""Lowercase and strip everything!"""
|
||||
if isinstance(value, list):
|
||||
return None, [item.lower().strip() for item in value]
|
||||
return None, value.lower().strip()
|
||||
|
||||
|
||||
def lower_case_ext(value: Union[str, list]) -> tuple[None, Union[str, list]]:
|
||||
def lower_case_ext(value: Union[str, List]) -> Tuple[None, Union[str, List]]:
|
||||
"""Generate lower case extension(s), without dot"""
|
||||
if isinstance(value, list):
|
||||
return None, [item.lower().strip(" .") for item in value]
|
||||
return None, value.lower().strip(" .")
|
||||
|
||||
|
||||
def validate_single_tag(value: list[str]) -> tuple[None, list[str]]:
|
||||
def validate_single_tag(value: List[str]) -> Tuple[None, List[str]]:
|
||||
"""Don't split single indexer tags like "TV > HD"
|
||||
into ['TV', '>', 'HD']
|
||||
"""
|
||||
@@ -148,7 +146,7 @@ def validate_single_tag(value: list[str]) -> tuple[None, list[str]]:
|
||||
return None, value
|
||||
|
||||
|
||||
def validate_url_base(value: str) -> tuple[None, str]:
|
||||
def validate_url_base(value: str) -> Tuple[None, str]:
|
||||
"""Strips the right slash and adds starting slash, if not present"""
|
||||
if value and isinstance(value, str):
|
||||
if not value.startswith("/"):
|
||||
@@ -160,7 +158,7 @@ def validate_url_base(value: str) -> tuple[None, str]:
|
||||
RE_VAL = re.compile(r"[^@ ]+@[^.@ ]+\.[^.@ ]")
|
||||
|
||||
|
||||
def validate_email(value: Union[list, str]) -> ValidateResult:
|
||||
def validate_email(value: Union[List, str]) -> ValidateResult:
|
||||
if email_endjob() or email_full() or email_rss():
|
||||
if isinstance(value, list):
|
||||
values = value
|
||||
@@ -287,7 +285,7 @@ def validate_download_vs_complete_dir(root: str, value: str, default: str):
|
||||
return validate_safedir(root, value, default)
|
||||
|
||||
|
||||
def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> tuple[None, str]:
|
||||
def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> Tuple[None, str]:
|
||||
"""Warn users to not use the Program Files folder for their scripts"""
|
||||
# Need to add separator so /mnt/sabnzbd and /mnt/sabnzbd-data are not detected as equal
|
||||
if value and same_directory(sabnzbd.DIR_PROG, os.path.join(root, value)):
|
||||
@@ -300,7 +298,7 @@ def validate_scriptdir_not_appdir(root: str, value: str, default: str) -> tuple[
|
||||
return None, value
|
||||
|
||||
|
||||
def validate_default_if_empty(root: str, value: str, default: str) -> tuple[None, str]:
|
||||
def validate_default_if_empty(root: str, value: str, default: str) -> Tuple[None, str]:
|
||||
"""If value is empty, return default"""
|
||||
if value:
|
||||
return None, value
|
||||
@@ -507,7 +505,7 @@ no_penalties = OptionBool("misc", "no_penalties", False)
|
||||
x_frame_options = OptionBool("misc", "x_frame_options", True)
|
||||
allow_old_ssl_tls = OptionBool("misc", "allow_old_ssl_tls", False)
|
||||
enable_season_sorting = OptionBool("misc", "enable_season_sorting", True)
|
||||
verify_xff_header = OptionBool("misc", "verify_xff_header", True)
|
||||
verify_xff_header = OptionBool("misc", "verify_xff_header", False)
|
||||
|
||||
# Text values
|
||||
rss_odd_titles = OptionList("misc", "rss_odd_titles", ["nzbindex.nl/", "nzbindex.com/", "nzbclub.com/"])
|
||||
@@ -529,7 +527,6 @@ local_ranges = OptionList("misc", "local_ranges", protect=True)
|
||||
max_url_retries = OptionNumber("misc", "max_url_retries", 10, minval=1)
|
||||
downloader_sleep_time = OptionNumber("misc", "downloader_sleep_time", 10, minval=0)
|
||||
receive_threads = OptionNumber("misc", "receive_threads", 2, minval=1)
|
||||
assembler_max_queue_size = OptionNumber("misc", "assembler_max_queue_size", DEF_MAX_ASSEMBLER_QUEUE, minval=1)
|
||||
switchinterval = OptionNumber("misc", "switchinterval", 0.005, minval=0.001)
|
||||
ssdp_broadcast_interval = OptionNumber("misc", "ssdp_broadcast_interval", 15, minval=1, maxval=600)
|
||||
ext_rename_ignore = OptionList("misc", "ext_rename_ignore", validation=lower_case_ext)
|
||||
|
||||
@@ -28,7 +28,7 @@ import time
|
||||
import uuid
|
||||
import io
|
||||
import zipfile
|
||||
from typing import Any, Callable, Optional, Union
|
||||
from typing import List, Dict, Any, Callable, Optional, Union, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import configobj
|
||||
@@ -42,7 +42,6 @@ from sabnzbd.constants import (
|
||||
CONFIG_BACKUP_HTTPS,
|
||||
DEF_INI_FILE,
|
||||
DEF_SORTER_RENAME_SIZE,
|
||||
DEF_PIPELINING_REQUESTS,
|
||||
)
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.filesystem import clip_path, real_path, create_real_path, renamer, remove_file, is_writable
|
||||
@@ -102,14 +101,14 @@ class Option:
|
||||
def get_string(self) -> str:
|
||||
return str(self.get())
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
|
||||
"""Return value as a dictionary.
|
||||
Will not show non-public options if needed for the API"""
|
||||
if not self.__public and for_public_api:
|
||||
return {}
|
||||
return {self.__keyword: self.get()}
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Set value based on dictionary"""
|
||||
if not self.__protect:
|
||||
try:
|
||||
@@ -308,7 +307,7 @@ class OptionList(Option):
|
||||
self,
|
||||
section: str,
|
||||
keyword: str,
|
||||
default_val: Union[str, list, None] = None,
|
||||
default_val: Union[str, List, None] = None,
|
||||
validation: Optional[Callable] = None,
|
||||
add: bool = True,
|
||||
public: bool = True,
|
||||
@@ -319,7 +318,7 @@ class OptionList(Option):
|
||||
default_val = []
|
||||
super().__init__(section, keyword, default_val, add=add, public=public, protect=protect)
|
||||
|
||||
def set(self, value: Union[str, list]) -> Optional[str]:
|
||||
def set(self, value: Union[str, List]) -> Optional[str]:
|
||||
"""Set the list given a comma-separated string or a list"""
|
||||
error = None
|
||||
if value is not None:
|
||||
@@ -342,7 +341,7 @@ class OptionList(Option):
|
||||
"""Return the default list as a comma-separated string"""
|
||||
return ", ".join(self.default)
|
||||
|
||||
def __call__(self) -> list[str]:
|
||||
def __call__(self) -> List[str]:
|
||||
"""get() replacement"""
|
||||
return self.get()
|
||||
|
||||
@@ -407,7 +406,7 @@ class OptionPassword(Option):
|
||||
return "*" * 10
|
||||
return ""
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, str]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, str]:
|
||||
"""Return value a dictionary"""
|
||||
if for_public_api:
|
||||
return {self.keyword: self.get_stars()}
|
||||
@@ -445,7 +444,6 @@ class ConfigServer:
|
||||
self.enable = OptionBool(name, "enable", True, add=False)
|
||||
self.required = OptionBool(name, "required", False, add=False)
|
||||
self.optional = OptionBool(name, "optional", False, add=False)
|
||||
self.pipelining_requests = OptionNumber(name, "pipelining_requests", DEF_PIPELINING_REQUESTS, 1, 20, add=False)
|
||||
self.retention = OptionNumber(name, "retention", 0, add=False)
|
||||
self.expire_date = OptionStr(name, "expire_date", add=False)
|
||||
self.quota = OptionStr(name, "quota", add=False)
|
||||
@@ -456,7 +454,7 @@ class ConfigServer:
|
||||
self.set_dict(values)
|
||||
add_to_database("servers", self.__name, self)
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Set one or more fields, passed as dictionary"""
|
||||
# Replace usage_at_start value with most recent statistics if the user changes the quota value
|
||||
# Only when we are updating it from the Config
|
||||
@@ -478,7 +476,6 @@ class ConfigServer:
|
||||
"enable",
|
||||
"required",
|
||||
"optional",
|
||||
"pipelining_requests",
|
||||
"retention",
|
||||
"expire_date",
|
||||
"quota",
|
||||
@@ -494,7 +491,7 @@ class ConfigServer:
|
||||
if not self.displayname():
|
||||
self.displayname.set(self.__name)
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
|
||||
"""Return a dictionary with all attributes"""
|
||||
output_dict = {}
|
||||
output_dict["name"] = self.__name
|
||||
@@ -514,7 +511,6 @@ class ConfigServer:
|
||||
output_dict["enable"] = self.enable()
|
||||
output_dict["required"] = self.required()
|
||||
output_dict["optional"] = self.optional()
|
||||
output_dict["pipelining_requests"] = self.pipelining_requests()
|
||||
output_dict["retention"] = self.retention()
|
||||
output_dict["expire_date"] = self.expire_date()
|
||||
output_dict["quota"] = self.quota()
|
||||
@@ -535,7 +531,7 @@ class ConfigServer:
|
||||
class ConfigCat:
|
||||
"""Class defining a single category"""
|
||||
|
||||
def __init__(self, name: str, values: dict[str, Any]):
|
||||
def __init__(self, name: str, values: Dict[str, Any]):
|
||||
self.__name = clean_section_name(name)
|
||||
name = "categories," + self.__name
|
||||
|
||||
@@ -549,7 +545,7 @@ class ConfigCat:
|
||||
self.set_dict(values)
|
||||
add_to_database("categories", self.__name, self)
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Set one or more fields, passed as dictionary"""
|
||||
for kw in ("order", "pp", "script", "dir", "newzbin", "priority"):
|
||||
try:
|
||||
@@ -558,7 +554,7 @@ class ConfigCat:
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
|
||||
"""Return a dictionary with all attributes"""
|
||||
output_dict = {}
|
||||
output_dict["name"] = self.__name
|
||||
@@ -593,7 +589,7 @@ class ConfigSorter:
|
||||
self.set_dict(values)
|
||||
add_to_database("sorters", self.__name, self)
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Set one or more fields, passed as dictionary"""
|
||||
for kw in ("order", "min_size", "multipart_label", "sort_string", "sort_cats", "sort_type", "is_active"):
|
||||
try:
|
||||
@@ -602,7 +598,7 @@ class ConfigSorter:
|
||||
except KeyError:
|
||||
continue
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
|
||||
"""Return a dictionary with all attributes"""
|
||||
output_dict = {}
|
||||
output_dict["name"] = self.__name
|
||||
@@ -643,7 +639,7 @@ class OptionFilters(Option):
|
||||
return
|
||||
self.set(lst)
|
||||
|
||||
def update(self, pos: int, value: tuple):
|
||||
def update(self, pos: int, value: Tuple):
|
||||
"""Update filter 'pos' definition, value is a list
|
||||
Append if 'pos' outside list
|
||||
"""
|
||||
@@ -663,14 +659,14 @@ class OptionFilters(Option):
|
||||
return
|
||||
self.set(lst)
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, str]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, str]:
|
||||
"""Return filter list as a dictionary with keys 'filter[0-9]+'"""
|
||||
output_dict = {}
|
||||
for n, rss_filter in enumerate(self.get()):
|
||||
output_dict[f"filter{n}"] = rss_filter
|
||||
return output_dict
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Create filter list from dictionary with keys 'filter[0-9]+'"""
|
||||
filters = []
|
||||
# We don't know how many filters there are, so just assume all values are filters
|
||||
@@ -681,7 +677,7 @@ class OptionFilters(Option):
|
||||
if filters:
|
||||
self.set(filters)
|
||||
|
||||
def __call__(self) -> list[list[str]]:
|
||||
def __call__(self) -> List[List[str]]:
|
||||
"""get() replacement"""
|
||||
return self.get()
|
||||
|
||||
@@ -705,7 +701,7 @@ class ConfigRSS:
|
||||
self.set_dict(values)
|
||||
add_to_database("rss", self.__name, self)
|
||||
|
||||
def set_dict(self, values: dict[str, Any]):
|
||||
def set_dict(self, values: Dict[str, Any]):
|
||||
"""Set one or more fields, passed as dictionary"""
|
||||
for kw in ("uri", "cat", "pp", "script", "priority", "enable"):
|
||||
try:
|
||||
@@ -715,7 +711,7 @@ class ConfigRSS:
|
||||
continue
|
||||
self.filters.set_dict(values)
|
||||
|
||||
def get_dict(self, for_public_api: bool = False) -> dict[str, Any]:
|
||||
def get_dict(self, for_public_api: bool = False) -> Dict[str, Any]:
|
||||
"""Return a dictionary with all attributes"""
|
||||
output_dict = {}
|
||||
output_dict["name"] = self.__name
|
||||
@@ -759,7 +755,7 @@ AllConfigTypes = Union[
|
||||
ConfigRSS,
|
||||
ConfigServer,
|
||||
]
|
||||
CFG_DATABASE: dict[str, dict[str, AllConfigTypes]] = {}
|
||||
CFG_DATABASE: Dict[str, Dict[str, AllConfigTypes]] = {}
|
||||
|
||||
|
||||
@synchronized(CONFIG_LOCK)
|
||||
@@ -1107,7 +1103,7 @@ def restore_config_backup(config_backup_data: bytes):
|
||||
|
||||
|
||||
@synchronized(CONFIG_LOCK)
|
||||
def get_servers() -> dict[str, ConfigServer]:
|
||||
def get_servers() -> Dict[str, ConfigServer]:
|
||||
global CFG_DATABASE
|
||||
try:
|
||||
return CFG_DATABASE["servers"]
|
||||
@@ -1116,7 +1112,7 @@ def get_servers() -> dict[str, ConfigServer]:
|
||||
|
||||
|
||||
@synchronized(CONFIG_LOCK)
|
||||
def get_sorters() -> dict[str, ConfigSorter]:
|
||||
def get_sorters() -> Dict[str, ConfigSorter]:
|
||||
global CFG_DATABASE
|
||||
try:
|
||||
return CFG_DATABASE["sorters"]
|
||||
@@ -1124,7 +1120,7 @@ def get_sorters() -> dict[str, ConfigSorter]:
|
||||
return {}
|
||||
|
||||
|
||||
def get_ordered_sorters() -> list[dict]:
|
||||
def get_ordered_sorters() -> List[Dict]:
|
||||
"""Return sorters as an ordered list"""
|
||||
database_sorters = get_sorters()
|
||||
|
||||
@@ -1135,7 +1131,7 @@ def get_ordered_sorters() -> list[dict]:
|
||||
|
||||
|
||||
@synchronized(CONFIG_LOCK)
|
||||
def get_categories() -> dict[str, ConfigCat]:
|
||||
def get_categories() -> Dict[str, ConfigCat]:
|
||||
"""Return link to categories section.
|
||||
This section will always contain special category '*'
|
||||
"""
|
||||
@@ -1167,7 +1163,7 @@ def get_category(cat: str = "*") -> ConfigCat:
|
||||
return cats["*"]
|
||||
|
||||
|
||||
def get_ordered_categories() -> list[dict]:
|
||||
def get_ordered_categories() -> List[Dict]:
|
||||
"""Return list-copy of categories section that's ordered
|
||||
by user's ordering including Default-category
|
||||
"""
|
||||
@@ -1187,7 +1183,7 @@ def get_ordered_categories() -> list[dict]:
|
||||
|
||||
|
||||
@synchronized(CONFIG_LOCK)
|
||||
def get_rss() -> dict[str, ConfigRSS]:
|
||||
def get_rss() -> Dict[str, ConfigRSS]:
|
||||
global CFG_DATABASE
|
||||
try:
|
||||
# We have to remove non-separator commas by detecting if they are valid URL's

@@ -50,7 +50,7 @@ RENAMES_FILE = "__renames__"
ATTRIB_FILE = "SABnzbd_attrib"
REPAIR_REQUEST = "repair-all.sab"

SABCTOOLS_VERSION_REQUIRED = "9.1.0"
SABCTOOLS_VERSION_REQUIRED = "8.2.6"

DB_HISTORY_VERSION = 1
DB_HISTORY_NAME = "history%s.db" % DB_HISTORY_VERSION
@@ -97,13 +97,12 @@ CONFIG_BACKUP_HTTPS = { # "basename": "associated setting"
}

# Constants affecting download performance
DEF_MAX_ASSEMBLER_QUEUE = 12
SOFT_ASSEMBLER_QUEUE_LIMIT = 0.5
MAX_ASSEMBLER_QUEUE = 12
SOFT_QUEUE_LIMIT = 0.5
# Percentage of cache to use before adding file to assembler
ASSEMBLER_WRITE_THRESHOLD = 5
NNTP_BUFFER_SIZE = int(256 * KIBI)
NNTP_BUFFER_SIZE = int(800 * KIBI)
NTTP_MAX_BUFFER_SIZE = int(10 * MEBI)
DEF_PIPELINING_REQUESTS = 1

REPAIR_PRIORITY = 3
FORCE_PRIORITY = 2

@@ -27,7 +27,7 @@ import sys
|
||||
import threading
|
||||
import sqlite3
|
||||
from sqlite3 import Connection, Cursor
|
||||
from typing import Optional, Sequence, Any
|
||||
from typing import Optional, List, Sequence, Dict, Any, Tuple, Union
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg
|
||||
@@ -237,7 +237,7 @@ class HistoryDB:
|
||||
self.execute("""UPDATE history SET status = ? WHERE nzo_id = ?""", (Status.COMPLETED, job))
|
||||
logging.info("[%s] Marked job %s as completed", caller_name(), job)
|
||||
|
||||
def get_failed_paths(self, search: Optional[str] = None) -> list[str]:
|
||||
def get_failed_paths(self, search: Optional[str] = None) -> List[str]:
|
||||
"""Return list of all storage paths of failed jobs (may contain non-existing or empty paths)"""
|
||||
search = convert_search(search)
|
||||
fetch_ok = self.execute(
|
||||
@@ -315,10 +315,10 @@ class HistoryDB:
|
||||
limit: Optional[int] = None,
|
||||
archive: Optional[bool] = None,
|
||||
search: Optional[str] = None,
|
||||
categories: Optional[list[str]] = None,
|
||||
statuses: Optional[list[str]] = None,
|
||||
nzo_ids: Optional[list[str]] = None,
|
||||
) -> tuple[list[dict[str, Any]], int]:
|
||||
categories: Optional[List[str]] = None,
|
||||
statuses: Optional[List[str]] = None,
|
||||
nzo_ids: Optional[List[str]] = None,
|
||||
) -> Tuple[List[Dict[str, Any]], int]:
|
||||
"""Return records for specified jobs"""
|
||||
command_args = [convert_search(search)]
|
||||
|
||||
@@ -397,7 +397,7 @@ class HistoryDB:
|
||||
total = self.cursor.fetchone()["COUNT(*)"]
|
||||
return total > 0
|
||||
|
||||
def get_history_size(self) -> tuple[int, int, int]:
|
||||
def get_history_size(self) -> Tuple[int, int, int]:
|
||||
"""Returns the total size of the history and
|
||||
amounts downloaded in the last month and week
|
||||
"""
|
||||
@@ -457,7 +457,7 @@ class HistoryDB:
|
||||
return path
|
||||
return path
|
||||
|
||||
def get_other(self, nzo_id: str) -> tuple[str, str, str, str, str]:
|
||||
def get_other(self, nzo_id: str) -> Tuple[str, str, str, str, str]:
|
||||
"""Return additional data for job `nzo_id`"""
|
||||
if self.execute("""SELECT * FROM history WHERE nzo_id = ?""", (nzo_id,)):
|
||||
try:
|
||||
@@ -498,14 +498,9 @@ def convert_search(search: str) -> str:
|
||||
return search
|
||||
|
||||
|
||||
def build_history_info(
|
||||
nzo: "sabnzbd.nzb.NzbObject",
|
||||
workdir_complete: str,
|
||||
postproc_time: int,
|
||||
script_output: str,
|
||||
script_line: str,
|
||||
):
|
||||
def build_history_info(nzo, workdir_complete: str, postproc_time: int, script_output: str, script_line: str):
|
||||
"""Collects all the information needed for the database"""
|
||||
nzo: sabnzbd.nzbstuff.NzbObject
|
||||
completed = int(time.time())
|
||||
pp = PP_LOOKUP.get(opts_to_pp(nzo.repair, nzo.unpack, nzo.delete), "X")
|
||||
|
||||
@@ -559,7 +554,7 @@ def build_history_info(
|
||||
)
|
||||
|
||||
|
||||
def unpack_history_info(item: sqlite3.Row) -> dict[str, Any]:
|
||||
def unpack_history_info(item: sqlite3.Row) -> Dict[str, Any]:
|
||||
"""Expands the single line stage_log from the DB
|
||||
into a python dictionary for use in the history display
|
||||
"""
|
||||
|
||||
@@ -21,11 +21,14 @@ sabnzbd.decoder - article decoder
|
||||
|
||||
import logging
|
||||
import hashlib
|
||||
from typing import Optional
|
||||
import binascii
|
||||
from io import BytesIO
|
||||
from zlib import crc32
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import SABCTOOLS_VERSION_REQUIRED
|
||||
from sabnzbd.nzb import Article
|
||||
from sabnzbd.encoding import ubtou
|
||||
from sabnzbd.nzbstuff import Article
|
||||
from sabnzbd.misc import match_str
|
||||
|
||||
# Check for correct SABCTools version
|
||||
@@ -47,7 +50,7 @@ except Exception:
|
||||
|
||||
|
||||
class BadData(Exception):
|
||||
def __init__(self, data: bytearray):
|
||||
def __init__(self, data: bytes):
|
||||
super().__init__()
|
||||
self.data = data
|
||||
|
||||
@@ -60,8 +63,8 @@ class BadUu(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def decode(article: Article, decoder: sabctools.NNTPResponse):
|
||||
decoded_data: Optional[bytearray] = None
|
||||
def decode(article: Article, data_view: memoryview):
|
||||
decoded_data = None
|
||||
nzo = article.nzf.nzo
|
||||
art_id = article.article
|
||||
|
||||
@@ -75,10 +78,10 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Decoding %s", art_id)
|
||||
|
||||
if decoder.format is sabctools.EncodingFormat.UU:
|
||||
decoded_data = decode_uu(article, decoder)
|
||||
if article.nzf.type == "uu":
|
||||
decoded_data = decode_uu(article, bytes(data_view))
|
||||
else:
|
||||
decoded_data = decode_yenc(article, decoder)
|
||||
decoded_data = decode_yenc(article, data_view)
|
||||
|
||||
article_success = True
|
||||
|
||||
@@ -109,18 +112,28 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):
|
||||
|
||||
except (BadYenc, ValueError):
|
||||
# Handles precheck and badly formed articles
|
||||
if nzo.precheck and decoder.status_code == 223:
|
||||
if nzo.precheck and data_view and data_view[:4] == b"223 ":
|
||||
# STAT was used, so we only get a status code
|
||||
article_success = True
|
||||
else:
|
||||
# Examine the headers (for precheck) or body (for download).
|
||||
if lines := decoder.lines:
|
||||
for line in lines:
|
||||
# Try uu-decoding
|
||||
if not nzo.precheck and article.nzf.type != "yenc":
|
||||
try:
|
||||
decoded_data = decode_uu(article, bytes(data_view))
|
||||
logging.debug("Found uu-encoded article %s in job %s", art_id, nzo.final_name)
|
||||
article_success = True
|
||||
except Exception:
|
||||
pass
|
||||
# Only bother with further checks if uu-decoding didn't work out
|
||||
if not article_success:
|
||||
# Convert the first 2000 bytes of raw socket data to article lines,
|
||||
# and examine the headers (for precheck) or body (for download).
|
||||
for line in bytes(data_view[:2000]).split(b"\r\n"):
|
||||
lline = line.lower()
|
||||
if lline.startswith("message-id:"):
|
||||
if lline.startswith(b"message-id:"):
|
||||
article_success = True
|
||||
# Look for DMCA clues (while skipping "X-" headers)
|
||||
if not lline.startswith("x-") and match_str(lline, ("dmca", "removed", "cancel", "blocked")):
|
||||
if not lline.startswith(b"x-") and match_str(lline, (b"dmca", b"removed", b"cancel", b"blocked")):
|
||||
article_success = False
|
||||
logging.info("Article removed from server (%s)", art_id)
|
||||
break
|
||||
@@ -157,63 +170,164 @@ def decode(article: Article, decoder: sabctools.NNTPResponse):
|
||||
sabnzbd.NzbQueue.register_article(article, article_success)


def decode_yenc(article: Article, response: sabctools.NNTPResponse) -> bytearray:
def decode_yenc(article: Article, data_view: memoryview) -> bytearray:
# Let SABCTools do all the heavy lifting
decoded_data = response.data
article.file_size = response.file_size
article.data_begin = response.part_begin
article.data_size = response.part_size
(
decoded_data,
yenc_filename,
article.file_size,
article.data_begin,
article.data_size,
crc_correct,
) = sabctools.yenc_decode(data_view)

nzf = article.nzf
# Assume it is yenc
nzf.type = "yenc"

# Only set the name if it was found and not obfuscated
if not nzf.filename_checked and (file_name := response.file_name):
if not nzf.filename_checked and yenc_filename:
# Set the md5-of-16k if this is the first article
if article.lowest_partnum:
nzf.md5of16k = hashlib.md5(memoryview(decoded_data)[:16384]).digest()
nzf.md5of16k = hashlib.md5(decoded_data[:16384]).digest()

# Try the rename, even if it's not the first article
# For example when the first article was missing
nzf.nzo.verify_nzf_filename(nzf, file_name)
nzf.nzo.verify_nzf_filename(nzf, yenc_filename)

# CRC check
if (crc := response.crc) is None:
if crc_correct is None:
logging.info("CRC Error in %s", article.article)
raise BadData(decoded_data)

article.crc32 = crc
article.crc32 = crc_correct

return decoded_data

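Two integrity details in the decode_yenc hunk are worth calling out: the MD5 of the first 16 KiB of the lowest-numbered article is stored per file (nzf.md5of16k) for later identification, and the article's CRC32 comes from the yEnc data. A trimmed illustration of those two computations on a plain buffer (function names here are illustrative, not SABnzbd API):

import hashlib
from zlib import crc32

def first_16k_md5(decoded_data: bytes) -> bytes:
    # Hash only the first 16 KiB; enough to identify a file cheaply
    return hashlib.md5(decoded_data[:16384]).digest()

def crc_matches(decoded_data: bytes, expected_crc: int) -> bool:
    # The yEnc trailer carries the CRC32 of the decoded part; recompute and compare
    return crc32(decoded_data) == (expected_crc & 0xFFFFFFFF)
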
def decode_uu(article: Article, response: sabctools.NNTPResponse) -> bytearray:
|
||||
"""Process a uu-decoded response"""
|
||||
if not response.bytes_decoded:
|
||||
def decode_uu(article: Article, raw_data: bytes) -> bytes:
|
||||
"""Try to uu-decode an article. The raw_data may or may not contain headers.
|
||||
If there are headers, they will be separated from the body by at least one
|
||||
empty line. In case of no headers, the first line seems to always be the nntp
|
||||
response code (220/222) directly followed by the msg body."""
|
||||
if not raw_data:
|
||||
logging.debug("No data to decode")
|
||||
raise BadUu
|
||||
|
||||
if response.baddata:
|
||||
raise BadData(response.data)
|
||||
# Line up the raw_data
|
||||
raw_data = raw_data.split(b"\r\n")
|
||||
|
||||
decoded_data = response.data
|
||||
nzf = article.nzf
|
||||
nzf.type = "uu"
|
||||
# Index of the uu payload start in raw_data
|
||||
uu_start = 0
|
||||
|
||||
# Limit the number of lines to check for the onset of uu data
|
||||
limit = min(len(raw_data), 32) - 1
|
||||
if limit < 3:
|
||||
logging.debug("Article too short to contain valid uu-encoded data")
|
||||
raise BadUu
|
||||
|
||||
# Try to find an empty line separating the body from headers or response
|
||||
# code and set the expected payload start to the next line.
|
||||
try:
|
||||
uu_start = raw_data[:limit].index(b"") + 1
|
||||
except ValueError:
|
||||
# No empty line, look for a response code instead
|
||||
if raw_data[0].startswith(b"220 ") or raw_data[0].startswith(b"222 "):
|
||||
uu_start = 1
|
||||
else:
|
||||
# Invalid data?
|
||||
logging.debug("Failed to locate start of uu payload")
|
||||
raise BadUu
|
||||
|
||||
def is_uu_junk(line: bytes) -> bool:
|
||||
"""Determine if the line is empty or contains known junk data"""
|
||||
return (not line) or line == b"-- " or line.startswith(b"Posted via ")
|
||||
|
||||
# Check the uu 'begin' line
|
||||
if article.lowest_partnum:
|
||||
try:
|
||||
# Make sure the line after the uu_start one isn't empty as well or
|
||||
# detection of the 'begin' line won't work. For articles other than
|
||||
# lowest_partnum, filtering out empty lines (and other junk) can
|
||||
# wait until the actual decoding step.
|
||||
for index in range(uu_start, limit):
|
||||
if is_uu_junk(raw_data[index]):
|
||||
uu_start = index + 1
|
||||
else:
|
||||
# Bingo
|
||||
break
|
||||
else:
|
||||
# Search reached the limit
|
||||
raise IndexError
|
||||
|
||||
uu_begin_data = raw_data[uu_start].split(b" ")
|
||||
# Filename may contain spaces
|
||||
uu_filename = ubtou(b" ".join(uu_begin_data[2:]).strip())
|
||||
|
||||
# Sanity check the 'begin' line
|
||||
if (
|
||||
len(uu_begin_data) < 3
|
||||
or uu_begin_data[0].lower() != b"begin"
|
||||
or (not int(uu_begin_data[1], 8))
|
||||
or (not uu_filename)
|
||||
):
|
||||
raise ValueError
|
||||
|
||||
# Consider this enough proof to set the type, avoiding further
|
||||
# futile attempts at decoding articles in this nzf as yenc.
|
||||
article.nzf.type = "uu"
|
||||
|
||||
# Bump the pointer for the payload to the next line
|
||||
uu_start += 1
|
||||
except Exception:
|
||||
logging.debug("Missing or invalid uu 'begin' line: %s", raw_data[uu_start] if uu_start < limit else None)
|
||||
raise BadUu
|
||||
|
||||
# Do the actual decoding
|
||||
with BytesIO() as decoded_data:
|
||||
for line in raw_data[uu_start:]:
|
||||
# Ignore junk
|
||||
if is_uu_junk(line):
|
||||
continue
|
||||
|
||||
# End of the article
|
||||
if line in (b"`", b"end", b"."):
|
||||
break
|
||||
|
||||
# Remove dot stuffing
|
||||
if line.startswith(b".."):
|
||||
line = line[1:]
|
||||
|
||||
try:
|
||||
decoded_line = binascii.a2b_uu(line)
|
||||
except binascii.Error as msg:
|
||||
try:
|
||||
# Workaround for broken uuencoders by Fredrik Lundh
|
||||
nbytes = (((line[0] - 32) & 63) * 4 + 5) // 3
|
||||
decoded_line = binascii.a2b_uu(line[:nbytes])
|
||||
except Exception as msg2:
|
||||
logging.info(
|
||||
"Error while uu-decoding %s: %s (line: %s; workaround: %s)", article.article, msg, line, msg2
|
||||
)
|
||||
raise BadData(decoded_data.getvalue())
|
||||
|
||||
# Store the decoded data
|
||||
decoded_data.write(decoded_line)
|
||||
|
||||
# Set the type to uu; the latter is still needed in
|
||||
# case the lowest_partnum article was damaged or slow to download.
|
||||
article.nzf.type = "uu"
|
||||
|
||||
# Only set the name if it was found and not obfuscated
|
||||
if not nzf.filename_checked and (file_name := response.file_name):
|
||||
# Set the md5-of-16k if this is the first article
|
||||
if article.lowest_partnum:
|
||||
nzf.md5of16k = hashlib.md5(memoryview(decoded_data)[:16384]).digest()
|
||||
decoded_data.seek(0)
|
||||
article.nzf.md5of16k = hashlib.md5(decoded_data.read(16384)).digest()
|
||||
# Handle the filename
|
||||
if not article.nzf.filename_checked and uu_filename:
|
||||
article.nzf.nzo.verify_nzf_filename(article.nzf, uu_filename)
|
||||
|
||||
# Try the rename, even if it's not the first article
|
||||
# For example when the first article was missing
|
||||
nzf.nzo.verify_nzf_filename(nzf, file_name)
|
||||
|
||||
article.crc32 = response.crc
|
||||
|
||||
return decoded_data
|
||||
data = decoded_data.getvalue()
|
||||
article.crc32 = crc32(data)
|
||||
return data
|
||||
|
||||
|
||||
def search_new_server(article: Article) -> bool:

@@ -70,7 +70,7 @@ def conditional_cache(cache_time: int):
Empty results (None, empty collections, empty strings, False, 0) are not cached.
If a keyword argument of `force=True` is used, the cache is skipped.

Unhashable types (such as list) can not be used as an input to the wrapped function in the current implementation!
Unhashable types (such as List) can not be used as an input to the wrapped function in the current implementation!

:param cache_time: Time in seconds to cache non-empty results
"""

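For orientation, the docstring above describes a time-based cache that never stores empty results and can be bypassed with force=True. A rough, self-contained sketch of such a decorator follows; it is not SABnzbd's implementation, it shares the documented limitation that all arguments must be hashable, and whether force should also be forwarded to the wrapped function is a guess here:

import time
import functools

def conditional_cache(cache_time: int):
    def decorator(func):
        cache = {}  # key -> (timestamp, result)

        @functools.wraps(func)
        def wrapper(*args, force: bool = False, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))  # arguments must be hashable
            now = time.time()
            if not force and key in cache:
                stamp, result = cache[key]
                if now - stamp < cache_time:
                    return result
            result = func(*args, **kwargs)
            # Empty results (None, "", [], {}, False, 0) are deliberately not cached
            if result:
                cache[key] = (now, result)
            return result

        return wrapper

    return decorator
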
18
sabnzbd/deobfuscate_filenames.py
Normal file → Executable file
@@ -38,13 +38,14 @@ from sabnzbd.par2file import is_par2_file, parse_par2_file
|
||||
import sabnzbd.utils.file_extension as file_extension
|
||||
from sabnzbd.misc import match_str
|
||||
from sabnzbd.constants import IGNORED_MOVIE_FOLDERS
|
||||
from typing import List
|
||||
|
||||
# Files to exclude and minimal file size for renaming
|
||||
EXCLUDED_FILE_EXTS = (".vob", ".rar", ".par2", ".mts", ".m2ts", ".cpi", ".clpi", ".mpl", ".mpls", ".bdm", ".bdmv")
|
||||
MIN_FILE_SIZE = 10 * 1024 * 1024
|
||||
|
||||
|
||||
def decode_par2(parfile: str) -> list[str]:
|
||||
def decode_par2(parfile: str) -> List[str]:
|
||||
"""Parse a par2 file and rename files listed in the par2 to their real name. Return list of generated files"""
|
||||
# Check if really a par2 file
|
||||
if not is_par2_file(parfile):
|
||||
@@ -76,7 +77,7 @@ def decode_par2(parfile: str) -> list[str]:
|
||||
return new_files
|
||||
|
||||
|
||||
def recover_par2_names(filelist: list[str]) -> list[str]:
|
||||
def recover_par2_names(filelist: List[str]) -> List[str]:
|
||||
"""Find par2 files and use them for renaming"""
|
||||
# Check that files exists
|
||||
filelist = [f for f in filelist if os.path.isfile(f)]
|
||||
@@ -167,7 +168,7 @@ def is_probably_obfuscated(myinputfilename: str) -> bool:
|
||||
return True # default is obfuscated
|
||||
|
||||
|
||||
def get_biggest_file(filelist: list[str]) -> str:
|
||||
def get_biggest_file(filelist: List[str]) -> str:
|
||||
"""Returns biggest file if that file is much bigger than the other files
|
||||
If only one file exists, return that. If no file, return None
|
||||
Note: the files in filelist must exist, because their sizes on disk are checked"""
|
||||
@@ -189,7 +190,7 @@ def get_biggest_file(filelist: list[str]) -> str:
|
||||
return None
|
||||
|
||||
|
||||
def deobfuscate(nzo: "sabnzbd.nzb.NzbObject", filelist: list[str], usefulname: str) -> list[str]:
|
||||
def deobfuscate(nzo, filelist: List[str], usefulname: str) -> List[str]:
|
||||
"""
|
||||
For files in filelist:
|
||||
1. if a file has no meaningful extension, add it (for example ".txt" or ".png")
|
||||
@@ -227,6 +228,9 @@ def deobfuscate(nzo: "sabnzbd.nzb.NzbObject", filelist: list[str], usefulname: s
|
||||
|
||||
"""
|
||||
|
||||
# Can't be imported directly due to circular import
|
||||
nzo: sabnzbd.nzbstuff.NzbObject
|
||||
|
||||
# to be sure, only keep really existing files and remove any duplicates:
|
||||
filtered_filelist = list(set(f for f in filelist if os.path.isfile(f)))
|
||||
|
||||
@@ -317,7 +321,7 @@ def without_extension(fullpathfilename: str) -> str:
|
||||
return os.path.splitext(fullpathfilename)[0]
|
||||
|
||||
|
||||
def deobfuscate_subtitles(nzo: "sabnzbd.nzb.NzbObject", filelist: list[str]):
|
||||
def deobfuscate_subtitles(nzo, filelist: List[str]):
|
||||
"""
|
||||
input:
|
||||
nzo, so we can update result via set_unpack_info()
|
||||
@@ -342,6 +346,10 @@ def deobfuscate_subtitles(nzo: "sabnzbd.nzb.NzbObject", filelist: list[str]):
|
||||
Something.else.txt
|
||||
|
||||
"""
|
||||
|
||||
# Can't be imported directly due to circular import
|
||||
nzo: sabnzbd.nzbstuff.NzbObject
|
||||
|
||||
# find .srt files
|
||||
if not (srt_files := [f for f in filelist if f.endswith(".srt")]):
|
||||
logging.debug("No .srt files found, so nothing to do")
|
||||
|
||||
@@ -25,18 +25,19 @@ import subprocess
|
||||
import time
|
||||
import threading
|
||||
import logging
|
||||
from typing import Optional
|
||||
from typing import Optional, Dict, List, Tuple
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.misc import int_conv, format_time_string, build_and_run_command
|
||||
from sabnzbd.filesystem import remove_all, real_path, remove_file, get_basename, clip_path
|
||||
from sabnzbd.nzb import NzbFile, NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject, NzbFile
|
||||
from sabnzbd.encoding import platform_btou
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.newsunpack import RAR_EXTRACTFROM_RE, RAR_EXTRACTED_RE, rar_volumelist, add_time_left
|
||||
from sabnzbd.postproc import prepare_extraction_path
|
||||
from sabnzbd.misc import SABRarFile
|
||||
import rarfile
|
||||
from sabnzbd.utils.diskspeed import diskspeedmeasure
|
||||
|
||||
# Need a lock to make sure start and stop is handled correctly
|
||||
@@ -61,11 +62,11 @@ class DirectUnpacker(threading.Thread):
|
||||
self.rarfile_nzf: Optional[NzbFile] = None
|
||||
self.cur_setname: Optional[str] = None
|
||||
self.cur_volume: int = 0
|
||||
self.total_volumes: dict[str, int] = {}
|
||||
self.total_volumes: Dict[str, int] = {}
|
||||
self.unpack_time: float = 0.0
|
||||
|
||||
self.success_sets: dict[str, tuple[list[str], list[str]]] = {}
|
||||
self.next_sets: list[NzbFile] = []
|
||||
self.success_sets: Dict[str, Tuple[List[str], List[str]]] = {}
|
||||
self.next_sets: List[NzbFile] = []
|
||||
|
||||
self.duplicate_lines: int = 0
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ import asyncio
|
||||
import os
|
||||
import logging
|
||||
import threading
|
||||
from typing import Generator, Optional
|
||||
from typing import Generator, Set, Optional, Tuple
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import SCAN_FILE_NAME, VALID_ARCHIVES, VALID_NZB_FILES, AddNzbFileResult
|
||||
@@ -128,7 +128,7 @@ class DirScanner(threading.Thread):
|
||||
|
||||
def get_suspected_files(
|
||||
self, folder: str, catdir: Optional[str] = None
|
||||
) -> Generator[tuple[str, Optional[str], Optional[os.stat_result]], None, None]:
|
||||
) -> Generator[Tuple[str, Optional[str], Optional[os.stat_result]], None, None]:
|
||||
"""Generator listing possible paths to NZB files"""
|
||||
|
||||
if catdir is None:
|
||||
@@ -222,15 +222,17 @@ class DirScanner(threading.Thread):
|
||||
|
||||
async def scan_async(self, dirscan_dir: str):
|
||||
"""Do one scan of the watched folder"""
|
||||
with DIR_SCANNER_LOCK:
|
||||
self.lock = asyncio.Lock()
|
||||
# On Python 3.8 we first need an event loop before we can create a asyncio.Lock
|
||||
if not self.lock:
|
||||
with DIR_SCANNER_LOCK:
|
||||
self.lock = asyncio.Lock()
|
||||
|
||||
async with self.lock:
|
||||
if sabnzbd.PAUSED_ALL:
|
||||
return
|
||||
|
||||
files: set[str] = set()
|
||||
futures: set[asyncio.Task] = set()
|
||||
files: Set[str] = set()
|
||||
futures: Set[asyncio.Task] = set()
|
||||
|
||||
for path, catdir, stat_tuple in self.get_suspected_files(dirscan_dir):
|
||||
files.add(path)
|
||||
|
||||
@@ -19,18 +19,15 @@
|
||||
sabnzbd.downloader - download engine
|
||||
"""
|
||||
|
||||
import select
|
||||
import logging
|
||||
import selectors
|
||||
from collections import deque
|
||||
from threading import Thread, RLock, current_thread
|
||||
import socket
|
||||
import sys
|
||||
import ssl
|
||||
import time
|
||||
from datetime import date
|
||||
from typing import Optional, Union, Deque
|
||||
|
||||
import sabctools
|
||||
from typing import List, Dict, Optional, Union, Set
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized, NzbQueueLocker, DOWNLOADER_CV, DOWNLOADER_LOCK
|
||||
@@ -39,7 +36,7 @@ import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.misc import from_units, helpful_warning, int_conv, MultiAddQueue
|
||||
from sabnzbd.get_addrinfo import get_fastest_addrinfo, AddrInfo
|
||||
from sabnzbd.constants import SOFT_ASSEMBLER_QUEUE_LIMIT
|
||||
from sabnzbd.constants import SOFT_QUEUE_LIMIT
|
||||
|
||||
|
||||
# Timeout penalty in minutes for each cause
|
||||
@@ -85,7 +82,6 @@ class Server:
|
||||
"retention",
|
||||
"username",
|
||||
"password",
|
||||
"pipelining_requests",
|
||||
"busy_threads",
|
||||
"next_busy_threads_check",
|
||||
"idle_threads",
|
||||
@@ -114,7 +110,6 @@ class Server:
|
||||
use_ssl,
|
||||
ssl_verify,
|
||||
ssl_ciphers,
|
||||
pipelining_requests,
|
||||
username=None,
|
||||
password=None,
|
||||
required=False,
|
||||
@@ -139,11 +134,10 @@ class Server:
|
||||
self.retention: int = retention
|
||||
self.username: Optional[str] = username
|
||||
self.password: Optional[str] = password
|
||||
self.pipelining_requests: Callable[[], int] = pipelining_requests
|
||||
|
||||
self.busy_threads: set[NewsWrapper] = set()
|
||||
self.busy_threads: Set[NewsWrapper] = set()
|
||||
self.next_busy_threads_check: float = 0
|
||||
self.idle_threads: set[NewsWrapper] = set()
|
||||
self.idle_threads: Set[NewsWrapper] = set()
|
||||
self.next_article_search: float = 0
|
||||
self.active: bool = True
|
||||
self.bad_cons: int = 0
|
||||
@@ -154,7 +148,7 @@ class Server:
|
||||
self.request: bool = False # True if a getaddrinfo() request is pending
|
||||
self.have_body: bool = True # Assume server has "BODY", until proven otherwise
|
||||
self.have_stat: bool = True # Assume server has "STAT", until proven otherwise
|
||||
self.article_queue: Deque[sabnzbd.nzb.Article] = deque()
|
||||
self.article_queue: List[sabnzbd.nzbstuff.Article] = []
|
||||
|
||||
# Skip during server testing
|
||||
if threads:
|
||||
@@ -179,19 +173,19 @@ class Server:
|
||||
self.reset_article_queue()
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def get_article(self, peek: bool = False):
|
||||
def get_article(self):
|
||||
"""Get article from pre-fetched and pre-fetch new ones if necessary.
|
||||
Articles that are too old for this server are immediately marked as tried"""
|
||||
if self.article_queue:
|
||||
return self.article_queue[0] if peek else self.article_queue.popleft()
|
||||
return self.article_queue.pop(0)
|
||||
|
||||
if self.next_article_search < time.time():
|
||||
# Pre-fetch new articles
|
||||
sabnzbd.NzbQueue.get_articles(self, sabnzbd.Downloader.servers, _ARTICLE_PREFETCH)
|
||||
self.article_queue = sabnzbd.NzbQueue.get_articles(self, sabnzbd.Downloader.servers, _ARTICLE_PREFETCH)
|
||||
if self.article_queue:
|
||||
article = self.article_queue[0] if peek else self.article_queue.popleft()
|
||||
article = self.article_queue.pop(0)
|
||||
# Mark expired articles as tried on this server
|
||||
if not peek and self.retention and article.nzf.nzo.avg_stamp < time.time() - self.retention:
|
||||
if self.retention and article.nzf.nzo.avg_stamp < time.time() - self.retention:
|
||||
sabnzbd.Downloader.decode(article)
|
||||
while self.article_queue:
|
||||
sabnzbd.Downloader.decode(self.article_queue.pop())
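The queue change in this hunk (list.pop(0) versus deque.popleft(), plus an optional peek) boils down to the prefetch pattern below. This is an illustrative stand-in only, assuming a fetch callback in place of NzbQueue.get_articles():

from collections import deque
import time


class PrefetchQueue:
    """Illustrative only: mirrors the prefetch/peek idea, not the SABnzbd classes."""

    def __init__(self, fetch_batch):
        self.items = deque()
        self.fetch_batch = fetch_batch  # callable returning a list of new items
        self.next_search = 0.0

    def get(self, peek=False):
        if not self.items and self.next_search < time.time():
            self.items.extend(self.fetch_batch())
            self.next_search = time.time() + 5  # back off before querying the source again
        if self.items:
            return self.items[0] if peek else self.items.popleft()
        return None


q = PrefetchQueue(lambda: ["a", "b", "c"])
assert q.get(peek=True) == "a"  # peek leaves the item in place
assert q.get() == "a"           # a normal get consumes it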
|
||||
@@ -207,12 +201,9 @@ class Server:
|
||||
"""Reset articles queued for the Server. Locked to prevent
|
||||
articles getting stuck in the Server when enabled/disabled"""
|
||||
logging.debug("Resetting article queue for %s (%s)", self, self.article_queue)
|
||||
while self.article_queue:
|
||||
try:
|
||||
article = self.article_queue.popleft()
|
||||
article.allow_new_fetcher()
|
||||
except IndexError:
|
||||
pass
|
||||
for article in self.article_queue:
|
||||
article.allow_new_fetcher()
|
||||
self.article_queue = []
|
||||
|
||||
def request_addrinfo(self):
|
||||
"""Launch async request to resolve server address and select the fastest.
|
||||
@@ -259,7 +250,7 @@ class Downloader(Thread):
|
||||
"shutdown",
|
||||
"server_restarts",
|
||||
"force_disconnect",
|
||||
"selector",
|
||||
"read_fds",
|
||||
"servers",
|
||||
"timers",
|
||||
"last_max_chunk_size",
|
||||
@@ -299,10 +290,10 @@ class Downloader(Thread):
|
||||
|
||||
self.force_disconnect: bool = False
|
||||
|
||||
self.selector: selectors.DefaultSelector = selectors.DefaultSelector()
|
||||
self.read_fds: Dict[int, NewsWrapper] = {}
|
||||
|
||||
self.servers: list[Server] = []
|
||||
self.timers: dict[str, list[float]] = {}
|
||||
self.servers: List[Server] = []
|
||||
self.timers: Dict[str, List[float]] = {}
|
||||
|
||||
for server in config.get_servers():
|
||||
self.init_server(None, server)
|
||||
@@ -328,7 +319,6 @@ class Downloader(Thread):
|
||||
ssl = srv.ssl()
|
||||
ssl_verify = srv.ssl_verify()
|
||||
ssl_ciphers = srv.ssl_ciphers()
|
||||
pipelining_requests = srv.pipelining_requests
|
||||
username = srv.username()
|
||||
password = srv.password()
|
||||
required = srv.required()
|
||||
@@ -359,7 +349,6 @@ class Downloader(Thread):
|
||||
ssl,
|
||||
ssl_verify,
|
||||
ssl_ciphers,
|
||||
pipelining_requests,
|
||||
username,
|
||||
password,
|
||||
required,
|
||||
@@ -372,34 +361,15 @@ class Downloader(Thread):
|
||||
self.servers.sort(key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def add_socket(self, nw: NewsWrapper):
|
||||
"""Add a socket to be watched for read or write availability"""
|
||||
if nw.nntp:
|
||||
try:
|
||||
self.selector.register(nw.nntp.fileno, selectors.EVENT_READ | selectors.EVENT_WRITE, nw)
|
||||
nw.selector_events = selectors.EVENT_READ | selectors.EVENT_WRITE
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def modify_socket(self, nw: NewsWrapper, events: int):
|
||||
"""Modify the events socket are watched for"""
|
||||
if nw.nntp and nw.selector_events != events:
|
||||
try:
|
||||
self.selector.modify(nw.nntp.fileno, events, nw)
|
||||
nw.selector_events = events
|
||||
except KeyError:
|
||||
pass
|
||||
def add_socket(self, fileno: int, nw: NewsWrapper):
|
||||
"""Add a socket ready to be used to the list to be watched"""
|
||||
self.read_fds[fileno] = nw
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def remove_socket(self, nw: NewsWrapper):
|
||||
"""Remove a socket to be watched"""
|
||||
if nw.nntp:
|
||||
try:
|
||||
self.selector.unregister(nw.nntp.fileno)
|
||||
nw.selector_events = 0
|
||||
except KeyError:
|
||||
pass
|
||||
self.read_fds.pop(nw.nntp.fileno, None)
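One side of this hunk tracks connections through the stdlib selectors module (register/modify/unregister plus an interest mask), the other through a plain fileno-to-NewsWrapper dict fed to select.select(). The selectors API the first variant leans on, shown standalone with a socketpair standing in for NNTP connections:

import selectors
import socket

sel = selectors.DefaultSelector()
a, b = socket.socketpair()  # stand-ins for NNTP connections
sel.register(a, selectors.EVENT_READ | selectors.EVENT_WRITE, data="conn-a")

b.send(b"hello")  # makes `a` readable

for key, events in sel.select(timeout=1.0):
    if events & selectors.EVENT_READ:
        print(key.data, "readable:", key.fileobj.recv(16))
    if events & selectors.EVENT_WRITE:
        print(key.data, "writable")

# Narrow the interest set to reads only, then stop watching the socket entirely
sel.modify(a, selectors.EVENT_READ, data="conn-a")
sel.unregister(a)
a.close()
b.close()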
|
||||
|
||||
@NzbQueueLocker
|
||||
def set_paused_state(self, state: bool):
|
||||
@@ -439,9 +409,8 @@ class Downloader(Thread):
|
||||
|
||||
@NzbQueueLocker
|
||||
def resume_from_postproc(self):
|
||||
if self.paused_for_postproc:
|
||||
logging.info("Post-processing finished, resuming download")
|
||||
self.paused_for_postproc = False
|
||||
logging.info("Post-processing finished, resuming download")
|
||||
self.paused_for_postproc = False
|
||||
|
||||
@NzbQueueLocker
|
||||
def disconnect(self):
|
||||
@@ -482,15 +451,6 @@ class Downloader(Thread):
|
||||
self.bandwidth_perc = 0
|
||||
self.bandwidth_limit = 0
|
||||
|
||||
# Increase limits for faster connections
|
||||
if limit > from_units("150M"):
|
||||
if cfg.receive_threads() == cfg.receive_threads.default:
|
||||
cfg.receive_threads.set(4)
|
||||
logging.info("Receive threads set to 4")
|
||||
if cfg.assembler_max_queue_size() == cfg.assembler_max_queue_size.default:
|
||||
cfg.assembler_max_queue_size.set(30)
|
||||
logging.info("Assembler max_queue_size set to 30")
|
||||
|
||||
def sleep_time_set(self):
|
||||
self.sleep_time = cfg.downloader_sleep_time() * 0.0001
|
||||
logging.debug("Sleep time: %f seconds", self.sleep_time)
|
||||
@@ -539,30 +499,26 @@ class Downloader(Thread):
|
||||
|
||||
# Remove all connections to server
|
||||
for nw in server.idle_threads | server.busy_threads:
|
||||
self.reset_nw(nw, "Forcing disconnect", warn=False, wait=False, retry_article=False)
|
||||
self.__reset_nw(nw, "Forcing disconnect", warn=False, wait=False, retry_article=False)
|
||||
|
||||
# Make sure server address resolution is refreshed
|
||||
server.addrinfo = None
|
||||
|
||||
@staticmethod
|
||||
def decode(article: "sabnzbd.nzb.Article", response: Optional[sabctools.NNTPResponse] = None):
|
||||
def decode(article, data_view: Optional[memoryview] = None):
|
||||
"""Decode article"""
|
||||
# Need a better way of draining requests
|
||||
if article.nzf.nzo.removed_from_queue:
|
||||
return
|
||||
|
||||
# Article was requested and fetched, update article stats for the server
|
||||
sabnzbd.BPSMeter.register_server_article_tried(article.fetcher.id)
|
||||
|
||||
# Handle broken articles directly
|
||||
if not response or not response.bytes_decoded and not article.nzf.nzo.precheck:
|
||||
if not data_view:
|
||||
if not article.search_new_server():
|
||||
article.nzf.nzo.increase_bad_articles_counter("missing_articles")
|
||||
sabnzbd.NzbQueue.register_article(article, success=False)
|
||||
return
|
||||
|
||||
# Decode and send to article cache
|
||||
sabnzbd.decoder.decode(article, response)
|
||||
sabnzbd.decoder.decode(article, data_view)
|
||||
|
||||
def run(self):
|
||||
# Warn if there are servers defined, but none are valid
|
||||
@@ -582,7 +538,7 @@ class Downloader(Thread):
|
||||
for _ in range(cfg.receive_threads()):
|
||||
# Started as daemon, so we don't need any shutdown logic in the worker
|
||||
# The Downloader code will make sure shutdown is handled gracefully
|
||||
Thread(target=self.process_nw_worker, args=(process_nw_queue,), daemon=True).start()
|
||||
Thread(target=self.process_nw_worker, args=(self.read_fds, process_nw_queue), daemon=True).start()
|
||||
|
||||
# Catch all errors, just in case
|
||||
try:
|
||||
@@ -604,9 +560,9 @@ class Downloader(Thread):
|
||||
if (nw.nntp and nw.nntp.error_msg) or (nw.timeout and now > nw.timeout):
|
||||
if nw.nntp and nw.nntp.error_msg:
|
||||
# Already showed error
|
||||
self.reset_nw(nw)
|
||||
self.__reset_nw(nw)
|
||||
else:
|
||||
self.reset_nw(nw, "Timed out", warn=True)
|
||||
self.__reset_nw(nw, "Timed out", warn=True)
|
||||
server.bad_cons += 1
|
||||
self.maybe_block_server(server)
|
||||
|
||||
@@ -646,14 +602,15 @@ class Downloader(Thread):
|
||||
server.request_addrinfo()
|
||||
break
|
||||
|
||||
if not server.get_article(peek=True):
|
||||
nw.article = server.get_article()
|
||||
if not nw.article:
|
||||
break
|
||||
|
||||
server.idle_threads.remove(nw)
|
||||
server.busy_threads.add(nw)
|
||||
|
||||
if nw.connected:
|
||||
self.add_socket(nw)
|
||||
self.__request_article(nw)
|
||||
else:
|
||||
try:
|
||||
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.host)
|
||||
@@ -665,14 +622,14 @@ class Downloader(Thread):
|
||||
server.host,
|
||||
sys.exc_info()[1],
|
||||
)
|
||||
self.reset_nw(nw, "Failed to initialize", warn=True)
|
||||
self.__reset_nw(nw, "Failed to initialize", warn=True)
|
||||
|
||||
if self.force_disconnect or self.shutdown:
|
||||
for server in self.servers:
|
||||
for nw in server.idle_threads | server.busy_threads:
|
||||
# Send goodbye if we have open socket
|
||||
if nw.nntp:
|
||||
self.reset_nw(nw, "Forcing disconnect", wait=False, count_article_try=False)
|
||||
self.__reset_nw(nw, "Forcing disconnect", wait=False, count_article_try=False)
|
||||
# Make sure server address resolution is refreshed
|
||||
server.addrinfo = None
|
||||
server.reset_article_queue()
|
||||
@@ -696,13 +653,10 @@ class Downloader(Thread):
|
||||
self.last_max_chunk_size = 0
|
||||
|
||||
# Use select to find sockets ready for reading/writing
|
||||
if self.selector.get_map():
|
||||
if events := self.selector.select(timeout=1.0):
|
||||
for key, ev in events:
|
||||
nw = key.data
|
||||
process_nw_queue.put((nw, ev, nw.generation))
|
||||
if readkeys := self.read_fds.keys():
|
||||
read, _, _ = select.select(readkeys, (), (), 1.0)
|
||||
else:
|
||||
events = []
|
||||
read = []
|
||||
BPSMeter.reset()
|
||||
time.sleep(0.1)
|
||||
self.max_chunk_size = _DEFAULT_CHUNK_SIZE
|
||||
@@ -721,75 +675,58 @@ class Downloader(Thread):
|
||||
next_bpsmeter_update = now + _BPSMETER_UPDATE_DELAY
|
||||
self.check_assembler_levels()
|
||||
|
||||
if not events:
|
||||
if not read:
|
||||
continue
|
||||
|
||||
# Wait for socket operation completion
|
||||
# Submit all readable sockets to be processed and wait for completion
|
||||
process_nw_queue.put_multiple(read)
|
||||
process_nw_queue.join()
|
||||
|
||||
except Exception:
|
||||
logging.error(T("Fatal error in Downloader"), exc_info=True)
|
||||
|
||||
def process_nw_worker(self, nw_queue: MultiAddQueue):
|
||||
def process_nw_worker(self, read_fds: Dict[int, NewsWrapper], nw_queue: MultiAddQueue):
|
||||
"""Worker for the daemon thread to process results.
|
||||
Wrapped in try/except because in case of an exception, logging
|
||||
might get lost and the queue.join() would block forever."""
|
||||
try:
|
||||
logging.debug("Starting Downloader receive thread: %s", current_thread().name)
|
||||
while True:
|
||||
self.process_nw(*nw_queue.get())
|
||||
# The read_fds is passed by reference, so we can access its items!
|
||||
self.process_nw(read_fds[nw_queue.get()])
|
||||
nw_queue.task_done()
|
||||
except Exception:
|
||||
# We cannot break out of the Downloader from here, so just pause
|
||||
logging.error(T("Fatal error in Downloader"), exc_info=True)
|
||||
self.pause()
|
||||
|
||||
def process_nw(self, nw: NewsWrapper, event: int, generation: int):
|
||||
def process_nw(self, nw: NewsWrapper):
|
||||
"""Receive data from a NewsWrapper and handle the response"""
|
||||
# Drop stale items
|
||||
if nw.generation != generation:
|
||||
try:
|
||||
bytes_received, end_of_line, article_done = nw.recv_chunk()
|
||||
except ssl.SSLWantReadError:
|
||||
return
|
||||
if event & selectors.EVENT_READ:
|
||||
self.process_nw_read(nw, generation)
|
||||
# If read caused a reset, don't proceed to write
|
||||
if nw.generation != generation:
|
||||
return
|
||||
if event & selectors.EVENT_WRITE:
|
||||
nw.write()
|
||||
|
||||
def process_nw_read(self, nw: NewsWrapper, generation: int) -> None:
|
||||
bytes_received: int = 0
|
||||
bytes_pending: int = 0
|
||||
|
||||
while nw.decoder and nw.generation == generation:
|
||||
try:
|
||||
n, bytes_pending = nw.read(nbytes=bytes_pending, generation=generation)
|
||||
bytes_received += n
|
||||
except ssl.SSLWantReadError:
|
||||
return
|
||||
except (ConnectionError, ConnectionAbortedError):
|
||||
# The ConnectionAbortedError is also thrown by sabctools in case of fatal SSL-layer problems
|
||||
self.reset_nw(nw, "Server closed connection", wait=False)
|
||||
return
|
||||
except BufferError:
|
||||
# The BufferError is thrown when exceeding maximum buffer size
|
||||
# Make sure to discard the article
|
||||
self.reset_nw(nw, "Maximum data buffer size exceeded", wait=False, retry_article=False)
|
||||
return
|
||||
|
||||
if not bytes_pending:
|
||||
break
|
||||
|
||||
# Ignore metrics for reset connections
|
||||
if nw.generation != generation:
|
||||
except (ConnectionError, ConnectionAbortedError):
|
||||
# The ConnectionAbortedError is also thrown by sabctools in case of fatal SSL-layer problems
|
||||
self.__reset_nw(nw, "Server closed connection", wait=False)
|
||||
return
|
||||
except BufferError:
|
||||
# The BufferError is thrown when exceeding maximum buffer size
|
||||
# Make sure to discard the article
|
||||
self.__reset_nw(nw, "Maximum data buffer size exceeded", wait=False, retry_article=False)
|
||||
return
|
||||
|
||||
article = nw.article
|
||||
server = nw.server
|
||||
|
||||
with DOWNLOADER_LOCK:
|
||||
sabnzbd.BPSMeter.update(server.id, bytes_received)
|
||||
if bytes_received > self.last_max_chunk_size:
|
||||
self.last_max_chunk_size = bytes_received
|
||||
# Update statistics only when we fetched a whole article
|
||||
# The side effect is that we don't count things like article-not-available messages
|
||||
if article_done:
|
||||
article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, nw.data_position)
|
||||
# Check speedlimit
|
||||
if (
|
||||
self.bandwidth_limit
|
||||
@@ -800,10 +737,97 @@ class Downloader(Thread):
|
||||
time.sleep(0.01)
|
||||
sabnzbd.BPSMeter.update()
|
||||
|
||||
# If we are not at the end of a line, more data will follow
|
||||
if not end_of_line:
|
||||
return
|
||||
|
||||
# Response code depends on request command:
|
||||
# 220 = ARTICLE, 222 = BODY
|
||||
if nw.status_code not in (220, 222) and not article_done:
|
||||
if not nw.connected or nw.status_code == 480:
|
||||
if not self.__finish_connect_nw(nw):
|
||||
return
|
||||
if nw.connected:
|
||||
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.host)
|
||||
self.__request_article(nw)
|
||||
|
||||
elif nw.status_code == 223:
|
||||
article_done = True
|
||||
logging.debug("Article <%s> is present", article.article)
|
||||
|
||||
elif nw.status_code in (411, 423, 430, 451):
|
||||
article_done = True
|
||||
logging.debug(
|
||||
"Thread %s@%s: Article %s missing (error=%s)",
|
||||
nw.thrdnum,
|
||||
nw.server.host,
|
||||
article.article,
|
||||
nw.status_code,
|
||||
)
|
||||
nw.reset_data_buffer()
|
||||
|
||||
elif nw.status_code == 500:
|
||||
if article.nzf.nzo.precheck:
|
||||
# Assume "STAT" command is not supported
|
||||
server.have_stat = False
|
||||
logging.debug("Server %s does not support STAT", server.host)
|
||||
else:
|
||||
# Assume "BODY" command is not supported
|
||||
server.have_body = False
|
||||
logging.debug("Server %s does not support BODY", server.host)
|
||||
nw.reset_data_buffer()
|
||||
self.__request_article(nw)
|
||||
|
||||
else:
|
||||
# Don't warn for (internal) server errors during downloading
|
||||
if nw.status_code not in (400, 502, 503):
|
||||
logging.warning(
|
||||
T("%s@%s: Received unknown status code %s for article %s"),
|
||||
nw.thrdnum,
|
||||
nw.server.host,
|
||||
nw.status_code,
|
||||
article.article,
|
||||
)
|
||||
|
||||
# Ditch this thread, we don't know what data we got now so the buffer can be bad
|
||||
self.__reset_nw(nw, f"Server error or unknown status code: {nw.status_code}", wait=False)
|
||||
return
|
||||
|
||||
if article_done:
|
||||
# Successful data, clear "bad" counter
|
||||
server.bad_cons = 0
|
||||
server.errormsg = server.warning = ""
|
||||
|
||||
# Decode
|
||||
self.decode(article, nw.data_view[: nw.data_position])
|
||||
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Thread %s@%s: %s done", nw.thrdnum, server.host, article.article)
|
||||
|
||||
# Reset connection for new activity
|
||||
nw.soft_reset()
|
||||
|
||||
# Request a new article immediately if possible
|
||||
if (
|
||||
nw.connected
|
||||
and server.active
|
||||
and not server.restart
|
||||
and not (self.paused or self.shutdown or self.paused_for_postproc)
|
||||
):
|
||||
nw.article = server.get_article()
|
||||
if nw.article:
|
||||
self.__request_article(nw)
|
||||
return
|
||||
|
||||
# Make socket available again
|
||||
server.busy_threads.discard(nw)
|
||||
server.idle_threads.add(nw)
|
||||
self.remove_socket(nw)
|
||||
|
||||
def check_assembler_levels(self):
|
||||
"""Check the Assembler queue to see if we need to delay, depending on queue size"""
|
||||
if (assembler_level := sabnzbd.Assembler.queue_level()) > SOFT_ASSEMBLER_QUEUE_LIMIT:
|
||||
time.sleep(min((assembler_level - SOFT_ASSEMBLER_QUEUE_LIMIT) / 4, 0.15))
|
||||
if (assembler_level := sabnzbd.Assembler.queue_level()) > SOFT_QUEUE_LIMIT:
|
||||
time.sleep(min((assembler_level - SOFT_QUEUE_LIMIT) / 4, 0.15))
|
||||
sabnzbd.BPSMeter.delayed_assembler += 1
|
||||
logged_counter = 0
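The throttle in check_assembler_levels() is a simple linear back-off above the soft limit, capped at 150 ms per pass. Restated as a pure function, with an assumed soft limit of 0.5 for the worked examples:

def assembler_delay(queue_level, soft_limit):
    """Delay grows linearly above the soft limit and is capped at 0.15 s."""
    if queue_level > soft_limit:
        return min((queue_level - soft_limit) / 4, 0.15)
    return 0.0


assert assembler_delay(0.4, 0.5) == 0.0   # below the limit: no throttling
assert assembler_delay(0.9, 0.5) == 0.1   # slightly over: brief sleep
assert assembler_delay(2.0, 0.5) == 0.15  # far over: capped at 150 ms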
|
||||
|
||||
@@ -825,12 +849,13 @@ class Downloader(Thread):
|
||||
logged_counter += 1
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def finish_connect_nw(self, nw: NewsWrapper, response: sabctools.NNTPResponse) -> bool:
|
||||
def __finish_connect_nw(self, nw: NewsWrapper) -> bool:
|
||||
server = nw.server
|
||||
try:
|
||||
nw.finish_connect(response.status_code, response.message)
|
||||
nw.finish_connect(nw.status_code)
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("%s@%s last message -> %d", nw.thrdnum, server.host, response.status_code)
|
||||
logging.debug("%s@%s last message -> %s", nw.thrdnum, server.host, nw.nntp_msg)
|
||||
nw.reset_data_buffer()
|
||||
except NNTPPermanentError as error:
|
||||
# Handle login problems
|
||||
block = False
|
||||
@@ -843,7 +868,7 @@ class Downloader(Thread):
|
||||
errormsg = T("Too many connections to server %s [%s]") % (server.host, error.msg)
|
||||
if server.active:
|
||||
# Don't count this for the tries (max_art_tries) on this server
|
||||
self.reset_nw(nw)
|
||||
self.__reset_nw(nw)
|
||||
self.plan_server(server, _PENALTY_TOOMANY)
|
||||
elif error.code in (502, 481, 482) and clues_too_many_ip(error.msg):
|
||||
# Login from (too many) different IP addresses
|
||||
@@ -893,7 +918,7 @@ class Downloader(Thread):
|
||||
if penalty and (block or server.optional):
|
||||
self.plan_server(server, penalty)
|
||||
# Note that the article is discarded for this server if the server is not required
|
||||
self.reset_nw(nw, retry_article=retry_article)
|
||||
self.__reset_nw(nw, retry_article=retry_article)
|
||||
return False
|
||||
except Exception as err:
|
||||
logging.error(
|
||||
@@ -904,11 +929,11 @@ class Downloader(Thread):
|
||||
)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
# No reset-warning needed, above logging is sufficient
|
||||
self.reset_nw(nw, retry_article=False)
|
||||
self.__reset_nw(nw, retry_article=False)
|
||||
return True
|
||||
|
||||
@synchronized(DOWNLOADER_LOCK)
|
||||
def reset_nw(
|
||||
def __reset_nw(
|
||||
self,
|
||||
nw: NewsWrapper,
|
||||
reset_msg: Optional[str] = None,
|
||||
@@ -916,7 +941,6 @@ class Downloader(Thread):
|
||||
wait: bool = True,
|
||||
count_article_try: bool = True,
|
||||
retry_article: bool = True,
|
||||
article: Optional["sabnzbd.nzb.Article"] = None,
|
||||
):
|
||||
# Some warnings are errors, and not added as server.warning
|
||||
if warn and reset_msg:
|
||||
@@ -932,8 +956,20 @@ class Downloader(Thread):
|
||||
# Make sure it is not in the readable sockets
|
||||
self.remove_socket(nw)
|
||||
|
||||
# Discard the article request which failed
|
||||
nw.discard(article, count_article_try=count_article_try, retry_article=retry_article)
|
||||
if nw.article and not nw.article.nzf.nzo.removed_from_queue:
|
||||
# Only some errors should count towards the total tries for each server
|
||||
if count_article_try:
|
||||
nw.article.tries += 1
|
||||
|
||||
# Do we discard, or try again for this server
|
||||
if not retry_article or (not nw.server.required and nw.article.tries > cfg.max_art_tries()):
|
||||
# Too many tries on this server, consider article missing
|
||||
self.decode(nw.article)
|
||||
nw.article.tries = 0
|
||||
else:
|
||||
# Allow all servers again for this article
|
||||
# Do not use the article_queue, as the server could already have been disabled when we get here!
|
||||
nw.article.allow_new_fetcher()
|
||||
|
||||
# Reset connection object
|
||||
nw.hard_reset(wait)
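The retry branch above reduces to one predicate: give up on the article when retrying is disallowed, or when a non-required server has exhausted its attempts (max_art_tries); otherwise every server is allowed again via allow_new_fetcher(). A condensed restatement for illustration, not the actual SABnzbd helper:

def should_drop_article(tries, max_tries, server_required, retry_allowed):
    """True when the article should be considered missing for this fetch attempt."""
    return not retry_allowed or (not server_required and tries > max_tries)


assert should_drop_article(tries=1, max_tries=3, server_required=False, retry_allowed=True) is False
assert should_drop_article(tries=4, max_tries=3, server_required=False, retry_allowed=True) is True
assert should_drop_article(tries=9, max_tries=3, server_required=True, retry_allowed=True) is False
assert should_drop_article(tries=0, max_tries=3, server_required=False, retry_allowed=False) is True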
|
||||
@@ -941,6 +977,21 @@ class Downloader(Thread):
|
||||
# Empty SSL info, it might change on next connect
|
||||
nw.server.ssl_info = ""
|
||||
|
||||
def __request_article(self, nw: NewsWrapper):
|
||||
try:
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Thread %s@%s: BODY %s", nw.thrdnum, nw.server.host, nw.article.article)
|
||||
nw.body()
|
||||
# Mark as ready to be read
|
||||
self.add_socket(nw.nntp.fileno, nw)
|
||||
except socket.error as err:
|
||||
logging.info("Looks like server closed connection: %s", err)
|
||||
self.__reset_nw(nw, "Server broke off connection", warn=True)
|
||||
except Exception:
|
||||
logging.error(T("Suspect error in downloader"))
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
self.__reset_nw(nw, "Server broke off connection", warn=True)
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Timed restart of servers admin.
|
||||
# For each server all planned events are kept in a list.
|
||||
|
||||
@@ -33,7 +33,7 @@ import fnmatch
|
||||
import stat
|
||||
import ctypes
|
||||
import random
|
||||
from typing import Union, Any, Optional, BinaryIO
|
||||
from typing import Union, List, Tuple, Any, Dict, Optional, BinaryIO
|
||||
|
||||
try:
|
||||
import win32api
|
||||
@@ -295,10 +295,10 @@ def sanitize_and_trim_path(path: str) -> str:
|
||||
if sabnzbd.WINDOWS:
|
||||
if path.startswith("\\\\?\\UNC\\"):
|
||||
new_path = "\\\\?\\UNC\\"
|
||||
path = path.removeprefix("\\\\?\\UNC\\")
|
||||
path = path[8:]
|
||||
elif path.startswith("\\\\?\\"):
|
||||
new_path = "\\\\?\\"
|
||||
path = path.removeprefix("\\\\?\\")
|
||||
path = path[4:]
|
||||
|
||||
path = path.replace("\\", "/")
|
||||
parts = path.split("/")
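Both spellings in this hunk strip the Windows long-path prefix; they differ only in style and minimum Python version, since the startswith() check already guarantees the prefix is present:

path = "\\\\?\\UNC\\server\\share\\file.nzb"
prefix = "\\\\?\\UNC\\"

# str.removeprefix() (Python 3.9+) strips only when the prefix actually matches...
assert path.removeprefix(prefix) == "server\\share\\file.nzb"
# ...while plain slicing relies on the preceding startswith() check
assert len(prefix) == 8
assert path[8:] == "server\\share\\file.nzb"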
|
||||
@@ -314,7 +314,7 @@ def sanitize_and_trim_path(path: str) -> str:
|
||||
return os.path.abspath(os.path.normpath(new_path))
|
||||
|
||||
|
||||
def sanitize_files(folder: Optional[str] = None, filelist: Optional[list[str]] = None) -> list[str]:
|
||||
def sanitize_files(folder: Optional[str] = None, filelist: Optional[List[str]] = None) -> List[str]:
|
||||
"""Sanitize each file in the folder or list of filepaths, return list of new names"""
|
||||
logging.info("Checking if any resulting filenames need to be sanitized")
|
||||
if folder:
|
||||
@@ -330,7 +330,7 @@ def sanitize_files(folder: Optional[str] = None, filelist: Optional[list[str]] =
|
||||
return output_filelist
|
||||
|
||||
|
||||
def strip_extensions(name: str, ext_to_remove: tuple[str, ...] = (".nzb", ".par", ".par2")) -> str:
|
||||
def strip_extensions(name: str, ext_to_remove: Tuple[str, ...] = (".nzb", ".par", ".par2")):
|
||||
"""Strip extensions from a filename, without sanitizing the filename"""
|
||||
name_base, ext = os.path.splitext(name)
|
||||
while ext.lower() in ext_to_remove:
|
||||
@@ -378,7 +378,7 @@ def real_path(loc: str, path: str) -> str:
|
||||
|
||||
def create_real_path(
|
||||
name: str, loc: str, path: str, apply_permissions: bool = False, writable: bool = True
|
||||
) -> tuple[bool, str, Optional[str]]:
|
||||
) -> Tuple[bool, str, Optional[str]]:
|
||||
"""When 'path' is relative, create join of 'loc' and 'path'
|
||||
When 'path' is absolute, create normalized path
|
||||
'name' is used for logging.
|
||||
@@ -484,7 +484,7 @@ TS_RE = re.compile(r"\.(\d+)\.(ts$)", re.I)
|
||||
|
||||
def build_filelists(
|
||||
workdir: Optional[str], workdir_complete: Optional[str] = None, check_both: bool = False, check_rar: bool = True
|
||||
) -> tuple[list[str], list[str], list[str], list[str]]:
|
||||
) -> Tuple[List[str], List[str], List[str], List[str]]:
|
||||
"""Build filelists, if workdir_complete has files, ignore workdir.
|
||||
Optionally scan both directories.
|
||||
Optionally test content to establish RAR-ness
|
||||
@@ -535,7 +535,7 @@ def safe_fnmatch(f: str, pattern: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def globber(path: str, pattern: str = "*") -> list[str]:
|
||||
def globber(path: str, pattern: str = "*") -> List[str]:
|
||||
"""Return matching base file/folder names in folder `path`"""
|
||||
# Cannot use glob.glob() because it doesn't support Windows long name notation
|
||||
if os.path.exists(path):
|
||||
@@ -543,7 +543,7 @@ def globber(path: str, pattern: str = "*") -> list[str]:
|
||||
return []
|
||||
|
||||
|
||||
def globber_full(path: str, pattern: str = "*") -> list[str]:
|
||||
def globber_full(path: str, pattern: str = "*") -> List[str]:
|
||||
"""Return matching full file/folder names in folder `path`"""
|
||||
# Cannot use glob.glob() because it doesn't support Windows long name notation
|
||||
if os.path.exists(path):
|
||||
@@ -572,7 +572,7 @@ def is_valid_script(basename: str) -> bool:
|
||||
return basename in list_scripts(default=False, none=False)
|
||||
|
||||
|
||||
def list_scripts(default: bool = False, none: bool = True) -> list[str]:
|
||||
def list_scripts(default: bool = False, none: bool = True) -> List[str]:
|
||||
"""Return a list of script names, optionally with 'Default' added"""
|
||||
lst = []
|
||||
path = sabnzbd.cfg.script_dir.get_path()
|
||||
@@ -613,7 +613,7 @@ def make_script_path(script: str) -> Optional[str]:
|
||||
return script_path
|
||||
|
||||
|
||||
def get_admin_path(name: str, future: bool) -> str:
|
||||
def get_admin_path(name: str, future: bool):
|
||||
"""Return news-style full path to job-admin folder of names job
|
||||
or else the old cache path
|
||||
"""
|
||||
@@ -660,7 +660,7 @@ def set_permissions(path: str, recursive: bool = True):
|
||||
UNWANTED_FILE_PERMISSIONS = stat.S_ISUID | stat.S_ISGID | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
||||
|
||||
|
||||
def removexbits(path: str, custom_permissions: Optional[int] = None):
|
||||
def removexbits(path: str, custom_permissions: int = None):
|
||||
"""Remove all the x-bits from files, respecting current or custom permissions"""
|
||||
if os.path.isfile(path):
|
||||
# Use custom permissions as base
|
||||
@@ -783,7 +783,7 @@ def get_unique_filename(path: str) -> str:
|
||||
|
||||
|
||||
@synchronized(DIR_LOCK)
|
||||
def listdir_full(input_dir: str, recursive: bool = True) -> list[str]:
|
||||
def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
|
||||
"""List all files in dirs and sub-dirs"""
|
||||
filelist = []
|
||||
for root, dirs, files in os.walk(input_dir):
|
||||
@@ -797,7 +797,7 @@ def listdir_full(input_dir: str, recursive: bool = True) -> list[str]:
|
||||
|
||||
|
||||
@synchronized(DIR_LOCK)
|
||||
def move_to_path(path: str, new_path: str) -> tuple[bool, Optional[str]]:
|
||||
def move_to_path(path: str, new_path: str) -> Tuple[bool, Optional[str]]:
|
||||
"""Move a file to a new path, optionally give unique filename
|
||||
Return (ok, new_path)
|
||||
"""
|
||||
@@ -990,7 +990,7 @@ def remove_all(path: str, pattern: str = "*", keep_folder: bool = False, recursi
|
||||
##############################################################################
|
||||
# Diskfree
|
||||
##############################################################################
|
||||
def diskspace_base(dir_to_check: str) -> tuple[float, float]:
|
||||
def diskspace_base(dir_to_check: str) -> Tuple[float, float]:
|
||||
"""Return amount of free and used diskspace in GBytes"""
|
||||
# Find first folder level that exists in the path
|
||||
x = "x"
|
||||
@@ -1024,7 +1024,7 @@ def diskspace_base(dir_to_check: str) -> tuple[float, float]:
|
||||
|
||||
|
||||
@conditional_cache(cache_time=10)
|
||||
def diskspace(force: bool = False) -> dict[str, tuple[float, float]]:
|
||||
def diskspace(force: bool = False) -> Dict[str, Tuple[float, float]]:
|
||||
"""Wrapper to keep results cached by conditional_cache
|
||||
If called with force=True, the wrapper will clear the results"""
|
||||
return {
|
||||
@@ -1033,7 +1033,7 @@ def diskspace(force: bool = False) -> dict[str, tuple[float, float]]:
|
||||
}
|
||||
|
||||
|
||||
def get_new_id(prefix: str, folder: str, check_list: Optional[list] = None) -> str:
|
||||
def get_new_id(prefix, folder, check_list=None):
|
||||
"""Return unique prefixed admin identifier within folder
|
||||
optionally making sure that id is not in the check_list.
|
||||
"""
|
||||
@@ -1054,7 +1054,7 @@ def get_new_id(prefix: str, folder: str, check_list: Optional[list] = None) -> s
|
||||
raise IOError
|
||||
|
||||
|
||||
def save_data(data: Any, _id: str, path: str, do_pickle: bool = True, silent: bool = False):
|
||||
def save_data(data, _id, path, do_pickle=True, silent=False):
|
||||
"""Save data to a diskfile"""
|
||||
if not silent:
|
||||
logging.debug("[%s] Saving data for %s in %s", sabnzbd.misc.caller_name(), _id, path)
|
||||
@@ -1081,7 +1081,7 @@ def save_data(data: Any, _id: str, path: str, do_pickle: bool = True, silent: bo
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
def load_data(data_id: str, path: str, remove: bool = True, do_pickle: bool = True, silent: bool = False) -> Any:
|
||||
def load_data(data_id, path, remove=True, do_pickle=True, silent=False):
|
||||
"""Read data from disk file"""
|
||||
path = os.path.join(path, data_id)
|
||||
|
||||
@@ -1129,7 +1129,7 @@ def save_admin(data: Any, data_id: str):
|
||||
save_data(data, data_id, sabnzbd.cfg.admin_dir.get_path())
|
||||
|
||||
|
||||
def load_admin(data_id: str, remove: bool = False, silent: bool = False) -> Any:
|
||||
def load_admin(data_id: str, remove=False, silent=False) -> Any:
|
||||
"""Read data in admin folder in specified format"""
|
||||
logging.debug("[%s] Loading data for %s", sabnzbd.misc.caller_name(), data_id)
|
||||
return load_data(data_id, sabnzbd.cfg.admin_dir.get_path(), remove=remove, silent=silent)
|
||||
@@ -1196,7 +1196,7 @@ def purge_log_files():
|
||||
logging.debug("Finished puring log files")
|
||||
|
||||
|
||||
def directory_is_writable_with_file(mydir: str, myfilename: str) -> bool:
|
||||
def directory_is_writable_with_file(mydir, myfilename):
|
||||
filename = os.path.join(mydir, myfilename)
|
||||
if os.path.exists(filename):
|
||||
try:
|
||||
@@ -1253,7 +1253,7 @@ def check_filesystem_capabilities(test_dir: str) -> bool:
|
||||
return allgood
|
||||
|
||||
|
||||
def get_win_drives() -> list[str]:
|
||||
def get_win_drives() -> List[str]:
|
||||
"""Return list of detected drives, adapted from:
|
||||
http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490
|
||||
"""
|
||||
@@ -1281,7 +1281,7 @@ PATHBROWSER_JUNKFOLDERS = (
|
||||
)
|
||||
|
||||
|
||||
def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False) -> list[dict[str, str]]:
|
||||
def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False) -> List[Dict[str, str]]:
|
||||
"""Returns a list of dictionaries with the folders and folders contained at the given path
|
||||
Give the empty string as the path to list the contents of the root path
|
||||
under Unix this means "/", on Windows this will be a list of drive letters
|
||||
@@ -1367,54 +1367,3 @@ def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False)
|
||||
)
|
||||
|
||||
return file_list
|
||||
|
||||
|
||||
def create_work_name(name: str) -> str:
|
||||
"""Remove ".nzb" and ".par(2)" and sanitize, skip URL's"""
|
||||
if name.find("://") < 0:
|
||||
# Invalid characters need to be removed before and after (see unit-tests)
|
||||
return sanitize_foldername(strip_extensions(sanitize_foldername(name)))
|
||||
else:
|
||||
return name.strip()
|
||||
|
||||
|
||||
def nzf_cmp_name(nzf1, nzf2):
|
||||
"""Comparison function for sorting NZB files.
|
||||
The comparison will sort .par2 files to the top of the queue followed by .rar files,
|
||||
they will then be sorted by name.
|
||||
|
||||
Note: nzf1 and nzf2 should be NzbFile objects, but we can't import that here
|
||||
to avoid circular dependencies.
|
||||
"""
|
||||
nzf1_name = nzf1.filename.lower()
|
||||
nzf2_name = nzf2.filename.lower()
|
||||
|
||||
# Determine vol-pars
|
||||
is_par1 = ".vol" in nzf1_name and ".par2" in nzf1_name
|
||||
is_par2 = ".vol" in nzf2_name and ".par2" in nzf2_name
|
||||
|
||||
# mini-par2 in front
|
||||
if not is_par1 and nzf1_name.endswith(".par2"):
|
||||
return -1
|
||||
if not is_par2 and nzf2_name.endswith(".par2"):
|
||||
return 1
|
||||
|
||||
# vol-pars go to the back
|
||||
if is_par1 and not is_par2:
|
||||
return 1
|
||||
if is_par2 and not is_par1:
|
||||
return -1
|
||||
|
||||
# Prioritize .rar files above any other type of file (other than vol-par)
|
||||
m1 = RAR_RE.search(nzf1_name)
|
||||
m2 = RAR_RE.search(nzf2_name)
|
||||
if m1 and not (is_par2 or m2):
|
||||
return -1
|
||||
elif m2 and not (is_par1 or m1):
|
||||
return 1
|
||||
# Force .rar to come before 'r00'
|
||||
if m1 and m1.group(1) == ".rar":
|
||||
nzf1_name = nzf1_name.replace(".rar", ".r//")
|
||||
if m2 and m2.group(1) == ".rar":
|
||||
nzf2_name = nzf2_name.replace(".rar", ".r//")
|
||||
return sabnzbd.misc.cmp(nzf1_name, nzf2_name)
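nzf_cmp_name() is an old-style two-argument comparator, so callers sort with it through functools.cmp_to_key(). A hedged usage sketch: the import path is assumed from its placement in this diff, and SimpleNamespace objects stand in for NzbFile:

import functools
from types import SimpleNamespace

from sabnzbd.filesystem import nzf_cmp_name  # assumed location, per this hunk

# Minimal stand-ins: the comparator only looks at .filename
files = [
    SimpleNamespace(filename="show.vol01+02.par2"),
    SimpleNamespace(filename="show.part2.rar"),
    SimpleNamespace(filename="show.par2"),
    SimpleNamespace(filename="show.nfo"),
]

files.sort(key=functools.cmp_to_key(nzf_cmp_name))
# Per the rules above: the small .par2 sorts to the front, vol-pars to the back
print([f.filename for f in files])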
|
||||
|
||||
@@ -23,9 +23,10 @@ import socket
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
import functools
|
||||
from dataclasses import dataclass
|
||||
from more_itertools import roundrobin
|
||||
from typing import Union, Optional
|
||||
from typing import Tuple, Union, Optional
|
||||
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT
|
||||
@@ -60,7 +61,7 @@ class AddrInfo:
|
||||
type: socket.SocketKind
|
||||
proto: int
|
||||
canonname: str
|
||||
sockaddr: Union[tuple[str, int], tuple[str, int, int, int]]
|
||||
sockaddr: Union[Tuple[str, int], Tuple[str, int, int, int]]
|
||||
ipaddress: str = ""
|
||||
port: int = 0
|
||||
connection_time: float = 0.0
|
||||
|
||||
@@ -34,7 +34,7 @@ import copy
|
||||
from random import randint
|
||||
from xml.sax.saxutils import escape
|
||||
from Cheetah.Template import Template
|
||||
from typing import Optional, Callable, Union, Any
|
||||
from typing import Optional, Callable, Union, Any, Dict, List
|
||||
from guessit.api import properties as guessit_properties
|
||||
|
||||
import sabnzbd
|
||||
@@ -264,7 +264,7 @@ def check_hostname():
|
||||
COOKIE_SECRET = str(randint(1000, 100000) * os.getpid())
|
||||
|
||||
|
||||
def remote_ip_from_xff(xff_ips: list[str]) -> str:
|
||||
def remote_ip_from_xff(xff_ips: List[str]) -> str:
|
||||
# Per MDN docs, the first non-local/non-trusted IP (reading right to left) is our "client"
|
||||
# However, it's possible that all IPs are local/trusted, so we may also
|
||||
# return the first ip in the list as it "should" be the client
|
||||
@@ -399,7 +399,7 @@ def check_apikey(kwargs):
|
||||
return _MSG_APIKEY_INCORRECT
|
||||
|
||||
|
||||
def template_filtered_response(file: str, search_list: dict[str, Any]):
|
||||
def template_filtered_response(file: str, search_list: Dict[str, Any]):
|
||||
"""Wrapper for Cheetah response"""
|
||||
# We need a copy, because otherwise source-dicts might be modified
|
||||
search_list_copy = copy.deepcopy(search_list)
|
||||
@@ -558,7 +558,7 @@ class Wizard:
|
||||
info["password"] = ""
|
||||
info["connections"] = ""
|
||||
info["ssl"] = 1
|
||||
info["ssl_verify"] = 3
|
||||
info["ssl_verify"] = 2
|
||||
else:
|
||||
# Sort servers to get the first enabled one
|
||||
server_names = sorted(
|
||||
@@ -906,7 +906,6 @@ SPECIAL_VALUE_LIST = (
|
||||
"max_foldername_length",
|
||||
"url_base",
|
||||
"receive_threads",
|
||||
"assembler_max_queue_size",
|
||||
"switchinterval",
|
||||
"direct_unpack_threads",
|
||||
"selftest_host",
|
||||
|
||||
@@ -26,6 +26,7 @@ import socket
|
||||
import ssl
|
||||
import time
|
||||
import threading
|
||||
from typing import Dict
|
||||
|
||||
import sabctools
|
||||
import sabnzbd
|
||||
@@ -43,7 +44,7 @@ NR_CONNECTIONS = 5
|
||||
TIME_LIMIT = 3
|
||||
|
||||
|
||||
def internetspeed_worker(secure_sock: ssl.SSLSocket, socket_speed: dict[ssl.SSLSocket, float]):
|
||||
def internetspeed_worker(secure_sock: ssl.SSLSocket, socket_speed: Dict[ssl.SSLSocket, float]):
|
||||
"""Worker to perform the requests in parallel"""
|
||||
secure_sock.sendall(TEST_REQUEST.encode())
|
||||
empty_buffer = memoryview(sabctools.bytearray_malloc(BUFFER_SIZE))
|
||||
|
||||
144
sabnzbd/misc.py
@@ -41,7 +41,7 @@ import math
|
||||
import rarfile
|
||||
from threading import Thread
|
||||
from collections.abc import Iterable
|
||||
from typing import Union, Any, AnyStr, Optional, Collection
|
||||
from typing import Union, Tuple, Any, AnyStr, Optional, List, Dict, Collection
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.getipaddress
|
||||
@@ -57,7 +57,7 @@ import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.decorators import conditional_cache
|
||||
from sabnzbd.encoding import ubtou, platform_btou
|
||||
from sabnzbd.filesystem import userxbit, make_script_path, remove_file, strip_extensions
|
||||
from sabnzbd.filesystem import userxbit, make_script_path, remove_file
|
||||
|
||||
if sabnzbd.WINDOWS:
|
||||
try:
|
||||
@@ -85,10 +85,6 @@ RE_SAMPLE = re.compile(r"((^|[\W_])(sample|proof))", re.I) # something-sample o
|
||||
RE_IP4 = re.compile(r"inet\s+(addr:\s*)?(\d+\.\d+\.\d+\.\d+)")
|
||||
RE_IP6 = re.compile(r"inet6\s+(addr:\s*)?([0-9a-f:]+)", re.I)
|
||||
|
||||
# Name patterns for NZB parsing
|
||||
RE_SUBJECT_FILENAME_QUOTES = re.compile(r'"([^"]*)"')
|
||||
RE_SUBJECT_BASIC_FILENAME = re.compile(r"\b([\w\-+()' .,]+(?:\[[\w\-/+()' .,]*][\w\-+()' .,]*)*\.[A-Za-z0-9]{2,4})\b")
|
||||
|
||||
# Check if strings are defined for AM and PM
|
||||
HAVE_AMPM = bool(time.strftime("%p"))
|
||||
|
||||
@@ -182,7 +178,7 @@ def is_none(inp: Any) -> bool:
|
||||
return not inp or (isinstance(inp, str) and inp.lower() == "none")
|
||||
|
||||
|
||||
def clean_comma_separated_list(inp: Any) -> list[str]:
|
||||
def clean_comma_separated_list(inp: Any) -> List[str]:
|
||||
"""Return a list of stripped values from a string or list, empty ones removed"""
|
||||
result_ids = []
|
||||
if isinstance(inp, str):
|
||||
@@ -194,7 +190,7 @@ def clean_comma_separated_list(inp: Any) -> list[str]:
|
||||
return result_ids
|
||||
|
||||
|
||||
def cmp(x: Any, y: Any) -> int:
|
||||
def cmp(x, y):
|
||||
"""
|
||||
Replacement for built-in function cmp that was removed in Python 3
|
||||
|
||||
@@ -221,7 +217,7 @@ def cat_pp_script_sanitizer(
|
||||
cat: Optional[str] = None,
|
||||
pp: Optional[Union[int, str]] = None,
|
||||
script: Optional[str] = None,
|
||||
) -> tuple[Optional[Union[int, str]], Optional[str], Optional[str]]:
|
||||
) -> Tuple[Optional[Union[int, str]], Optional[str], Optional[str]]:
|
||||
"""Basic sanitizer from outside input to a bit more predictable values"""
|
||||
# * and Default are valid values
|
||||
if safe_lower(cat) in ("", "none"):
|
||||
@@ -238,7 +234,7 @@ def cat_pp_script_sanitizer(
|
||||
return cat, pp, script
|
||||
|
||||
|
||||
def name_to_cat(fname: str, cat: Optional[str] = None) -> tuple[str, Optional[str]]:
|
||||
def name_to_cat(fname, cat=None):
|
||||
"""Retrieve category from file name, but only if "cat" is None."""
|
||||
if cat is None and fname.startswith("{{"):
|
||||
n = fname.find("}}")
|
||||
@@ -250,9 +246,7 @@ def name_to_cat(fname: str, cat: Optional[str] = None) -> tuple[str, Optional[st
|
||||
return fname, cat
|
||||
|
||||
|
||||
def cat_to_opts(
|
||||
cat: Optional[str], pp: Optional[int] = None, script: Optional[str] = None, priority: Optional[int] = None
|
||||
) -> tuple[str, int, str, int]:
|
||||
def cat_to_opts(cat, pp=None, script=None, priority=None) -> Tuple[str, int, str, int]:
|
||||
"""Derive options from category, if options not already defined.
|
||||
Specified options have priority over category-options.
|
||||
If no valid category is given, special category '*' will supply default values
|
||||
@@ -285,7 +279,7 @@ def cat_to_opts(
|
||||
return cat, pp, script, priority
|
||||
|
||||
|
||||
def pp_to_opts(pp: Optional[int]) -> tuple[bool, bool, bool]:
|
||||
def pp_to_opts(pp: Optional[int]) -> Tuple[bool, bool, bool]:
|
||||
"""Convert numeric processing options to (repair, unpack, delete)"""
|
||||
# Convert the pp to an int
|
||||
pp = int_conv(pp)
|
||||
@@ -337,12 +331,12 @@ _wildcard_to_regex = {
|
||||
}
|
||||
|
||||
|
||||
def wildcard_to_re(text: str) -> str:
|
||||
def wildcard_to_re(text):
|
||||
"""Convert plain wildcard string (with '*' and '?') to regex."""
|
||||
return "".join([_wildcard_to_regex.get(ch, ch) for ch in text])
|
||||
|
||||
|
||||
def convert_filter(text: str) -> Optional[re.Pattern]:
|
||||
def convert_filter(text):
|
||||
"""Return compiled regex.
|
||||
If string starts with re: it's a real regex
|
||||
else quote all regex specials, replace '*' by '.*'
|
||||
@@ -359,7 +353,7 @@ def convert_filter(text: str) -> Optional[re.Pattern]:
|
||||
return None
|
||||
|
||||
|
||||
def cat_convert(cat: Optional[str]) -> Optional[str]:
|
||||
def cat_convert(cat):
|
||||
"""Convert indexer's category/group-name to user categories.
|
||||
If no match found, but indexer-cat equals user-cat, then return user-cat
|
||||
If no match found, but the indexer-cat starts with the user-cat, return user-cat
|
||||
@@ -403,7 +397,7 @@ _SERVICE_KEY = "SYSTEM\\CurrentControlSet\\services\\"
|
||||
_SERVICE_PARM = "CommandLine"
|
||||
|
||||
|
||||
def get_serv_parms(service: str) -> list[str]:
|
||||
def get_serv_parms(service):
|
||||
"""Get the service command line parameters from Registry"""
|
||||
service_parms = []
|
||||
try:
|
||||
@@ -422,7 +416,7 @@ def get_serv_parms(service: str) -> list[str]:
|
||||
return service_parms
|
||||
|
||||
|
||||
def set_serv_parms(service: str, args: list) -> bool:
|
||||
def set_serv_parms(service, args):
|
||||
"""Set the service command line parameters in Registry"""
|
||||
serv = []
|
||||
for arg in args:
|
||||
@@ -450,7 +444,7 @@ def get_from_url(url: str) -> Optional[str]:
|
||||
return None
|
||||
|
||||
|
||||
def convert_version(text: str) -> tuple[int, bool]:
|
||||
def convert_version(text):
|
||||
"""Convert version string to numerical value and a testversion indicator"""
|
||||
version = 0
|
||||
test = True
|
||||
@@ -557,7 +551,7 @@ def check_latest_version():
|
||||
)
|
||||
|
||||
|
||||
def upload_file_to_sabnzbd(url: str, fp: str):
|
||||
def upload_file_to_sabnzbd(url, fp):
|
||||
"""Function for uploading nzbs to a running SABnzbd instance"""
|
||||
try:
|
||||
fp = urllib.parse.quote_plus(fp)
|
||||
@@ -650,7 +644,7 @@ def to_units(val: Union[int, float], postfix="") -> str:
|
||||
return f"{sign}{val:.{decimals}f}{units}"
|
||||
|
||||
|
||||
def caller_name(skip: int = 2) -> str:
|
||||
def caller_name(skip=2):
|
||||
"""Get a name of a caller in the format module.method
|
||||
Originally used: https://gist.github.com/techtonik/2151727
|
||||
Adapted for speed by using sys calls directly
|
||||
@@ -688,7 +682,7 @@ def exit_sab(value: int):
|
||||
os._exit(value)
|
||||
|
||||
|
||||
def split_host(srv: Optional[str]) -> tuple[Optional[str], Optional[int]]:
|
||||
def split_host(srv):
|
||||
"""Split host:port notation, allowing for IPV6"""
|
||||
if not srv:
|
||||
return None, None
|
||||
@@ -710,7 +704,7 @@ def split_host(srv: Optional[str]) -> tuple[Optional[str], Optional[int]]:
|
||||
return out[0], port
|
||||
|
||||
|
||||
def get_cache_limit() -> str:
|
||||
def get_cache_limit():
|
||||
"""Depending on OS, calculate cache limits.
|
||||
In ArticleCache it will make sure we stay
|
||||
within system limits for 32/64 bit
|
||||
@@ -748,7 +742,7 @@ def get_cache_limit() -> str:
|
||||
return ""
|
||||
|
||||
|
||||
def get_windows_memory() -> int:
|
||||
def get_windows_memory():
|
||||
"""Use ctypes to extract available memory"""
|
||||
|
||||
class MEMORYSTATUSEX(ctypes.Structure):
|
||||
@@ -774,14 +768,14 @@ def get_windows_memory() -> int:
|
||||
return stat.ullTotalPhys
|
||||
|
||||
|
||||
def get_macos_memory() -> float:
|
||||
def get_macos_memory():
|
||||
"""Use system-call to extract total memory on macOS"""
|
||||
system_output = run_command(["sysctl", "hw.memsize"])
|
||||
return float(system_output.split()[1])
|
||||
|
||||
|
||||
@conditional_cache(cache_time=3600)
|
||||
def get_cpu_name() -> Optional[str]:
|
||||
def get_cpu_name():
|
||||
"""Find the CPU name (which needs a different method per OS), and return it
|
||||
If none found, return platform.platform()"""
|
||||
|
||||
@@ -881,7 +875,7 @@ def on_cleanup_list(filename: str, skip_nzb: bool = False) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def memory_usage() -> Optional[str]:
|
||||
def memory_usage():
|
||||
try:
|
||||
# Probably only works on Linux because it uses /proc/<pid>/statm
|
||||
with open("/proc/%d/statm" % os.getpid()) as t:
|
||||
@@ -903,7 +897,7 @@ except Exception:
|
||||
_HAVE_STATM = _PAGE_SIZE and memory_usage()
|
||||
|
||||
|
||||
def loadavg() -> str:
|
||||
def loadavg():
|
||||
"""Return 1, 5 and 15 minute load average of host or "" if not supported"""
|
||||
p = ""
|
||||
if not sabnzbd.WINDOWS and not sabnzbd.MACOS:
|
||||
@@ -978,7 +972,7 @@ def bool_conv(value: Any) -> bool:
|
||||
return bool(int_conv(value))
|
||||
|
||||
|
||||
def create_https_certificates(ssl_cert: str, ssl_key: str) -> bool:
|
||||
def create_https_certificates(ssl_cert, ssl_key):
|
||||
"""Create self-signed HTTPS certificates and store in paths 'ssl_cert' and 'ssl_key'"""
|
||||
try:
|
||||
from sabnzbd.utils.certgen import generate_key, generate_local_cert
|
||||
@@ -994,7 +988,7 @@ def create_https_certificates(ssl_cert: str, ssl_key: str) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def get_all_passwords(nzo) -> list[str]:
|
||||
def get_all_passwords(nzo) -> List[str]:
|
||||
"""Get all passwords, from the NZB, meta and password file. In case a working password is
|
||||
already known, try it first."""
|
||||
passwords = []
|
||||
@@ -1057,7 +1051,7 @@ def is_sample(filename: str) -> bool:
|
||||
return bool(re.search(RE_SAMPLE, filename))
|
||||
|
||||
|
||||
def find_on_path(targets: Union[str, tuple[str, ...]]) -> Optional[str]:
|
||||
def find_on_path(targets):
|
||||
"""Search the PATH for a program and return full path"""
|
||||
if sabnzbd.WINDOWS:
|
||||
paths = os.getenv("PATH").split(";")
|
||||
@@ -1176,7 +1170,7 @@ def is_local_addr(ip: str) -> bool:
|
||||
return is_lan_addr(ip)
|
||||
|
||||
|
||||
def ip_extract() -> list[str]:
|
||||
def ip_extract() -> List[str]:
|
||||
"""Return list of IP addresses of this system"""
|
||||
ips = []
|
||||
program = find_on_path("ip")
|
||||
@@ -1221,7 +1215,7 @@ def get_base_url(url: str) -> str:
|
||||
return ""
|
||||
|
||||
|
||||
def match_str(text: AnyStr, matches: tuple[AnyStr, ...]) -> Optional[AnyStr]:
|
||||
def match_str(text: AnyStr, matches: Tuple[AnyStr, ...]) -> Optional[AnyStr]:
|
||||
"""Return first matching element of list 'matches' in 'text', otherwise None"""
|
||||
text = text.lower()
|
||||
for match in matches:
|
||||
@@ -1230,7 +1224,7 @@ def match_str(text: AnyStr, matches: tuple[AnyStr, ...]) -> Optional[AnyStr]:
|
||||
return None
|
||||
|
||||
|
||||
def recursive_html_escape(input_dict_or_list: Union[dict[str, Any], list], exclude_items: tuple[str, ...] = ()):
|
||||
def recursive_html_escape(input_dict_or_list: Union[Dict[str, Any], List], exclude_items: Tuple[str, ...] = ()):
|
||||
"""Recursively update the input_dict in-place with html-safe values"""
|
||||
if isinstance(input_dict_or_list, (dict, list)):
|
||||
if isinstance(input_dict_or_list, dict):
|
||||
@@ -1251,7 +1245,7 @@ def recursive_html_escape(input_dict_or_list: Union[dict[str, Any], list], exclu
|
||||
raise ValueError("Expected dict or str, got %s" % type(input_dict_or_list))
|
||||
|
||||
|
||||
def list2cmdline_unrar(lst: list[str]) -> str:
|
||||
def list2cmdline_unrar(lst: List[str]) -> str:
|
||||
"""convert list to a unrar.exe-compatible command string
|
||||
Unrar uses "" instead of \" to escape the double quote"""
|
||||
nlst = []
|
||||
@@ -1265,9 +1259,7 @@ def list2cmdline_unrar(lst: list[str]) -> str:
|
||||
return " ".join(nlst)
|
||||
|
||||
|
||||
def build_and_run_command(
|
||||
command: list[str], windows_unrar_command: bool = False, text_mode: bool = True, **kwargs
|
||||
) -> subprocess.Popen:
|
||||
def build_and_run_command(command: List[str], windows_unrar_command: bool = False, text_mode: bool = True, **kwargs):
|
||||
"""Builds and then runs command with necessary flags and optional
|
||||
IONice and Nice commands. Optional Popen arguments can be supplied.
|
||||
On Windows we need to run our own list2cmdline for Unrar.
|
||||
@@ -1334,7 +1326,7 @@ def build_and_run_command(
|
||||
return subprocess.Popen(command, **popen_kwargs)
|
||||
|
||||
|
||||
def run_command(cmd: list[str], **kwargs) -> str:
|
||||
def run_command(cmd: List[str], **kwargs):
|
||||
"""Run simple external command and return output as a string."""
|
||||
with build_and_run_command(cmd, **kwargs) as p:
|
||||
txt = p.stdout.read()
|
||||
@@ -1367,7 +1359,7 @@ def set_socks5_proxy():
|
||||
socket.socket = socks.socksocket
|
||||
|
||||
|
||||
def set_https_verification(value: bool) -> bool:
|
||||
def set_https_verification(value):
|
||||
"""Set HTTPS-verification state while returning current setting
|
||||
False = disable verification
|
||||
"""
|
||||
@@ -1389,7 +1381,7 @@ def request_repair():
|
||||
pass
|
||||
|
||||
|
||||
def check_repair_request() -> bool:
|
||||
def check_repair_request():
|
||||
"""Return True if repair request found, remove afterwards"""
|
||||
path = os.path.join(cfg.admin_dir.get_path(), REPAIR_REQUEST)
|
||||
if os.path.exists(path):
|
||||
@@ -1522,8 +1514,8 @@ def convert_sorter_settings():
|
||||
min_size: Union[str|int] = "50M"
|
||||
multipart_label: Optional[str] = ""
|
||||
sort_string: str
|
||||
sort_cats: list[str]
|
||||
sort_type: list[int]
|
||||
sort_cats: List[str]
|
||||
sort_type: List[int]
|
||||
is_active: bool = 1
|
||||
}
|
||||
|
||||
@@ -1583,7 +1575,7 @@ def convert_sorter_settings():
|
||||
def convert_history_retention():
|
||||
"""Convert single-option to the split history retention setting"""
|
||||
if "d" in cfg.history_retention():
|
||||
days_to_keep = int_conv(cfg.history_retention().strip().removesuffix("d"))
|
||||
days_to_keep = int_conv(cfg.history_retention().strip()[:-1])
|
||||
cfg.history_retention_option.set("days-delete")
|
||||
cfg.history_retention_number.set(days_to_keep)
|
||||
else:
|
||||
@@ -1595,66 +1587,6 @@ def convert_history_retention():
|
||||
cfg.history_retention_option.set("all-delete")
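The two ways of dropping the trailing "d" above behave identically inside this branch (which is only reached when the value contains a "d"); removesuffix() is simply the self-describing form and needs Python 3.9+:

assert "30d".removesuffix("d") == "30"  # strips only a matching suffix (3.9+)
assert "30d"[:-1] == "30"               # slicing always drops the last character
assert "30".removesuffix("d") == "30"   # without the "d" it is left intact...
assert "30"[:-1] == "3"                 # ...while [:-1] would silently eat a digit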
|
||||
|
||||
|
||||
def scan_password(name: str) -> tuple[str, Optional[str]]:
|
||||
"""Get password (if any) from the title"""
|
||||
if "http://" in name or "https://" in name:
|
||||
return name, None
|
||||
|
||||
# Strip any unwanted usenet-related extensions
|
||||
name = strip_extensions(name)
|
||||
|
||||
# Identify any braces
|
||||
braces = name[1:].find("{{")
|
||||
if braces < 0:
|
||||
braces = len(name)
|
||||
else:
|
||||
braces += 1
|
||||
slash = name.find("/")
|
||||
|
||||
# Look for name/password, but make sure that '/' comes before any {{
|
||||
if 0 < slash < braces and "password=" not in name:
|
||||
# Is it maybe in 'name / password' notation?
|
||||
if slash == name.find(" / ") + 1 and name[: slash - 1].strip(". "):
|
||||
# Remove the extra space after name and before password
|
||||
return name[: slash - 1].strip(". "), name[slash + 2 :]
|
||||
if name[:slash].strip(". "):
|
||||
return name[:slash].strip(". "), name[slash + 1 :]
|
||||
|
||||
# Look for "name password=password"
|
||||
pw = name.find("password=")
|
||||
if pw > 0 and name[:pw].strip(". "):
|
||||
return name[:pw].strip(". "), name[pw + 9 :]
|
||||
|
||||
# Look for name{{password}}
|
||||
if braces < len(name):
|
||||
closing_braces = name.rfind("}}")
|
||||
if closing_braces > braces and name[:braces].strip(". "):
|
||||
return name[:braces].strip(". "), name[braces + 2 : closing_braces]
|
||||
|
||||
# Look again for name/password
|
||||
if slash > 0 and name[:slash].strip(". "):
|
||||
return name[:slash].strip(". "), name[slash + 1 :]
|
||||
|
||||
# No password found
|
||||
return name, None
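Behaviour of scan_password() as written above, shown as a few worked inputs; the import path is assumed, and none of these names carry extensions, so strip_extensions() leaves them untouched:

from sabnzbd.misc import scan_password  # assumed location

assert scan_password("My Show / secret") == ("My Show", "secret")
assert scan_password("My Show/secret") == ("My Show", "secret")
assert scan_password("My Show{{secret}}") == ("My Show", "secret")
assert scan_password("My Show password=secret") == ("My Show", "secret")
# URLs are passed through untouched, with no password
assert scan_password("https://example.com/file.nzb") == ("https://example.com/file.nzb", None)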
|
||||
|
||||
|
||||
def subject_name_extractor(subject: str) -> str:
|
||||
"""Try to extract a file name from a subject line, return `subject` if in doubt"""
|
||||
# Filename nicely wrapped in quotes
|
||||
for name in re.findall(RE_SUBJECT_FILENAME_QUOTES, subject):
|
||||
if name := name.strip(' "'):
|
||||
return name
|
||||
|
||||
# Found nothing? Try a basic filename-like search
|
||||
for name in re.findall(RE_SUBJECT_BASIC_FILENAME, subject):
|
||||
if name := name.strip():
|
||||
return name
|
||||
|
||||
# Return the subject
|
||||
return subject
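Worked examples for subject_name_extractor() as written above (import path assumed): a quoted name wins, otherwise the basic filename-like pattern is tried, and the raw subject is the fallback:

from sabnzbd.misc import subject_name_extractor  # assumed location

# Quoted filename takes precedence
assert subject_name_extractor('Some post "my.file.r01" yEnc (1/10)') == "my.file.r01"
# No quotes: the basic filename-like pattern is used
assert subject_name_extractor("my.file.part01.rar yEnc (1/10)") == "my.file.part01.rar"
# Nothing filename-like at all: the subject itself is returned
assert subject_name_extractor("no filename here") == "no filename here"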
|
||||
|
||||
|
||||
##
|
||||
## SABnzbd patched rarfile classes
|
||||
## Patch for https://github.com/markokr/rarfile/issues/56#issuecomment-711146569
|
||||
@@ -1683,7 +1615,7 @@ class SABRarFile(rarfile.RarFile):
|
||||
self._file_parser._info_list.append(rar_obj)
|
||||
self._file_parser._info_map[rar_obj.filename.rstrip("/")] = rar_obj
|
||||
|
||||
def filelist(self) -> list[str]:
|
||||
def filelist(self):
|
||||
"""Return list of filenames in archive."""
|
||||
return [f.filename for f in self.infolist() if not f.isdir()]
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ import io
|
||||
import shutil
|
||||
import functools
|
||||
import rarfile
|
||||
from typing import BinaryIO, Optional, Any, Union
|
||||
from typing import Tuple, List, BinaryIO, Optional, Dict, Any, Union, Set
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.encoding import correct_unknown_encoding, ubtou
|
||||
@@ -64,9 +64,8 @@ from sabnzbd.filesystem import (
|
||||
SEVENMULTI_RE,
|
||||
is_size,
|
||||
get_basename,
|
||||
create_all_dirs,
|
||||
)
|
||||
from sabnzbd.nzb import NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import Status
|
||||
|
||||
@@ -118,14 +117,7 @@ def find_programs(curdir: str):
|
||||
sabnzbd.newsunpack.SEVENZIP_COMMAND = check(curdir, "macos/7zip/7zz")
|
||||
|
||||
if sabnzbd.WINDOWS:
|
||||
if sabnzbd.WINDOWSARM64:
|
||||
# ARM64 version of par2
|
||||
sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, "win/par2/arm64/par2.exe")
|
||||
else:
|
||||
# Regular x64 version
|
||||
sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, "win/par2/par2.exe")
|
||||
|
||||
# UnRAR has no arm64 version, so we skip it also for 7zip
|
||||
sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, "win/par2/par2.exe")
|
||||
sabnzbd.newsunpack.RAR_COMMAND = check(curdir, "win/unrar/UnRAR.exe")
|
||||
sabnzbd.newsunpack.SEVENZIP_COMMAND = check(curdir, "win/7zip/7za.exe")
|
||||
else:
|
||||
@@ -208,7 +200,7 @@ ENV_NZO_FIELDS = [
|
||||
|
||||
def external_processing(
|
||||
extern_proc: str, nzo: NzbObject, complete_dir: str, nicename: str, status: int
|
||||
) -> tuple[str, int]:
|
||||
) -> Tuple[str, int]:
|
||||
"""Run a user postproc script, return console output and exit value"""
|
||||
failure_url = nzo.nzo_info.get("failure", "")
|
||||
# Items can be bool or null, causing POpen to fail
|
||||
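The comment above points at a subprocess detail: environment values passed to Popen must be strings, so booleans or None coming from the job's nzo_info have to be converted first. A rough sketch of that kind of sanitising (illustrative only, not the exact code used by external_processing; the SAB_* names are just example keys):

def sanitize_env(fields: dict) -> dict:
    """Convert every value to str so subprocess.Popen(..., env=...) accepts the mapping."""
    return {key: "" if value is None else str(value) for key, value in fields.items()}

env = sanitize_env({"SAB_FAILURE_URL": None, "SAB_DUPLICATE": False, "SAB_CAT": "tv"})
# All values are now plain strings and safe to pass to Popen as the environment
assert env == {"SAB_FAILURE_URL": "", "SAB_DUPLICATE": "False", "SAB_CAT": "tv"}
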
@@ -270,12 +262,12 @@ def unpacker(
|
||||
nzo: NzbObject,
|
||||
workdir_complete: str,
|
||||
one_folder: bool,
|
||||
joinables: list[str] = [],
|
||||
rars: list[str] = [],
|
||||
sevens: list[str] = [],
|
||||
ts: list[str] = [],
|
||||
joinables: List[str] = [],
|
||||
rars: List[str] = [],
|
||||
sevens: List[str] = [],
|
||||
ts: List[str] = [],
|
||||
depth: int = 0,
|
||||
) -> tuple[Union[int, bool], list[str]]:
|
||||
) -> Tuple[Union[int, bool], List[str]]:
|
||||
"""Do a recursive unpack from all archives in 'download_path' to 'workdir_complete'"""
|
||||
if depth > 2:
|
||||
# Prevent going too deep down the rabbit-hole
|
||||
@@ -367,7 +359,7 @@ def unpacker(
|
||||
##############################################################################
|
||||
# Filejoin Functions
|
||||
##############################################################################
|
||||
def match_ts(file: str) -> tuple[str, int]:
|
||||
def match_ts(file: str) -> Tuple[str, int]:
|
||||
"""Return True if file is a joinable TS file"""
|
||||
match = TS_RE.search(file)
|
||||
if not match:
|
||||
@@ -382,7 +374,7 @@ def match_ts(file: str) -> tuple[str, int]:
|
||||
return setname, num
|
||||
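TS_RE itself is defined elsewhere in the module and is not shown here. As a rough illustration, a pattern of the following shape would split a numbered .ts segment into the set name and sequence number returned above; the actual expression may differ.

import re

TS_RE_EXAMPLE = re.compile(r"\.(\d+)\.ts$", re.IGNORECASE)  # assumed shape, not the real TS_RE

def match_ts_example(file: str):
    """Return (setname, number) for a numbered .ts segment, or ('', 0) otherwise."""
    match = TS_RE_EXAMPLE.search(file)
    if not match:
        return "", 0
    return file[: match.start()], int(match.group(1))

assert match_ts_example("movie.0001.ts") == ("movie", 1)
assert match_ts_example("readme.txt") == ("", 0)
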
|
||||
|
||||
def clean_up_joinables(names: list[str]):
|
||||
def clean_up_joinables(names: List[str]):
|
||||
"""Remove joinable files and their .1 backups"""
|
||||
for name in names:
|
||||
if os.path.exists(name):
|
||||
@@ -411,7 +403,7 @@ def get_seq_number(name: str) -> int:
|
||||
return 0
|
||||
|
||||
|
||||
def file_join(nzo: NzbObject, workdir_complete: str, joinables: list[str]) -> tuple[bool, list[str]]:
|
||||
def file_join(nzo: NzbObject, workdir_complete: str, joinables: List[str]) -> Tuple[bool, List[str]]:
|
||||
"""Join and joinable files in 'workdir' to 'workdir_complete' and
|
||||
when successful, delete originals
|
||||
"""
|
||||
@@ -502,7 +494,7 @@ def file_join(nzo: NzbObject, workdir_complete: str, joinables: list[str]) -> tu
|
||||
##############################################################################
|
||||
# (Un)Rar Functions
|
||||
##############################################################################
|
||||
def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: list[str]) -> tuple[int, list[str]]:
|
||||
def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: List[str]) -> Tuple[int, List[str]]:
|
||||
"""Unpack multiple sets 'rars' of RAR files from 'download_path' to 'workdir_complete.
|
||||
When 'delete' is set, originals will be deleted.
|
||||
When 'one_folder' is set, all files will be in a single folder
|
||||
@@ -624,7 +616,7 @@ def rar_unpack(nzo: NzbObject, workdir_complete: str, one_folder: bool, rars: li
|
||||
|
||||
def rar_extract(
|
||||
rarfile_path: str, numrars: int, one_folder: bool, nzo: NzbObject, setname: str, extraction_path: str
|
||||
) -> tuple[int, list[str], list[str]]:
|
||||
) -> Tuple[int, List[str], List[str]]:
|
||||
"""Unpack single rar set 'rarfile' to 'extraction_path',
|
||||
with password tries
|
||||
Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
|
||||
@@ -634,12 +626,6 @@ def rar_extract(
|
||||
rars = []
|
||||
passwords = get_all_passwords(nzo)
|
||||
|
||||
# Sanity check, does the folder exist? Could be removed by aborted Direct Unpack
|
||||
if not os.path.exists(extraction_path):
|
||||
# Similar to prepare_extraction_path
|
||||
extraction_path = create_all_dirs(extraction_path, apply_permissions=True)
|
||||
logging.info("Extraction path (re)created because it was missing: %s", extraction_path)
|
||||
|
||||
for password in passwords:
|
||||
if password:
|
||||
logging.debug('Trying unrar with password "%s"', password)
|
||||
@@ -656,14 +642,14 @@ def rar_extract(
|
||||
|
||||
def rar_extract_core(
|
||||
rarfile_path: str, numrars: int, one_folder: bool, nzo: NzbObject, setname: str, extraction_path: str, password: str
|
||||
) -> tuple[int, list[str], list[str]]:
|
||||
) -> Tuple[int, List[str], List[str]]:
|
||||
"""Unpack single rar set 'rarfile_path' to 'extraction_path'
|
||||
Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
|
||||
"""
|
||||
start = time.time()
|
||||
|
||||
logging.debug("Extraction path: %s", extraction_path)
|
||||
logging.debug("Found rar version: %s", rarfile.get_rar_version(rarfile_path))
|
||||
logging.debug("Found rar version: %s", rarfile.is_rarfile(rarfile_path))
|
||||
|
||||
if password:
|
||||
password_command = "-p%s" % password
|
||||
@@ -880,7 +866,7 @@ def rar_extract_core(
|
||||
##############################################################################
|
||||
# 7Zip Functions
|
||||
##############################################################################
|
||||
def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: list[str]) -> tuple[bool, list[str]]:
|
||||
def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: List[str]):
|
||||
"""Unpack multiple sets '7z' of 7Zip files from 'download_path' to 'workdir_complete.
|
||||
When 'delete' is set, originals will be deleted.
|
||||
"""
|
||||
@@ -928,7 +914,7 @@ def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: lis
|
||||
|
||||
def seven_extract(
|
||||
nzo: NzbObject, seven_path: str, seven_set: str, extraction_path: str, one_folder: bool
|
||||
) -> tuple[int, list[str]]:
|
||||
) -> Tuple[int, List[str]]:
|
||||
"""Unpack single set 'sevenset' to 'extraction_path', with password tries
|
||||
Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, sevens
|
||||
"""
|
||||
@@ -952,7 +938,7 @@ def seven_extract(
|
||||
|
||||
def seven_extract_core(
|
||||
nzo: NzbObject, seven_path: str, extraction_path: str, seven_set: str, one_folder: bool, password: str
|
||||
) -> tuple[int, list[str]]:
|
||||
) -> Tuple[int, List[str]]:
|
||||
"""Unpack single 7Z set 'sevenset' to 'extraction_path'
|
||||
Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, message
|
||||
"""
|
||||
@@ -1018,7 +1004,7 @@ def seven_extract_core(
|
||||
##############################################################################
|
||||
# PAR2 Functions
|
||||
##############################################################################
|
||||
def par2_repair(nzo: NzbObject, setname: str) -> tuple[bool, bool]:
|
||||
def par2_repair(nzo: NzbObject, setname: str) -> Tuple[bool, bool]:
|
||||
"""Try to repair a set, return readd and correctness"""
|
||||
# Check which of the files exists
|
||||
for new_par in nzo.extrapars[setname]:
|
||||
@@ -1131,8 +1117,8 @@ def par2_repair(nzo: NzbObject, setname: str) -> tuple[bool, bool]:
|
||||
|
||||
|
||||
def par2cmdline_verify(
|
||||
parfile: str, nzo: NzbObject, setname: str, joinables: list[str]
|
||||
) -> tuple[bool, bool, list[str], list[str]]:
|
||||
parfile: str, nzo: NzbObject, setname: str, joinables: List[str]
|
||||
) -> Tuple[bool, bool, List[str], List[str]]:
|
||||
"""Run par2 on par-set"""
|
||||
used_joinables = []
|
||||
used_for_repair = []
|
||||
@@ -1417,7 +1403,7 @@ def par2cmdline_verify(
|
||||
return finished, readd, used_joinables, used_for_repair
|
||||
|
||||
|
||||
def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: dict[str, Any] = {}) -> Optional[dict[str, Any]]:
|
||||
def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: Dict[str, Any] = {}) -> Optional[Dict[str, Any]]:
|
||||
"""Modify the environment for pp-scripts with extra information
|
||||
macOS: Return copy of environment without PYTHONPATH and PYTHONHOME
|
||||
other: return None
|
||||
@@ -1474,7 +1460,7 @@ def create_env(nzo: Optional[NzbObject] = None, extra_env_fields: dict[str, Any]
|
||||
return env
|
||||
|
||||
|
||||
def rar_volumelist(rarfile_path: str, password: str, known_volumes: list[str]) -> list[str]:
|
||||
def rar_volumelist(rarfile_path: str, password: str, known_volumes: List[str]) -> List[str]:
|
||||
"""List volumes that are part of this rarset
|
||||
and merge them with parsed paths list, removing duplicates.
|
||||
We assume RarFile is right and use parsed paths as backup.
|
||||
@@ -1530,7 +1516,7 @@ def quick_check_set(setname: str, nzo: NzbObject) -> bool:
|
||||
result = True
|
||||
nzf_list = nzo.finished_files
|
||||
renames = {}
|
||||
found_paths: set[str] = set()
|
||||
found_paths: Set[str] = set()
|
||||
|
||||
# Files to ignore
|
||||
ignore_ext = cfg.quick_check_ext_ignore()
|
||||
@@ -1604,7 +1590,7 @@ def quick_check_set(setname: str, nzo: NzbObject) -> bool:
|
||||
return result
|
||||
|
||||
|
||||
def unrar_check(rar: str) -> tuple[int, bool]:
|
||||
def unrar_check(rar: str) -> Tuple[int, bool]:
|
||||
"""Return version number of unrar, where "5.01" returns 501
|
||||
Also return whether an original version is found
|
||||
(version, original)
|
||||
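As the docstring notes, the textual version reported by unrar (for example "UNRAR 5.01 ...") is folded into an integer such as 501. A minimal sketch of that conversion, assuming the banner has already been captured as a string; the real function runs the binary and additionally checks whether it is an original RarLab build:

import re

def version_to_int(banner: str) -> int:
    """Turn 'UNRAR 5.01 freeware' into 501, or 0 when no version is found."""
    if match := re.search(r"(\d+)\.(\d+)", banner):
        return int(match.group(1)) * 100 + int(match.group(2))
    return 0

assert version_to_int("UNRAR 5.01 freeware      Copyright (c) 1993-2013 Alexander Roshal") == 501
assert version_to_int("no version here") == 0
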
@@ -1692,7 +1678,7 @@ def is_sfv_file(myfile: str) -> bool:
|
||||
return sfv_info_line_counter >= 1
|
||||
|
||||
|
||||
def sfv_check(sfvs: list[str], nzo: NzbObject) -> bool:
|
||||
def sfv_check(sfvs: List[str], nzo: NzbObject) -> bool:
|
||||
"""Verify files using SFV files"""
|
||||
# Update status
|
||||
nzo.status = Status.VERIFYING
|
||||
@@ -1776,7 +1762,7 @@ def sfv_check(sfvs: list[str], nzo: NzbObject) -> bool:
|
||||
return result
|
||||
|
||||
|
||||
def parse_sfv(sfv_filename: str) -> dict[str, bytes]:
|
||||
def parse_sfv(sfv_filename):
|
||||
"""Parse SFV file and return dictionary of crc32's and filenames"""
|
||||
results = {}
|
||||
with open(sfv_filename, mode="rb") as sfv_list:
|
||||
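An SFV file is a plain-text list of "filename CRC32-in-hex" pairs, with ';' starting comment lines. A minimal sketch of a parser along the lines described by the docstring above; the real parse_sfv reads the file in binary mode and deals with filename encoding, which this illustration skips:

def parse_sfv_example(lines):
    """Map each filename to its CRC-32 value from SFV-style lines."""
    results = {}
    for line in lines:
        line = line.strip()
        if not line or line.startswith(";"):
            continue  # Skip blanks and comment lines
        filename, _, crc = line.rpartition(" ")
        if filename and len(crc) == 8:
            results[filename] = int(crc, 16)
    return results

assert parse_sfv_example(["; a comment", "movie.part01.rar 3F2D4C5A"]) == {"movie.part01.rar": 0x3F2D4C5A}
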
@@ -1801,12 +1787,12 @@ def add_time_left(perc: float, start_time: Optional[float] = None, time_used: Op
|
||||
return ""
|
||||
|
||||
|
||||
def pre_queue(nzo: NzbObject, pp: str, cat: str) -> list[Any]:
|
||||
def pre_queue(nzo: NzbObject, pp, cat):
|
||||
"""Run pre-queue script (if any) and process results.
|
||||
pp and cat are supplied separately since they can change.
|
||||
"""
|
||||
|
||||
def fix(p: Any) -> str:
|
||||
def fix(p):
|
||||
# If added via API, some items can still be "None" (as a string)
|
||||
if is_none(p):
|
||||
return ""
|
||||
@@ -1900,7 +1886,7 @@ class SevenZip:
|
||||
if not is_sevenfile(self.path):
|
||||
raise TypeError("File is not a 7zip file")
|
||||
|
||||
def namelist(self) -> list[str]:
|
||||
def namelist(self) -> List[str]:
|
||||
"""Return list of names in 7Zip"""
|
||||
names = []
|
||||
command = [SEVENZIP_COMMAND, "l", "-p", "-y", "-slt", "-sccUTF-8", self.path]
|
||||
@@ -1923,6 +1909,6 @@ class SevenZip:
|
||||
p.wait()
|
||||
return data
|
||||
|
||||
def close(self) -> None:
|
||||
def close(self):
|
||||
"""Close file"""
|
||||
pass
|
||||
|
||||
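The SevenZip.namelist() helper above shells out to 7-Zip with the -slt ("show technical listing") switch and UTF-8 output, whose listing contains "Path = <name>" lines for the entries (the archive's own path also appears near the top of real output and would need skipping in practice). A rough sketch of extracting entry names from such captured text:

def names_from_slt_listing(listing: str) -> list:
    """Collect every 'Path = ...' entry from 7z -slt style output."""
    names = []
    for line in listing.splitlines():
        if line.startswith("Path = "):
            names.append(line[len("Path = "):])
    return names

example = "7-Zip listing\n----------\nPath = folder/file.bin\nSize = 10\nPath = readme.txt\nSize = 3\n"
assert names_from_slt_listing(example) == ["folder/file.bin", "readme.txt"]
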
@@ -21,22 +21,20 @@ sabnzbd.newswrapper
|
||||
|
||||
import errno
|
||||
import socket
|
||||
import threading
|
||||
from collections import deque
|
||||
from selectors import EVENT_READ, EVENT_WRITE
|
||||
from threading import Thread
|
||||
import time
|
||||
import logging
|
||||
import ssl
|
||||
from typing import Optional, Tuple, Union, Callable
|
||||
|
||||
import sabctools
|
||||
from typing import Optional, Tuple, Union
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg
|
||||
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT, NNTP_BUFFER_SIZE, Status, FORCE_PRIORITY
|
||||
from sabnzbd.encoding import utob
|
||||
from sabnzbd.constants import DEF_NETWORKING_TIMEOUT, NNTP_BUFFER_SIZE, NTTP_MAX_BUFFER_SIZE
|
||||
from sabnzbd.encoding import utob, ubtou
|
||||
from sabnzbd.get_addrinfo import AddrInfo
|
||||
from sabnzbd.decorators import synchronized, DOWNLOADER_LOCK
|
||||
from sabnzbd.misc import int_conv
|
||||
|
||||
# Set pre-defined socket timeout
|
||||
socket.setdefaulttimeout(DEF_NETWORKING_TIMEOUT)
|
||||
@@ -59,8 +57,10 @@ class NewsWrapper:
|
||||
"thrdnum",
|
||||
"blocking",
|
||||
"timeout",
|
||||
"decoder",
|
||||
"send_buffer",
|
||||
"article",
|
||||
"data",
|
||||
"data_view",
|
||||
"data_position",
|
||||
"nntp",
|
||||
"connected",
|
||||
"user_sent",
|
||||
@@ -69,24 +69,19 @@ class NewsWrapper:
|
||||
"user_ok",
|
||||
"pass_ok",
|
||||
"force_login",
|
||||
"next_request",
|
||||
"concurrent_requests",
|
||||
"_response_queue",
|
||||
"selector_events",
|
||||
"lock",
|
||||
"generation",
|
||||
)
|
||||
|
||||
def __init__(self, server: "sabnzbd.downloader.Server", thrdnum: int, block: bool = False, generation: int = 0):
|
||||
def __init__(self, server, thrdnum, block=False):
|
||||
self.server: sabnzbd.downloader.Server = server
|
||||
self.thrdnum: int = thrdnum
|
||||
self.blocking: bool = block
|
||||
self.generation: int = generation
|
||||
|
||||
self.timeout: Optional[float] = None
|
||||
self.article: Optional[sabnzbd.nzbstuff.Article] = None
|
||||
|
||||
self.decoder: Optional[sabctools.Decoder] = None
|
||||
self.send_buffer = b""
|
||||
self.data: Optional[bytearray] = None
|
||||
self.data_view: Optional[memoryview] = None
|
||||
self.data_position: int = 0
|
||||
|
||||
self.nntp: Optional[NNTP] = None
|
||||
|
||||
@@ -98,22 +93,14 @@ class NewsWrapper:
|
||||
self.force_login: bool = False
|
||||
self.group: Optional[str] = None
|
||||
|
||||
# Command queue and concurrency
|
||||
self.next_request: Optional[tuple[bytes, Optional["sabnzbd.nzb.Article"]]] = None
|
||||
self.concurrent_requests: threading.BoundedSemaphore = threading.BoundedSemaphore(
|
||||
self.server.pipelining_requests()
|
||||
)
|
||||
self._response_queue: deque[Optional[sabnzbd.nzb.Article]] = deque()
|
||||
self.selector_events = 0
|
||||
self.lock: threading.Lock = threading.Lock()
|
||||
@property
|
||||
def status_code(self) -> Optional[int]:
|
||||
if self.data_position >= 3:
|
||||
return int_conv(self.data[:3])
|
||||
|
||||
@property
|
||||
def article(self) -> Optional["sabnzbd.nzb.Article"]:
|
||||
"""The article currently being downloaded"""
|
||||
with self.lock:
|
||||
if self._response_queue:
|
||||
return self._response_queue[0]
|
||||
return None
|
||||
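The new fields above (next_request, concurrent_requests, _response_queue) implement simple pipelining bookkeeping: a command may only go on the wire while the semaphore has a free slot, the article it belongs to is appended to a FIFO, and each incoming response is matched against the oldest queued article. A stripped-down sketch of that pattern, detached from sockets and the Downloader; the class and method names here are illustrative, not the real API:

import threading
from collections import deque

class PipelineBookkeeping:
    def __init__(self, max_in_flight: int = 2):
        self.concurrent_requests = threading.BoundedSemaphore(max_in_flight)
        self._response_queue = deque()  # Articles awaiting their response, oldest first
        self.lock = threading.Lock()

    def try_send(self, article) -> bool:
        """Reserve a pipeline slot; only then may the command be sent."""
        if not self.concurrent_requests.acquire(blocking=False):
            return False  # Concurrency limit reached, try again later
        with self.lock:
            self._response_queue.append(article)
        return True

    def on_response(self):
        """A complete response arrived: release the slot and return its article."""
        with self.lock:
            article = self._response_queue.popleft()
        self.concurrent_requests.release()
        return article

pipeline = PipelineBookkeeping(max_in_flight=2)
assert pipeline.try_send("article-1") and pipeline.try_send("article-2")
assert not pipeline.try_send("article-3")     # Both slots in use
assert pipeline.on_response() == "article-1"  # Responses are matched oldest-first
assert pipeline.try_send("article-3")         # A slot is free again
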
def nntp_msg(self) -> str:
|
||||
return ubtou(self.data[: self.data_position]).strip()
|
||||
|
||||
def init_connect(self):
|
||||
"""Setup the connection in NNTP object"""
|
||||
@@ -122,15 +109,13 @@ class NewsWrapper:
|
||||
raise socket.error(errno.EADDRNOTAVAIL, T("Invalid server address."))
|
||||
|
||||
# Construct buffer and NNTP object
|
||||
self.decoder = sabctools.Decoder(NNTP_BUFFER_SIZE)
|
||||
self.data = sabctools.bytearray_malloc(NNTP_BUFFER_SIZE)
|
||||
self.data_view = memoryview(self.data)
|
||||
self.reset_data_buffer()
|
||||
self.nntp = NNTP(self, self.server.addrinfo)
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
|
||||
# On connect the first "response" will be 200 Welcome
|
||||
self._response_queue.append(None)
|
||||
self.concurrent_requests.acquire()
|
||||
|
||||
def finish_connect(self, code: int, message: str) -> None:
|
||||
def finish_connect(self, code: int):
|
||||
"""Perform login options"""
|
||||
if not (self.server.username or self.server.password or self.force_login):
|
||||
self.connected = True
|
||||
@@ -148,10 +133,11 @@ class NewsWrapper:
|
||||
self.pass_ok = False
|
||||
|
||||
if code in (400, 500, 502):
|
||||
raise NNTPPermanentError(message, code)
|
||||
raise NNTPPermanentError(self.nntp_msg, code)
|
||||
elif not self.user_sent:
|
||||
command = utob("authinfo user %s\r\n" % self.server.username)
|
||||
self.queue_command(command)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.reset_data_buffer()
|
||||
self.user_sent = True
|
||||
elif not self.user_ok:
|
||||
if code == 381:
|
||||
@@ -165,275 +151,104 @@ class NewsWrapper:
|
||||
|
||||
if self.user_ok and not self.pass_sent:
|
||||
command = utob("authinfo pass %s\r\n" % self.server.password)
|
||||
self.queue_command(command)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.reset_data_buffer()
|
||||
self.pass_sent = True
|
||||
elif self.user_ok and not self.pass_ok:
|
||||
if code != 281:
|
||||
# Assume that login failed (code 481 or other)
|
||||
raise NNTPPermanentError(message, code)
|
||||
raise NNTPPermanentError(self.nntp_msg, code)
|
||||
else:
|
||||
self.connected = True
|
||||
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
|
||||
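The login handshake above follows the NNTP AUTHINFO exchange (RFC 4643): after the welcome, the client sends "authinfo user", a 381 reply asks for the password, "authinfo pass" then has to be answered with 281, and 481 or a permanent error code (400, 500, 502) means the login failed. A simplified decision helper illustrating just the code-to-action mapping; the real finish_connect also tracks user_ok, pass_ok and force_login:

def next_auth_step(code: int, user_sent: bool, pass_sent: bool) -> str:
    """Decide the next step of the login handshake from the last status code."""
    if code in (400, 500, 502):
        return "fail"            # Permanent server error
    if not user_sent:
        return "send-user"
    if code == 381 and not pass_sent:
        return "send-pass"       # Server asks for the password
    if code == 281:
        return "connected"
    return "fail"                # 481 or anything unexpected

assert next_auth_step(200, user_sent=False, pass_sent=False) == "send-user"
assert next_auth_step(381, user_sent=True, pass_sent=False) == "send-pass"
assert next_auth_step(281, user_sent=True, pass_sent=True) == "connected"
assert next_auth_step(481, user_sent=True, pass_sent=True) == "fail"
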
def queue_command(
|
||||
self,
|
||||
command: bytes,
|
||||
article: Optional["sabnzbd.nzb.Article"] = None,
|
||||
) -> None:
|
||||
"""Add a command to the command queue"""
|
||||
self.next_request = command, article
|
||||
|
||||
def body(self, article: "sabnzbd.nzb.Article") -> tuple[bytes, "sabnzbd.nzb.Article"]:
|
||||
def body(self):
|
||||
"""Request the body of the article"""
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
if article.nzf.nzo.precheck:
|
||||
if self.article.nzf.nzo.precheck:
|
||||
if self.server.have_stat:
|
||||
command = utob("STAT <%s>\r\n" % article.article)
|
||||
command = utob("STAT <%s>\r\n" % self.article.article)
|
||||
else:
|
||||
command = utob("HEAD <%s>\r\n" % article.article)
|
||||
command = utob("HEAD <%s>\r\n" % self.article.article)
|
||||
elif self.server.have_body:
|
||||
command = utob("BODY <%s>\r\n" % article.article)
|
||||
command = utob("BODY <%s>\r\n" % self.article.article)
|
||||
else:
|
||||
command = utob("ARTICLE <%s>\r\n" % article.article)
|
||||
return command, article
|
||||
command = utob("ARTICLE <%s>\r\n" % self.article.article)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.reset_data_buffer()
|
||||
|
||||
def on_response(self, response: sabctools.NNTPResponse, article: Optional["sabnzbd.nzb.Article"]) -> None:
|
||||
"""A response to a NNTP request is received"""
|
||||
self.concurrent_requests.release()
|
||||
sabnzbd.Downloader.modify_socket(self, EVENT_READ | EVENT_WRITE)
|
||||
server = self.server
|
||||
article_done = response.status_code in (220, 222) and article
|
||||
def recv_chunk(self) -> Tuple[int, bool, bool]:
|
||||
"""Receive data, return #bytes, end-of-line, end-of-article"""
|
||||
# Resize the buffer in the extremely unlikely case that it got full
|
||||
if self.data_position == len(self.data):
|
||||
self.nntp.nw.increase_data_buffer()
|
||||
|
||||
if article_done:
|
||||
with DOWNLOADER_LOCK:
|
||||
# Update statistics only when we fetched a whole article
|
||||
# The side effect is that we don't count things like article-not-available messages
|
||||
article.nzf.nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, response.bytes_read)
|
||||
|
||||
# Response code depends on request command:
|
||||
# 220 = ARTICLE, 222 = BODY
|
||||
if not article_done:
|
||||
if not self.connected or not article or response.status_code in (281, 381, 480, 481, 482):
|
||||
self.discard(article, count_article_try=False)
|
||||
if not sabnzbd.Downloader.finish_connect_nw(self, response):
|
||||
return
|
||||
if self.connected:
|
||||
logging.info("Connecting %s@%s finished", self.thrdnum, server.host)
|
||||
|
||||
elif response.status_code == 223:
|
||||
article_done = True
|
||||
logging.debug("Article <%s> is present on %s", article.article, server.host)
|
||||
|
||||
elif response.status_code in (411, 423, 430, 451):
|
||||
article_done = True
|
||||
logging.debug(
|
||||
"Thread %s@%s: Article %s missing (error=%s)",
|
||||
self.thrdnum,
|
||||
server.host,
|
||||
article.article,
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
elif response.status_code == 500:
|
||||
if article.nzf.nzo.precheck:
|
||||
# Did we try "STAT" already?
|
||||
if not server.have_stat:
|
||||
# Hopeless server, just discard
|
||||
logging.info("Server %s does not support STAT or HEAD, precheck not possible", server.host)
|
||||
article_done = True
|
||||
else:
|
||||
# Assume "STAT" command is not supported
|
||||
server.have_stat = False
|
||||
logging.debug("Server %s does not support STAT, trying HEAD", server.host)
|
||||
else:
|
||||
# Assume "BODY" command is not supported
|
||||
server.have_body = False
|
||||
logging.debug("Server %s does not support BODY", server.host)
|
||||
self.discard(article, count_article_try=False)
|
||||
|
||||
else:
|
||||
# Don't warn for (internal) server errors during downloading
|
||||
if response.status_code not in (400, 502, 503):
|
||||
logging.warning(
|
||||
T("%s@%s: Received unknown status code %s for article %s"),
|
||||
self.thrdnum,
|
||||
server.host,
|
||||
response.status_code,
|
||||
article.article,
|
||||
)
|
||||
|
||||
# Ditch this thread, we don't know what data we got now so the buffer can be bad
|
||||
sabnzbd.Downloader.reset_nw(
|
||||
self, f"Server error or unknown status code: {response.status_code}", wait=False, article=article
|
||||
)
|
||||
return
|
||||
|
||||
if article_done:
|
||||
# Successful data, clear "bad" counter
|
||||
server.bad_cons = 0
|
||||
server.errormsg = server.warning = ""
|
||||
|
||||
# Decode
|
||||
sabnzbd.Downloader.decode(article, response)
|
||||
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Thread %s@%s: %s done", self.thrdnum, server.host, article.article)
|
||||
|
||||
def read(
|
||||
self,
|
||||
nbytes: int = 0,
|
||||
on_response: Optional[Callable[[int, str], None]] = None,
|
||||
generation: Optional[int] = None,
|
||||
) -> Tuple[int, Optional[int]]:
|
||||
"""Receive data, return #bytes, #pendingbytes
|
||||
:param nbytes: maximum number of bytes to read
|
||||
:param on_response: callback for each complete response received
|
||||
:param generation: expected reset generation
|
||||
:return: #bytes, #pendingbytes
|
||||
"""
|
||||
if generation is None:
|
||||
generation = self.generation
|
||||
|
||||
# NewsWrapper is being reset
|
||||
if not self.decoder:
|
||||
return 0, None
|
||||
|
||||
# Receive data into the decoder pre-allocated buffer
|
||||
if not nbytes and self.nntp.nw.server.ssl and not self.nntp.nw.blocking and sabctools.openssl_linked:
|
||||
# Receive data into the pre-allocated buffer
|
||||
if self.nntp.nw.server.ssl and not self.nntp.nw.blocking and sabctools.openssl_linked:
|
||||
# Use patched version when downloading
|
||||
bytes_recv = sabctools.unlocked_ssl_recv_into(self.nntp.sock, self.decoder)
|
||||
bytes_recv = sabctools.unlocked_ssl_recv_into(self.nntp.sock, self.data_view[self.data_position :])
|
||||
else:
|
||||
bytes_recv = self.nntp.sock.recv_into(self.decoder, nbytes=nbytes)
|
||||
bytes_recv = self.nntp.sock.recv_into(self.data_view[self.data_position :])
|
||||
|
||||
# No data received
|
||||
if bytes_recv == 0:
|
||||
raise ConnectionError("Server closed connection")
|
||||
|
||||
# Success, move timeout
|
||||
# Success, move timeout and internal data position
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
|
||||
self.decoder.process(bytes_recv)
|
||||
for response in self.decoder:
|
||||
if self.generation != generation:
|
||||
break
|
||||
with self.lock:
|
||||
# Re-check under lock to avoid racing with hard_reset
|
||||
if self.generation != generation or not self._response_queue:
|
||||
break
|
||||
article = self._response_queue.popleft()
|
||||
if on_response:
|
||||
on_response(response.status_code, response.message)
|
||||
self.on_response(response, article)
|
||||
self.data_position += bytes_recv
|
||||
|
||||
# The SSL-layer might still contain data even though the socket does not. Another Downloader-loop would
|
||||
# not identify this socket anymore as it is not returned by select(). So, we have to forcefully trigger
|
||||
# another recv_chunk so the buffer is increased and the data from the SSL-layer is read. See #2752.
|
||||
if self.server.ssl and self.nntp and (pending := self.nntp.sock.pending()):
|
||||
return bytes_recv, pending
|
||||
return bytes_recv, None
|
||||
if self.nntp.nw.server.ssl and self.data_position == len(self.data) and self.nntp.sock.pending() > 0:
|
||||
# We do not perform error-handling, as we know there is data available to read
|
||||
additional_bytes_recv, additional_end_of_line, additional_end_of_article = self.recv_chunk()
|
||||
return bytes_recv + additional_bytes_recv, additional_end_of_line, additional_end_of_article
|
||||
|
||||
def write(self):
|
||||
"""Send data to server"""
|
||||
server = self.server
|
||||
# Check for end of line
|
||||
# Using the data directly seems faster than the memoryview
|
||||
if self.data[self.data_position - 2 : self.data_position] == b"\r\n":
|
||||
# Official end-of-article is "\r\n.\r\n"
|
||||
if self.data[self.data_position - 5 : self.data_position] == b"\r\n.\r\n":
|
||||
return bytes_recv, True, True
|
||||
return bytes_recv, True, False
|
||||
|
||||
try:
|
||||
# First, try to flush any remaining data
|
||||
if self.send_buffer:
|
||||
sent = self.nntp.sock.send(self.send_buffer)
|
||||
self.send_buffer = self.send_buffer[sent:]
|
||||
if self.send_buffer:
|
||||
# Still unsent data, wait for next EVENT_WRITE
|
||||
return
|
||||
# Still in middle of data, so continue!
|
||||
return bytes_recv, False, False
|
||||
|
||||
if self.connected:
|
||||
if (
|
||||
server.active
|
||||
and not server.restart
|
||||
and not (
|
||||
sabnzbd.Downloader.paused
|
||||
or sabnzbd.Downloader.shutdown
|
||||
or sabnzbd.Downloader.paused_for_postproc
|
||||
)
|
||||
):
|
||||
# Prepare the next request
|
||||
if not self.next_request and (article := server.get_article()):
|
||||
self.next_request = self.body(article)
|
||||
elif self.next_request and self.next_request[1]:
|
||||
# Discard the next request
|
||||
self.discard(self.next_request[1], count_article_try=False, retry_article=True)
|
||||
self.next_request = None
|
||||
def soft_reset(self):
|
||||
"""Reset for the next article"""
|
||||
self.timeout = None
|
||||
self.article = None
|
||||
self.reset_data_buffer()
|
||||
|
||||
# If no pending buffer, try to send new command
|
||||
if not self.send_buffer and self.next_request:
|
||||
if self.concurrent_requests.acquire(blocking=False):
|
||||
command, article = self.next_request
|
||||
self.next_request = None
|
||||
if article:
|
||||
nzo = article.nzf.nzo
|
||||
if nzo.removed_from_queue or nzo.status is Status.PAUSED and nzo.priority is not FORCE_PRIORITY:
|
||||
self.discard(article, count_article_try=False, retry_article=True)
|
||||
self.concurrent_requests.release()
|
||||
return
|
||||
self._response_queue.append(article)
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Thread %s@%s: %s", self.thrdnum, server.host, command)
|
||||
try:
|
||||
sent = self.nntp.sock.send(command)
|
||||
if sent < len(command):
|
||||
# Partial send, store remainder
|
||||
self.send_buffer = command[sent:]
|
||||
except (BlockingIOError, ssl.SSLWantWriteError):
|
||||
# Can't send now, store full command
|
||||
self.send_buffer = command
|
||||
else:
|
||||
# Concurrency limit reached
|
||||
sabnzbd.Downloader.modify_socket(self, EVENT_READ)
|
||||
else:
|
||||
# Is it safe to shut down this socket?
|
||||
if (
|
||||
not self.send_buffer
|
||||
and not self.next_request
|
||||
and not self._response_queue
|
||||
and (not server.active or server.restart or not self.timeout or time.time() > self.timeout)
|
||||
):
|
||||
# Make socket available again
|
||||
server.busy_threads.discard(self)
|
||||
server.idle_threads.add(self)
|
||||
sabnzbd.Downloader.remove_socket(self)
|
||||
def reset_data_buffer(self):
|
||||
"""Reset the data position"""
|
||||
self.data_position = 0
|
||||
|
||||
except (BlockingIOError, ssl.SSLWantWriteError):
|
||||
# Socket not currently writable — just try again later
|
||||
return
|
||||
except socket.error as err:
|
||||
logging.info("Looks like server closed connection: %s", err)
|
||||
sabnzbd.Downloader.reset_nw(self, "Server broke off connection", warn=True)
|
||||
except Exception:
|
||||
logging.error(T("Suspect error in downloader"))
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
sabnzbd.Downloader.reset_nw(self, "Server broke off connection", warn=True)
|
||||
def increase_data_buffer(self):
|
||||
"""Resize the buffer in the extremely unlikely case that it overflows"""
|
||||
# Sanity check before we go any further
|
||||
if len(self.data) > NTTP_MAX_BUFFER_SIZE:
|
||||
raise BufferError("Maximum data buffer size exceeded")
|
||||
|
||||
# Input needs to be integer, floats don't work
|
||||
new_buffer = sabctools.bytearray_malloc(len(self.data) + NNTP_BUFFER_SIZE // 2)
|
||||
new_buffer[: len(self.data)] = self.data
|
||||
logging.info("Increased buffer from %d to %d for %s", len(self.data), len(new_buffer), str(self))
|
||||
self.data = new_buffer
|
||||
self.data_view = memoryview(self.data)
|
||||
|
||||
def hard_reset(self, wait: bool = True):
|
||||
"""Destroy and restart"""
|
||||
with self.lock:
|
||||
# Drain unsent requests
|
||||
if self.next_request:
|
||||
_, article = self.next_request
|
||||
if article:
|
||||
self.discard(article, count_article_try=False, retry_article=True)
|
||||
self.next_request = None
|
||||
# Drain responses
|
||||
while self._response_queue:
|
||||
if article := self._response_queue.popleft():
|
||||
self.discard(article, count_article_try=False, retry_article=True)
|
||||
|
||||
if self.nntp:
|
||||
self.nntp.close(send_quit=self.connected)
|
||||
self.nntp = None
|
||||
|
||||
with self.lock:
|
||||
# Reset all variables (including the NNTP connection) and increment the generation counter
|
||||
self.__init__(self.server, self.thrdnum, generation=self.generation + 1)
|
||||
# Reset all variables (including the NNTP connection)
|
||||
self.__init__(self.server, self.thrdnum)
|
||||
|
||||
# Wait before re-using this newswrapper
|
||||
if wait:
|
||||
@@ -443,28 +258,6 @@ class NewsWrapper:
|
||||
# Reset for internal reasons, just wait 5 sec
|
||||
self.timeout = time.time() + 5
|
||||
|
||||
def discard(
|
||||
self,
|
||||
article: Optional["sabnzbd.nzb.Article"],
|
||||
count_article_try: bool = True,
|
||||
retry_article: bool = True,
|
||||
) -> None:
|
||||
"""Discard an article back to the queue"""
|
||||
if article and not article.nzf.nzo.removed_from_queue:
|
||||
# Only some errors should count towards the total tries for each server
|
||||
if count_article_try:
|
||||
article.tries += 1
|
||||
|
||||
# Do we discard, or try again for this server
|
||||
if not retry_article or (not self.server.required and article.tries > sabnzbd.cfg.max_art_tries()):
|
||||
# Too many tries on this server, consider article missing
|
||||
sabnzbd.Downloader.decode(article)
|
||||
article.tries = 0
|
||||
else:
|
||||
# Allow all servers again for this article
|
||||
# Do not use the article_queue, as the server could already have been disabled when we get here!
|
||||
article.allow_new_fetcher()
|
||||
|
||||
def __repr__(self):
|
||||
return "<NewsWrapper: server=%s:%s, thread=%s, connected=%s>" % (
|
||||
self.server.host,
|
||||
@@ -586,7 +379,7 @@ class NNTP:
|
||||
# Locked, so it can't interleave with any of the Downloader "__nw" actions
|
||||
with DOWNLOADER_LOCK:
|
||||
if not self.closed:
|
||||
sabnzbd.Downloader.add_socket(self.nw)
|
||||
sabnzbd.Downloader.add_socket(self.fileno, self.nw)
|
||||
except OSError as e:
|
||||
self.error(e)
|
||||
|
||||
|
||||
@@ -31,7 +31,7 @@ import http.client
|
||||
import json
|
||||
import apprise
|
||||
from threading import Thread
|
||||
from typing import Optional, Union
|
||||
from typing import Optional, Dict, Union
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg
|
||||
@@ -160,7 +160,7 @@ def send_notification(
|
||||
msg: str,
|
||||
notification_type: str,
|
||||
job_cat: Optional[str] = None,
|
||||
actions: Optional[dict[str, str]] = None,
|
||||
actions: Optional[Dict[str, str]] = None,
|
||||
):
|
||||
"""Send Notification message"""
|
||||
logging.info("Sending notification: %s - %s (type=%s, job_cat=%s)", title, msg, notification_type, job_cat)
|
||||
@@ -243,7 +243,7 @@ def send_notify_osd(title, message):
|
||||
return error
|
||||
|
||||
|
||||
def send_notification_center(title: str, msg: str, notification_type: str, actions: Optional[dict[str, str]] = None):
|
||||
def send_notification_center(title: str, msg: str, notification_type: str, actions: Optional[Dict[str, str]] = None):
|
||||
"""Send message to macOS Notification Center.
|
||||
Only 1 button is possible on macOS!"""
|
||||
logging.debug("Sending macOS notification")
|
||||
@@ -531,7 +531,7 @@ def send_nscript(title, msg, notification_type, force=False, test=None):
|
||||
return ""
|
||||
|
||||
|
||||
def send_windows(title: str, msg: str, notification_type: str, actions: Optional[dict[str, str]] = None):
|
||||
def send_windows(title: str, msg: str, notification_type: str, actions: Optional[Dict[str, str]] = None):
|
||||
"""Send Windows notifications, either fancy with buttons (Windows 10+) or basic ones"""
|
||||
# Skip any notifications if run as a Windows Service, it can result in crashes
|
||||
if sabnzbd.WIN_SERVICE:
|
||||
|
||||
@@ -1,59 +0,0 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
sabnzbd.nzb - NZB-related classes and functionality
|
||||
"""
|
||||
|
||||
# Article-related classes
|
||||
from sabnzbd.nzb.article import Article, ArticleSaver, TryList, TRYLIST_LOCK
|
||||
|
||||
# File-related classes
|
||||
from sabnzbd.nzb.file import NzbFile, NzbFileSaver, SkippedNzbFile
|
||||
|
||||
# Object-related classes
|
||||
from sabnzbd.nzb.object import (
|
||||
NzbObject,
|
||||
NzbObjectSaver,
|
||||
NzoAttributeSaver,
|
||||
NZO_LOCK,
|
||||
NzbEmpty,
|
||||
NzbRejected,
|
||||
NzbPreQueueRejected,
|
||||
NzbRejectToHistory,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# Article
|
||||
"Article",
|
||||
"ArticleSaver",
|
||||
"TryList",
|
||||
"TRYLIST_LOCK",
|
||||
# File
|
||||
"NzbFile",
|
||||
"NzbFileSaver",
|
||||
"SkippedNzbFile",
|
||||
# Object
|
||||
"NzbObject",
|
||||
"NzbObjectSaver",
|
||||
"NzoAttributeSaver",
|
||||
"NZO_LOCK",
|
||||
"NzbEmpty",
|
||||
"NzbRejected",
|
||||
"NzbPreQueueRejected",
|
||||
"NzbRejectToHistory",
|
||||
]
|
||||
@@ -1,214 +0,0 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
sabnzbd.article - Article and TryList classes for NZB downloading
|
||||
"""
|
||||
import logging
|
||||
import threading
|
||||
from typing import Optional
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.downloader import Server
|
||||
from sabnzbd.filesystem import get_new_id
|
||||
from sabnzbd.decorators import synchronized
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Trylist
|
||||
##############################################################################
|
||||
|
||||
TRYLIST_LOCK = threading.RLock()
|
||||
|
||||
|
||||
class TryList:
|
||||
"""TryList keeps track of which servers have been tried for a specific article"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = ("try_list",)
|
||||
|
||||
def __init__(self):
|
||||
# Sets are faster than lists
|
||||
self.try_list: set[Server] = set()
|
||||
|
||||
def server_in_try_list(self, server: Server) -> bool:
|
||||
"""Return whether specified server has been tried"""
|
||||
with TRYLIST_LOCK:
|
||||
return server in self.try_list
|
||||
|
||||
def all_servers_in_try_list(self, all_servers: set[Server]) -> bool:
|
||||
"""Check if all servers have been tried"""
|
||||
with TRYLIST_LOCK:
|
||||
return all_servers.issubset(self.try_list)
|
||||
|
||||
def add_to_try_list(self, server: Server):
|
||||
"""Register server as having been tried already"""
|
||||
with TRYLIST_LOCK:
|
||||
# Sets cannot contain duplicate items
|
||||
self.try_list.add(server)
|
||||
|
||||
def remove_from_try_list(self, server: Server):
|
||||
"""Remove server from list of tried servers"""
|
||||
with TRYLIST_LOCK:
|
||||
# Discard does not require the item to be present
|
||||
self.try_list.discard(server)
|
||||
|
||||
def reset_try_list(self):
|
||||
"""Clean the list"""
|
||||
with TRYLIST_LOCK:
|
||||
self.try_list = set()
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save the servers"""
|
||||
return set(server.id for server in self.try_list)
|
||||
|
||||
def __setstate__(self, servers_ids: list[str]):
|
||||
self.try_list = set()
|
||||
for server in sabnzbd.Downloader.servers:
|
||||
if server.id in servers_ids:
|
||||
self.add_to_try_list(server)
|
||||
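TryList is a small thread-safe wrapper around a set: an article (or file, or job) records which servers it has already been offered to, guarded by TRYLIST_LOCK. A usage sketch with plain strings standing in for the Server objects the real code stores:

trylist = TryList()
trylist.add_to_try_list("server-a")

assert trylist.server_in_try_list("server-a")
assert not trylist.server_in_try_list("server-b")
assert not trylist.all_servers_in_try_list({"server-a", "server-b"})

trylist.reset_try_list()
assert not trylist.server_in_try_list("server-a")
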
|
||||
|
||||
##############################################################################
|
||||
# Article
|
||||
##############################################################################
|
||||
ArticleSaver = (
|
||||
"article",
|
||||
"art_id",
|
||||
"bytes",
|
||||
"lowest_partnum",
|
||||
"decoded",
|
||||
"file_size",
|
||||
"data_begin",
|
||||
"data_size",
|
||||
"on_disk",
|
||||
"nzf",
|
||||
"crc32",
|
||||
)
|
||||
|
||||
|
||||
class Article(TryList):
|
||||
"""Representation of one article"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = ArticleSaver + ("fetcher", "fetcher_priority", "tries")
|
||||
|
||||
def __init__(self, article, article_bytes, nzf):
|
||||
super().__init__()
|
||||
self.article: str = article
|
||||
self.art_id: Optional[str] = None
|
||||
self.bytes: int = article_bytes
|
||||
self.lowest_partnum: bool = False
|
||||
self.fetcher: Optional[Server] = None
|
||||
self.fetcher_priority: int = 0
|
||||
self.tries: int = 0 # Try count
|
||||
self.decoded: bool = False
|
||||
self.file_size: Optional[int] = None
|
||||
self.data_begin: Optional[int] = None
|
||||
self.data_size: Optional[int] = None
|
||||
self.on_disk: bool = False
|
||||
self.crc32: Optional[int] = None
|
||||
self.nzf = nzf # NzbFile reference
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def reset_try_list(self):
|
||||
"""In addition to resetting the try list, also reset fetcher so all servers
|
||||
are tried again. Locked so fetcher setting changes are also protected."""
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
super().reset_try_list()
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def allow_new_fetcher(self, remove_fetcher_from_try_list: bool = True):
|
||||
"""Let article get new fetcher and reset try lists of file and job.
|
||||
Locked so all resets are performed at once"""
|
||||
if remove_fetcher_from_try_list:
|
||||
self.remove_from_try_list(self.fetcher)
|
||||
self.fetcher = None
|
||||
self.tries = 0
|
||||
self.nzf.reset_try_list()
|
||||
self.nzf.nzo.reset_try_list()
|
||||
|
||||
def get_article(self, server: Server, servers: list[Server]):
|
||||
"""Return article when appropriate for specified server"""
|
||||
if self.fetcher or self.server_in_try_list(server):
|
||||
return None
|
||||
|
||||
if server.priority > self.fetcher_priority:
|
||||
# Check for higher priority server, taking advantage of servers list being sorted by priority
|
||||
for server_check in servers:
|
||||
if server_check.priority < server.priority:
|
||||
if server_check.active and not self.server_in_try_list(server_check):
|
||||
# There is a higher priority server, so set article priority and return
|
||||
self.fetcher_priority = server_check.priority
|
||||
return None
|
||||
else:
|
||||
# All servers with a higher priority have been checked
|
||||
break
|
||||
|
||||
# If no higher priority servers, use this server
|
||||
self.fetcher_priority = server.priority
|
||||
self.fetcher = server
|
||||
self.tries += 1
|
||||
return self
|
||||
|
||||
def get_art_id(self):
|
||||
"""Return unique article storage name, create if needed"""
|
||||
if not self.art_id:
|
||||
self.art_id = get_new_id("article", self.nzf.nzo.admin_path)
|
||||
return self.art_id
|
||||
|
||||
def search_new_server(self):
|
||||
"""Search for a new server for this article"""
|
||||
# Since we need a new server, this one can be listed as failed
|
||||
sabnzbd.BPSMeter.register_server_article_failed(self.fetcher.id)
|
||||
self.add_to_try_list(self.fetcher)
|
||||
# Servers-list could be modified during iteration, so we need a copy
|
||||
for server in sabnzbd.Downloader.servers[:]:
|
||||
if server.active and not self.server_in_try_list(server):
|
||||
if server.priority >= self.fetcher.priority:
|
||||
self.tries = 0
|
||||
# Allow all servers for this nzo and nzf again (but not this fetcher for this article)
|
||||
self.allow_new_fetcher(remove_fetcher_from_try_list=False)
|
||||
return True
|
||||
|
||||
logging.info("Article %s unavailable on all servers, discarding", self.article)
|
||||
return False
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save to pickle file, selecting attributes"""
|
||||
dict_ = {}
|
||||
for item in ArticleSaver:
|
||||
dict_[item] = getattr(self, item)
|
||||
dict_["try_list"] = super().__getstate__()
|
||||
return dict_
|
||||
|
||||
def __setstate__(self, dict_):
|
||||
"""Load from pickle file, selecting attributes"""
|
||||
for item in ArticleSaver:
|
||||
try:
|
||||
setattr(self, item, dict_[item])
|
||||
except KeyError:
|
||||
# Handle new attributes
|
||||
setattr(self, item, None)
|
||||
super().__setstate__(dict_.get("try_list", []))
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
self.tries = 0
|
||||
|
||||
def __repr__(self):
|
||||
return "<Article: article=%s, bytes=%s, art_id=%s>" % (self.article, self.bytes, self.art_id)
|
||||
@@ -1,290 +0,0 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
sabnzbd.nzb.file - NzbFile class for representing files in NZB downloads
|
||||
"""
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
from typing import Optional
|
||||
|
||||
import sabctools
|
||||
from sabnzbd.nzb.article import TryList, Article, TRYLIST_LOCK
|
||||
from sabnzbd.downloader import Server
|
||||
from sabnzbd.filesystem import (
|
||||
sanitize_filename,
|
||||
get_unique_filename,
|
||||
get_filename,
|
||||
remove_file,
|
||||
get_new_id,
|
||||
save_data,
|
||||
load_data,
|
||||
)
|
||||
from sabnzbd.misc import int_conv, subject_name_extractor
|
||||
from sabnzbd.decorators import synchronized
|
||||
|
||||
|
||||
##############################################################################
|
||||
# NzbFile
|
||||
##############################################################################
|
||||
class SkippedNzbFile(Exception):
|
||||
pass
|
||||
|
||||
|
||||
NzbFileSaver = (
|
||||
"date",
|
||||
"filename",
|
||||
"filename_checked",
|
||||
"filepath",
|
||||
"type",
|
||||
"is_par2",
|
||||
"vol",
|
||||
"blocks",
|
||||
"setname",
|
||||
"articles",
|
||||
"decodetable",
|
||||
"bytes",
|
||||
"bytes_left",
|
||||
"nzo",
|
||||
"nzf_id",
|
||||
"deleted",
|
||||
"import_finished",
|
||||
"crc32",
|
||||
"assembled",
|
||||
"md5of16k",
|
||||
)
|
||||
|
||||
|
||||
class NzbFile(TryList):
|
||||
"""Representation of one file consisting of multiple articles"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = NzbFileSaver + ("lock",)
|
||||
|
||||
def __init__(self, date, subject, raw_article_db, file_bytes, nzo):
|
||||
"""Setup object"""
|
||||
super().__init__()
|
||||
self.lock = threading.RLock()
|
||||
|
||||
self.date: datetime.datetime = date
|
||||
self.type: Optional[str] = None
|
||||
self.filename: str = sanitize_filename(subject_name_extractor(subject))
|
||||
self.filename_checked = False
|
||||
self.filepath: Optional[str] = None
|
||||
|
||||
# Identifiers for par2 files
|
||||
self.is_par2: bool = False
|
||||
self.vol: Optional[int] = None
|
||||
self.blocks: Optional[int] = None
|
||||
self.setname: Optional[str] = None
|
||||
|
||||
# Articles are removed from "articles" after being fetched
|
||||
self.articles: dict[Article, Article] = {}
|
||||
self.decodetable: list[Article] = []
|
||||
|
||||
self.bytes: int = file_bytes
|
||||
self.bytes_left: int = file_bytes
|
||||
|
||||
self.nzo = nzo # NzbObject reference
|
||||
self.deleted = False
|
||||
self.import_finished = False
|
||||
|
||||
self.crc32: Optional[int] = 0
|
||||
self.assembled: bool = False
|
||||
self.md5of16k: Optional[bytes] = None
|
||||
|
||||
# Add first article to decodetable, this way we can check
|
||||
# if this is maybe a duplicate nzf
|
||||
if raw_article_db:
|
||||
first_article = self.add_article(raw_article_db.pop(0))
|
||||
first_article.lowest_partnum = True
|
||||
|
||||
if self in nzo.files:
|
||||
logging.info("File %s occurred twice in NZB, skipping", self.filename)
|
||||
raise SkippedNzbFile
|
||||
|
||||
# Create file on disk, which can fail in case of disk errors
|
||||
self.nzf_id: str = get_new_id("nzf", nzo.admin_path)
|
||||
if not self.nzf_id:
|
||||
# Error already shown to user from get_new_id
|
||||
raise SkippedNzbFile
|
||||
|
||||
# Any articles left?
|
||||
if raw_article_db:
|
||||
# Save the rest
|
||||
save_data(raw_article_db, self.nzf_id, nzo.admin_path)
|
||||
else:
|
||||
# All imported
|
||||
self.import_finished = True
|
||||
|
||||
def finish_import(self):
|
||||
"""Load the article objects from disk"""
|
||||
logging.debug("Finishing import on %s", self.filename)
|
||||
if raw_article_db := load_data(self.nzf_id, self.nzo.admin_path, remove=False):
|
||||
for raw_article in raw_article_db:
|
||||
self.add_article(raw_article)
|
||||
|
||||
# Make sure we have labeled the lowest part number
|
||||
# Also when DirectUnpack is disabled we need to know
|
||||
self.decodetable[0].lowest_partnum = True
|
||||
|
||||
# Mark safe to continue
|
||||
self.import_finished = True
|
||||
|
||||
def add_article(self, article_info):
|
||||
"""Add article to object database and return article object"""
|
||||
article = Article(article_info[0], article_info[1], self)
|
||||
with self.lock:
|
||||
self.articles[article] = article
|
||||
self.decodetable.append(article)
|
||||
return article
|
||||
|
||||
def remove_article(self, article: Article, success: bool) -> int:
|
||||
"""Handle completed article, possibly end of file"""
|
||||
with self.lock:
|
||||
if self.articles.pop(article, None) is not None:
|
||||
if success:
|
||||
self.bytes_left -= article.bytes
|
||||
return len(self.articles)
|
||||
|
||||
def set_par2(self, setname, vol, blocks):
|
||||
"""Designate this file as a par2 file"""
|
||||
self.is_par2 = True
|
||||
self.setname = setname
|
||||
self.vol = vol
|
||||
self.blocks = int_conv(blocks)
|
||||
|
||||
def update_crc32(self, crc32: Optional[int], length: int) -> None:
|
||||
if self.crc32 is None or crc32 is None:
|
||||
self.crc32 = None
|
||||
else:
|
||||
self.crc32 = sabctools.crc32_combine(self.crc32, crc32, length)
|
||||
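update_crc32() keeps a running CRC-32 of the whole file by folding in each article's CRC with sabctools.crc32_combine(), which reconstructs the CRC of the concatenation from the two partial CRCs and the length of the second part. The same identity can be checked with plain zlib when the second chunk itself is still available; a small sketch:

import zlib

part_one = b"first article data"
part_two = b"second article data"

crc_one = zlib.crc32(part_one)
crc_combined = zlib.crc32(part_two, crc_one)  # Continue the CRC over the second chunk

# Equivalent to computing the CRC over the concatenated data in one go, which is
# what crc32_combine(crc_one, crc_two, len(part_two)) reconstructs without ever
# needing the bytes of the second chunk again.
assert crc_combined == zlib.crc32(part_one + part_two)
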
|
||||
def get_articles(self, server: Server, servers: list[Server], fetch_limit: int):
|
||||
"""Get next articles to be downloaded"""
|
||||
articles = server.article_queue
|
||||
with self.lock:
|
||||
for article in self.articles:
|
||||
if article := article.get_article(server, servers):
|
||||
articles.append(article)
|
||||
if len(articles) >= fetch_limit:
|
||||
return
|
||||
self.add_to_try_list(server)
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def reset_all_try_lists(self):
|
||||
"""Reset all try lists. Locked so reset is performed
|
||||
for all items at the same time without chance of another
|
||||
thread changing any of the items while we are resetting"""
|
||||
with self.lock:
|
||||
for art in self.articles:
|
||||
art.reset_try_list()
|
||||
self.reset_try_list()
|
||||
|
||||
def first_article_processed(self) -> bool:
|
||||
"""Check if the first article has been processed.
|
||||
This ensures we have attempted to extract md5of16k and filename information
|
||||
before creating the filepath.
|
||||
"""
|
||||
# The first article of decodetable is always the lowest
|
||||
first_article = self.decodetable[0]
|
||||
# If it's still in nzo.first_articles, it hasn't been processed yet
|
||||
return first_article not in self.nzo.first_articles
|
||||
|
||||
def prepare_filepath(self):
|
||||
"""Do all checks before making the final path"""
|
||||
if not self.filepath:
|
||||
# Wait for the first article to be processed so we can get md5of16k
|
||||
# and proper filename before creating the filepath
|
||||
if not self.first_article_processed():
|
||||
return None
|
||||
|
||||
self.nzo.verify_nzf_filename(self)
|
||||
filename = sanitize_filename(self.filename)
|
||||
self.filepath = get_unique_filename(os.path.join(self.nzo.download_path, filename))
|
||||
self.filename = get_filename(self.filepath)
|
||||
return self.filepath
|
||||
|
||||
@property
|
||||
def completed(self):
|
||||
"""Is this file completed?"""
|
||||
if not self.import_finished:
|
||||
return False
|
||||
with self.lock:
|
||||
return not self.articles
|
||||
|
||||
def remove_admin(self):
|
||||
"""Remove article database from disk (sabnzbd_nzf_<id>)"""
|
||||
try:
|
||||
logging.debug("Removing article database for %s", self.nzf_id)
|
||||
remove_file(os.path.join(self.nzo.admin_path, self.nzf_id))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def __enter__(self):
|
||||
self.lock.acquire()
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self.lock.release()
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save to pickle file, selecting attributes"""
|
||||
dict_ = {}
|
||||
for item in NzbFileSaver:
|
||||
dict_[item] = getattr(self, item)
|
||||
dict_["try_list"] = super().__getstate__()
|
||||
return dict_
|
||||
|
||||
def __setstate__(self, dict_):
|
||||
"""Load from pickle file, selecting attributes"""
|
||||
for item in NzbFileSaver:
|
||||
try:
|
||||
setattr(self, item, dict_[item])
|
||||
except KeyError:
|
||||
# Handle new attributes
|
||||
setattr(self, item, None)
|
||||
super().__setstate__(dict_.get("try_list", []))
|
||||
self.lock = threading.RLock()
|
||||
if isinstance(self.articles, list):
|
||||
# Converted from list to dict
|
||||
self.articles = {x: x for x in self.articles}
|
||||
|
||||
def __eq__(self, other: "NzbFile"):
|
||||
"""Assume it's the same file if the number bytes and first article
|
||||
are the same or if there are no articles left, use the filenames.
|
||||
Some NZB's are just a mess and report different sizes for the same article.
|
||||
We used to compare (__eq__) articles based on article-ID, however, this failed
|
||||
because some NZB's had the same article-ID twice within one NZF.
|
||||
"""
|
||||
if other and (self.bytes == other.bytes or len(self.decodetable) == len(other.decodetable)):
|
||||
if self.decodetable and other.decodetable:
|
||||
return self.decodetable[0].article == other.decodetable[0].article
|
||||
# Fallback to filename comparison
|
||||
return self.filename == other.filename
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
"""Required because we implement eq. The same file can be spread
|
||||
over multiple NZO's so we make every NZF unique. Even though
|
||||
it's considered bad practice.
|
||||
"""
|
||||
return id(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<NzbFile: filename=%s, bytes=%s, nzf_id=%s>" % (self.filename, self.bytes, self.nzf_id)
|
||||
@@ -30,18 +30,10 @@ import zipfile
|
||||
import tempfile
|
||||
|
||||
import cherrypy._cpreqbody
|
||||
from typing import Optional, Any, Union
|
||||
from typing import Optional, Dict, Any, Union, List, Tuple
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.nzb import (
|
||||
NzbObject,
|
||||
NzbEmpty,
|
||||
NzbRejected,
|
||||
NzbPreQueueRejected,
|
||||
NzbRejectToHistory,
|
||||
NzbFile,
|
||||
SkippedNzbFile,
|
||||
)
|
||||
from sabnzbd import nzbstuff
|
||||
from sabnzbd.encoding import utob, correct_cherrypy_encoding
|
||||
from sabnzbd.filesystem import (
|
||||
get_filename,
|
||||
@@ -160,12 +152,12 @@ def process_nzb_archive_file(
|
||||
priority: Optional[Union[int, str]] = None,
|
||||
nzbname: Optional[str] = None,
|
||||
reuse: Optional[str] = None,
|
||||
nzo_info: Optional[dict[str, Any]] = None,
|
||||
nzo_info: Optional[Dict[str, Any]] = None,
|
||||
url: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
nzo_id: Optional[str] = None,
|
||||
dup_check: bool = True,
|
||||
) -> tuple[AddNzbFileResult, list[str]]:
|
||||
) -> Tuple[AddNzbFileResult, List[str]]:
|
||||
"""Analyse archive and create job(s).
|
||||
Accepts archive files with ONLY nzb/nfo/folder files in it.
|
||||
"""
|
||||
@@ -212,7 +204,7 @@ def process_nzb_archive_file(
|
||||
if datap:
|
||||
nzo = None
|
||||
try:
|
||||
nzo = NzbObject(
|
||||
nzo = nzbstuff.NzbObject(
|
||||
name,
|
||||
pp=pp,
|
||||
script=script,
|
||||
@@ -228,13 +220,13 @@ def process_nzb_archive_file(
|
||||
dup_check=dup_check,
|
||||
)
|
||||
except (
|
||||
NzbEmpty,
|
||||
NzbRejected,
|
||||
NzbPreQueueRejected,
|
||||
sabnzbd.nzbstuff.NzbEmpty,
|
||||
sabnzbd.nzbstuff.NzbRejected,
|
||||
sabnzbd.nzbstuff.NzbPreQueueRejected,
|
||||
):
|
||||
# Empty or fully rejected (including pre-queue rejections)
|
||||
pass
|
||||
except NzbRejectToHistory as err:
|
||||
except sabnzbd.nzbstuff.NzbRejectToHistory as err:
|
||||
# Duplicate or unwanted extension directed to history
|
||||
sabnzbd.NzbQueue.fail_to_history(err.nzo)
|
||||
nzo_ids.append(err.nzo.nzo_id)
|
||||
@@ -279,12 +271,12 @@ def process_single_nzb(
|
||||
priority: Optional[Union[int, str]] = None,
|
||||
nzbname: Optional[str] = None,
|
||||
reuse: Optional[str] = None,
|
||||
nzo_info: Optional[dict[str, Any]] = None,
|
||||
nzo_info: Optional[Dict[str, Any]] = None,
|
||||
url: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
nzo_id: Optional[str] = None,
|
||||
dup_check: bool = True,
|
||||
) -> tuple[AddNzbFileResult, list[str]]:
|
||||
) -> Tuple[AddNzbFileResult, List[str]]:
|
||||
"""Analyze file and create a job from it
|
||||
Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
|
||||
"""
|
||||
@@ -323,7 +315,7 @@ def process_single_nzb(
|
||||
nzo = None
|
||||
nzo_ids = []
|
||||
try:
|
||||
nzo = NzbObject(
|
||||
nzo = nzbstuff.NzbObject(
|
||||
filename,
|
||||
pp=pp,
|
||||
script=script,
|
||||
@@ -338,16 +330,16 @@ def process_single_nzb(
|
||||
nzo_id=nzo_id,
|
||||
dup_check=dup_check,
|
||||
)
|
||||
except NzbEmpty:
|
||||
except sabnzbd.nzbstuff.NzbEmpty:
|
||||
# Malformed or might not be an NZB file
|
||||
result = AddNzbFileResult.NO_FILES_FOUND
|
||||
except NzbRejected:
|
||||
except sabnzbd.nzbstuff.NzbRejected:
|
||||
# Rejected as duplicate
|
||||
result = AddNzbFileResult.ERROR
|
||||
except NzbPreQueueRejected:
|
||||
except sabnzbd.nzbstuff.NzbPreQueueRejected:
|
||||
# Rejected by pre-queue script - should be silently ignored for URL fetches
|
||||
result = AddNzbFileResult.PREQUEUE_REJECTED
|
||||
except NzbRejectToHistory as err:
|
||||
except sabnzbd.nzbstuff.NzbRejectToHistory as err:
|
||||
# Duplicate or unwanted extension directed to history
|
||||
sabnzbd.NzbQueue.fail_to_history(err.nzo)
|
||||
nzo_ids.append(err.nzo.nzo_id)
|
||||
@@ -374,7 +366,7 @@ def process_single_nzb(
|
||||
|
||||
def nzbfile_parser(full_nzb_path: str, nzo):
|
||||
# For type-hinting
|
||||
nzo: NzbObject
|
||||
nzo: sabnzbd.nzbstuff.NzbObject
|
||||
|
||||
# Hash for dupe-checking
|
||||
md5sum = hashlib.md5()
|
||||
@@ -478,8 +470,8 @@ def nzbfile_parser(full_nzb_path: str, nzo):
|
||||
|
||||
# Create NZF
|
||||
try:
|
||||
nzf = NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)
|
||||
except SkippedNzbFile:
|
||||
nzf = sabnzbd.nzbstuff.NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)
|
||||
except sabnzbd.nzbstuff.SkippedNzbFile:
|
||||
# Did not meet requirements, so continue
|
||||
skipped_files += 1
|
||||
continue
|
||||
|
||||
@@ -23,10 +23,10 @@ import os
|
||||
import logging
|
||||
import time
|
||||
import cherrypy._cpreqbody
|
||||
from typing import Union, Optional
|
||||
from typing import List, Dict, Union, Tuple, Optional
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.nzb import Article, NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject, Article
|
||||
from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, safe_lower, duplicate_warning
|
||||
from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_file, is_valid_script
|
||||
from sabnzbd.nzbparser import process_single_nzb
|
||||
@@ -57,8 +57,8 @@ class NzbQueue:
|
||||
|
||||
def __init__(self):
|
||||
self.__top_only: bool = cfg.top_only()
|
||||
self.__nzo_list: list[NzbObject] = []
|
||||
self.__nzo_table: dict[str, NzbObject] = {}
|
||||
self.__nzo_list: List[NzbObject] = []
|
||||
self.__nzo_table: Dict[str, NzbObject] = {}
|
||||
|
||||
def read_queue(self, repair: int):
|
||||
"""Read queue from disk, supporting repair modes
|
||||
@@ -121,7 +121,7 @@ class NzbQueue:
|
||||
pass
|
||||
|
||||
@NzbQueueLocker
|
||||
def scan_jobs(self, all_jobs: bool = False, action: bool = True) -> list[str]:
|
||||
def scan_jobs(self, all_jobs: bool = False, action: bool = True) -> List[str]:
|
||||
"""Scan "incomplete" for missing folders,
|
||||
'all' is True: Include active folders
|
||||
'action' is True, do the recovery action
|
||||
@@ -247,7 +247,7 @@ class NzbQueue:
|
||||
self.__top_only = value
|
||||
|
||||
@NzbQueueLocker
|
||||
def change_opts(self, nzo_ids: list[str], pp: int) -> int:
|
||||
def change_opts(self, nzo_ids: List[str], pp: int) -> int:
|
||||
"""Locked so changes during URLGrabbing are correctly passed to new job"""
|
||||
result = 0
|
||||
for nzo_id in nzo_ids:
|
||||
@@ -257,7 +257,7 @@ class NzbQueue:
|
||||
return result
|
||||
|
||||
@NzbQueueLocker
|
||||
def change_script(self, nzo_ids: list[str], script: str) -> int:
|
||||
def change_script(self, nzo_ids: List[str], script: str) -> int:
|
||||
"""Locked so changes during URLGrabbing are correctly passed to new job"""
|
||||
result = 0
|
||||
if (script is None) or is_valid_script(script):
|
||||
@@ -269,7 +269,7 @@ class NzbQueue:
|
||||
return result
|
||||
|
||||
@NzbQueueLocker
|
||||
def change_cat(self, nzo_ids: list[str], cat: str) -> int:
|
||||
def change_cat(self, nzo_ids: List[str], cat: str) -> int:
|
||||
"""Locked so changes during URLGrabbing are correctly passed to new job"""
|
||||
result = 0
|
||||
for nzo_id in nzo_ids:
|
||||
@@ -387,7 +387,7 @@ class NzbQueue:
|
||||
return nzo
|
||||
|
||||
@NzbQueueLocker
|
||||
def remove_multiple(self, nzo_ids: list[str], delete_all_data=True) -> list[str]:
|
||||
def remove_multiple(self, nzo_ids: List[str], delete_all_data=True) -> List[str]:
|
||||
"""Remove multiple jobs from the queue. Also triggers duplicate handling
|
||||
and downloader-disconnect, so intended for external use only!"""
|
||||
removed = []
|
||||
@@ -405,7 +405,7 @@ class NzbQueue:
|
||||
return removed
|
||||
|
||||
@NzbQueueLocker
|
||||
def remove_all(self, search: Optional[str] = None) -> list[str]:
|
||||
def remove_all(self, search: Optional[str] = None) -> List[str]:
|
||||
"""Remove NZO's that match the search-pattern"""
|
||||
nzo_ids = []
|
||||
search = safe_lower(search)
|
||||
@@ -414,7 +414,7 @@ class NzbQueue:
|
||||
nzo_ids.append(nzo_id)
|
||||
return self.remove_multiple(nzo_ids)
|
||||
|
||||
def remove_nzfs(self, nzo_id: str, nzf_ids: list[str]) -> list[str]:
|
||||
def remove_nzfs(self, nzo_id: str, nzf_ids: List[str]) -> List[str]:
|
||||
removed = []
|
||||
if nzo_id in self.__nzo_table:
|
||||
nzo = self.__nzo_table[nzo_id]
|
||||
@@ -441,7 +441,7 @@ class NzbQueue:
|
||||
logging.info("Removed NZFs %s from job %s", removed, nzo.final_name)
|
||||
return removed
|
||||
|
||||
def pause_multiple_nzo(self, nzo_ids: list[str]) -> list[str]:
|
||||
def pause_multiple_nzo(self, nzo_ids: List[str]) -> List[str]:
|
||||
handled = []
|
||||
for nzo_id in nzo_ids:
|
||||
self.pause_nzo(nzo_id)
|
||||
@@ -449,7 +449,7 @@ class NzbQueue:
|
||||
return handled
|
||||
|
||||
@NzbQueueLocker
|
||||
def pause_nzo(self, nzo_id: str) -> list[str]:
|
||||
def pause_nzo(self, nzo_id: str) -> List[str]:
|
||||
"""Locked so changes during URLGrabbing are correctly passed to new job"""
|
||||
handled = []
|
||||
if nzo_id in self.__nzo_table:
|
||||
@@ -459,7 +459,7 @@ class NzbQueue:
|
||||
handled.append(nzo_id)
|
||||
return handled
|
||||
|
||||
def resume_multiple_nzo(self, nzo_ids: list[str]) -> list[str]:
|
||||
def resume_multiple_nzo(self, nzo_ids: List[str]) -> List[str]:
|
||||
handled = []
|
||||
for nzo_id in nzo_ids:
|
||||
self.resume_nzo(nzo_id)
|
||||
@@ -467,7 +467,7 @@ class NzbQueue:
|
||||
return handled
|
||||
|
||||
@NzbQueueLocker
|
||||
def resume_nzo(self, nzo_id: str) -> list[str]:
|
||||
def resume_nzo(self, nzo_id: str) -> List[str]:
|
||||
handled = []
|
||||
if nzo_id in self.__nzo_table:
|
||||
nzo = self.__nzo_table[nzo_id]
|
||||
@@ -477,7 +477,7 @@ class NzbQueue:
|
||||
return handled
|
||||
|
||||
@NzbQueueLocker
|
||||
def switch(self, item_id_1: str, item_id_2: str) -> tuple[int, int]:
|
||||
def switch(self, item_id_1: str, item_id_2: str) -> Tuple[int, int]:
|
||||
try:
|
||||
# Allow an index as second parameter, easier for some skins
|
||||
i = int(item_id_2)
|
||||
@@ -532,24 +532,24 @@ class NzbQueue:
|
||||
return -1, nzo1.priority
|
||||
|
||||
@NzbQueueLocker
|
||||
def move_nzf_up_bulk(self, nzo_id: str, nzf_ids: list[str], size: int):
|
||||
def move_nzf_up_bulk(self, nzo_id: str, nzf_ids: List[str], size: int):
|
||||
if nzo_id in self.__nzo_table:
|
||||
for _ in range(size):
|
||||
self.__nzo_table[nzo_id].move_up_bulk(nzf_ids)
|
||||
|
||||
@NzbQueueLocker
|
||||
def move_nzf_top_bulk(self, nzo_id: str, nzf_ids: list[str]):
|
||||
def move_nzf_top_bulk(self, nzo_id: str, nzf_ids: List[str]):
|
||||
if nzo_id in self.__nzo_table:
|
||||
self.__nzo_table[nzo_id].move_top_bulk(nzf_ids)
|
||||
|
||||
@NzbQueueLocker
|
||||
def move_nzf_down_bulk(self, nzo_id: str, nzf_ids: list[str], size: int):
|
||||
def move_nzf_down_bulk(self, nzo_id: str, nzf_ids: List[str], size: int):
|
||||
if nzo_id in self.__nzo_table:
|
||||
for _ in range(size):
|
||||
self.__nzo_table[nzo_id].move_down_bulk(nzf_ids)
|
||||
|
||||
@NzbQueueLocker
|
||||
def move_nzf_bottom_bulk(self, nzo_id: str, nzf_ids: list[str]):
|
||||
def move_nzf_bottom_bulk(self, nzo_id: str, nzf_ids: List[str]):
|
||||
if nzo_id in self.__nzo_table:
|
||||
self.__nzo_table[nzo_id].move_bottom_bulk(nzf_ids)
|
||||
|
||||
@@ -670,7 +670,7 @@ class NzbQueue:
|
||||
return -1
|
||||
|
||||
@NzbQueueLocker
|
||||
def set_priority(self, nzo_ids: list[str], priority: int) -> int:
|
||||
def set_priority(self, nzo_ids: List[str], priority: int) -> int:
|
||||
try:
|
||||
n = -1
|
||||
for nzo_id in nzo_ids:
|
||||
@@ -692,7 +692,7 @@ class NzbQueue:
|
||||
return False
|
||||
return False
|
||||
|
||||
def get_articles(self, server: Server, servers: list[Server], fetch_limit: int) -> None:
|
||||
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
|
||||
"""Get next article for jobs in the queue
|
||||
Not locked for performance, since it only reads the queue
|
||||
"""
|
||||
@@ -705,12 +705,12 @@ class NzbQueue:
|
||||
and not nzo.propagation_delay_left
|
||||
) or nzo.priority == FORCE_PRIORITY:
|
||||
if not nzo.server_in_try_list(server):
|
||||
nzo.get_articles(server, servers, fetch_limit)
|
||||
if server.article_queue:
|
||||
break
|
||||
if articles := nzo.get_articles(server, servers, fetch_limit):
|
||||
return articles
|
||||
# Stop after first job that wasn't paused/propagating/etc
|
||||
if self.__top_only:
|
||||
break
|
||||
return []
|
||||
return []
|
||||
|
||||
def register_article(self, article: Article, success: bool = True):
|
||||
"""Register the articles we tried
|
||||
@@ -768,9 +768,10 @@ class NzbQueue:
|
||||
nzo.removed_from_queue = True
|
||||
if nzo.precheck:
|
||||
nzo.save_to_disk()
|
||||
# If not enough data is present, fail flag will be set (also used by postproc)
|
||||
if not nzo.fail_msg:
|
||||
# Send back for real download
|
||||
# Check result
|
||||
enough, _ = nzo.check_availability_ratio()
|
||||
if enough:
|
||||
# Enough data present, do real download
|
||||
self.send_back(nzo)
|
||||
return
|
||||
else:
|
||||
@@ -801,13 +802,13 @@ class NzbQueue:
|
||||
def queue_info(
|
||||
self,
|
||||
search: Optional[str] = None,
|
||||
categories: Optional[list[str]] = None,
|
||||
priorities: Optional[list[str]] = None,
|
||||
statuses: Optional[list[str]] = None,
|
||||
nzo_ids: Optional[list[str]] = None,
|
||||
categories: Optional[List[str]] = None,
|
||||
priorities: Optional[List[str]] = None,
|
||||
statuses: Optional[List[str]] = None,
|
||||
nzo_ids: Optional[List[str]] = None,
|
||||
start: int = 0,
|
||||
limit: int = 0,
|
||||
) -> tuple[int, int, int, list[NzbObject], int, int]:
|
||||
) -> Tuple[int, int, int, List[NzbObject], int, int]:
|
||||
"""Return list of queued jobs, optionally filtered and limited by start and limit.
|
||||
Not locked for performance, only reads the queue
|
||||
"""
|
||||
@@ -893,14 +894,11 @@ class NzbQueue:
|
||||
|
||||
if nzf.all_servers_in_try_list(active_servers):
|
||||
# Check for articles where all active servers have already been tried
|
||||
with nzf:
|
||||
for article in nzf.articles:
|
||||
if article.all_servers_in_try_list(active_servers):
|
||||
logging.debug(
|
||||
"Removing article %s with bad trylist in file %s", article, nzf.filename
|
||||
)
|
||||
nzo.increase_bad_articles_counter("missing_articles")
|
||||
sabnzbd.NzbQueue.register_article(article, success=False)
|
||||
for article in nzf.articles[:]:
|
||||
if article.all_servers_in_try_list(active_servers):
|
||||
logging.debug("Removing article %s with bad trylist in file %s", article, nzf.filename)
|
||||
nzo.increase_bad_articles_counter("missing_articles")
|
||||
sabnzbd.NzbQueue.register_article(article, success=False)
|
||||
|
||||
logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
|
||||
nzf.reset_try_list()
|
||||
@@ -936,7 +934,7 @@ class NzbQueue:
|
||||
# Don't use nzo.resume() to avoid resetting job warning flags
|
||||
nzo.status = Status.QUEUED
|
||||
|
||||
def get_urls(self) -> list[tuple[str, NzbObject]]:
|
||||
def get_urls(self) -> List[Tuple[str, NzbObject]]:
|
||||
"""Return list of future-types needing URL"""
|
||||
lst = []
|
||||
for nzo_id in self.__nzo_table:
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
sabnzbd.nzb.object - NzbObject class for representing NZB download jobs
|
||||
sabnzbd.nzbstuff - misc
|
||||
"""
|
||||
import os
|
||||
import time
|
||||
@@ -26,12 +26,11 @@ import datetime
|
||||
import threading
|
||||
import functools
|
||||
import difflib
|
||||
from typing import Any, Optional, Union, BinaryIO, Deque
|
||||
from typing import List, Dict, Any, Tuple, Optional, Union, BinaryIO, Set
|
||||
|
||||
# SABnzbd modules
|
||||
import sabnzbd
|
||||
from sabnzbd.nzb.article import TryList, Article, TRYLIST_LOCK
|
||||
from sabnzbd.nzb.file import NzbFile
|
||||
import sabctools
|
||||
from sabnzbd.constants import (
|
||||
GIGI,
|
||||
ATTRIB_FILE,
|
||||
@@ -61,8 +60,6 @@ from sabnzbd.misc import (
|
||||
opts_to_pp,
|
||||
pp_to_opts,
|
||||
duplicate_warning,
|
||||
scan_password,
|
||||
subject_name_extractor,
|
||||
)
|
||||
from sabnzbd.filesystem import (
|
||||
sanitize_foldername,
|
||||
@@ -92,19 +89,432 @@ from sabnzbd.filesystem import (
|
||||
remove_data,
|
||||
strip_extensions,
|
||||
get_ext,
|
||||
create_work_name,
|
||||
nzf_cmp_name,
|
||||
RAR_RE,
|
||||
)
|
||||
from sabnzbd.par2file import FilePar2Info, has_par2_in_filename, analyse_par2, parse_par2_file, is_par2_file
|
||||
from sabnzbd.decorators import synchronized
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
import sabnzbd.nzbparser
|
||||
from sabnzbd.downloader import Server
|
||||
from sabnzbd.database import HistoryDB
|
||||
from sabnzbd.deobfuscate_filenames import is_probably_obfuscated
|
||||
|
||||
# Name patterns
# In the subject, we expect the filename within double quotes
RE_SUBJECT_FILENAME_QUOTES = re.compile(r'"([^"]*)"')
# Otherwise something that looks like a filename
RE_SUBJECT_BASIC_FILENAME = re.compile(r"\b([\w\-+()' .,]+(?:\[[\w\-/+()' .,]*][\w\-+()' .,]*)*\.[A-Za-z0-9]{2,4})\b")
RE_RAR = re.compile(r"(\.rar|\.r\d\d|\.s\d\d|\.t\d\d|\.u\d\d|\.v\d\d)$", re.I)

|
||||
##############################################################################
|
||||
# Trylist
|
||||
##############################################################################
|
||||
|
||||
TRYLIST_LOCK = threading.RLock()
|
||||
|
||||
|
||||
class TryList:
|
||||
"""TryList keeps track of which servers have been tried for a specific article"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = ("try_list",)
|
||||
|
||||
def __init__(self):
|
||||
# Sets are faster than lists
|
||||
self.try_list: Set[Server] = set()
|
||||
|
||||
def server_in_try_list(self, server: Server) -> bool:
|
||||
"""Return whether specified server has been tried"""
|
||||
with TRYLIST_LOCK:
|
||||
return server in self.try_list
|
||||
|
||||
def all_servers_in_try_list(self, all_servers: Set[Server]) -> bool:
|
||||
"""Check if all servers have been tried"""
|
||||
with TRYLIST_LOCK:
|
||||
return all_servers.issubset(self.try_list)
|
||||
|
||||
def add_to_try_list(self, server: Server):
|
||||
"""Register server as having been tried already"""
|
||||
with TRYLIST_LOCK:
|
||||
# Sets cannot contain duplicate items
|
||||
self.try_list.add(server)
|
||||
|
||||
def remove_from_try_list(self, server: Server):
|
||||
"""Remove server from list of tried servers"""
|
||||
with TRYLIST_LOCK:
|
||||
# Discard does not require the item to be present
|
||||
self.try_list.discard(server)
|
||||
|
||||
def reset_try_list(self):
|
||||
"""Clean the list"""
|
||||
with TRYLIST_LOCK:
|
||||
self.try_list = set()
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save the servers"""
|
||||
return set(server.id for server in self.try_list)
|
||||
|
||||
def __setstate__(self, servers_ids: List[str]):
|
||||
self.try_list = set()
|
||||
for server in sabnzbd.Downloader.servers:
|
||||
if server.id in servers_ids:
|
||||
self.add_to_try_list(server)
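
# Illustration only (hypothetical helper, not part of the module): how the try-list
# bookkeeping of the TryList class above is used. Any hashable object can stand in
# for a Downloader Server here, because the try list is just a set.
def _trylist_example():
    server_a, server_b = object(), object()
    tl = TryList()
    tl.add_to_try_list(server_a)
    assert tl.server_in_try_list(server_a)
    assert not tl.all_servers_in_try_list({server_a, server_b})  # server_b still untried
    tl.add_to_try_list(server_b)
    assert tl.all_servers_in_try_list({server_a, server_b})  # every server has been tried
    tl.reset_try_list()
    assert not tl.server_in_try_list(server_a)
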
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Article
|
||||
##############################################################################
|
||||
ArticleSaver = (
|
||||
"article",
|
||||
"art_id",
|
||||
"bytes",
|
||||
"lowest_partnum",
|
||||
"decoded",
|
||||
"file_size",
|
||||
"data_begin",
|
||||
"data_size",
|
||||
"on_disk",
|
||||
"nzf",
|
||||
"crc32",
|
||||
)
|
||||
|
||||
|
||||
class Article(TryList):
|
||||
"""Representation of one article"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = ArticleSaver + ("fetcher", "fetcher_priority", "tries")
|
||||
|
||||
def __init__(self, article, article_bytes, nzf):
|
||||
super().__init__()
|
||||
self.article: str = article
|
||||
self.art_id: Optional[str] = None
|
||||
self.bytes: int = article_bytes
|
||||
self.lowest_partnum: bool = False
|
||||
self.fetcher: Optional[Server] = None
|
||||
self.fetcher_priority: int = 0
|
||||
self.tries: int = 0 # Try count
|
||||
self.decoded: bool = False
|
||||
self.file_size: Optional[int] = None
|
||||
self.data_begin: Optional[int] = None
|
||||
self.data_size: Optional[int] = None
|
||||
self.on_disk: bool = False
|
||||
self.crc32: Optional[int] = None
|
||||
self.nzf: NzbFile = nzf
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def reset_try_list(self):
|
||||
"""In addition to resetting the try list, also reset fetcher so all servers
|
||||
are tried again. Locked so fetcher setting changes are also protected."""
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
super().reset_try_list()
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def allow_new_fetcher(self, remove_fetcher_from_try_list: bool = True):
|
||||
"""Let article get new fetcher and reset try lists of file and job.
|
||||
Locked so all resets are performed at once"""
|
||||
if remove_fetcher_from_try_list:
|
||||
self.remove_from_try_list(self.fetcher)
|
||||
self.fetcher = None
|
||||
self.tries = 0
|
||||
self.nzf.reset_try_list()
|
||||
self.nzf.nzo.reset_try_list()
|
||||
|
||||
def get_article(self, server: Server, servers: List[Server]):
|
||||
"""Return article when appropriate for specified server"""
|
||||
if self.fetcher or self.server_in_try_list(server):
|
||||
return None
|
||||
|
||||
if server.priority > self.fetcher_priority:
|
||||
# Check for higher priority server, taking advantage of servers list being sorted by priority
|
||||
for server_check in servers:
|
||||
if server_check.priority < server.priority:
|
||||
if server_check.active and not self.server_in_try_list(server_check):
|
||||
# There is a higher priority server, so set article priority and return
|
||||
self.fetcher_priority = server_check.priority
|
||||
return None
|
||||
else:
|
||||
# All servers with a higher priority have been checked
|
||||
break
|
||||
|
||||
# If no higher priority servers, use this server
|
||||
self.fetcher_priority = server.priority
|
||||
self.fetcher = server
|
||||
self.tries += 1
|
||||
return self
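
# Illustration only (standalone sketch with hypothetical names): the selection rule used
# by Article.get_article above - a server may fetch an article only when no active,
# higher-priority (lower number) server is still untried for it. The fetcher_priority
# caching of the real method is left out; servers are assumed sorted by priority.
from typing import List, Optional, Set

class _FakeServer:
    def __init__(self, priority: int, active: bool = True):
        self.priority = priority
        self.active = active

def _pick_fetcher(candidate: _FakeServer, servers: List[_FakeServer], tried: Set[_FakeServer]) -> Optional[_FakeServer]:
    if candidate in tried:
        return None
    for other in servers:  # sorted by priority, best (lowest number) first
        if other.priority < candidate.priority:
            if other.active and other not in tried:
                return None  # let the better server try first
        else:
            break  # no higher-priority servers left to consider
    return candidate

# Example: the backup server (priority 1) only fetches once the premium server (0) was tried
# premium, backup = _FakeServer(0), _FakeServer(1)
# _pick_fetcher(backup, [premium, backup], tried=set())      -> None
# _pick_fetcher(backup, [premium, backup], tried={premium})  -> backup
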
|
||||
|
||||
def get_art_id(self):
|
||||
"""Return unique article storage name, create if needed"""
|
||||
if not self.art_id:
|
||||
self.art_id = get_new_id("article", self.nzf.nzo.admin_path)
|
||||
return self.art_id
|
||||
|
||||
def search_new_server(self):
|
||||
"""Search for a new server for this article"""
|
||||
# Since we need a new server, this one can be listed as failed
|
||||
sabnzbd.BPSMeter.register_server_article_failed(self.fetcher.id)
|
||||
self.add_to_try_list(self.fetcher)
|
||||
# Servers-list could be modified during iteration, so we need a copy
|
||||
for server in sabnzbd.Downloader.servers[:]:
|
||||
if server.active and not self.server_in_try_list(server):
|
||||
if server.priority >= self.fetcher.priority:
|
||||
self.tries = 0
|
||||
# Allow all servers for this nzo and nzf again (but not this fetcher for this article)
|
||||
self.allow_new_fetcher(remove_fetcher_from_try_list=False)
|
||||
return True
|
||||
|
||||
logging.info("Article %s unavailable on all servers, discarding", self.article)
|
||||
return False
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save to pickle file, selecting attributes"""
|
||||
dict_ = {}
|
||||
for item in ArticleSaver:
|
||||
dict_[item] = getattr(self, item)
|
||||
dict_["try_list"] = super().__getstate__()
|
||||
return dict_
|
||||
|
||||
def __setstate__(self, dict_):
|
||||
"""Load from pickle file, selecting attributes"""
|
||||
for item in ArticleSaver:
|
||||
try:
|
||||
setattr(self, item, dict_[item])
|
||||
except KeyError:
|
||||
# Handle new attributes
|
||||
setattr(self, item, None)
|
||||
super().__setstate__(dict_.get("try_list", []))
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
self.tries = 0
|
||||
|
||||
def __repr__(self):
|
||||
return "<Article: article=%s, bytes=%s, art_id=%s>" % (self.article, self.bytes, self.art_id)
|
||||
|
||||
|
||||
##############################################################################
|
||||
# NzbFile
|
||||
##############################################################################
|
||||
class SkippedNzbFile(Exception):
|
||||
pass
|
||||
|
||||
|
||||
NzbFileSaver = (
|
||||
"date",
|
||||
"filename",
|
||||
"filename_checked",
|
||||
"filepath",
|
||||
"type",
|
||||
"is_par2",
|
||||
"vol",
|
||||
"blocks",
|
||||
"setname",
|
||||
"articles",
|
||||
"decodetable",
|
||||
"bytes",
|
||||
"bytes_left",
|
||||
"nzo",
|
||||
"nzf_id",
|
||||
"deleted",
|
||||
"import_finished",
|
||||
"crc32",
|
||||
"assembled",
|
||||
"md5of16k",
|
||||
)
|
||||
|
||||
|
||||
class NzbFile(TryList):
|
||||
"""Representation of one file consisting of multiple articles"""
|
||||
|
||||
# Pre-define attributes to save memory
|
||||
__slots__ = NzbFileSaver
|
||||
|
||||
def __init__(self, date, subject, raw_article_db, file_bytes, nzo):
|
||||
"""Setup object"""
|
||||
super().__init__()
|
||||
|
||||
self.date: datetime.datetime = date
|
||||
self.type: Optional[str] = None
|
||||
self.filename: str = sanitize_filename(name_extractor(subject))
|
||||
self.filename_checked = False
|
||||
self.filepath: Optional[str] = None
|
||||
|
||||
# Identifiers for par2 files
|
||||
self.is_par2: bool = False
|
||||
self.vol: Optional[int] = None
|
||||
self.blocks: Optional[int] = None
|
||||
self.setname: Optional[str] = None
|
||||
|
||||
# Articles are removed from "articles" after being fetched
|
||||
self.articles: List[Article] = []
|
||||
self.decodetable: List[Article] = []
|
||||
|
||||
self.bytes: int = file_bytes
|
||||
self.bytes_left: int = file_bytes
|
||||
|
||||
self.nzo: NzbObject = nzo
|
||||
self.deleted = False
|
||||
self.import_finished = False
|
||||
|
||||
self.crc32: Optional[int] = 0
|
||||
self.assembled: bool = False
|
||||
self.md5of16k: Optional[bytes] = None
|
||||
|
||||
# Add first article to decodetable, this way we can check
|
||||
# if this is maybe a duplicate nzf
|
||||
if raw_article_db:
|
||||
first_article = self.add_article(raw_article_db.pop(0))
|
||||
first_article.lowest_partnum = True
|
||||
|
||||
if self in nzo.files:
|
||||
logging.info("File %s occurred twice in NZB, skipping", self.filename)
|
||||
raise SkippedNzbFile
|
||||
|
||||
# Create file on disk, which can fail in case of disk errors
|
||||
self.nzf_id: str = get_new_id("nzf", nzo.admin_path)
|
||||
if not self.nzf_id:
|
||||
# Error already shown to user from get_new_id
|
||||
raise SkippedNzbFile
|
||||
|
||||
# Any articles left?
|
||||
if raw_article_db:
|
||||
# Save the rest
|
||||
save_data(raw_article_db, self.nzf_id, nzo.admin_path)
|
||||
else:
|
||||
# All imported
|
||||
self.import_finished = True
|
||||
|
||||
def finish_import(self):
|
||||
"""Load the article objects from disk"""
|
||||
logging.debug("Finishing import on %s", self.filename)
|
||||
if raw_article_db := load_data(self.nzf_id, self.nzo.admin_path, remove=False):
|
||||
for raw_article in raw_article_db:
|
||||
self.add_article(raw_article)
|
||||
|
||||
# Make sure we have labeled the lowest part number
|
||||
# Also when DirectUnpack is disabled we need to know
|
||||
self.decodetable[0].lowest_partnum = True
|
||||
|
||||
# Mark safe to continue
|
||||
self.import_finished = True
|
||||
|
||||
def add_article(self, article_info):
|
||||
"""Add article to object database and return article object"""
|
||||
article = Article(article_info[0], article_info[1], self)
|
||||
self.articles.append(article)
|
||||
self.decodetable.append(article)
|
||||
return article
|
||||
|
||||
def remove_article(self, article: Article, success: bool) -> int:
|
||||
"""Handle completed article, possibly end of file"""
|
||||
if article in self.articles:
|
||||
self.articles.remove(article)
|
||||
if success:
|
||||
self.bytes_left -= article.bytes
|
||||
return len(self.articles)
|
||||
|
||||
def set_par2(self, setname, vol, blocks):
|
||||
"""Designate this file as a par2 file"""
|
||||
self.is_par2 = True
|
||||
self.setname = setname
|
||||
self.vol = vol
|
||||
self.blocks = int_conv(blocks)
|
||||
|
||||
def update_crc32(self, crc32: Optional[int], length: int) -> None:
|
||||
if self.crc32 is None or crc32 is None:
|
||||
self.crc32 = None
|
||||
else:
|
||||
self.crc32 = sabctools.crc32_combine(self.crc32, crc32, length)
|
||||
|
||||
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
|
||||
"""Get next articles to be downloaded"""
|
||||
articles = []
|
||||
for article in self.articles:
|
||||
if article := article.get_article(server, servers):
|
||||
articles.append(article)
|
||||
if len(articles) >= fetch_limit:
|
||||
return articles
|
||||
self.add_to_try_list(server)
|
||||
return articles
|
||||
|
||||
@synchronized(TRYLIST_LOCK)
|
||||
def reset_all_try_lists(self):
|
||||
"""Reset all try lists. Locked so reset is performed
|
||||
for all items at the same time without chance of another
|
||||
thread changing any of the items while we are resetting"""
|
||||
for art in self.articles:
|
||||
art.reset_try_list()
|
||||
self.reset_try_list()
|
||||
|
||||
def prepare_filepath(self):
|
||||
"""Do all checks before making the final path"""
|
||||
if not self.filepath:
|
||||
self.nzo.verify_nzf_filename(self)
|
||||
filename = sanitize_filename(self.filename)
|
||||
self.filepath = get_unique_filename(os.path.join(self.nzo.download_path, filename))
|
||||
self.filename = get_filename(self.filepath)
|
||||
return self.filepath
|
||||
|
||||
@property
|
||||
def completed(self):
|
||||
"""Is this file completed?"""
|
||||
return self.import_finished and not bool(self.articles)
|
||||
|
||||
def remove_admin(self):
|
||||
"""Remove article database from disk (sabnzbd_nzf_<id>)"""
|
||||
try:
|
||||
logging.debug("Removing article database for %s", self.nzf_id)
|
||||
remove_file(os.path.join(self.nzo.admin_path, self.nzf_id))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def __getstate__(self):
|
||||
"""Save to pickle file, selecting attributes"""
|
||||
dict_ = {}
|
||||
for item in NzbFileSaver:
|
||||
dict_[item] = getattr(self, item)
|
||||
dict_["try_list"] = super().__getstate__()
|
||||
return dict_
|
||||
|
||||
def __setstate__(self, dict_):
|
||||
"""Load from pickle file, selecting attributes"""
|
||||
for item in NzbFileSaver:
|
||||
try:
|
||||
setattr(self, item, dict_[item])
|
||||
except KeyError:
|
||||
# Handle new attributes
|
||||
setattr(self, item, None)
|
||||
super().__setstate__(dict_.get("try_list", []))
|
||||
|
||||
def __eq__(self, other: "NzbFile"):
|
||||
"""Assume it's the same file if the number bytes and first article
|
||||
are the same or if there are no articles left, use the filenames.
|
||||
Some NZB's are just a mess and report different sizes for the same article.
|
||||
We used to compare (__eq__) articles based on article-ID, however, this failed
|
||||
because some NZB's had the same article-ID twice within one NZF.
|
||||
"""
|
||||
if other and (self.bytes == other.bytes or len(self.decodetable) == len(other.decodetable)):
|
||||
if self.decodetable and other.decodetable:
|
||||
return self.decodetable[0].article == other.decodetable[0].article
|
||||
# Fallback to filename comparison
|
||||
return self.filename == other.filename
|
||||
return False
|
||||
|
||||
def __hash__(self):
|
||||
"""Required because we implement eq. The same file can be spread
|
||||
over multiple NZO's so we make every NZF unique. Even though
|
||||
it's considered bad practice.
|
||||
"""
|
||||
return id(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<NzbFile: filename=%s, bytes=%s, nzf_id=%s>" % (self.filename, self.bytes, self.nzf_id)
|
||||
|
||||
|
||||
##############################################################################
|
||||
# NzbObject
|
||||
##############################################################################
|
||||
class NzbEmpty(Exception):
|
||||
pass
|
||||
|
||||
@@ -204,7 +614,7 @@ class NzbObject(TryList):
|
||||
password: Optional[str] = None,
|
||||
nzbname: Optional[str] = None,
|
||||
status: str = Status.QUEUED,
|
||||
nzo_info: Optional[dict[str, Any]] = None,
|
||||
nzo_info: Optional[Dict[str, Any]] = None,
|
||||
reuse: Optional[str] = None,
|
||||
nzo_id: Optional[str] = None,
|
||||
dup_check: bool = True,
|
||||
@@ -267,7 +677,7 @@ class NzbObject(TryList):
|
||||
|
||||
# Bookkeeping values
|
||||
self.meta = {}
|
||||
self.servercount: dict[str, int] = {} # Dict to keep bytes per server
|
||||
self.servercount: Dict[str, int] = {} # Dict to keep bytes per server
|
||||
self.direct_unpacker: Optional[sabnzbd.directunpacker.DirectUnpacker] = None # The DirectUnpacker instance
|
||||
self.bytes: int = 0 # Original bytesize
|
||||
self.bytes_par2: int = 0 # Bytes available for repair
|
||||
@@ -276,15 +686,15 @@ class NzbObject(TryList):
|
||||
self.bytes_missing: int = 0 # Bytes missing
|
||||
self.bad_articles: int = 0 # How many bad (non-recoverable) articles
|
||||
|
||||
self.extrapars: dict[str, list[NzbFile]] = {} # Holds the extra parfile names for all sets
|
||||
self.par2packs: dict[str, dict[str, FilePar2Info]] = {} # Holds the par2info for each file in each set
|
||||
self.md5of16k: dict[bytes, str] = {} # Holds the md5s of the first-16k of all files in the NZB (hash: name)
|
||||
self.extrapars: Dict[str, List[NzbFile]] = {} # Holds the extra parfile names for all sets
|
||||
self.par2packs: Dict[str, Dict[str, FilePar2Info]] = {} # Holds the par2info for each file in each set
|
||||
self.md5of16k: Dict[bytes, str] = {} # Holds the md5s of the first-16k of all files in the NZB (hash: name)
|
||||
|
||||
self.files: list[NzbFile] = [] # List of all NZFs
|
||||
self.files_table: dict[str, NzbFile] = {} # Dictionary of NZFs indexed using NZF_ID
|
||||
self.renames: dict[str, str] = {} # Dictionary of all renamed files
|
||||
self.files: List[NzbFile] = [] # List of all NZFs
|
||||
self.files_table: Dict[str, NzbFile] = {} # Dictionary of NZFs indexed using NZF_ID
|
||||
self.renames: Dict[str, str] = {} # Dictionary of all renamed files
|
||||
|
||||
self.finished_files: list[NzbFile] = [] # List of all finished NZFs
|
||||
self.finished_files: List[NzbFile] = [] # List of all finished NZFs
|
||||
|
||||
# The current status of the nzo eg:
|
||||
# Queued, Downloading, Repairing, Unpacking, Failed, Complete
|
||||
@@ -293,9 +703,9 @@ class NzbObject(TryList):
|
||||
self.avg_bps_freq = 0
|
||||
self.avg_bps_total = 0
|
||||
|
||||
self.first_articles: list[Article] = []
|
||||
self.first_articles: List[Article] = []
|
||||
self.first_articles_count = 0
|
||||
self.saved_articles: set[Article] = set()
|
||||
self.saved_articles: Set[Article] = set()
|
||||
self.nzo_id: Optional[str] = None
|
||||
|
||||
self.duplicate: Optional[str] = None
|
||||
@@ -317,11 +727,11 @@ class NzbObject(TryList):
|
||||
# Store one line responses for filejoin/par2/unrar here for history display
|
||||
self.action_line = ""
|
||||
# Store the results from various filejoin/par2/unrar stages
|
||||
self.unpack_info: dict[str, list[str]] = {}
|
||||
self.unpack_info: Dict[str, List[str]] = {}
|
||||
# Stores one line containing the last failure
|
||||
self.fail_msg = ""
|
||||
# Stores various info about the nzo to be
|
||||
self.nzo_info: dict[str, Any] = nzo_info or {}
|
||||
self.nzo_info: Dict[str, Any] = nzo_info or {}
|
||||
|
||||
self.next_save = None
|
||||
self.save_timeout = None
|
||||
@@ -591,7 +1001,7 @@ class NzbObject(TryList):
|
||||
logging.debug("Unwanted Extension: putting last rar after first rar")
|
||||
firstrarpos = lastrarpos = 0
|
||||
for nzfposcounter, nzf in enumerate(self.files):
|
||||
if RAR_RE.search(nzf.filename.lower()):
|
||||
if RE_RAR.search(nzf.filename.lower()):
|
||||
# a NZF found with '.rar' in the name
|
||||
if firstrarpos == 0:
|
||||
# this is the first .rar found, so remember this position
|
||||
@@ -1112,7 +1522,7 @@ class NzbObject(TryList):
|
||||
if hasattr(self, "direct_unpacker") and self.direct_unpacker:
|
||||
self.direct_unpacker.abort()
|
||||
|
||||
def check_availability_ratio(self) -> tuple[bool, float]:
|
||||
def check_availability_ratio(self) -> Tuple[bool, float]:
|
||||
"""Determine if we are still meeting the required ratio"""
|
||||
availability_ratio = req_ratio = cfg.req_completion_rate()
|
||||
|
||||
@@ -1215,9 +1625,8 @@ class NzbObject(TryList):
|
||||
self.nzo_info[bad_article_type] += 1
|
||||
self.bad_articles += 1
|
||||
|
||||
def get_articles(self, server: Server, servers: list[Server], fetch_limit: int):
|
||||
"""Assign articles server up to the fetch_limit"""
|
||||
articles: Deque[Article] = server.article_queue
|
||||
def get_articles(self, server: Server, servers: List[Server], fetch_limit: int) -> List[Article]:
|
||||
articles = []
|
||||
nzf_remove_list = []
|
||||
|
||||
# Did we go through all first-articles?
|
||||
@@ -1252,8 +1661,7 @@ class NzbObject(TryList):
|
||||
else:
|
||||
break
|
||||
|
||||
nzf.get_articles(server, servers, fetch_limit)
|
||||
if articles:
|
||||
if articles := nzf.get_articles(server, servers, fetch_limit):
|
||||
break
|
||||
|
||||
# Remove all files for which admin could not be read
|
||||
@@ -1268,9 +1676,10 @@ class NzbObject(TryList):
|
||||
if not articles:
|
||||
# No articles for this server, block for next time
|
||||
self.add_to_try_list(server)
|
||||
return articles
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def move_top_bulk(self, nzf_ids: list[str]):
|
||||
def move_top_bulk(self, nzf_ids: List[str]):
|
||||
self.cleanup_nzf_ids(nzf_ids)
|
||||
if nzf_ids:
|
||||
target = list(range(len(nzf_ids)))
|
||||
@@ -1490,7 +1899,7 @@ class NzbObject(TryList):
|
||||
logging.debug("Saving attributes %s for %s", attribs, self.final_name)
|
||||
save_data(attribs, ATTRIB_FILE, self.admin_path, silent=True)
|
||||
|
||||
def load_attribs(self) -> tuple[Optional[str], Optional[int], Optional[str]]:
|
||||
def load_attribs(self) -> Tuple[Optional[str], Optional[int], Optional[str]]:
|
||||
"""Load saved attributes and return them to be parsed"""
|
||||
attribs = load_data(ATTRIB_FILE, self.admin_path, remove=False)
|
||||
logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
|
||||
@@ -1513,7 +1922,7 @@ class NzbObject(TryList):
|
||||
return attribs["cat"], attribs["pp"], attribs["script"]
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def build_pos_nzf_table(self, nzf_ids: list[str]) -> dict[int, NzbFile]:
|
||||
def build_pos_nzf_table(self, nzf_ids: List[str]) -> Dict[int, NzbFile]:
|
||||
pos_nzf_table = {}
|
||||
for nzf_id in nzf_ids:
|
||||
if nzf_id in self.files_table:
|
||||
@@ -1524,7 +1933,7 @@ class NzbObject(TryList):
|
||||
return pos_nzf_table
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def cleanup_nzf_ids(self, nzf_ids: list[str]):
|
||||
def cleanup_nzf_ids(self, nzf_ids: List[str]):
|
||||
for nzf_id in nzf_ids[:]:
|
||||
if nzf_id in self.files_table:
|
||||
if self.files_table[nzf_id] not in self.files:
|
||||
@@ -1693,3 +2102,109 @@ class NzbObject(TryList):
|
||||
|
||||
def __repr__(self):
|
||||
return "<NzbObject: filename=%s, bytes=%s, nzo_id=%s>" % (self.filename, self.bytes, self.nzo_id)
|
||||
|
||||
|
||||
def nzf_cmp_name(nzf1: NzbFile, nzf2: NzbFile):
    # The comparison will sort .par2 files to the top of the queue followed by .rar files,
    # they will then be sorted by name.
    nzf1_name = nzf1.filename.lower()
    nzf2_name = nzf2.filename.lower()

    # Determine vol-pars
    is_par1 = ".vol" in nzf1_name and ".par2" in nzf1_name
    is_par2 = ".vol" in nzf2_name and ".par2" in nzf2_name

    # mini-par2 in front
    if not is_par1 and nzf1_name.endswith(".par2"):
        return -1
    if not is_par2 and nzf2_name.endswith(".par2"):
        return 1

    # vol-pars go to the back
    if is_par1 and not is_par2:
        return 1
    if is_par2 and not is_par1:
        return -1

    # Prioritize .rar files above any other type of file (other than vol-par)
    m1 = RE_RAR.search(nzf1_name)
    m2 = RE_RAR.search(nzf2_name)
    if m1 and not (is_par2 or m2):
        return -1
    elif m2 and not (is_par1 or m1):
        return 1
    # Force .rar to come before 'r00'
    if m1 and m1.group(1) == ".rar":
        nzf1_name = nzf1_name.replace(".rar", ".r//")
    if m2 and m2.group(1) == ".rar":
        nzf2_name = nzf2_name.replace(".rar", ".r//")
    return cmp(nzf1_name, nzf2_name)

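# Illustration only (standalone sketch): the queue order nzf_cmp_name above aims for,
# shown with a simplified comparator over plain, made-up filenames (the real function
# compares NzbFile objects). The same pattern as RE_RAR is redefined locally so the
# sketch is self-contained.
import functools
import re

_RAR_RE = re.compile(r"(\.rar|\.r\d\d|\.s\d\d|\.t\d\d|\.u\d\d|\.v\d\d)$", re.I)

def _cmp_names(n1: str, n2: str) -> int:
    n1, n2 = n1.lower(), n2.lower()
    par1 = ".vol" in n1 and ".par2" in n1
    par2 = ".vol" in n2 and ".par2" in n2
    if not par1 and n1.endswith(".par2"):
        return -1  # mini-par2 in front
    if not par2 and n2.endswith(".par2"):
        return 1
    if par1 and not par2:
        return 1  # vol-pars go to the back
    if par2 and not par1:
        return -1
    m1, m2 = _RAR_RE.search(n1), _RAR_RE.search(n2)
    if m1 and not (par2 or m2):
        return -1  # rars before any other file type
    if m2 and not (par1 or m1):
        return 1
    if m1 and m1.group(1) == ".rar":
        n1 = n1.replace(".rar", ".r//")  # force .rar before .r00
    if m2 and m2.group(1) == ".rar":
        n2 = n2.replace(".rar", ".r//")
    return (n1 > n2) - (n1 < n2)

_example_order = sorted(
    ["movie.r00", "movie.vol07+08.par2", "movie.par2", "movie.rar", "readme.nfo"],
    key=functools.cmp_to_key(_cmp_names),
)
# _example_order == ["movie.par2", "movie.rar", "movie.r00", "readme.nfo", "movie.vol07+08.par2"]
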
def create_work_name(name: str) -> str:
    """Remove ".nzb" and ".par(2)" and sanitize, skip URL's"""
    if name.find("://") < 0:
        # Invalid characters need to be removed before and after (see unit-tests)
        return sanitize_foldername(strip_extensions(sanitize_foldername(name)))
    else:
        return name.strip()

def scan_password(name: str) -> Tuple[str, Optional[str]]:
    """Get password (if any) from the title"""
    if "http://" in name or "https://" in name:
        return name, None

    # Strip any unwanted usenet-related extensions
    name = strip_extensions(name)

    # Identify any braces
    braces = name[1:].find("{{")
    if braces < 0:
        braces = len(name)
    else:
        braces += 1
    slash = name.find("/")

    # Look for name/password, but make sure that '/' comes before any {{
    if 0 < slash < braces and "password=" not in name:
        # Is it maybe in 'name / password' notation?
        if slash == name.find(" / ") + 1 and name[: slash - 1].strip(". "):
            # Remove the extra space after name and before password
            return name[: slash - 1].strip(". "), name[slash + 2 :]
        if name[:slash].strip(". "):
            return name[:slash].strip(". "), name[slash + 1 :]

    # Look for "name password=password"
    pw = name.find("password=")
    if pw > 0 and name[:pw].strip(". "):
        return name[:pw].strip(". "), name[pw + 9 :]

    # Look for name{{password}}
    if braces < len(name):
        closing_braces = name.rfind("}}")
        if closing_braces > braces and name[:braces].strip(". "):
            return name[:braces].strip(". "), name[braces + 2 : closing_braces]

    # Look again for name/password
    if slash > 0 and name[:slash].strip(". "):
        return name[:slash].strip(". "), name[slash + 1 :]

    # No password found
    return name, None

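# Illustration only (hypothetical helper): the notations scan_password above understands,
# using made-up values and assuming strip_extensions() leaves these example names unchanged.
def _scan_password_example():
    assert scan_password("My.Movie/secret") == ("My.Movie", "secret")
    assert scan_password("My.Movie / secret") == ("My.Movie", "secret")
    assert scan_password("My.Movie password=secret") == ("My.Movie", "secret")
    assert scan_password("My.Movie{{secret}}") == ("My.Movie", "secret")
    # URLs are passed through untouched
    assert scan_password("https://example.com/a.nzb") == ("https://example.com/a.nzb", None)
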
def name_extractor(subject: str) -> str:
    """Try to extract a file name from a subject line, return `subject` if in doubt"""
    # Filename nicely wrapped in quotes
    for name in re.findall(RE_SUBJECT_FILENAME_QUOTES, subject):
        if name := name.strip(' "'):
            return name

    # Found nothing? Try a basic filename-like search
    for name in re.findall(RE_SUBJECT_BASIC_FILENAME, subject):
        if name := name.strip():
            return name

    # Return the subject
    return subject
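
# Illustration only (hypothetical helper): what name_extractor above returns for two
# typical, made-up subject lines.
def _name_extractor_example():
    quoted = 'Great.Show - [01/42] - "great.show.s01e01.mkv" yEnc (1/201)'
    assert name_extractor(quoted) == "great.show.s01e01.mkv"  # taken from the double quotes
    plain = "Just a random subject without a filename"
    assert name_extractor(plain) == plain  # nothing filename-like, subject returned as-is
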
|
||||
@@ -25,11 +25,11 @@ import re
|
||||
import struct
|
||||
import sabctools
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
from sabnzbd.constants import MEBI
|
||||
from sabnzbd.encoding import correct_unknown_encoding
|
||||
from sabnzbd.filesystem import get_basename
|
||||
from sabnzbd.filesystem import get_basename, get_ext
|
||||
|
||||
PROBABLY_PAR2_RE = re.compile(r"(.*)\.vol(\d*)[+\-](\d*)\.par2", re.I)
|
||||
SCAN_LIMIT = 10 * MEBI
|
||||
@@ -71,7 +71,7 @@ def is_par2_file(filepath: str) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def analyse_par2(name: str, filepath: Optional[str] = None) -> tuple[str, int, int]:
|
||||
def analyse_par2(name: str, filepath: Optional[str] = None) -> Tuple[str, int, int]:
|
||||
"""Check if file is a par2-file and determine vol/block
|
||||
return setname, vol, block
|
||||
setname is empty when not a par2 file
|
||||
@@ -103,7 +103,7 @@ def analyse_par2(name: str, filepath: Optional[str] = None) -> tuple[str, int, i
|
||||
return setname, vol, block
|
||||
|
||||
|
||||
def parse_par2_file(fname: str, md5of16k: dict[bytes, str]) -> tuple[str, dict[str, FilePar2Info]]:
|
||||
def parse_par2_file(fname: str, md5of16k: Dict[bytes, str]) -> Tuple[str, Dict[str, FilePar2Info]]:
|
||||
"""Get the hash table and the first-16k hash table from a PAR2 file
|
||||
Return as dictionary, indexed on names or hashes for the first-16 table
|
||||
The input md5of16k is modified in place and thus not returned!
|
||||
|
||||
@@ -27,7 +27,7 @@ import re
|
||||
import gc
|
||||
import queue
|
||||
import rarfile
|
||||
from typing import Optional
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.newsunpack import (
|
||||
@@ -39,7 +39,7 @@ from sabnzbd.newsunpack import (
|
||||
rar_sort,
|
||||
is_sfv_file,
|
||||
)
|
||||
from threading import Thread, Event
|
||||
from threading import Thread
|
||||
from sabnzbd.misc import (
|
||||
on_cleanup_list,
|
||||
is_sample,
|
||||
@@ -73,7 +73,7 @@ from sabnzbd.filesystem import (
|
||||
get_ext,
|
||||
get_filename,
|
||||
)
|
||||
from sabnzbd.nzb import NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject
|
||||
from sabnzbd.sorting import Sorter
|
||||
from sabnzbd.constants import (
|
||||
REPAIR_PRIORITY,
|
||||
@@ -107,7 +107,7 @@ class PostProcessor(Thread):
|
||||
super().__init__()
|
||||
|
||||
# This history queue is simply used to log what active items to display in the web_ui
|
||||
self.history_queue: list[NzbObject] = []
|
||||
self.history_queue: List[NzbObject] = []
|
||||
self.load()
|
||||
|
||||
# Fast-queue for jobs already finished by DirectUnpack
|
||||
@@ -116,9 +116,6 @@ class PostProcessor(Thread):
|
||||
# Regular queue for jobs that might need more attention
|
||||
self.slow_queue: queue.Queue[Optional[NzbObject]] = queue.Queue()
|
||||
|
||||
# Event to signal when work is available or state changes
|
||||
self.work_available = Event()
|
||||
|
||||
# Load all old jobs
|
||||
for nzo in self.history_queue:
|
||||
self.process(nzo)
|
||||
@@ -183,9 +180,6 @@ class PostProcessor(Thread):
|
||||
self.save()
|
||||
history_updated()
|
||||
|
||||
# Signal that work is available
|
||||
self.work_available.set()
|
||||
|
||||
def remove(self, nzo: NzbObject):
|
||||
"""Remove given nzo from the queue"""
|
||||
try:
|
||||
@@ -198,22 +192,10 @@ class PostProcessor(Thread):
|
||||
def stop(self):
|
||||
"""Stop thread after finishing running job"""
|
||||
self.__stop = True
|
||||
# Wake up the processor thread to check stop flag
|
||||
self.work_available.set()
|
||||
self.slow_queue.put(None)
|
||||
self.fast_queue.put(None)
|
||||
|
||||
def pause(self):
|
||||
"""Pause post-processing"""
|
||||
self.paused = True
|
||||
logging.info("Pausing post-processing")
|
||||
|
||||
def resume(self):
|
||||
"""Resume post-processing"""
|
||||
self.paused = False
|
||||
logging.info("Resuming post-processing")
|
||||
# Wake up the processor thread
|
||||
self.work_available.set()
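
# Illustration only (standalone sketch, not SABnzbd code): the Event-based wake-up
# pattern that appears in the PostProcessor diff above - the worker sleeps on an Event
# instead of polling with a timeout, and producers wake it on add/resume/stop.
import queue
import threading

class _EventWorker(threading.Thread):
    def __init__(self):
        super().__init__(daemon=True)
        self.jobs: "queue.Queue[object]" = queue.Queue()
        self.work_available = threading.Event()
        self._stop = False

    def add(self, job: object):
        self.jobs.put(job)
        self.work_available.set()  # wake the run() loop

    def stop(self):
        self._stop = True
        self.work_available.set()  # wake it so it can see the stop flag

    def run(self):
        while not self._stop:
            self.work_available.wait()  # no timeout: sleep until woken
            if self._stop:
                break
            if self.jobs.empty():
                self.work_available.clear()  # nothing to do, go back to sleep
                continue
            job = self.jobs.get_nowait()
            print("processing", job)
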
|
||||
|
||||
def cancel_pp(self, nzo_ids: list[str]) -> Optional[bool]:
|
||||
def cancel_pp(self, nzo_ids: List[str]) -> Optional[bool]:
|
||||
"""Abort Direct Unpack and change the status, so that the PP is canceled"""
|
||||
result = None
|
||||
for nzo in self.history_queue:
|
||||
@@ -238,10 +220,10 @@ class PostProcessor(Thread):
|
||||
def get_queue(
|
||||
self,
|
||||
search: Optional[str] = None,
|
||||
categories: Optional[list[str]] = None,
|
||||
statuses: Optional[list[str]] = None,
|
||||
nzo_ids: Optional[list[str]] = None,
|
||||
) -> list[NzbObject]:
|
||||
categories: Optional[List[str]] = None,
|
||||
statuses: Optional[List[str]] = None,
|
||||
nzo_ids: Optional[List[str]] = None,
|
||||
) -> List[NzbObject]:
|
||||
"""Return list of NZOs that still need to be processed.
|
||||
Optionally filtered by the search terms"""
|
||||
re_search = None
|
||||
@@ -283,40 +265,27 @@ class PostProcessor(Thread):
|
||||
while not self.__stop:
|
||||
self.__busy = False
|
||||
|
||||
if self.paused:
|
||||
time.sleep(5)
|
||||
continue
|
||||
|
||||
# Set NzbObject object to None so references from this thread do not keep the
|
||||
# object alive until the next job is added to post-processing (see #1628)
|
||||
nzo = None
|
||||
|
||||
# Wait for work to be available (no timeout!)
|
||||
self.work_available.wait()
|
||||
|
||||
# Check if we should stop
|
||||
if self.__stop:
|
||||
break
|
||||
|
||||
# If paused, clear event and wait for resume
|
||||
if self.paused:
|
||||
self.work_available.clear()
|
||||
continue
|
||||
|
||||
# If queues are empty (spurious wake or race condition), clear and loop back
|
||||
if self.slow_queue.empty() and self.fast_queue.empty():
|
||||
self.work_available.clear()
|
||||
continue
|
||||
|
||||
# Something in the fast queue?
|
||||
try:
|
||||
# Every few fast-jobs we should allow a
|
||||
# Every few fast-jobs we should check allow a
|
||||
# slow job so that they don't wait forever
|
||||
if self.__fast_job_count >= MAX_FAST_JOB_COUNT and self.slow_queue.qsize():
|
||||
raise queue.Empty
|
||||
|
||||
nzo = self.fast_queue.get_nowait()
|
||||
nzo = self.fast_queue.get(timeout=2)
|
||||
self.__fast_job_count += 1
|
||||
except queue.Empty:
|
||||
# Try the slow queue
|
||||
try:
|
||||
nzo = self.slow_queue.get_nowait()
|
||||
nzo = self.slow_queue.get(timeout=2)
|
||||
# Reset fast-counter
|
||||
self.__fast_job_count = 0
|
||||
except queue.Empty:
|
||||
@@ -327,6 +296,10 @@ class PostProcessor(Thread):
|
||||
# No fast or slow jobs, better luck next loop!
|
||||
continue
|
||||
|
||||
# Stop job
|
||||
if not nzo:
|
||||
continue
|
||||
|
||||
# Job was already deleted.
|
||||
if not nzo.work_name:
|
||||
check_eoq = True
|
||||
@@ -355,7 +328,7 @@ class PostProcessor(Thread):
|
||||
self.external_process = None
|
||||
check_eoq = True
|
||||
|
||||
# Allow download to proceed if it was paused for post-processing
|
||||
# Allow download to proceed
|
||||
sabnzbd.Downloader.resume_from_postproc()
|
||||
|
||||
|
||||
@@ -419,13 +392,14 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
par_error = True
|
||||
unpack_error = 1
|
||||
|
||||
script = nzo.script
|
||||
logging.info(
|
||||
"Starting Post-Processing on %s => Repair:%s, Unpack:%s, Delete:%s, Script:%s, Cat:%s",
|
||||
filename,
|
||||
flag_repair,
|
||||
flag_unpack,
|
||||
nzo.delete,
|
||||
nzo.script,
|
||||
script,
|
||||
nzo.cat,
|
||||
)
|
||||
|
||||
@@ -518,10 +492,10 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
|
||||
# Check if this is an NZB-only download, if so redirect to queue
|
||||
# except when PP was Download-only
|
||||
nzb_list = None
|
||||
if flag_repair:
|
||||
nzb_list = process_nzb_only_download(tmp_workdir_complete, nzo)
|
||||
|
||||
nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, nzo.priority)
|
||||
else:
|
||||
nzb_list = None
|
||||
if nzb_list:
|
||||
nzo.set_unpack_info("Download", T("Sent %s to queue") % nzb_list)
|
||||
cleanup_empty_directories(tmp_workdir_complete)
|
||||
@@ -529,10 +503,9 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
# Full cleanup including nzb's
|
||||
cleanup_list(tmp_workdir_complete, skip_nzb=False)
|
||||
|
||||
# No further processing for NZB-only downloads
|
||||
script_ret = 0
|
||||
script_error = False
|
||||
if not nzb_list:
|
||||
script_ret = 0
|
||||
script_error = False
|
||||
# Give destination its final name
|
||||
if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
|
||||
if not all_ok:
|
||||
@@ -584,11 +557,11 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
deobfuscate.deobfuscate_subtitles(nzo, newfiles)
|
||||
|
||||
# Run the user script
|
||||
if script_path := make_script_path(nzo.script):
|
||||
if script_path := make_script_path(script):
|
||||
# Set the current nzo status to "Ext Script...". Used in History
|
||||
nzo.status = Status.RUNNING
|
||||
nzo.set_action_line(T("Running script"), nzo.script)
|
||||
nzo.set_unpack_info("Script", T("Running user script %s") % nzo.script, unique=True)
|
||||
nzo.set_action_line(T("Running script"), script)
|
||||
nzo.set_unpack_info("Script", T("Running user script %s") % script, unique=True)
|
||||
script_log, script_ret = external_processing(
|
||||
script_path, nzo, clip_path(workdir_complete), nzo.final_name, job_result
|
||||
)
|
||||
@@ -601,7 +574,7 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
else:
|
||||
script_line = T("Script exit code is %s") % script_ret
|
||||
elif not script_line:
|
||||
script_line = T("Ran %s") % nzo.script
|
||||
script_line = T("Ran %s") % script
|
||||
nzo.set_unpack_info("Script", script_line, unique=True)
|
||||
|
||||
# Maybe bad script result should fail job
|
||||
@@ -610,30 +583,30 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
all_ok = False
|
||||
nzo.fail_msg = script_line
|
||||
|
||||
# Email the results
|
||||
if cfg.email_endjob():
|
||||
if cfg.email_endjob() == 1 or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
|
||||
emailer.endjob(
|
||||
nzo.final_name,
|
||||
nzo.cat,
|
||||
all_ok,
|
||||
workdir_complete,
|
||||
nzo.bytes_downloaded,
|
||||
nzo.fail_msg,
|
||||
nzo.unpack_info,
|
||||
nzo.script,
|
||||
script_log,
|
||||
script_ret,
|
||||
)
|
||||
|
||||
if script_log and len(script_log.rstrip().split("\n")) > 1:
|
||||
# Can do this only now, otherwise it would show up in the email
|
||||
nzo.set_unpack_info(
|
||||
"Script",
|
||||
'%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_line, nzo.nzo_id, T("More")),
|
||||
unique=True,
|
||||
# Email the results
|
||||
if not nzb_list and cfg.email_endjob():
|
||||
if cfg.email_endjob() == 1 or (cfg.email_endjob() == 2 and (unpack_error or par_error or script_error)):
|
||||
emailer.endjob(
|
||||
nzo.final_name,
|
||||
nzo.cat,
|
||||
all_ok,
|
||||
workdir_complete,
|
||||
nzo.bytes_downloaded,
|
||||
nzo.fail_msg,
|
||||
nzo.unpack_info,
|
||||
script,
|
||||
script_log,
|
||||
script_ret,
|
||||
)
|
||||
|
||||
if script_log and len(script_log.rstrip().split("\n")) > 1:
|
||||
# Can do this only now, otherwise it would show up in the email
|
||||
nzo.set_unpack_info(
|
||||
"Script",
|
||||
'%s <a href="./scriptlog?name=%s">(%s)</a>' % (script_line, nzo.nzo_id, T("More")),
|
||||
unique=True,
|
||||
)
|
||||
|
||||
# Cleanup again, including NZB files
|
||||
if all_ok and os.path.isdir(workdir_complete):
|
||||
cleanup_list(workdir_complete, False)
|
||||
@@ -720,7 +693,7 @@ def process_job(nzo: NzbObject) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
def prepare_extraction_path(nzo: NzbObject) -> tuple[str, str, Sorter, bool, Optional[str]]:
|
||||
def prepare_extraction_path(nzo: NzbObject) -> Tuple[str, str, Sorter, bool, Optional[str]]:
|
||||
"""Based on the information that we have, generate
|
||||
the extraction path and create the directory.
|
||||
Separated so it can be called from DirectUnpacker
|
||||
@@ -784,7 +757,7 @@ def prepare_extraction_path(nzo: NzbObject) -> tuple[str, str, Sorter, bool, Opt
|
||||
return tmp_workdir_complete, workdir_complete, file_sorter, not create_job_dir, marker_file
|
||||
|
||||
|
||||
def parring(nzo: NzbObject) -> tuple[bool, bool]:
|
||||
def parring(nzo: NzbObject) -> Tuple[bool, bool]:
|
||||
"""Perform par processing. Returns: (par_error, re_add)"""
|
||||
logging.info("Starting verification and repair of %s", nzo.final_name)
|
||||
par_error = False
|
||||
@@ -903,7 +876,7 @@ def try_sfv_check(nzo: NzbObject) -> Optional[bool]:
|
||||
return True
|
||||
|
||||
|
||||
def try_rar_check(nzo: NzbObject, rars: list[str]) -> bool:
|
||||
def try_rar_check(nzo: NzbObject, rars: List[str]) -> bool:
|
||||
"""Attempt to verify set using the RARs
|
||||
Return True if verified, False when failed
|
||||
When setname is '', all RAR files will be used, otherwise only the matching one
|
||||
@@ -1159,36 +1132,34 @@ def prefix(path: str, pre: str) -> str:
|
||||
return os.path.join(p, pre + d)
|
||||
|
||||
|
||||
def process_nzb_only_download(workdir: str, nzo: NzbObject) -> Optional[list[str]]:
|
||||
def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
|
||||
"""Check if this job contains only NZB files,
|
||||
if so send to queue and remove if on clean-up list
|
||||
Returns list of processed NZB's
|
||||
"""
|
||||
if files := listdir_full(workdir):
|
||||
for nzb_file in files:
|
||||
if get_ext(nzb_file) != ".nzb":
|
||||
return None
|
||||
files = listdir_full(wdir)
|
||||
|
||||
# Process all NZB files
|
||||
new_nzbname = nzo.final_name
|
||||
for nzb_file in files:
|
||||
# Determine name based on number of files
|
||||
nzb_filename = get_filename(nzb_file)
|
||||
if len(files) > 1:
|
||||
new_nzbname = f"{nzo.final_name} - {nzb_filename}"
|
||||
for nzb_file in files:
|
||||
if get_ext(nzb_file) != ".nzb":
|
||||
return None
|
||||
|
||||
process_single_nzb(
|
||||
nzb_filename,
|
||||
nzb_file,
|
||||
pp=nzo.pp,
|
||||
script=nzo.script,
|
||||
cat=nzo.cat,
|
||||
url=nzo.url,
|
||||
priority=nzo.priority,
|
||||
nzbname=new_nzbname,
|
||||
dup_check=False,
|
||||
)
|
||||
return files
|
||||
# For multiple NZBs, cannot use the current job name
|
||||
if len(files) != 1:
|
||||
nzbname = None
|
||||
|
||||
# Process all NZB files
|
||||
for nzb_file in files:
|
||||
process_single_nzb(
|
||||
get_filename(nzb_file),
|
||||
nzb_file,
|
||||
pp=pp,
|
||||
script=script,
|
||||
cat=cat,
|
||||
priority=priority,
|
||||
dup_check=False,
|
||||
nzbname=nzbname,
|
||||
)
|
||||
return files
|
||||
|
||||
|
||||
def one_file_or_folder(folder: str) -> str:
|
||||
@@ -1250,7 +1221,7 @@ def remove_samples(path: str):
|
||||
logging.info("Skipping sample-removal, false-positive")
|
||||
|
||||
|
||||
def rename_and_collapse_folder(oldpath: str, newpath: str, files: list[str]) -> list[str]:
|
||||
def rename_and_collapse_folder(oldpath: str, newpath: str, files: List[str]) -> List[str]:
|
||||
"""Rename folder, collapsing when there's just a single subfolder
|
||||
oldpath --> newpath OR oldpath/subfolder --> newpath
|
||||
Modify list of filenames accordingly
|
||||
@@ -1302,7 +1273,7 @@ def del_marker(path: str):
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
|
||||
|
||||
def remove_from_list(name: Optional[str], lst: list[str]):
|
||||
def remove_from_list(name: Optional[str], lst: List[str]):
|
||||
if name:
|
||||
for n in range(len(lst)):
|
||||
if lst[n].endswith(name):
|
||||
|
||||
183  sabnzbd/rss.py
@@ -25,8 +25,6 @@ import time
|
||||
import datetime
|
||||
import threading
|
||||
import urllib.parse
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Union, Optional, Iterator
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import RSS_FILE_NAME, DEFAULT_PRIORITY
|
||||
@@ -144,7 +142,43 @@ class RSSReader:
|
||||
return T('Incorrect RSS feed description "%s"') % feed
|
||||
|
||||
uris = feeds.uri()
|
||||
filters = prepare_feed(feeds)
|
||||
defCat = feeds.cat()
|
||||
|
||||
if not notdefault(defCat) or defCat not in sabnzbd.api.list_cats(default=False):
|
||||
defCat = None
|
||||
defPP = feeds.pp()
|
||||
if not notdefault(defPP):
|
||||
defPP = None
|
||||
defScript = feeds.script()
|
||||
if not notdefault(defScript):
|
||||
defScript = None
|
||||
defPrio = feeds.priority()
|
||||
if not notdefault(defPrio):
|
||||
defPrio = None
|
||||
|
||||
# Preparations, convert filters to regexes
|
||||
regexes = []
|
||||
reTypes = []
|
||||
reCats = []
|
||||
rePPs = []
|
||||
rePrios = []
|
||||
reScripts = []
|
||||
reEnabled = []
|
||||
for feed_filter in feeds.filters():
|
||||
reCat = feed_filter[0]
|
||||
if defCat in ("", "*"):
|
||||
reCat = None
|
||||
reCats.append(reCat)
|
||||
rePPs.append(feed_filter[1])
|
||||
reScripts.append(feed_filter[2])
|
||||
reTypes.append(feed_filter[3])
|
||||
if feed_filter[3] in ("<", ">", "F", "S"):
|
||||
regexes.append(feed_filter[4])
|
||||
else:
|
||||
regexes.append(convert_filter(feed_filter[4]))
|
||||
rePrios.append(feed_filter[5])
|
||||
reEnabled.append(feed_filter[6] != "0")
|
||||
regcount = len(regexes)
|
||||
|
||||
# Set first if this is the very first scan of this URI
|
||||
first = (feed not in self.jobs) and ignoreFirst
|
||||
@@ -268,71 +302,71 @@ class RSSReader:
|
||||
# Match this title against all filters
|
||||
logging.debug("Trying title %s", title)
|
||||
result = False
|
||||
myCat = filters.default_category
|
||||
myPP = filters.default_pp
|
||||
myScript = filters.default_script
|
||||
myPrio = filters.default_priority
|
||||
myCat = defCat
|
||||
myPP = defPP
|
||||
myScript = defScript
|
||||
myPrio = defPrio
|
||||
n = 0
|
||||
if filters.has_type("F", "S") and (not season or not episode):
|
||||
if ("F" in reTypes or "S" in reTypes) and (not season or not episode):
|
||||
show_analysis = sabnzbd.sorting.BasicAnalyzer(title)
|
||||
season = show_analysis.info.get("season_num")
|
||||
episode = show_analysis.info.get("episode_num")
|
||||
|
||||
# Match against all filters until an positive or negative match
|
||||
logging.debug("Size %s", size)
|
||||
for rule in filters:
|
||||
if rule.enabled:
|
||||
if category and rule.type == "C":
|
||||
found = re.search(rule.regex, category)
|
||||
for n in range(regcount):
|
||||
if reEnabled[n]:
|
||||
if category and reTypes[n] == "C":
|
||||
found = re.search(regexes[n], category)
|
||||
if not found:
|
||||
logging.debug("Filter rejected on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
elif rule.type == "<" and size and from_units(rule.regex) < size:
|
||||
elif reTypes[n] == "<" and size and from_units(regexes[n]) < size:
|
||||
# "Size at most" : too large
|
||||
logging.debug("Filter rejected on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
elif rule.type == ">" and size and from_units(rule.regex) > size:
|
||||
elif reTypes[n] == ">" and size and from_units(regexes[n]) > size:
|
||||
# "Size at least" : too small
|
||||
logging.debug("Filter rejected on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
elif rule.type == "F" and not ep_match(season, episode, rule.regex):
|
||||
elif reTypes[n] == "F" and not ep_match(season, episode, regexes[n]):
|
||||
# "Starting from SxxEyy", too early episode
|
||||
logging.debug("Filter requirement match on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
elif rule.type == "S" and ep_match(season, episode, rule.regex, title):
|
||||
elif reTypes[n] == "S" and ep_match(season, episode, regexes[n], title):
|
||||
logging.debug("Filter matched on rule %d", n)
|
||||
result = True
|
||||
break
|
||||
else:
|
||||
if rule.regex:
|
||||
found = re.search(rule.regex, title)
|
||||
if regexes[n]:
|
||||
found = re.search(regexes[n], title)
|
||||
else:
|
||||
found = False
|
||||
if rule.type == "M" and not found:
|
||||
if reTypes[n] == "M" and not found:
|
||||
logging.debug("Filter rejected on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
if found and rule.type == "A":
|
||||
if found and reTypes[n] == "A":
|
||||
logging.debug("Filter matched on rule %d", n)
|
||||
result = True
|
||||
break
|
||||
if found and rule.type == "R":
|
||||
if found and reTypes[n] == "R":
|
||||
logging.debug("Filter rejected on rule %d", n)
|
||||
result = False
|
||||
break
|
||||
|
||||
if filters and (rule := filters.rules[-1]):
|
||||
if not result and filters.default_category:
|
||||
if len(reCats):
|
||||
if not result and defCat:
|
||||
# Apply Feed-category on non-matched items
|
||||
myCat = filters.default_category
|
||||
elif result and notdefault(rule.category):
|
||||
myCat = defCat
|
||||
elif result and notdefault(reCats[n]):
|
||||
# Use the matched info
|
||||
myCat = rule.category
|
||||
elif category and not filters.default_category:
|
||||
myCat = reCats[n]
|
||||
elif category and not defCat:
|
||||
# No result and no Feed-category
|
||||
myCat = cat_convert(category)
|
||||
|
||||
@@ -340,17 +374,17 @@ class RSSReader:
|
||||
myCat, catPP, catScript, catPrio = cat_to_opts(myCat)
|
||||
else:
|
||||
myCat = catPP = catScript = catPrio = None
|
||||
if notdefault(rule.pp):
|
||||
myPP = rule.pp
|
||||
elif not (rule.category or category):
|
||||
if notdefault(rePPs[n]):
|
||||
myPP = rePPs[n]
|
||||
elif not (reCats[n] or category):
|
||||
myPP = catPP
|
||||
if notdefault(rule.script):
|
||||
myScript = rule.script
|
||||
elif not (notdefault(rule.category) or category):
|
||||
if notdefault(reScripts[n]):
|
||||
myScript = reScripts[n]
|
||||
elif not (notdefault(reCats[n]) or category):
|
||||
myScript = catScript
|
||||
if rule.priority not in (str(DEFAULT_PRIORITY), ""):
|
||||
myPrio = rule.priority
|
||||
elif not ((rule.priority != str(DEFAULT_PRIORITY)) or category):
|
||||
if rePrios[n] not in (str(DEFAULT_PRIORITY), ""):
|
||||
myPrio = rePrios[n]
|
||||
elif not ((rePrios[n] != str(DEFAULT_PRIORITY)) or category):
|
||||
myPrio = catPrio
|
||||
|
||||
act = download and not first
|
||||
@@ -498,83 +532,6 @@ class RSSReader:
|
||||
self.jobs[feed][item]["status"] = "D-"
|
||||
|
||||
|
||||
@dataclass
|
||||
class FeedRule:
|
||||
regex: Union[str, re.Pattern]
|
||||
type: str
|
||||
category: Optional[str] = None
|
||||
pp: Optional[str] = None
|
||||
priority: Optional[int] = None
|
||||
script: Optional[str] = None
|
||||
enabled: bool = True
|
||||
|
||||
def __post_init__(self):
|
||||
# Convert regex if needed
|
||||
if self.type not in {"<", ">", "F", "S"}:
|
||||
self.regex = convert_filter(self.regex)
|
||||
|
||||
|
||||
@dataclass
|
||||
class FeedConfig:
|
||||
default_category: Optional[str] = None
|
||||
default_pp: Optional[str] = None
|
||||
default_script: Optional[str] = None
|
||||
default_priority: Optional[int] = None
|
||||
rules: list[FeedRule] = field(default_factory=list)
|
||||
|
||||
def __post_init__(self):
|
||||
# Normalise categories for all rules automatically
|
||||
if self.default_category in ("", "*"):
|
||||
for rule in self.rules:
|
||||
rule.category = None
|
||||
|
||||
def __bool__(self):
|
||||
return bool(self.rules) # True if there are any rules
|
||||
|
||||
def __iter__(self) -> Iterator[FeedRule]:
|
||||
"""Allow iteration directly over FeedConfig to access rules."""
|
||||
return iter(self.rules)
|
||||
|
||||
def has_type(self, *types: str) -> bool:
|
||||
"""Check if any rule matches the given types"""
|
||||
return any(rule.type in types for rule in self.rules)
|
||||
|
||||
|
||||
def prepare_feed(c: config.ConfigRSS) -> FeedConfig:
|
||||
def normalise_default(value):
|
||||
return value if notdefault(value) else None
|
||||
|
||||
default_category = normalise_default(c.cat())
|
||||
if default_category not in sabnzbd.api.list_cats(default=False):
|
||||
default_category = None
|
||||
default_pp = normalise_default(c.pp())
|
||||
default_script = normalise_default(c.script())
|
||||
default_priority = normalise_default(c.priority())
|
||||
|
||||
# Preparations, convert filters to regex's
|
||||
rules: list[FeedRule] = []
|
||||
for cat, pp, script, ftype, regex, priority, enabled in c.filters():
|
||||
rules.append(
|
||||
FeedRule(
|
||||
regex=regex,
|
||||
type=ftype,
|
||||
category=cat,
|
||||
pp=pp,
|
||||
priority=priority,
|
||||
script=script,
|
||||
enabled=(enabled != "0"),
|
||||
)
|
||||
)
|
||||
|
||||
return FeedConfig(
|
||||
default_category=default_category,
|
||||
default_pp=default_pp,
|
||||
default_script=default_script,
|
||||
default_priority=default_priority,
|
||||
rules=rules,
|
||||
)
|
||||
|
||||
|
||||
def patch_feedparser():
|
||||
"""Apply options that work for SABnzbd
|
||||
Add additional parsing of attributes
|
||||
|
||||
@@ -337,11 +337,7 @@ class Scheduler:
sabnzbd.downloader.unpause_all()
sabnzbd.Downloader.set_paused_state(paused or paused_all)

# Handle pause_post state with proper notification
if pause_post and not sabnzbd.PostProcessor.paused:
sabnzbd.PostProcessor.pause()
elif not pause_post and sabnzbd.PostProcessor.paused:
sabnzbd.PostProcessor.resume()
sabnzbd.PostProcessor.paused = pause_post
if speedlimit is not None:
sabnzbd.Downloader.limit_speed(speedlimit)

@@ -510,11 +506,11 @@ def sort_schedules(all_events, now=None):


def pp_pause():
sabnzbd.PostProcessor.pause()
sabnzbd.PostProcessor.paused = True


def pp_resume():
sabnzbd.PostProcessor.resume()
sabnzbd.PostProcessor.paused = False


def enable_server(server):
@@ -442,7 +442,7 @@ SKIN_TEXT = {
"Select a mode and list all (un)wanted extensions. For example: <b>exe</b> or <b>exe, com</b>"
),
"opt-sfv_check": TT("Enable SFV-based checks"),
"explain-sfv_check": TT("If no par2 files are available, use sfv files (if present) to verify files"),
"explain-sfv_check": TT("Do an extra verification based on SFV files."),
"opt-script_can_fail": TT("User script can flag job as failed"),
"explain-script_can_fail": TT(
"When the user script returns a non-zero exit code, the job will be flagged as failed."
@@ -574,11 +574,6 @@ SKIN_TEXT = {
"For unreliable servers, will be ignored longer in case of failures"
), #: Explain server optional tickbox
"srv-enable": TT("Enable"), #: Enable server tickbox
"srv-pipelining_requests": TT("Articles per request"),
"explain-pipelining_requests": TT(
"Request multiple articles per connection without waiting for each response first.<br />"
"This can improve download speeds, especially on connections with higher latency."
),
"button-addServer": TT("Add Server"), #: Button: Add server
"button-delServer": TT("Remove Server"), #: Button: Remove server
"button-testServer": TT("Test Server"), #: Button: Test server
@@ -691,15 +686,10 @@ SKIN_TEXT = {
"explain-pushbullet_device": TT("Device to which message should be sent"), #: Pushbullet settings
"opt-apprise_enable": TT("Enable Apprise notifications"), #: Apprise settings
"explain-apprise_enable": TT(
"Send notifications directly to any notification service you use.<br>"
"For example: Slack, Discord, Telegram, or any service from over 100 supported services!"
), #: Apprise settings
"opt-apprise_urls": TT("Use default Apprise URLs"), #: Apprise settings
"explain-apprise_urls": TT(
"Apprise defines service connection information using URLs.<br>"
"Read the Apprise wiki how to define the URL for each service.<br>"
"Use a comma and/or space to identify more than one URL."
"Send notifications using Apprise to almost any notification service"
), #: Apprise settings
"opt-apprise_urls": TT("Default Apprise URLs"), #: Apprise settings
"explain-apprise_urls": TT("Use a comma and/or space to identify more than one URL."), #: Apprise settings
"explain-apprise_extra_urls": TT(
"Override the default URLs for specific notification types below, if desired."
), #: Apprise settings
@@ -904,7 +894,6 @@ SKIN_TEXT = {
"Glitter-notification-removing1": TT("Removing job"), # Notification window
"Glitter-notification-removing": TT("Removing jobs"), # Notification window
"Glitter-notification-shutdown": TT("Shutting down"), # Notification window
"Glitter-notification-upload-failed": TT("Failed to upload file: %s"), # Notification window
# Wizard
"wizard-quickstart": TT("SABnzbd Quick-Start Wizard"),
"wizard-version": TT("SABnzbd Version"),

@@ -25,7 +25,7 @@ import re
|
||||
import guessit
|
||||
from rebulk.match import MatchesDict
|
||||
from string import whitespace, punctuation
|
||||
from typing import Optional, Union
|
||||
from typing import Optional, Union, List, Tuple, Dict
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.filesystem import (
|
||||
@@ -47,8 +47,7 @@ from sabnzbd.constants import (
|
||||
GUESSIT_SORT_TYPES,
|
||||
)
|
||||
from sabnzbd.misc import is_sample, from_units, sort_to_opts
|
||||
from sabnzbd.misc import scan_password
|
||||
from sabnzbd.nzb import NzbObject
|
||||
from sabnzbd.nzbstuff import NzbObject, scan_password
|
||||
|
||||
# Do not rename .vob files as they are usually DVD's
|
||||
EXCLUDED_FILE_EXTS = (".vob", ".bin")
|
||||
@@ -180,7 +179,7 @@ class Sorter:
|
||||
self.get_showdescriptions()
|
||||
self.get_date()
|
||||
|
||||
def format_series_numbers(self, numbers: Union[int, list[int]], info_name: str):
|
||||
def format_series_numbers(self, numbers: Union[int, List[int]], info_name: str):
|
||||
"""Format the numbers in both plain and alternative (zero-padded) format and set as showinfo"""
|
||||
# Guessit returns multiple episodes or seasons as a list of integers, single values as int
|
||||
if isinstance(numbers, int):
|
||||
@@ -284,7 +283,7 @@ class Sorter:
|
||||
if ends_in_file(sort_string):
|
||||
extension = True
|
||||
if sort_string.endswith(".%ext"):
|
||||
sort_string = sort_string.removesuffix(".%ext") # Strip '.%ext' off the end; other %ext may remain
|
||||
sort_string = sort_string[:-5] # Strip '.%ext' off the end; other %ext may remain in sort_string
|
||||
if self.is_season_pack:
|
||||
# Create a record of the filename part of the sort_string
|
||||
_, self.season_pack_setname = os.path.split(sort_string)
|
||||
@@ -418,7 +417,7 @@ class Sorter:
|
||||
# The normpath function translates "" to "." which results in an incorrect path
|
||||
return os.path.normpath(path) if path else path
|
||||
|
||||
def _rename_season_pack(self, files: list[str], base_path: str, all_job_files: list[str] = []) -> bool:
|
||||
def _rename_season_pack(self, files: List[str], base_path: str, all_job_files: List[str] = []) -> bool:
|
||||
success = False
|
||||
for f in files:
|
||||
f_name, f_ext = os.path.splitext(os.path.basename(f))
|
||||
@@ -477,7 +476,7 @@ class Sorter:
|
||||
)
|
||||
return success
|
||||
|
||||
def _rename_sequential(self, sequential_files: dict[str, str], base_path: str) -> bool:
|
||||
def _rename_sequential(self, sequential_files: Dict[str, str], base_path: str) -> bool:
|
||||
success = False
|
||||
for index, f in sequential_files.items():
|
||||
filepath = self._to_filepath(f, base_path)
|
||||
@@ -516,7 +515,7 @@ class Sorter:
|
||||
and os.stat(filepath).st_size >= self.rename_limit
|
||||
)
|
||||
|
||||
def rename(self, files: list[str], base_path: str) -> tuple[str, bool]:
|
||||
def rename(self, files: List[str], base_path: str) -> Tuple[str, bool]:
|
||||
if not self.rename_files:
|
||||
return move_to_parent_directory(base_path)
|
||||
|
||||
@@ -608,7 +607,7 @@ def ends_in_file(path: str) -> bool:
|
||||
return bool(RE_ENDEXT.search(path) or RE_ENDFN.search(path))
|
||||
|
||||
|
||||
def move_to_parent_directory(workdir: str) -> tuple[str, bool]:
|
||||
def move_to_parent_directory(workdir: str) -> Tuple[str, bool]:
|
||||
"""Move all files under 'workdir' into 'workdir/..'"""
|
||||
# Determine 'folder'/..
|
||||
workdir = os.path.abspath(os.path.normpath(workdir))
|
||||
@@ -659,7 +658,7 @@ def guess_what(name: str) -> MatchesDict:
|
||||
|
||||
if digit_fix:
|
||||
# Unfix the title
|
||||
guess["title"] = guess.get("title", "").removeprefix(digit_fix)
|
||||
guess["title"] = guess.get("title", "")[len(digit_fix) :]
|
||||
|
||||
# Handle weird anime episode notation, that results in the episode number ending up as the episode title
|
||||
if (
|
||||
@@ -697,7 +696,7 @@ def guess_what(name: str) -> MatchesDict:
|
||||
return guess
|
||||
|
||||
|
||||
def path_subst(path: str, mapping: list[tuple[str, str]]) -> str:
|
||||
def path_subst(path: str, mapping: List[Tuple[str, str]]) -> str:
|
||||
"""Replace the sort string elements in the path with the real values provided by the mapping;
|
||||
non-elements are copied verbatim."""
|
||||
# Added ugly hack to prevent %ext from being masked by %e
|
||||
@@ -720,7 +719,7 @@ def path_subst(path: str, mapping: list[tuple[str, str]]) -> str:
|
||||
|
||||
def get_titles(
|
||||
nzo: Optional[NzbObject], guess: Optional[MatchesDict], jobname: str, titleing: bool = False
|
||||
) -> tuple[str, str, str]:
|
||||
) -> Tuple[str, str, str]:
|
||||
"""Get the title from NZB metadata or jobname, and return it in various formats. Formatting
|
||||
mostly deals with working around quirks of Python's str.title(). NZB metadata is used as-is,
|
||||
further processing done only for info obtained from guessit or the jobname."""
|
||||
@@ -780,7 +779,7 @@ def replace_word(word_input: str, one: str, two: str) -> str:
|
||||
return word_input
|
||||
|
||||
|
||||
def get_descriptions(nzo: Optional[NzbObject], guess: Optional[MatchesDict]) -> tuple[str, str, str]:
|
||||
def get_descriptions(nzo: Optional[NzbObject], guess: Optional[MatchesDict]) -> Tuple[str, str, str]:
|
||||
"""Try to get an episode title or similar description from the NZB metadata or jobname, e.g.
|
||||
'Download This' in Show.S01E23.Download.This.1080p.HDTV.x264 and return multiple formats"""
|
||||
ep_name = None
|
||||
@@ -837,7 +836,7 @@ def strip_path_elements(path: str) -> str:
|
||||
return "\\\\" + path if is_unc else path
|
||||
|
||||
|
||||
def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: Optional[list[str]] = None):
|
||||
def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: Optional[List[str]] = None):
|
||||
"""Rename all other files in the 'folder' hierarchy after 'name'
|
||||
and move them to the root of 'folder'.
|
||||
Files having extension 'skip_ext' will be moved, but not renamed.
|
||||
@@ -922,7 +921,7 @@ def eval_sort(sort_string: str, job_name: str, multipart_label: str = "") -> Opt
|
||||
return sorted_path
|
||||
|
||||
|
||||
def check_for_multiple(files: list[str]) -> Optional[dict[str, str]]:
|
||||
def check_for_multiple(files: List[str]) -> Optional[Dict[str, str]]:
|
||||
"""Return a dictionary of a single set of files that look like parts of
|
||||
a multi-part post. Takes a limited set of indicators from guessit into
|
||||
consideration and only accepts numerical sequences. The files argument
|
||||
|
||||
@@ -32,7 +32,7 @@ from http.client import IncompleteRead, HTTPResponse
from mailbox import Message
from threading import Thread
import base64
from typing import Optional, Union, Any
from typing import Tuple, Optional, Union, List, Dict, Any

import sabnzbd
from sabnzbd.constants import (
@@ -51,13 +51,13 @@ import sabnzbd.notifier as notifier
from sabnzbd.decorators import NZBQUEUE_LOCK
from sabnzbd.encoding import ubtou, utob
from sabnzbd.nzbparser import AddNzbFileResult
from sabnzbd.nzb import NzbObject
from sabnzbd.nzbstuff import NzbObject, NzbRejected, NzbRejectToHistory


class URLGrabber(Thread):
def __init__(self):
super().__init__()
self.queue: queue.Queue[tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
self.queue: queue.Queue[Tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
self.shutdown = False

def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
@@ -417,9 +417,9 @@ def add_url(
priority: Optional[Union[int, str]] = None,
nzbname: Optional[str] = None,
password: Optional[str] = None,
nzo_info: Optional[dict[str, Any]] = None,
nzo_info: Optional[Dict[str, Any]] = None,
dup_check: bool = True,
) -> tuple[AddNzbFileResult, list[str]]:
) -> Tuple[AddNzbFileResult, List[str]]:
"""Add NZB based on a URL, attributes optional"""
if not url.lower().startswith("http"):
return AddNzbFileResult.NO_FILES_FOUND, []
@@ -7,9 +7,10 @@ Functions to check if the path filesystem uses FAT
import sys
import os
import subprocess
from typing import List


def getcmdoutput(cmd: list[str]) -> list[str]:
def getcmdoutput(cmd: List[str]) -> List[str]:
"""execute cmd, and return a list of output lines"""
subprocess_kwargs = {
"bufsize": 0,
@@ -7,7 +7,7 @@ import sys
import logging
import time

BUFFERSIZE = 16 * 1024 * 1024
_DUMP_DATA_SIZE = 10 * 1024 * 1024


def diskspeedmeasure(dirname: str) -> float:
@@ -16,57 +16,39 @@ def diskspeedmeasure(dirname: str) -> float:
Then divide bytes written by time passed
In case of problems (ie non-writable dir or file), return 0.0
"""
maxtime = 1 # sec
dump_data = os.urandom(_DUMP_DATA_SIZE)
start = time.time()
maxtime = 0.5 # sec
total_written = 0
filename = os.path.join(dirname, "outputTESTING.txt")

# Prepare the whole buffer now for better write performance later
# This is done before timing starts to exclude buffer creation from measurement
buffer = os.urandom(BUFFERSIZE)

try:
# Use low-level I/O
fp_testfile = os.open(
filename,
os.O_CREAT | os.O_WRONLY | getattr(os, "O_BINARY", 0) | getattr(os, "O_SYNC", 0),
0o777,
)

overall_start = time.perf_counter()
maxtime = overall_start + 1
total_time = 0.0
try:
fp_testfile = os.open(filename, os.O_CREAT | os.O_WRONLY | os.O_BINARY, 0o777)
except AttributeError:
fp_testfile = os.open(filename, os.O_CREAT | os.O_WRONLY, 0o777)

# Start looping
for i in range(1, 5):
# Stop writing next buffer block, if time exceeds limit
if time.perf_counter() >= maxtime:
break
# Prepare the data chunk outside of timing
data_chunk = buffer * (i**2)

# Only measure the actual write and sync operations
write_start = time.perf_counter()
total_written += os.write(fp_testfile, data_chunk)
total_time = 0.0
while total_time < maxtime:
start = time.time()
os.write(fp_testfile, dump_data)
os.fsync(fp_testfile)
total_time += time.perf_counter() - write_start
total_time += time.time() - start
total_written += _DUMP_DATA_SIZE

# Have to use low-level close
os.close(fp_testfile)
# Remove the file
os.remove(filename)

except OSError:
# Could not write, so ... report 0.0
logging.debug("Failed to measure disk speed on %s", dirname)
return 0.0

megabyte_per_second = round(total_written / total_time / 1024 / 1024, 1)
logging.debug(
"Disk speed of %s = %.2f MB/s (in %.2f seconds)",
dirname,
megabyte_per_second,
time.perf_counter() - overall_start,
)
logging.debug("Disk speed of %s = %.2f MB/s (in %.2f seconds)", dirname, megabyte_per_second, time.time() - start)
return megabyte_per_second


@@ -86,7 +68,7 @@ if __name__ == "__main__":
try:
SPEED = max(diskspeedmeasure(DIRNAME), diskspeedmeasure(DIRNAME))
if SPEED:
print("Disk writing speed: %.2f MBytes per second" % SPEED)
print("Disk writing speed: %.2f Mbytes per second" % SPEED)
else:
print("No measurement possible. Check that directory is writable.")
except Exception:
@@ -8,6 +8,7 @@ Note: extension always contains a leading dot
import puremagic
import os
import sys
from typing import List, Tuple
from sabnzbd.filesystem import get_ext, RAR_RE
import sabnzbd.cfg as cfg

@@ -259,7 +260,7 @@ ALL_EXT = tuple(set(POPULAR_EXT + DOWNLOAD_EXT))
ALL_EXT = tuple(["." + i for i in ALL_EXT])


def all_extensions() -> tuple[str, ...]:
def all_extensions() -> Tuple[str, ...]:
"""returns tuple with ALL (standard + userdef) extensions (including leading dot in extension)"""
user_defined_extensions = tuple(["." + i for i in cfg.ext_rename_ignore()])
return ALL_EXT + user_defined_extensions
@@ -271,7 +272,7 @@ def has_popular_extension(file_path: str) -> bool:
return file_extension in all_extensions() or RAR_RE.match(file_extension)


def all_possible_extensions(file_path: str) -> list[str]:
def all_possible_extensions(file_path: str) -> List[str]:
"""returns a list with all possible extensions (with leading dot) for given file_path as reported by puremagic"""
extension_list = []
for i in puremagic.magic_file(file_path):
@@ -19,7 +19,9 @@
"""
sabnzbd.utils.rarvolinfo - Find out volume number and/or original extension of a rar file. Useful with obfuscated files
"""
import logging
import os

import rarfile

@@ -161,7 +161,6 @@ class SysTrayIconThread(Thread):
pass

def restart(self, hwnd, msg, wparam, lparam):
self.notify_id = None
self.refresh_icon()
return True

@@ -6,5 +6,5 @@
# You MUST use double quotes (so " and not ')
# Do not forget to update the appdata file for every major release!

__version__ = "4.6.0Beta2"
__baseline__ = "unknown"
__version__ = "4.5.5"
__baseline__ = "a61a5539a7e0e0dc1f9ae140222436ba8f9fe679"

@@ -27,6 +27,7 @@ from selenium import webdriver
from selenium.webdriver.chrome.options import Options as ChromeOptions
from warnings import warn

from sabnzbd.constants import DEF_INI_FILE
from tests.testhelper import *

@@ -45,38 +45,32 @@ ARTICLE_INFO = re.compile(
|
||||
YENC_ESCAPE = [0x00, 0x0A, 0x0D, ord("="), ord(".")]
|
||||
|
||||
|
||||
class NewsServerSession:
|
||||
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
|
||||
self.reader = reader
|
||||
self.writer = writer
|
||||
class NewsServerProtocol(asyncio.Protocol):
|
||||
def __init__(self):
|
||||
self.transport = None
|
||||
self.connected = False
|
||||
self.in_article = False
|
||||
super().__init__()
|
||||
|
||||
async def run(self):
|
||||
self.writer.write(b"200 Welcome (SABNews)\r\n")
|
||||
await self.writer.drain()
|
||||
def connection_made(self, transport):
|
||||
logging.info("Connection from %s", transport.get_extra_info("peername"))
|
||||
self.transport = transport
|
||||
self.connected = True
|
||||
self.transport.write(b"200 Welcome (SABNews)\r\n")
|
||||
|
||||
try:
|
||||
while not self.reader.at_eof():
|
||||
message = await self.reader.readuntil(b"\r\n")
|
||||
logging.debug("Data received: %s", message.strip())
|
||||
await self.handle_command(message)
|
||||
except (ConnectionResetError, asyncio.IncompleteReadError):
|
||||
logging.debug("Client closed connection")
|
||||
def data_received(self, message):
|
||||
logging.debug("Data received: %s", message.strip())
|
||||
|
||||
async def handle_command(self, message: bytes):
|
||||
"""Handle basic NNTP commands, \r\n is already stripped."""
|
||||
# Handle basic commands
|
||||
if message.startswith(b"QUIT"):
|
||||
await self.close_connection()
|
||||
return
|
||||
|
||||
if message.startswith((b"ARTICLE", b"BODY")):
|
||||
self.close_connection()
|
||||
elif message.startswith((b"ARTICLE", b"BODY")):
|
||||
parsed_message = ARTICLE_INFO.search(message)
|
||||
await self.serve_article(parsed_message)
|
||||
return
|
||||
self.serve_article(parsed_message)
|
||||
|
||||
self.writer.write(b"500 Unknown command\r\n")
|
||||
await self.writer.drain()
|
||||
# self.transport.write(data)
|
||||
|
||||
async def serve_article(self, parsed_message):
|
||||
def serve_article(self, parsed_message):
|
||||
# Check if we parsed everything
|
||||
try:
|
||||
message_id = parsed_message.group("message_id")
|
||||
@@ -87,37 +81,34 @@ class NewsServerSession:
|
||||
size = int(parsed_message.group("size"))
|
||||
except (AttributeError, ValueError):
|
||||
logging.warning("Can't parse article information")
|
||||
self.writer.write(b"430 No Such Article Found (bad message-id)\r\n")
|
||||
await self.writer.drain()
|
||||
self.transport.write(b"430 No Such Article Found (bad message-id)\r\n")
|
||||
return
|
||||
|
||||
# Check if file exists
|
||||
if not os.path.exists(file):
|
||||
logging.warning("File not found: %s", file)
|
||||
self.writer.write(b"430 No Such Article Found (no file on disk)\r\n")
|
||||
await self.writer.drain()
|
||||
self.transport.write(b"430 No Such Article Found (no file on disk)\r\n")
|
||||
return
|
||||
|
||||
# Check if sizes are valid
|
||||
file_size = os.path.getsize(file)
|
||||
if start + size > file_size:
|
||||
logging.warning("Invalid start/size attributes")
|
||||
self.writer.write(b"430 No Such Article Found (invalid start/size attributes)\r\n")
|
||||
await self.writer.drain()
|
||||
self.transport.write(b"430 No Such Article Found (invalid start/size attributes)\r\n")
|
||||
return
|
||||
|
||||
logging.debug("Serving %s" % message_id)
|
||||
|
||||
# File is found, send headers
|
||||
self.writer.write(b"222 0 %s\r\n" % message_id)
|
||||
self.writer.write(b"Message-ID: %s\r\n" % message_id)
|
||||
self.writer.write(b'Subject: "%s"\r\n\r\n' % file_base.encode("utf-8"))
|
||||
self.transport.write(b"222 0 %s\r\n" % message_id)
|
||||
self.transport.write(b"Message-ID: %s\r\n" % message_id)
|
||||
self.transport.write(b'Subject: "%s"\r\n\r\n' % file_base.encode("utf-8"))
|
||||
|
||||
# Write yEnc headers
|
||||
self.writer.write(
|
||||
self.transport.write(
|
||||
b"=ybegin part=%d line=128 size=%d name=%s\r\n" % (part, file_size, file_base.encode("utf-8"))
|
||||
)
|
||||
self.writer.write(b"=ypart begin=%d end=%d\r\n" % (start + 1, start + size))
|
||||
self.transport.write(b"=ypart begin=%d end=%d\r\n" % (start + 1, start + size))
|
||||
|
||||
with open(file, "rb") as inp_file:
|
||||
inp_file.seek(start)
|
||||
@@ -125,31 +116,24 @@ class NewsServerSession:
|
||||
|
||||
# Encode data
|
||||
output_string, crc = sabctools.yenc_encode(inp_buffer)
|
||||
self.writer.write(output_string)
|
||||
self.transport.write(output_string)
|
||||
|
||||
# Write footer
|
||||
self.writer.write(b"\r\n=yend size=%d part=%d pcrc32=%08x\r\n" % (size, part, crc))
|
||||
self.writer.write(b".\r\n")
|
||||
await self.writer.drain()
|
||||
self.transport.write(b"\r\n=yend size=%d part=%d pcrc32=%08x\r\n" % (size, part, crc))
|
||||
self.transport.write(b".\r\n")
|
||||
|
||||
async def close_connection(self):
|
||||
def close_connection(self):
|
||||
logging.debug("Closing connection")
|
||||
self.writer.write(b"205 Connection closing\r\n")
|
||||
await self.writer.drain()
|
||||
self.writer.close()
|
||||
await self.writer.wait_closed()
|
||||
|
||||
|
||||
async def connection_handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
|
||||
session = NewsServerSession(reader, writer)
|
||||
await session.run()
|
||||
self.transport.write(b"205 Connection closing\r\n")
|
||||
self.transport.close()
|
||||
|
||||
|
||||
async def serve_sabnews(hostname, port):
|
||||
# Start server
|
||||
logging.info("Starting SABNews on %s:%d", hostname, port)
|
||||
|
||||
server = await asyncio.start_server(connection_handler, hostname, port)
|
||||
loop = asyncio.get_running_loop()
|
||||
server = await loop.create_server(lambda: NewsServerProtocol(), hostname, port)
|
||||
async with server:
|
||||
await server.serve_forever()
|
||||
|
||||
|
||||
@@ -23,6 +23,7 @@ from tests.testhelper import *
import shutil
import zipfile
import os
from typing import List

import sabnzbd.cfg
from sabnzbd.constants import (
@@ -86,7 +87,7 @@ class TestConfig:
return zip_buffer.getvalue()

@staticmethod
def create_and_verify_backup(admin_dir: str, must_haves: list[str]):
def create_and_verify_backup(admin_dir: str, must_haves: List[str]):
# Create the backup
config_backup_path = config.create_config_backup()
assert os.path.exists(config_backup_path)

@@ -68,10 +68,7 @@ class TestWiki:
config_diff = {}
for url in ("general", "switches", "special"):
config_tree = lxml.html.fromstring(
requests.get(
"http://%s:%s/config/%s/" % (SAB_HOST, SAB_PORT, url),
headers={"User-Agent": "SABnzbd/%s" % sabnzbd.__version__},
).content
requests.get("http://%s:%s/config/%s/" % (SAB_HOST, SAB_PORT, url)).content
)
# Have to remove some decorating stuff and empty values
config_labels = [
@@ -82,10 +79,7 @@ class TestWiki:
# Parse the version info to get the right Wiki version
version = re.search(r"(\d+\.\d+)\.(\d+)([a-zA-Z]*)(\d*)", sabnzbd.__version__).group(1)
wiki_tree = lxml.html.fromstring(
requests.get(
"https://sabnzbd.org/wiki/configuration/%s/%s" % (version, url),
headers={"User-Agent": "SABnzbd/%s" % sabnzbd.__version__},
).content
requests.get("https://sabnzbd.org/wiki/configuration/%s/%s" % (version, url)).content
)

# Special-page needs different label locator
@@ -21,14 +21,12 @@ tests.test_decoder- Testing functions in decoder.py
|
||||
import binascii
|
||||
import os
|
||||
import pytest
|
||||
from io import BytesIO
|
||||
|
||||
from random import randint
|
||||
from unittest import mock
|
||||
|
||||
import sabctools
|
||||
import sabnzbd.decoder as decoder
|
||||
from sabnzbd.nzb import Article
|
||||
from sabnzbd.nzbstuff import Article
|
||||
|
||||
|
||||
def uu(data: bytes):
|
||||
@@ -113,7 +111,7 @@ class TestUuDecoder:
|
||||
result.append(END_DATA)
|
||||
|
||||
# Signal the end of the message with a dot on a line of its own
|
||||
data.append(b".\r\n")
|
||||
data.append(b".")
|
||||
|
||||
# Join the data with \r\n line endings, just like we get from socket reads
|
||||
data = b"\r\n".join(data)
|
||||
@@ -122,26 +120,22 @@ class TestUuDecoder:
|
||||
|
||||
return article, bytearray(data), result
|
||||
|
||||
@staticmethod
|
||||
def _response(raw_data: bytes) -> sabctools.NNTPResponse:
|
||||
dec = sabctools.Decoder(len(raw_data))
|
||||
reader = BytesIO(raw_data)
|
||||
reader.readinto(dec)
|
||||
dec.process(len(raw_data))
|
||||
return next(dec)
|
||||
def test_no_data(self):
|
||||
with pytest.raises(decoder.BadUu):
|
||||
assert decoder.decode_uu(None, None)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"raw_data",
|
||||
[
|
||||
b"222 0 <foo@bar>\r\n.\r\n",
|
||||
b"222 0 <foo@bar>\r\n\r\n.\r\n",
|
||||
b"222 0 <foo@bar>\r\nfoobar\r\n.\r\n", # Plenty of list items, but (too) few actual lines
|
||||
b"222 0 <foo@bar>\r\nX-Too-Short: yup\r\n.\r\n",
|
||||
b"",
|
||||
b"\r\n\r\n",
|
||||
b"foobar\r\n", # Plenty of list items, but (too) few actual lines
|
||||
b"222 0 <artid@woteva>\r\nX-Too-Short: yup\r\n",
|
||||
],
|
||||
)
|
||||
def test_short_data(self, raw_data):
|
||||
with pytest.raises(decoder.BadUu):
|
||||
assert decoder.decode_uu(Article("foo@bar", 4321, None), self._response(raw_data))
|
||||
assert decoder.decode_uu(None, bytearray(raw_data))
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"raw_data",
|
||||
@@ -164,8 +158,7 @@ class TestUuDecoder:
|
||||
with pytest.raises(decoder.BadUu):
|
||||
raw_data = bytearray(raw_data)
|
||||
raw_data.extend(filler)
|
||||
raw_data.extend(b".\r\n")
|
||||
assert decoder.decode_uu(article, self._response(raw_data))
|
||||
assert decoder.decode_uu(article, raw_data)
|
||||
|
||||
@pytest.mark.parametrize("insert_empty_line", [True, False])
|
||||
@pytest.mark.parametrize("insert_excess_empty_lines", [True, False])
|
||||
@@ -201,7 +194,7 @@ class TestUuDecoder:
|
||||
insert_dot_stuffing_line,
|
||||
begin_line,
|
||||
)
|
||||
assert decoder.decode_uu(article, self._response(raw_data)) == expected_result
|
||||
assert decoder.decode_uu(article, raw_data) == expected_result
|
||||
assert article.nzf.filename_checked
|
||||
|
||||
@pytest.mark.parametrize("insert_empty_line", [True, False])
|
||||
@@ -212,7 +205,7 @@ class TestUuDecoder:
|
||||
decoded_data = expected_data = b""
|
||||
for part in ("begin", "middle", "middle", "end"):
|
||||
article, data, result = self._generate_msg_part(part, insert_empty_line, False, False, True)
|
||||
decoded_data += decoder.decode_uu(article, self._response(data))
|
||||
decoded_data += decoder.decode_uu(article, data)
|
||||
expected_data += result
|
||||
|
||||
# Verify results
|
||||
@@ -230,6 +223,4 @@ class TestUuDecoder:
|
||||
article.lowest_partnum = False
|
||||
filler = b"\r\n".join(VALID_UU_LINES[:4]) + b"\r\n"
|
||||
with pytest.raises(decoder.BadData):
|
||||
assert decoder.decode_uu(
|
||||
article, self._response(bytearray(b"222 0 <foo@bar>\r\n" + filler + bad_data + b"\r\n.\r\n"))
|
||||
)
|
||||
assert decoder.decode_uu(article, bytearray(b"222 0 <foo@bar>\r\n" + filler + bad_data + b"\r\n"))
|
||||
|
||||
@@ -663,7 +663,7 @@ class TestListdirFull(ffs.TestCase):
):
self.fs.create_file(file)
assert os.path.exists(file) is True
assert sorted(filesystem.listdir_full("/rsc")) == ["/rsc/base_file", "/rsc/not._base_file"]
assert filesystem.listdir_full("/rsc") == ["/rsc/base_file", "/rsc/not._base_file"]

def test_invalid_file_argument(self):
# This is obviously not intended use; the function expects a directory
@@ -750,7 +750,7 @@ class TestListdirFullWin(ffs.TestCase):
):
self.fs.create_file(file)
assert os.path.exists(file) is True
assert sorted(filesystem.listdir_full(r"f:\rsc")) == [r"f:\rsc\base_file", r"f:\rsc\not._base_file"]
assert filesystem.listdir_full(r"f:\rsc") == [r"f:\rsc\base_file", r"f:\rsc\not._base_file"]

def test_invalid_file_argument(self):
# This is obviously not intended use; the function expects a directory
@@ -1256,21 +1256,3 @@ class TestOtherFileSystemFunctions:
)
def test_strip_extensions(self, name, ext_to_remove, output):
assert filesystem.strip_extensions(name, ext_to_remove) == output

@pytest.mark.parametrize(
"file_name, clean_file_name",
[
("my_awesome_nzb_file.pAr2.nZb", "my_awesome_nzb_file"),
("my_awesome_nzb_file.....pAr2.nZb", "my_awesome_nzb_file"),
("my_awesome_nzb_file....par2..", "my_awesome_nzb_file"),
(" my_awesome_nzb_file .pAr.nZb", "my_awesome_nzb_file"),
("with.extension.and.period.par2.", "with.extension.and.period"),
("nothing.in.here", "nothing.in.here"),
(" just.space ", "just.space"),
("http://test.par2 ", "http://test.par2"),
],
)
def test_create_work_name(self, file_name, clean_file_name):
# Only test stuff specific for create_work_name
# The sanitizing is already tested in tests for sanitize_foldername
assert filesystem.create_work_name(file_name) == clean_file_name
@@ -25,6 +25,7 @@ import sys
|
||||
|
||||
from math import ceil
|
||||
from random import sample
|
||||
from typing import List
|
||||
|
||||
from tavern.core import run
|
||||
from warnings import warn
|
||||
@@ -171,7 +172,7 @@ class ApiTestFunctions:
|
||||
self._get_api_json("queue", extra_args={"name": "purge", "del_files": del_files})
|
||||
assert len(self._get_api_json("queue")["queue"]["slots"]) == 0
|
||||
|
||||
def _get_files(self, nzo_id: str) -> list[str]:
|
||||
def _get_files(self, nzo_id: str) -> List[str]:
|
||||
files_json = self._get_api_json("get_files", extra_args={"value": nzo_id})
|
||||
assert "files" in files_json
|
||||
return [file["nzf_id"] for file in files_json["files"]]
|
||||
|
||||
@@ -837,94 +837,6 @@ class TestMisc:
|
||||
|
||||
_func()
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"argument, name, password",
|
||||
[
|
||||
("my_awesome_nzb_file{{password}}", "my_awesome_nzb_file", "password"),
|
||||
("file_with_text_after_pw{{passw0rd}}_[180519]", "file_with_text_after_pw", "passw0rd"),
|
||||
("file_without_pw", "file_without_pw", None),
|
||||
("multiple_pw{{first-pw}}_{{second-pw}}", "multiple_pw", "first-pw}}_{{second-pw"), # Greed is Good
|
||||
("デビアン", "デビアン", None), # Unicode
|
||||
("Gentoo_Hobby_Edition {{secret}}", "Gentoo_Hobby_Edition", "secret"), # Space between name and password
|
||||
("Test {{secret}}.nzb", "Test", "secret"),
|
||||
("Mandrake{{top{{secret}}", "Mandrake", "top{{secret"), # Double opening {{
|
||||
("Красная}}{{Шляпа}}", "Красная}}", "Шляпа"), # Double closing }}
|
||||
("{{Jobname{{PassWord}}", "{{Jobname", "PassWord"), # {{ at start
|
||||
("Hello/kITTY", "Hello", "kITTY"), # Notation with slash
|
||||
("Hello/kITTY.nzb", "Hello", "kITTY"), # Notation with slash and extension
|
||||
("/Jobname", "/Jobname", None), # Slash at start
|
||||
("Jobname/Top{{Secret}}", "Jobname", "Top{{Secret}}"), # Slash with braces
|
||||
("Jobname / Top{{Secret}}", "Jobname", "Top{{Secret}}"), # Slash with braces and extra spaces
|
||||
("Jobname / Top{{Secret}}.nzb", "Jobname", "Top{{Secret}}"),
|
||||
("לינוקס/معلومات سرية", "לינוקס", "معلومات سرية"), # LTR with slash
|
||||
("לינוקס{{معلومات سرية}}", "לינוקס", "معلومات سرية"), # LTR with brackets
|
||||
("thư điện tử password=mật_khẩu", "thư điện tử", "mật_khẩu"), # Password= notation
|
||||
("password=PartOfTheJobname", "password=PartOfTheJobname", None), # Password= at the start
|
||||
("Job password=Test.par2", "Job", "Test"), # Password= including extension
|
||||
("Job}}Name{{FTW", "Job}}Name{{FTW", None), # Both {{ and }} present but incorrect order (no password)
|
||||
("./Text", "./Text", None), # Name would end up empty after the function strips the dot
|
||||
],
|
||||
)
|
||||
def test_scan_password(self, argument, name, password):
|
||||
assert misc.scan_password(argument) == (name, password)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"subject, filename",
|
||||
[
|
||||
('Great stuff (001/143) - "Filename.txt" yEnc (1/1)', "Filename.txt"),
|
||||
(
|
||||
'"910a284f98ebf57f6a531cd96da48838.vol01-03.par2" yEnc (1/3)',
|
||||
"910a284f98ebf57f6a531cd96da48838.vol01-03.par2",
|
||||
),
|
||||
('Subject-KrzpfTest [02/30] - ""KrzpfTest.part.nzb"" yEnc', "KrzpfTest.part.nzb"),
|
||||
(
|
||||
'[PRiVATE]-[WtFnZb]-[Supertje-_S03E11-12_-blabla_+_blabla_WEBDL-480p.mkv]-[4/12] - "" yEnc 9786 (1/1366)',
|
||||
"Supertje-_S03E11-12_-blabla_+_blabla_WEBDL-480p.mkv",
|
||||
),
|
||||
(
|
||||
'[N3wZ] MAlXD245333\\::[PRiVATE]-[WtFnZb]-[Show.S04E04.720p.AMZN.WEBRip.x264-GalaxyTV.mkv]-[1/2] - "" yEnc 293197257 (1/573)',
|
||||
"Show.S04E04.720p.AMZN.WEBRip.x264-GalaxyTV.mkv",
|
||||
),
|
||||
(
|
||||
'reftestnzb bf1664007a71 [1/6] - "20b9152c-57eb-4d02-9586-66e30b8e3ac2" yEnc (1/22) 15728640',
|
||||
"20b9152c-57eb-4d02-9586-66e30b8e3ac2",
|
||||
),
|
||||
(
|
||||
"Re: REQ Author Child's The Book-Thanks much - Child, Lee - Author - The Book.epub (1/1)",
|
||||
"REQ Author Child's The Book-Thanks much - Child, Lee - Author - The Book.epub",
|
||||
),
|
||||
('63258-0[001/101] - "63258-2.0" yEnc (1/250) (1/250)', "63258-2.0"),
|
||||
# If specified between ", the extension is allowed to be too long
|
||||
('63258-0[001/101] - "63258-2.0toolong" yEnc (1/250) (1/250)', "63258-2.0toolong"),
|
||||
(
|
||||
"Singer - A Album (2005) - [04/25] - 02 Sweetest Somebody (I Know).flac",
|
||||
"Singer - A Album (2005) - [04/25] - 02 Sweetest Somebody (I Know).flac",
|
||||
),
|
||||
("<>random!>", "<>random!>"),
|
||||
("nZb]-[Supertje-_S03E11-12_", "nZb]-[Supertje-_S03E11-12_"),
|
||||
("Bla [Now it's done.exe]", "Now it's done.exe"),
|
||||
# If specified between [], the extension should be a valid one
|
||||
("Bla [Now it's done.123nonsense]", "Bla [Now it's done.123nonsense]"),
|
||||
('[PRiVATE]-[WtFnZb]-[00000.clpi]-[1/46] - "" yEnc 788 (1/1)', "00000.clpi"),
|
||||
(
|
||||
'[PRiVATE]-[WtFnZb]-[Video_(2001)_AC5.1_-RELEASE_[TAoE].mkv]-[1/23] - "" yEnc 1234567890 (1/23456)',
|
||||
"Video_(2001)_AC5.1_-RELEASE_[TAoE].mkv",
|
||||
),
|
||||
(
|
||||
"[PRiVATE]-[WtFnZb]-[219]-[1/series.name.s01e01.1080p.web.h264-group.mkv] - "
|
||||
" yEnc (1/[PRiVATE] \\c2b510b594\\::686ea969999193.155368eba4965e56a8cd263382e012.f2712fdc::/97bd201cf931/) 1 (1/0)",
|
||||
"series.name.s01e01.1080p.web.h264-group.mkv",
|
||||
),
|
||||
(
|
||||
"[PRiVATE]-[WtFnZb]-[/More.Bla.S02E01.1080p.WEB.h264-EDITH[eztv.re].mkv-WtF[nZb]/"
|
||||
'More.Bla.S02E01.1080p.WEB.h264-EDITH.mkv]-[1/2] - "" yEnc 2990558544 (1/4173)',
|
||||
"More.Bla.S02E01.1080p.WEB.h264-EDITH[eztv.re].mkv",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_name_extractor(self, subject, filename):
|
||||
assert misc.subject_name_extractor(subject) == filename
|
||||
|
||||
|
||||
class TestBuildAndRunCommand:
|
||||
# Path should exist
|
||||
|
||||
@@ -76,7 +76,7 @@ def get_local_ip(protocol_version: IPProtocolVersion) -> Optional[str]:
|
||||
sending any traffic but already prefills what would be the sender ip address.
|
||||
"""
|
||||
s: Optional[socket.socket] = None
|
||||
address_to_connect_to: Optional[tuple[str, int]] = None
|
||||
address_to_connect_to: Optional[Tuple[str, int]] = None
|
||||
if protocol_version == IPProtocolVersion.IPV4:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
# Google DNS IPv4
|
||||
|
||||
@@ -1,69 +0,0 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
tests.test_nzbarticle - Testing functions in nzbarticle.py
|
||||
"""
|
||||
from sabnzbd.nzb import Article
|
||||
|
||||
from tests.testhelper import *
|
||||
|
||||
|
||||
class Server:
|
||||
def __init__(self, host, priority, active):
|
||||
self.host = host
|
||||
self.priority = priority
|
||||
self.active = active
|
||||
|
||||
|
||||
class TestArticle:
|
||||
def test_get_article(self):
|
||||
article_id = "test@host" + os.urandom(8).hex() + ".sab"
|
||||
article = Article(article_id, randint(4321, 54321), None)
|
||||
servers = []
|
||||
servers.append(Server("testserver1", 10, True))
|
||||
servers.append(Server("testserver2", 20, True))
|
||||
servers.append(Server("testserver3", 30, True))
|
||||
|
||||
# Test fetching top priority server
|
||||
server = servers[0]
|
||||
assert article.get_article(server, servers) == article
|
||||
assert article.fetcher_priority == 10
|
||||
assert article.fetcher == server
|
||||
assert article.get_article(server, servers) == None
|
||||
article.fetcher = None
|
||||
article.add_to_try_list(server)
|
||||
assert article.get_article(server, servers) == None
|
||||
|
||||
# Test fetching when there is a higher priority server available
|
||||
server = servers[2]
|
||||
assert article.fetcher_priority == 10
|
||||
assert article.get_article(server, servers) == None
|
||||
assert article.fetcher_priority == 20
|
||||
|
||||
# Server should be used even if article.fetcher_priority is a higher number than server.priority
|
||||
article.fetcher_priority = 30
|
||||
server = servers[1]
|
||||
assert article.get_article(server, servers) == article
|
||||
|
||||
# Inactive servers in servers list should be ignored
|
||||
article.fetcher = None
|
||||
article.fetcher_priority = 0
|
||||
servers[1].active = False
|
||||
server = servers[2]
|
||||
assert article.get_article(server, servers) == article
|
||||
assert article.tries == 3
|
||||
@@ -1,67 +0,0 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
tests.test_nzbobject - Testing functions in nzbobject.py
|
||||
"""
|
||||
from sabnzbd.nzb import NzbObject
|
||||
from sabnzbd.config import ConfigCat
|
||||
from sabnzbd.constants import NORMAL_PRIORITY
|
||||
from sabnzbd.filesystem import globber
|
||||
|
||||
from tests.testhelper import *
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("clean_cache_dir")
|
||||
class TestNZO:
|
||||
@set_config({"download_dir": SAB_CACHE_DIR})
|
||||
def test_nzo_basic(self):
|
||||
# Need to create the Default category, as we would in normal instance
|
||||
# Otherwise it will try to save the config
|
||||
def_cat = ConfigCat("*", {"pp": 3, "script": "None", "priority": NORMAL_PRIORITY})
|
||||
|
||||
# Create empty object, normally used to grab URL's
|
||||
nzo = NzbObject("test_basic")
|
||||
assert nzo.work_name == "test_basic"
|
||||
assert not nzo.files
|
||||
|
||||
# Create NZB-file to import
|
||||
nzb_fp = create_and_read_nzb_fp("basic_rar5")
|
||||
|
||||
# Very basic test of NZO creation with data
|
||||
nzo = NzbObject("test_basic_data", nzb_fp=nzb_fp)
|
||||
assert nzo.final_name == "test_basic_data"
|
||||
assert nzo.files
|
||||
assert nzo.files[0].filename == "testfile.rar"
|
||||
assert nzo.bytes == 283
|
||||
assert nzo.files[0].bytes == 283
|
||||
|
||||
# work_name can be trimmed in Windows due to max-path-length
|
||||
assert "test_basic_data".startswith(nzo.work_name)
|
||||
assert os.path.exists(nzo.admin_path)
|
||||
|
||||
# Check if there's an nzf file and the backed-up nzb
|
||||
assert globber(nzo.admin_path, "*.nzb.gz")
|
||||
assert globber(nzo.admin_path, "SABnzbd_nzf*")
|
||||
|
||||
# Should have picked up the default category settings
|
||||
assert nzo.cat == "*"
|
||||
assert nzo.script == def_cat.script() == "None"
|
||||
assert nzo.priority == def_cat.priority() == NORMAL_PRIORITY
|
||||
assert nzo.repair and nzo.unpack and nzo.delete
|
||||
|
||||
# TODO: More checks!
|
||||
@@ -21,7 +21,7 @@ tests.test_nzbparser - Tests of basic NZB parsing
|
||||
|
||||
from tests.testhelper import *
|
||||
import sabnzbd.nzbparser as nzbparser
|
||||
from sabnzbd.nzb import NzbObject
|
||||
from sabnzbd import nzbstuff
|
||||
from sabnzbd.filesystem import save_compressed
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ from sabnzbd.filesystem import save_compressed
|
||||
class TestNzbParser:
|
||||
@set_config({"download_dir": SAB_CACHE_DIR})
|
||||
def test_nzbparser(self):
|
||||
nzo = NzbObject("test_basic")
|
||||
nzo = nzbstuff.NzbObject("test_basic")
|
||||
# Create test file
|
||||
metadata = {"category": "test", "password": "testpass"}
|
||||
nzb_fp = create_and_read_nzb_fp("..", metadata=metadata)
|
||||
|
||||
221
tests/test_nzbstuff.py
Normal file
@@ -0,0 +1,221 @@
|
||||
#!/usr/bin/python3 -OO
|
||||
# Copyright 2007-2025 by The SABnzbd-Team (sabnzbd.org)
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
|
||||
"""
|
||||
tests.test_nzbstuff - Testing functions in nzbstuff.py
|
||||
"""
|
||||
import sabnzbd.nzbstuff as nzbstuff
|
||||
from sabnzbd.config import ConfigCat
|
||||
from sabnzbd.constants import NORMAL_PRIORITY
|
||||
from sabnzbd.filesystem import globber
|
||||
|
||||
from tests.testhelper import *
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("clean_cache_dir")
|
||||
class TestNZO:
|
||||
@set_config({"download_dir": SAB_CACHE_DIR})
|
||||
def test_nzo_basic(self):
|
||||
# Need to create the Default category, as we would in normal instance
|
||||
# Otherwise it will try to save the config
|
||||
def_cat = ConfigCat("*", {"pp": 3, "script": "None", "priority": NORMAL_PRIORITY})
|
||||
|
||||
# Create empty object, normally used to grab URL's
|
||||
nzo = nzbstuff.NzbObject("test_basic")
|
||||
assert nzo.work_name == "test_basic"
|
||||
assert not nzo.files
|
||||
|
||||
# Create NZB-file to import
|
||||
nzb_fp = create_and_read_nzb_fp("basic_rar5")
|
||||
|
||||
# Very basic test of NZO creation with data
|
||||
nzo = nzbstuff.NzbObject("test_basic_data", nzb_fp=nzb_fp)
|
||||
assert nzo.final_name == "test_basic_data"
|
||||
assert nzo.files
|
||||
assert nzo.files[0].filename == "testfile.rar"
|
||||
assert nzo.bytes == 283
|
||||
assert nzo.files[0].bytes == 283
|
||||
|
||||
# work_name can be trimmed in Windows due to max-path-length
|
||||
assert "test_basic_data".startswith(nzo.work_name)
|
||||
assert os.path.exists(nzo.admin_path)
|
||||
|
||||
# Check if there's an nzf file and the backed-up nzb
|
||||
assert globber(nzo.admin_path, "*.nzb.gz")
|
||||
assert globber(nzo.admin_path, "SABnzbd_nzf*")
|
||||
|
||||
# Should have picked up the default category settings
|
||||
assert nzo.cat == "*"
|
||||
assert nzo.script == def_cat.script() == "None"
|
||||
assert nzo.priority == def_cat.priority() == NORMAL_PRIORITY
|
||||
assert nzo.repair and nzo.unpack and nzo.delete
|
||||
|
||||
# TODO: More checks!
|
||||
|
||||
|
||||
class Server:
|
||||
def __init__(self, host, priority, active):
|
||||
self.host = host
|
||||
self.priority = priority
|
||||
self.active = active
|
||||
|
||||
|
||||
class TestArticle:
|
||||
def test_get_article(self):
|
||||
article_id = "test@host" + os.urandom(8).hex() + ".sab"
|
||||
article = nzbstuff.Article(article_id, randint(4321, 54321), None)
|
||||
servers = []
|
||||
servers.append(Server("testserver1", 10, True))
|
||||
servers.append(Server("testserver2", 20, True))
|
||||
servers.append(Server("testserver3", 30, True))
|
||||
|
||||
# Test fetching top priority server
|
||||
server = servers[0]
|
||||
assert article.get_article(server, servers) == article
|
||||
assert article.fetcher_priority == 10
|
||||
assert article.fetcher == server
|
||||
assert article.get_article(server, servers) == None
|
||||
article.fetcher = None
|
||||
article.add_to_try_list(server)
|
||||
assert article.get_article(server, servers) == None
|
||||
|
||||
# Test fetching when there is a higher priority server available
|
||||
server = servers[2]
|
||||
assert article.fetcher_priority == 10
|
||||
assert article.get_article(server, servers) == None
|
||||
assert article.fetcher_priority == 20
|
||||
|
||||
# Server should be used even if article.fetcher_priority is a higher number than server.priority
|
||||
article.fetcher_priority = 30
|
||||
server = servers[1]
|
||||
assert article.get_article(server, servers) == article
|
||||
|
||||
# Inactive servers in servers list should be ignored
|
||||
article.fetcher = None
|
||||
article.fetcher_priority = 0
|
||||
servers[1].active = False
|
||||
server = servers[2]
|
||||
assert article.get_article(server, servers) == article
|
||||
assert article.tries == 3
|
||||
|
||||
|
||||
class TestNZBStuffHelpers:
    @pytest.mark.parametrize(
        "argument, name, password",
        [
            ("my_awesome_nzb_file{{password}}", "my_awesome_nzb_file", "password"),
            ("file_with_text_after_pw{{passw0rd}}_[180519]", "file_with_text_after_pw", "passw0rd"),
            ("file_without_pw", "file_without_pw", None),
            ("multiple_pw{{first-pw}}_{{second-pw}}", "multiple_pw", "first-pw}}_{{second-pw"),  # Greed is Good
            ("デビアン", "デビアン", None),  # Unicode
            ("Gentoo_Hobby_Edition {{secret}}", "Gentoo_Hobby_Edition", "secret"),  # Space between name and password
            ("Test {{secret}}.nzb", "Test", "secret"),
            ("Mandrake{{top{{secret}}", "Mandrake", "top{{secret"),  # Double opening {{
            ("Красная}}{{Шляпа}}", "Красная}}", "Шляпа"),  # Double closing }}
            ("{{Jobname{{PassWord}}", "{{Jobname", "PassWord"),  # {{ at start
            ("Hello/kITTY", "Hello", "kITTY"),  # Notation with slash
            ("Hello/kITTY.nzb", "Hello", "kITTY"),  # Notation with slash and extension
            ("/Jobname", "/Jobname", None),  # Slash at start
            ("Jobname/Top{{Secret}}", "Jobname", "Top{{Secret}}"),  # Slash with braces
            ("Jobname / Top{{Secret}}", "Jobname", "Top{{Secret}}"),  # Slash with braces and extra spaces
            ("Jobname / Top{{Secret}}.nzb", "Jobname", "Top{{Secret}}"),
            ("לינוקס/معلومات سرية", "לינוקס", "معلومات سرية"),  # LTR with slash
            ("לינוקס{{معلومات سرية}}", "לינוקס", "معلومات سرية"),  # LTR with brackets
            ("thư điện tử password=mật_khẩu", "thư điện tử", "mật_khẩu"),  # Password= notation
            ("password=PartOfTheJobname", "password=PartOfTheJobname", None),  # Password= at the start
            ("Job password=Test.par2", "Job", "Test"),  # Password= including extension
            ("Job}}Name{{FTW", "Job}}Name{{FTW", None),  # Both {{ and }} present but incorrect order (no password)
            ("./Text", "./Text", None),  # Name would end up empty after the function strips the dot
        ],
    )
    def test_scan_password(self, argument, name, password):
        assert nzbstuff.scan_password(argument) == (name, password)

    @pytest.mark.parametrize(
        "file_name, clean_file_name",
        [
            ("my_awesome_nzb_file.pAr2.nZb", "my_awesome_nzb_file"),
            ("my_awesome_nzb_file.....pAr2.nZb", "my_awesome_nzb_file"),
            ("my_awesome_nzb_file....par2..", "my_awesome_nzb_file"),
            (" my_awesome_nzb_file .pAr.nZb", "my_awesome_nzb_file"),
            ("with.extension.and.period.par2.", "with.extension.and.period"),
            ("nothing.in.here", "nothing.in.here"),
            (" just.space ", "just.space"),
            ("http://test.par2 ", "http://test.par2"),
        ],
    )
    def test_create_work_name(self, file_name, clean_file_name):
        # Only test stuff specific for create_work_name
        # The sanitizing is already tested in tests for sanitize_foldername
        assert nzbstuff.create_work_name(file_name) == clean_file_name

    @pytest.mark.parametrize(
        "subject, filename",
        [
            ('Great stuff (001/143) - "Filename.txt" yEnc (1/1)', "Filename.txt"),
            (
                '"910a284f98ebf57f6a531cd96da48838.vol01-03.par2" yEnc (1/3)',
                "910a284f98ebf57f6a531cd96da48838.vol01-03.par2",
            ),
            ('Subject-KrzpfTest [02/30] - ""KrzpfTest.part.nzb"" yEnc', "KrzpfTest.part.nzb"),
            (
                '[PRiVATE]-[WtFnZb]-[Supertje-_S03E11-12_-blabla_+_blabla_WEBDL-480p.mkv]-[4/12] - "" yEnc 9786 (1/1366)',
                "Supertje-_S03E11-12_-blabla_+_blabla_WEBDL-480p.mkv",
            ),
            (
                '[N3wZ] MAlXD245333\\::[PRiVATE]-[WtFnZb]-[Show.S04E04.720p.AMZN.WEBRip.x264-GalaxyTV.mkv]-[1/2] - "" yEnc 293197257 (1/573)',
                "Show.S04E04.720p.AMZN.WEBRip.x264-GalaxyTV.mkv",
            ),
            (
                'reftestnzb bf1664007a71 [1/6] - "20b9152c-57eb-4d02-9586-66e30b8e3ac2" yEnc (1/22) 15728640',
                "20b9152c-57eb-4d02-9586-66e30b8e3ac2",
            ),
            (
                "Re: REQ Author Child's The Book-Thanks much - Child, Lee - Author - The Book.epub (1/1)",
                "REQ Author Child's The Book-Thanks much - Child, Lee - Author - The Book.epub",
            ),
            ('63258-0[001/101] - "63258-2.0" yEnc (1/250) (1/250)', "63258-2.0"),
            # If specified between ", the extension is allowed to be too long
            ('63258-0[001/101] - "63258-2.0toolong" yEnc (1/250) (1/250)', "63258-2.0toolong"),
            (
                "Singer - A Album (2005) - [04/25] - 02 Sweetest Somebody (I Know).flac",
                "Singer - A Album (2005) - [04/25] - 02 Sweetest Somebody (I Know).flac",
            ),
            ("<>random!>", "<>random!>"),
            ("nZb]-[Supertje-_S03E11-12_", "nZb]-[Supertje-_S03E11-12_"),
            ("Bla [Now it's done.exe]", "Now it's done.exe"),
            # If specified between [], the extension should be a valid one
            ("Bla [Now it's done.123nonsense]", "Bla [Now it's done.123nonsense]"),
            ('[PRiVATE]-[WtFnZb]-[00000.clpi]-[1/46] - "" yEnc 788 (1/1)', "00000.clpi"),
            (
                '[PRiVATE]-[WtFnZb]-[Video_(2001)_AC5.1_-RELEASE_[TAoE].mkv]-[1/23] - "" yEnc 1234567890 (1/23456)',
                "Video_(2001)_AC5.1_-RELEASE_[TAoE].mkv",
            ),
            (
                "[PRiVATE]-[WtFnZb]-[219]-[1/series.name.s01e01.1080p.web.h264-group.mkv] - "
                " yEnc (1/[PRiVATE] \\c2b510b594\\::686ea969999193.155368eba4965e56a8cd263382e012.f2712fdc::/97bd201cf931/) 1 (1/0)",
                "series.name.s01e01.1080p.web.h264-group.mkv",
            ),
            (
                "[PRiVATE]-[WtFnZb]-[/More.Bla.S02E01.1080p.WEB.h264-EDITH[eztv.re].mkv-WtF[nZb]/"
                'More.Bla.S02E01.1080p.WEB.h264-EDITH.mkv]-[1/2] - "" yEnc 2990558544 (1/4173)',
                "More.Bla.S02E01.1080p.WEB.h264-EDITH[eztv.re].mkv",
            ),
        ],
    )
    def test_name_extractor(self, subject, filename):
        assert nzbstuff.name_extractor(subject) == filename

@@ -257,137 +257,3 @@ class TestPostProc:
            assert tmp_workdir_complete == workdir_complete

        _func()

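# process_nzb_only_download() handles jobs whose download yielded nothing but .nzb files by
# handing each one to process_single_nzb(); the tests below cover the single-file,
# multi-file, mixed-content and empty-directory cases.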
class TestNzbOnlyDownload:
    @mock.patch("sabnzbd.postproc.process_single_nzb")
    @mock.patch("sabnzbd.postproc.listdir_full")
    def test_process_nzb_only_download_single_nzb(self, mock_listdir, mock_process_single_nzb):
        """Test process_nzb_only_download with a single NZB file"""
        # Setup mock NZO
        fake_nzo = mock.Mock()
        fake_nzo.final_name = "TestDownload"
        fake_nzo.pp = 3
        fake_nzo.script = "test_script.py"
        fake_nzo.cat = "movies"
        fake_nzo.url = "http://example.com/test.nzb"
        fake_nzo.priority = 0

        # Mock single NZB file
        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
        nzb_file = os.path.join(workdir, "test.nzb")
        mock_listdir.return_value = [nzb_file]

        # Call the function
        result = postproc.process_nzb_only_download(workdir, fake_nzo)

        # Verify result
        assert result == [nzb_file]

        # Verify process_single_nzb was called with correct arguments
        mock_process_single_nzb.assert_called_once_with(
            "test.nzb",
            nzb_file,
            pp=3,
            script="test_script.py",
            cat="movies",
            url="http://example.com/test.nzb",
            priority=0,
            nzbname="TestDownload",
            dup_check=False,
        )

    @mock.patch("sabnzbd.postproc.process_single_nzb")
    @mock.patch("sabnzbd.postproc.listdir_full")
    def test_process_nzb_only_download_multiple_nzbs(self, mock_listdir, mock_process_single_nzb):
        """Test process_nzb_only_download with multiple NZB files"""
        # Setup mock NZO
        fake_nzo = mock.Mock()
        fake_nzo.final_name = "TestDownload"
        fake_nzo.pp = 2
        fake_nzo.script = None
        fake_nzo.cat = "tv"
        fake_nzo.url = "http://example.com/test.nzb"
        fake_nzo.priority = 1

        # Mock multiple NZB files
        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
        first_nzb = os.path.join(workdir, "first.nzb")
        second_nzb = os.path.join(workdir, "second.nzb")
        mock_listdir.return_value = [first_nzb, second_nzb]

        # Call the function
        result = postproc.process_nzb_only_download(workdir, fake_nzo)

        # Verify result
        assert result == [first_nzb, second_nzb]

        # Verify process_single_nzb was called twice with correct arguments
        assert mock_process_single_nzb.call_count == 2
        mock_process_single_nzb.assert_any_call(
            "first.nzb",
            first_nzb,
            pp=2,
            script=None,
            cat="tv",
            url="http://example.com/test.nzb",
            priority=1,
            nzbname="TestDownload - first.nzb",
            dup_check=False,
        )
        mock_process_single_nzb.assert_any_call(
            "second.nzb",
            second_nzb,
            pp=2,
            script=None,
            cat="tv",
            url="http://example.com/test.nzb",
            priority=1,
            nzbname="TestDownload - second.nzb",
            dup_check=False,
        )

    @mock.patch("sabnzbd.postproc.process_single_nzb")
    @mock.patch("sabnzbd.postproc.listdir_full")
    def test_process_nzb_only_download_mixed_files(self, mock_listdir, mock_process_single_nzb):
        """Test process_nzb_only_download with mixed file types returns None"""
        # Setup mock NZO
        fake_nzo = mock.Mock()
        fake_nzo.final_name = "TestDownload"

        # Mock mixed files (NZB and non-NZB)
        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
        mock_listdir.return_value = [
            os.path.join(workdir, "test.nzb"),
            os.path.join(workdir, "readme.txt"),
        ]

        # Call the function
        result = postproc.process_nzb_only_download(workdir, fake_nzo)

        # Verify result is None (not NZB-only)
        assert result is None

        # Verify process_single_nzb was NOT called
        mock_process_single_nzb.assert_not_called()

    @mock.patch("sabnzbd.postproc.process_single_nzb")
    @mock.patch("sabnzbd.postproc.listdir_full")
    def test_process_nzb_only_download_empty_directory(self, mock_listdir, mock_process_single_nzb):
        """Test process_nzb_only_download with empty directory returns None"""
        # Setup mock NZO
        fake_nzo = mock.Mock()
        fake_nzo.final_name = "TestDownload"

        # Mock empty directory
        workdir = os.path.join(SAB_CACHE_DIR, "test_workdir")
        mock_listdir.return_value = []

        # Call the function
        result = postproc.process_nzb_only_download(workdir, fake_nzo)

        # Verify result is None (no files)
        assert result is None

        # Verify process_single_nzb was NOT called
        mock_process_single_nzb.assert_not_called()

@@ -22,7 +22,7 @@ import io
import os
import time
from http.client import RemoteDisconnected
from typing import BinaryIO, Optional
from typing import BinaryIO, Optional, Dict, List

import pytest
from random import choice, randint
@@ -149,13 +149,13 @@ def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}):
    return r.text


def create_nzb(nzb_dir: str, metadata: Optional[dict[str, str]] = None) -> str:
def create_nzb(nzb_dir: str, metadata: Optional[Dict[str, str]] = None) -> str:
    """Create NZB from directory using SABNews"""
    nzb_dir_full = os.path.join(SAB_DATA_DIR, nzb_dir)
    return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full, metadata=metadata)


def create_and_read_nzb_fp(nzbdir: str, metadata: Optional[dict[str, str]] = None) -> BinaryIO:
def create_and_read_nzb_fp(nzbdir: str, metadata: Optional[Dict[str, str]] = None) -> BinaryIO:
    """Create NZB, return data and delete file"""
    # Create NZB-file to import
    nzb_path = create_nzb(nzbdir, metadata)
@@ -332,7 +332,7 @@ class DownloadFlowBasics(SABnzbdBaseTest):
        self.selenium_wrapper(self.driver.find_element, By.CSS_SELECTOR, ".btn.btn-success").click()
        self.no_page_crash()

    def download_nzb(self, nzb_dir: str, file_output: list[str], dir_name_as_job_name: bool = False):
    def download_nzb(self, nzb_dir: str, file_output: List[str], dir_name_as_job_name: bool = False):
        # Verify if the server was setup before we start
        self.is_server_configured()

Binary file not shown.