Mirror of https://github.com/sabnzbd/sabnzbd.git, synced 2026-01-06 06:28:45 -05:00

Compare commits: 3.4.x ... 3.5.0Beta3 (87 commits)
SHA1:
792825bdaa  ad2371dc9a  dfb771a51e  14a0dbbd5c  bb1c63cc92  38ca26e6f3  285d5688f5  26cdfc2279
d915dfc941  d29bd65f97  81d378498e  d32630c759  b8d7ec0723  207c5430c1  b922274b61  863c0e5eb7
46f9956791  879a6f2552  5e3d237c99  1eb83e4eb0  194c0e6708  a87d38c61f  ed0e5bbf9b  2249b623e6
854ca5f5d4  a0a8029c36  e7eec8e4f1  a354c1984b  34799c397c  2c88dddc1e  64b0216ba9  f950520475
60f3de2a91  06e13483bd  f00cbe89bc  e450f5744b  255242eca5  cbeab4dd55  6b8506c986  d5d5647b7c
1a76de1ca3  1913109623  e76b4395a7  6670156397  37b7a77b70  ddb5a007a5  8568df4552  493a5f715c
a61b27992e  798eec7aa8  0d29603e2b  48882220d6  b4ad292ec5  b59a14f6b7  80ed385a41  04cd67b98b
68eded2c0c  389a0d3afa  5a3e4a28fe  66b5629a31  eae77eb236  5f44ec8a0d  9d8c62de6b  3229fd8d28
fdee789637  c762dda1b1  c5c8b902c4  ee255a5042  3952965632  85db706bbe  ea570442c6  9c109b803d
86f77f8064  81c33d65e4  a1cf822141  ce48a9697a  9b22c4b23c  6283b0460a  4fe977fa47  f188e55692
f5487ed932  c69b25ff0d  b608af640f  315f1ff3bc  b6c2ac194b  00570d2089  56375b16fe
2  .github/workflows/black.yml (vendored)
@@ -17,6 +17,6 @@ jobs:
            builder
            tests
            --line-length=120
-           --target-version=py36
+           --target-version=py37
            --check
            --diff
50  .github/workflows/build_release.yml (vendored)
@@ -10,11 +10,11 @@ jobs:
      AUTOMATION_GITHUB_TOKEN: ${{ secrets.AUTOMATION_GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v2
-     - name: Set up Python 3.9 (64bit)
+     - name: Set up Python 3.10 (64bit)
        uses: actions/setup-python@v2
        with:
-         python-version: 3.9
-         architecture: x64
+         python-version: "3.10"
+         architecture: "x64"
      - name: Install Python dependencies (64bit)
        run: |
          python --version
@@ -43,8 +43,8 @@ jobs:
      - name: Set up Python 3.8 (32bit and legacy)
        uses: actions/setup-python@v2
        with:
-         python-version: 3.8
-         architecture: x86
+         python-version: "3.8"
+         architecture: "x86"
      - name: Install Python dependencies (32bit and legacy)
        run: |
          python --version
@@ -64,7 +64,7 @@ jobs:

  build_macos:
    name: Build macOS binary
-   runs-on: macos-latest
+   runs-on: macos-11
    env:
      SIGNING_AUTH: ${{ secrets.SIGNING_AUTH }}
      NOTARIZATION_USER: ${{ secrets.NOTARIZATION_USER }}
@@ -73,8 +73,10 @@ jobs:
      # We need the official Python, because the GA ones only support newer macOS versions
      # The deployment target is picked up by the Python build tools automatically
      # If updated, make sure to also set LSMinimumSystemVersion in SABnzbd.spec
-     PYTHON_VERSION: 3.9.7
-     MACOSX_DEPLOYMENT_TARGET: 10.9
+     PYTHON_VERSION: "3.10.0"
+     MACOSX_DEPLOYMENT_TARGET: "10.9"
+     # We need to force compile for universal2 support
+     CFLAGS: -arch arm64 -arch x86_64
    steps:
      - uses: actions/checkout@v2
      - name: Cache Python download
@@ -85,15 +87,43 @@
          key: macOS-Python-${{ env.PYTHON_VERSION }}
      - name: Get Python
        if: steps.cache-python-download.outputs.cache-hit != 'true'
-       run: curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macosx10.9.pkg -o ~/python.pkg
+       run: curl https://www.python.org/ftp/python/${PYTHON_VERSION}/python-${PYTHON_VERSION}-macos11.pkg -o ~/python.pkg
      - name: Install Python
        run: sudo installer -pkg ~/python.pkg -target /
      - name: Install Python dependencies
+       # cffi will pick up the single-arch libffi from Homebrew, while Apple provides universal2-version
+       # of libffi with XCode. So we forcefully have to remove the Homebrew one.
+       # Because building cryptography is hard, and we cannot force pip to fetch universal2 version we
+       # first install the x86 version (and it's dependencies) and then manually fetch the universal2 build
+       # https://github.com/pyca/cryptography/issues/5918
        run: |
+         brew uninstall libffi --ignore-dependencies
          python3 --version
          pip3 install --upgrade pip wheel
-         pip3 install --upgrade -r requirements.txt
+         ARCHFLAGS="-arch x86_64 -arch arm64" pip3 install --upgrade cffi --no-binary cffi
+         pip3 install --upgrade -r requirements.txt --no-binary sabyenc3
+         pip3 uninstall cryptography -y
+         pip3 download cryptography --platform macosx_10_10_universal2 --only-binary :all: --no-deps --dest .
+         pip3 install cryptography --no-cache-dir --no-index --find-links .
          pip3 install --upgrade -r builder/requirements.txt
+     - name: Checkout PyInstaller
+       # We need to build the PyInstaller bootloader from sources:
+       # https://github.com/pyinstaller/pyinstaller/issues/6235
+       uses: actions/checkout@v2
+       with:
+         repository: pyinstaller/pyinstaller
+         path: pyinstaller
+     - name: Build PyInstaller bootloader for macOS 10.9 support
+       # Make sure we checkout the latest stable release
+       run: |
+         python3 --version
+         cd pyinstaller/bootloader
+         git fetch --tags
+         git checkout $(git describe --tags $(git rev-list --tags --max-count=1))
+         python3 ./waf all
+         cd ..
+         python3 -m pip install .
+         cd ..
      - name: Import macOS codesign certificates
        uses: apple-actions/import-codesign-certs@v1
        if: env.SIGNING_AUTH
12  .github/workflows/integration_testing.yml (vendored)
@@ -6,17 +6,19 @@ jobs:
  test:
    name: Test ${{ matrix.name }} - Python ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 15
    strategy:
      fail-fast: false
      matrix:
-       python-version: [3.6, 3.7, 3.8, 3.9]
+       python-version: ["3.7", "3.8", "3.9", "3.10"]
        os: [ubuntu-20.04]
        include:
          - name: macOS
            os: macos-latest
-           python-version: 3.9
+           python-version: "3.10"
          - name: Windows
            os: windows-latest
-           python-version: 3.9
+           python-version: "3.10"

    steps:
      - uses: actions/checkout@v2
@@ -26,11 +28,11 @@
        python-version: ${{ matrix.python-version }}
      - name: Install system dependencies
        if: runner.os == 'Linux'
-       run: sudo apt-get install unrar p7zip-full par2 chromium-chromedriver
+       run: sudo apt-get install unrar p7zip-full par2
      - name: Install Python dependencies
        run: |
          python --version
-         pip install --upgrade pip
+         pip install --upgrade pip wheel
          pip install --upgrade -r requirements.txt
          pip install --upgrade -r tests/requirements.txt
      - name: Test SABnzbd
7  .github/workflows/translations.yml (vendored)
@@ -8,27 +8,30 @@ on:
jobs:
  translations:
    runs-on: ubuntu-latest
    env:
      TX_TOKEN: ${{ secrets.TX_TOKEN }}
    steps:
      - uses: actions/checkout@v2
      - name: Generate translatable texts
        run: |
          python3 tools/extract_pot.py
      - name: Install Transifex client
        if: env.TX_TOKEN
        # Sudo is needed to link the "tx"-command
        run: |
          sudo -H python3 -m pip install setuptools wheel
          sudo -H python3 -m pip install transifex-client
      - name: Push/pull Transifex translations
        if: env.TX_TOKEN
        run: |
          tx push --source --parallel
          tx pull --all --force --parallel
        env:
          TX_TOKEN: ${{ secrets.TX_TOKEN }}
      - name: Compile translations to validate them
        run: |
          python3 tools/make_mo.py
      - name: Push translatable and translated texts back to repo
        uses: stefanzweifel/git-auto-commit-action@v4.5.1
        if: env.TX_TOKEN
        with:
          commit_message: Update translatable texts
          commit_user_name: SABnzbd Automation
@@ -52,7 +52,7 @@ Specific guides to install from source are available for Windows and macOS:
  https://sabnzbd.org/wiki/installation/install-macos
  https://sabnzbd.org/wiki/installation/install-from-source-windows

- Only Python 3.6 and above is supported.
+ Only Python 3.7 and above is supported.

  On Linux systems you need to install:
  par2 unrar unzip python3-setuptools python3-pip
4  PKG-INFO
@@ -1,7 +1,7 @@
  Metadata-Version: 1.0
  Name: SABnzbd
- Version: 3.4.1
- Summary: SABnzbd-3.4.1
+ Version: 3.5.0Beta3
+ Summary: SABnzbd-3.5.0Beta3
  Home-page: https://sabnzbd.org
  Author: The SABnzbd Team
  Author-email: team@sabnzbd.org
@@ -18,7 +18,7 @@ If you want to know more you can head over to our website: https://sabnzbd.org.

  SABnzbd has a few dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:

- - `python` (Python 3.6 and higher, often called `python3`)
+ - `python` (Python 3.7 and above, often called `python3`)
  - Python modules listed in `requirements.txt`. Install with `python3 -m pip install -r requirements.txt -U`
  - `par2` (Multi-threaded par2 installation guide can be found [here](https://sabnzbd.org/wiki/installation/multicore-par2))
  - `unrar` (make sure you get the "official" non-free version of unrar)
45  README.mkd
@@ -1,28 +1,31 @@
- Release Notes - SABnzbd 3.4.1
+ Release Notes - SABnzbd 3.5.0 Beta 3
  =========================================================

- ## Bugfixes since 3.4.0
- - macOS: Failed to run on M1 systems or older macOS versions.
+ ## Changes since 3.5.0 Beta 2
+ - Reduced memory usage during and after parsing `.nzb` files.
+ - Added option to preserve paused state after a restart.
+ - `unrar` logging of Direct Unpack was not logged if it was aborted.

- ## Changes since 3.3.1
- - Extended `Deobfuscate final filenames` to attempt to set the correct
-   file extension based on the file signature if the file extension is
-   not present or meaningless.
- - Added additional pattern keys that can be used in the `Sort String`
-   for Sorting, by using the `guessit` package internally for parsing.
- - If unpacked files contain `.par2` files they will always be read and
-   used to rename any matching files.
- - Regular expressions can be used to specify `Unwanted extensions`.
- - Not all passwords will be tried if a matching one was found.
- - Some interface-only options were added as API-call.
- - The Plush skin has been removed.
+ ## Changes since 3.5.0 Beta 1
+ - Updated news server SSL setup due to changes in Python 3.10.
+ - Setting custom ciphers forces the maximum TLS version to 1.2.
+ - Handle multiple passwords stored in NZB-file.
+ - Print low-level Windows status error on `IOError`.
+ - macOS: full native support for M1 systems.

- ## Bugfixes since 3.3.1
- - Duplicate check based on `.nzb` MD5 was performed before it was calculated.
- - Enforce `local_ranges` for broadcasts (Bonjour/SSDP).
- - Correctly parse the filename in `Content-Disposition` header.
- - `Warning` instead of `Info` when there is a restart due to crashed thread.
- - Only run Direct Unpack if `enable_unrar` is enabled.
+ ## Changes since 3.4.2
+ - Removed Python 3.6 support.
+ - SOCKS5 proxy support for all outgoing connections.
+ - Restored support for UUencoded jobs.
+ - `Required` server option: in case of connection failures, the queue
+   will be paused for a few minutes instead of skipping the server.
+ - Require TLSv1.2 or higher for SSL news server connections.
+ - macOS/Windows: updated to Python 3.10.
+ - macOS: native support for M1 systems.

+ ## Bugfixes since 3.4.2
+ - Email notification setting was not shown correctly.
+ - Windows: `portable.cmd` was not included in the release.

  ## Upgrade notices
  - The download statistics file `totals10.sab` is updated in 3.2.x
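The first SABnzbd.py hunk below raises the interpreter check from 0x03060000 to 0x03070000, matching the "Removed Python 3.6 support" note above. A small reference sketch, not part of the diff, of how sys.hexversion encodes the version:

import sys

# sys.hexversion packs the version as 0xMMmmppRS: major, minor, micro,
# release level (0xA alpha, 0xB beta, 0xC rc, 0xF final) and serial.
# Python 3.7.0 final reports 0x030700F0, so every 3.7+ interpreter clears
# the 0x03070000 threshold, while any 3.6.x stays below it.
print(hex(sys.hexversion))
print(sys.hexversion >= 0x03070000, sys.version_info >= (3, 7))  # both checks agree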
23  SABnzbd.py
@@ -17,8 +17,8 @@

  import sys

- if sys.hexversion < 0x03060000:
-     print("Sorry, requires Python 3.6 or above")
+ if sys.hexversion < 0x03070000:
+     print("Sorry, requires Python 3.7 or above")
      print("You can read more at: https://sabnzbd.org/wiki/installation/install-off-modules")
      sys.exit(1)

@@ -32,6 +32,7 @@ import signal
  import socket
  import platform
  import subprocess
+ import multiprocessing
  import ssl
  import time
  import re
@@ -46,6 +47,9 @@ try:
      import portend
      import cryptography
      import chardet
+     import guessit
+     import puremagic
+     import socks
  except ImportError as e:
      print("Not all required Python modules are available, please check requirements.txt")
      print("Missing module:", e.name)
@@ -92,7 +96,6 @@ from sabnzbd.filesystem import get_ext, real_path, long_path, globber_full, remo
  from sabnzbd.panic import panic_tmpl, panic_port, panic_host, panic, launch_a_browser
  import sabnzbd.config as config
  import sabnzbd.cfg
  import sabnzbd.downloader
  import sabnzbd.notifier as notifier
  import sabnzbd.zconfig
  from sabnzbd.getipaddress import localipv4, publicipv4, ipv6
@@ -106,11 +109,10 @@ try:
      import win32event
      import win32service
      import win32ts
      import pywintypes
      import servicemanager
      from win32com.shell import shell, shellcon

-     from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info
+     from sabnzbd.utils.apireg import get_connection_info, set_connection_info
      import sabnzbd.sabtray

      win32api.SetConsoleCtrlHandler(sabnzbd.sig_handler, True)
@@ -1439,12 +1441,11 @@ def main():
      try:
          cherrypy.engine.start()
      except:
          # Since the webserver is started by cherrypy in a separate thread, we can't really catch any
          # start-up errors. This try/except only catches very few errors, the rest is only shown in the console.
          logging.error(T("Failed to start web-interface: "), exc_info=True)
          abort_and_show_error(browserhost, cherryport)

      # Wait for server to become ready
      cherrypy.engine.wait(cherrypy.process.wspbus.states.STARTED)

      if sabnzbd.WIN32:
          if enable_https:
              mode = "s"
@@ -1468,6 +1469,9 @@
      if sabnzbd.NO_DOWNLOADING:
          return

+     # Apply proxy, if configured, before main requests are made
+     sabnzbd.set_socks5_proxy()
+
      # Start all SABnzbd tasks
      logging.info("Starting %s-%s", sabnzbd.MY_NAME, sabnzbd.__version__)
      try:
@@ -1739,6 +1743,9 @@ def handle_windows_service():


  if __name__ == "__main__":
+     # Require for freezing
+     multiprocessing.freeze_support()
+
      # We can only register these in the main thread
      signal.signal(signal.SIGINT, sabnzbd.sig_handler)
      signal.signal(signal.SIGTERM, sabnzbd.sig_handler)
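The new `import socks` and the `sabnzbd.set_socks5_proxy()` call above belong to the "SOCKS5 proxy support for all outgoing connections" feature in the release notes. The diff does not show `set_socks5_proxy()` itself, so the following is only a sketch of the usual PySocks pattern for routing every outgoing connection through a proxy; the URL shape follows the `socks5://username:password@hostname:port` placeholder from the General settings template, and the helper name `apply_socks5_proxy` is invented for illustration.

import socket
from urllib.parse import urlsplit

import socks  # PySocks, the dependency imported above


def apply_socks5_proxy(proxy_url: str) -> None:
    parts = urlsplit(proxy_url)  # e.g. socks5://user:pass@host:1080
    socks.set_default_proxy(
        socks.SOCKS5,
        parts.hostname,
        parts.port or 1080,
        username=parts.username,
        password=parts.password,
    )
    # Monkey-patch the socket factory so every new outgoing connection
    # (news servers, RSS, indexers) is created through the proxy.
    socket.socket = socks.socksocket

Patching the global socket factory is the standard PySocks approach, which is why a single configuration value can cover all outgoing traffic at once.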
@@ -41,6 +41,7 @@ RELEASE_VERSION = pkginfo.Develop(".").version
  # Add hidden imports
  extra_hiddenimports = ["Cheetah.DummyTransaction", "cheroot.ssl.builtin", "certifi"]
  extra_hiddenimports.extend(collect_submodules("babelfish.converters"))
+ extra_hiddenimports.extend(collect_submodules("guessit.data"))

  # Add platform specific stuff
  if sys.platform == "darwin":
@@ -66,6 +67,7 @@ else:
      # Windows
      extra_hiddenimports.append("win32timezone")
      extra_folders += ["win/multipar/", "win/unrar/", "win/7zip/"]
+     extra_files += ["portable.cmd"]

      # Parse the version info
      version_regexed = re.search(r"(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)", RELEASE_VERSION)
@@ -135,6 +137,7 @@ exe = EXE(
      append_pkg=False,
      icon="icons/sabnzbd.ico",
      version=version_info,
+     target_arch="universal2",
  )

  coll = COLLECT(exe, pyi_analysis.binaries, pyi_analysis.zipfiles, pyi_analysis.datas, name="SABnzbd")
@@ -22,11 +22,11 @@ import sys
  import os
  import time
  import shutil
  import shlex
  import subprocess
  import tarfile
  import pkginfo
  import github
- from distutils.dir_util import copy_tree


  VERSION_FILE = "sabnzbd/version.py"
@@ -185,8 +185,7 @@ if __name__ == "__main__":
      # Run PyInstaller and check output
      run_external_command([sys.executable, "-O", "-m", "PyInstaller", "SABnzbd.spec"])

-     # Use special distutils function to merge the main and console directories
-     copy_tree("dist/SABnzbd-console", "dist/SABnzbd")
+     shutil.copytree("dist/SABnzbd-console", "dist/SABnzbd", dirs_exist_ok=True)
      safe_remove("dist/SABnzbd-console")

      # Remove unwanted DLL's
@@ -245,6 +244,14 @@
      # Run PyInstaller and check output
      run_external_command([sys.executable, "-O", "-m", "PyInstaller", "SABnzbd.spec"])

+     # Make sure we created a fully universal2 release
+     for bin_to_check in glob.glob("dist/SABnzbd.app/Contents/MacOS/**/*.so", recursive=True):
+         print("Checking if binary is universal2: %s" % bin_to_check)
+         file_output = run_external_command(["file", bin_to_check])
+         # Make sure we have both arm64 and x86
+         if not ("x86_64" in file_output and "arm64" in file_output):
+             raise RuntimeError("Non-universal2 binary found!")
+
      # Only continue if we can sign
      if authority:
          files_to_sign = [
@@ -361,7 +368,7 @@

      # Copy all folders and files to the new folder
      for source_folder in extra_folders:
-         copy_tree(source_folder, os.path.join(src_folder, source_folder))
+         shutil.copytree(source_folder, os.path.join(src_folder, source_folder), dirs_exist_ok=True)

      # Copy all files
      for source_file in extra_files:
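The two `copy_tree` replacements above remove the builder's last use of `distutils.dir_util`; `distutils` is deprecated (PEP 632) and `shutil.copytree(..., dirs_exist_ok=True)`, available since Python 3.8, gives essentially the same merge-into-existing-directory behaviour. A minimal sketch of the difference:

import shutil

# Without dirs_exist_ok, copytree refuses to write into an existing target:
#   shutil.copytree("dist/SABnzbd-console", "dist/SABnzbd")  ->  FileExistsError
# With it (Python 3.8+), the console build is merged into the main bundle,
# which is what distutils' copy_tree used to do.
shutil.copytree("dist/SABnzbd-console", "dist/SABnzbd", dirs_exist_ok=True)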
@@ -1,5 +1,5 @@
  # Basic build requirements
- pyinstaller==4.2
+ pyinstaller
  setuptools
  pkginfo
  certifi
@@ -1,5 +1,5 @@
  <!--#set global $pane="Config"#-->
- <!--#set global $help_uri="configuration/3.4/configure"#-->
+ <!--#set global $help_uri="configuration/3.5/configure"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <!--#from sabnzbd.encoding import CODEPAGE#-->

@@ -1,5 +1,5 @@
  <!--#set global $pane="Categories"#-->
- <!--#set global $help_uri="configuration/3.4/categories"#-->
+ <!--#set global $help_uri="configuration/3.5/categories"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->
  <div class="colmask">
  <div class="section">

@@ -1,5 +1,5 @@
  <!--#set global $pane="Folders"#-->
- <!--#set global $help_uri="configuration/3.4/folders"#-->
+ <!--#set global $help_uri="configuration/3.5/folders"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <div class="colmask">

@@ -1,5 +1,5 @@
  <!--#set global $pane="General"#-->
- <!--#set global $help_uri="configuration/3.4/general"#-->
+ <!--#set global $help_uri="configuration/3.5/general"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <div class="colmask">
@@ -179,6 +179,11 @@
  <input type="checkbox" name="enable_https_verification" id="enable_https_verification" value="1" <!--#if int($enable_https_verification) > 0 then 'checked="checked"' else ""#--> <!--#if int($certificate_validation) == 0 then "disabled=\"disabled\"" else ""#--> />
  <span class="desc">$T('explain-enable_https_verification')</span>
  </div>
+ <div class="field-pair advanced-settings">
+ <label class="config" for="socks5_proxy_url">$T('opt-socks5_proxy_url')</label>
+ <input type="text" name="socks5_proxy_url" id="socks5_proxy_url" value="$socks5_proxy_url" placeholder="socks5://username:password@hostname:port" />
+ <span class="desc">$T('explain-socks5_proxy_url') <br/>$T('readwiki')</span>
+ </div>
  <div class="field-pair">
  <button class="btn btn-default saveButton"><span class="glyphicon glyphicon-ok"></span> $T('button-saveChanges')</button>
  </div>
@@ -1,5 +1,5 @@
  <!--#set global $pane="Email"#-->
- <!--#set global $help_uri="configuration/3.4/notifications"#-->
+ <!--#set global $help_uri="configuration/3.5/notifications"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <!--#def show_notify_checkboxes($section_label)#-->
@@ -22,6 +22,7 @@
  <option value="$ct" <!--#if $ct in $getVar($section_label + '_cats') then 'selected="selected"' else ""#-->>$Tspec($ct)</option>
  <!--#end for#-->
  </select>
+ <p>$T('defaultNotifiesAll')</p>
  </div>
  <!--#end def#-->

@@ -40,6 +41,7 @@
  <option value="$ct" <!--#if $ct in $email_cats then 'selected="selected"' else ""#-->>$Tspec($ct)</option>
  <!--#end for#-->
  </select>
+ <p>$T('defaultNotifiesAll')</p>
  </div>
  </div>
  <div class="col1">
@@ -47,9 +49,9 @@
  <div class="field-pair">
  <label class="config" for="email_endjob">$T('opt-email_endjob')</label>
  <select name="email_endjob" id="email_endjob">
- <option value="0" <!--#if int($email_endjob) == "0" then 'selected="selected"' else ""#--> >$T('email-never')</option>
- <option value="1" <!--#if int($email_endjob) == "1" then 'selected="selected"' else ""#--> >$T('email-always')</option>
- <option value="2" <!--#if int($email_endjob) == "2" then 'selected="selected"' else ""#--> >$T('email-errorOnly')</option>
+ <option value="0" <!--#if int($email_endjob) == 0 then 'selected="selected"' else ""#--> >$T('email-never')</option>
+ <option value="1" <!--#if int($email_endjob) == 1 then 'selected="selected"' else ""#--> >$T('email-always')</option>
+ <option value="2" <!--#if int($email_endjob) == 2 then 'selected="selected"' else ""#--> >$T('email-errorOnly')</option>
  </select>
  </div>
  <div class="field-pair">
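The `email_endjob` hunk above is the "Email notification setting was not shown correctly" fix from the release notes: the template compared `int($email_endjob)` against the strings "0"/"1"/"2", and in Python 3 an int never compares equal to a str, so no option was ever marked selected. A two-line illustration:

assert (int("2") == "2") is False   # int vs str: always unequal in Python 3
assert int("2") == 2                # compare against an int instead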
@@ -1,5 +1,5 @@
  <!--#set global $pane="RSS"#-->
- <!--#set global $help_uri="configuration/3.4/rss"#-->
+ <!--#set global $help_uri="configuration/3.5/rss"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->
  <!--#import html#-->
  <div class="colmask">

@@ -1,5 +1,5 @@
  <!--#set global $pane="Scheduling"#-->
- <!--#set global $help_uri="configuration/3.4s/scheduling"#-->
+ <!--#set global $help_uri="configuration/3.5/scheduling"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <%

@@ -1,5 +1,5 @@
  <!--#set global $pane="Servers"#-->
- <!--#set global $help_uri="configuration/3.4/servers"#-->
+ <!--#set global $help_uri="configuration/3.5/servers"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <!--#import json#-->
@@ -103,6 +103,11 @@
  <input type="checkbox" name="send_group" id="send_group" value="1" />
  <span class="desc">$T('srv-explain-send_group')</span>
  </div>
+ <div class="field-pair advanced-settings">
+ <label class="config" for="required">$T('srv-required')</label>
+ <input type="checkbox" name="required" id="required" value="1" />
+ <span class="desc">$T('explain-required')</span>
+ </div>
  <div class="field-pair advanced-settings">
  <label class="config" for="optional">$T('srv-optional')</label>
  <input type="checkbox" name="optional" id="optional" value="1" />
@@ -225,6 +230,11 @@
  <span class="desc">$T('explain-ssl_ciphers') <br>$T('readwiki')
  <a href="${helpuri}advanced/ssl-ciphers" target="_blank">${helpuri}advanced/ssl-ciphers</a></span>
  </div>
+ <div class="field-pair advanced-settings">
+ <label class="config" for="required$cur">$T('srv-required')</label>
+ <input type="checkbox" name="required" id="required$cur" value="1" <!--#if int($server['required']) != 0 then 'checked="checked"' else ""#--> />
+ <span class="desc">$T('explain-required')</span>
+ </div>
  <div class="field-pair advanced-settings">
  <label class="config" for="optional$cur">$T('srv-optional')</label>
  <input type="checkbox" name="optional" id="optional$cur" value="1" <!--#if int($server['optional']) != 0 then 'checked="checked"' else ""#--> />

@@ -1,5 +1,5 @@
  <!--#set global $pane="Sorting"#-->
- <!--#set global $help_uri="configuration/3.4/sorting"#-->
+ <!--#set global $help_uri="configuration/3.5/sorting"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <div class="colmask">
@@ -11,12 +11,13 @@
  <h3>$T('seriesSorting') <a href="$helpuri$help_uri#toc0" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a></h3>
  <p>
  <b>$T('affectedCat')</b><br/>
- <select name="tv_categories" multiple="multiple" class="multiple_cats">
+ <select name="tv_categories" multiple="multiple" class="multiple_cats" required="required">
  <!--#for $ct in $categories#-->
  <option value="$ct" <!--#if $ct in $tv_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
  <!--#end for#-->
  </select>
  </p>
+ <p>$T('selectOneCat')</p>
  </div>
  <!-- /col2 -->
  <div class="col1">
@@ -223,12 +224,13 @@
  <h3>$T('movieSort') <a href="$helpuri$help_uri#toc6" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a></h3>
  <p>
  <b>$T('affectedCat')</b><br/>
- <select name="movie_categories" multiple="multiple" class="multiple_cats">
+ <select name="movie_categories" multiple="multiple" class="multiple_cats" required="required">
  <!--#for $ct in $categories#-->
  <option value="$ct" <!--#if $ct in $movie_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
  <!--#end for#-->
  </select>
  </p>
+ <p>$T('selectOneCat')</p>
  </div>
  <!-- /col2 -->
  <div class="col1">
@@ -419,12 +421,13 @@
  <h3>$T('dateSorting') <a href="$helpuri$help_uri#toc9" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a></h3>
  <p>
  <b>$T('affectedCat')</b><br/>
- <select name="date_categories" multiple="multiple" class="multiple_cats">
+ <select name="date_categories" multiple="multiple" class="multiple_cats" required="required">
  <!--#for $ct in $categories#-->
  <option value="$ct" <!--#if $ct in $date_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
  <!--#end for#-->
  </select>
  </p>
+ <p>$T('selectOneCat')</p>
  </div>
  <!-- /col2 -->
  <div class="col1">

@@ -1,5 +1,5 @@
  <!--#set global $pane="Special"#-->
- <!--#set global $help_uri="configuration/3.4/special"#-->
+ <!--#set global $help_uri="configuration/3.5/special"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <div class="colmask">

@@ -1,5 +1,5 @@
  <!--#set global $pane="Switches"#-->
- <!--#set global $help_uri="configuration/3.4/switches"#-->
+ <!--#set global $help_uri="configuration/3.5/switches"#-->
  <!--#include $webdir + "/_inc_header_uc.tmpl"#-->

  <div class="colmask">

@@ -85,6 +85,11 @@
  </ul>
  <div class="tab-content">
  <div class="tab-pane fade in active" id="options-status">
+ <div class="row" data-bind="visible: statusInfo.active_socks5_proxy">
+ <div class="col-sm-6">$T('opt-socks5_proxy_url')   </div>
+ <div class="col-sm-6" data-bind="visible: hasStatusInfo, text: statusInfo.active_socks5_proxy"></div>
+ <div class="col-sm-6 col-loading" data-bind="visible: !hasStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
+ </div>
  <div class="row">
  <div class="col-sm-6">$T('dashboard-localIP4') </div>
  <div class="col-sm-6" data-bind="visible: hasStatusInfo, text: !statusInfo.localipv4() ? '$T('dashboard-connectionError')' : statusInfo.localipv4(), css: { 'options-bad-status' : !statusInfo.localipv4() }"></div>
@@ -55,6 +55,7 @@ function ViewModel() {
      self.statusInfo = {};
      self.statusInfo.folders = ko.observableArray([]);
      self.statusInfo.servers = ko.observableArray([]);
+     self.statusInfo.active_socks5_proxy = ko.observable();
      self.statusInfo.localipv4 = ko.observable();
      self.statusInfo.publicipv4 = ko.observable();
      self.statusInfo.ipv6 = ko.observable();
@@ -773,6 +774,7 @@ function ViewModel() {
      self.statusInfo.completedirspeed(data.status.completedirspeed)
      self.statusInfo.internetbandwidth(data.status.internetbandwidth)
      self.statusInfo.dnslookup(data.status.dnslookup)
+     self.statusInfo.active_socks5_proxy(data.status.active_socks5_proxy)
      self.statusInfo.localipv4(data.status.localipv4)
      self.statusInfo.publicipv4(data.status.publicipv4)
      self.statusInfo.ipv6(data.status.ipv6 || glitterTranslate.noneText)
@@ -1116,6 +1118,9 @@ function ViewModel() {
      // Save servers (for reporting functionality of OZnzb)
      self.servers = response.config.servers;

+     // Already set if we are using a proxy
+     if(response.config.misc.socks5_proxy_url) self.statusInfo.active_socks5_proxy(true)
+
      // Update message
      if(newRelease) {
          self.allMessages.push({

@@ -531,11 +531,10 @@ function QueueModel(parent, data) {
          return self.name()
      })
      self.missingText = ko.pureComputed(function() {
-         // Check for missing data, the value is arbitrary! (1%)
-         if(self.missingMB()/self.totalMB() > 0.01) {
+         // Check for missing data, can show 0 if article-size is smaller than 500K, but we accept that
+         if(self.missingMB()) {
              return self.missingMB().toFixed(0) + ' MB ' + glitterTranslate.misingArt
          }
          return;
      })
      self.statusText = ko.computed(function() {
          // Checking
@@ -387,10 +387,6 @@
  msgid "Decoder failure: Out of memory"
  msgstr ""

- #: sabnzbd/decoder.py
- msgid "UUencode detected, only yEnc encoding is supported [%s]"
- msgstr ""
-
  #. Warning message
  #: sabnzbd/decoder.py
  msgid "Unknown Error while decoding %s"
@@ -541,7 +537,7 @@ msgid "No recipients given, no email sent"
  msgstr ""

  #. Error message - Warning message
- #: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
+ #: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
  msgid "Cannot read %s"
  msgstr ""

@@ -1127,7 +1123,7 @@
  #. Warning message
  #: sabnzbd/nzbstuff.py
- msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
+ msgid "Invalid NZB file %s, skipping (error: %s)"
  msgstr ""

  #. Warning message
@@ -3454,6 +3450,14 @@
  msgid "Verify certificates when connecting to indexers and RSS-sources using HTTPS."
  msgstr ""

+ #: sabnzbd/skintext.py
+ msgid "SOCKS5 Proxy"
+ msgstr ""
+
+ #: sabnzbd/skintext.py
+ msgid "Use the specified SOCKS5 proxy for all outgoing connections."
+ msgstr ""
+
  #: sabnzbd/skintext.py
  msgid "Server"
  msgstr ""
@@ -3686,6 +3690,15 @@
  msgid "0 is highest priority, 100 is the lowest priority"
  msgstr ""

+ #. Server required tickbox
+ #: sabnzbd/skintext.py
+ msgid "Required"
+ msgstr ""
+
+ #: sabnzbd/skintext.py
+ msgid "In case of connection failures, the download queue will be paused for a few minutes instead of skipping this server"
+ msgstr ""
+
  #. Server optional tickbox
  #: sabnzbd/skintext.py
  msgid "Optional"
@@ -3862,6 +3875,10 @@
  msgid "Read All Feeds Now"
  msgstr ""

+ #: sabnzbd/skintext.py
+ msgid "If only the <em>Default</em> category is selected, notifications are enabled for jobs in all categories."
+ msgstr ""
+
  #: sabnzbd/skintext.py
  msgid "Email Notification On Job Completion"
  msgstr ""
@@ -4132,7 +4149,13 @@
  msgid "X"
  msgstr ""

- #: sabnzbd/skintext.py
+ #. Warning message
+ #: sabnzbd/skintext.py, sabnzbd/sorting.py
+ msgid "Select at least 1 category."
+ msgstr ""
+
+ #. Warning message
+ #: sabnzbd/skintext.py, sabnzbd/sorting.py
  msgid "Series Sorting"
  msgstr ""
@@ -4160,7 +4183,8 @@
  msgid "Example"
  msgstr ""

- #: sabnzbd/skintext.py
+ #. Warning message
+ #: sabnzbd/skintext.py, sabnzbd/sorting.py
  msgid "Movie Sorting"
  msgstr ""
@@ -4312,7 +4336,8 @@
  msgid "No folders"
  msgstr ""

- #: sabnzbd/skintext.py
+ #. Warning message
+ #: sabnzbd/skintext.py, sabnzbd/sorting.py
  msgid "Date Sorting"
  msgstr ""
281  po/main/cs.po
@@ -36,7 +36,7 @@ msgstr "Nezdařilo se spustit webové rozhraní"
|
||||
#. Warning message
|
||||
#: SABnzbd.py
|
||||
msgid "Cannot find web template: %s, trying standard template"
|
||||
msgstr ""
|
||||
msgstr "Šablona pro web nebyla nalezena: %s, zkouším standardní šablonu"
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -49,6 +49,7 @@ msgstr ""
|
||||
msgid ""
|
||||
"SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc"
|
||||
msgstr ""
|
||||
"Modul SABYenc... nebyl nalezen! Očekávána v%s - https://sabnzbd.org/sabyenc"
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -84,6 +85,8 @@ msgid ""
|
||||
"Please be aware the 0.0.0.0 hostname will need an IPv6 address for external "
|
||||
"access"
|
||||
msgstr ""
|
||||
"Pozor, hostitelská adresa 0.0.0.0 bude vyžadovat IPv6 adresu pro externí "
|
||||
"přístup"
|
||||
|
||||
#. Error message
|
||||
#: SABnzbd.py
|
||||
@@ -169,22 +172,22 @@ msgstr "Načítání %s selhalo"
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Restarting because of crashed postprocessor"
|
||||
msgstr ""
|
||||
msgstr "Restartuji protože postprocessor selhal"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Restarting because of crashed downloader"
|
||||
msgstr ""
|
||||
msgstr "Restartuji protože selhal downloader"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Restarting because of crashed decoder"
|
||||
msgstr ""
|
||||
msgstr "Restartuji protože selhal decoder"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
msgid "Restarting because of crashed assembler"
|
||||
msgstr ""
|
||||
msgstr "Restartuji protože selhal assembler"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/__init__.py
|
||||
@@ -340,7 +343,7 @@ msgstr "Adresa serveru je vyžadována"
|
||||
|
||||
#: sabnzbd/cfg.py
|
||||
msgid "%s is not a valid script"
|
||||
msgstr ""
|
||||
msgstr "%s neni validní skript"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/config.py
|
||||
@@ -408,10 +411,6 @@ msgstr ""
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Chyba dekodéru: nedostatek paměti"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "Detekováno UUencode, pouze yEnc je podporováno [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -516,12 +515,12 @@ msgstr "Vypínání"
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Server %s is expiring in %s day(s)"
|
||||
msgstr ""
|
||||
msgstr "Server %s vyprší za %s dnů"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Server %s has used the specified quota"
|
||||
msgstr ""
|
||||
msgstr "Server %s vyčerpal nastavenou kvótu"
|
||||
|
||||
#: sabnzbd/emailer.py
|
||||
msgid "Failed to connect to mail server"
|
||||
@@ -569,8 +568,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Nebyli zadáni přijemci, žádný email neodeslán"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Nelze číst %s"
|
||||
|
||||
@@ -618,11 +616,11 @@ msgstr "Chyba přesunu %s do %s"
|
||||
#. Error message
|
||||
#: sabnzbd/filesystem.py
|
||||
msgid "Blocked attempt to create directory %s"
|
||||
msgstr ""
|
||||
msgstr "Zablokován pokus vytvořit adresář %s"
|
||||
|
||||
#: sabnzbd/interface.py
|
||||
msgid "Refused connection from:"
|
||||
msgstr ""
|
||||
msgstr "Odmítnuto spojení z:"
|
||||
|
||||
#: sabnzbd/interface.py
|
||||
msgid "Refused connection with hostname \"%s\" from:"
|
||||
@@ -791,7 +789,7 @@ msgstr ""
|
||||
#. Error message
|
||||
#: sabnzbd/misc.py
|
||||
msgid "Python script \"%s\" does not have execute (+x) permission set"
|
||||
msgstr ""
|
||||
msgstr "Python skript \"%s\" nemá nastaveno právo spuštění (+x)"
|
||||
|
||||
#: sabnzbd/newsunpack.py, sabnzbd/postproc.py
|
||||
msgid "Running script"
|
||||
@@ -808,11 +806,11 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Incomplete sequence of joinable files"
|
||||
msgstr ""
|
||||
msgstr "Nekompletní sekvence spojovaných souborů"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "File join of %s failed"
|
||||
msgstr ""
|
||||
msgstr "Spojování souboru %s selhalo"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "[%s] Error \"%s\" while joining files"
|
||||
@@ -847,12 +845,12 @@ msgstr "Mazání %s selhalo!"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Trying unrar with password \"%s\""
|
||||
msgstr ""
|
||||
msgstr "Zkouším unrar s heslem \"%s\""
|
||||
|
||||
#. Error message
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Unpacking failed, archive requires a password"
|
||||
msgstr ""
|
||||
msgstr "Rozbalení selhalo, archiv vyžaduje heslo"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Unpacking"
|
||||
@@ -860,7 +858,7 @@ msgstr "Rozbaluji"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Unpacking failed, unable to find %s"
|
||||
msgstr ""
|
||||
msgstr "Rozbalování selhalo, nezdařilo se najít %s"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/newsunpack.py
|
||||
@@ -924,11 +922,11 @@ msgstr "%s souborů v %s"
|
||||
#. Error message
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Error \"%s\" while running unzip() on %s"
|
||||
msgstr ""
|
||||
msgstr "Chyba \"%s\" během běhu unzip() na %s"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "No 7za binary found, cannot unpack \"%s\""
|
||||
msgstr ""
|
||||
msgstr "7za nebylo nalezeno, nemohu rozbalit \"%s\""
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Trying 7zip with password \"%s\""
|
||||
@@ -936,7 +934,7 @@ msgstr "Zkouším 7zip s heslem \"%s\""
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "7ZIP set \"%s\" is incomplete, cannot unpack"
|
||||
msgstr ""
|
||||
msgstr "Sada 7zip \"%s\" je nekompletní, nelze rozbalit"
|
||||
|
||||
#: sabnzbd/newsunpack.py
|
||||
msgid "Could not unpack %s"
|
||||
@@ -1180,7 +1178,7 @@ msgstr "Nekompatibilní soubor fronty, nelze pokračovat"
|
||||
#. Error message
|
||||
#: sabnzbd/nzbqueue.py
|
||||
msgid "Error loading %s, corrupt file detected"
|
||||
msgstr ""
|
||||
msgstr "Nelze nahrát %s, detekován porušený soubor"
|
||||
|
||||
#: sabnzbd/nzbqueue.py
|
||||
msgid "NZB added to queue"
|
||||
@@ -1188,8 +1186,8 @@ msgstr "NZB přidáno do fronty"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Nesprávný NZB soubor %s, přeskakuji (důvod=%s, řádek=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -1333,15 +1331,15 @@ msgstr "Zkontrolovat sledovanou složku"
|
||||
|
||||
#: sabnzbd/osxmenu.py, sabnzbd/sabtray.py, sabnzbd/sabtraylinux.py
|
||||
msgid "Read all RSS feeds"
|
||||
msgstr ""
|
||||
msgstr "Číst všechny RSS kanály"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Complete Folder"
|
||||
msgstr ""
|
||||
msgstr "Složka dokončených"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Incomplete Folder"
|
||||
msgstr ""
|
||||
msgstr "Složka nedokončených"
|
||||
|
||||
#: sabnzbd/osxmenu.py, sabnzbd/sabtray.py
|
||||
msgid "Troubleshoot"
|
||||
@@ -1359,11 +1357,11 @@ msgstr "Restart bez přihlášení"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Quit"
|
||||
msgstr ""
|
||||
msgstr "Vypnout"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Queue First 10 Items"
|
||||
msgstr ""
|
||||
msgstr "Fronta prvních 10 položek"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Empty"
|
||||
@@ -1371,7 +1369,7 @@ msgstr "Prázdný"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "History Last 10 Items"
|
||||
msgstr ""
|
||||
msgstr "Historie posledních 10 položek"
|
||||
|
||||
#: sabnzbd/osxmenu.py
|
||||
msgid "Go to wizard"
|
||||
@@ -1383,7 +1381,7 @@ msgstr "Zastavuji..."
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Problem with"
|
||||
msgstr ""
|
||||
msgstr "Problém s"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid ""
|
||||
@@ -1431,7 +1429,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "SABnzbd detected a fatal error:"
|
||||
msgstr ""
|
||||
msgstr "SABnzbd detekovalo závažnou chybu:"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid ""
|
||||
@@ -1452,7 +1450,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Program did not start!"
|
||||
msgstr ""
|
||||
msgstr "Program se nespustil!"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Fatal error"
|
||||
@@ -1467,7 +1465,7 @@ msgstr ""
|
||||
#. Warning message
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Cannot launch the browser, probably not found"
|
||||
msgstr ""
|
||||
msgstr "Nelze spustit webový prohlížeč, nejspíš nenalezen"
|
||||
|
||||
#: sabnzbd/panic.py
|
||||
msgid "Access denied"
|
||||
@@ -1480,7 +1478,7 @@ msgstr ""
|
||||
#. Warning message
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Old queue detected, use Status->Repair to convert the queue"
|
||||
msgstr ""
|
||||
msgstr "Stará fronta nalezena, použijte Status->Repair pro konverzi fronty"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/postproc.py
|
||||
@@ -1555,7 +1553,7 @@ msgstr "Stahování selhalo"
|
||||
#. Error message
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Cleanup of %s failed."
|
||||
msgstr ""
|
||||
msgstr "Čištění %s selhalo."
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Download Completed"
|
||||
@@ -1564,7 +1562,7 @@ msgstr "Stahování dokončeno"
|
||||
#. Error message
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Cannot create final folder %s"
|
||||
msgstr ""
|
||||
msgstr "Nelze vytvořit složku dokončených souborů %s"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "[%s] No par2 sets"
|
||||
@@ -1576,7 +1574,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Verified successfully using SFV files"
|
||||
msgstr ""
|
||||
msgstr "Úspěšně ověřeno s využitím SFV souborů"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Trying RAR-based verification"
|
||||
@@ -1588,7 +1586,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/postproc.py, sabnzbd/skintext.py
|
||||
msgid "Passworded"
|
||||
msgstr ""
|
||||
msgstr "Zaheslováno"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "RAR files verified successfully"
|
||||
@@ -1639,7 +1637,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/rating.py
|
||||
msgid "Server address"
|
||||
msgstr ""
|
||||
msgstr "Adresa serveru"
|
||||
|
||||
#: sabnzbd/rating.py, sabnzbd/skintext.py
|
||||
msgid "API Key"
|
||||
@@ -1654,7 +1652,7 @@ msgstr ""
|
||||
#. Error message
|
||||
#: sabnzbd/rss.py
|
||||
msgid "Incorrect RSS feed description \"%s\""
|
||||
msgstr ""
|
||||
msgstr "Špatný popis RSS kanálu \"%s\""
|
||||
|
||||
#: sabnzbd/rss.py
|
||||
msgid "Do not have valid authentication for feed %s"
|
||||
@@ -1666,7 +1664,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/rss.py
|
||||
msgid "Failed to retrieve RSS from %s: %s"
|
||||
msgstr ""
|
||||
msgstr "Nezdařilo se stáhnout RSS z %s:%s"
|
||||
|
||||
#: sabnzbd/rss.py, sabnzbd/urlgrabber.py
|
||||
msgid "Server %s uses an untrusted HTTPS certificate"
|
||||
@@ -1675,24 +1673,24 @@ msgstr ""
|
||||
#. Warning message
|
||||
#: sabnzbd/rss.py
|
||||
msgid "RSS Feed %s was empty"
|
||||
msgstr ""
|
||||
msgstr "RSS kanál %s byl prázdný"
|
||||
|
||||
#: sabnzbd/rss.py
|
||||
msgid "Incompatible feed"
|
||||
msgstr ""
|
||||
msgstr "Nekompatibilní kanál"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/rss.py
|
||||
msgid "Empty RSS entry found (%s)"
|
||||
msgstr ""
|
||||
msgstr "Prázdný RSS záznam nalezen (%s)"
|
||||
|
||||
#: sabnzbd/sabtray.py, sabnzbd/sabtraylinux.py
|
||||
msgid "Show interface"
|
||||
msgstr ""
|
||||
msgstr "Zobrazit rozhraní"
|
||||
|
||||
#: sabnzbd/sabtray.py, sabnzbd/sabtraylinux.py
|
||||
msgid "Open complete folder"
|
||||
msgstr ""
|
||||
msgstr "Otevřít složku s kompletními soubory"
|
||||
|
||||
#. Queue page button or entry box
|
||||
#: sabnzbd/sabtray.py, sabnzbd/skintext.py
|
||||
@@ -1726,7 +1724,7 @@ msgstr "Pozastavit na 6 hodin"
|
||||
#. Shutdown SABnzbd - Config->Scheduling
|
||||
#: sabnzbd/sabtray.py, sabnzbd/sabtraylinux.py, sabnzbd/skintext.py
|
||||
msgid "Shutdown"
|
||||
msgstr ""
|
||||
msgstr "Vypnout"
|
||||
|
||||
#: sabnzbd/sabtray.py, sabnzbd/sabtraylinux.py
|
||||
msgid "Remaining"
|
||||
@@ -1744,7 +1742,7 @@ msgstr ""
|
||||
#. Warning message
|
||||
#: sabnzbd/scheduler.py
|
||||
msgid "Unknown action: %s"
|
||||
msgstr ""
|
||||
msgstr "Neznámá akce: %s"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/scheduler.py
|
||||
@@ -1755,17 +1753,17 @@ msgstr ""
|
||||
#. "download item"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Download"
|
||||
msgstr ""
|
||||
msgstr "Stahování"
|
||||
|
||||
#. PP phase "filejoin"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Join files"
|
||||
msgstr ""
|
||||
msgstr "Spojit soubory"
|
||||
|
||||
#. PP phase "unpack"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Unpack"
|
||||
msgstr ""
|
||||
msgstr "Rozbalit"
|
||||
|
||||
#. PP phase "script" - Notification Script settings
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -1775,67 +1773,67 @@ msgstr "Skript"
|
||||
#. PP Source of the NZB (path or URL) - Where to find the SABnzbd sourcecode
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Source"
|
||||
msgstr ""
|
||||
msgstr "Zdroj"
|
||||
|
||||
#. PP Distribution over servers - Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Servers"
|
||||
msgstr ""
|
||||
msgstr "Servery"
|
||||
|
||||
#. PP Failure message
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Failure"
|
||||
msgstr ""
|
||||
msgstr "Chyba"
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Failed"
|
||||
msgstr ""
|
||||
msgstr "Selhalo"
|
||||
|
||||
#. Queue and PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Waiting"
|
||||
msgstr ""
|
||||
msgstr "Čekání"
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Repairing..."
|
||||
msgstr ""
|
||||
msgstr "Opravuji..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Extracting..."
|
||||
msgstr ""
|
||||
msgstr "Rozbaluji..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Moving..."
|
||||
msgstr ""
|
||||
msgstr "Přesouvám..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Running script..."
|
||||
msgstr ""
|
||||
msgstr "Spouštím skript..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Fetching extra blocks..."
|
||||
msgstr ""
|
||||
msgstr "Stahuji extra bloky..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Quick Check..."
|
||||
msgstr ""
|
||||
msgstr "Rychlá kontrola..."
|
||||
|
||||
#. PP status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Verifying..."
|
||||
msgstr ""
|
||||
msgstr "Kontroluji..."
|
||||
|
||||
#. Pseudo-PP status, in reality used for Queue-status
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Downloading"
|
||||
msgstr ""
|
||||
msgstr "Stahování"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Propagation delay"
|
||||
@@ -1844,22 +1842,22 @@ msgstr ""
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Task"
|
||||
msgstr ""
|
||||
msgstr "Úkol"
|
||||
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "disable server"
|
||||
msgstr ""
|
||||
msgstr "deaktivovat server"
|
||||
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "enable server"
|
||||
msgstr ""
|
||||
msgstr "povolit server"
|
||||
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Speedlimit"
|
||||
msgstr ""
|
||||
msgstr "Omezení rychlosti"
|
||||
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -1879,17 +1877,17 @@ msgstr ""
|
||||
#. #: Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Read RSS feeds"
|
||||
msgstr ""
|
||||
msgstr "Číst RSS kanály"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove failed jobs"
|
||||
msgstr ""
|
||||
msgstr "Odstranit neúspěšné úkoly"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Remove completed jobs"
|
||||
msgstr ""
|
||||
msgstr "Odstranit dokončené úkoly"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -1919,17 +1917,17 @@ msgstr ""
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Resume high prioirty jobs"
|
||||
msgstr ""
|
||||
msgstr "Obnovit úkoly s vysokou prioritou"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Enable quota management"
|
||||
msgstr ""
|
||||
msgstr "Povolit správu kvót"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Disable quota management"
|
||||
msgstr ""
|
||||
msgstr "Vypnout správu kvót"
|
||||
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -1939,17 +1937,17 @@ msgstr "Pozastavit úkoly v kategorii"
|
||||
#. Config->Scheduler
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Resume jobs with category"
|
||||
msgstr ""
|
||||
msgstr "Onovit úkoly v kategorii"
|
||||
|
||||
#. Prowl priority - Three way switch for duplicates
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Off"
|
||||
msgstr ""
|
||||
msgstr "Vypnuto"
|
||||
|
||||
#. Prowl priority
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Very Low"
|
||||
msgstr ""
|
||||
msgstr "Velmi nízká"
|
||||
|
||||
#. Prowl priority
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2114,11 +2112,11 @@ msgstr "Vlastní"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Speed"
|
||||
msgstr ""
|
||||
msgstr "Rychlost"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "on"
|
||||
msgstr ""
|
||||
msgstr "zapnuto"
|
||||
|
||||
#. Config: startup parameters of SABnzbd - Notification Script settings
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2146,7 +2144,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Comment"
|
||||
msgstr ""
|
||||
msgstr "Komentář"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Send"
|
||||
@@ -2158,11 +2156,11 @@ msgstr "Zrušit"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Other"
|
||||
msgstr ""
|
||||
msgstr "Ostatní"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Report"
|
||||
msgstr ""
|
||||
msgstr "Výkaz"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Video"
|
||||
@@ -2174,11 +2172,11 @@ msgstr "Audio"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Not used"
|
||||
msgstr ""
|
||||
msgstr "Nepoužito"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "or less"
|
||||
msgstr ""
|
||||
msgstr "nebo méně"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Log in"
|
||||
@@ -2204,12 +2202,12 @@ msgstr "Uložit"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Saving.."
|
||||
msgstr ""
|
||||
msgstr "Ukládám..."
|
||||
|
||||
#. Used in confirmation popups
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Are you sure?"
|
||||
msgstr ""
|
||||
msgstr "Jste si jistí?"
|
||||
|
||||
#. Used in confirmation popups
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2219,7 +2217,7 @@ msgstr "Smazat všechny stažené soubory?"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Home"
|
||||
msgstr ""
|
||||
msgstr "Domů"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2234,7 +2232,7 @@ msgstr "Stav"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Help"
|
||||
msgstr ""
|
||||
msgstr "Nápověda"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2249,12 +2247,12 @@ msgstr "IRC"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Issues"
|
||||
msgstr ""
|
||||
msgstr "Problémy"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Support the project, Donate!"
|
||||
msgstr ""
|
||||
msgstr "Podpořte projekt!"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2274,7 +2272,7 @@ msgstr "Přepínače"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Scheduling"
|
||||
msgstr ""
|
||||
msgstr "Plánování"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2304,7 +2302,7 @@ msgstr "Řazení"
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Special"
|
||||
msgstr ""
|
||||
msgstr "Speciální"
|
||||
|
||||
#. Main menu item
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2408,7 +2406,7 @@ msgstr "Zadejte URL"
|
||||
#. Queue page selection menu
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "On queue finish"
|
||||
msgstr ""
|
||||
msgstr "Při dokončení fronty"
|
||||
|
||||
#. Queue page end-of-queue action
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2438,7 +2436,7 @@ msgstr "Limit rychlosti"
|
||||
#. Queue page table column header - Config->RSS table column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Order"
|
||||
msgstr ""
|
||||
msgstr "Pořadí"
|
||||
|
||||
#. Queue page table column header - Job details page
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2478,7 +2476,7 @@ msgstr "Skripty"
|
||||
#. Confirmation popup
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Delete all items from the queue?"
|
||||
msgstr ""
|
||||
msgstr "Smazat všechny položky z fronty?"
|
||||
|
||||
#. Queue page button
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2508,22 +2506,22 @@ msgstr "Odstranit NZB a smazat soubory"
|
||||
#. Queue page, as in "4G *of* 10G"
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "of"
|
||||
msgstr ""
|
||||
msgstr "z"
|
||||
|
||||
#. Caption for missing articles in Queue
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Missing articles"
|
||||
msgstr ""
|
||||
msgstr "Chybějící části"
|
||||
|
||||
#. Remaining quota (displayed in Queue)
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Quota left"
|
||||
msgstr ""
|
||||
msgstr "Zbývající kvóta"
|
||||
|
||||
#. Manual reset of quota
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "manual"
|
||||
msgstr ""
|
||||
msgstr "ručně"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Reset Quota now"
|
||||
@@ -2532,7 +2530,7 @@ msgstr "Vynulovat kvótu"
|
||||
#. Confirmation popup
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Delete all completed items from History?"
|
||||
msgstr ""
|
||||
msgstr "Smazat věchny dokončené položky z historie?"
|
||||
|
||||
#. Button/link hiding History job details
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2613,7 +2611,7 @@ msgstr ""
|
||||
#. Status page button
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Force Disconnect"
|
||||
msgstr ""
|
||||
msgstr "Vynucené odpojení"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -2694,7 +2692,7 @@ msgstr ""
|
||||
#. column header
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Type"
|
||||
msgstr ""
|
||||
msgstr "Typ"
|
||||
|
||||
#. Status page, indicator that server is enabled
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -2707,7 +2705,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Connection failed!"
|
||||
msgstr ""
|
||||
msgstr "Spojení selhalo!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Local IPv4 address"
|
||||
@@ -2744,7 +2742,7 @@ msgstr "Rychlost složko pro dokončené"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Writing speed"
|
||||
msgstr ""
|
||||
msgstr "Rychlost zápisu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Could not write. Check that the directory is writable."
|
||||
@@ -2794,7 +2792,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Advanced"
|
||||
msgstr ""
|
||||
msgstr "Pokročilé"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -2865,15 +2863,15 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Restarting SABnzbd..."
|
||||
msgstr ""
|
||||
msgstr "Restartuji SABnzbd..."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Changes will require a SABnzbd restart!"
|
||||
msgstr ""
|
||||
msgstr "Změny vyžadují restart SABnzbd!"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SABnzbd Web Server"
|
||||
msgstr ""
|
||||
msgstr "SABnzbd webový server"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SABnzbd Host"
|
||||
@@ -2986,7 +2984,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "RSS Checking Interval"
|
||||
msgstr ""
|
||||
msgstr "Interval konctroly RSS"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -3028,7 +3026,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "History Retention"
|
||||
msgstr ""
|
||||
msgstr "Retence historie"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
@@ -3038,19 +3036,19 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Keep all jobs"
|
||||
msgstr ""
|
||||
msgstr "Zachovat všechny úkoly"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Keep maximum number of completed jobs"
|
||||
msgstr ""
|
||||
msgstr "Maximální počet dokončených úkolů"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Keep completed jobs maximum number of days"
|
||||
msgstr ""
|
||||
msgstr "Počet dnů pro zachování dokončených ukolů "
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Do not keep any completed jobs"
|
||||
msgstr ""
|
||||
msgstr "Nauchovávat dokončené úkoly"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Jobs"
|
||||
@@ -3115,7 +3113,7 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "No access"
|
||||
msgstr ""
|
||||
msgstr "Bez přístupu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Add NZB files "
|
||||
@@ -3611,6 +3609,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr ""
|
||||
@@ -3854,6 +3860,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr ""
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4034,6 +4051,12 @@ msgstr ""
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr ""
|
||||
@@ -4063,11 +4086,11 @@ msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Send RSS notifications"
|
||||
msgstr ""
|
||||
msgstr "Odeslat RSS upozornění"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Send email when an RSS feed adds jobs to the queue."
|
||||
msgstr ""
|
||||
msgstr "Odeslat email když RSS kanál přidá úkol do fronty."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SMTP Server"
|
||||
@@ -4310,7 +4333,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4338,7 +4367,8 @@ msgstr ""
|
||||
msgid "Example"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4490,7 +4520,8 @@ msgstr ""
|
||||
msgid "No folders"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -411,10 +411,6 @@ msgstr "Forkert logning i historiken av %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Dekoder fejl: Ikke mere hukommelse"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -570,8 +566,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Ingen modtagere givet, ingen e-mail sendt"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Kan ikke læse %s"
|
||||
|
||||
@@ -1194,8 +1189,8 @@ msgstr "NZB tilføjet i køen"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Ødelagt NZB fil %s, springer over (årsag=%s, linje=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3701,6 +3696,14 @@ msgstr ""
|
||||
"Kontroller certifikater, når du opretter forbindelse til indeksører og RSS-"
|
||||
"kilder ved hjælp HTTPS."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -3949,6 +3952,17 @@ msgstr "Streng"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 er højeste prioritet, 100 er den laveste prioritet"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4134,6 +4148,12 @@ msgstr "Hentet"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Læs alle Feeds nu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "E-mail påmindelse når job er fuldført"
|
||||
@@ -4416,7 +4436,13 @@ msgstr "Indekseringen kategorier/grupper"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Serie sortering"
|
||||
|
||||
@@ -4444,7 +4470,8 @@ msgstr "Forudindstillinger"
|
||||
msgid "Example"
|
||||
msgstr "Eksempel"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "Film sortering"
|
||||
|
||||
@@ -4596,7 +4623,8 @@ msgstr "I mappe"
|
||||
msgid "No folders"
|
||||
msgstr "Ingen mappe"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Dato sortering"
|
||||
|
||||
|
||||
@@ -9,12 +9,13 @@
|
||||
# Ben Hecht <benjamin.hecht@me.com>, 2021
|
||||
# Safihre <safihre@sabnzbd.org>, 2021
|
||||
# Manuel C. Senn, 2021
|
||||
# Andreas Kames, 2021
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: SABnzbd-develop\n"
|
||||
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
|
||||
"Last-Translator: Manuel C. Senn, 2021\n"
|
||||
"Last-Translator: Andreas Kames, 2021\n"
|
||||
"Language-Team: German (https://www.transifex.com/sabnzbd/teams/111101/de/)\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
@@ -429,10 +430,6 @@ msgstr "Ungültiges Stufen-Protokoll im Verlauf für %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Decoder Fehler: Nicht genügend Speicher"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "UUencode erkannt, aber nur yEnc encoding ist untertützt [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -538,7 +535,7 @@ msgstr "Wird beendet …"
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
msgid "Server %s is expiring in %s day(s)"
|
||||
msgstr "Server %s läuft in %s tag(en) ab"
|
||||
msgstr "Server %s läuft in %s Tag(en) ab"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/downloader.py
|
||||
@@ -591,8 +588,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Keine E-Mail gesendet da keine Empfänger angegeben"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "%s kann nicht gelesen werden"
|
||||
|
||||
@@ -1233,8 +1229,8 @@ msgstr "NZB zur Warteschlange hinzugefügt"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Ungültige NZB-Datei %s wird übersprungen: %s auf Zeile %s"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -1688,7 +1684,7 @@ msgstr "RAR-Datei konnten nicht überprüft werden"
|
||||
|
||||
#: sabnzbd/postproc.py
|
||||
msgid "Trying RAR renamer"
|
||||
msgstr "Versuche RAR Umbenenner"
|
||||
msgstr "Versuche RAR-Umbenenner"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/postproc.py
|
||||
@@ -3818,6 +3814,14 @@ msgstr ""
|
||||
"Überprüfe Zertifikate bei Verbindungen zu Indexern und RSS-Quellen über "
|
||||
"HTTPS."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -4077,6 +4081,19 @@ msgstr "Strikt"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 ist die höchste, 99 die niedrigste Priorität"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "Benötigt"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"Im Fall von Verbindungsausfällen wird die Downloadwarteschlange für ein paar"
|
||||
" Minuten pausiert, statt diesen Server zu überspringen"
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4194,7 +4211,7 @@ msgstr "Bearbeiten"
|
||||
#. Config->RSS when will be the next RSS scan
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Next scan at"
|
||||
msgstr "Nächster scan um"
|
||||
msgstr "Nächster Scan um"
|
||||
|
||||
#. Config->RSS table column header
|
||||
#: sabnzbd/skintext.py
|
||||
@@ -4261,6 +4278,12 @@ msgstr "Heruntergeladen"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Jetzt alle Feeds lesen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Email-Benachrichtigung beim Fertigstellen von Aufträgen"
|
||||
@@ -4547,7 +4570,13 @@ msgstr "Indexer Kategorien/Gruppen"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr "Wähle mindestens 1 Kategorie"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Sortieren von TV-Serien"
|
||||
|
||||
@@ -4575,7 +4604,8 @@ msgstr "Voreinstellungen"
|
||||
msgid "Example"
|
||||
msgstr "Beispiel"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "Film Sortierung"
|
||||
|
||||
@@ -4727,7 +4757,8 @@ msgstr "In Ordnern"
|
||||
msgid "No folders"
|
||||
msgstr "Keine Ordner"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Sortieren nach Datum"
|
||||
|
||||
@@ -4925,7 +4956,7 @@ msgstr "Weitere Warteschlangen Spalten"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Extra history columns"
|
||||
msgstr "Weitere Verlauf Spalten"
|
||||
msgstr "Weitere Verlaufspalten"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "page"
|
||||
|
||||
@@ -428,10 +428,6 @@ msgstr "Registro de etapa invalido para transferencia terminada %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Fallo del decodificador: no hay memoria"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "Se ha detectado UUencode, solo se permite codificación yEnc [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -591,8 +587,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Sin destinatarios no se pudo enviar el email"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "No se puede leer %s"
|
||||
|
||||
@@ -1237,8 +1232,8 @@ msgstr "NZB añadido a la cola"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Fichero NBZ inválido: %s, omitiendo (razón=%s, línea=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3802,6 +3797,14 @@ msgstr ""
|
||||
"Verificar certificados al conectarse a indexadores y fuentes RSS usando "
|
||||
"HTTPS."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Servidor"
|
||||
@@ -4059,6 +4062,17 @@ msgstr "Estricto"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 El prioridad más alta, 99 es la prioridad más baja"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4245,6 +4259,12 @@ msgstr "Descargado"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Leer todas las fuentes ahora"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Notificación por email al terminar"
|
||||
@@ -4531,7 +4551,13 @@ msgstr "Categorías Indexer / Grupos"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Ordenación de Series"
|
||||
|
||||
@@ -4559,7 +4585,8 @@ msgstr "Preajustes"
|
||||
msgid "Example"
|
||||
msgstr "Ejemplo"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "Clasificación de películas"
|
||||
|
||||
@@ -4711,7 +4738,8 @@ msgstr "En directorios"
|
||||
msgid "No folders"
|
||||
msgstr "Sin Directorios"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Ordenar por fecha"
|
||||
|
||||
|
||||
@@ -408,10 +408,6 @@ msgstr "Virheellinen tila lokihistoriassa kohteelle %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -566,8 +562,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Vastaanottajaa ei määritelty, sähköpostia ei lähetetty"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Ei voida lukea %s"
|
||||
|
||||
@@ -1187,8 +1182,8 @@ msgstr "NZB lisätty jonoon"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Virheellinen NZB tiedosto %s, ohitetaan (syy=%s, rivi=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3703,6 +3698,14 @@ msgstr ""
|
||||
"Varmenna sertifikaatit yhdistettäessä indeksoijiin ja RSS-lähteisiin HTTPS "
|
||||
"protokollan avulla."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Palvelin"
|
||||
@@ -3950,6 +3953,17 @@ msgstr "Tiukka"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 on suurin prioriteetti, 99 on pienin prioriteetti"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4134,6 +4148,12 @@ msgstr "Ladattu"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Lue kaikki syötteet nyt"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Sähköposti-ilmoitus onnistuneesta latauksesta"
|
||||
@@ -4416,7 +4436,13 @@ msgstr "Indeksoijan kategoriat / ryhmä"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Sarjojen lajittelu"
|
||||
|
||||
@@ -4444,7 +4470,8 @@ msgstr "Esiasetukset"
|
||||
msgid "Example"
|
||||
msgstr "Esimerkki"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4596,7 +4623,8 @@ msgstr "Kansioissa"
|
||||
msgid "No folders"
|
||||
msgstr "Ei kansioita"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Päivämäärän lajittelu"
|
||||
|
||||
|
||||
@@ -429,10 +429,6 @@ msgstr "Étape de journalisation invalide dans l'historique pour %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Échec du décodeur : mémoire insuffisante"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "UUencode détecté, seul l'encodage yEnc est pris en charge [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -593,8 +589,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Aucun destinataire déterminé, aucun email envoyé"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Impossible de lire %s"
|
||||
|
||||
@@ -1234,8 +1229,8 @@ msgstr "NZB ajouté à la file d'attente"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Fichier NZB invalide %s, ignoré (raison=%s, ligne=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr "Fichier NZB %s invalide, sera ignoré (erreur : %s)"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3821,6 +3816,15 @@ msgstr ""
|
||||
"Vérifier les certificats lors de la connexion aux indexeurs et sources RSS "
|
||||
"utilisant HTTPS."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr "Proxy SOCKS5"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
"Utiliser le proxy SOCKS5 spécifié pour toutes les connexions sortantes."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Serveur"
|
||||
@@ -4080,6 +4084,19 @@ msgstr "Strict"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 est la priorité la plus élevée, 99 est la priorité la plus faible"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "Obligatoire"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"En cas d'échecs de connexion, la file d'attente des téléchargements sera "
|
||||
"mise en pause pendant quelques minutes au lieu de passer au serveur suivant."
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4267,6 +4284,14 @@ msgstr "Téléchargé"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Lire tous les flux maintenant"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
"Si seule la catégorie <em>Défaut</em> est sélectionnée, les notifications "
|
||||
"seront activées pour les tâches de toutes les catégories."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Notification par email lorsque des téléchargements sont terminés"
|
||||
@@ -4555,7 +4580,13 @@ msgstr "Catégories de l'indexeur / Groupes"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr "Sélectionnez au moins une catégorie."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Tri des séries"
|
||||
|
||||
@@ -4583,7 +4614,8 @@ msgstr "Modèles prédéfinis"
|
||||
msgid "Example"
|
||||
msgstr "Exemple"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "Tri des films"
|
||||
|
||||
@@ -4736,7 +4768,8 @@ msgstr "Dans les dossiers"
|
||||
msgid "No folders"
|
||||
msgstr "Pas de dossiers"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Tri par date"
|
||||
|
||||
|
||||
@@ -404,10 +404,6 @@ msgstr "רישום של אירוע בלתי תקף בהיסטוריה עבור %
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "כישלון מפענח: אין זיכרון"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "UUencode התגלה, רק קידוד yEnc נתמך [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -563,8 +559,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "נמענים לא ניתנו, דוא״ל לא נשלח"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "לא ניתן לקרוא את %s"
|
||||
|
||||
@@ -1187,8 +1182,8 @@ msgstr "NZB התווסף לתור"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "קובץ NZB בלתי תקף %s, מדלג (סיבה=%s, שורה=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3699,6 +3694,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr "וודא תעודות בעת התחברות אל מדדנים ומקורות RSS ע״י שימוש ב־HTTPS."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "שרת"
|
||||
@@ -3950,6 +3953,19 @@ msgstr "קפדני"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 הוא העדיפות הגבוהה ביותר, 100 הוא העדיפות הנמוכה ביותר"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "דרוש"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"במקרה של כישלונות חיבור, תור ההורדות יושהה למשך כמה דקות במקום דילוג על השרת"
|
||||
" הזה"
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4133,6 +4149,14 @@ msgstr "הוּרד"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "קרא את כל ההזנות כעת"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
"אם רק קטגורית <em>ברירת המחדל</em> נבחרת, התראות מאופשרות עבור עבודות בכל "
|
||||
"הקטגוריות."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "התראת דוא״ל בעת השלמת עבודה"
|
||||
@@ -4413,7 +4437,13 @@ msgstr "קטגוריות / קבוצות של מדדן"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr "בחר קטגוריה 1 לפחות."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "מיון סדרות"
|
||||
|
||||
@@ -4441,7 +4471,8 @@ msgstr "קדם־קביעות"
|
||||
msgid "Example"
|
||||
msgstr "דוגמה"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "מיון סרטים"
|
||||
|
||||
@@ -4593,7 +4624,8 @@ msgstr "בתיקיות"
|
||||
msgid "No folders"
|
||||
msgstr "אין תיקיות"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "מיון תאריכים"
|
||||
|
||||
|
||||
@@ -405,10 +405,6 @@ msgstr "Ugyldig scenen logging i historien for %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -562,8 +558,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Ingen mottaker oppgitt, e-post ikke sendt"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Kan ikke lese %s"
|
||||
|
||||
@@ -1184,8 +1179,8 @@ msgstr "NZB er lagt til i køen"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Feilaktig NZB fil %s, hopper over (årsak=%s, linje=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3679,6 +3674,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -3927,6 +3930,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 er høyeste prioritet, 99 er laveste prioritet"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4111,6 +4125,12 @@ msgstr "Nedlastet"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Les alle kilder nå"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "E-Post varsling når nedlasting er ferdig"
|
||||
@@ -4389,7 +4409,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Seriesortering"
|
||||
|
||||
@@ -4417,7 +4443,8 @@ msgstr "For innstillinger"
|
||||
msgid "Example"
|
||||
msgstr "Eksempel"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4569,7 +4596,8 @@ msgstr "I mappe"
|
||||
msgid "No folders"
|
||||
msgstr "Ingen mappe"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Dato sortering"
|
||||
|
||||
|
||||
@@ -420,10 +420,6 @@ msgstr "Ongeldig loggen van fase in geschiedenis voor %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Decoder fout: onvoldoende geheugen"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "UUencode gevonden, alleen yEnc codering wordt ondersteund [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -582,8 +578,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Geen geadresseerden opgegeven, e-mail niet verstuurd"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "%s kan niet gelezen worden"
|
||||
|
||||
@@ -1216,8 +1211,8 @@ msgstr "Download aan wachtrij toegevoegd"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Foutief NZB-bestand %s, overslaan (reden=%s, regel=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3771,6 +3766,14 @@ msgstr ""
|
||||
"Controleer certificaten bij beveiligde verbindingen met indexers en RSS-"
|
||||
"feeds."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr "SOCKS5 Proxy"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr "Gebruik een SOCKS5 proxy server voor alle uitgaande verbindingen."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -4033,6 +4036,19 @@ msgstr "Strikt"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 is de hoogste en 99 de laagste prioriteit"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr "Vereist"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
"Het downloaden zal een aantal minuten gepauzeerd worden wanneer deze server "
|
||||
"niet beschikbaar is."
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4219,6 +4235,14 @@ msgstr "Gedownload"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Alle feeds nu uitlezen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
"Notificaties zullen worden verstuurd voor alle downloads als de "
|
||||
"<em>Standaard</em> categorie geselecteerd is."
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Stuur een e-mail na het voltooien van elke download"
|
||||
@@ -4505,7 +4529,13 @@ msgstr "Indexer Categorieën / Groepen"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr "Selecteer minstens 1 categorie."
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Serie sorteren"
|
||||
|
||||
@@ -4533,7 +4563,8 @@ msgstr "Standaardinstellingen"
|
||||
msgid "Example"
|
||||
msgstr "Voorbeeld"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "Film sorteren"
|
||||
|
||||
@@ -4685,7 +4716,8 @@ msgstr "In mappen"
|
||||
msgid "No folders"
|
||||
msgstr "Geen mappen"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Datum sorteren"
|
||||
|
||||
@@ -4724,7 +4756,7 @@ msgstr "eigenschap"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "GuessIt Property"
|
||||
msgstr "GuessIt eigenschap"
|
||||
msgstr "GuessIt Eigenschap"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "GuessIt.Property"
|
||||
|
||||
@@ -400,10 +400,6 @@ msgstr "Nieprawidłowy log etapu w historii dla %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -561,8 +557,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Nie podano adresatów, wiadomość nie została wysłana"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Nie można odczytać %s"
|
||||
|
||||
@@ -1185,8 +1180,8 @@ msgstr "NZB dodany do kolejki"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Nieprawidłowy plik NZB %s, pomijam (powód=%s, linia=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3686,6 +3681,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Serwer"
|
||||
@@ -3935,6 +3938,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 oznacza najwyższy priorytet, 99 - najniższy"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4119,6 +4133,12 @@ msgstr "Pobrane"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Czytaj teraz wszystkie kanały"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Powiadomienia email po zakończeniu zadania"
|
||||
@@ -4397,7 +4417,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Sortowanie seriali"
|
||||
|
||||
@@ -4425,7 +4451,8 @@ msgstr "Predefiniowane"
|
||||
msgid "Example"
|
||||
msgstr "Przykład"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4577,7 +4604,8 @@ msgstr "W katalogach"
|
||||
msgid "No folders"
|
||||
msgstr "Brak katalogów"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Sortowanie według daty"
|
||||
|
||||
|
||||
@@ -406,10 +406,6 @@ msgstr "Registro inválido de etapa no histórico para %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -565,8 +561,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Nenhum destinário fornecido, e-mail não enviado"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Não é possível ler %s"
|
||||
|
||||
@@ -1186,8 +1181,8 @@ msgstr "NZB adicionado à fila"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Arquivo NZB %s inválido. Pulando (razão=%s, linha=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3691,6 +3686,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Servidor"
|
||||
@@ -3939,6 +3942,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 é a prioridade mais alta, 99 é a prioridade mais baixa"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4123,6 +4137,12 @@ msgstr "Baixados"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Ler Todos os Feeds Agora"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Notificar por e-mail na conclusão da tarefa"
|
||||
@@ -4401,7 +4421,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Ordenação de Séries"
|
||||
|
||||
@@ -4429,7 +4455,8 @@ msgstr "Predefinições"
|
||||
msgid "Example"
|
||||
msgstr "Exemplo"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4581,7 +4608,8 @@ msgstr "Em pastas"
|
||||
msgid "No folders"
|
||||
msgstr "Sem pastas"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Ordenação por data"
|
||||
|
||||
|
||||
@@ -417,10 +417,6 @@ msgstr "Jurnal istoric stagii invalid pentru %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "Eroare decodare: lipsă memorie"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr "UUencode detectat, este suportată doar codarea yEnc [%s]"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -579,8 +575,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Destinatar necunoscut, niciun email trimis"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Nu pot citi %s"
|
||||
|
||||
@@ -1212,8 +1207,8 @@ msgstr "NZB adăugat în coadă"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Fişier NZB invalid %s, ignorăm (motiv=%s, line=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3717,6 +3712,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -3967,6 +3970,17 @@ msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr ""
|
||||
"0 este prioritatea cea mai ridicată, 99 este prioritatea cea mai scăzută"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4151,6 +4165,12 @@ msgstr "Descărcate"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Citeşte Toate Fluxurile Acum"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Notificări Email Sarcină Terminată"
|
||||
@@ -4428,7 +4448,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Sortare Seriale"
|
||||
|
||||
@@ -4456,7 +4482,8 @@ msgstr "Presetări"
|
||||
msgid "Example"
|
||||
msgstr "Exemplu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4608,7 +4635,8 @@ msgstr "În dosare"
|
||||
msgid "No folders"
|
||||
msgstr "Fără dosare"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Sortare Dată"
|
||||
|
||||
|
||||
@@ -404,10 +404,6 @@ msgstr "Недопустимый этап ведения журнала для %
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -561,8 +557,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Получатели не указаны. Электронное письмо не отправлено"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Не удаётся прочитать %s"
|
||||
|
||||
@@ -1186,8 +1181,8 @@ msgstr "NZB-файл добавлен в очередь"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Недопустимый NZB-файл %s: пропущен (причина — %s, строка — %s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3681,6 +3676,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Сервер"
|
||||
@@ -3928,6 +3931,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr ""
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4112,6 +4126,12 @@ msgstr "Загружено"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Прочитать все ленты"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Уведомление по электронной почте после завершения задания"
|
||||
@@ -4396,7 +4416,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Сортировка сериалов"
|
||||
|
||||
@@ -4424,7 +4450,8 @@ msgstr "Готовые шаблоны"
|
||||
msgid "Example"
|
||||
msgstr "Пример"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4576,7 +4603,8 @@ msgstr "В папках"
|
||||
msgid "No folders"
|
||||
msgstr "Без папок"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Сортировка даты"
|
||||
|
||||
|
||||
@@ -402,10 +402,6 @@ msgstr "Погрешне етапе извештаја можете наћи у
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -560,8 +556,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Нема примаоце, е-порука није послана"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Неуспешно читање %s"
|
||||
|
||||
@@ -1180,8 +1175,8 @@ msgstr "NZB додат у ред"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Неважећи NZB %s, прескакање (разлог=%s, линија=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3666,6 +3661,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Сервер"
|
||||
@@ -3914,6 +3917,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 je najveći prioritet, 99 je najniži prioritet"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4097,6 +4111,12 @@ msgstr "Преузето"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Сада читај све фидове"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "Нотификација е-поштом при завршетку рада"
|
||||
@@ -4374,7 +4394,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Сортирање серије"
|
||||
|
||||
@@ -4402,7 +4428,8 @@ msgstr "Предподешавања"
|
||||
msgid "Example"
|
||||
msgstr "Примери"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4554,7 +4581,8 @@ msgstr "У фасциклама"
|
||||
msgid "No folders"
|
||||
msgstr "Нема фасцикле"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Сређивање датумом"
|
||||
|
||||
|
||||
@@ -403,10 +403,6 @@ msgstr "Felaktig loggning i historiken av %s"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -560,8 +556,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "Ingen mottagare angiven, ingen e-post har skickats"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "Kan ej läsa %s"
|
||||
|
||||
@@ -1184,8 +1179,8 @@ msgstr "NZB tillagd i kön"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "Felaktig NZB fil %s, hoppar över (orsak=%s, linje=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3678,6 +3673,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "Server"
|
||||
@@ -3926,6 +3929,17 @@ msgstr ""
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 är högst prioritet, 99 är lägst prioritet"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4110,6 +4124,12 @@ msgstr "Nedladdad"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "Läsa Alla Flöden Nu"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "E-post notifiering när jobb är slutfört"
|
||||
@@ -4387,7 +4407,13 @@ msgstr ""
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "Seriesortering"
|
||||
|
||||
@@ -4415,7 +4441,8 @@ msgstr "Förinställningar"
|
||||
msgid "Example"
|
||||
msgstr "Exempel"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr ""
|
||||
|
||||
@@ -4567,7 +4594,8 @@ msgstr "In mapp"
|
||||
msgid "No folders"
|
||||
msgstr "Ingen mapp"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "Datum sortering"
|
||||
|
||||
|
||||
@@ -400,10 +400,6 @@ msgstr "%s 历史信息中 stage 日志无效"
|
||||
msgid "Decoder failure: Out of memory"
|
||||
msgstr "解码器失败:内存不足"
|
||||
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "UUencode detected, only yEnc encoding is supported [%s]"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/decoder.py
|
||||
msgid "Unknown Error while decoding %s"
|
||||
@@ -557,8 +553,7 @@ msgid "No recipients given, no email sent"
|
||||
msgstr "未给定收件人,电子邮件未发出"
|
||||
|
||||
#. Error message - Warning message
|
||||
#: sabnzbd/emailer.py, sabnzbd/filesystem.py, sabnzbd/nzbparser.py,
|
||||
#: sabnzbd/rss.py
|
||||
#: sabnzbd/emailer.py, sabnzbd/nzbparser.py, sabnzbd/rss.py
|
||||
msgid "Cannot read %s"
|
||||
msgstr "无法读取 %s"
|
||||
|
||||
@@ -1171,8 +1166,8 @@ msgstr "NZB 已添加到队列"
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py
|
||||
msgid "Invalid NZB file %s, skipping (reason=%s, line=%s)"
|
||||
msgstr "无效 NZB 文件 %s,正在跳过 (原因=%s, 行=%s)"
|
||||
msgid "Invalid NZB file %s, skipping (error: %s)"
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/nzbstuff.py, sabnzbd/urlgrabber.py
|
||||
@@ -3621,6 +3616,14 @@ msgid ""
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
msgstr "当用 HTTPS 方式连接索引和RSS源时验证证书。"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "SOCKS5 Proxy"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Use the specified SOCKS5 proxy for all outgoing connections."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Server"
|
||||
msgstr "服务器"
|
||||
@@ -3864,6 +3867,17 @@ msgstr "严格"
|
||||
msgid "0 is highest priority, 100 is the lowest priority"
|
||||
msgstr "0 为最高优先级,100 为最低优先级"
|
||||
|
||||
#. Server required tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Required"
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"In case of connection failures, the download queue will be paused for a few "
|
||||
"minutes instead of skipping this server"
|
||||
msgstr ""
|
||||
|
||||
#. Server optional tickbox
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Optional"
|
||||
@@ -4046,6 +4060,12 @@ msgstr "已下载"
|
||||
msgid "Read All Feeds Now"
|
||||
msgstr "立即读取全部 Feed"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid ""
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled"
|
||||
" for jobs in all categories."
|
||||
msgstr ""
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
msgid "Email Notification On Job Completion"
|
||||
msgstr "任务完成 Email 通知"
|
||||
@@ -4324,7 +4344,13 @@ msgstr "索引 Categories / Groups"
|
||||
msgid "X"
|
||||
msgstr "X"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Select at least 1 category."
|
||||
msgstr ""
|
||||
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Series Sorting"
|
||||
msgstr "TV 排序"
|
||||
|
||||
@@ -4352,7 +4378,8 @@ msgstr "预设"
|
||||
msgid "Example"
|
||||
msgstr "示例"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Movie Sorting"
|
||||
msgstr "电影排序"
|
||||
|
||||
@@ -4504,7 +4531,8 @@ msgstr "分文件夹"
|
||||
msgid "No folders"
|
||||
msgstr "不分文件夹"
|
||||
|
||||
#: sabnzbd/skintext.py
|
||||
#. Warning message
|
||||
#: sabnzbd/skintext.py, sabnzbd/sorting.py
|
||||
msgid "Date Sorting"
|
||||
msgstr "日期排序"
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ cherrypy
portend
chardet
notify2
PySocks
puremagic
guessit>=3.1.0
|
||||
|
||||
@@ -18,19 +18,22 @@
|
||||
import os
|
||||
import logging
|
||||
import datetime
|
||||
import shutil
|
||||
import tempfile
|
||||
import pickle
|
||||
import ctypes.util
|
||||
import gzip
|
||||
import time
|
||||
import socket
|
||||
import socks
|
||||
import cherrypy
|
||||
import cherrypy._cpreqbody
|
||||
import platform
|
||||
import sys
|
||||
import ssl
|
||||
import urllib.parse
|
||||
from threading import Lock, Thread, Condition
|
||||
from typing import Any, AnyStr, Optional, Union
|
||||
from typing import Any, Optional, Union, BinaryIO
|
||||
|
||||
##############################################################################
|
||||
# Determine platform flags
|
||||
@@ -69,8 +72,6 @@ elif os.name == "posix":
|
||||
# Parse macOS version numbers
|
||||
if platform.system().lower() == "darwin":
|
||||
DARWIN = True
|
||||
# 12 = Sierra, 11 = ElCaptain, 10 = Yosemite, 9 = Mavericks, 8 = MountainLion
|
||||
DARWIN_VERSION = int(platform.mac_ver()[0].split(".")[1])
|
||||
MACOSLIBC = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True) # the MacOS C library
|
||||
try:
|
||||
import Foundation
|
||||
@@ -121,10 +122,6 @@ from sabnzbd.constants import (
|
||||
DEFAULT_PRIORITY,
|
||||
VALID_ARCHIVES,
|
||||
REPAIR_REQUEST,
|
||||
QUEUE_FILE_NAME,
|
||||
QUEUE_VERSION,
|
||||
QUEUE_FILE_TMPL,
|
||||
Status,
|
||||
)
|
||||
import sabnzbd.utils.ssdp
|
||||
|
||||
@@ -201,7 +198,6 @@ CMDLINE = " ".join(['"%s"' % arg for arg in sys.argv])
|
||||
__INITIALIZED__ = False
|
||||
__SHUTTING_DOWN__ = False
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Signal Handler
|
||||
##############################################################################
|
||||
@@ -274,6 +270,7 @@ def initialize(pause_downloader=False, clean_up=False, repair=0):
|
||||
cfg.https_cert.callback(guard_restart)
|
||||
cfg.https_key.callback(guard_restart)
|
||||
cfg.enable_https.callback(guard_restart)
|
||||
cfg.socks5_proxy_url.callback(guard_restart)
|
||||
cfg.top_only.callback(guard_top_only)
|
||||
cfg.pause_on_post_processing.callback(guard_pause_on_pp)
|
||||
cfg.quota_size.callback(guard_quota_size)
|
||||
@@ -604,30 +601,38 @@ def backup_exists(filename: str) -> bool:
|
||||
return path and os.path.exists(os.path.join(path, filename + ".gz"))
|
||||
|
||||
|
||||
def backup_nzb(filename: str, data: AnyStr):
|
||||
"""Backup NZB file"""
|
||||
path = cfg.nzb_backup_dir.get_path()
|
||||
if path:
|
||||
save_compressed(path, filename, data)
|
||||
def backup_nzb(nzb_path: str):
|
||||
"""Backup NZB file, return path to nzb if it was saved"""
|
||||
nzb_backup_dir = cfg.nzb_backup_dir.get_path()
|
||||
if nzb_backup_dir:
|
||||
logging.debug("Saving copy of %s in %s", filesystem.get_filename(nzb_path), nzb_backup_dir)
|
||||
shutil.copy(nzb_path, nzb_backup_dir)
|
||||
|
||||
|
||||
def save_compressed(folder: str, filename: str, data: AnyStr):
|
||||
"""Save compressed NZB file in folder"""
|
||||
def save_compressed(folder: str, filename: str, data_fp: BinaryIO) -> str:
|
||||
"""Save compressed NZB file in folder, return path to saved nzb file"""
|
||||
if filename.endswith(".nzb"):
|
||||
filename += ".gz"
|
||||
else:
|
||||
filename += ".nzb.gz"
|
||||
logging.info("Backing up %s", os.path.join(folder, filename))
|
||||
try:
|
||||
# Have to get around the path being put inside the tgz
|
||||
with open(os.path.join(folder, filename), "wb") as tgz_file:
|
||||
f = gzip.GzipFile(filename, fileobj=tgz_file, mode="wb")
|
||||
f.write(encoding.utob(data))
|
||||
f.flush()
|
||||
f.close()
|
||||
except:
|
||||
logging.error(T("Saving %s failed"), os.path.join(folder, filename))
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
full_nzb_path = os.path.join(folder, filename)
|
||||
|
||||
# Skip existing ones, as it might be queue-repair
|
||||
if not os.path.exists(full_nzb_path):
|
||||
logging.info("Saving %s", full_nzb_path)
|
||||
try:
|
||||
# Have to get around the path being put inside the tgz
|
||||
with open(full_nzb_path, "wb") as tgz_file:
|
||||
# We only need minimal compression to prevent huge files
|
||||
with gzip.GzipFile(filename, mode="wb", compresslevel=1, fileobj=tgz_file) as gzip_file:
|
||||
shutil.copyfileobj(data_fp, gzip_file)
|
||||
except:
|
||||
logging.error(T("Saving %s failed"), full_nzb_path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
else:
|
||||
logging.info("Skipping existing file %s", full_nzb_path)
|
||||
|
||||
return full_nzb_path
|
||||
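For reference, the gzip plus shutil.copyfileobj pattern that save_compressed() now uses comes down to this standalone sketch (file names are placeholders, not values from this change):

import gzip
import shutil

with open("example.nzb", "rb") as src, open("example.nzb.gz", "wb") as dst:
    # compresslevel=1 keeps CPU usage low; the filename argument only sets the name stored in the gzip header
    with gzip.GzipFile("example.nzb", mode="wb", compresslevel=1, fileobj=dst) as gz:
        shutil.copyfileobj(src, gz)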
|
||||
|
||||
##############################################################################
|
||||
@@ -686,6 +691,9 @@ def add_nzbfile(
|
||||
logging.error(T("Cannot create temp file for %s"), filename)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
return None
|
||||
finally:
|
||||
# Close the CherryPy reference
|
||||
nzbfile.file.close()
|
||||
|
||||
# Externally defined if we should keep the file?
|
||||
if keep is None:
|
||||
@@ -1082,6 +1090,21 @@ def wait_for_download_folder():
|
||||
time.sleep(2.0)
|
||||
|
||||
|
||||
def set_socks5_proxy():
    if cfg.socks5_proxy_url():
        proxy = urllib.parse.urlparse(cfg.socks5_proxy_url())
        logging.info("Using Socks5 proxy %s:%s", proxy.hostname, proxy.port)
        socks.set_default_proxy(
            socks.SOCKS5,
            proxy.hostname,
            proxy.port,
            True,  # use remote DNS, default
            proxy.username,
            proxy.password,
        )
        socket.socket = socks.socksocket
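A note on the new SOCKS5 support: set_socks5_proxy() relies on urllib.parse to split the configured URL and on PySocks to monkey-patch socket.socket. A minimal sketch of the URL handling, using a made-up proxy URL rather than anything shipped in this change:

import urllib.parse

example_url = "socks5://user:secret@127.0.0.1:1080"  # hypothetical socks5_proxy_url value
proxy = urllib.parse.urlparse(example_url)
# urlparse exposes exactly the pieces passed to socks.set_default_proxy() above
print(proxy.hostname, proxy.port, proxy.username, proxy.password)  # -> 127.0.0.1 1080 user secret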
|
||||
|
||||
def test_ipv6():
|
||||
"""Check if external IPv6 addresses are reachable"""
|
||||
if not cfg.selftest_host():
|
||||
|
||||
@@ -63,11 +63,10 @@ from sabnzbd.misc import (
|
||||
from sabnzbd.filesystem import diskspace, get_ext, clip_path, remove_all, list_scripts
|
||||
from sabnzbd.encoding import xml_name, utob
|
||||
from sabnzbd.utils.servertests import test_nntp_server_dict
|
||||
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6, addresslookup
|
||||
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6, addresslookup, active_socks5_proxy
|
||||
from sabnzbd.database import build_history_info, unpack_history_info, HistoryDB
|
||||
from sabnzbd.lang import is_rtl
|
||||
import sabnzbd.notifier
|
||||
import sabnzbd.rss
|
||||
import sabnzbd.emailer
|
||||
import sabnzbd.sorting
|
||||
|
||||
@@ -1336,6 +1335,7 @@ def build_status(calculate_performance: bool = False, skip_dashboard: bool = Fal
|
||||
|
||||
# Dashboard: Connection information
|
||||
if not int_conv(skip_dashboard):
|
||||
info["active_socks5_proxy"] = active_socks5_proxy()
|
||||
info["localipv4"] = localipv4()
|
||||
info["publicipv4"] = publicipv4()
|
||||
info["ipv6"] = ipv6()
|
||||
|
||||
@@ -24,8 +24,8 @@ import queue
|
||||
import logging
|
||||
import re
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
import hashlib
|
||||
import ctypes
|
||||
from typing import Tuple, Optional, List
|
||||
|
||||
import sabnzbd
|
||||
@@ -41,7 +41,6 @@ from sabnzbd.filesystem import (
|
||||
from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.nzbstuff import NzbObject, NzbFile
|
||||
import sabnzbd.downloader
|
||||
import sabnzbd.par2file as par2file
|
||||
import sabnzbd.utils.rarfile as rarfile
|
||||
|
||||
@@ -119,6 +118,8 @@ class Assembler(Thread):
|
||||
logging.error(T("Disk error on creating file %s"), clip_path(filepath))
|
||||
# Log traceback
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
if sabnzbd.WIN32:
|
||||
logging.info("Winerror: %s", hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2 ** 32))
|
||||
# Pause without saving
|
||||
sabnzbd.Downloader.pause()
|
||||
continue
|
||||
@@ -298,7 +299,6 @@ def check_encrypted_and_unwanted_files(nzo: NzbObject, filepath: str) -> Tuple[b
|
||||
# Is it even a rarfile?
|
||||
if rarfile.is_rarfile(filepath):
|
||||
# Open the rar
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
zf = rarfile.RarFile(filepath, single_file_check=True)
|
||||
|
||||
# Check for encryption
|
||||
|
||||
@@ -233,7 +233,7 @@ rating_filter_pause_keywords = OptionStr("misc", "rating_filter_pause_keywords")
|
||||
##############################################################################
|
||||
enable_tv_sorting = OptionBool("misc", "enable_tv_sorting", False)
|
||||
tv_sort_string = OptionStr("misc", "tv_sort_string")
|
||||
tv_categories = OptionList("misc", "tv_categories", "")
|
||||
tv_categories = OptionList("misc", "tv_categories", ["tv"])
|
||||
|
||||
enable_movie_sorting = OptionBool("misc", "enable_movie_sorting", False)
|
||||
movie_sort_string = OptionStr("misc", "movie_sort_string")
|
||||
@@ -260,6 +260,7 @@ rss_rate = OptionNumber("misc", "rss_rate", 60, 15, 24 * 60)
|
||||
ampm = OptionBool("misc", "ampm", False)
|
||||
replace_illegal = OptionBool("misc", "replace_illegal", True)
|
||||
start_paused = OptionBool("misc", "start_paused", False)
|
||||
preserve_paused_state = OptionBool("misc", "preserve_paused_state", False)
|
||||
enable_par_cleanup = OptionBool("misc", "enable_par_cleanup", True)
|
||||
process_unpacked_par2 = OptionBool("misc", "process_unpacked_par2", True)
|
||||
enable_unrar = OptionBool("misc", "enable_unrar", True)
|
||||
@@ -289,7 +290,7 @@ api_warnings = OptionBool("misc", "api_warnings", True, protect=True)
|
||||
disable_key = OptionBool("misc", "disable_api_key", False, protect=True)
|
||||
no_penalties = OptionBool("misc", "no_penalties", False)
|
||||
x_frame_options = OptionBool("misc", "x_frame_options", True)
|
||||
require_modern_tls = OptionBool("misc", "require_modern_tls", False)
|
||||
allow_old_ssl_tls = OptionBool("misc", "allow_old_ssl_tls", False)
|
||||
num_decoders = OptionNumber("misc", "num_decoders", 3)
|
||||
|
||||
# Text values
|
||||
@@ -311,6 +312,7 @@ local_ranges = OptionList("misc", "local_ranges", protect=True)
|
||||
max_url_retries = OptionNumber("misc", "max_url_retries", 10, 1)
|
||||
downloader_sleep_time = OptionNumber("misc", "downloader_sleep_time", 10, 0)
|
||||
ssdp_broadcast_interval = OptionNumber("misc", "ssdp_broadcast_interval", 15, 1, 600)
|
||||
socks5_proxy_url = OptionStr("misc", "socks5_proxy_url")
|
||||
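As a reminder (a sketch, not part of this diff), these Option* objects are read by calling them and written with .set():

# import sabnzbd.cfg as cfg
# if cfg.socks5_proxy_url():           # read: an empty string means no proxy is configured
#     ...
# cfg.preserve_paused_state.set(True)  # write: remember the paused state across restarts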
|
||||
|
||||
##############################################################################
|
||||
|
||||
@@ -409,6 +409,7 @@ class ConfigServer:
|
||||
self.ssl_verify = OptionNumber(name, "ssl_verify", 2, add=False)
|
||||
self.ssl_ciphers = OptionStr(name, "ssl_ciphers", add=False)
|
||||
self.enable = OptionBool(name, "enable", True, add=False)
|
||||
self.required = OptionBool(name, "required", False, add=False)
|
||||
self.optional = OptionBool(name, "optional", False, add=False)
|
||||
self.retention = OptionNumber(name, "retention", 0, add=False)
|
||||
self.expire_date = OptionStr(name, "expire_date", add=False)
|
||||
@@ -442,6 +443,7 @@ class ConfigServer:
|
||||
"ssl_ciphers",
|
||||
"send_group",
|
||||
"enable",
|
||||
"required",
|
||||
"optional",
|
||||
"retention",
|
||||
"expire_date",
|
||||
@@ -476,6 +478,7 @@ class ConfigServer:
|
||||
output_dict["ssl_verify"] = self.ssl_verify()
|
||||
output_dict["ssl_ciphers"] = self.ssl_ciphers()
|
||||
output_dict["enable"] = self.enable()
|
||||
output_dict["required"] = self.required()
|
||||
output_dict["optional"] = self.optional()
|
||||
output_dict["retention"] = self.retention()
|
||||
output_dict["expire_date"] = self.expire_date()
|
||||
|
||||
@@ -17,7 +17,6 @@
|
||||
|
||||
import os
|
||||
from collections import namedtuple
|
||||
from re import compile
|
||||
|
||||
CONFIG_VERSION = 19
|
||||
|
||||
@@ -122,6 +121,7 @@ VALID_NZB_FILES = (".nzb", ".gz", ".bz2")
|
||||
CHEETAH_DIRECTIVES = {"directiveStartToken": "<!--#", "directiveEndToken": "#-->", "prioritizeSearchListOverSelf": True}
|
||||
|
||||
IGNORED_FOLDERS = ("@eaDir", ".appleDouble")
|
||||
IGNORED_MOVIE_FOLDERS = ("video_ts", "audio_ts", "bdmv")
|
||||
|
||||
EXCLUDED_GUESSIT_PROPERTIES = [
|
||||
"part",
|
||||
|
||||
@@ -22,12 +22,15 @@ sabnzbd.decoder - article decoder
|
||||
import logging
|
||||
import hashlib
|
||||
import queue
|
||||
import binascii
|
||||
from io import BytesIO
|
||||
from threading import Thread
|
||||
from typing import Tuple, List, Optional
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import SABYENC_VERSION_REQUIRED
|
||||
from sabnzbd.encoding import ubtou
|
||||
from sabnzbd.nzbstuff import Article
|
||||
from sabnzbd.misc import match_str
|
||||
|
||||
@@ -45,17 +48,18 @@ except ImportError:
|
||||
SABYENC_ENABLED = False
|
||||
|
||||
|
||||
class CrcError(Exception):
|
||||
def __init__(self, needcrc: int, gotcrc: int, data: bytes):
|
||||
class BadData(Exception):
|
||||
def __init__(self, data: bytes):
|
||||
super().__init__()
|
||||
self.needcrc = needcrc
|
||||
self.gotcrc = gotcrc
|
||||
self.data = data
|
||||
|
||||
|
||||
class BadYenc(Exception):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
pass
|
||||
|
||||
|
||||
class BadUu(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class Decoder:
|
||||
@@ -140,7 +144,11 @@ class DecoderWorker(Thread):
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Decoding %s", art_id)
|
||||
|
||||
decoded_data = decode(article, raw_data)
|
||||
if article.nzf.type == "uu":
|
||||
decoded_data = decode_uu(article, raw_data)
|
||||
else:
|
||||
decoded_data = decode_yenc(article, raw_data)
|
||||
|
||||
article_success = True
|
||||
|
||||
except MemoryError:
|
||||
@@ -154,15 +162,20 @@ class DecoderWorker(Thread):
|
||||
sabnzbd.NzbQueue.reset_try_lists(article)
|
||||
continue
|
||||
|
||||
except CrcError as crc_error:
|
||||
logging.info("CRC Error in %s" % art_id)
|
||||
|
||||
except BadData as error:
|
||||
# Continue to the next one if we found new server
|
||||
if search_new_server(article):
|
||||
continue
|
||||
|
||||
# Store data, maybe par2 can still fix it
|
||||
decoded_data = crc_error.data
|
||||
decoded_data = error.data
|
||||
|
||||
except BadUu:
|
||||
logging.info("Badly formed uu article in %s", art_id)
|
||||
|
||||
# Try the next server
|
||||
if search_new_server(article):
|
||||
continue
|
||||
|
||||
except (BadYenc, ValueError):
|
||||
# Handles precheck and badly formed articles
|
||||
@@ -170,25 +183,29 @@ class DecoderWorker(Thread):
|
||||
# STAT was used, so we only get a status code
|
||||
article_success = True
|
||||
else:
|
||||
# Examine headers (for precheck) or body (for download)
|
||||
# Look for DMCA clues (while skipping "X-" headers)
|
||||
# Detect potential UUencode
|
||||
for line in raw_data:
|
||||
lline = line.lower()
|
||||
if b"message-id:" in lline:
|
||||
# Try uu-decoding
|
||||
if (not nzo.precheck) and article.nzf.type != "yenc":
|
||||
try:
|
||||
decoded_data = decode_uu(article, raw_data)
|
||||
logging.debug("Found uu-encoded article %s in job %s", art_id, nzo.final_name)
|
||||
article_success = True
|
||||
if not lline.startswith(b"x-") and match_str(
|
||||
lline, (b"dmca", b"removed", b"cancel", b"blocked")
|
||||
):
|
||||
article_success = False
|
||||
logging.info("Article removed from server (%s)", art_id)
|
||||
break
|
||||
if lline.find(b"\nbegin ") >= 0:
|
||||
logme = T("UUencode detected, only yEnc encoding is supported [%s]") % nzo.final_name
|
||||
logging.error(logme)
|
||||
nzo.fail_msg = logme
|
||||
sabnzbd.NzbQueue.end_job(nzo)
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
# Only bother with further checks if uu-decoding didn't work out
|
||||
if not article_success:
|
||||
# Convert the initial chunks of raw socket data to article lines,
|
||||
# and examine the headers (for precheck) or body (for download).
|
||||
for line in b"".join(raw_data[:2]).split(b"\r\n"):
|
||||
lline = line.lower()
|
||||
if lline.startswith(b"message-id:"):
|
||||
article_success = True
|
||||
# Look for DMCA clues (while skipping "X-" headers)
|
||||
if not lline.startswith(b"x-") and match_str(
|
||||
lline, (b"dmca", b"removed", b"cancel", b"blocked")
|
||||
):
|
||||
article_success = False
|
||||
logging.info("Article removed from server (%s)", art_id)
|
||||
break
|
||||
|
||||
# Pre-check, proper article found so just register
|
||||
if nzo.precheck and article_success and sabnzbd.LOG_ALL:
|
||||
@@ -218,7 +235,7 @@ class DecoderWorker(Thread):
|
||||
sabnzbd.NzbQueue.register_article(article, article_success)
|
||||
|
||||
|
||||
def decode(article: Article, raw_data: List[bytes]) -> bytes:
|
||||
def decode_yenc(article: Article, raw_data: List[bytes]) -> bytes:
|
||||
# Let SABYenc do all the heavy lifting
|
||||
decoded_data, yenc_filename, crc, crc_expected, crc_correct = sabyenc3.decode_usenet_chunks(raw_data, article.bytes)
|
||||
|
||||
@@ -240,11 +257,135 @@ def decode(article: Article, raw_data: List[bytes]) -> bytes:
|
||||
|
||||
# CRC check
|
||||
if not crc_correct:
|
||||
raise CrcError(crc_expected, crc, decoded_data)
|
||||
logging.info("CRC Error in %s", article.article)
|
||||
raise BadData(decoded_data)
|
||||
|
||||
return decoded_data
|
||||
|
||||
|
||||
def decode_uu(article: Article, raw_data: List[bytes]) -> bytes:
|
||||
"""Try to uu-decode an article. The raw_data may or may not contain headers.
|
||||
If there are headers, they will be separated from the body by at least one
|
||||
empty line. In case of no headers, the first line seems to always be the nntp
|
||||
response code (222) directly followed by the msg body."""
|
||||
if not raw_data:
|
||||
logging.debug("No data to decode")
|
||||
raise BadUu
|
||||
|
||||
# Line up the raw_data
|
||||
with BytesIO() as encoded_data:
|
||||
for data in raw_data:
|
||||
encoded_data.write(data)
|
||||
raw_data = encoded_data.getvalue().split(b"\r\n")
|
||||
|
||||
# Index of the uu payload start in raw_data
|
||||
uu_start = 0
|
||||
|
||||
# Limit the number of lines to check for the onset of uu data
|
||||
limit = min(len(raw_data), 32) - 1
|
||||
if limit < 3:
|
||||
logging.debug("Article too short to contain valid uu-encoded data")
|
||||
raise BadUu
|
||||
|
||||
# Try to find an empty line separating the body from headers or response
|
||||
# code and set the expected payload start to the next line.
|
||||
try:
|
||||
uu_start = raw_data[:limit].index(b"") + 1
|
||||
except ValueError:
|
||||
# No empty line, look for a response code instead
|
||||
if raw_data[0].startswith(b"222 "):
|
||||
uu_start = 1
|
||||
else:
|
||||
# Invalid data?
|
||||
logging.debug("Failed to locate start of uu payload")
|
||||
raise BadUu
|
||||
|
||||
def is_uu_junk(line: bytes) -> bool:
|
||||
"""Determine if the line is empty or contains known junk data"""
|
||||
return (not line) or line == b"-- " or line.startswith(b"Posted via ")
|
||||
|
||||
# Check the uu 'begin' line
|
||||
if article.lowest_partnum:
|
||||
try:
|
||||
# Make sure the line after the uu_start one isn't empty as well or
|
||||
# detection of the 'begin' line won't work. For articles other than
|
||||
# lowest_partnum, filtering out empty lines (and other junk) can
|
||||
# wait until the actual decoding step.
|
||||
for index in range(uu_start, limit):
|
||||
if is_uu_junk(raw_data[index]):
|
||||
uu_start = index + 1
|
||||
else:
|
||||
# Bingo
|
||||
break
|
||||
else:
|
||||
# Search reached the limit
|
||||
raise IndexError
|
||||
|
||||
uu_begin_data = raw_data[uu_start].split(b" ")
|
||||
# Filename may contain spaces
|
||||
uu_filename = ubtou(b" ".join(uu_begin_data[2:]).strip())
|
||||
|
||||
# Sanity check the 'begin' line
|
||||
if (
|
||||
len(uu_begin_data) < 3
|
||||
or uu_begin_data[0].lower() != b"begin"
|
||||
or (not int(uu_begin_data[1], 8))
|
||||
or (not uu_filename)
|
||||
):
|
||||
raise ValueError
|
||||
|
||||
# Consider this enough proof to set the type, avoiding further
|
||||
# futile attempts at decoding articles in this nzf as yenc.
|
||||
article.nzf.type = "uu"
|
||||
|
||||
# Bump the pointer for the payload to the next line
|
||||
uu_start += 1
|
||||
except Exception:
|
||||
logging.debug("Missing or invalid uu 'begin' line: %s", raw_data[uu_start] if uu_start < limit else None)
|
||||
raise BadUu
|
||||
|
||||
# Do the actual decoding
|
||||
with BytesIO() as decoded_data:
|
||||
for line in raw_data[uu_start:]:
|
||||
# Ignore junk
|
||||
if is_uu_junk(line):
|
||||
continue
|
||||
|
||||
# End of the article
|
||||
if line in (b"`", b"end", b"."):
|
||||
break
|
||||
|
||||
try:
|
||||
decoded_line = binascii.a2b_uu(line)
|
||||
except binascii.Error as msg:
|
||||
try:
|
||||
# Workaround for broken uuencoders by Fredrik Lundh
|
||||
nbytes = (((line[0] - 32) & 63) * 4 + 5) // 3  # line is bytes, so line[0] is already an int; integer division gives a usable slice index
|
||||
decoded_line = binascii.a2b_uu(line[:nbytes])
|
||||
except Exception as msg2:
|
||||
logging.info(
|
||||
"Error while uu-decoding %s: %s (line: %s; workaround: %s)", article.article, msg, line, msg2
|
||||
)
|
||||
raise BadData(decoded_data.getvalue())
|
||||
|
||||
# Store the decoded data
|
||||
decoded_data.write(decoded_line)
|
||||
|
||||
# Mark as decoded and set the type to uu; the latter is still needed in
|
||||
# case the lowest_partnum article was damaged or slow to download.
|
||||
article.decoded = True
|
||||
article.nzf.type = "uu"
|
||||
|
||||
if article.lowest_partnum:
|
||||
decoded_data.seek(0)
|
||||
article.nzf.md5of16k = hashlib.md5(decoded_data.read(16384)).digest()
|
||||
# Handle the filename
|
||||
if not article.nzf.filename_checked and uu_filename:
|
||||
article.nzf.nzo.verify_nzf_filename(article.nzf, uu_filename)
|
||||
|
||||
return decoded_data.getvalue()
|
||||
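The core of decode_uu() is binascii.a2b_uu applied line by line between the 'begin' header and the terminator. A tiny self-contained illustration with invented data:

import binascii

lines = [b"begin 644 example.bin", b"#0V%T", b"`", b"end"]
# Skip the 'begin' line and stop before the terminator, as the loop above does
payload = b"".join(binascii.a2b_uu(line) for line in lines[1:2])
print(payload)  # -> b'Cat'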
|
||||
|
||||
def search_new_server(article: Article) -> bool:
|
||||
"""Shorthand for searching new server or else increasing bad_articles"""
|
||||
# Continue to the next one if we found new server
|
||||
|
||||
@@ -64,9 +64,9 @@ def decode_par2(parfile: str) -> List[str]:
|
||||
with open(filepath, "rb") as fileToMatch:
|
||||
first16k_data = fileToMatch.read(16384)
|
||||
|
||||
# Check if we have this hash
|
||||
# Check if we have this hash and the filename is different
|
||||
file_md5of16k = hashlib.md5(first16k_data).digest()
|
||||
if file_md5of16k in md5of16k:
|
||||
if file_md5of16k in md5of16k and fn != md5of16k[file_md5of16k]:
|
||||
new_path = os.path.join(dirname, md5of16k[file_md5of16k])
|
||||
# Make sure it's a unique name
|
||||
unique_filename = get_unique_filename(new_path)
|
||||
@@ -166,7 +166,7 @@ def deobfuscate_list(filelist: List[str], usefulname: str):
|
||||
# 2. if no meaningful extension, add it
|
||||
# 3. based on detecting obfuscated filenames
|
||||
|
||||
# to be sure, only keep really exsiting files:
|
||||
# to be sure, only keep really existing files:
|
||||
filelist = [f for f in filelist if os.path.isfile(f)]
|
||||
|
||||
# let's see if there are files with uncommon/unpopular (so: obfuscated) extensions
|
||||
@@ -176,7 +176,7 @@ def deobfuscate_list(filelist: List[str], usefulname: str):
|
||||
for file in filelist:
|
||||
if file_extension.has_popular_extension(file):
|
||||
# common extension, like .doc or .iso, so assume OK and change nothing
|
||||
logging.debug("extension of %s looks common", file)
|
||||
logging.debug("Extension of %s looks common", file)
|
||||
newlist.append(file)
|
||||
else:
|
||||
# uncommon (so: obfuscated) extension
|
||||
@@ -220,6 +220,7 @@ def deobfuscate_list(filelist: List[str], usefulname: str):
|
||||
# check that file is still there (and not renamed by the secondary renaming process below)
|
||||
if not os.path.isfile(filename):
|
||||
continue
|
||||
|
||||
logging.debug("Deobfuscate inspecting %s", filename)
|
||||
# Do we need to rename this file?
|
||||
# Criteria: big, not-excluded extension, obfuscated (in that order)
|
||||
|
||||
@@ -325,6 +325,7 @@ class DirectUnpacker(threading.Thread):
|
||||
# Add last line and write any new output
|
||||
if linebuf:
|
||||
unrar_log.append(platform_btou(linebuf.strip()))
|
||||
if unrar_log:
|
||||
logging.debug("DirectUnpack Unrar output %s", "\n".join(unrar_log))
|
||||
|
||||
# Make more space
|
||||
|
||||
@@ -28,6 +28,7 @@ from nntplib import NNTPPermanentError
|
||||
import socket
|
||||
import random
|
||||
import sys
|
||||
import ssl
|
||||
from typing import List, Dict, Optional, Union
|
||||
|
||||
import sabnzbd
|
||||
@@ -73,6 +74,8 @@ class Server:
|
||||
"ssl",
|
||||
"ssl_verify",
|
||||
"ssl_ciphers",
|
||||
"ssl_context",
|
||||
"required",
|
||||
"optional",
|
||||
"retention",
|
||||
"send_group",
|
||||
@@ -103,12 +106,13 @@ class Server:
|
||||
timeout,
|
||||
threads,
|
||||
priority,
|
||||
ssl,
|
||||
use_ssl,
|
||||
ssl_verify,
|
||||
ssl_ciphers,
|
||||
send_group,
|
||||
username=None,
|
||||
password=None,
|
||||
required=False,
|
||||
optional=False,
|
||||
retention=0,
|
||||
):
|
||||
@@ -122,9 +126,11 @@ class Server:
|
||||
self.timeout: int = timeout
|
||||
self.threads: int = threads
|
||||
self.priority: int = priority
|
||||
self.ssl: bool = ssl
|
||||
self.ssl: bool = use_ssl
|
||||
self.ssl_verify: int = ssl_verify
|
||||
self.ssl_ciphers: str = ssl_ciphers
|
||||
self.ssl_context: Optional[ssl.SSLContext] = None
|
||||
self.required: bool = required
|
||||
self.optional: bool = optional
|
||||
self.retention: int = retention
|
||||
self.send_group: bool = send_group
|
||||
@@ -183,7 +189,7 @@ class Server:
|
||||
logging.debug("%s: Connecting to address %s", self.host, ip)
|
||||
elif cfg.load_balancing() == 2:
|
||||
# RFC6555 / Happy Eyeballs:
|
||||
ip = happyeyeballs(self.host, port=self.port, ssl=self.ssl)
|
||||
ip = happyeyeballs(self.host, port=self.port)
|
||||
if ip:
|
||||
logging.debug("%s: Connecting to address %s", self.host, ip)
|
||||
else:
|
||||
@@ -312,6 +318,7 @@ class Downloader(Thread):
|
||||
ssl_ciphers = srv.ssl_ciphers()
|
||||
username = srv.username()
|
||||
password = srv.password()
|
||||
required = srv.required()
|
||||
optional = srv.optional()
|
||||
retention = int(srv.retention() * 24 * 3600) # days ==> seconds
|
||||
send_group = srv.send_group()
|
||||
@@ -344,6 +351,7 @@ class Downloader(Thread):
|
||||
send_group,
|
||||
username,
|
||||
password,
|
||||
required,
|
||||
optional,
|
||||
retention,
|
||||
)
|
||||
@@ -363,8 +371,11 @@ class Downloader(Thread):
|
||||
|
||||
@NzbQueueLocker
|
||||
def set_paused_state(self, state: bool):
|
||||
"""Set downloader to specified paused state"""
|
||||
self.paused = state
|
||||
"""Set downloader to new paused state if it is changed"""
|
||||
if self.paused != state:
|
||||
if cfg.preserve_paused_state():
|
||||
cfg.start_paused.set(state)
|
||||
self.paused = state
|
||||
|
||||
@NzbQueueLocker
|
||||
def resume(self):
|
||||
@@ -372,6 +383,8 @@ class Downloader(Thread):
|
||||
if self.paused and sabnzbd.WEB_DIR:
|
||||
logging.info("Resuming")
|
||||
sabnzbd.notifier.send_notification("SABnzbd", T("Resuming"), "pause_resume")
|
||||
if cfg.preserve_paused_state():
|
||||
cfg.start_paused.set(False)
|
||||
self.paused = False
|
||||
|
||||
@NzbQueueLocker
|
||||
@@ -381,6 +394,8 @@ class Downloader(Thread):
|
||||
self.paused = True
|
||||
logging.info("Pausing")
|
||||
sabnzbd.notifier.send_notification("SABnzbd", T("Paused"), "pause_resume")
|
||||
if cfg.preserve_paused_state():
|
||||
cfg.start_paused.set(True)
|
||||
if self.is_paused():
|
||||
sabnzbd.BPSMeter.reset()
|
||||
if cfg.autodisconnect():
|
||||
@@ -473,12 +488,16 @@ class Downloader(Thread):
|
||||
if server.errormsg != errormsg:
|
||||
server.errormsg = errormsg
|
||||
logging.warning(errormsg)
|
||||
logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT)
|
||||
if not server.required:
|
||||
logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT)
|
||||
|
||||
# Not fully the same as the code below for optional servers
|
||||
server.bad_cons = 0
|
||||
server.deactivate()
|
||||
self.plan_server(server, _PENALTY_TIMEOUT)
|
||||
if server.required:
|
||||
sabnzbd.Scheduler.plan_required_server_resume()
|
||||
else:
|
||||
server.deactivate()
|
||||
self.plan_server(server, _PENALTY_TIMEOUT)
|
||||
|
||||
# Optional and active server had too many problems.
|
||||
# Disable it now and send a re-enable plan to the scheduler
|
||||
@@ -835,12 +854,17 @@ class Downloader(Thread):
|
||||
penalty = _PENALTY_UNKNOWN
|
||||
block = True
|
||||
if block or (penalty and server.optional):
|
||||
retry_article = False
|
||||
if server.active:
|
||||
server.deactivate()
|
||||
if penalty and (block or server.optional):
|
||||
self.plan_server(server, penalty)
|
||||
# Note that the article is discarded for this server
|
||||
self.__reset_nw(nw, retry_article=False, send_quit=True)
|
||||
if server.required:
|
||||
sabnzbd.Scheduler.plan_required_server_resume()
|
||||
retry_article = True
|
||||
else:
|
||||
server.deactivate()
|
||||
if penalty and (block or server.optional):
|
||||
self.plan_server(server, penalty)
|
||||
# Note that the article is discarded for this server if the server is not required
|
||||
self.__reset_nw(nw, retry_article=retry_article, send_quit=True)
|
||||
continue
|
||||
except:
|
||||
logging.error(
|
||||
|
||||
@@ -28,9 +28,8 @@ import threading
|
||||
import time
|
||||
import fnmatch
|
||||
import stat
|
||||
import zipfile
|
||||
import ctypes
|
||||
from typing import Union, List, Tuple, Any, Dict, Optional
|
||||
from typing import Union, List, Tuple, Dict, Optional
|
||||
|
||||
try:
|
||||
import win32api
|
||||
@@ -426,40 +425,6 @@ def same_file(a: str, b: str) -> int:
|
||||
return is_subfolder
|
||||
|
||||
|
||||
def is_archive(path: str) -> Tuple[int, Any, str]:
|
||||
"""Check if file in path is an ZIP, RAR or 7z file
|
||||
:param path: path to file
|
||||
:return: (zf, status, expected_extension)
|
||||
status: -1==Error/Retry, 0==OK, 1==Ignore
|
||||
"""
|
||||
if zipfile.is_zipfile(path):
|
||||
try:
|
||||
zf = zipfile.ZipFile(path)
|
||||
return 0, zf, ".zip"
|
||||
except:
|
||||
logging.info(T("Cannot read %s"), path, exc_info=True)
|
||||
return -1, None, ""
|
||||
elif rarfile.is_rarfile(path):
|
||||
try:
|
||||
# Set path to tool to open it
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
zf = rarfile.RarFile(path)
|
||||
return 0, zf, ".rar"
|
||||
except:
|
||||
logging.info(T("Cannot read %s"), path, exc_info=True)
|
||||
return -1, None, ""
|
||||
elif sabnzbd.newsunpack.is_sevenfile(path):
|
||||
try:
|
||||
zf = sabnzbd.newsunpack.SevenZip(path)
|
||||
return 0, zf, ".7z"
|
||||
except:
|
||||
logging.info(T("Cannot read %s"), path, exc_info=True)
|
||||
return -1, None, ""
|
||||
else:
|
||||
logging.info("Archive %s is not a real archive!", os.path.basename(path))
|
||||
return 1, None, ""
|
||||
|
||||
|
||||
def check_mount(path: str) -> bool:
|
||||
"""Return False if volume isn't mounted on Linux or macOS
|
||||
Retry 6 times with an interval of 1 sec.
|
||||
@@ -480,6 +445,61 @@ def check_mount(path: str) -> bool:
|
||||
return not m
|
||||
|
||||
|
||||
RAR_RE = re.compile(r"\.(?P<ext>part\d*\.rar|rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d|\d\d\d?\d)$", re.I)
|
||||
SPLITFILE_RE = re.compile(r"\.(\d\d\d?\d$)", re.I)
|
||||
ZIP_RE = re.compile(r"\.(zip$)", re.I)
|
||||
SEVENZIP_RE = re.compile(r"\.7z$", re.I)
|
||||
SEVENMULTI_RE = re.compile(r"\.7z\.\d+$", re.I)
|
||||
TS_RE = re.compile(r"\.(\d+)\.(ts$)", re.I)
|
||||
|
||||
|
||||
def build_filelists(
|
||||
workdir: Optional[str], workdir_complete: Optional[str] = None, check_both: bool = False, check_rar: bool = True
|
||||
) -> Tuple[List[str], List[str], List[str], List[str], List[str]]:
|
||||
"""Build filelists, if workdir_complete has files, ignore workdir.
|
||||
Optionally scan both directories.
|
||||
Optionally test content to establish RAR-ness
|
||||
"""
|
||||
sevens, joinables, zips, rars, ts, filelist = ([], [], [], [], [], [])
|
||||
|
||||
if workdir_complete:
|
||||
filelist.extend(listdir_full(workdir_complete))
|
||||
|
||||
if workdir and (not filelist or check_both):
|
||||
filelist.extend(listdir_full(workdir, recursive=False))
|
||||
|
||||
for file in filelist:
|
||||
# Extra check for rar (takes CPU/disk)
|
||||
file_is_rar = False
|
||||
if check_rar:
|
||||
file_is_rar = rarfile.is_rarfile(file)
|
||||
|
||||
# Run through all the checks
|
||||
if SEVENZIP_RE.search(file) or SEVENMULTI_RE.search(file):
|
||||
# 7zip
|
||||
sevens.append(file)
|
||||
elif SPLITFILE_RE.search(file) and not file_is_rar:
|
||||
# Joinables, optional with RAR check
|
||||
joinables.append(file)
|
||||
elif ZIP_RE.search(file):
|
||||
# ZIP files
|
||||
zips.append(file)
|
||||
elif RAR_RE.search(file):
|
||||
# RAR files
|
||||
rars.append(file)
|
||||
elif TS_RE.search(file):
|
||||
# TS split files
|
||||
ts.append(file)
|
||||
|
||||
logging.debug("build_filelists(): joinables: %s", joinables)
|
||||
logging.debug("build_filelists(): zips: %s", zips)
|
||||
logging.debug("build_filelists(): rars: %s", rars)
|
||||
logging.debug("build_filelists(): 7zips: %s", sevens)
|
||||
logging.debug("build_filelists(): ts: %s", ts)
|
||||
|
||||
return joinables, zips, rars, sevens, ts
|
||||
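A quick illustration of how the regular expressions above classify file names into the returned lists (the names are invented):

import re

RAR_RE = re.compile(r"\.(?P<ext>part\d*\.rar|rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d|\d\d\d?\d)$", re.I)
SEVENMULTI_RE = re.compile(r"\.7z\.\d+$", re.I)

print(bool(RAR_RE.search("movie.part01.rar")))      # True  -> rars
print(bool(RAR_RE.search("movie.r00")))             # True  -> rars
print(bool(SEVENMULTI_RE.search("backup.7z.001")))  # True  -> sevens, checked before the RAR/split patterns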
|
||||
|
||||
def safe_fnmatch(f: str, pattern: str) -> bool:
|
||||
"""fnmatch will fail if the pattern contains any of it's
|
||||
key characters, like [, ] or !.
|
||||
@@ -902,7 +922,7 @@ def renamer(old: str, new: str, create_local_directories: bool = False) -> str:
|
||||
time.sleep(2)
|
||||
else:
|
||||
raise
|
||||
raise OSError("Failed to rename")
|
||||
raise OSError("Failed to rename (Winerr %s)" % hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2 ** 32))
|
||||
else:
|
||||
shutil.move(old, new)
|
||||
return new
|
||||
|
||||
@@ -24,6 +24,7 @@ import multiprocessing.pool
|
||||
import functools
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import socks
|
||||
|
||||
import sabnzbd
|
||||
import sabnzbd.cfg
|
||||
@@ -64,6 +65,13 @@ def addresslookup6(myhost):
|
||||
return socket.getaddrinfo(myhost, 80, socket.AF_INET6)
|
||||
|
||||
|
||||
def active_socks5_proxy():
    """Return the active proxy"""
    if socket.socket == socks.socksocket:
        return "%s:%s" % socks.socksocket.default_proxy[1:3]
    return None
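active_socks5_proxy() reads the tuple that PySocks keeps in socksocket.default_proxy; to my understanding its layout is (proxy_type, addr, port, rdns, username, password), so [1:3] gives the address and port. A hedged sketch with a made-up proxy:

import socks

socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1080, True, "user", "secret")
print("%s:%s" % socks.socksocket.default_proxy[1:3])  # -> 127.0.0.1:1080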
|
||||
|
||||
def localipv4():
|
||||
try:
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s_ipv4:
|
||||
@@ -124,6 +132,6 @@ def ipv6():
|
||||
# IPv6 prefix for documentation purpose
|
||||
s_ipv6.connect(("2001:db8::8080", 80))
|
||||
ipv6_address = s_ipv6.getsockname()[0]
|
||||
except socket.error:
|
||||
except:
|
||||
ipv6_address = None
|
||||
return ipv6_address
|
||||
|
||||
@@ -24,7 +24,7 @@ import time
|
||||
from datetime import datetime
|
||||
import cherrypy
|
||||
import logging
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import urllib.parse
|
||||
import re
|
||||
import hashlib
|
||||
import socket
|
||||
@@ -876,6 +876,7 @@ class ConfigSwitches:
|
||||
##############################################################################
|
||||
SPECIAL_BOOL_LIST = (
|
||||
"start_paused",
|
||||
"preserve_paused_state",
|
||||
"no_penalties",
|
||||
"fast_fail",
|
||||
"overwrite_files",
|
||||
@@ -908,7 +909,7 @@ SPECIAL_BOOL_LIST = (
|
||||
"disable_api_key",
|
||||
"api_logging",
|
||||
"x_frame_options",
|
||||
"require_modern_tls",
|
||||
"allow_old_ssl_tls",
|
||||
)
|
||||
SPECIAL_VALUE_LIST = (
|
||||
"downloader_sleep_time",
|
||||
@@ -992,6 +993,7 @@ GENERAL_LIST = (
|
||||
"https_key",
|
||||
"https_chain",
|
||||
"enable_https_verification",
|
||||
"socks5_proxy_url",
|
||||
"auto_browser",
|
||||
"check_new_rel",
|
||||
)
|
||||
@@ -1234,7 +1236,7 @@ def handle_server(kwargs, root=None, new_svr=False):
|
||||
if new_svr:
|
||||
server = unique_svr_name(server)
|
||||
|
||||
for kw in ("ssl", "send_group", "enable", "optional"):
|
||||
for kw in ("ssl", "send_group", "enable", "required", "optional"):
|
||||
if kw not in kwargs.keys():
|
||||
kwargs[kw] = None
|
||||
if svr and not new_svr:
|
||||
|
||||
@@ -43,6 +43,7 @@ from sabnzbd.filesystem import userxbit
|
||||
TAB_UNITS = ("", "K", "M", "G", "T", "P")
|
||||
RE_UNITS = re.compile(r"(\d+\.*\d*)\s*([KMGTP]?)", re.I)
|
||||
RE_VERSION = re.compile(r"(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)")
|
||||
RE_SAMPLE = re.compile(r"((^|[\W_])(sample|proof))", re.I) # something-sample or something-proof
|
||||
RE_IP4 = re.compile(r"inet\s+(addr:\s*)?(\d+\.\d+\.\d+\.\d+)")
|
||||
RE_IP6 = re.compile(r"inet6\s+(addr:\s*)?([0-9a-f:]+)", re.I)
|
||||
|
||||
@@ -751,16 +752,15 @@ def create_https_certificates(ssl_cert, ssl_key):
|
||||
|
||||
|
||||
def get_all_passwords(nzo) -> List[str]:
|
||||
"""Get all passwords, from the NZB, meta and password file. In case the correct password is
|
||||
already known, only that password is returned."""
|
||||
"""Get all passwords, from the NZB, meta and password file. In case a working password is
|
||||
already known, try it first."""
|
||||
passwords = []
|
||||
if nzo.correct_password:
|
||||
return [nzo.correct_password]
|
||||
passwords.append(nzo.correct_password)
|
||||
|
||||
if nzo.password:
|
||||
logging.info("Found a password that was set by the user: %s", nzo.password)
|
||||
passwords = [nzo.password.strip()]
|
||||
else:
|
||||
passwords = []
|
||||
passwords.append(nzo.password.strip())
|
||||
|
||||
meta_passwords = nzo.meta.get("password", [])
|
||||
pw = nzo.nzo_info.get("password")
|
||||
@@ -808,6 +808,11 @@ def get_all_passwords(nzo) -> List[str]:
|
||||
return unique_passwords
|
||||
|
||||
|
||||
def is_sample(filename: str) -> bool:
    """Try to determine if filename is (most likely) a sample"""
    return bool(re.search(RE_SAMPLE, filename))
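is_sample() is a thin wrapper around the RE_SAMPLE pattern declared at the top of this module, for example:

import re

RE_SAMPLE = re.compile(r"((^|[\W_])(sample|proof))", re.I)  # something-sample or something-proof
print(bool(RE_SAMPLE.search("Some.Movie.2021-sample.mkv")))  # True
print(bool(RE_SAMPLE.search("Some.Movie.2021.mkv")))         # False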
|
||||
|
||||
def find_on_path(targets):
|
||||
"""Search the PATH for a program and return full path"""
|
||||
if sabnzbd.WIN32:
|
||||
|
||||
@@ -26,9 +26,10 @@ import subprocess
|
||||
import logging
|
||||
import time
|
||||
import zlib
|
||||
import io
|
||||
import shutil
|
||||
import functools
|
||||
from typing import Tuple
|
||||
from typing import Tuple, List, BinaryIO
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.encoding import platform_btou, correct_unknown_encoding, ubtou
|
||||
@@ -56,6 +57,8 @@ from sabnzbd.filesystem import (
|
||||
setname_from_path,
|
||||
get_ext,
|
||||
get_filename,
|
||||
TS_RE,
|
||||
build_filelists,
|
||||
)
|
||||
from sabnzbd.nzbstuff import NzbObject, NzbFile
|
||||
from sabnzbd.sorting import SeriesSorter
|
||||
@@ -63,18 +66,13 @@ import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import Status
|
||||
|
||||
# Regex globals
|
||||
RAR_RE = re.compile(r"\.(?P<ext>part\d*\.rar|rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d|\d\d\d?\d)$", re.I)
|
||||
RAR_RE_V3 = re.compile(r"\.(?P<ext>part\d*)$", re.I)
|
||||
|
||||
TARGET_RE = re.compile(r'^(?:File|Target): "(.+)" -')
|
||||
EXTRACTFROM_RE = re.compile(r"^Extracting\sfrom\s(.+)")
|
||||
EXTRACTED_RE = re.compile(r"^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$")
|
||||
SPLITFILE_RE = re.compile(r"\.(\d\d\d?\d$)", re.I)
|
||||
ZIP_RE = re.compile(r"\.(zip$)", re.I)
|
||||
SEVENZIP_RE = re.compile(r"\.7z$", re.I)
|
||||
SEVENMULTI_RE = re.compile(r"\.7z\.\d+$", re.I)
|
||||
TS_RE = re.compile(r"\.(\d+)\.(ts$)", re.I)
|
||||
|
||||
# Constants
|
||||
SEVENZIP_ID = b"7z\xbc\xaf'\x1c"
|
||||
PAR2_COMMAND = None
|
||||
MULTIPAR_COMMAND = None
|
||||
RAR_COMMAND = None
|
||||
@@ -142,6 +140,9 @@ def find_programs(curdir):
|
||||
# Run check on par2-multicore
|
||||
sabnzbd.newsunpack.PAR2_MT = par2_mt_check(sabnzbd.newsunpack.PAR2_COMMAND)
|
||||
|
||||
# Set the path for rarfile
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
|
||||
|
||||
ENV_NZO_FIELDS = [
|
||||
"bytes",
|
||||
@@ -1118,8 +1119,7 @@ def par2_repair(parfile_nzf: NzbFile, nzo: NzbObject, workdir, setname, single):
|
||||
readd = False
|
||||
for extrapar in nzo.extrapars[setname][:]:
|
||||
# Make sure we only get new par2 files
|
||||
if extrapar not in nzo.finished_files and extrapar not in nzo.files:
|
||||
nzo.add_parfile(extrapar)
|
||||
if nzo.add_parfile(extrapar):
|
||||
readd = True
|
||||
if readd:
|
||||
return readd, result
|
||||
@@ -1957,7 +1957,6 @@ def rar_volumelist(rarfile_path, password, known_volumes):
|
||||
# UnRar is required to read some RAR files
|
||||
# RarFile can fail in special cases
|
||||
try:
|
||||
rarfile.UNRAR_TOOL = RAR_COMMAND
|
||||
zf = rarfile.RarFile(rarfile_path)
|
||||
|
||||
# setpassword can fail due to bugs in RarFile
|
||||
@@ -1995,51 +1994,6 @@ def rar_sort(a, b):
|
||||
return cmp(a, b)
|
||||
|
||||
|
||||
def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=True):
|
||||
"""Build filelists, if workdir_complete has files, ignore workdir.
|
||||
Optionally scan both directories.
|
||||
Optionally test content to establish RAR-ness
|
||||
"""
|
||||
sevens, joinables, zips, rars, ts, filelist = ([], [], [], [], [], [])
|
||||
|
||||
if workdir_complete:
|
||||
filelist.extend(listdir_full(workdir_complete))
|
||||
|
||||
if workdir and (not filelist or check_both):
|
||||
filelist.extend(listdir_full(workdir, recursive=False))
|
||||
|
||||
for file in filelist:
|
||||
# Extra check for rar (takes CPU/disk)
|
||||
file_is_rar = False
|
||||
if check_rar:
|
||||
file_is_rar = rarfile.is_rarfile(file)
|
||||
|
||||
# Run through all the checks
|
||||
if SEVENZIP_RE.search(file) or SEVENMULTI_RE.search(file):
|
||||
# 7zip
|
||||
sevens.append(file)
|
||||
elif SPLITFILE_RE.search(file) and not file_is_rar:
|
||||
# Joinables, optional with RAR check
|
||||
joinables.append(file)
|
||||
elif ZIP_RE.search(file):
|
||||
# ZIP files
|
||||
zips.append(file)
|
||||
elif RAR_RE.search(file):
|
||||
# RAR files
|
||||
rars.append(file)
|
||||
elif TS_RE.search(file):
|
||||
# TS split files
|
||||
ts.append(file)
|
||||
|
||||
logging.debug("build_filelists(): joinables: %s", joinables)
|
||||
logging.debug("build_filelists(): zips: %s", zips)
|
||||
logging.debug("build_filelists(): rars: %s", rars)
|
||||
logging.debug("build_filelists(): 7zips: %s", sevens)
|
||||
logging.debug("build_filelists(): ts: %s", ts)
|
||||
|
||||
return joinables, zips, rars, sevens, ts
|
||||
|
||||
|
||||
def quick_check_set(set, nzo):
|
||||
"""Check all on-the-fly md5sums of a set"""
|
||||
md5pack = nzo.md5packs.get(set)
|
||||
@@ -2388,22 +2342,27 @@ def pre_queue(nzo: NzbObject, pp, cat):
|
||||
return values
|
||||
|
||||
|
||||
def is_sevenfile(path):
|
||||
"""Return True if path has proper extension and 7Zip is installed"""
|
||||
return SEVEN_COMMAND and os.path.splitext(path)[1].lower() == ".7z"
|
||||
def is_sevenfile(path: str) -> bool:
    """Return True if path has 7Zip-signature and 7Zip is detected"""
    with open(path, "rb") as sevenzip:
        if sevenzip.read(6) == SEVENZIP_ID:
            return bool(SEVEN_COMMAND)
    return False
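The reworked is_sevenfile() identifies archives by signature instead of extension; the check itself is only a comparison of the first six bytes (the path below is a placeholder):

SEVENZIP_ID = b"7z\xbc\xaf'\x1c"  # magic bytes at the start of every 7z archive

with open("archive.7z", "rb") as f:
    print(f.read(6) == SEVENZIP_ID)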
|
||||
|
||||
class SevenZip:
|
||||
"""Minimal emulation of ZipFile class for 7Zip"""
|
||||
|
||||
def __init__(self, path):
|
||||
def __init__(self, path: str):
|
||||
self.path = path
|
||||
# Check if it's actually a 7Zip-file
|
||||
if not is_sevenfile(self.path):
|
||||
raise TypeError("File is not a 7zip file")
|
||||
|
||||
def namelist(self):
|
||||
def namelist(self) -> List[str]:
|
||||
"""Return list of names in 7Zip"""
|
||||
names = []
|
||||
# Future extension: use '-sccUTF-8' to get names in UTF8 encoding
|
||||
command = [SEVEN_COMMAND, "l", "-p", "-y", "-slt", self.path]
|
||||
command = [SEVEN_COMMAND, "l", "-p", "-y", "-slt", "-sccUTF-8", self.path]
|
||||
output = run_command(command)
|
||||
|
||||
re_path = re.compile("^Path = (.+)")
|
||||
@@ -2416,11 +2375,14 @@ class SevenZip:
|
||||
del names[0]
|
||||
return names
|
||||
|
||||
def read(self, name):
|
||||
def open(self, name: str) -> BinaryIO:
|
||||
"""Read named file from 7Zip and return data"""
|
||||
command = [SEVEN_COMMAND, "e", "-p", "-y", "-so", self.path, name]
|
||||
# Ignore diagnostic output, otherwise it will be appended to content
|
||||
return run_command(command, stderr=subprocess.DEVNULL)
|
||||
with build_and_run_command(command, stderr=subprocess.DEVNULL) as p:
|
||||
data = io.BytesIO(p.stdout.read())
|
||||
p.wait()
|
||||
return data
|
||||
|
||||
def close(self):
|
||||
"""Close file"""
|
||||
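Hypothetical usage of the adjusted SevenZip helper, assuming a 7z binary was detected; note that open() now returns a BytesIO object rather than raw bytes:

# zf = SevenZip("/path/to/archive.7z")   # raises TypeError if the signature check fails
# for name in zf.namelist():
#     with zf.open(name) as data_fp:
#         print(name, len(data_fp.read()))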
|
||||
@@ -268,7 +268,7 @@ class NNTP:
|
||||
if not self.nw.server.info:
|
||||
raise socket.error(errno.EADDRNOTAVAIL, "Address not available - Check for internet or DNS problems")
|
||||
|
||||
af, socktype, proto, canonname, sa = self.nw.server.info[0]
|
||||
af, socktype, proto, _, _ = self.nw.server.info[0]
|
||||
|
||||
# There will be a connect to host (or self.host), so force 'af' to the correct value
|
||||
if is_ipv4_addr(self.host):
|
||||
@@ -276,38 +276,42 @@ class NNTP:
|
||||
if is_ipv6_addr(self.host):
|
||||
af = socket.AF_INET6
|
||||
|
||||
# Secured or unsecured?
|
||||
if not self.nw.server.ssl:
|
||||
# Basic connection
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
else:
|
||||
# Use context or just wrapper
|
||||
if sabnzbd.CERTIFICATE_VALIDATION:
|
||||
# Setup the SSL socket
|
||||
ctx = ssl.create_default_context()
|
||||
# Create SSL-context if it is needed and not created yet
|
||||
if self.nw.server.ssl and not self.nw.server.ssl_context:
|
||||
# Setup the SSL socket
|
||||
self.nw.server.ssl_context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||
|
||||
if sabnzbd.cfg.require_modern_tls():
|
||||
# We want a modern TLS (1.2 or higher), so we disallow older protocol versions (<= TLS 1.1)
|
||||
ctx.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 | ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
|
||||
|
||||
# Only verify hostname when we're strict
|
||||
if self.nw.server.ssl_verify < 2:
|
||||
ctx.check_hostname = False
|
||||
# Only verify hostname when we're strict
|
||||
if self.nw.server.ssl_verify < 2:
|
||||
self.nw.server.ssl_context.check_hostname = False
|
||||
# Certificates optional
|
||||
if self.nw.server.ssl_verify == 0:
|
||||
ctx.verify_mode = ssl.CERT_NONE
|
||||
self.nw.server.ssl_context.verify_mode = ssl.CERT_NONE
|
||||
|
||||
# Did the user set a custom cipher-string?
|
||||
if self.nw.server.ssl_ciphers:
|
||||
# At their own risk, socket will error out in case it was invalid
|
||||
ctx.set_ciphers(self.nw.server.ssl_ciphers)
|
||||
|
||||
self.sock = ctx.wrap_socket(socket.socket(af, socktype, proto), server_hostname=self.nw.server.host)
|
||||
# Did the user set a custom cipher-string?
|
||||
if self.nw.server.ssl_ciphers:
|
||||
# At their own risk, socket will error out in case it was invalid
|
||||
self.nw.server.ssl_context.set_ciphers(self.nw.server.ssl_ciphers)
|
||||
# Python does not allow setting ciphers on TLSv1.3, so have to force TLSv1.2 as the maximum
|
||||
self.nw.server.ssl_context.maximum_version = ssl.TLSVersion.TLSv1_2
|
||||
else:
|
||||
# Use a regular wrapper, no certificate validation
|
||||
self.sock = ssl.wrap_socket(socket.socket(af, socktype, proto))
|
||||
# Support at least TLSv1.2+ ciphers, as some essential ones are removed by default in Python 3.10
|
||||
self.nw.server.ssl_context.set_ciphers("HIGH")
|
||||
|
||||
# Store fileno of the socket
|
||||
if sabnzbd.cfg.allow_old_ssl_tls():
|
||||
# Allow anything that the system has
|
||||
self.nw.server.ssl_context.minimum_version = ssl.TLSVersion.MINIMUM_SUPPORTED
|
||||
else:
|
||||
# We want a modern TLS (1.2 or higher), so we disallow older protocol versions (<= TLS 1.1)
|
||||
self.nw.server.ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
|
||||
|
||||
# Disable any verification if the setup is bad
|
||||
if not sabnzbd.CERTIFICATE_VALIDATION:
|
||||
self.nw.server.ssl_context.check_hostname = False
|
||||
self.nw.server.ssl_context.verify_mode = ssl.CERT_NONE
|
||||
|
||||
# Create socket and store fileno of the socket
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
self.fileno: int = self.sock.fileno()
|
||||
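Outside of SABnzbd, the per-server SSL context pattern introduced here reduces to a few lines of the standard ssl module (the host name is a placeholder):

import socket
import ssl

ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
ctx.minimum_version = ssl.TLSVersion.TLSv1_2  # modern TLS only, unless allow_old_ssl_tls is set
plain_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# The TLS handshake only happens once the wrapped socket actually connects
nntp_sock = ctx.wrap_socket(plain_sock, server_hostname="news.example.org")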
|
||||
# Open the connection in a separate thread to avoid blocking
|
||||
@@ -326,10 +330,11 @@ class NNTP:
|
||||
|
||||
# Connect
|
||||
self.sock.connect((self.host, self.nw.server.port))
|
||||
self.sock.setblocking(self.nw.blocking)
|
||||
|
||||
# Log SSL/TLS info
|
||||
# Secured or unsecured?
|
||||
if self.nw.server.ssl:
|
||||
# Wrap socket and log SSL/TLS diagnostic info
|
||||
self.sock = self.nw.server.ssl_context.wrap_socket(self.sock, server_hostname=self.nw.server.host)
|
||||
logging.info(
|
||||
"%s@%s: Connected using %s (%s)",
|
||||
self.nw.thrdnum,
|
||||
@@ -339,6 +344,9 @@ class NNTP:
|
||||
)
|
||||
self.nw.server.ssl_info = "%s (%s)" % (self.sock.version(), self.sock.cipher()[0])
|
||||
|
||||
# Set blocking mode
|
||||
self.sock.setblocking(self.nw.blocking)
|
||||
|
||||
# Now it's safe to add the socket to the list of active sockets
|
||||
# Skip this step during server test
|
||||
if not self.nw.blocking:
|
||||
|
||||
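The hunk above moves TLS setup from a per-connection ssl.create_default_context() call to a single SSLContext stored on the server and reused for every connection, with the socket wrapped only after connect(). A minimal sketch of that pattern follows; the server object and its ssl/ssl_context attributes are stand-ins, only the socket and ssl calls are the real standard-library API.

import socket
import ssl

def get_server_context(server, require_modern_tls=True, validate_certificates=True):
    """Create the server's SSLContext once and hand back the cached one afterwards."""
    if server.ssl_context is None:
        ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
        if require_modern_tls:
            # Disallow anything below TLS 1.2 via minimum_version instead of OP_NO_* flags
            ctx.minimum_version = ssl.TLSVersion.TLSv1_2
        else:
            ctx.minimum_version = ssl.TLSVersion.MINIMUM_SUPPORTED
        if not validate_certificates:
            # check_hostname must be switched off before verify_mode can be relaxed
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
        server.ssl_context = ctx
    return server.ssl_context

def open_nntp_socket(server, host, port, af=socket.AF_INET):
    """Connect plain first, then wrap with the shared context when SSL is enabled."""
    sock = socket.socket(af, socket.SOCK_STREAM)
    sock.connect((host, port))
    if server.ssl:
        sock = get_server_context(server).wrap_socket(sock, server_hostname=host)
    return sock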
@@ -23,7 +23,8 @@ sabnzbd.notifier - Send notifications to any notification services
|
||||
|
||||
import os.path
|
||||
import logging
|
||||
import urllib.request, urllib.error, urllib.parse
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import http.client
|
||||
import json
|
||||
from threading import Thread
|
||||
|
||||
@@ -20,26 +20,25 @@ sabnzbd.nzbparser - Parse and import NZB files
|
||||
"""
|
||||
import bz2
|
||||
import gzip
|
||||
import re
|
||||
import time
|
||||
import logging
|
||||
import hashlib
|
||||
import xml.etree.ElementTree
|
||||
import datetime
|
||||
from typing import Optional, Dict, Any, Union
|
||||
import zipfile
|
||||
from typing import Optional, Dict, Any, Union, List, Tuple
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd import filesystem, nzbstuff
|
||||
from sabnzbd.constants import Status
|
||||
from sabnzbd.encoding import utob, correct_unknown_encoding
|
||||
from sabnzbd.filesystem import is_archive, get_filename
|
||||
from sabnzbd.encoding import utob
|
||||
from sabnzbd.filesystem import get_filename
|
||||
from sabnzbd.misc import name_to_cat
|
||||
from sabnzbd.utils import rarfile
|
||||
|
||||
|
||||
def nzbfile_parser(raw_data, nzo):
|
||||
# Load data as file-object
|
||||
raw_data = re.sub(r"""\s(xmlns="[^"]+"|xmlns='[^']+')""", "", raw_data, count=1)
|
||||
nzb_tree = xml.etree.ElementTree.fromstring(raw_data)
|
||||
def nzbfile_parser(full_nzb_path: str, nzo):
|
||||
# For type-hinting
|
||||
nzo: sabnzbd.nzbstuff.NzbObject
|
||||
|
||||
# Hash for dupe-checking
|
||||
md5sum = hashlib.md5()
|
||||
@@ -52,98 +51,112 @@ def nzbfile_parser(raw_data, nzo):
|
||||
skipped_files = 0
|
||||
valid_files = 0
|
||||
|
||||
# Parse the header
|
||||
if nzb_tree.find("head"):
|
||||
for meta in nzb_tree.find("head").iter("meta"):
|
||||
meta_type = meta.attrib.get("type")
|
||||
if meta_type and meta.text:
|
||||
# Meta tags can occur multiple times
|
||||
if meta_type not in nzo.meta:
|
||||
nzo.meta[meta_type] = []
|
||||
nzo.meta[meta_type].append(meta.text)
|
||||
logging.debug("NZB file meta-data = %s", nzo.meta)
|
||||
# Use nzb.gz file from admin dir
|
||||
with gzip.open(full_nzb_path) as nzb_fh:
|
||||
for _, element in xml.etree.ElementTree.iterparse(nzb_fh):
|
||||
# For type-hinting
|
||||
element: xml.etree.ElementTree.Element
|
||||
|
||||
# Parse the files
|
||||
for file in nzb_tree.iter("file"):
|
||||
# Get subject and date
|
||||
file_name = ""
|
||||
if file.attrib.get("subject"):
|
||||
file_name = file.attrib.get("subject")
|
||||
# Ignore namespace
|
||||
_, has_namespace, postfix = element.tag.partition("}")
|
||||
if has_namespace:
|
||||
element.tag = postfix
|
||||
|
||||
# Don't fail if no date present
|
||||
try:
|
||||
file_date = datetime.datetime.fromtimestamp(int(file.attrib.get("date")))
|
||||
file_timestamp = int(file.attrib.get("date"))
|
||||
except:
|
||||
file_date = datetime.datetime.fromtimestamp(time_now)
|
||||
file_timestamp = time_now
|
||||
# Parse the header
|
||||
if element.tag.lower() == "head":
|
||||
for meta in element.iter("meta"):
|
||||
meta_type = meta.attrib.get("type")
|
||||
if meta_type and meta.text:
|
||||
# Meta tags can occur multiple times
|
||||
if meta_type not in nzo.meta:
|
||||
nzo.meta[meta_type] = []
|
||||
nzo.meta[meta_type].append(meta.text)
|
||||
element.clear()
|
||||
logging.debug("NZB file meta-data = %s", nzo.meta)
|
||||
continue
|
||||
|
||||
# Get group
|
||||
for group in file.iter("group"):
|
||||
if group.text not in nzo.groups:
|
||||
nzo.groups.append(group.text)
|
||||
# Parse the files
|
||||
if element.tag.lower() == "file":
|
||||
# Get subject and date
|
||||
file_name = ""
|
||||
if element.attrib.get("subject"):
|
||||
file_name = element.attrib.get("subject")
|
||||
|
||||
# Get segments
|
||||
raw_article_db = {}
|
||||
file_bytes = 0
|
||||
if file.find("segments"):
|
||||
for segment in file.find("segments").iter("segment"):
|
||||
# Don't fail if no date present
|
||||
try:
|
||||
article_id = segment.text
|
||||
segment_size = int(segment.attrib.get("bytes"))
|
||||
partnum = int(segment.attrib.get("number"))
|
||||
|
||||
# Update hash
|
||||
md5sum.update(utob(article_id))
|
||||
|
||||
# Duplicate parts?
|
||||
if partnum in raw_article_db:
|
||||
if article_id != raw_article_db[partnum][0]:
|
||||
logging.info(
|
||||
"Duplicate part %s, but different ID-s (%s // %s)",
|
||||
partnum,
|
||||
raw_article_db[partnum][0],
|
||||
article_id,
|
||||
)
|
||||
nzo.increase_bad_articles_counter("duplicate_articles")
|
||||
else:
|
||||
logging.info("Skipping duplicate article (%s)", article_id)
|
||||
elif segment_size <= 0 or segment_size >= 2 ** 23:
|
||||
# Perform sanity check (not negative, 0 or larger than 8MB) on article size
|
||||
# We use this value later to allocate memory in cache and sabyenc
|
||||
logging.info("Skipping article %s due to strange size (%s)", article_id, segment_size)
|
||||
nzo.increase_bad_articles_counter("bad_articles")
|
||||
else:
|
||||
raw_article_db[partnum] = (article_id, segment_size)
|
||||
file_bytes += segment_size
|
||||
file_date = datetime.datetime.fromtimestamp(int(element.attrib.get("date")))
|
||||
file_timestamp = int(element.attrib.get("date"))
|
||||
except:
|
||||
# In case of missing attributes
|
||||
pass
|
||||
file_date = datetime.datetime.fromtimestamp(time_now)
|
||||
file_timestamp = time_now
|
||||
|
||||
# Sort the articles by part number, compatible with Python 3.5
|
||||
raw_article_db_sorted = [raw_article_db[partnum] for partnum in sorted(raw_article_db)]
|
||||
# Get group
|
||||
for group in element.iter("group"):
|
||||
if group.text not in nzo.groups:
|
||||
nzo.groups.append(group.text)
|
||||
|
||||
# Create NZF
|
||||
nzf = sabnzbd.nzbstuff.NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)
|
||||
# Get segments
|
||||
raw_article_db = {}
|
||||
file_bytes = 0
|
||||
if element.find("segments"):
|
||||
for segment in element.find("segments").iter("segment"):
|
||||
try:
|
||||
article_id = segment.text
|
||||
segment_size = int(segment.attrib.get("bytes"))
|
||||
partnum = int(segment.attrib.get("number"))
|
||||
|
||||
# Check if we already have this exact NZF (see custom eq-checks)
|
||||
if nzf in nzo.files:
|
||||
logging.info("File %s occured twice in NZB, skipping", nzf.filename)
|
||||
continue
|
||||
# Update hash
|
||||
md5sum.update(utob(article_id))
|
||||
|
||||
# Add valid NZF's
|
||||
if file_name and nzf.valid and nzf.nzf_id:
|
||||
logging.info("File %s added to queue", nzf.filename)
|
||||
nzo.files.append(nzf)
|
||||
nzo.files_table[nzf.nzf_id] = nzf
|
||||
nzo.bytes += nzf.bytes
|
||||
valid_files += 1
|
||||
avg_age_sum += file_timestamp
|
||||
else:
|
||||
logging.info("Error importing %s, skipping", file_name)
|
||||
if nzf.nzf_id:
|
||||
sabnzbd.remove_data(nzf.nzf_id, nzo.admin_path)
|
||||
skipped_files += 1
|
||||
# Duplicate parts?
|
||||
if partnum in raw_article_db:
|
||||
if article_id != raw_article_db[partnum][0]:
|
||||
logging.info(
|
||||
"Duplicate part %s, but different ID-s (%s // %s)",
|
||||
partnum,
|
||||
raw_article_db[partnum][0],
|
||||
article_id,
|
||||
)
|
||||
nzo.increase_bad_articles_counter("duplicate_articles")
|
||||
else:
|
||||
logging.info("Skipping duplicate article (%s)", article_id)
|
||||
elif segment_size <= 0 or segment_size >= 2 ** 23:
|
||||
# Perform sanity check (not negative, 0 or larger than 8MB) on article size
|
||||
# We use this value later to allocate memory in cache and sabyenc
|
||||
logging.info("Skipping article %s due to strange size (%s)", article_id, segment_size)
|
||||
nzo.increase_bad_articles_counter("bad_articles")
|
||||
else:
|
||||
raw_article_db[partnum] = (article_id, segment_size)
|
||||
file_bytes += segment_size
|
||||
except:
|
||||
# In case of missing attributes
|
||||
pass
|
||||
|
||||
# Sort the articles by part number, compatible with Python 3.5
|
||||
raw_article_db_sorted = [raw_article_db[partnum] for partnum in sorted(raw_article_db)]
|
||||
|
||||
# Create NZF
|
||||
nzf = sabnzbd.nzbstuff.NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)
|
||||
|
||||
# Check if we already have this exact NZF (see custom eq-checks)
|
||||
if nzf in nzo.files:
|
||||
logging.info("File %s occured twice in NZB, skipping", nzf.filename)
|
||||
continue
|
||||
|
||||
# Add valid NZF's
|
||||
if file_name and nzf.valid and nzf.nzf_id:
|
||||
logging.info("File %s added to queue", nzf.filename)
|
||||
nzo.files.append(nzf)
|
||||
nzo.files_table[nzf.nzf_id] = nzf
|
||||
nzo.bytes += nzf.bytes
|
||||
valid_files += 1
|
||||
avg_age_sum += file_timestamp
|
||||
else:
|
||||
logging.info("Error importing %s, skipping", file_name)
|
||||
if nzf.nzf_id:
|
||||
sabnzbd.remove_data(nzf.nzf_id, nzo.admin_path)
|
||||
skipped_files += 1
|
||||
element.clear()
|
||||
|
||||
# Final bookkeeping
|
||||
nr_files = max(1, valid_files)
|
||||
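nzbfile_parser now streams the gzipped NZB with iterparse instead of loading the whole XML tree, stripping the namespace per element and calling element.clear() once a file entry has been handled. A self-contained sketch of just that loop, standard library only; the bookkeeping against the NzbObject is left out.

import gzip
import xml.etree.ElementTree

def iter_nzb_files(full_nzb_path):
    """Yield (subject, [(article_id, size), ...]) for every <file> in a gzipped NZB."""
    with gzip.open(full_nzb_path) as nzb_fh:
        for _, element in xml.etree.ElementTree.iterparse(nzb_fh):
            # Ignore the xmlns namespace so the tag compares as a plain "file"
            _, has_namespace, postfix = element.tag.partition("}")
            if has_namespace:
                element.tag = postfix
            if element.tag.lower() != "file":
                continue
            segments = []
            if element.find("segments") is not None:
                for segment in element.find("segments").iter("segment"):
                    segments.append((segment.text, int(segment.attrib.get("bytes", 0))))
            yield element.attrib.get("subject", ""), segments
            # Release the processed element; iterparse would otherwise keep the full tree
            element.clear()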
@@ -171,22 +184,29 @@ def process_nzb_archive_file(
|
||||
url: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
nzo_id: Optional[str] = None,
|
||||
):
|
||||
"""Analyse ZIP file and create job(s).
|
||||
Accepts ZIP files with ONLY nzb/nfo/folder files in it.
|
||||
) -> Tuple[int, List[str]]:
|
||||
"""Analyse archive and create job(s).
|
||||
Accepts archive files that contain ONLY nzb/nfo/folder files.
|
||||
returns (status, nzo_ids)
|
||||
status: -1==Error, 0==OK, 1==Ignore
|
||||
"""
|
||||
nzo_ids = []
|
||||
if catdir is None:
|
||||
catdir = cat
|
||||
|
||||
filename, cat = name_to_cat(filename, catdir)
|
||||
# Returns -1==Error/Retry, 0==OK, 1==Ignore
|
||||
status, zf, extension = is_archive(path)
|
||||
|
||||
if status != 0:
|
||||
return status, []
|
||||
try:
|
||||
if zipfile.is_zipfile(path):
|
||||
zf = zipfile.ZipFile(path)
|
||||
elif rarfile.is_rarfile(path):
|
||||
zf = rarfile.RarFile(path)
|
||||
elif sabnzbd.newsunpack.is_sevenfile(path):
|
||||
zf = sabnzbd.newsunpack.SevenZip(path)
|
||||
else:
|
||||
raise TypeError("File %s is not a supported archive!" % filename)
|
||||
except:
|
||||
logging.info(T("Cannot read %s"), path, exc_info=True)
|
||||
return -1, []
|
||||
|
||||
status = 1
|
||||
names = zf.namelist()
|
||||
@@ -203,20 +223,20 @@ def process_nzb_archive_file(
|
||||
for name in names:
|
||||
if name.lower().endswith(".nzb"):
|
||||
try:
|
||||
data = correct_unknown_encoding(zf.read(name))
|
||||
datap = zf.open(name)
|
||||
except OSError:
|
||||
logging.error(T("Cannot read %s"), name, exc_info=True)
|
||||
zf.close()
|
||||
return -1, []
|
||||
name = filesystem.setname_from_path(name)
|
||||
if data:
|
||||
if datap:
|
||||
nzo = None
|
||||
try:
|
||||
nzo = nzbstuff.NzbObject(
|
||||
name,
|
||||
pp=pp,
|
||||
script=script,
|
||||
nzb_data=data,
|
||||
nzb_fp=datap,
|
||||
cat=cat,
|
||||
url=url,
|
||||
priority=priority,
|
||||
@@ -233,6 +253,8 @@ def process_nzb_archive_file(
|
||||
except:
|
||||
# Something else is wrong, show error
|
||||
logging.error(T("Error while adding %s, removing"), name, exc_info=True)
|
||||
finally:
|
||||
datap.close()
|
||||
|
||||
if nzo:
|
||||
if nzo_id:
|
||||
@@ -242,7 +264,10 @@ def process_nzb_archive_file(
|
||||
nzo_id = None
|
||||
nzo_ids.append(sabnzbd.NzbQueue.add(nzo))
|
||||
nzo.update_rating()
|
||||
|
||||
# Close the pointer to the compressed file
|
||||
zf.close()
|
||||
|
||||
try:
|
||||
if not keep:
|
||||
filesystem.remove_file(path)
|
||||
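Note the switch from zf.read(name) (whole member in memory) to zf.open(name) (a file object handed to NzbObject and closed in the finally block). A hedged illustration with only the standard zipfile module; the rarfile and 7z branches in the real code follow the same open/close pattern.

import zipfile

def iter_nzb_members(archive_path):
    """Yield (member_name, file_object) for each .nzb inside a ZIP archive."""
    zf = zipfile.ZipFile(archive_path)
    try:
        for name in zf.namelist():
            if name.lower().endswith(".nzb"):
                member_fp = zf.open(name)  # stream the member instead of reading it fully
                try:
                    yield name, member_fp
                finally:
                    member_fp.close()
    finally:
        zf.close()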
@@ -272,11 +297,11 @@ def process_single_nzb(
|
||||
url: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
nzo_id: Optional[str] = None,
|
||||
):
|
||||
) -> Tuple[int, List[str]]:
|
||||
"""Analyze file and create a job from it
|
||||
Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
|
||||
returns (status, nzo_ids)
|
||||
status: -2==Error/retry, -1==Error, 0==OK
|
||||
status: -1==Error, 0==OK
|
||||
"""
|
||||
nzo_ids = []
|
||||
if catdir is None:
|
||||
@@ -289,17 +314,13 @@ def process_single_nzb(
|
||||
if check_bytes == b"\x1f\x8b":
|
||||
# gzip file or gzip in disguise
|
||||
filename = filename.replace(".nzb.gz", ".nzb")
|
||||
nzb_reader_handler = gzip.GzipFile
|
||||
nzb_fp = gzip.GzipFile(path, "rb")
|
||||
elif check_bytes == b"BZ":
|
||||
# bz2 file or bz2 in disguise
|
||||
filename = filename.replace(".nzb.bz2", ".nzb")
|
||||
nzb_reader_handler = bz2.BZ2File
|
||||
nzb_fp = bz2.BZ2File(path, "rb")
|
||||
else:
|
||||
nzb_reader_handler = open
|
||||
|
||||
# Let's get some data and hope we can decode it
|
||||
with nzb_reader_handler(path, "rb") as nzb_file:
|
||||
data = correct_unknown_encoding(nzb_file.read())
|
||||
nzb_fp = open(path, "rb")
|
||||
|
||||
except OSError:
|
||||
logging.warning(T("Cannot read %s"), filesystem.clip_path(path))
|
||||
@@ -318,7 +339,7 @@ def process_single_nzb(
|
||||
filename,
|
||||
pp=pp,
|
||||
script=script,
|
||||
nzb_data=data,
|
||||
nzb_fp=nzb_fp,
|
||||
cat=cat,
|
||||
url=url,
|
||||
priority=priority,
|
||||
@@ -338,13 +359,11 @@ def process_single_nzb(
|
||||
# Empty
|
||||
return 1, nzo_ids
|
||||
except:
|
||||
if data.find("<nzb") >= 0 > data.find("</nzb"):
|
||||
# Looks like an incomplete file, retry
|
||||
return -2, nzo_ids
|
||||
else:
|
||||
# Something else is wrong, show error
|
||||
logging.error(T("Error while adding %s, removing"), filename, exc_info=True)
|
||||
return -1, nzo_ids
|
||||
# Something else is wrong, show error
|
||||
logging.error(T("Error while adding %s, removing"), filename, exc_info=True)
|
||||
return -1, nzo_ids
|
||||
finally:
|
||||
nzb_fp.close()
|
||||
|
||||
if nzo:
|
||||
if nzo_id:
|
||||
|
||||
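process_single_nzb keeps its magic-byte sniffing but now returns an open file object (nzb_fp) instead of decoded text. A compact standalone sketch of that detection, assuming nothing beyond the standard library:

import bz2
import gzip

def open_maybe_compressed(path):
    """Return a binary file object, transparently handling gzip/bz2 'in disguise'."""
    with open(path, "rb") as f:
        check_bytes = f.read(2)
    if check_bytes == b"\x1f\x8b":
        # gzip magic bytes, regardless of the file extension
        return gzip.GzipFile(path, "rb")
    if check_bytes == b"BZ":
        # bz2 magic bytes
        return bz2.BZ2File(path, "rb")
    return open(path, "rb")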
@@ -26,7 +26,7 @@ import datetime
|
||||
import threading
|
||||
import functools
|
||||
import difflib
|
||||
from typing import List, Dict, Any, Tuple, Optional, Union
|
||||
from typing import List, Dict, Any, Tuple, Optional, Union, BinaryIO
|
||||
|
||||
# SABnzbd modules
|
||||
import sabnzbd
|
||||
@@ -588,13 +588,13 @@ class NzbObject(TryList):
|
||||
filename: str,
|
||||
pp: Optional[int] = None,
|
||||
script: Optional[str] = None,
|
||||
nzb_data: Optional[str] = None,
|
||||
nzb_fp: Optional[BinaryIO] = None,
|
||||
futuretype: bool = False,
|
||||
cat: Optional[str] = None,
|
||||
url: Optional[str] = None,
|
||||
priority: Optional[Union[int, str]] = DEFAULT_PRIORITY,
|
||||
nzbname: Optional[str] = None,
|
||||
status: Status = Status.QUEUED,
|
||||
status: str = Status.QUEUED,
|
||||
nzo_info: Optional[Dict[str, Any]] = None,
|
||||
reuse: Optional[str] = None,
|
||||
dup_check: bool = True,
|
||||
@@ -602,13 +602,13 @@ class NzbObject(TryList):
|
||||
super().__init__()
|
||||
|
||||
self.filename = filename # Original filename
|
||||
if nzbname and nzb_data:
|
||||
if nzbname and nzb_fp:
|
||||
self.work_name = nzbname # Use nzbname if set and only for non-future slot
|
||||
else:
|
||||
self.work_name = filename
|
||||
|
||||
# For future-slots we keep the name given by URLGrabber
|
||||
if nzb_data is None:
|
||||
if nzb_fp is None:
|
||||
self.final_name = self.work_name = filename
|
||||
else:
|
||||
# Remove trailing .nzb and .par(2)
|
||||
@@ -648,7 +648,7 @@ class NzbObject(TryList):
|
||||
self.script = script
|
||||
|
||||
# Information fields
|
||||
self.url = url or filename
|
||||
self.url = url
|
||||
self.groups = []
|
||||
self.avg_date = datetime.datetime(1970, 1, 1, 1, 0)
|
||||
self.avg_stamp = 0.0 # Avg age in seconds (calculated from avg_age)
|
||||
@@ -725,7 +725,7 @@ class NzbObject(TryList):
|
||||
self.pp_active = False # Signals active post-processing (not saved)
|
||||
self.md5sum: Optional[str] = None
|
||||
|
||||
if nzb_data is None and not reuse:
|
||||
if nzb_fp is None and not reuse:
|
||||
# This is a slot for a future NZB, ready now
|
||||
# It can also be a retry of a failed job with no extra NZB-file
|
||||
return
|
||||
@@ -762,12 +762,13 @@ class NzbObject(TryList):
|
||||
remove_all(admin_dir, "SABnzbd_nz?_*", keep_folder=True)
|
||||
remove_all(admin_dir, "SABnzbd_article_*", keep_folder=True)
|
||||
|
||||
if nzb_data and "<nzb" in nzb_data:
|
||||
if nzb_fp:
|
||||
full_nzb_path = sabnzbd.save_compressed(admin_dir, filename, nzb_fp)
|
||||
try:
|
||||
sabnzbd.nzbparser.nzbfile_parser(nzb_data, self)
|
||||
sabnzbd.nzbparser.nzbfile_parser(full_nzb_path, self)
|
||||
except Exception as err:
|
||||
self.incomplete = True
|
||||
logging.warning(T("Invalid NZB file %s, skipping (reason=%s, line=%s)"), filename, err, "1")
|
||||
logging.warning(T("Invalid NZB file %s, skipping (error: %s)"), filename, err)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
|
||||
# Some people want to keep the broken files
|
||||
@@ -783,8 +784,8 @@ class NzbObject(TryList):
|
||||
if not reuse and dup_check and self.priority != REPAIR_PRIORITY:
|
||||
duplicate, series_duplicate = self.has_duplicates()
|
||||
|
||||
sabnzbd.backup_nzb(filename, nzb_data)
|
||||
sabnzbd.save_compressed(admin_dir, filename, nzb_data)
|
||||
# Copy to backup
|
||||
sabnzbd.backup_nzb(full_nzb_path)
|
||||
|
||||
if not self.files and not reuse:
|
||||
self.purge_data()
|
||||
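With nzb_fp, the NZB is written gzip-compressed into the job's admin dir first and parsed from that file afterwards. The helper below is only a guess at what save_compressed roughly does; its name, arguments and return value are inferred from the call sites above, not from its definition.

import gzip
import os
import shutil

def save_compressed(admin_dir, filename, nzb_fp):
    """Store the open NZB file object as <admin_dir>/<filename>.gz and return that path."""
    if not filename.lower().endswith(".gz"):
        filename += ".gz"
    full_nzb_path = os.path.join(admin_dir, filename)
    with gzip.open(full_nzb_path, "wb") as gz_fh:
        shutil.copyfileobj(nzb_fp, gz_fh)
    return full_nzb_path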
@@ -1105,8 +1106,7 @@ class NzbObject(TryList):
|
||||
self.postpone_pars(nzf, setname)
|
||||
# Get the next one
|
||||
for new_nzf in self.extrapars[setname]:
|
||||
if not new_nzf.completed:
|
||||
self.add_parfile(new_nzf)
|
||||
if self.add_parfile(new_nzf):
|
||||
# Add it to the top
|
||||
self.files.remove(new_nzf)
|
||||
self.files.insert(0, new_nzf)
|
||||
@@ -1143,8 +1143,8 @@ class NzbObject(TryList):
|
||||
added_blocks = 0
|
||||
while added_blocks < needed_blocks:
|
||||
new_nzf = block_list.pop()
|
||||
self.add_parfile(new_nzf)
|
||||
added_blocks += new_nzf.blocks
|
||||
if self.add_parfile(new_nzf):
|
||||
added_blocks += new_nzf.blocks
|
||||
|
||||
logging.info("Added %s blocks to %s", added_blocks, self.final_name)
|
||||
return added_blocks
|
||||
@@ -1433,15 +1433,18 @@ class NzbObject(TryList):
|
||||
self.unwanted_ext = 2
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def add_parfile(self, parfile: NzbFile):
|
||||
def add_parfile(self, parfile: NzbFile) -> bool:
|
||||
"""Add parfile to the files to be downloaded
|
||||
Resets trylist just to be sure
|
||||
Adjust download-size accordingly
|
||||
Returns False when the file couldn't be added
|
||||
"""
|
||||
if not parfile.completed and parfile not in self.files and parfile not in self.finished_files:
|
||||
parfile.reset_try_list()
|
||||
self.files.append(parfile)
|
||||
self.bytes_tried -= parfile.bytes_left
|
||||
return True
|
||||
return False
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def remove_parset(self, setname: str):
|
||||
@@ -1468,12 +1471,12 @@ class NzbObject(TryList):
|
||||
# from all the sets. This probably means we get too much par2, but it's worth it.
|
||||
blocks_new = 0
|
||||
for new_nzf in self.extrapars[parset]:
|
||||
self.add_parfile(new_nzf)
|
||||
blocks_new += new_nzf.blocks
|
||||
# Enough now?
|
||||
if blocks_new >= self.bad_articles:
|
||||
logging.info("Prospectively added %s repair blocks to %s", blocks_new, self.final_name)
|
||||
break
|
||||
if self.add_parfile(new_nzf):
|
||||
blocks_new += new_nzf.blocks
|
||||
# Enough now?
|
||||
if blocks_new >= self.bad_articles:
|
||||
logging.info("Prospectively added %s repair blocks to %s", blocks_new, self.final_name)
|
||||
break
|
||||
# Reset NZO TryList
|
||||
self.reset_try_list()
|
||||
|
||||
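Because add_parfile now reports whether the par2 file was actually queued, callers can count repair blocks only for files that really made it into the download, instead of blindly adding new_nzf.blocks. A tiny sketch of that caller pattern, with the NzbObject and blocks attributes as stand-ins:

def add_extra_blocks(nzo, block_list, needed_blocks):
    """Pop par2 files until enough repair blocks have genuinely been added."""
    added_blocks = 0
    while block_list and added_blocks < needed_blocks:
        new_nzf = block_list.pop()
        if nzo.add_parfile(new_nzf):
            # Skipped files (already queued or finished) no longer inflate the count
            added_blocks += new_nzf.blocks
    return added_blocks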
@@ -1576,9 +1579,7 @@ class NzbObject(TryList):
|
||||
if dups:
|
||||
download_msgs.append(T("%s articles had non-matching duplicates") % dups)
|
||||
self.set_unpack_info("Download", "<br/>".join(download_msgs), unique=True)
|
||||
|
||||
if self.url:
|
||||
self.set_unpack_info("Source", self.url, unique=True)
|
||||
self.set_unpack_info("Source", self.url or self.filename, unique=True)
|
||||
|
||||
@synchronized(NZO_LOCK)
|
||||
def increase_bad_articles_counter(self, article_type: str):
|
||||
@@ -1742,7 +1743,7 @@ class NzbObject(TryList):
|
||||
and not is_probably_obfuscated(yenc_filename)
|
||||
and not nzf.filename.endswith(".par2")
|
||||
):
|
||||
logging.info("Detected filename from yenc: %s -> %s", nzf.filename, yenc_filename)
|
||||
logging.info("Detected filename from yenc or uu: %s -> %s", nzf.filename, yenc_filename)
|
||||
self.renamed_file(yenc_filename, nzf.filename)
|
||||
nzf.filename = yenc_filename
|
||||
|
||||
|
||||
@@ -19,10 +19,8 @@
|
||||
sabnzbd.osxmenu - macOS Top Menu
|
||||
"""
|
||||
|
||||
import objc
|
||||
from Foundation import *
|
||||
from AppKit import *
|
||||
from PyObjCTools import AppHelper
|
||||
from objc import YES, NO
|
||||
|
||||
import os
|
||||
@@ -40,7 +38,6 @@ from sabnzbd.panic import launch_a_browser
|
||||
|
||||
from sabnzbd.api import fast_queue
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.downloader
|
||||
|
||||
status_icons = {
|
||||
"idle": "icons/sabnzbd_osx_idle.tiff",
|
||||
@@ -80,9 +77,7 @@ class SABnzbdDelegate(NSObject):
|
||||
# Path is modified for the binary
|
||||
icon_path = os.path.join(os.path.dirname(sys.executable), "..", "Resources", status_icons[icon])
|
||||
self.icons[icon] = NSImage.alloc().initByReferencingFile_(icon_path)
|
||||
if sabnzbd.DARWIN_VERSION > 9:
|
||||
# Support for Yosemite Dark Mode
|
||||
self.icons[icon].setTemplate_(YES)
|
||||
self.icons[icon].setTemplate_(YES)
|
||||
|
||||
self.status_item.setImage_(self.icons["idle"])
|
||||
self.status_item.setAlternateImage_(self.icons["clicked"])
|
||||
@@ -435,13 +430,8 @@ class SABnzbdDelegate(NSObject):
|
||||
style.setMaximumLineHeight_(9.0)
|
||||
style.setParagraphSpacing_(-3.0)
|
||||
|
||||
# In Big Sur the offset was changed
|
||||
baseline_offset = 5.0
|
||||
if sabnzbd.DARWIN_VERSION >= 16:
|
||||
baseline_offset = baseline_offset * -1
|
||||
|
||||
titleAttributes = {
|
||||
NSBaselineOffsetAttributeName: baseline_offset,
|
||||
NSBaselineOffsetAttributeName: -5.0,
|
||||
NSFontAttributeName: NSFont.menuFontOfSize_(9.0),
|
||||
NSParagraphStyleAttributeName: style,
|
||||
}
|
||||
|
||||
@@ -39,7 +39,7 @@ from sabnzbd.newsunpack import (
|
||||
is_sfv_file,
|
||||
)
|
||||
from threading import Thread
|
||||
from sabnzbd.misc import on_cleanup_list
|
||||
from sabnzbd.misc import on_cleanup_list, is_sample
|
||||
from sabnzbd.filesystem import (
|
||||
real_path,
|
||||
get_unique_path,
|
||||
@@ -65,7 +65,7 @@ from sabnzbd.filesystem import (
|
||||
get_filename,
|
||||
)
|
||||
from sabnzbd.nzbstuff import NzbObject
|
||||
from sabnzbd.sorting import Sorter, is_sample
|
||||
from sabnzbd.sorting import Sorter
|
||||
from sabnzbd.constants import (
|
||||
REPAIR_PRIORITY,
|
||||
FORCE_PRIORITY,
|
||||
@@ -74,13 +74,12 @@ from sabnzbd.constants import (
|
||||
JOB_ADMIN,
|
||||
Status,
|
||||
VERIFIED_FILE,
|
||||
IGNORED_MOVIE_FOLDERS,
|
||||
)
|
||||
from sabnzbd.nzbparser import process_single_nzb
|
||||
import sabnzbd.emailer as emailer
|
||||
import sabnzbd.downloader
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
import sabnzbd.nzbqueue
|
||||
import sabnzbd.database as database
|
||||
import sabnzbd.notifier as notifier
|
||||
import sabnzbd.utils.rarfile as rarfile
|
||||
@@ -499,7 +498,7 @@ def process_job(nzo: NzbObject):
|
||||
)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
# Better disable sorting because filenames are all off now
|
||||
file_sorter.sort_file = None
|
||||
file_sorter.sorter_active = None
|
||||
|
||||
if empty:
|
||||
job_result = -1
|
||||
@@ -510,17 +509,19 @@ def process_job(nzo: NzbObject):
|
||||
remove_samples(workdir_complete)
|
||||
|
||||
# TV/Movie/Date Renaming code part 2 - rename and move files to parent folder
|
||||
if all_ok and file_sorter.sort_file:
|
||||
if all_ok and file_sorter.sorter_active:
|
||||
if newfiles:
|
||||
workdir_complete, ok = file_sorter.sorter.rename(newfiles, workdir_complete)
|
||||
if not ok:
|
||||
nzo.set_unpack_info("Unpack", T("Failed to move files"))
|
||||
nzo.fail_msg = T("Failed to move files")
|
||||
all_ok = False
|
||||
|
||||
# Run further post-processing
|
||||
if (all_ok or not cfg.safe_postproc()) and not nzb_list:
|
||||
# Use par2 files to deobfuscate unpacked file names
|
||||
if cfg.process_unpacked_par2():
|
||||
# Only if we also run cleanup, so not to process the "regular" par2 files
|
||||
if flag_delete and cfg.process_unpacked_par2():
|
||||
newfiles = deobfuscate.recover_par2_names(newfiles)
|
||||
|
||||
if cfg.deobfuscate_final_filenames():
|
||||
@@ -691,7 +692,7 @@ def prepare_extraction_path(nzo: NzbObject) -> Tuple[str, str, Sorter, bool, Opt
|
||||
else:
|
||||
file_sorter = Sorter(None, nzo.cat)
|
||||
complete_dir = file_sorter.detect(nzo.final_name, complete_dir)
|
||||
if file_sorter.sort_file:
|
||||
if file_sorter.sorter_active:
|
||||
one_folder = False
|
||||
|
||||
complete_dir = sanitize_and_trim_path(complete_dir)
|
||||
@@ -853,9 +854,7 @@ def try_rar_check(nzo: NzbObject, rars):
|
||||
nzo.set_unpack_info("Repair", T("Trying RAR-based verification"), setname)
|
||||
nzo.set_action_line(T("Trying RAR-based verification"), "...")
|
||||
try:
|
||||
# Set path to unrar and open the file
|
||||
# Requires de-unicode for RarFile to work!
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
zf = rarfile.RarFile(rars[0])
|
||||
|
||||
# Skip if it's encrypted
|
||||
@@ -1175,7 +1174,7 @@ def rename_and_collapse_folder(oldpath, newpath, files):
|
||||
if len(items) == 1:
|
||||
folder = items[0]
|
||||
folder_path = os.path.join(oldpath, folder)
|
||||
if os.path.isdir(folder_path) and folder not in ("VIDEO_TS", "AUDIO_TS"):
|
||||
if os.path.isdir(folder_path) and folder.lower() not in IGNORED_MOVIE_FOLDERS:
|
||||
logging.info("Collapsing %s", os.path.join(newpath, folder))
|
||||
oldpath = folder_path
|
||||
|
||||
|
||||
@@ -25,9 +25,7 @@ import time
|
||||
from typing import Optional
|
||||
|
||||
import sabnzbd.utils.kronos as kronos
|
||||
import sabnzbd.rss
|
||||
import sabnzbd.downloader
|
||||
import sabnzbd.dirscanner
|
||||
import sabnzbd.misc
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
@@ -391,6 +389,11 @@ class Scheduler:
|
||||
self.__check_diskspace, "check_diskspace", 5 * 60, 9 * 60, "threaded", args=[full_dir, required_space]
|
||||
)
|
||||
|
||||
def plan_required_server_resume(self, interval: int = 5):
|
||||
"""Create task for resuming downloading"""
|
||||
if not sabnzbd.Downloader.paused:
|
||||
self.plan_resume(interval)
|
||||
|
||||
def cancel_resume_task(self):
|
||||
"""Cancel the current auto resume task"""
|
||||
if self.resume_task:
|
||||
|
||||
@@ -569,6 +569,8 @@ SKIN_TEXT = {
|
||||
"explain-enable_https_verification": TT(
|
||||
"Verify certificates when connecting to indexers and RSS-sources using HTTPS."
|
||||
),
|
||||
"opt-socks5_proxy_url": TT("SOCKS5 Proxy"),
|
||||
"explain-socks5_proxy_url": TT("Use the specified SOCKS5 proxy for all outgoing connections."),
|
||||
"swtag-server": TT("Server"),
|
||||
"swtag-queue": TT("Queue"),
|
||||
"swtag-pp": TT("Post processing"),
|
||||
@@ -644,6 +646,10 @@ SKIN_TEXT = {
|
||||
"ssl_verify-strict": TT("Strict"),
|
||||
"srv-priority": TT("Priority"), #: Server priority
|
||||
"explain-svrprio": TT("0 is highest priority, 100 is the lowest priority"), #: Explain server priority
|
||||
"srv-required": TT("Required"), #: Server required tickbox
|
||||
"explain-required": TT(
|
||||
"In case of connection failures, the download queue will be paused for a few minutes instead of skipping this server"
|
||||
), #: Explain server required tickbox
|
||||
"srv-optional": TT("Optional"), #: Server optional tickbox
|
||||
"explain-optional": TT(
|
||||
"For unreliable servers, will be ignored longer in case of failures"
|
||||
@@ -701,6 +707,9 @@ SKIN_TEXT = {
|
||||
"link-download": TT("Download"), #: Config->RSS button "download item"
|
||||
"button-rssNow": TT("Read All Feeds Now"), #: Config->RSS button
|
||||
# Config->Notifications
|
||||
"defaultNotifiesAll": TT(
|
||||
"If only the <em>Default</em> category is selected, notifications are enabled for jobs in all categories."
|
||||
),
|
||||
"opt-email_endjob": TT("Email Notification On Job Completion"),
|
||||
"email-never": TT("Never"), #: When to send email
|
||||
"email-always": TT("Always"), #: When to send email
|
||||
@@ -773,6 +782,7 @@ SKIN_TEXT = {
|
||||
"catTags": TT("Indexer Categories / Groups"),
|
||||
"button-delCat": TT("X"), #: Small delete button
|
||||
# Config->Sorting
|
||||
"selectOneCat": TT("Select at least 1 category."),
|
||||
"seriesSorting": TT("Series Sorting"),
|
||||
"opt-tvsort": TT("Enable TV Sorting"),
|
||||
"sort-legenda": TT("Pattern Key"),
|
||||
|
||||
@@ -38,7 +38,8 @@ from sabnzbd.filesystem import (
|
||||
clip_path,
|
||||
)
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import EXCLUDED_GUESSIT_PROPERTIES
|
||||
from sabnzbd.constants import EXCLUDED_GUESSIT_PROPERTIES, IGNORED_MOVIE_FOLDERS
|
||||
from sabnzbd.misc import is_sample
|
||||
from sabnzbd.nzbstuff import NzbObject, scan_password
|
||||
|
||||
# Do not rename .vob files as they are usually DVD's
|
||||
@@ -76,7 +77,7 @@ class BaseSorter:
|
||||
self.cat = cat
|
||||
self.filename_set = ""
|
||||
self.fname = "" # Value for %fn substitution in folders
|
||||
self.do_rename = False
|
||||
self.rename_files = False
|
||||
self.info = {}
|
||||
self.type = None
|
||||
self.guess = guess
|
||||
@@ -259,7 +260,7 @@ class BaseSorter:
|
||||
# Split the last part of the path up for the renamer
|
||||
if extension:
|
||||
path, self.filename_set = os.path.split(path)
|
||||
self.do_rename = True
|
||||
self.rename_files = True
|
||||
|
||||
# The normpath function translates "" to "." which results in an incorrect path
|
||||
return os.path.normpath(path) if path else path
|
||||
@@ -305,7 +306,7 @@ class BaseSorter:
|
||||
except:
|
||||
logging.error(T("Failed to rename: %s to %s"), clip_path(current_path), clip_path(newpath))
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
rename_similar(current_path, ext, self.filename_set, ())
|
||||
rename_similar(current_path, ext, self.filename_set)
|
||||
else:
|
||||
logging.debug("Nothing to rename, %s", files)
|
||||
|
||||
@@ -317,7 +318,7 @@ class Sorter:
|
||||
|
||||
def __init__(self, nzo: Optional[NzbObject], cat: str):
|
||||
self.sorter: Optional[BaseSorter] = None
|
||||
self.sort_file = False
|
||||
self.sorter_active = False
|
||||
self.nzo = nzo
|
||||
self.cat = cat
|
||||
|
||||
@@ -334,9 +335,9 @@ class Sorter:
|
||||
self.sorter = MovieSorter(self.nzo, job_name, complete_dir, self.cat, guess)
|
||||
|
||||
if self.sorter and self.sorter.matched:
|
||||
self.sort_file = True
|
||||
self.sorter_active = True
|
||||
|
||||
return self.sorter.get_final_path() if self.sort_file else complete_dir
|
||||
return self.sorter.get_final_path() if self.sorter_active else complete_dir
|
||||
|
||||
|
||||
class SeriesSorter(BaseSorter):
|
||||
@@ -357,12 +358,17 @@ class SeriesSorter(BaseSorter):
|
||||
def match(self):
|
||||
"""Try to guess series info if config and category sort out or force is set"""
|
||||
if self.force or (cfg.enable_tv_sorting() and cfg.tv_sort_string() and self.cat.lower() in self.cats):
|
||||
self.guess = guess_what(self.original_job_name, sort_type="episode")
|
||||
if not self.guess:
|
||||
self.guess = guess_what(self.original_job_name, sort_type="episode")
|
||||
if self.guess.get("type") == "episode" and "date" not in self.guess:
|
||||
logging.debug("Using tv sorter for %s", self.original_job_name)
|
||||
self.matched = True
|
||||
self.type = "tv"
|
||||
|
||||
# Require at least 1 category, this was not enforced before 3.4.0
|
||||
if cfg.enable_tv_sorting() and not self.cats:
|
||||
logging.warning("%s: %s", T("Series Sorting"), T("Select at least 1 category."))
|
||||
|
||||
def get_values(self):
|
||||
"""Collect all values needed for path replacement"""
|
||||
self.get_year()
|
||||
@@ -394,8 +400,8 @@ class SeriesSorter(BaseSorter):
|
||||
"""Rename for Series"""
|
||||
if min_size < 0:
|
||||
min_size = cfg.episode_rename_limit.get_int()
|
||||
if not self.do_rename:
|
||||
return current_path, False
|
||||
if not self.rename_files:
|
||||
return move_to_parent_directory(current_path)
|
||||
else:
|
||||
logging.debug("Renaming series file(s)")
|
||||
return super().rename(files, current_path, min_size)
|
||||
@@ -420,12 +426,17 @@ class MovieSorter(BaseSorter):
|
||||
def match(self):
|
||||
"""Try to guess movie info if config and category sort out or force is set"""
|
||||
if self.force or (cfg.enable_movie_sorting() and self.sort_string and self.cat.lower() in self.cats):
|
||||
self.guess = guess_what(self.original_job_name, sort_type="movie")
|
||||
if not self.guess:
|
||||
self.guess = guess_what(self.original_job_name, sort_type="movie")
|
||||
if self.guess.get("type") == "movie":
|
||||
logging.debug("Using movie sorter for %s", self.original_job_name)
|
||||
self.matched = True
|
||||
self.type = "movie"
|
||||
|
||||
# Require at least 1 category, this was not enforced before 3.4.0
|
||||
if cfg.enable_movie_sorting() and not self.cats:
|
||||
logging.warning("%s: %s", T("Movie Sorting"), T("Select at least 1 category."))
|
||||
|
||||
def get_values(self):
|
||||
"""Collect all values needed for path replacement"""
|
||||
self.get_year()
|
||||
@@ -437,8 +448,9 @@ class MovieSorter(BaseSorter):
|
||||
if min_size < 0:
|
||||
min_size = cfg.movie_rename_limit.get_int()
|
||||
|
||||
if not self.do_rename:
|
||||
return current_path, False
|
||||
if not self.rename_files:
|
||||
return move_to_parent_directory(current_path)
|
||||
|
||||
logging.debug("Renaming movie file(s)")
|
||||
|
||||
def filter_files(f, current_path):
|
||||
@@ -500,12 +512,17 @@ class DateSorter(BaseSorter):
|
||||
def match(self):
|
||||
"""Checks the category for a match, if so set self.matched to true"""
|
||||
if self.force or (cfg.enable_date_sorting() and self.sort_string and self.cat.lower() in self.cats):
|
||||
self.guess = guess_what(self.original_job_name, sort_type="episode")
|
||||
if not self.guess:
|
||||
self.guess = guess_what(self.original_job_name, sort_type="episode")
|
||||
if self.guess.get("type") == "episode" and "date" in self.guess:
|
||||
logging.debug("Using date sorter for %s", self.original_job_name)
|
||||
self.matched = True
|
||||
self.type = "date"
|
||||
|
||||
# Require at least 1 category, this was not enforced before 3.4.0
|
||||
if cfg.enable_date_sorting() and not self.cats:
|
||||
logging.warning("%s: %s", T("Date Sorting"), T("Select at least 1 category."))
|
||||
|
||||
def get_date(self):
|
||||
"""Get month and day"""
|
||||
self.info["month"] = str(self.guess.get("date").month)
|
||||
@@ -526,8 +543,8 @@ class DateSorter(BaseSorter):
|
||||
"""Renaming Date file"""
|
||||
if min_size < 0:
|
||||
min_size = cfg.episode_rename_limit.get_int()
|
||||
if not self.do_rename:
|
||||
return current_path, False
|
||||
if not self.rename_files:
|
||||
return move_to_parent_directory(current_path)
|
||||
else:
|
||||
logging.debug("Renaming date file(s)")
|
||||
return super().rename(files, current_path, min_size)
|
||||
@@ -546,9 +563,11 @@ def move_to_parent_directory(workdir: str) -> Tuple[str, bool]:
|
||||
workdir = os.path.abspath(os.path.normpath(workdir))
|
||||
dest = os.path.abspath(os.path.normpath(os.path.join(workdir, "..")))
|
||||
|
||||
logging.debug("Moving all files from %s to %s", workdir, dest)
|
||||
|
||||
# Check for DVD folders and bail out if found
|
||||
for item in os.listdir(workdir):
|
||||
if item.lower() in ("video_ts", "audio_ts", "bdmv"):
|
||||
if item.lower() in IGNORED_MOVIE_FOLDERS:
|
||||
return workdir, True
|
||||
|
||||
for root, dirs, files in os.walk(workdir):
|
||||
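move_to_parent_directory and rename_and_collapse_folder now share one IGNORED_MOVIE_FOLDERS constant from sabnzbd.constants instead of two hard-coded tuples. Its exact value is not shown in this diff; judging from the removed lines it is presumably something like the sketch below.

# Presumed value, based on the tuples the constant replaces above
IGNORED_MOVIE_FOLDERS = ("video_ts", "audio_ts", "bdmv")

def is_ignored_movie_folder(name):
    """Case-insensitive check used before collapsing or moving a job folder."""
    return name.lower() in IGNORED_MOVIE_FOLDERS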
@@ -612,40 +631,9 @@ def guess_what(name: str, sort_type: Optional[str] = None) -> MatchesDict:
|
||||
):
|
||||
guess["type"] = "unknown"
|
||||
|
||||
# Remove sample indicators from groupnames, e.g. 'sample-groupname' or 'groupname-proof'
|
||||
group = guess.get("release_group", "")
|
||||
if group.lower().startswith(("sample-", "proof-")) or group.lower().endswith(("-sample", "-proof")):
|
||||
# Set clean groupname
|
||||
guess["release_group"] = re.sub("^(sample|proof)-|-(sample|proof)$", "", group, re.I)
|
||||
# Add 'Sample' property to the guess
|
||||
other = guess.get("other")
|
||||
if not other:
|
||||
guess.setdefault("other", "Sample")
|
||||
else:
|
||||
if "Sample" not in guess["other"]:
|
||||
# Pre-existing 'other' may be a string or a list
|
||||
try:
|
||||
guess["other"].append("Sample")
|
||||
except AttributeError:
|
||||
guess["other"] = [other, "Sample"]
|
||||
|
||||
return guess
|
||||
|
||||
|
||||
def is_sample(filename: str) -> bool:
|
||||
"""Try to determine if filename belongs to a sample"""
|
||||
if os.path.splitext(filename)[0].lower().strip() in ("sample", "proof"):
|
||||
# The entire filename is just 'sample.ext' or similar
|
||||
return True
|
||||
|
||||
# If that didn't work, start guessing
|
||||
guess = guess_what(filename).get("other", "")
|
||||
if isinstance(guess, list):
|
||||
return any(item in ("Sample", "Proof") for item in guess)
|
||||
else:
|
||||
return guess in ("Sample", "Proof")
|
||||
|
||||
|
||||
def path_subst(path: str, mapping: List[Tuple[str, str]]) -> str:
|
||||
"""Replace the sort string elements in the path with the real values provided by the mapping;
|
||||
non-elements are copied verbatim."""
|
||||
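For orientation, a hedged usage sketch of path_subst as described by its docstring: elements from the mapping are substituted, everything else is copied through. The %-keys shown are illustrative; the real mapping is built by the sorter's get_values().

# Hypothetical mapping; the sorter assembles the real one from the job's guess
mapping = [("%sn", "Example Show"), ("%0s", "02"), ("%0e", "05")]
sort_string = "%sn/Season %0s/%sn - S%0sE%0e"

# path_subst(sort_string, mapping) substitutes each key and copies the separators
# and literal text verbatim, giving something like:
#   "Example Show/Season 02/Example Show - S02E05"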
@@ -794,7 +782,7 @@ def strip_path_elements(path: str) -> str:
|
||||
return "\\\\" + path if is_unc else path
|
||||
|
||||
|
||||
def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: List[str]):
|
||||
def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: Optional[List[str]] = None):
|
||||
"""Rename all other files in the 'folder' hierarchy after 'name'
|
||||
and move them to the root of 'folder'.
|
||||
Files having extension 'skip_ext' will be moved, but not renamed.
|
||||
@@ -807,7 +795,7 @@ def rename_similar(folder: str, skip_ext: str, name: str, skipped_files: List[st
|
||||
for root, dirs, files in os.walk(folder):
|
||||
for f in files:
|
||||
path = os.path.join(root, f)
|
||||
if path in skipped_files:
|
||||
if skipped_files and path in skipped_files:
|
||||
continue
|
||||
org, ext = os.path.splitext(f)
|
||||
if ext.lower() == skip_ext:
|
||||
@@ -861,7 +849,7 @@ def eval_sort(sort_type: str, expression: str, name: str = None, multipart: str
|
||||
if "%fn" in path:
|
||||
path = path.replace("%fn", fname + ".ext")
|
||||
else:
|
||||
if sorter.do_rename:
|
||||
if sorter.rename_files:
|
||||
path = fpath + ".ext"
|
||||
else:
|
||||
path += "\\" if sabnzbd.WIN32 else "/"
|
||||
|
||||
@@ -25,7 +25,6 @@ import time
|
||||
import logging
|
||||
import queue
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
from http.client import IncompleteRead, HTTPResponse
|
||||
from mailbox import Message
|
||||
|
||||
@@ -9,8 +9,7 @@ import puremagic
|
||||
import os
|
||||
import sys
|
||||
from typing import List
|
||||
from pathlib import Path
|
||||
from sabnzbd.filesystem import get_ext
|
||||
from sabnzbd.filesystem import get_ext, RAR_RE
|
||||
|
||||
# common extension from https://www.computerhope.com/issues/ch001789.htm
|
||||
POPULAR_EXT = (
|
||||
@@ -168,6 +167,8 @@ DOWNLOAD_EXT = (
|
||||
"bdmv",
|
||||
"bin",
|
||||
"bup",
|
||||
"cbr",
|
||||
"cbz",
|
||||
"clpi",
|
||||
"crx",
|
||||
"db",
|
||||
@@ -234,16 +235,16 @@ DOWNLOAD_EXT = (
|
||||
"xpi",
|
||||
)
|
||||
|
||||
# combine to one tuple, with unique entries:
|
||||
# Combine to one tuple, with unique entries:
|
||||
ALL_EXT = tuple(set(POPULAR_EXT + DOWNLOAD_EXT))
|
||||
# prepend a dot to each extension, because we work with a leading dot in extensions
|
||||
# Prepend a dot to each extension, because we work with a leading dot in extensions
|
||||
ALL_EXT = tuple(["." + i for i in ALL_EXT])
|
||||
|
||||
|
||||
def has_popular_extension(file_path: str) -> bool:
|
||||
"""returns boolean if the extension of file_path is a popular, well-known extension"""
|
||||
file_extension = get_ext(file_path)
|
||||
return file_extension in ALL_EXT
|
||||
return file_extension in ALL_EXT or RAR_RE.match(file_extension)
|
||||
|
||||
|
||||
def all_possible_extensions(file_path: str) -> List[str]:
|
||||
@@ -264,9 +265,12 @@ def what_is_most_likely_extension(file_path: str) -> str:
|
||||
|
||||
# Check if text or NZB, as puremagic is not good at that.
|
||||
try:
|
||||
txt = Path(file_path).read_text()
|
||||
# Only read the start, don't need the whole file
|
||||
with open(file_path, "r") as inp_file:
|
||||
txt = inp_file.read(200).lower()
|
||||
|
||||
# Yes, a text file ... so let's check if it's even an NZB:
|
||||
if txt.lower().find("<nzb xmlns=") >= 0 or txt.lower().find("!doctype nzb public") >= 0:
|
||||
if "!doctype nzb public" in txt or "<nzb xmlns=" in txt:
|
||||
# yes, contains NZB signals:
|
||||
return ".nzb"
|
||||
else:
|
||||
|
||||
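has_popular_extension above now also accepts multi-volume rar extensions (.r00, .r01, ...) via RAR_RE. A self-contained approximation follows; the regex is an assumption, since RAR_RE itself is defined in sabnzbd.filesystem and not shown in this diff.

import os
import re

# Assumed shape of RAR_RE: .rar plus numbered split volumes such as .r00
RAR_RE = re.compile(r"\.(?:rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d)$", re.I)

KNOWN_EXT = (".mkv", ".nzb", ".par2")  # tiny stand-in for the combined ALL_EXT tuple

def has_popular_extension(file_path):
    """True when the extension is well known or looks like a rar volume."""
    file_extension = os.path.splitext(file_path)[1].lower()
    return file_extension in KNOWN_EXT or bool(RAR_RE.match(file_extension))

# has_popular_extension("archive.r01") -> True, even though .r01 is not listed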
@@ -9,33 +9,19 @@
|
||||
from happyeyeballs import happyeyeballs
|
||||
print happyeyeballs('newszilla.xs4all.nl', port=119)
|
||||
"""
|
||||
# or with more logging:
|
||||
"""
|
||||
from happyeyeballs import happyeyeballs
|
||||
import logging
|
||||
logger = logging.getLogger('')
|
||||
logger.setLevel(logging.DEBUG)
|
||||
print happyeyeballs('newszilla.xs4all.nl', port=119)
|
||||
"""
|
||||
|
||||
import socket
|
||||
import ssl
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
import queue
|
||||
|
||||
DEBUG = False
|
||||
|
||||
|
||||
# called by each thread
|
||||
def do_socket_connect(queue, ip, PORT, SSL, ipv4delay):
|
||||
# connect to the ip, and put the result into the queue
|
||||
if DEBUG:
|
||||
logging.debug("Input for thread is %s %s %s", ip, PORT, SSL)
|
||||
|
||||
# Called by each thread
|
||||
def do_socket_connect(result_queue: queue.Queue, ip: str, port: int, ipv4delay: int):
|
||||
"""Connect to the ip, and put the result into the queue"""
|
||||
try:
|
||||
# CREATE SOCKET
|
||||
# Create socket
|
||||
if ip.find(":") >= 0:
|
||||
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
|
||||
if ip.find(".") >= 0:
|
||||
@@ -43,123 +29,78 @@ def do_socket_connect(queue, ip, PORT, SSL, ipv4delay):
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
|
||||
s.settimeout(3)
|
||||
if not SSL:
|
||||
try:
|
||||
# Connect ...
|
||||
s.connect((ip, PORT))
|
||||
finally:
|
||||
# always close
|
||||
s.close()
|
||||
else:
|
||||
# SSL, so wrap socket
|
||||
wrappedSocket = ssl.wrap_socket(s)
|
||||
try:
|
||||
# CONNECT
|
||||
wrappedSocket.connect((ip, PORT))
|
||||
finally:
|
||||
# CLOSE SOCKET CONNECTION
|
||||
wrappedSocket.close()
|
||||
|
||||
queue.put((ip, True))
|
||||
if DEBUG:
|
||||
logging.debug("connect to %s OK", ip)
|
||||
try:
|
||||
# Connect ...
|
||||
s.connect((ip, port))
|
||||
finally:
|
||||
# always close
|
||||
s.close()
|
||||
|
||||
result_queue.put((ip, True))
|
||||
except:
|
||||
# We got an exception, so no succesfull connect on IP & port:
|
||||
queue.put((ip, False))
|
||||
if DEBUG:
|
||||
logging.debug("connect to %s not OK", ip)
|
||||
pass
|
||||
# We got an exception, so no successful connect on IP & port:
|
||||
result_queue.put((ip, False))
|
||||
|
||||
|
||||
def happyeyeballs(HOST, **kwargs):
|
||||
# Happyeyeballs function, with caching of the results
|
||||
def happyeyeballs(host: str, port: int = 80, preferipv6: bool = False) -> str:
|
||||
"""Happyeyeballs function, with caching of the results"""
|
||||
|
||||
# Fill out the parameters into the variables
|
||||
try:
|
||||
PORT = kwargs["port"]
|
||||
except:
|
||||
PORT = 80
|
||||
try:
|
||||
SSL = kwargs["ssl"]
|
||||
except:
|
||||
SSL = False
|
||||
try:
|
||||
preferipv6 = kwargs["preferipv6"]
|
||||
except:
|
||||
preferipv6 = False # Do not prefer IPv6
|
||||
|
||||
# Find out if a cached result is available, and recent enough:
|
||||
# Find out if a cached result is available, and recent enough:
|
||||
timecurrent = int(time.time()) # current time in seconds since epoch
|
||||
retentionseconds = 100
|
||||
hostkey = (HOST, PORT, SSL, preferipv6) # Example key: (u'ssl.astraweb.com', 563, True, True)
|
||||
hostkey = (host, port, preferipv6) # Example key: ('ssl.astraweb.com', 563, True)
|
||||
|
||||
try:
|
||||
happyeyeballs.happylist[hostkey] # just to check: does it exist?
|
||||
# No exception, so entry exists, so let's check the time:
|
||||
# Let's check the time:
|
||||
timecached = happyeyeballs.happylist[hostkey][1]
|
||||
if timecurrent - timecached <= retentionseconds:
|
||||
if DEBUG:
|
||||
logging.debug("existing cached result recent enough")
|
||||
return happyeyeballs.happylist[hostkey][0]
|
||||
else:
|
||||
if DEBUG:
|
||||
logging.debug("existing cached result too old. Find a new one")
|
||||
# Continue a few lines down
|
||||
except:
|
||||
# Exception, so entry not there, so we have to fill it out
|
||||
if DEBUG:
|
||||
logging.debug("Host not yet in the cache. Find entry")
|
||||
pass
|
||||
# we only arrive here if the entry has to be determined. So let's do that:
|
||||
|
||||
# We have to determine the (new) best IP address
|
||||
# we only arrive here if the entry has to be determined. So let's do that:
|
||||
# We have to determine the (new) best IP address
|
||||
start = time.perf_counter()
|
||||
if DEBUG:
|
||||
logging.debug("\n\n%s %s %s %s", HOST, PORT, SSL, preferipv6)
|
||||
|
||||
ipv4delay = 0
|
||||
try:
|
||||
# Check if there is an AAAA / IPv6 result for this host:
|
||||
socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
|
||||
if DEBUG:
|
||||
logging.debug("IPv6 address found for %s", HOST)
|
||||
socket.getaddrinfo(host, port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
|
||||
# preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so IPv6 has a head start and is preferred
|
||||
if preferipv6:
|
||||
ipv4delay = 0.1 # preferipv6, AND at least one IPv6 found, so give IPv4 (!) a delay so that IPv6 has a head start and is preferred
|
||||
ipv4delay = 0.1
|
||||
except:
|
||||
if DEBUG:
|
||||
logging.debug("No IPv6 address found for %s", HOST)
|
||||
pass
|
||||
|
||||
myqueue = queue.Queue() # queue used for threads giving back the results
|
||||
result_queue = queue.Queue() # queue used for threads giving back the results
|
||||
|
||||
try:
|
||||
# Get all IP (IPv4 and IPv6) addresses:
|
||||
allinfo = socket.getaddrinfo(HOST, PORT, 0, 0, socket.IPPROTO_TCP)
|
||||
allinfo = socket.getaddrinfo(host, port, 0, 0, socket.IPPROTO_TCP)
|
||||
for info in allinfo:
|
||||
address = info[4][0]
|
||||
thisthread = threading.Thread(target=do_socket_connect, args=(myqueue, address, PORT, SSL, ipv4delay))
|
||||
thisthread.daemon = True
|
||||
thisthread.start()
|
||||
resolver_thread = threading.Thread(target=do_socket_connect, args=(result_queue, address, port, ipv4delay))
|
||||
resolver_thread.daemon = True
|
||||
resolver_thread.start()
|
||||
|
||||
result = None # default return value, used if none of threads says True/"OK", so no connect on any IP address
|
||||
# start reading from the Queue for message from the threads:
|
||||
for i in range(len(allinfo)):
|
||||
s = myqueue.get() # get a response
|
||||
if s[1] == True:
|
||||
result = s[0]
|
||||
for _ in range(len(allinfo)):
|
||||
connect_result = result_queue.get() # get a response
|
||||
if connect_result[1]:
|
||||
result = connect_result[0]
|
||||
break # the first True/"OK" is enough, so break out of for loop
|
||||
except:
|
||||
if DEBUG:
|
||||
logging.debug("something went wrong in the try block")
|
||||
result = None
|
||||
logging.info(
|
||||
"Quickest IP address for %s (port %s, ssl %s, preferipv6 %s) is %s", HOST, PORT, SSL, preferipv6, result
|
||||
)
|
||||
|
||||
logging.info("Quickest IP address for %s (port %s, preferipv6 %s) is %s", host, port, preferipv6, result)
|
||||
delay = int(1000 * (time.perf_counter() - start))
|
||||
logging.debug("Happy Eyeballs lookup and port connect took %s ms", delay)
|
||||
|
||||
# We're done. Store and return the result
|
||||
if result:
|
||||
happyeyeballs.happylist[hostkey] = (result, timecurrent)
|
||||
if DEBUG:
|
||||
logging.debug("Determined new result for %s with result %s", hostkey, happyeyeballs.happylist[hostkey])
|
||||
return result
|
||||
|
||||
|
||||
@@ -167,29 +108,23 @@ happyeyeballs.happylist = {} # The cached results. This static variable must be
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
logger = logging.getLogger("")
|
||||
logger.setLevel(logging.INFO)
|
||||
if DEBUG:
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
# plain HTTP/HTTPS sites:
|
||||
print((happyeyeballs("www.google.com")))
|
||||
print((happyeyeballs("www.google.com", port=443, ssl=True)))
|
||||
print((happyeyeballs("www.google.com", port=443)))
|
||||
print((happyeyeballs("www.nu.nl")))
|
||||
|
||||
# newsservers:
|
||||
print((happyeyeballs("newszilla6.xs4all.nl", port=119)))
|
||||
print((happyeyeballs("newszilla.xs4all.nl", port=119)))
|
||||
print((happyeyeballs("block.cheapnews.eu", port=119)))
|
||||
print((happyeyeballs("block.cheapnews.eu", port=443, ssl=True)))
|
||||
print((happyeyeballs("sslreader.eweka.nl", port=563, ssl=True)))
|
||||
print((happyeyeballs("block.cheapnews.eu", port=443)))
|
||||
print((happyeyeballs("sslreader.eweka.nl", port=563)))
|
||||
print((happyeyeballs("news.thundernews.com", port=119)))
|
||||
print((happyeyeballs("news.thundernews.com", port=119, preferipv6=False)))
|
||||
print((happyeyeballs("secure.eu.thundernews.com", port=563, ssl=True)))
|
||||
print((happyeyeballs("bonus.frugalusenet.com", port=563, ssl=True)))
|
||||
print((happyeyeballs("secure.eu.thundernews.com", port=563)))
|
||||
print((happyeyeballs("bonus.frugalusenet.com", port=563)))
|
||||
|
||||
# Strange cases
|
||||
print((happyeyeballs("does.not.resolve", port=443, ssl=True)))
|
||||
print((happyeyeballs("does.not.resolve", port=443)))
|
||||
print((happyeyeballs("www.google.com", port=119)))
|
||||
print((happyeyeballs("216.58.211.164")))
|
||||
|
||||
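The rewritten happyeyeballs keeps its result cache as an attribute on the function itself, keyed by (host, port, preferipv6) and considered fresh for roughly 100 seconds. A stripped-down sketch of just that caching pattern; the threaded connection race is replaced by a placeholder.

import time

def fastest_ip(host, port=80, preferipv6=False):
    """Return the cached answer when recent enough, otherwise recompute and store it."""
    hostkey = (host, port, preferipv6)
    now = int(time.time())
    cached = fastest_ip.cache.get(hostkey)
    if cached and now - cached[1] <= 100:  # retention window in seconds
        return cached[0]
    result = probe_addresses(host, port, preferipv6)
    if result:
        fastest_ip.cache[hostkey] = (result, now)
    return result

# The cache lives on the function object, just like happyeyeballs.happylist
fastest_ip.cache = {}

def probe_addresses(host, port, preferipv6):
    """Placeholder for the real getaddrinfo lookup plus threaded connect race."""
    return host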
@@ -414,7 +414,7 @@ class ThreadedScheduler(Scheduler):
|
||||
def start(self):
|
||||
"""Splice off a thread in which the scheduler will run."""
|
||||
self.thread = threading.Thread(target=self._run)
|
||||
self.thread.setDaemon(True)
|
||||
self.thread.daemon = True
|
||||
self.thread.start()
|
||||
|
||||
def stop(self):
|
||||
|
||||
@@ -2,16 +2,21 @@ name: sabnzbd
|
||||
summary: SABnzbd
|
||||
description: The automated Usenet download tool
|
||||
confinement: strict
|
||||
base: core18
|
||||
base: core20
|
||||
grade: stable
|
||||
icon: interfaces/Config/templates/staticcfg/images/logo-small.svg
|
||||
adopt-info: sabnzbd
|
||||
|
||||
architectures:
|
||||
- build-on: arm64
|
||||
- build-on: armhf
|
||||
- build-on: amd64
|
||||
|
||||
apps:
|
||||
sabnzbd:
|
||||
environment:
|
||||
LC_CTYPE: C.UTF-8
|
||||
command: python3 $SNAP/opt/sabnzbd/SABnzbd.py -f $SNAP_COMMON
|
||||
command: bin/sabnzbd-wrapper
|
||||
daemon: simple
|
||||
plugs: [network, home, network-bind, removable-media]
|
||||
|
||||
@@ -19,8 +24,7 @@ parts:
|
||||
sabnzbd:
|
||||
plugin: python
|
||||
source: .
|
||||
python-version: python3
|
||||
requirements: [snap/requirements_snap.txt, requirements.txt]
|
||||
requirements: [snap/local/requirements_snap.txt, requirements.txt]
|
||||
stage-packages: [python3-dev, libdb5.3, unrar, p7zip-full, par2]
|
||||
build-packages: [libffi-dev, python3-dev, libssl-dev, cargo]
|
||||
override-build: |
|
||||
@@ -29,5 +33,10 @@ parts:
|
||||
python3 tools/make_mo.py
|
||||
mkdir -p $SNAPCRAFT_PART_INSTALL/opt
|
||||
cp -R $SNAPCRAFT_PART_BUILD $SNAPCRAFT_PART_INSTALL/opt/sabnzbd
|
||||
mkdir -p $SNAPCRAFT_PART_INSTALL/bin
|
||||
echo "python3 \$SNAP/opt/sabnzbd/SABnzbd.py -f \$SNAP_COMMON" > $SNAPCRAFT_PART_INSTALL/bin/sabnzbd-wrapper
|
||||
chmod +x $SNAPCRAFT_PART_INSTALL/bin/sabnzbd-wrapper
|
||||
|
||||
organize:
|
||||
usr/bin/unrar-nonfree: usr/bin/unrar
|
||||
|
||||
|
||||
@@ -136,11 +136,6 @@ def run_sabnews_and_selenium(request):
|
||||
if not sys.platform.startswith("win"):
|
||||
driver_options.add_argument("--single-process")
|
||||
|
||||
# On Linux we want to use the PPA Chrome
|
||||
# This makes sure we always match Chrome and chromedriver
|
||||
if not sys.platform.startswith(("win", "darwin")):
|
||||
driver_options.binary_location = "/usr/bin/chromium-browser"
|
||||
|
||||
# Start the driver and pass it on to all the classes
|
||||
driver = webdriver.Chrome(options=driver_options)
|
||||
for item in request.node.items:
|
||||
|
||||
@@ -145,6 +145,7 @@ def create_nzb(nzb_file=None, nzb_dir=None, metadata=None):
|
||||
|
||||
# Either use directory or single file
|
||||
if nzb_dir:
|
||||
nzb_dir = os.path.normpath(nzb_dir)
|
||||
if not os.path.exists(nzb_dir) or not os.path.isdir(nzb_dir):
|
||||
raise NotADirectoryError("%s is not a valid directory" % nzb_dir)
|
||||
|
||||
|
||||
194
tests/test_decoder.py
Normal file
@@ -0,0 +1,194 @@
#!/usr/bin/python3 -OO
# Copyright 2007-2021 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

"""
tests.test_decoder- Testing functions in decoder.py
"""
import binascii
import os
import pytest

from random import randint
from unittest import mock

import sabnzbd.decoder as decoder
from sabnzbd.nzbstuff import Article


LINES_DATA = [os.urandom(45) for _ in range(32)]
VALID_UU_LINES = [binascii.b2a_uu(data).rstrip(b"\n") for data in LINES_DATA]

END_DATA = os.urandom(randint(1, 45))
VALID_UU_END = [
    binascii.b2a_uu(END_DATA).rstrip(b"\n"),
    b"`",
    b"end",
]


class TestUuDecoder:
    def _generate_msg_part(
        self,
        part: str,
        insert_empty_line: bool = True,
        insert_excess_empty_lines: bool = False,
        insert_headers: bool = False,
        insert_end: bool = True,
        begin_line: bytes = b"begin 644 My Favorite Open Source Movie.mkv",
    ):
        """Generate message parts. Part may be one of 'begin', 'middle', or 'end' for multipart
        messages, or 'single' for a singlepart message. All uu payload is taken from VALID_UU_*.

        Returns Article with a random id and lowest_partnum correctly set, socket-style raw
        data, and the expected result of uu decoding for the generated message.
        """
        article_id = "test@host" + os.urandom(8).hex() + ".sab"
        article = Article(article_id, randint(4321, 54321), None)
        article.lowest_partnum = True if part in ("begin", "single") else False
        # Mock an nzf so results from hashing and filename handling can be stored
        article.nzf = mock.Mock()

        # Store the message data and the expected decoding result
        data = []
        result = []

        # Always start with the response code line
        data.append(b"222 0 <" + bytes(article_id, encoding="ascii") + b">")

        if insert_empty_line:
            # Only insert other headers if there's an empty line
            if insert_headers:
                data.extend([b"x-hoop: is uitgestelde teleurstelling", b"Another-Header: Sure"])

            # Insert the empty line between response code and body
            data.append(b"")

            if insert_excess_empty_lines:
                data.extend([b"", b""])

        # Insert uu data into the body
        if part in ("begin", "single"):
            data.append(begin_line)

        if part in ("begin", "middle", "single"):
            size = randint(4, len(VALID_UU_LINES) - 1)
            data.extend(VALID_UU_LINES[:size])
            result.extend(LINES_DATA[:size])

        if part in ("end", "single"):
            if insert_end:
                data.extend(VALID_UU_END)
                result.append(END_DATA)

        # Signal the end of the message with a dot on a line of its own
        data.append(b".")

        # Join the data with \r\n line endings, just like we get from socket reads
        data = b"\r\n".join(data)
        # Concatenate expected result
        result = b"".join(result)

        return article, data, result

    def test_no_data(self):
        with pytest.raises(decoder.BadUu):
            assert decoder.decode_uu(None, None)

    @pytest.mark.parametrize(
        "raw_data",
        [
            [b""],
            [b"\r\n\r\n"],
            [b"f", b"o", b"o", b"b", b"a", b"r", b"\r\n"],  # Plenty of list items, but (too) few actual lines
            [b"222 0 <artid@woteva>\r\nX-Too-Short: yup\r\n"],
        ],
    )
    def test_short_data(self, raw_data):
        with pytest.raises(decoder.BadUu):
            assert decoder.decode_uu(None, raw_data)

    @pytest.mark.parametrize(
        "raw_data",
        [
            [b"222 0 <foo@bar>\r\n\r\n"],  # Missing altogether
            [b"222 0 <foo@bar>\r\n\r\nbeing\r\n"],  # Typo in 'begin'
            [b"222 0 <foo@bar>\r\n\r\nx-header: begin 644 foobar\r\n"],  # Not at start of the line
            [b"666 0 <foo@bar>\r\nbegin\r\n"],  # No empty line + wrong response code
            [b"OMG 0 <foo@bar>\r\nbegin\r\n"],  # No empty line + invalid response code
            [b"222 0 <foo@bar>\r\nbegin\r\n"],  # No perms
            [b"222 0 <foo@bar>\r\nbegin ABC DEF\r\n"],  # Permissions not octal
            [b"222 0 <foo@bar>\r\nbegin 755\r\n"],  # No filename
            [b"222 0 <foo@bar>\r\nbegin 644 \t \t\r\n"],  # Filename empty after stripping
        ],
    )
    def test_missing_uu_begin(self, raw_data):
        article = Article("foo@bar", 1234, None)
        article.lowest_partnum = True
        filler = b"\r\n" * 4
        with pytest.raises(decoder.BadUu):
            assert decoder.decode_uu(article, raw_data.append(filler))

    @pytest.mark.parametrize("insert_empty_line", [True, False])
    @pytest.mark.parametrize("insert_excess_empty_lines", [True, False])
    @pytest.mark.parametrize("insert_headers", [True, False])
    @pytest.mark.parametrize("insert_end", [True, False])
    @pytest.mark.parametrize(
        "begin_line",
        [
            b"begin 644 nospace.bin",
            b"begin 444 filename with spaces.txt",
            b"BEGIN 644 foobar",
            b"begin 0755 shell.sh",
        ],
    )
    def test_singlepart(self, insert_empty_line, insert_excess_empty_lines, insert_headers, insert_end, begin_line):
        """Test variations of a sane single part nzf with proper uu-encoded data"""
        # Generate a singlepart message
        article, raw_data, expected_result = self._generate_msg_part(
            "single", insert_empty_line, insert_excess_empty_lines, insert_headers, insert_end, begin_line
        )
        assert decoder.decode_uu(article, [raw_data]) == expected_result
        assert article.nzf.filename_checked

    @pytest.mark.parametrize("insert_empty_line", [True, False])
    def test_multipart(self, insert_empty_line):
        """Test a simple multipart nzf"""

        # Generate and process a multipart msg
        decoded_data = expected_data = b""
        for part in ("begin", "middle", "middle", "end"):
            article, data, result = self._generate_msg_part(part, insert_empty_line, False, False, True)
            decoded_data += decoder.decode_uu(article, [data])
            expected_data += result

        # Verify results
        assert decoded_data == expected_data
        assert article.nzf.filename_checked

    @pytest.mark.parametrize(
        "bad_data",
        [
            b"MI^+0E\"C^364:CQ':]DW++^$F0J)6FDG/!`]0\\(4;EG$UY5RI,3JMBNX\\8+06\r\n$(WAIVBC^",  # Trailing junk
            VALID_UU_LINES[-1][:10] + bytes("ваше здоровье", encoding="utf8") + VALID_UU_LINES[-1][-10:],  # Non-ascii
        ],
    )
    def test_broken_uu(self, bad_data):
        article = Article("foo@bar", 4321, None)
        article.lowest_partnum = False
        filler = b"\r\n".join(VALID_UU_LINES[:4]) + b"\r\n"
        with pytest.raises(decoder.BadData):
            assert decoder.decode_uu(article, [b"222 0 <foo@bar>\r\n" + filler + bad_data + b"\r\n"])
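
For readers skimming the new decoder tests above: the fixtures rely on uuencode packing at most 45 bytes of payload per line, which is why LINES_DATA is built from os.urandom(45) chunks. A tiny standalone round-trip with the same binascii helpers (illustrative only, not part of the diff):

import binascii

# Each uuencoded line carries at most 45 bytes; b2a_uu prepends a length
# character and appends a newline, which the fixtures strip with rstrip().
payload = b"x" * 45
encoded = binascii.b2a_uu(payload)
assert binascii.a2b_uu(encoded) == payload
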
@@ -29,6 +29,10 @@ class Test_File_Extension:
        assert file_extension.has_popular_extension("blabla/blabla.mkv")
        assert file_extension.has_popular_extension("blabla/blabla.srt")
        assert file_extension.has_popular_extension("djjddj/aaaaa.epub")
        assert file_extension.has_popular_extension("test/testing.r01")
        assert file_extension.has_popular_extension("test/testing.s91")
        assert not file_extension.has_popular_extension("test/testing")
        assert not file_extension.has_popular_extension("test/testing.rar01")
        assert not file_extension.has_popular_extension("98ads098f098fa.a0ds98f098asdf")

    def test_what_is_most_likely_extension(self):

@@ -257,6 +257,8 @@ class TestSanitizeFiles(ffs.TestCase):
        self.setUpPyfakefs()
        self.fs.path_separator = "\\"
        self.fs.is_windows_fs = True
        # Disable randomisation of directory listings
        self.fs.shuffle_listdir_results = False

    def test_sanitize_files_input(self):
        assert [] == filesystem.sanitize_files(folder=None)

@@ -26,7 +26,6 @@ import sys
from random import choice, randint, sample
from string import ascii_lowercase, digits

import sabnzbd.config
from sabnzbd.constants import (
    DUP_PRIORITY,
    PAUSED_PRIORITY,

@@ -179,8 +179,8 @@ class TestConfigRSS(SABnzbdBaseTest):

    def test_rss_basic_flow(self, httpserver: HTTPServer):
        # Setup the response for the NZB
        nzb_data = create_and_read_nzb("basic_rar5")
        httpserver.expect_request("/test_nzb.nzb").respond_with_data(nzb_data)
        nzb_fp = create_and_read_nzb_fp("basic_rar5")
        httpserver.expect_request("/test_nzb.nzb").respond_with_data(nzb_fp.read())
        nzb_url = httpserver.url_for("/test_nzb.nzb")

        # Set the response for the RSS-feed, replacing the URL to the NZB

@@ -18,8 +18,6 @@
"""
tests.test_functional_downloads - Test the downloading flow
"""
import sys

import sabnzbd.filesystem as filesystem
from tests.testhelper import *

@@ -18,6 +18,7 @@
"""
tests.test_functional_misc - Functional tests of various functions
"""
import io
import shutil
import subprocess
import sys
@@ -46,13 +47,13 @@ class TestShowLogging(SABnzbdBaseTest):
class TestQueueRepair(SABnzbdBaseTest):
    def test_queue_repair(self):
        """Test full queue repair by manually adding an orphaned job"""
        nzb_data = create_and_read_nzb("basic_rar5")
        nzb_fp = create_and_read_nzb_fp("basic_rar5")
        test_job_name = "testfile_%s" % time.time()

        # Create folder and save compressed NZB like SABnzbd would do
        admin_path = os.path.join(SAB_INCOMPLETE_DIR, test_job_name, JOB_ADMIN)
        os.makedirs(admin_path)
        save_compressed(admin_path, test_job_name, nzb_data)
        save_compressed(admin_path, test_job_name, nzb_fp)
        assert os.path.exists(os.path.join(admin_path, test_job_name + ".nzb.gz"))

        # Pause the queue so we don't download stuff

@@ -214,6 +214,44 @@ class TestMisc:
        os.unlink("test.cert")
        os.unlink("test.key")

    @pytest.mark.parametrize(
        "name, result",
        [
            ("Free.Open.Source.Movie.2001.1080p.WEB-DL.DD5.1.H264-FOSS", False),  # Not samples
            ("Setup.exe", False),
            ("23.123.hdtv-rofl", False),
            ("Something.1080p.WEB-DL.DD5.1.H264-EMRG-sample", True),  # Samples
            ("Something.1080p.WEB-DL.DD5.1.H264-EMRG-sample.ogg", True),
            ("Sumtin_Else_1080p_WEB-DL_DD5.1_H264_proof-EMRG", True),
            ("Wot.Eva.540i.WEB-DL.aac.H264-Groupie sample.mp4", True),
            ("file-sample.mkv", True),
            ("PROOF.JPG", True),
            ("Bla.s01e02.title.1080p.aac-sample proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac-proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac sample proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac proof.mkv", True),
            ("Lwtn.s08e26.1080p.web.h264-glhf-sample.par2", True),
            ("Lwtn.s08e26.1080p.web.h264-glhf-sample.vol001-002.par2", True),
            ("Look at That 2011 540i WEB-DL.H265-NoSample", False),
        ],
    )
    def test_is_sample(self, name, result):
        assert misc.is_sample(name) == result

    @pytest.mark.parametrize(
        "name, result",
        [
            ("Not Death Proof (2022) 1080p x264 (DD5.1) BE Subs", False),  # Try to trigger some false positives
            ("Proof.of.Everything.(2042).4320p.x266-4U", False),
            ("Crime_Scene_S01E13_Free_Sample_For_Sale_480p-OhDear", False),
            ("Sample That 2011 480p WEB-DL.H265-aMiGo", False),
            ("NOT A SAMPLE.JPG", False),
        ],
    )
    def test_is_sample_known_false_positives(self, name, result):
        """We know these fail, but don't have a better solution for them at the moment."""
        assert misc.is_sample(name) != result

    @pytest.mark.parametrize(
        "test_input, expected_output",
        [

@@ -18,15 +18,37 @@
"""
tests.test_newsunpack - Tests of various functions in newsunpack
"""

import pytest

from sabnzbd.newsunpack import *
import sabnzbd.newsunpack as newsunpack


class TestNewsUnpack:
    def test_is_sfv_file(self):
        assert is_sfv_file("tests/data/good_sfv_unicode.sfv")
        assert is_sfv_file("tests/data/one_line.sfv")
        assert not is_sfv_file("tests/data/only_comments.sfv")
        assert not is_sfv_file("tests/data/random.bin")
        assert newsunpack.is_sfv_file("tests/data/good_sfv_unicode.sfv")
        assert newsunpack.is_sfv_file("tests/data/one_line.sfv")
        assert not newsunpack.is_sfv_file("tests/data/only_comments.sfv")
        assert not newsunpack.is_sfv_file("tests/data/random.bin")

    def test_is_sevenfile(self):
        # False, because the command is not set
        assert not newsunpack.SEVEN_COMMAND
        assert not newsunpack.is_sevenfile("tests/data/test_7zip/testfile.7z")

        # Set the command to get some real results
        newsunpack.find_programs(".")
        assert newsunpack.SEVEN_COMMAND
        assert not newsunpack.is_sevenfile("tests/data/only_comments.sfv")
        assert not newsunpack.is_sevenfile("tests/data/random.bin")
        assert not newsunpack.is_sevenfile("tests/data/par2file/basic_16k.par2")
        assert newsunpack.is_sevenfile("tests/data/test_7zip/testfile.7z")

    def test_sevenzip(self):
        testzip = newsunpack.SevenZip("tests/data/test_7zip/testfile.7z")
        assert testzip.namelist() == ["testfile.bin"]
        # Basic check that we can get data from the 7zip
        assert len(testzip.open(testzip.namelist()[0]).read()) == 102400

        # Test with a non-7zip file
        with pytest.raises(TypeError):
            newsunpack.SevenZip("tests/data/basic_rar5/testfile.rar")

135 tests/test_newswrapper.py Normal file
@@ -0,0 +1,135 @@
#!/usr/bin/python3 -OO
# Copyright 2007-2021 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

"""
tests.test_newswrapper - Tests of various functions in newswrapper
"""
import logging
import os.path
import socket
import tempfile
import threading
import ssl
import time
from typing import Optional
import portend
from flaky import flaky

from tests.testhelper import *
from sabnzbd import misc
from sabnzbd import newswrapper

TEST_HOST = "127.0.0.1"
TEST_PORT = portend.find_available_local_port()
TEST_DATA = b"connection_test"


def socket_test_server(ssl_context: ssl.SSLContext):
    """Support function that starts a mini-server, as
    socket.create_server is not supported on Python 3.7"""
    # Allow reuse of the address, because our CI is too fast for the socket closing
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    try:
        server_socket.bind((TEST_HOST, TEST_PORT))
        server_socket.listen(1)
        server_socket.settimeout(1.0)
        conn, _ = server_socket.accept()
        with ssl_context.wrap_socket(sock=conn, server_side=True) as wrapped_socket:
            wrapped_socket.write(TEST_DATA)
    except Exception as e:
        # Skip SSL errors
        logging.info("Error in server: %s", e)
        pass
    finally:
        # Make sure to close the socket
        server_socket.close()


@flaky
class TestNewsWrapper:
    cert_file = os.path.join(tempfile.mkdtemp(), "test.cert")
    key_file = os.path.join(tempfile.mkdtemp(), "test.key")

    @pytest.mark.parametrize(
        "server_tls, expected_client_tls, client_cipher, can_connect",
        [
            (None, "TLSv1.3", None, True),  # Default, highest
            (ssl.TLSVersion.TLSv1_2, "TLSv1.2", None, True),  # Server with just TLSv1.2
            (ssl.TLSVersion.SSLv3, None, None, False),  # No connection for old TLS/SSL
            (ssl.TLSVersion.TLSv1, None, None, False),
            (ssl.TLSVersion.TLSv1_1, None, None, False),
            (None, None, "RC4-MD5", False),  # No connection for old cipher
            (None, "TLSv1.2", "AES256-SHA", True),  # Forced to TLSv1.2 if ciphers set
            (None, None, "TLS_AES_128_CCM_SHA256", False),  # Cannot force use of TLSv1.3 cipher
        ],
    )
    def test_newswrapper(
        self,
        server_tls: Optional[ssl.TLSVersion],
        expected_client_tls: Optional[str],
        client_cipher: Optional[str],
        can_connect: bool,
    ):
        # We need at least some certificates for the server to work
        if not os.path.exists(self.cert_file) or not os.path.exists(self.key_file):
            misc.create_https_certificates(self.cert_file, self.key_file)

        # Create the server context
        server_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
        server_context.load_cert_chain(self.cert_file, self.key_file)
        server_context.set_ciphers("HIGH")

        # Set the options
        if server_tls:
            server_context.maximum_version = server_tls
        server_thread = threading.Thread(target=socket_test_server, args=(server_context,), daemon=True)
        server_thread.start()

        # Create the NNTP, mocking the required values
        # We disable certificate validation, as we use self-signed certificates
        nw = mock.Mock()
        nw.blocking = True
        nw.thrdnum = 1
        nw.server = mock.Mock()
        nw.server.host = TEST_HOST
        nw.server.port = TEST_PORT
        nw.server.info = socket.getaddrinfo(TEST_HOST, TEST_PORT, 0, socket.SOCK_STREAM)
        nw.server.ssl = True
        nw.server.ssl_context = None
        nw.server.ssl_verify = 0
        nw.server.ssl_ciphers = client_cipher

        # Do we expect failure to connect?
        if not can_connect:
            with pytest.raises(OSError):
                newswrapper.NNTP(nw, TEST_HOST)
        else:
            nntp = newswrapper.NNTP(nw, TEST_HOST)
            assert nntp.sock.recv(len(TEST_DATA)) == TEST_DATA

            # Assert SSL data
            assert nntp.sock.version() == expected_client_tls

            if client_cipher:
                assert nntp.sock.cipher()[0] == client_cipher

        # Wait for server to close
        server_thread.join(timeout=1.5)
        if server_thread.is_alive():
            raise RuntimeError("Test server was not stopped")
        time.sleep(1.0)
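
As background for the parametrized TLS cases above: a minimal client-side ssl.SSLContext sketch with verification disabled (the tests use self-signed certificates) and a single TLSv1.2 cipher pinned, mirroring the "AES256-SHA" case. This is an illustrative sketch, not the actual context newswrapper builds:

import ssl

# Illustrative only: skip certificate/hostname checks for self-signed test
# certs and pin one TLSv1.2 cipher suite.
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
ctx.set_ciphers("AES256-SHA")
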
71 tests/test_nzbparser.py Normal file
@@ -0,0 +1,71 @@
#!/usr/bin/python3 -OO
# Copyright 2007-2021 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

"""
tests.test_nzbparser - Tests of basic NZB parsing
"""
from tests.testhelper import *
import sabnzbd.nzbparser as nzbparser
from sabnzbd import nzbstuff, save_compressed


@pytest.mark.usefixtures("clean_cache_dir")
class TestNzbParser:
    @set_config({"download_dir": SAB_CACHE_DIR})
    def test_nzbparser(self):
        nzo = nzbstuff.NzbObject("test_basic")
        # Create test file
        metadata = {"category": "test", "password": "testpass"}
        nzb_fp = create_and_read_nzb_fp("..", metadata=metadata)

        # Create folder and save compressed NZB like SABnzbd would do
        save_compressed(SAB_CACHE_DIR, "test", nzb_fp)
        nzb_file = os.path.join(SAB_CACHE_DIR, "test.nzb.gz")
        assert os.path.exists(nzb_file)

        # Files we expect
        test_dir = os.path.normpath(os.path.join(SAB_DATA_DIR, ".."))
        expected_files = [fl for fl in os.listdir(test_dir) if os.path.isfile(os.path.join(test_dir, fl))]
        expected_files.sort()
        assert expected_files

        # Parse the file
        nzbparser.nzbfile_parser(nzb_file, nzo)

        # Compare filenames
        resulting_files = [nzf.filename for nzf in nzo.files]
        resulting_files.sort()
        assert resulting_files == expected_files

        # Compare sizes
        expected_sizes = [os.path.getsize(os.path.join(test_dir, fl)) for fl in expected_files]
        expected_sizes.sort()
        resulting_sizes = [nzf.bytes for nzf in nzo.files]
        resulting_sizes.sort()
        assert resulting_sizes == expected_sizes

        # Check meta-data
        for field in metadata:
            assert [metadata[field]] == nzo.meta[field]

    @pytest.mark.xfail(reason="These tests should be added")
    def test_nzbparser_bad_stuff(self):
        # TODO: Add tests for:
        # Duplicate parts
        # Strange articles sizes
        # Correct parsing of dates
        assert False
@@ -1,9 +1,12 @@
"""
tests.test_nzbstuff - Testing functions in nzbstuff.py
"""
import io

import sabnzbd.nzbstuff as nzbstuff
from sabnzbd.config import ConfigCat
from sabnzbd.constants import NORMAL_PRIORITY
from sabnzbd.encoding import utob
from sabnzbd.filesystem import globber

from tests.testhelper import *
@@ -24,10 +27,10 @@ class TestNZO:
        assert not nzo.created

        # Create NZB-file to import
        nzb_data = create_and_read_nzb("basic_rar5")
        nzb_fp = create_and_read_nzb_fp("basic_rar5")

        # Very basic test of NZO creation with data
        nzo = nzbstuff.NzbObject("test_basic_data", nzb_data=nzb_data)
        nzo = nzbstuff.NzbObject("test_basic_data", nzb_fp=nzb_fp)
        assert nzo.final_name == "test_basic_data"
        assert nzo.files
        assert nzo.files[0].filename == "testfile.rar"

@@ -9,7 +9,6 @@ tests.test_postproc- Tests of various functions in newspack, among which rar_ren
"""

import shutil
from distutils.dir_util import copy_tree
from unittest import mock

from sabnzbd.postproc import *
@@ -35,8 +34,7 @@ class TestPostProc:

        # create a fresh copy
        try:
            # shutil.copytree(sourcedir, workingdir) gives problems on AppVeyor, so:
            copy_tree(sourcedir, workingdir)
            shutil.copytree(sourcedir, workingdir)
        except:
            pytest.fail("Could not create copy of files for rar_renamer")

@@ -25,6 +25,7 @@ import sys
from random import choice

from sabnzbd import sorting
from sabnzbd.constants import IGNORED_MOVIE_FOLDERS
from tests.testhelper import *


@@ -65,7 +66,7 @@ class TestSortingFunctions:
                    "country": "US",
                },
            ),
            ("Test Movie 720p HDTV AAC x265 sample-MYgroup", {"release_group": "MYgroup", "other": "Sample"}),
            ("Test Movie 720p HDTV AAC x265 MYgroup-Sample", {"release_group": "MYgroup", "other": "Sample"}),
            (None, None),  # Jobname missing
            ("", None),
        ],
@@ -85,33 +86,6 @@ class TestSortingFunctions:
        else:
            assert guess[key] == value

    @pytest.mark.parametrize(
        "name, result",
        [
            ("Free.Open.Source.Movie.2001.1080p.WEB-DL.DD5.1.H264-FOSS", False),  # Not samples
            ("Setup.exe", False),
            ("23.123.hdtv-rofl", False),
            ("Something.1080p.WEB-DL.DD5.1.H264-EMRG-sample", True),  # Samples
            ("Something.1080p.WEB-DL.DD5.1.H264-EMRG-sample.ogg", True),
            ("Sumtin_Else_1080p_WEB-DL_DD5.1_H264_proof-EMRG", True),
            ("Wot.Eva.540i.WEB-DL.aac.H264-Groupie sample.mp4", True),
            ("file-sample.mkv", True),
            ("PROOF.JPG", True),
            ("Bla.s01e02.title.1080p.aac-sample proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac-proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac sample proof.mkv", True),
            ("Bla.s01e02.title.1080p.aac proof.mkv", True),
            ("Not Death Proof (2022) 1080p x264 (DD5.1) BE Subs", False),  # Try to trigger some false positives
            ("Proof.of.Everything.(2042).4320p.x266-4U", False),
            ("Crime_Scene_S01E13_Free_Sample_For_Sale_480p-OhDear", False),
            ("Sample That 2011 480p WEB-DL.H265-aMiGo", False),
            ("Look at That 2011 540i WEB-DL.H265-NoSample", False),
            ("NOT A SAMPLE.JPG", False),
        ],
    )
    def test_is_sample(self, name, result):
        assert sorting.is_sample(name) == result

    @pytest.mark.parametrize("platform", ["linux", "darwin", "win32"])
    @pytest.mark.parametrize(
        "path, result_unix, result_win",
@@ -315,7 +289,7 @@ class TestSortingFunctions:
        pyfakefs.fake_filesystem_unittest.set_uid(0)
        # Create a fake filesystem in a random base directory, and included a typical DVD directory
        base_dir = "/" + os.urandom(4).hex() + "/" + os.urandom(2).hex()
        dvd = choice(("video_ts", "audio_ts", "bdmv"))
        dvd = choice(IGNORED_MOVIE_FOLDERS)
        for test_dir in ["dir/2", "TEST/DIR2"]:
            ffs.fs.create_dir(base_dir + "/" + test_dir, perm_bits=755)
            assert os.path.exists(base_dir + "/" + test_dir) is True
@@ -373,7 +347,7 @@ class TestSortingFunctions:
        pyfakefs.fake_filesystem_unittest.set_uid(0)
        # Create a fake filesystem in a random base directory, and included a typical DVD directory
        base_dir = "D:\\" + os.urandom(4).hex() + "\\" + os.urandom(2).hex()
        dvd = choice(("video_ts", "audio_ts", "bdmv"))
        dvd = choice(IGNORED_MOVIE_FOLDERS)
        for test_dir in ["dir\\2", "TEST\\DIR2"]:
            ffs.fs.create_dir(base_dir + "\\" + test_dir, perm_bits=755)
            assert os.path.exists(base_dir + "\\" + test_dir) is True
@@ -553,11 +527,14 @@ class TestSortingSorters:
            _func()

    @pytest.mark.parametrize(
        "s_class, job_tag, sort_string, sort_result",  # sort_result without extension
        "s_class, job_tag, sort_string, sort_filename_result",  # Supply sort_filename_result without extension
        [
            (sorting.SeriesSorter, "S01E02", "%r/%sn s%0se%0e.%ext", "Simulated Job s01e02"),
            (sorting.SeriesSorter, "S01E02", "%r/%sn s%0se%0e", ""),
            (sorting.MovieSorter, "2021", "%y_%.title.%r.%ext", "2021_Simulated.Job.2160p"),
            (sorting.DateSorter, "2020-02-29", "%y/%0m/%0d/%.t-%GI<release_group>", "Simulated.Job-SAB"),
            (sorting.MovieSorter, "2021", "%y_%.title.%r", ""),
            (sorting.DateSorter, "2020-02-29", "%y/%0m/%0d/%.t-%GI<release_group>.%ext", "Simulated.Job-SAB"),
            (sorting.DateSorter, "2020-02-29", "%y/%0m/%0d/%.t-%GI<release_group>", ""),
        ],
    )
    @pytest.mark.parametrize("size_limit, file_size", [(512, 1024), (1024, 512)])
@@ -569,7 +546,7 @@ class TestSortingSorters:
        s_class,
        job_tag,
        sort_string,
        sort_result,
        sort_filename_result,
        size_limit,
        file_size,
        extension,
@@ -631,8 +608,10 @@ class TestSortingSorters:

        # Check the result
        try:
            # If there's no "%ext" in the sort_string, no filenames should be changed
            if (
                is_ok
                and sort_filename_result
                and file_size > size_limit
                and extension not in sorting.EXCLUDED_FILE_EXTS
                and not (sorter.type == "movie" and number_of_files > 1 and not generate_sequential_filenames)
@@ -642,10 +621,10 @@ class TestSortingSorters:
                if number_of_files > 1 and generate_sequential_filenames and sorter.type == "movie":
                    # Movie sequential file handling
                    for n in range(1, number_of_files + 1):
                        expected = os.path.join(sort_dest, sort_result + " CD" + str(n) + extension)
                        expected = os.path.join(sort_dest, sort_filename_result + " CD" + str(n) + extension)
                        assert os.path.exists(expected)
                else:
                    expected = os.path.join(sort_dest, sort_result + extension)
                    expected = os.path.join(sort_dest, sort_filename_result + extension)
                    assert os.path.exists(expected)
            else:
                # No renaming should happen
@@ -699,7 +678,7 @@ class TestSortingSorters:
        generic = sorting.Sorter(None, "test_cat")
        generic.detect(job_name, SAB_CACHE_DIR)

        assert generic.sort_file is result_sort_file
        assert generic.sorter_active is result_sort_file
        if result_sort_file:
            assert generic.sorter
            assert generic.sorter.__class__ is result_class

@@ -25,7 +25,6 @@ import urllib.parse
import pytest_httpbin

import sabnzbd.urlgrabber as urlgrabber
import sabnzbd.version
from sabnzbd.cfg import selftest_host
from tests.testhelper import *


@@ -36,7 +36,7 @@ class TestHappyEyeballs:
        assert "." in ip or ":" in ip

    def test_google_https(self):
        ip = happyeyeballs("www.google.com", port=443, ssl=True)
        ip = happyeyeballs("www.google.com", port=443)
        assert "." in ip or ":" in ip

    def test_not_resolvable(self):

@@ -18,10 +18,12 @@
"""
tests.testhelper - Basic helper functions
"""

import io
import os
import time
from http.client import RemoteDisconnected
from typing import BinaryIO, Optional, Dict

import pytest
from random import choice, randint
import requests
@@ -138,21 +140,21 @@ def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}):
    return r.json()


def create_nzb(nzb_dir, metadata=None):
def create_nzb(nzb_dir: str, metadata: Optional[Dict[str, str]] = None) -> str:
    """Create NZB from directory using SABNews"""
    nzb_dir_full = os.path.join(SAB_DATA_DIR, nzb_dir)
    return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full, metadata=metadata)


def create_and_read_nzb(nzbdir):
def create_and_read_nzb_fp(nzbdir: str, metadata: Optional[Dict[str, str]] = None) -> BinaryIO:
    """Create NZB, return data and delete file"""
    # Create NZB-file to import
    nzb_path = create_nzb(nzbdir)
    with open(nzb_path, "r") as nzb_data_fp:
    nzb_path = create_nzb(nzbdir, metadata)
    with open(nzb_path, "rb") as nzb_data_fp:
        nzb_data = nzb_data_fp.read()
    # Remove the created NZB-file
    os.remove(nzb_path)
    return nzb_data
    return io.BytesIO(nzb_data)


def random_name(lenghth: int = 16) -> str:
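
The testhelper change above swaps a str return value for an io.BytesIO object, so callers receive a seekable, re-readable file handle. A small standalone illustration of that pattern (not taken from the diff):

import io

# Wrapping raw bytes in BytesIO yields a file-like NZB handle that can be
# read and rewound before being passed to a parser.
data = b"<nzb>...</nzb>"
nzb_fp = io.BytesIO(data)
assert nzb_fp.read() == data
nzb_fp.seek(0)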