Mirror of https://github.com/sabnzbd/sabnzbd.git, synced 2025-12-27 09:38:31 -05:00

Compare commits: feature/pr...3.1.x (26 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 86c0f7e864 |  |
|  | 99b5a00c12 |  |
|  | 85ee1f07d7 |  |
|  | e58b4394e0 |  |
|  | 1e91a57bf1 |  |
|  | 39cee52a7e |  |
|  | 72068f939d |  |
|  | 096d0d3cad |  |
|  | 2472ab0121 |  |
|  | 00421717b8 |  |
|  | 23f86e95f1 |  |
|  | eed2045189 |  |
|  | 217785bf0f |  |
|  | 6aef50dc5d |  |
|  | 16b6e3caa7 |  |
|  | 3de4c99a8a |  |
|  | 980aa19a75 |  |
|  | fb4b57e056 |  |
|  | 03638365ea |  |
|  | 157cb1c83d |  |
|  | e51f11c2b1 |  |
|  | 1ad0961dd8 |  |
|  | 46ff7dd4e2 |  |
|  | 8b067df914 |  |
|  | ef43b13272 |  |
|  | e8e9974224 |  |

PKG-INFO (4 changed lines)

@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 3.1.0RC1
-Summary: SABnzbd-3.1.0RC1
+Version: 3.1.1
+Summary: SABnzbd-3.1.1
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org

README.mkd (24 changed lines)

@@ -1,24 +1,29 @@
-Release Notes - SABnzbd 3.1.0 Release Candidate 1
+Release Notes - SABnzbd 3.1.1
 =========================================================
 
-## Changes and bugfixes since 3.1.0 Beta 2
-- Deobfuscate final filenames can now be used when job folders are disabled.
-- Deobfuscate final filenames will ignore blu-ray disc files.
-- Clear error if Complete Folder is set as a subfolder of the Temporary Folder.
-- Filtering of history by category would not filter jobs in post-processing.
+## Changes and bugfixes since 3.1.0
+- Enforce CRLF line endings on outgoing email messages.
+- Queue Repair would fail if Rating is enabled.
+- It was not possible to set directory-settings to empty values.
+- Deobfuscate-during-download was not triggered.
+- Failed to start on Python 3.5 with HTTPS enabled.
+- Could show traceback when formatting error/warnings messages.
+- Windows/macOS: improve handling of program restart.
 
 ## Changes since 3.0.2
 - Added option to automatically deobfuscate final filenames: after unpacking,
   detect and rename obfuscated or meaningless filenames to the job name,
-  similar to the Deobfuscate.py post-processing script.
+  similar to the `Deobfuscate.py` post-processing script.
 - Switched to Transifex as our translations platform:
   Help us translate SABnzbd in your language! Add untranslated texts or
   improved existing translations here: https://sabnzbd.org/wiki/translate
 - Redesigned job availability-check to be more efficient and reliable.
 - Scheduled readouts of RSS-feeds would fail silently, they now show a warning.
 - Skip repair on Retry if all sets were previously successfully verified.
 - Passwords included in the filename no longer have to be at the end.
 - Restore limit on length of foldernames (`max_foldername_length`).
 - Added password input box on the Add NZB screen.
 - Clear error if `Complete Folder` is set as a subfolder of the `Temporary Folder`.
 - Show warning that Python 3.5 support will be dropped after 3.1.0.
 - Windows/macOS: update UnRar to 5.91 and MultiPar to 1.3.1.0.
 - Windows: retry `Access Denied` when renaming files on Windows.
@@ -27,12 +32,17 @@ Release Notes - SABnzbd 3.1.0 Release Candidate 1
 - Assembler crashes could occur due to race condition in `ArticleCache`.
 - On HTTP-redirects the scheme/hostname/port were ignored when behind a proxy.
 - Strip slash of the end of `url_base` as it could break other code.
 - `Temporary Folder` with unicode characters could result in duplicate unpacking.
 - Unpacking with a relative folder set for a category could fail.
+- Existing files were not parsed when retrying a job.
+- Reading attributes when retrying a job could result in crash.
+- Paused priority of pre-queue script was ignored.
+- Duplicate Detection did not check filenames in History.
+- Downloaded bytes could show as exceeding the total bytes of a job.
 - Filtering of history by category would not filter jobs in post-processing.
 - Windows: non-Latin languages were displayed incorrectly in the installer.
 - Windows: could fail to create folders on some network shares.
 - Windows: folders could end in a period, breaking Windows Explorer.
 
 ## Upgrade notices
 - Jobs that failed on versions before 3.1.x, will throw an error about the

SABnzbd.py (20 changed lines)

@@ -125,17 +125,23 @@ class GUIHandler(logging.Handler):
 
     def emit(self, record):
         """ Emit a record by adding it to our private queue """
+        # If % is part of the msg, this could fail
+        try:
+            record_msg = record.msg % record.args
+        except TypeError:
+            record_msg = record.msg + str(record.args)
+
         if record.levelname == "WARNING":
-            sabnzbd.LAST_WARNING = record.msg % record.args
+            sabnzbd.LAST_WARNING = record_msg
         else:
-            sabnzbd.LAST_ERROR = record.msg % record.args
+            sabnzbd.LAST_ERROR = record_msg
 
         if len(self.store) >= self.size:
             # Loose the oldest record
             self.store.pop(0)
         try:
             # Append traceback, if available
-            warning = {"type": record.levelname, "text": record.msg % record.args, "time": int(time.time())}
+            warning = {"type": record.levelname, "text": record_msg, "time": int(time.time())}
             if record.exc_info:
                 warning["text"] = "%s\n%s" % (warning["text"], traceback.format_exc())
             self.store.append(warning)

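This matches the release note "Could show traceback when formatting error/warnings messages": a message containing a literal `%` makes the unconditional `%`-formatting raise. A minimal sketch of the failure mode the new `try/except` guards against (not the project's code, just the mechanism):

```python
msg, args = "Disk is 95% full", ()
try:
    text = msg % args  # "% f" in the message parses as a format specifier -> TypeError
except TypeError:
    text = msg + str(args)  # the patched fallback: plain concatenation
print(text)  # Disk is 95% full()
```
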
@@ -1287,7 +1293,7 @@ def main():
             sabnzbd.cfg.enable_https.set(False)
 
         # So the cert and key files do exist, now let's check if they are valid:
-        trialcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+        trialcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
         try:
             trialcontext.load_cert_chain(https_cert, https_key)
             logging.info("HTTPS keys are OK")

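This is the fix for "Failed to start on Python 3.5 with HTTPS enabled": `ssl.PROTOCOL_TLS_SERVER` was only added in Python 3.6, so merely referencing the attribute raises `AttributeError` on 3.5, before any certificate is touched. A hedged sketch of the compatibility issue (file names are hypothetical):

```python
import ssl

# ssl.PROTOCOL_TLS_SERVER exists only on Python 3.6+; ssl.PROTOCOL_TLSv1_2 is
# available on both 3.5 and 3.6+, at the cost of pinning the TLS version.
protocol = getattr(ssl, "PROTOCOL_TLS_SERVER", ssl.PROTOCOL_TLSv1_2)
trialcontext = ssl.SSLContext(protocol)
trialcontext.load_cert_chain("server.cert", "server.key")  # hypothetical paths
```
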
@@ -1530,6 +1536,7 @@ def main():
         # Check for auto-restart request
         # Or special restart cases like Mac and WindowsService
         if sabnzbd.TRIGGER_RESTART:
+            logging.info("Performing triggered restart")
             # Shutdown
             sabnzbd.shutdown_program()
 
@@ -1548,7 +1555,7 @@ def main():
                 my_name = sabnzbd.MY_FULLNAME.replace("/Contents/MacOS/SABnzbd", "")
                 my_args = " ".join(sys.argv[1:])
                 cmd = 'kill -9 %s && open "%s" --args %s' % (my_pid, my_name, my_args)
-                logging.info("Launching: ", cmd)
+                logging.info("Launching: %s", cmd)
                 os.system(cmd)
             elif sabnzbd.WIN_SERVICE:
                 # Use external service handler to do the restart
@@ -1659,7 +1666,8 @@ def handle_windows_service():
     """
     # Detect if running as Windows Service (only Vista and above!)
     # Adapted from https://stackoverflow.com/a/55248281/5235502
-    if win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
+    # Only works when run from the exe-files
+    if hasattr(sys, "frozen") and win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
         servicemanager.Initialize()
         servicemanager.PrepareToHostSingle(SABnzbd)
         servicemanager.StartServiceCtrlDispatcher()

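The extra `hasattr(sys, "frozen")` guard means the Windows-service branch is only taken when SABnzbd runs from the frozen executable. A small sketch of the detection pattern, assuming a py2exe/PyInstaller-style build:

```python
import sys

def running_as_frozen_exe() -> bool:
    # Frozen builds (py2exe, PyInstaller) set sys.frozen on the bundled
    # interpreter; a plain "python SABnzbd.py" run does not have it.
    return hasattr(sys, "frozen")

print(running_as_frozen_exe())  # False when run from source
```
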
@@ -465,15 +465,6 @@ def trigger_restart(timeout=None):
     if timeout:
         time.sleep(timeout)
 
-    # Add extra arguments
-    if sabnzbd.downloader.Downloader.do.paused:
-        sabnzbd.RESTART_ARGS.append("-p")
-    sys.argv = sabnzbd.RESTART_ARGS
-
-    # Stop all services
-    sabnzbd.halt()
-    cherrypy.engine.exit()
-
     if sabnzbd.WIN32:
         # Remove connection info for faster restart
         del_connection_info()
@@ -482,6 +473,15 @@ def trigger_restart(timeout=None):
     if hasattr(sys, "frozen"):
         sabnzbd.TRIGGER_RESTART = True
     else:
+        # Add extra arguments
+        if sabnzbd.downloader.Downloader.do.paused:
+            sabnzbd.RESTART_ARGS.append("-p")
+        sys.argv = sabnzbd.RESTART_ARGS
+
+        # Stop all services
+        sabnzbd.halt()
+        cherrypy.engine.exit()
+
         # Do the restart right now
         cherrypy.engine._do_execv()

@@ -1746,8 +1746,8 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None):
     # Un-reverse the queue
     items.reverse()
 
-    # Global check if rating is enabled
-    rating_enabled = cfg.rating_enable()
+    # Global check if rating is enabled and available (queue-repair)
+    rating_enabled = cfg.rating_enable() and Rating.do
 
     for item in items:
         item["size"] = to_units(item["bytes"], "B")

@@ -236,7 +236,7 @@ class OptionDir(Option):
         'create' means try to create (but don't set permanent create flag)
         """
         error = None
-        if value and (value != self.get() or create):
+        if value is not None and (value != self.get() or create):
             value = value.strip()
             if self.__validation:
                 error, value = self.__validation(self.__root, value, super().default())

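This is the fix for "It was not possible to set directory-settings to empty values": `if value` treats the empty string as false, so clearing a directory option never reached the assignment. A minimal illustration of the difference:

```python
# Truthiness vs. an explicit None check: only the latter lets "" through.
for value in ("/downloads", "", None):
    old_style = bool(value)        # "" -> False: an empty value was silently ignored
    new_style = value is not None  # "" -> True: clearing the setting now works
    print(repr(value), old_style, new_style)
```
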
@@ -151,7 +151,7 @@ class Status:
     GRABBING = "Grabbing"  # Q: Getting an NZB from an external site
     MOVING = "Moving"  # PP: Files are being moved
     PAUSED = "Paused"  # Q: Job is paused
-    QUEUED = "Queued"  # Q: Job is waiting for its turn to download
+    QUEUED = "Queued"  # Q: Job is waiting for its turn to download or post-process
     QUICK_CHECK = "QuickCheck"  # PP: QuickCheck verification is running
     REPAIRING = "Repairing"  # PP: Job is being repaired (by par2)
     RUNNING = "Running"  # PP: User's post processing script is running

@@ -46,7 +46,7 @@ def compare_stat_tuple(tup1, tup2):
 
 def clean_file_list(inp_list, folder, files):
     """ Remove elements of "inp_list" not found in "files" """
-    for path in sorted(inp_list.keys()):
+    for path in sorted(inp_list):
         fld, name = os.path.split(path)
         if fld == folder:
             present = False

@@ -914,7 +914,8 @@ class Downloader(Thread):
         # Clean expired timers
         now = time.time()
         kicked = []
-        for server_id in self._timers.keys():
+        # Create a copy so we can remove during iteration
+        for server_id in list(self._timers):
             if not [stamp for stamp in self._timers[server_id] if stamp >= now]:
                 logging.debug("Forcing re-evaluation of server-id %s", server_id)
                 del self._timers[server_id]

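Several hunks in this compare drop redundant `.keys()` calls: iterating, `sorted()`, `min()`, and `len()` on a dict all operate on its keys already. The one case that still needs a copy is mutating the dict while looping, which is exactly what the `list(self._timers)` snapshot above handles. A short sketch:

```python
timers = {"server-a": [1, 2], "server-b": []}

# sorted()/min()/len() already work on the keys; .keys() adds nothing:
assert sorted(timers) == sorted(timers.keys())
assert min(timers) == "server-a"

# Deleting while iterating the live dict raises RuntimeError,
# so iterate over a snapshot of the keys instead:
for server_id in list(timers):
    if not timers[server_id]:
        del timers[server_id]
print(timers)  # {'server-a': [1, 2]}
```
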
@@ -27,6 +27,7 @@ import glob
 
 from Cheetah.Template import Template
 from email.message import EmailMessage
+from email import policy
 
 from sabnzbd.constants import *
 import sabnzbd
@@ -296,4 +297,4 @@ def _prepare_message(txt):
         msg[keyword] = value
 
     msg.set_content("\n".join(payload))
-    return msg.as_bytes()
+    return msg.as_bytes(policy=msg.policy.clone(linesep="\r\n"))

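This implements "Enforce CRLF line endings on outgoing email messages": by default `EmailMessage.as_bytes()` serializes with `\n`, while SMTP (RFC 5321) requires `\r\n`. A self-contained check of the behavior:

```python
from email.message import EmailMessage

msg = EmailMessage()
msg["Subject"] = "SABnzbd notification"
msg.set_content("first line\nsecond line")

# The default policy joins lines with "\n"; cloning it with linesep="\r\n"
# produces the CRLF endings SMTP expects.
assert b"\r\n" not in msg.as_bytes()
assert b"\r\n" in msg.as_bytes(policy=msg.policy.clone(linesep="\r\n"))
```
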
@@ -529,7 +529,7 @@ class Wizard:
         else:
             # Sort servers to get the first enabled one
             server_names = sorted(
-                servers.keys(),
+                servers,
                 key=lambda svr: "%d%02d%s"
                 % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()),
             )

@@ -1018,16 +1018,13 @@ class QueuePage:
 class HistoryPage:
     def __init__(self, root):
         self.__root = root
-        self.__failed_only = False
 
     @secured_expose
     def index(self, **kwargs):
         start = int_conv(kwargs.get("start"))
         limit = int_conv(kwargs.get("limit"))
         search = kwargs.get("search")
-        failed_only = kwargs.get("failed_only")
-        if failed_only is None:
-            failed_only = self.__failed_only
+        failed_only = int_conv(kwargs.get("failed_only"))
 
         history = build_header()
         history["failed_only"] = failed_only

@@ -1583,7 +1580,7 @@ class ConfigServer:
         new = []
         servers = config.get_servers()
         server_names = sorted(
-            list(servers.keys()),
+            servers,
             key=lambda svr: "%d%02d%s"
             % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()),
         )

@@ -1350,7 +1350,7 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False):
                 block_table[nzf.blocks] = nzf
 
             if block_table:
-                nzf = block_table[min(block_table.keys())]
+                nzf = block_table[min(block_table)]
                 logging.info("Found new par2file %s", nzf.filename)
 
                 # Move from extrapar list to files to be downloaded
@@ -1650,7 +1650,7 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False):
                 block_table[nzf.blocks] = nzf
 
             if block_table:
-                nzf = block_table[min(block_table.keys())]
+                nzf = block_table[min(block_table)]
                 logging.info("Found new par2file %s", nzf.filename)
 
                 # Move from extrapar list to files to be downloaded

@@ -1976,8 +1976,9 @@ def create_env(nzo=None, extra_env_fields={}):
 
 
 def rar_volumelist(rarfile_path, password, known_volumes):
-    """Extract volumes that are part of this rarset
-    and merge them with existing list, removing duplicates
+    """List volumes that are part of this rarset
+    and merge them with parsed paths list, removing duplicates.
+    We assume RarFile is right and use parsed paths as backup.
     """
     # UnRar is required to read some RAR files
     # RarFile can fail in special cases
@@ -1996,12 +1997,12 @@ def rar_volumelist(rarfile_path, password, known_volumes):
         zf_volumes = []
 
     # Remove duplicates
-    known_volumes_base = [os.path.basename(vol) for vol in known_volumes]
-    for zf_volume in zf_volumes:
-        if os.path.basename(zf_volume) not in known_volumes_base:
+    zf_volumes_base = [os.path.basename(vol) for vol in zf_volumes]
+    for known_volume in known_volumes:
+        if os.path.basename(known_volume) not in zf_volumes_base:
             # Long-path notation just to be sure
-            known_volumes.append(long_path(zf_volume))
-    return known_volumes
+            zf_volumes.append(long_path(known_volume))
+    return zf_volumes
 
 
 # Sort the various RAR filename formats properly :\

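The merge direction flipped here: the volume list reported by `RarFile` is now treated as authoritative, and the paths parsed from filenames only fill in volumes it missed. A simplified sketch of the dedup-by-basename merge (SABnzbd additionally applies long-path notation, omitted here):

```python
import os

def merge_volumes(rarfile_volumes, parsed_volumes):
    """Keep RarFile's list as the base; append parsed paths it did not report."""
    seen = {os.path.basename(vol) for vol in rarfile_volumes}
    merged = list(rarfile_volumes)
    for vol in parsed_volumes:
        if os.path.basename(vol) not in seen:
            merged.append(vol)
    return merged

print(merge_volumes(["set.part1.rar"], ["set.part1.rar", "set.part2.rar"]))
# ['set.part1.rar', 'set.part2.rar']
```
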
@@ -118,7 +118,7 @@ def nzbfile_parser(raw_data, nzo):
                 pass
 
     # Sort the articles by part number, compatible with Python 3.5
-    raw_article_db_sorted = [raw_article_db[partnum] for partnum in sorted(raw_article_db.keys())]
+    raw_article_db_sorted = [raw_article_db[partnum] for partnum in sorted(raw_article_db)]
 
     # Create NZF
     nzf = sabnzbd.nzbstuff.NzbFile(file_date, file_name, raw_article_db_sorted, file_bytes, nzo)

@@ -204,18 +204,24 @@ class NzbQueue:
         return nzo_id
 
     @NzbQueueLocker
-    def send_back(self, nzo):
+    def send_back(self, old_nzo):
         """ Send back job to queue after successful pre-check """
         try:
-            nzb_path = globber_full(nzo.workpath, "*.gz")[0]
+            nzb_path = globber_full(old_nzo.workpath, "*.gz")[0]
         except:
-            logging.info("Failed to find NZB file after pre-check (%s)", nzo.nzo_id)
+            logging.info("Failed to find NZB file after pre-check (%s)", old_nzo.nzo_id)
             return
 
-        # Need to remove it first, otherwise it might still be downloading
-        self.remove(nzo, add_to_history=False, cleanup=False)
-        res, nzo_ids = process_single_nzb(nzo.filename, nzb_path, keep=True, reuse=nzo.downpath, nzo_id=nzo.nzo_id)
+        # Store old position and create new NZO
+        old_position = self.__nzo_list.index(old_nzo)
+        res, nzo_ids = process_single_nzb(
+            old_nzo.filename, nzb_path, keep=True, reuse=old_nzo.downpath, nzo_id=old_nzo.nzo_id
+        )
         if res == 0 and nzo_ids:
+            # Swap to old position
+            new_nzo = self.get_nzo(nzo_ids[0])
+            self.__nzo_list.remove(new_nzo)
+            self.__nzo_list.insert(old_position, new_nzo)
             # Reset reuse flag to make pause/abort on encryption possible
             self.__nzo_table[nzo_ids[0]].reuse = None

@@ -776,10 +782,9 @@ class NzbQueue:
 
     def end_job(self, nzo):
         """ Send NZO to the post-processing queue """
-        logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
-
         # Notify assembler to call postprocessor
         if not nzo.deleted:
+            logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
             nzo.deleted = True
             if nzo.precheck:
                 nzo.save_to_disk()

@@ -78,6 +78,7 @@ from sabnzbd.filesystem import (
     remove_file,
     get_filepath,
     make_script_path,
+    globber,
 )
 from sabnzbd.decorators import synchronized
 import sabnzbd.config as config

@@ -356,15 +357,15 @@ class NzbFile(TryList):
         self.valid = bool(raw_article_db)
 
         if self.valid and self.nzf_id:
-            # Save first article separate so we can do duplicate file detection
+            # Save first article separate so we can do
+            # duplicate file detection and deobfuscate-during-download
             first_article = self.add_article(raw_article_db.pop(0))
             first_article.lowest_partnum = True
+            self.nzo.first_articles.append(first_article)
+            self.nzo.first_articles_count += 1
 
-            # For non-par2 files we also use it to do deobfuscate-during-download
-            # And we count how many bytes are available for repair
+            # Count how many bytes are available for repair
             if sabnzbd.par2file.is_parfile(self.filename):
-                self.nzo.first_articles.append(first_article)
-                self.nzo.first_articles_count += 1
                 self.nzo.bytes_par2 += self.bytes
 
         # Any articles left?

@@ -382,7 +383,7 @@ class NzbFile(TryList):
         if raw_article_db:
             # Convert 2.x.x jobs
             if isinstance(raw_article_db, dict):
-                raw_article_db = [raw_article_db[partnum] for partnum in sorted(raw_article_db.keys())]
+                raw_article_db = [raw_article_db[partnum] for partnum in sorted(raw_article_db)]
 
             for raw_article in raw_article_db:
                 self.add_article(raw_article)
@@ -472,7 +473,7 @@ class NzbFile(TryList):
 
         # Convert 2.x.x jobs
         if isinstance(self.decodetable, dict):
-            self.decodetable = [self.decodetable[partnum] for partnum in sorted(self.decodetable.keys())]
+            self.decodetable = [self.decodetable[partnum] for partnum in sorted(self.decodetable)]
 
         # Set non-transferable values
         self.md5 = None

@@ -910,7 +911,6 @@ class NzbObject(TryList):
         # to history we first need an nzo_id by entering the NzbQueue
         if accept == 2:
-            self.deleted = True
             self.status = Status.FAILED
             sabnzbd.NzbQueue.do.add(self, quiet=True)
             sabnzbd.NzbQueue.do.end_job(self)
             # Raise error, so it's not added
@@ -1173,8 +1173,6 @@ class NzbObject(TryList):
 
         # Abort the job due to failure
         if not job_can_succeed:
-            # Set the nzo status to return "Queued"
-            self.status = Status.QUEUED
             self.set_download_report()
             self.fail_msg = T("Aborted, cannot be completed") + " - https://sabnzbd.org/not-complete"
             self.set_unpack_info("Download", self.fail_msg, unique=False)
@@ -1184,8 +1182,6 @@ class NzbObject(TryList):
         post_done = False
         if not self.files:
             post_done = True
-            # set the nzo status to return "Queued"
-            self.status = Status.QUEUED
             self.set_download_report()
 
         return articles_left, file_done, post_done

@@ -1207,8 +1203,8 @@ class NzbObject(TryList):
         """ Check if downloaded files already exits, for these set NZF to complete """
         fix_unix_encoding(wdir)
 
-        # Get a list of already present files
-        files = [f for f in os.listdir(wdir) if os.path.isfile(f)]
+        # Get a list of already present files, ignore folders
+        files = globber(wdir, "*.*")
 
         # Substitute renamed files
         renames = sabnzbd.load_data(RENAMES_FILE, self.workpath, remove=True)
@@ -1232,6 +1228,7 @@ class NzbObject(TryList):
             for nzf in nzfs:
                 subject = sanitize_filename(name_extractor(nzf.subject))
                 if (nzf.filename == filename) or (subject == filename) or (filename in subject):
+                    logging.info("Existing file %s matched to file %s of %s", filename, nzf.filename, self.final_name)
                     nzf.filename = filename
                     nzf.bytes_left = 0
                     self.remove_nzf(nzf)
@@ -1254,25 +1251,25 @@ class NzbObject(TryList):
             for filename in files:
                 # Create NZO's using basic information
                 filepath = os.path.join(wdir, filename)
                 if os.path.exists(filepath):
                     tup = os.stat(filepath)
                     tm = datetime.datetime.fromtimestamp(tup.st_mtime)
                     nzf = NzbFile(tm, filename, [], tup.st_size, self)
                     self.files.append(nzf)
                     self.files_table[nzf.nzf_id] = nzf
                     nzf.filename = filename
                     self.remove_nzf(nzf)
-                    logging.info("Existing file %s added to %s", filename, self.final_name)
 
                     # Set bytes correctly
                     self.bytes += nzf.bytes
                     self.bytes_tried += nzf.bytes
                     self.bytes_downloaded += nzf.bytes
 
                     # Process par2 files
                     if sabnzbd.par2file.is_parfile(filepath):
                         self.handle_par2(nzf, filepath)
                         self.bytes_par2 += nzf.bytes
+                    logging.info("Existing file %s added to job", filename)
         except:
             logging.error(T("Error importing %s"), self.final_name)
             logging.info("Traceback: ", exc_info=True)

@@ -1496,7 +1493,7 @@ class NzbObject(TryList):
         # Sort the servers first
         servers = config.get_servers()
         server_names = sorted(
-            servers.keys(),
+            servers,
             key=lambda svr: "%d%02d%s"
             % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()),
         )
@@ -1612,7 +1609,7 @@ class NzbObject(TryList):
 
         pos_nzf_table = self.build_pos_nzf_table(nzf_ids)
 
-        keys = list(pos_nzf_table.keys())
+        keys = list(pos_nzf_table)
         keys.sort()
 
         if target == keys:
@@ -1629,7 +1626,7 @@ class NzbObject(TryList):
 
         pos_nzf_table = self.build_pos_nzf_table(nzf_ids)
 
-        keys = list(pos_nzf_table.keys())
+        keys = list(pos_nzf_table)
         keys.sort()
 
         if target == keys:
@@ -1705,8 +1702,11 @@ class NzbObject(TryList):
             self.renamed_file(yenc_filename, nzf.filename)
             nzf.filename = yenc_filename
 
+    @synchronized(NZO_LOCK)
     def verify_all_filenames_and_resort(self):
-        """ Verify all filenames based on par2 info and then re-sort files """
+        """Verify all filenames based on par2 info and then re-sort files.
+        Locked so all files are verified at once without interuptions.
+        """
         logging.info("Checking all filenames for %s", self.final_name)
         for nzf_verify in self.files:
             self.verify_nzf_filename(nzf_verify)

@@ -1891,13 +1891,17 @@ class NzbObject(TryList):
         for attrib in NzoAttributeSaver:
             attribs[attrib] = getattr(self, attrib)
         logging.debug("Saving attributes %s for %s", attribs, self.final_name)
-        sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath)
+        sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath, silent=True)
 
     def load_attribs(self):
         """ Load saved attributes and return them to be parsed """
         attribs = sabnzbd.load_data(ATTRIB_FILE, self.workpath, remove=False)
         logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
 
+        # If attributes file somehow does not exists
+        if not attribs:
+            return None, None, None
+
         # Only a subset we want to apply directly to the NZO
         for attrib in ("final_name", "priority", "password", "url"):
             # Only set if it is present and has a value

@@ -2070,16 +2074,16 @@ def nzf_cmp_name(nzf1, nzf2):
 
 
 def create_work_name(name):
-    """ Remove ".nzb" and ".par(2)" and sanitize """
-    strip_ext = [".nzb", ".par", ".par2"]
-    name = sanitize_foldername(name.strip())
-    name_base, ext = os.path.splitext(name)
-    # In case it was one of these, there might be more
-    while ext.lower() in strip_ext:
-        name = name_base
-        name_base, ext = os.path.splitext(name)
-    return name.strip()
+    """ Remove ".nzb" and ".par(2)" and sanitize, skip URL's """
+    if name.find("://") < 0:
+        # Need to remove any invalid characters before starting
+        name_base, ext = os.path.splitext(sanitize_foldername(name))
+        # In case it was one of these, there might be more
+        while ext.lower() in (".nzb", ".par", ".par2"):
+            name = name_base
+            name_base, ext = os.path.splitext(name)
+        # And make sure we remove invalid characters again
+        return sanitize_foldername(name)
+    else:
+        return name.strip()

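The rewrite re-sanitizes after stripping and skips URLs entirely. Note that `os.path.splitext` peels off only one extension per call, which is why the loop is needed; a quick check:

```python
import os

name = "my_awesome_nzb_file.pAr2.nZb"
base, ext = os.path.splitext(name)  # -> ("my_awesome_nzb_file.pAr2", ".nZb")
while ext.lower() in (".nzb", ".par", ".par2"):
    name = base
    base, ext = os.path.splitext(name)
print(name)  # my_awesome_nzb_file
```
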
@@ -234,7 +234,7 @@ class SABnzbdDelegate(NSObject):
             100: "100%",
         }
 
-        for speed in sorted(speeds.keys()):
+        for speed in sorted(speeds):
             menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
                 "%s" % (speeds[speed]), "speedlimitAction:", ""
             )

@@ -166,6 +166,8 @@ class PostProcessor(Thread):
 
     def process(self, nzo):
         """ Push on finished job in the queue """
+        # Make sure we return the status "Waiting"
+        nzo.status = Status.QUEUED
         if nzo not in self.history_queue:
             self.history_queue.append(nzo)
 
@@ -327,7 +329,8 @@ def process_job(nzo):
     # Get the NZB name
     filename = nzo.final_name
 
-    if nzo.fail_msg:  # Special case: aborted due to too many missing data
+    # Download-processes can mark job as failed
+    if nzo.fail_msg:
         nzo.status = Status.FAILED
         nzo.save_attribs()
         all_ok = False
@@ -959,7 +962,7 @@ def rar_renamer(nzo, workdir):
     # So, all rar files with rarvolnr 1, find the contents (files inside the rar),
     # and match with rarfiles with rarvolnr 2, and put them in the correct rarset.
     # And so on, until the highest rarvolnr minus 1 matched against highest rarvolnr
-    for n in range(1, len(rarvolnr.keys())):
+    for n in range(1, len(rarvolnr)):
         logging.debug("Deobfuscate: Finding matches between rar sets %s and %s" % (n, n + 1))
         for base_obfuscated_filename in rarvolnr[n]:
             matchcounter = 0

@@ -24,6 +24,7 @@ import logging
 import time
 import datetime
 import threading
+import urllib.parse
 
 import sabnzbd
 from sabnzbd.constants import RSS_FILE_NAME, DEFAULT_PRIORITY, DUP_PRIORITY
@@ -150,8 +151,7 @@ def remove_obsolete(jobs, new_jobs):
     """
     now = time.time()
     limit = now - 259200  # 3 days (3x24x3600)
-    olds = list(jobs.keys())
-    for old in olds:
+    for old in list(jobs):
         tm = jobs[old]["time"]
         if old not in new_jobs:
             if jobs[old].get("status", " ")[0] in ("G", "B"):
@@ -177,7 +177,7 @@ class RSSQueue:
             self.jobs = sabnzbd.load_admin(RSS_FILE_NAME)
             if self.jobs:
                 for feed in self.jobs:
-                    remove_obsolete(self.jobs[feed], list(self.jobs[feed].keys()))
+                    remove_obsolete(self.jobs[feed], list(self.jobs[feed]))
         except:
             logging.warning(T("Cannot read %s"), RSS_FILE_NAME)
             logging.info("Traceback: ", exc_info=True)
@@ -277,44 +277,49 @@ class RSSQueue:
         feedparser.USER_AGENT = "SABnzbd/%s" % sabnzbd.__version__
 
         # Read the RSS feed
-        msg = None
-        entries = None
+        msg = ""
+        entries = []
         if readout:
             all_entries = []
             for uri in uris:
-                uri = uri.replace(" ", "%20")
+                # Reset parsing message for each feed
+                msg = ""
+                feed_parsed = {}
+                uri = uri.replace(" ", "%20").replace("feed://", "http://")
                 logging.debug("Running feedparser on %s", uri)
-                feed_parsed = feedparser.parse(uri.replace("feed://", "http://"))
-                logging.debug("Done parsing %s", uri)
-
-                if not feed_parsed:
-                    msg = T("Failed to retrieve RSS from %s: %s") % (uri, "?")
-                    logging.info(msg)
+                try:
+                    feed_parsed = feedparser.parse(uri)
+                except Exception as feedparser_exc:
+                    # Feedparser 5 would catch all errors, while 6 just throws them back at us
+                    feed_parsed["bozo_exception"] = feedparser_exc
+                logging.debug("Finished parsing %s", uri)
 
                 status = feed_parsed.get("status", 999)
                 if status in (401, 402, 403):
                     msg = T("Do not have valid authentication for feed %s") % uri
-                    logging.info(msg)
-
-                if 500 <= status <= 599:
+                elif 500 <= status <= 599:
                     msg = T("Server side error (server code %s); could not get %s on %s") % (status, feed, uri)
-                    logging.info(msg)
 
-                entries = feed_parsed.get("entries")
+                entries = feed_parsed.get("entries", [])
                 if not entries and "feed" in feed_parsed and "error" in feed_parsed["feed"]:
                     msg = T("Failed to retrieve RSS from %s: %s") % (uri, feed_parsed["feed"]["error"])
 
                 # Exception was thrown
                 if "bozo_exception" in feed_parsed and not entries:
                     msg = str(feed_parsed["bozo_exception"])
                     if "CERTIFICATE_VERIFY_FAILED" in msg:
                         msg = T("Server %s uses an untrusted HTTPS certificate") % get_base_url(uri)
                         msg += " - https://sabnzbd.org/certificate-errors"
-                        logging.error(msg)
                     elif "href" in feed_parsed and feed_parsed["href"] != uri and "login" in feed_parsed["href"]:
                         # Redirect to login page!
                         msg = T("Do not have valid authentication for feed %s") % uri
                     else:
                         msg = T("Failed to retrieve RSS from %s: %s") % (uri, msg)
-                    logging.info(msg)
 
-                if not entries and not msg:
+                if msg:
+                    # We need to escape any "%20" that could be in the warning due to the URL's
+                    logging.warning_helpful(urllib.parse.unquote(msg))
+                elif not entries:
                     msg = T("RSS Feed %s was empty") % uri
                     logging.info(msg)
                 all_entries.extend(entries)

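The `try/except` around `feedparser.parse` papers over a behavior change between feedparser 5 and 6: per the inline comment, version 6 re-raises network and parsing errors that version 5 used to swallow into `bozo_exception`. A hedged sketch of the same compatibility shim in isolation:

```python
import feedparser  # sketch assumes feedparser 6.x is installed

def parse_feed(uri):
    """Emulate feedparser 5: never raise, surface failures as bozo_exception."""
    try:
        return feedparser.parse(uri)
    except Exception as exc:
        return {"bozo_exception": exc, "entries": []}
```
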
@@ -532,7 +532,7 @@ def check_for_sequence(regex, files):
             prefix = name[: match1.start()]
 
     # Don't do anything if only one or no files matched
-    if len(list(matches.keys())) < 2:
+    if len(list(matches)) < 2:
         return {}
 
     key_prev = 0
@@ -540,7 +540,7 @@ def check_for_sequence(regex, files):
     alphabet = "abcdefghijklmnopqrstuvwxyz"
 
     # Check the dictionary to see if the keys are in a numeric or alphabetic sequence
-    for akey in sorted(matches.keys()):
+    for akey in sorted(matches):
         if akey.isdigit():
             key = int(akey)
         elif akey in alphabet:

@@ -318,7 +318,7 @@ class URLGrabber(Thread):
                     msg = T("URL Fetching failed; %s") % msg
 
                     # Mark as failed
                     nzo.status = Status.FAILED
-                    nzo.set_unpack_info("Source", msg)
+                    nzo.fail_msg = msg
 
                     notifier.send_notification(T("URL Fetching failed; %s") % "", "%s\n%s" % (msg, url), "other", nzo.cat)

scripts/Deobfuscate.py (8 changed lines, normal file → executable file)

@@ -221,5 +221,13 @@ if run_renamer:
 else:
     print("No par2 files or large files found")
 
+# Note about the new option
+print(
+    "The features of Deobfuscate.py are now integrated into SABnzbd! "
+    + "Just enable 'Deobfuscate final filenames' in Config - Switches. "
+    + "Don't forget to disable this script when you enable the new option! "
+    + "This script will be removed in the next version of SABnzbd."
+)
 
 # Always exit with success-code
 sys.exit(0)

@@ -55,7 +55,7 @@ class TestNZO:
     # TODO: More checks!
 
 
-class TestScanPassword:
+class TestNZBStuffHelpers:
     def test_scan_passwords(self):
         file_names = {
             "my_awesome_nzb_file{{password}}": "password",
@@ -77,3 +77,20 @@ class TestScanPassword:
 
         for file_name, clean_file_name in file_names.items():
             assert nzbstuff.scan_password(file_name)[0] == clean_file_name
+
+    def test_create_work_name(self):
+        # Only test stuff specific for create_work_name
+        # The sanitizing is already tested in tests for sanitize_foldername
+        file_names = {
+            "my_awesome_nzb_file.pAr2.nZb": "my_awesome_nzb_file",
+            "my_awesome_nzb_file.....pAr2.nZb": "my_awesome_nzb_file",
+            "my_awesome_nzb_file....par2..": "my_awesome_nzb_file",
+            " my_awesome_nzb_file .pAr.nZb": "my_awesome_nzb_file",
+            "with.extension.and.period.par2.": "with.extension.and.period",
+            "nothing.in.here": "nothing.in.here",
+            " just.space ": "just.space",
+            "http://test.par2 ": "http://test.par2",
+        }
+
+        for file_name, clean_file_name in file_names.items():
+            assert nzbstuff.create_work_name(file_name) == clean_file_name