mirror of
https://github.com/sabnzbd/sabnzbd.git
synced 2025-12-26 17:17:54 -05:00
Compare commits
72 Commits
bugfix/nor
...
3.1.0
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ae96d93f94 | ||
|
|
8522c40c8f | ||
|
|
23f86e95f1 | ||
|
|
eed2045189 | ||
|
|
217785bf0f | ||
|
|
6aef50dc5d | ||
|
|
16b6e3caa7 | ||
|
|
3de4c99a8a | ||
|
|
980aa19a75 | ||
|
|
fb4b57e056 | ||
|
|
03638365ea | ||
|
|
157cb1c83d | ||
|
|
e51f11c2b1 | ||
|
|
1ad0961dd8 | ||
|
|
46ff7dd4e2 | ||
|
|
8b067df914 | ||
|
|
ef43b13272 | ||
|
|
e8e9974224 | ||
|
|
feebbb9f04 | ||
|
|
bc4f06dd1d | ||
|
|
971e4fc909 | ||
|
|
51cc765949 | ||
|
|
19c6a4fffa | ||
|
|
105ac32d2f | ||
|
|
57550675d2 | ||
|
|
e674abc5c0 | ||
|
|
f965c96f51 | ||
|
|
c76b8ed9e0 | ||
|
|
4fbd0d8a7b | ||
|
|
2186c0fff6 | ||
|
|
1adca9a9c1 | ||
|
|
9408353f2b | ||
|
|
84f4d453d2 | ||
|
|
d10209f2a1 | ||
|
|
3ae149c72f | ||
|
|
47385acc3b | ||
|
|
814eeaa900 | ||
|
|
5f2ea13aad | ||
|
|
41ca217931 | ||
|
|
b57d36e8dd | ||
|
|
9a4be70734 | ||
|
|
a8443595a6 | ||
|
|
fd0a70ac58 | ||
|
|
8a8685c968 | ||
|
|
9e6cb8da8e | ||
|
|
054ec54d51 | ||
|
|
272ce773cb | ||
|
|
050b925f7b | ||
|
|
0087940898 | ||
|
|
e323c014f9 | ||
|
|
cc465c7554 | ||
|
|
14cb37564f | ||
|
|
094db56c3b | ||
|
|
aabb709b8b | ||
|
|
0833dd2db9 | ||
|
|
cd3f912be4 | ||
|
|
665c516db6 | ||
|
|
b670da9fa0 | ||
|
|
80bee9bffe | ||
|
|
d85a70e8ad | ||
|
|
8f21533e76 | ||
|
|
89996482a1 | ||
|
|
03c10dce91 | ||
|
|
bd5331be05 | ||
|
|
46e1645289 | ||
|
|
4ce3965747 | ||
|
|
9d4af19db3 | ||
|
|
48e034f4be | ||
|
|
f8959baa2f | ||
|
|
8ed5997eae | ||
|
|
daf9f50ac8 | ||
|
|
6b11013c1a |
4
PKG-INFO
4
PKG-INFO
@@ -1,7 +1,7 @@
|
||||
Metadata-Version: 1.0
|
||||
Name: SABnzbd
|
||||
Version: 3.1.0RC1
|
||||
Summary: SABnzbd-3.1.0RC1
|
||||
Version: 3.1.0
|
||||
Summary: SABnzbd-3.1.0
|
||||
Home-page: https://sabnzbd.org
|
||||
Author: The SABnzbd Team
|
||||
Author-email: team@sabnzbd.org
|
||||
|
||||
17
README.mkd
17
README.mkd
@@ -1,24 +1,20 @@
|
||||
Release Notes - SABnzbd 3.1.0 Release Candidate 1
|
||||
Release Notes - SABnzbd 3.1.0
|
||||
=========================================================
|
||||
|
||||
## Changes and bugfixes since 3.1.0 Beta 2
|
||||
- Deobfuscate final filenames can now be used when job folders are disabled.
|
||||
- Deobfuscate final filenames will ignore blu-ray disc files.
|
||||
- Clear error if Complete Folder is set as a subfolder of the Temporary Folder.
|
||||
- Filtering of history by category would not filter jobs in post-processing.
|
||||
|
||||
## Changes since 3.0.2
|
||||
- Added option to automatically deobfuscate final filenames: after unpacking,
|
||||
detect and rename obfuscated or meaningless filenames to the job name,
|
||||
similar to the Deobfuscate.py post-processing script.
|
||||
similar to the `Deobfuscate.py` post-processing script.
|
||||
- Switched to Transifex as our translations platform:
|
||||
Help us translate SABnzbd in your language! Add untranslated texts or
|
||||
improved existing translations here: https://sabnzbd.org/wiki/translate
|
||||
- Redesigned job availability-check to be more efficient and reliable.
|
||||
- Scheduled readouts of RSS-feeds would fail silently, they now show a warning.
|
||||
- Skip repair on Retry if all sets were previously successfully verified.
|
||||
- Passwords included in the filename no longer have to be at the end.
|
||||
- Restore limit on length of foldernames (`max_foldername_length`).
|
||||
- Added password input box on the Add NZB screen.
|
||||
- Clear error if `Complete Folder` is set as a subfolder of the `Temporary Folder`.
|
||||
- Show warning that Python 3.5 support will be dropped after 3.1.0.
|
||||
- Windows/macOS: update UnRar to 5.91 and MultiPar to 1.3.1.0.
|
||||
- Windows: retry `Access Denied` when renaming files on Windows.
|
||||
@@ -27,12 +23,17 @@ Release Notes - SABnzbd 3.1.0 Release Candidate 1
|
||||
- Assembler crashes could occur due to race condition in `ArticleCache`.
|
||||
- On HTTP-redirects the scheme/hostname/port were ignored when behind a proxy.
|
||||
- Strip slash of the end of `url_base` as it could break other code.
|
||||
- `Temporary Folder` with unicode characters could result in duplicate unpacking.
|
||||
- Unpacking with a relative folder set for a category could fail.
|
||||
- Existing files were not parsed when retrying a job.
|
||||
- Reading attributes when retrying a job could result in crash.
|
||||
- Paused priority of pre-queue script was ignored.
|
||||
- Duplicate Detection did not check filenames in History.
|
||||
- Downloaded bytes could show as exceeding the total bytes of a job.
|
||||
- Filtering of history by category would not filter jobs in post-processing.
|
||||
- Windows: non-Latin languages were displayed incorrectly in the installer.
|
||||
- Windows: could fail to create folders on some network shares.
|
||||
- Windows: folders could end in a period, breaking Windows Explorer.
|
||||
|
||||
## Upgrade notices
|
||||
- Jobs that failed on versions before 3.1.x, will throw an error about the
|
||||
|
||||
@@ -1659,7 +1659,8 @@ def handle_windows_service():
|
||||
"""
|
||||
# Detect if running as Windows Service (only Vista and above!)
|
||||
# Adapted from https://stackoverflow.com/a/55248281/5235502
|
||||
if win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
|
||||
# Only works when run from the exe-files
|
||||
if hasattr(sys, "frozen") and win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
|
||||
servicemanager.Initialize()
|
||||
servicemanager.PrepareToHostSingle(SABnzbd)
|
||||
servicemanager.StartServiceCtrlDispatcher()
|
||||
|
||||
@@ -151,7 +151,7 @@ class Status:
|
||||
GRABBING = "Grabbing" # Q: Getting an NZB from an external site
|
||||
MOVING = "Moving" # PP: Files are being moved
|
||||
PAUSED = "Paused" # Q: Job is paused
|
||||
QUEUED = "Queued" # Q: Job is waiting for its turn to download
|
||||
QUEUED = "Queued" # Q: Job is waiting for its turn to download or post-process
|
||||
QUICK_CHECK = "QuickCheck" # PP: QuickCheck verification is running
|
||||
REPAIRING = "Repairing" # PP: Job is being repaired (by par2)
|
||||
RUNNING = "Running" # PP: User's post processing script is running
|
||||
|
||||
@@ -1018,16 +1018,13 @@ class QueuePage:
|
||||
class HistoryPage:
|
||||
def __init__(self, root):
|
||||
self.__root = root
|
||||
self.__failed_only = False
|
||||
|
||||
@secured_expose
|
||||
def index(self, **kwargs):
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
search = kwargs.get("search")
|
||||
failed_only = kwargs.get("failed_only")
|
||||
if failed_only is None:
|
||||
failed_only = self.__failed_only
|
||||
failed_only = int_conv(kwargs.get("failed_only"))
|
||||
|
||||
history = build_header()
|
||||
history["failed_only"] = failed_only
|
||||
|
||||
@@ -1976,8 +1976,9 @@ def create_env(nzo=None, extra_env_fields={}):
|
||||
|
||||
|
||||
def rar_volumelist(rarfile_path, password, known_volumes):
|
||||
"""Extract volumes that are part of this rarset
|
||||
and merge them with existing list, removing duplicates
|
||||
"""List volumes that are part of this rarset
|
||||
and merge them with parsed paths list, removing duplicates.
|
||||
We assume RarFile is right and use parsed paths as backup.
|
||||
"""
|
||||
# UnRar is required to read some RAR files
|
||||
# RarFile can fail in special cases
|
||||
@@ -1996,12 +1997,12 @@ def rar_volumelist(rarfile_path, password, known_volumes):
|
||||
zf_volumes = []
|
||||
|
||||
# Remove duplicates
|
||||
known_volumes_base = [os.path.basename(vol) for vol in known_volumes]
|
||||
for zf_volume in zf_volumes:
|
||||
if os.path.basename(zf_volume) not in known_volumes_base:
|
||||
zf_volumes_base = [os.path.basename(vol) for vol in zf_volumes]
|
||||
for known_volume in known_volumes:
|
||||
if os.path.basename(known_volume) not in zf_volumes_base:
|
||||
# Long-path notation just to be sure
|
||||
known_volumes.append(long_path(zf_volume))
|
||||
return known_volumes
|
||||
zf_volumes.append(long_path(known_volume))
|
||||
return zf_volumes
|
||||
|
||||
|
||||
# Sort the various RAR filename formats properly :\
|
||||
|
||||
@@ -204,18 +204,24 @@ class NzbQueue:
|
||||
return nzo_id
|
||||
|
||||
@NzbQueueLocker
|
||||
def send_back(self, nzo):
|
||||
def send_back(self, old_nzo):
|
||||
""" Send back job to queue after successful pre-check """
|
||||
try:
|
||||
nzb_path = globber_full(nzo.workpath, "*.gz")[0]
|
||||
nzb_path = globber_full(old_nzo.workpath, "*.gz")[0]
|
||||
except:
|
||||
logging.info("Failed to find NZB file after pre-check (%s)", nzo.nzo_id)
|
||||
logging.info("Failed to find NZB file after pre-check (%s)", old_nzo.nzo_id)
|
||||
return
|
||||
|
||||
# Need to remove it first, otherwise it might still be downloading
|
||||
self.remove(nzo, add_to_history=False, cleanup=False)
|
||||
res, nzo_ids = process_single_nzb(nzo.filename, nzb_path, keep=True, reuse=nzo.downpath, nzo_id=nzo.nzo_id)
|
||||
# Store old position and create new NZO
|
||||
old_position = self.__nzo_list.index(old_nzo)
|
||||
res, nzo_ids = process_single_nzb(
|
||||
old_nzo.filename, nzb_path, keep=True, reuse=old_nzo.downpath, nzo_id=old_nzo.nzo_id
|
||||
)
|
||||
if res == 0 and nzo_ids:
|
||||
# Swap to old position
|
||||
new_nzo = self.get_nzo(nzo_ids[0])
|
||||
self.__nzo_list.remove(new_nzo)
|
||||
self.__nzo_list.insert(old_position, new_nzo)
|
||||
# Reset reuse flag to make pause/abort on encryption possible
|
||||
self.__nzo_table[nzo_ids[0]].reuse = None
|
||||
|
||||
@@ -776,10 +782,9 @@ class NzbQueue:
|
||||
|
||||
def end_job(self, nzo):
|
||||
""" Send NZO to the post-processing queue """
|
||||
logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
|
||||
|
||||
# Notify assembler to call postprocessor
|
||||
if not nzo.deleted:
|
||||
logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
|
||||
nzo.deleted = True
|
||||
if nzo.precheck:
|
||||
nzo.save_to_disk()
|
||||
|
||||
@@ -78,6 +78,7 @@ from sabnzbd.filesystem import (
|
||||
remove_file,
|
||||
get_filepath,
|
||||
make_script_path,
|
||||
globber,
|
||||
)
|
||||
from sabnzbd.decorators import synchronized
|
||||
import sabnzbd.config as config
|
||||
@@ -910,7 +911,6 @@ class NzbObject(TryList):
|
||||
# to history we first need an nzo_id by entering the NzbQueue
|
||||
if accept == 2:
|
||||
self.deleted = True
|
||||
self.status = Status.FAILED
|
||||
sabnzbd.NzbQueue.do.add(self, quiet=True)
|
||||
sabnzbd.NzbQueue.do.end_job(self)
|
||||
# Raise error, so it's not added
|
||||
@@ -1173,8 +1173,6 @@ class NzbObject(TryList):
|
||||
|
||||
# Abort the job due to failure
|
||||
if not job_can_succeed:
|
||||
# Set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
self.fail_msg = T("Aborted, cannot be completed") + " - https://sabnzbd.org/not-complete"
|
||||
self.set_unpack_info("Download", self.fail_msg, unique=False)
|
||||
@@ -1184,8 +1182,6 @@ class NzbObject(TryList):
|
||||
post_done = False
|
||||
if not self.files:
|
||||
post_done = True
|
||||
# set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
|
||||
return articles_left, file_done, post_done
|
||||
@@ -1207,8 +1203,8 @@ class NzbObject(TryList):
|
||||
""" Check if downloaded files already exits, for these set NZF to complete """
|
||||
fix_unix_encoding(wdir)
|
||||
|
||||
# Get a list of already present files
|
||||
files = [f for f in os.listdir(wdir) if os.path.isfile(f)]
|
||||
# Get a list of already present files, ignore folders
|
||||
files = globber(wdir, "*.*")
|
||||
|
||||
# Substitute renamed files
|
||||
renames = sabnzbd.load_data(RENAMES_FILE, self.workpath, remove=True)
|
||||
@@ -1232,6 +1228,7 @@ class NzbObject(TryList):
|
||||
for nzf in nzfs:
|
||||
subject = sanitize_filename(name_extractor(nzf.subject))
|
||||
if (nzf.filename == filename) or (subject == filename) or (filename in subject):
|
||||
logging.info("Existing file %s matched to file %s of %s", filename, nzf.filename, self.final_name)
|
||||
nzf.filename = filename
|
||||
nzf.bytes_left = 0
|
||||
self.remove_nzf(nzf)
|
||||
@@ -1254,25 +1251,25 @@ class NzbObject(TryList):
|
||||
for filename in files:
|
||||
# Create NZO's using basic information
|
||||
filepath = os.path.join(wdir, filename)
|
||||
if os.path.exists(filepath):
|
||||
tup = os.stat(filepath)
|
||||
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
|
||||
nzf = NzbFile(tm, filename, [], tup.st_size, self)
|
||||
self.files.append(nzf)
|
||||
self.files_table[nzf.nzf_id] = nzf
|
||||
nzf.filename = filename
|
||||
self.remove_nzf(nzf)
|
||||
logging.info("Existing file %s added to %s", filename, self.final_name)
|
||||
tup = os.stat(filepath)
|
||||
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
|
||||
nzf = NzbFile(tm, filename, [], tup.st_size, self)
|
||||
self.files.append(nzf)
|
||||
self.files_table[nzf.nzf_id] = nzf
|
||||
nzf.filename = filename
|
||||
self.remove_nzf(nzf)
|
||||
|
||||
# Set bytes correctly
|
||||
self.bytes += nzf.bytes
|
||||
self.bytes_tried += nzf.bytes
|
||||
self.bytes_downloaded += nzf.bytes
|
||||
# Set bytes correctly
|
||||
self.bytes += nzf.bytes
|
||||
self.bytes_tried += nzf.bytes
|
||||
self.bytes_downloaded += nzf.bytes
|
||||
|
||||
# Process par2 files
|
||||
if sabnzbd.par2file.is_parfile(filepath):
|
||||
self.handle_par2(nzf, filepath)
|
||||
self.bytes_par2 += nzf.bytes
|
||||
|
||||
# Process par2 files
|
||||
if sabnzbd.par2file.is_parfile(filepath):
|
||||
self.handle_par2(nzf, filepath)
|
||||
self.bytes_par2 += nzf.bytes
|
||||
logging.info("Existing file %s added to job", filename)
|
||||
except:
|
||||
logging.error(T("Error importing %s"), self.final_name)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
@@ -1891,13 +1888,17 @@ class NzbObject(TryList):
|
||||
for attrib in NzoAttributeSaver:
|
||||
attribs[attrib] = getattr(self, attrib)
|
||||
logging.debug("Saving attributes %s for %s", attribs, self.final_name)
|
||||
sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath)
|
||||
sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath, silent=True)
|
||||
|
||||
def load_attribs(self):
|
||||
""" Load saved attributes and return them to be parsed """
|
||||
attribs = sabnzbd.load_data(ATTRIB_FILE, self.workpath, remove=False)
|
||||
logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
|
||||
|
||||
# If attributes file somehow does not exists
|
||||
if not attribs:
|
||||
return None, None, None
|
||||
|
||||
# Only a subset we want to apply directly to the NZO
|
||||
for attrib in ("final_name", "priority", "password", "url"):
|
||||
# Only set if it is present and has a value
|
||||
@@ -2070,16 +2071,16 @@ def nzf_cmp_name(nzf1, nzf2):
|
||||
|
||||
|
||||
def create_work_name(name):
|
||||
""" Remove ".nzb" and ".par(2)" and sanitize """
|
||||
strip_ext = [".nzb", ".par", ".par2"]
|
||||
name = sanitize_foldername(name.strip())
|
||||
""" Remove ".nzb" and ".par(2)" and sanitize, skip URL's """
|
||||
if name.find("://") < 0:
|
||||
name_base, ext = os.path.splitext(name)
|
||||
# In case it was one of these, there might be more
|
||||
while ext.lower() in strip_ext:
|
||||
# Need to remove any invalid characters before starting
|
||||
name_base, ext = os.path.splitext(sanitize_foldername(name))
|
||||
while ext.lower() in (".nzb", ".par", ".par2"):
|
||||
name = name_base
|
||||
name_base, ext = os.path.splitext(name)
|
||||
return name.strip()
|
||||
# And make sure we remove invalid characters again
|
||||
return sanitize_foldername(name)
|
||||
else:
|
||||
return name.strip()
|
||||
|
||||
|
||||
@@ -166,6 +166,8 @@ class PostProcessor(Thread):
|
||||
|
||||
def process(self, nzo):
|
||||
""" Push on finished job in the queue """
|
||||
# Make sure we return the status "Waiting"
|
||||
nzo.status = Status.QUEUED
|
||||
if nzo not in self.history_queue:
|
||||
self.history_queue.append(nzo)
|
||||
|
||||
@@ -327,7 +329,8 @@ def process_job(nzo):
|
||||
# Get the NZB name
|
||||
filename = nzo.final_name
|
||||
|
||||
if nzo.fail_msg: # Special case: aborted due to too many missing data
|
||||
# Download-processes can mark job as failed
|
||||
if nzo.fail_msg:
|
||||
nzo.status = Status.FAILED
|
||||
nzo.save_attribs()
|
||||
all_ok = False
|
||||
|
||||
@@ -24,6 +24,7 @@ import logging
|
||||
import time
|
||||
import datetime
|
||||
import threading
|
||||
import urllib.parse
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import RSS_FILE_NAME, DEFAULT_PRIORITY, DUP_PRIORITY
|
||||
@@ -277,44 +278,49 @@ class RSSQueue:
|
||||
feedparser.USER_AGENT = "SABnzbd/%s" % sabnzbd.__version__
|
||||
|
||||
# Read the RSS feed
|
||||
msg = None
|
||||
entries = None
|
||||
msg = ""
|
||||
entries = []
|
||||
if readout:
|
||||
all_entries = []
|
||||
for uri in uris:
|
||||
uri = uri.replace(" ", "%20")
|
||||
# Reset parsing message for each feed
|
||||
msg = ""
|
||||
feed_parsed = {}
|
||||
uri = uri.replace(" ", "%20").replace("feed://", "http://")
|
||||
logging.debug("Running feedparser on %s", uri)
|
||||
feed_parsed = feedparser.parse(uri.replace("feed://", "http://"))
|
||||
logging.debug("Done parsing %s", uri)
|
||||
|
||||
if not feed_parsed:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, "?")
|
||||
logging.info(msg)
|
||||
try:
|
||||
feed_parsed = feedparser.parse(uri)
|
||||
except Exception as feedparser_exc:
|
||||
# Feedparser 5 would catch all errors, while 6 just throws them back at us
|
||||
feed_parsed["bozo_exception"] = feedparser_exc
|
||||
logging.debug("Finished parsing %s", uri)
|
||||
|
||||
status = feed_parsed.get("status", 999)
|
||||
if status in (401, 402, 403):
|
||||
msg = T("Do not have valid authentication for feed %s") % uri
|
||||
logging.info(msg)
|
||||
|
||||
if 500 <= status <= 599:
|
||||
elif 500 <= status <= 599:
|
||||
msg = T("Server side error (server code %s); could not get %s on %s") % (status, feed, uri)
|
||||
logging.info(msg)
|
||||
|
||||
entries = feed_parsed.get("entries")
|
||||
entries = feed_parsed.get("entries", [])
|
||||
if not entries and "feed" in feed_parsed and "error" in feed_parsed["feed"]:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, feed_parsed["feed"]["error"])
|
||||
|
||||
# Exception was thrown
|
||||
if "bozo_exception" in feed_parsed and not entries:
|
||||
msg = str(feed_parsed["bozo_exception"])
|
||||
if "CERTIFICATE_VERIFY_FAILED" in msg:
|
||||
msg = T("Server %s uses an untrusted HTTPS certificate") % get_base_url(uri)
|
||||
msg += " - https://sabnzbd.org/certificate-errors"
|
||||
logging.error(msg)
|
||||
elif "href" in feed_parsed and feed_parsed["href"] != uri and "login" in feed_parsed["href"]:
|
||||
# Redirect to login page!
|
||||
msg = T("Do not have valid authentication for feed %s") % uri
|
||||
else:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, msg)
|
||||
logging.info(msg)
|
||||
|
||||
if not entries and not msg:
|
||||
if msg:
|
||||
# We need to escape any "%20" that could be in the warning due to the URL's
|
||||
logging.warning_helpful(urllib.parse.unquote(msg))
|
||||
elif not entries:
|
||||
msg = T("RSS Feed %s was empty") % uri
|
||||
logging.info(msg)
|
||||
all_entries.extend(entries)
|
||||
|
||||
@@ -318,7 +318,7 @@ class URLGrabber(Thread):
|
||||
msg = T("URL Fetching failed; %s") % msg
|
||||
|
||||
# Mark as failed
|
||||
nzo.status = Status.FAILED
|
||||
nzo.set_unpack_info("Source", msg)
|
||||
nzo.fail_msg = msg
|
||||
|
||||
notifier.send_notification(T("URL Fetching failed; %s") % "", "%s\n%s" % (msg, url), "other", nzo.cat)
|
||||
|
||||
@@ -4,5 +4,5 @@
|
||||
|
||||
# You MUST use double quotes (so " and not ')
|
||||
|
||||
__version__ = "3.1.0-develop"
|
||||
__baseline__ = "unknown"
|
||||
__version__ = "3.1.0"
|
||||
__baseline__ = "23f86e95f1f980963c4e4017276b3a3e2adfc6e2"
|
||||
|
||||
8
scripts/Deobfuscate.py
Normal file → Executable file
8
scripts/Deobfuscate.py
Normal file → Executable file
@@ -221,5 +221,13 @@ if run_renamer:
|
||||
else:
|
||||
print("No par2 files or large files found")
|
||||
|
||||
# Note about the new option
|
||||
print(
|
||||
"The features of Deobfuscate.py are now integrated into SABnzbd! "
|
||||
+ "Just enable 'Deobfuscate final filenames' in Config - Switches. "
|
||||
+ "Don't forget to disable this script when you enable the new option!"
|
||||
+ "This script will be removed in the next version of SABnzbd."
|
||||
)
|
||||
|
||||
# Always exit with success-code
|
||||
sys.exit(0)
|
||||
|
||||
@@ -55,7 +55,7 @@ class TestNZO:
|
||||
# TODO: More checks!
|
||||
|
||||
|
||||
class TestScanPassword:
|
||||
class TestNZBStuffHelpers:
|
||||
def test_scan_passwords(self):
|
||||
file_names = {
|
||||
"my_awesome_nzb_file{{password}}": "password",
|
||||
@@ -77,3 +77,20 @@ class TestScanPassword:
|
||||
|
||||
for file_name, clean_file_name in file_names.items():
|
||||
assert nzbstuff.scan_password(file_name)[0] == clean_file_name
|
||||
|
||||
def test_create_work_name(self):
|
||||
# Only test stuff specific for create_work_name
|
||||
# The sanitizing is already tested in tests for sanitize_foldername
|
||||
file_names = {
|
||||
"my_awesome_nzb_file.pAr2.nZb": "my_awesome_nzb_file",
|
||||
"my_awesome_nzb_file.....pAr2.nZb": "my_awesome_nzb_file",
|
||||
"my_awesome_nzb_file....par2..": "my_awesome_nzb_file",
|
||||
" my_awesome_nzb_file .pAr.nZb": "my_awesome_nzb_file",
|
||||
"with.extension.and.period.par2.": "with.extension.and.period",
|
||||
"nothing.in.here": "nothing.in.here",
|
||||
" just.space ": "just.space",
|
||||
"http://test.par2 ": "http://test.par2",
|
||||
}
|
||||
|
||||
for file_name, clean_file_name in file_names.items():
|
||||
assert nzbstuff.create_work_name(file_name) == clean_file_name
|
||||
|
||||
Reference in New Issue
Block a user