mirror of
https://github.com/sabnzbd/sabnzbd.git
synced 2025-12-30 11:09:22 -05:00
Compare commits
13 Commits
feature/pr
...
3.1.0RC3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6aef50dc5d | ||
|
|
16b6e3caa7 | ||
|
|
3de4c99a8a | ||
|
|
980aa19a75 | ||
|
|
fb4b57e056 | ||
|
|
03638365ea | ||
|
|
157cb1c83d | ||
|
|
e51f11c2b1 | ||
|
|
1ad0961dd8 | ||
|
|
46ff7dd4e2 | ||
|
|
8b067df914 | ||
|
|
ef43b13272 | ||
|
|
e8e9974224 |
4
PKG-INFO
4
PKG-INFO
@@ -1,7 +1,7 @@
|
||||
Metadata-Version: 1.0
|
||||
Name: SABnzbd
|
||||
Version: 3.1.0RC1
|
||||
Summary: SABnzbd-3.1.0RC1
|
||||
Version: 3.1.0RC3
|
||||
Summary: SABnzbd-3.1.0RC3
|
||||
Home-page: https://sabnzbd.org
|
||||
Author: The SABnzbd Team
|
||||
Author-email: team@sabnzbd.org
|
||||
|
||||
14
README.mkd
14
README.mkd
@@ -1,6 +1,18 @@
|
||||
Release Notes - SABnzbd 3.1.0 Release Candidate 1
|
||||
Release Notes - SABnzbd 3.1.0 Release Candidate 3
|
||||
=========================================================
|
||||
|
||||
## Changes and bugfixes since 3.1.0 Release Candidate 2
|
||||
- Jobs in post-processing could be left in the "Waiting"-status.
|
||||
- Notify users of `Deobfuscate.py` that it is now part of SABnzbd.
|
||||
|
||||
## Changes and bugfixes since 3.1.0 Release Candidate 1
|
||||
- Failing RSS-feeds would result in tracebacks, they now show a warning.
|
||||
- Existing files were not parsed when retrying a job.
|
||||
- Reading attributes when retrying a job could result in a crash.
|
||||
- Temporary Folder with unicode characters could result in duplicate unpacking.
|
||||
- Plush skin would only show failed jobs.
|
||||
- Windows: Folders could end in a period, breaking Windows Explorer.
|
||||
|
||||
## Changes and bugfixes since 3.1.0 Beta 2
|
||||
- Deobfuscate final filenames can now be used when job folders are disabled.
|
||||
- Deobfuscate final filenames will ignore blu-ray disc files.
|
||||
|
||||
@@ -1659,7 +1659,8 @@ def handle_windows_service():
|
||||
"""
|
||||
# Detect if running as Windows Service (only Vista and above!)
|
||||
# Adapted from https://stackoverflow.com/a/55248281/5235502
|
||||
if win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
|
||||
# Only works when run from the exe-files
|
||||
if hasattr(sys, "frozen") and win32ts.ProcessIdToSessionId(win32api.GetCurrentProcessId()) == 0:
|
||||
servicemanager.Initialize()
|
||||
servicemanager.PrepareToHostSingle(SABnzbd)
|
||||
servicemanager.StartServiceCtrlDispatcher()
|
||||
|
||||
@@ -151,7 +151,7 @@ class Status:
|
||||
GRABBING = "Grabbing" # Q: Getting an NZB from an external site
|
||||
MOVING = "Moving" # PP: Files are being moved
|
||||
PAUSED = "Paused" # Q: Job is paused
|
||||
QUEUED = "Queued" # Q: Job is waiting for its turn to download
|
||||
QUEUED = "Queued" # Q: Job is waiting for its turn to download or post-process
|
||||
QUICK_CHECK = "QuickCheck" # PP: QuickCheck verification is running
|
||||
REPAIRING = "Repairing" # PP: Job is being repaired (by par2)
|
||||
RUNNING = "Running" # PP: User's post processing script is running
|
||||
|
||||
@@ -1018,16 +1018,13 @@ class QueuePage:
|
||||
class HistoryPage:
|
||||
def __init__(self, root):
|
||||
self.__root = root
|
||||
self.__failed_only = False
|
||||
|
||||
@secured_expose
|
||||
def index(self, **kwargs):
|
||||
start = int_conv(kwargs.get("start"))
|
||||
limit = int_conv(kwargs.get("limit"))
|
||||
search = kwargs.get("search")
|
||||
failed_only = kwargs.get("failed_only")
|
||||
if failed_only is None:
|
||||
failed_only = self.__failed_only
|
||||
failed_only = int_conv(kwargs.get("failed_only"))
|
||||
|
||||
history = build_header()
|
||||
history["failed_only"] = failed_only
|
||||
|
||||
@@ -1976,8 +1976,9 @@ def create_env(nzo=None, extra_env_fields={}):
|
||||
|
||||
|
||||
def rar_volumelist(rarfile_path, password, known_volumes):
|
||||
"""Extract volumes that are part of this rarset
|
||||
and merge them with existing list, removing duplicates
|
||||
"""List volumes that are part of this rarset
|
||||
and merge them with parsed paths list, removing duplicates.
|
||||
We assume RarFile is right and use parsed paths as backup.
|
||||
"""
|
||||
# UnRar is required to read some RAR files
|
||||
# RarFile can fail in special cases
|
||||
@@ -1996,12 +1997,12 @@ def rar_volumelist(rarfile_path, password, known_volumes):
|
||||
zf_volumes = []
|
||||
|
||||
# Remove duplicates
|
||||
known_volumes_base = [os.path.basename(vol) for vol in known_volumes]
|
||||
for zf_volume in zf_volumes:
|
||||
if os.path.basename(zf_volume) not in known_volumes_base:
|
||||
zf_volumes_base = [os.path.basename(vol) for vol in zf_volumes]
|
||||
for known_volume in known_volumes:
|
||||
if os.path.basename(known_volume) not in zf_volumes_base:
|
||||
# Long-path notation just to be sure
|
||||
known_volumes.append(long_path(zf_volume))
|
||||
return known_volumes
|
||||
zf_volumes.append(long_path(known_volume))
|
||||
return zf_volumes
|
||||
|
||||
|
||||
# Sort the various RAR filename formats properly :\
|
||||
|
||||
@@ -776,10 +776,9 @@ class NzbQueue:
|
||||
|
||||
def end_job(self, nzo):
|
||||
""" Send NZO to the post-processing queue """
|
||||
logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
|
||||
|
||||
# Notify assembler to call postprocessor
|
||||
if not nzo.deleted:
|
||||
logging.info("[%s] Ending job %s", caller_name(), nzo.final_name)
|
||||
nzo.deleted = True
|
||||
if nzo.precheck:
|
||||
nzo.save_to_disk()
|
||||
|
||||
@@ -78,6 +78,7 @@ from sabnzbd.filesystem import (
|
||||
remove_file,
|
||||
get_filepath,
|
||||
make_script_path,
|
||||
globber,
|
||||
)
|
||||
from sabnzbd.decorators import synchronized
|
||||
import sabnzbd.config as config
|
||||
@@ -910,7 +911,6 @@ class NzbObject(TryList):
|
||||
# to history we first need an nzo_id by entering the NzbQueue
|
||||
if accept == 2:
|
||||
self.deleted = True
|
||||
self.status = Status.FAILED
|
||||
sabnzbd.NzbQueue.do.add(self, quiet=True)
|
||||
sabnzbd.NzbQueue.do.end_job(self)
|
||||
# Raise error, so it's not added
|
||||
@@ -1173,8 +1173,6 @@ class NzbObject(TryList):
|
||||
|
||||
# Abort the job due to failure
|
||||
if not job_can_succeed:
|
||||
# Set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
self.fail_msg = T("Aborted, cannot be completed") + " - https://sabnzbd.org/not-complete"
|
||||
self.set_unpack_info("Download", self.fail_msg, unique=False)
|
||||
@@ -1184,8 +1182,6 @@ class NzbObject(TryList):
|
||||
post_done = False
|
||||
if not self.files:
|
||||
post_done = True
|
||||
# set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
|
||||
return articles_left, file_done, post_done
|
||||
@@ -1207,8 +1203,8 @@ class NzbObject(TryList):
|
||||
""" Check if downloaded files already exits, for these set NZF to complete """
|
||||
fix_unix_encoding(wdir)
|
||||
|
||||
# Get a list of already present files
|
||||
files = [f for f in os.listdir(wdir) if os.path.isfile(f)]
|
||||
# Get a list of already present files, ignore folders
|
||||
files = globber(wdir, "*.*")
|
||||
|
||||
# Substitute renamed files
|
||||
renames = sabnzbd.load_data(RENAMES_FILE, self.workpath, remove=True)
|
||||
@@ -1232,6 +1228,7 @@ class NzbObject(TryList):
|
||||
for nzf in nzfs:
|
||||
subject = sanitize_filename(name_extractor(nzf.subject))
|
||||
if (nzf.filename == filename) or (subject == filename) or (filename in subject):
|
||||
logging.info("Existing file %s matched to file %s of %s", filename, nzf.filename, self.final_name)
|
||||
nzf.filename = filename
|
||||
nzf.bytes_left = 0
|
||||
self.remove_nzf(nzf)
|
||||
@@ -1254,25 +1251,25 @@ class NzbObject(TryList):
|
||||
for filename in files:
|
||||
# Create NZO's using basic information
|
||||
filepath = os.path.join(wdir, filename)
|
||||
if os.path.exists(filepath):
|
||||
tup = os.stat(filepath)
|
||||
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
|
||||
nzf = NzbFile(tm, filename, [], tup.st_size, self)
|
||||
self.files.append(nzf)
|
||||
self.files_table[nzf.nzf_id] = nzf
|
||||
nzf.filename = filename
|
||||
self.remove_nzf(nzf)
|
||||
logging.info("Existing file %s added to %s", filename, self.final_name)
|
||||
tup = os.stat(filepath)
|
||||
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
|
||||
nzf = NzbFile(tm, filename, [], tup.st_size, self)
|
||||
self.files.append(nzf)
|
||||
self.files_table[nzf.nzf_id] = nzf
|
||||
nzf.filename = filename
|
||||
self.remove_nzf(nzf)
|
||||
|
||||
# Set bytes correctly
|
||||
self.bytes += nzf.bytes
|
||||
self.bytes_tried += nzf.bytes
|
||||
self.bytes_downloaded += nzf.bytes
|
||||
# Set bytes correctly
|
||||
self.bytes += nzf.bytes
|
||||
self.bytes_tried += nzf.bytes
|
||||
self.bytes_downloaded += nzf.bytes
|
||||
|
||||
# Process par2 files
|
||||
if sabnzbd.par2file.is_parfile(filepath):
|
||||
self.handle_par2(nzf, filepath)
|
||||
self.bytes_par2 += nzf.bytes
|
||||
|
||||
# Process par2 files
|
||||
if sabnzbd.par2file.is_parfile(filepath):
|
||||
self.handle_par2(nzf, filepath)
|
||||
self.bytes_par2 += nzf.bytes
|
||||
logging.info("Existing file %s added to job", filename)
|
||||
except:
|
||||
logging.error(T("Error importing %s"), self.final_name)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
@@ -1891,13 +1888,17 @@ class NzbObject(TryList):
|
||||
for attrib in NzoAttributeSaver:
|
||||
attribs[attrib] = getattr(self, attrib)
|
||||
logging.debug("Saving attributes %s for %s", attribs, self.final_name)
|
||||
sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath)
|
||||
sabnzbd.save_data(attribs, ATTRIB_FILE, self.workpath, silent=True)
|
||||
|
||||
def load_attribs(self):
|
||||
""" Load saved attributes and return them to be parsed """
|
||||
attribs = sabnzbd.load_data(ATTRIB_FILE, self.workpath, remove=False)
|
||||
logging.debug("Loaded attributes %s for %s", attribs, self.final_name)
|
||||
|
||||
# If attributes file somehow does not exists
|
||||
if not attribs:
|
||||
return None, None, None
|
||||
|
||||
# Only a subset we want to apply directly to the NZO
|
||||
for attrib in ("final_name", "priority", "password", "url"):
|
||||
# Only set if it is present and has a value
|
||||
@@ -2070,16 +2071,16 @@ def nzf_cmp_name(nzf1, nzf2):
|
||||
|
||||
|
||||
def create_work_name(name):
|
||||
""" Remove ".nzb" and ".par(2)" and sanitize """
|
||||
strip_ext = [".nzb", ".par", ".par2"]
|
||||
name = sanitize_foldername(name.strip())
|
||||
""" Remove ".nzb" and ".par(2)" and sanitize, skip URL's """
|
||||
if name.find("://") < 0:
|
||||
name_base, ext = os.path.splitext(name)
|
||||
# In case it was one of these, there might be more
|
||||
while ext.lower() in strip_ext:
|
||||
# Need to remove any invalid characters before starting
|
||||
name_base, ext = os.path.splitext(sanitize_foldername(name))
|
||||
while ext.lower() in (".nzb", ".par", ".par2"):
|
||||
name = name_base
|
||||
name_base, ext = os.path.splitext(name)
|
||||
return name.strip()
|
||||
# And make sure we remove invalid characters again
|
||||
return sanitize_foldername(name)
|
||||
else:
|
||||
return name.strip()
|
||||
|
||||
|
||||
@@ -166,6 +166,8 @@ class PostProcessor(Thread):
|
||||
|
||||
def process(self, nzo):
|
||||
""" Push on finished job in the queue """
|
||||
# Make sure we return the status "Waiting"
|
||||
nzo.status = Status.QUEUED
|
||||
if nzo not in self.history_queue:
|
||||
self.history_queue.append(nzo)
|
||||
|
||||
@@ -327,7 +329,8 @@ def process_job(nzo):
|
||||
# Get the NZB name
|
||||
filename = nzo.final_name
|
||||
|
||||
if nzo.fail_msg: # Special case: aborted due to too many missing data
|
||||
# Download-processes can mark job as failed
|
||||
if nzo.fail_msg:
|
||||
nzo.status = Status.FAILED
|
||||
nzo.save_attribs()
|
||||
all_ok = False
|
||||
|
||||
@@ -24,6 +24,7 @@ import logging
|
||||
import time
|
||||
import datetime
|
||||
import threading
|
||||
import urllib.parse
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import RSS_FILE_NAME, DEFAULT_PRIORITY, DUP_PRIORITY
|
||||
@@ -277,44 +278,47 @@ class RSSQueue:
|
||||
feedparser.USER_AGENT = "SABnzbd/%s" % sabnzbd.__version__
|
||||
|
||||
# Read the RSS feed
|
||||
msg = None
|
||||
entries = None
|
||||
entries = []
|
||||
if readout:
|
||||
all_entries = []
|
||||
for uri in uris:
|
||||
uri = uri.replace(" ", "%20")
|
||||
msg = ""
|
||||
feed_parsed = {}
|
||||
uri = uri.replace(" ", "%20").replace("feed://", "http://")
|
||||
logging.debug("Running feedparser on %s", uri)
|
||||
feed_parsed = feedparser.parse(uri.replace("feed://", "http://"))
|
||||
logging.debug("Done parsing %s", uri)
|
||||
|
||||
if not feed_parsed:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, "?")
|
||||
logging.info(msg)
|
||||
try:
|
||||
feed_parsed = feedparser.parse(uri)
|
||||
except Exception as feedparser_exc:
|
||||
# Feedparser 5 would catch all errors, while 6 just throws them back at us
|
||||
feed_parsed["bozo_exception"] = feedparser_exc
|
||||
logging.debug("Finished parsing %s", uri)
|
||||
|
||||
status = feed_parsed.get("status", 999)
|
||||
if status in (401, 402, 403):
|
||||
msg = T("Do not have valid authentication for feed %s") % uri
|
||||
logging.info(msg)
|
||||
|
||||
if 500 <= status <= 599:
|
||||
elif 500 <= status <= 599:
|
||||
msg = T("Server side error (server code %s); could not get %s on %s") % (status, feed, uri)
|
||||
logging.info(msg)
|
||||
|
||||
entries = feed_parsed.get("entries")
|
||||
entries = feed_parsed.get("entries", [])
|
||||
if not entries and "feed" in feed_parsed and "error" in feed_parsed["feed"]:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, feed_parsed["feed"]["error"])
|
||||
|
||||
# Exception was thrown
|
||||
if "bozo_exception" in feed_parsed and not entries:
|
||||
msg = str(feed_parsed["bozo_exception"])
|
||||
if "CERTIFICATE_VERIFY_FAILED" in msg:
|
||||
msg = T("Server %s uses an untrusted HTTPS certificate") % get_base_url(uri)
|
||||
msg += " - https://sabnzbd.org/certificate-errors"
|
||||
logging.error(msg)
|
||||
elif "href" in feed_parsed and feed_parsed["href"] != uri and "login" in feed_parsed["href"]:
|
||||
# Redirect to login page!
|
||||
msg = T("Do not have valid authentication for feed %s") % uri
|
||||
else:
|
||||
msg = T("Failed to retrieve RSS from %s: %s") % (uri, msg)
|
||||
logging.info(msg)
|
||||
|
||||
if not entries and not msg:
|
||||
if msg:
|
||||
# We need to escape any "%20" that could be in the warning due to the URL's
|
||||
logging.warning_helpful(urllib.parse.unquote(msg))
|
||||
elif not entries:
|
||||
msg = T("RSS Feed %s was empty") % uri
|
||||
logging.info(msg)
|
||||
all_entries.extend(entries)
|
||||
|
||||
@@ -318,7 +318,7 @@ class URLGrabber(Thread):
|
||||
msg = T("URL Fetching failed; %s") % msg
|
||||
|
||||
# Mark as failed
|
||||
nzo.status = Status.FAILED
|
||||
nzo.set_unpack_info("Source", msg)
|
||||
nzo.fail_msg = msg
|
||||
|
||||
notifier.send_notification(T("URL Fetching failed; %s") % "", "%s\n%s" % (msg, url), "other", nzo.cat)
|
||||
|
||||
8
scripts/Deobfuscate.py
Normal file → Executable file
8
scripts/Deobfuscate.py
Normal file → Executable file
@@ -221,5 +221,13 @@ if run_renamer:
|
||||
else:
|
||||
print("No par2 files or large files found")
|
||||
|
||||
# Note about the new option
|
||||
print(
|
||||
"The features of Deobfuscate.py are now integrated into SABnzbd! "
|
||||
+ "Just enable 'Deobfuscate final filenames' in Config - Switches. "
|
||||
+ "Don't forget to disable this script when you enable the new option!"
|
||||
+ "This script will be removed in the next version of SABnzbd."
|
||||
)
|
||||
|
||||
# Always exit with success-code
|
||||
sys.exit(0)
|
||||
|
||||
@@ -55,7 +55,7 @@ class TestNZO:
|
||||
# TODO: More checks!
|
||||
|
||||
|
||||
class TestScanPassword:
|
||||
class TestNZBStuffHelpers:
|
||||
def test_scan_passwords(self):
|
||||
file_names = {
|
||||
"my_awesome_nzb_file{{password}}": "password",
|
||||
@@ -77,3 +77,20 @@ class TestScanPassword:
|
||||
|
||||
for file_name, clean_file_name in file_names.items():
|
||||
assert nzbstuff.scan_password(file_name)[0] == clean_file_name
|
||||
|
||||
def test_create_work_name(self):
|
||||
# Only test stuff specific for create_work_name
|
||||
# The sanitizing is already tested in tests for sanitize_foldername
|
||||
file_names = {
|
||||
"my_awesome_nzb_file.pAr2.nZb": "my_awesome_nzb_file",
|
||||
"my_awesome_nzb_file.....pAr2.nZb": "my_awesome_nzb_file",
|
||||
"my_awesome_nzb_file....par2..": "my_awesome_nzb_file",
|
||||
" my_awesome_nzb_file .pAr.nZb": "my_awesome_nzb_file",
|
||||
"with.extension.and.period.par2.": "with.extension.and.period",
|
||||
"nothing.in.here": "nothing.in.here",
|
||||
" just.space ": "just.space",
|
||||
"http://test.par2 ": "http://test.par2",
|
||||
}
|
||||
|
||||
for file_name, clean_file_name in file_names.items():
|
||||
assert nzbstuff.create_work_name(file_name) == clean_file_name
|
||||
|
||||
Reference in New Issue
Block a user