mirror of https://github.com/sabnzbd/sabnzbd.git
synced 2026-02-18 14:48:28 -05:00
Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e0dc988f94 | |
| | 4021e6098c | |
| | f521037669 | |
| | 246e9e421b | |
| | 8aaee09652 | |
| | e36450a666 | |
PKG-INFO (4 changed lines)
```diff
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 2.3.3RC1
-Summary: SABnzbd-2.3.3Beta1
+Version: 2.3.3RC2
+Summary: SABnzbd-2.3.3RC2
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
```
```diff
@@ -1,6 +1,12 @@
-Release Notes - SABnzbd 2.3.3 RC 1
+Release Notes - SABnzbd 2.3.3 RC 2
 =========================================================

+## Changes since 2.3.3 RC 1
+- Hostname check improved for IPv6 addresses
+- Category not correctly parsed when grabbing NZB from URL
+- Unpacked zip/7zip files were not sorted
+- Extended SAN list of newly generated self-signed certificates
+
 ## Changes since 2.3.3 Beta 1
 - SABYenc updated to 3.3.4 to fix (more) false-positive CRC errors
 - Failed URL fetches also trigger post-processing script (if configured)
```
```diff
@@ -162,8 +162,9 @@ def check_hostname():
     if not host:
         return False

-    # Remove the port-part
-    host = host.split(':')[0].lower()
+    # Remove the port-part (like ':8080'), if it is there, always on the right hand side.
+    # Not to be confused with IPv6 colons (within square brackets)
+    host = re.sub(':[0123456789]+$', '', host).lower()

     # Fine if localhost or IP
     if host == 'localhost' or probablyipv4(host) or probablyipv6(host):
```
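The visible effect of swapping `split(':')[0]` for the regex is easiest to check on bracketed IPv6 hosts. A small standalone sketch (host names below are made up):

```python
import re

def strip_port(host):
    # Same substitution as the new check_hostname() line above: drop a
    # trailing ':<digits>' port, leave IPv6 colons (inside brackets) alone.
    return re.sub(':[0123456789]+$', '', host).lower()

print(strip_port('myhost.local:8080'))   # myhost.local
print(strip_port('[2001:db8::1]:8080'))  # [2001:db8::1]
print(strip_port('[2001:db8::1]'))       # [2001:db8::1]
# The old host.split(':')[0] returned '[2001' for both IPv6 forms.
```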
```diff
@@ -174,7 +174,7 @@ def cat_to_opts(cat, pp=None, script=None, priority=None):
     if priority == DEFAULT_PRIORITY:
         priority = def_cat.priority()

-    # logging.debug('Cat->Attrib cat=%s pp=%s script=%s prio=%s', cat, pp, script, priority)
+    logging.debug('Cat->Attrib cat=%s pp=%s script=%s prio=%s', cat, pp, script, priority)
     return cat, pp, script, priority


```
```diff
@@ -412,6 +412,7 @@ def sanitize_files_in_folder(folder):
             new_path = os.path.join(root, sanitize_filename(file_))
             if path != new_path:
                 try:
+                    logging.debug('Filename-sanitizer will rename %s to %s', path, new_path)
                     os.rename(path, new_path)
                     path = new_path
                 except:
```
```diff
@@ -995,6 +996,22 @@ def create_dirs(dirpath):
     return dirpath


+@synchronized(DIR_LOCK)
+def recursive_listdir(dir):
+    """ List all files in dirs and sub-dirs """
+    filelist = []
+    for root, dirs, files in os.walk(dir):
+        for file in files:
+            if '.AppleDouble' not in root and '.DS_Store' not in root:
+                try:
+                    p = os.path.join(root, file)
+                    filelist.append(p)
+                except UnicodeDecodeError:
+                    # Just skip failing names
+                    pass
+    return filelist
+
+
 @synchronized(DIR_LOCK)
 def move_to_path(path, new_path):
     """ Move a file to a new path, optionally give unique filename
```
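`recursive_listdir()` reuses the `@synchronized(DIR_LOCK)` decorator that already guards the other directory helpers in this module. SABnzbd ships its own implementation; the sketch below shows only the generic pattern such a decorator implements, not SABnzbd's code:

```python
import threading
from functools import wraps

DIR_LOCK = threading.RLock()

def synchronized(lock):
    # Generic sketch: run the wrapped function only while holding the given
    # lock, so concurrent callers cannot interleave directory scans/renames.
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return wrapper
    return decorate
```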
```diff
@@ -1515,7 +1532,9 @@ def probablyipv4(ip):


 def probablyipv6(ip):
-    if ip.count(':') >= 2 and re.sub('[0123456789abcdefABCDEF:]', '', ip) == '':
+    # Returns True if the given input is probably an IPv6 address
+    # Square Brackets like '[2001::1]' are OK
+    if ip.count(':') >= 2 and re.sub('[0123456789abcdefABCDEF:\[\]]', '', ip) == '':
         return True
     else:
         return False
```
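A quick check of the widened character class, using the pattern exactly as it appears above (addresses are examples):

```python
import re

def probablyipv6(ip):
    if ip.count(':') >= 2 and re.sub('[0123456789abcdefABCDEF:\[\]]', '', ip) == '':
        return True
    else:
        return False

print(probablyipv6('2001:db8::1'))    # True
print(probablyipv6('[2001:db8::1]'))  # True (the old pattern rejected the brackets)
print(probablyipv6('myhost.local'))   # False
```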
```diff
@@ -33,7 +33,7 @@ from sabnzbd.encoding import TRANS, unicoder, platform_encode, deunicode
 import sabnzbd.utils.rarfile as rarfile
 from sabnzbd.misc import format_time_string, find_on_path, make_script_path, int_conv, \
     real_path, globber, globber_full, get_all_passwords, renamer, clip_path, \
-    has_win_device, calc_age, long_path, remove_file
+    has_win_device, calc_age, long_path, remove_file, recursive_listdir
 from sabnzbd.sorting import SeriesSorter
 import sabnzbd.cfg as cfg
 from sabnzbd.constants import Status
```
```diff
@@ -246,7 +246,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
     rerun = False
     force_rerun = False
     newfiles = []
-    error = 0
+    error = None
     new_joins = new_rars = new_zips = new_ts = None

     if cfg.enable_filejoin():
```
```diff
@@ -257,8 +257,6 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
             if newf:
                 newfiles.extend(newf)
             logging.info('Filejoin finished on %s', workdir)
-            nzo.set_action_line()
-            rerun = not error

     if cfg.enable_unrar():
         new_rars = [rar for rar in xrars if rar not in rars]
```
```diff
@@ -268,32 +266,27 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
             if newf:
                 newfiles.extend(newf)
             logging.info('Unrar finished on %s', workdir)
-            nzo.set_action_line()
-            rerun = not error

     if cfg.enable_7zip():
         new_sevens = [seven for seven in xsevens if seven not in sevens]
         if new_sevens:
             logging.info('7za starting on %s', workdir)
-            if unseven(nzo, workdir, workdir_complete, dele, one_folder, new_sevens):
-                error = True
+            error, newf = unseven(nzo, workdir, workdir_complete, dele, one_folder, new_sevens)
+            if newf:
+                newfiles.extend(newf)
             logging.info('7za finished on %s', workdir)
-            nzo.set_action_line()
-            rerun = not error

     if cfg.enable_unzip():
         new_zips = [zip for zip in xzips if zip not in zips]
         if new_zips:
             logging.info('Unzip starting on %s', workdir)
             if SEVEN_COMMAND:
-                if unseven(nzo, workdir, workdir_complete, dele, one_folder, new_zips):
-                    error = True
+                error, newf = unseven(nzo, workdir, workdir_complete, dele, one_folder, new_zips)
             else:
-                if unzip(nzo, workdir, workdir_complete, dele, one_folder, new_zips):
-                    error = True
+                error, newf = unzip(nzo, workdir, workdir_complete, dele, one_folder, new_zips)
+            if newf:
+                newfiles.extend(newf)
             logging.info('Unzip finished on %s', workdir)
-            nzo.set_action_line()
-            rerun = not error

     if cfg.enable_tsjoin():
         new_ts = [_ts for _ts in xts if _ts not in ts]
```
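The recurring change in this hunk is the calling convention: `unseven()` and `unzip()` now hand back an `(error, new_files)` pair instead of a bare truth value, so `unpack_magic()` can collect what was extracted while still tracking failures. A toy illustration with a hypothetical unpacker:

```python
def fake_unpacker(workdir):
    # Hypothetical stand-in for unseven()/unzip(): report success plus the
    # files it claims to have created.
    new_files = ['%s/extracted.bin' % workdir]
    error = False  # False/0 on success, True or a return code on failure
    return error, new_files

newfiles = []
error, newf = fake_unpacker('/tmp/job')
if newf:
    newfiles.extend(newf)
print('%s %s' % (error, newfiles))  # False ['/tmp/job/extracted.bin']
```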
```diff
@@ -303,8 +296,12 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
             if newf:
                 newfiles.extend(newf)
             logging.info('TS Joining finished on %s', workdir)
-            nzo.set_action_line()
-            rerun = not error
+
+    # Refresh history and set output
+    nzo.set_action_line()
+
+    # Only re-run if something was unpacked and it was success
+    rerun = error in (False, 0)

     # During a Retry we might miss files that failed during recursive unpack
     if nzo.reuse and depth == 1 and any(build_filelists(workdir, workdir_complete)):
```
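Combined with the `error = None` initialisation earlier in this change set, the new membership test only requests another unpack pass when at least one unpacker actually ran and reported success. A quick check of the expression itself:

```python
from __future__ import print_function

for error in (None, True, 1, False, 0):
    print('%-5s -> %s' % (error, error in (False, 0)))
# None  -> False  (no unpacker ran)
# True  -> False  (an unpacker failed)
# 1     -> False  (non-zero external return code)
# False -> True   (success)
# 0     -> True   (success, zero return code)
```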
```diff
@@ -853,6 +850,9 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips):
         unzip_failed = False
         tms = time.time()

+        # For file-bookkeeping
+        orig_dir_content = recursive_listdir(workdir_complete)
+
         for _zip in zips:
             logging.info("Starting extract on zipfile: %s ", _zip)
             nzo.set_action_line(T('Unpacking'), '%s' % unicoder(os.path.basename(_zip)))
```
```diff
@@ -870,6 +870,9 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips):
             msg = T('%s files in %s') % (str(i), format_time_string(time.time() - tms))
             nzo.set_unpack_info('Unpack', msg)

+        # What's new?
+        new_files = list(set(orig_dir_content + recursive_listdir(workdir_complete)))
+
         # Delete the old files if we have to
         if delete and not unzip_failed:
             i = 0
```
```diff
@@ -890,12 +893,12 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips):
                 except OSError:
                     logging.warning(T('Deleting %s failed!'), brokenzip)

-        return unzip_failed
+        return unzip_failed, new_files
     except:
         msg = sys.exc_info()[1]
         nzo.fail_msg = T('Unpacking failed, %s') % msg
         logging.error(T('Error "%s" while running unzip() on %s'), msg, nzo.final_name)
-        return True
+        return True, []


 def ZIP_Extract(zipfile, extraction_path, one_folder):
```
```diff
@@ -929,6 +932,7 @@ def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens):
     """
     i = 0
     unseven_failed = False
+    new_files = []
    tms = time.time()

     # Find multi-volume sets, because 7zip will not provide actual set members
```
```diff
@@ -955,18 +959,19 @@ def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens):
         else:
             extraction_path = os.path.split(seven)[0]

-        res, msg = seven_extract(nzo, seven, extensions, extraction_path, one_folder, delete)
+        res, new_files_set, msg = seven_extract(nzo, seven, extensions, extraction_path, one_folder, delete)
         if res:
             unseven_failed = True
             nzo.set_unpack_info('Unpack', msg)
         else:
             i += 1
+            new_files.extend(new_files_set)

     if not unseven_failed:
         msg = T('%s files in %s') % (str(i), format_time_string(time.time() - tms))
         nzo.set_unpack_info('Unpack', msg)

-    return unseven_failed
+    return unseven_failed, new_files


 def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete):
```
```diff
@@ -982,7 +987,7 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete
             msg = T('Trying 7zip with password "%s"') % unicoder(password)
             nzo.fail_msg = msg
             nzo.set_unpack_info('Unpack', msg)
-        fail, msg = seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete, password)
+        fail, new_files, msg = seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete, password)
         if fail != 2:
             break

```
```diff
@@ -991,7 +996,7 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete
         msg = '%s (%s)' % (T('Unpacking failed, archive requires a password'), os.path.basename(sevenset))
         nzo.fail_msg = msg
         logging.error(msg)
-    return fail, msg
+    return fail, new_files, msg


 def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete, password):
```
```diff
@@ -1025,6 +1030,9 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
         if not os.path.exists(name):
             return 1, T('7ZIP set "%s" is incomplete, cannot unpack') % unicoder(sevenset)

+    # For file-bookkeeping
+    orig_dir_content = recursive_listdir(extraction_path)
+
     command = [SEVEN_COMMAND, method, '-y', overwrite, parm, case, password,
                '-o%s' % extraction_path, name]

```
```diff
@@ -1039,6 +1047,9 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete

     ret = p.wait()

+    # What's new?
+    new_files = list(set(orig_dir_content + recursive_listdir(extraction_path)))
+
     if ret == 0 and delete:
         if extensions:
             for ext in extensions:
```
```diff
@@ -1054,7 +1065,7 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete
                 logging.warning(T('Deleting %s failed!'), sevenset)

     # Always return an error message, even when return code is 0
-    return ret, T('Could not unpack %s') % unicoder(sevenset)
+    return ret, new_files, T('Could not unpack %s') % unicoder(sevenset)


 ##############################################################################
```
```diff
@@ -2079,26 +2090,10 @@ def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=
     sevens, joinables, zips, rars, ts, filelist = ([], [], [], [], [], [])

     if workdir_complete:
-        for root, dirs, files in os.walk(workdir_complete):
-            for _file in files:
-                if '.AppleDouble' not in root and '.DS_Store' not in root:
-                    try:
-                        p = os.path.join(root, _file)
-                        filelist.append(p)
-                    except UnicodeDecodeError:
-                        # Just skip failing names
-                        pass
+        filelist.extend(recursive_listdir(workdir_complete))

     if workdir and (not filelist or check_both):
-        for root, dirs, files in os.walk(workdir):
-            for _file in files:
-                if '.AppleDouble' not in root and '.DS_Store' not in root:
-                    try:
-                        p = os.path.join(root, _file)
-                        filelist.append(p)
-                    except UnicodeDecodeError:
-                        # Just skip failing names
-                        pass
+        filelist.extend(recursive_listdir(workdir))

     for file in filelist:
         # Extra check for rar (takes CPU/disk)
```
```diff
@@ -626,9 +626,16 @@ class NzbObject(TryList):

-        # Determine category and find pp/script values based on input
-        # Later will be re-evaluated based on import steps
-        self.cat, pp_tmp, self.script, priority = cat_to_opts(cat, pp, script, priority)
-        self.set_priority(priority)
-        self.repair, self.unpack, self.delete = sabnzbd.pp_to_opts(pp_tmp)
+        if pp is None:
+            r = u = d = None
+        else:
+            r, u, d = sabnzbd.pp_to_opts(pp)
+        self.set_priority(priority)  # Parse priority of input
+        self.repair = r              # True if we want to repair this set
+        self.unpack = u              # True if we want to unpack this set
+        self.delete = d              # True if we want to delete this set
+        self.script = script         # External script for this set
+        self.cat = cat               # User-set category

         # Information fields
         self.url = url or filename
```
```diff
@@ -31,7 +31,7 @@ from sabnzbd.newsunpack import unpack_magic, par2_repair, external_processing, \
     sfv_check, build_filelists, rar_sort
 from threading import Thread
 from sabnzbd.misc import real_path, get_unique_path, create_dirs, move_to_path, \
-    make_script_path, long_path, clip_path, \
+    make_script_path, long_path, clip_path, recursive_listdir, \
     on_cleanup_list, renamer, remove_dir, remove_all, globber, globber_full, \
     set_permissions, cleanup_empty_directories, fix_unix_encoding, \
     sanitize_and_trim_path, sanitize_files_in_folder, remove_file
```
```diff
@@ -385,10 +385,12 @@ def process_job(nzo):
                 nzo.status = Status.EXTRACTING
                 logging.info("Running unpack_magic on %s", filename)
                 unpack_error, newfiles = unpack_magic(nzo, workdir, tmp_workdir_complete, flag_delete, one_folder, (), (), (), (), ())
-                logging.info("unpack_magic finished on %s", filename)
+                logging.info("Unpacked files %s", newfiles)
+
+                if sabnzbd.WIN32:
+                    # Sanitize the resulting files
+                    newfiles = sanitize_files_in_folder(tmp_workdir_complete)
+
+                logging.info("Finished unpack_magic on %s", filename)
             else:
                 nzo.set_unpack_info('Unpack', T('No post-processing because of failed verification'))
```
```diff
@@ -866,10 +868,7 @@ def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
         if so send to queue and remove if on CleanList
         Returns list of processed NZB's
     """
-    files = []
-    for root, _dirs, names in os.walk(wdir):
-        for name in names:
-            files.append(os.path.join(root, name))
+    files = recursive_listdir(wdir)

     for file_ in files:
         if os.path.splitext(file_)[1].lower() != '.nzb':
```
```diff
@@ -315,6 +315,9 @@ class URLGrabber(Thread):
                     if cfg.email_endjob() > 0:
                         emailer.badfetch_mail(msg, url)

+                    # Parse category to make sure script is set correctly after a grab
+                    nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)
+
                     # Add to history and run script if desired
                     NzbQueue.do.remove(nzo.nzo_id, add_to_history=False)
                     PostProcessor.do.process(nzo)
```
```diff
@@ -12,8 +12,9 @@ from cryptography import x509
 from cryptography.x509.oid import NameOID
 import datetime
 import os
-from sabnzbd.getipaddress import localipv4
 import socket

+from sabnzbd.getipaddress import localipv4
+
 # Ported from cryptography/utils.py
 def int_from_bytes(data, byteorder, signed=False):
```
```diff
@@ -60,15 +61,25 @@ def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', L
         # x509.NameAttribute(NameOID.COMMON_NAME, CN),
     ])

-    # build SubjectAltName list since we are not using a common name
-    san_list = [
-        x509.DNSName(u"localhost"),
-        x509.DNSName(u"127.0.0.1"),
-    ]
-    # append local v4 ip (functions already has try/catch logic)
-    mylocalipv4 = localipv4()
-    if mylocalipv4:
-        san_list.append(x509.DNSName(u"" + mylocalipv4))
+    # build Subject Alternate Names (aka SAN) list
+    # First the host names, add with x509.DNSName():
+    san_list = [x509.DNSName(u"localhost")]
+    san_list.append(x509.DNSName(unicode(socket.gethostname())))
+
+    # Then the host IP addresses, add with x509.IPAddress()
+    # Inside a try-except, just to be sure
+    try:
+        import ipaddress
+        san_list.append(x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.1")))
+        san_list.append(x509.IPAddress(ipaddress.IPv6Address(u"::1")))
+
+        # append local v4 ip
+        mylocalipv4 = localipv4()
+        if mylocalipv4:
+            san_list.append(x509.IPAddress(ipaddress.IPv4Address(unicode(mylocalipv4))))
+    except:
+        pass

     cert = x509.CertificateBuilder().subject_name(
         subject
```
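The hunk is cut off at the `CertificateBuilder` call; with the `cryptography` package a SAN list like the one above is attached to the certificate through `add_extension()`. The sketch below is not SABnzbd's `generate_local_cert()`, just a minimal self-contained example of that step (organisation name and validity period are placeholders):

```python
import datetime
import ipaddress
from cryptography import x509
from cryptography.x509.oid import NameOID
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
subject = issuer = x509.Name([x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"SABnzbd")])
san_list = [x509.DNSName(u"localhost"),
            x509.IPAddress(ipaddress.IPv4Address(u"127.0.0.1")),
            x509.IPAddress(ipaddress.IPv6Address(u"::1"))]

cert = (x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3560))
        .add_extension(x509.SubjectAlternativeName(san_list), critical=False)
        .sign(key, hashes.SHA256(), default_backend()))

# The SAN entries can be read back from the signed certificate:
print(cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value)
```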