Compare commits

..

17 Commits

Author SHA1 Message Date
Safihre
fb301eb5c8 Update text files for 2.2.1RC2 2017-08-23 22:49:59 +02:00
Safihre
1562c3560b Handle '482 Download limit exceeded'
Closes #1009
2017-08-23 22:48:15 +02:00
Safihre
9813bc237f Only auto-disconnect after first run of verification 2017-08-23 21:42:56 +02:00
Safihre
b39fe059c6 Pause between unpacks on Windows, otherwise subprocess_fix overloads
Strange but true: on jobs with many small files to unpack, it would just fail.
2017-08-23 21:42:17 +02:00
Safihre
a56c770a8b The real anti-stalling fix
Woohoo!
For each NZF (file) make sure all articles have tried a server before marking it as tried. Previously, if articles were still in transit they could be marked as tried at the NZF level before the server could get to them.
2017-08-23 16:02:01 +02:00
Safihre
e3bf0edad8 TryList reset at NZO level also necessary
Timing issue between when a new server is selected and when a job is added to the NZO-level try-list. Locks were tried, but failed.
2017-08-23 09:11:01 +02:00
Safihre
e35d9e4db3 Correct handling of TryList when server has timeout 2017-08-23 08:32:47 +02:00
Safihre
c617d4321a Correctly remove + from INFO label in all languages 2017-08-22 16:13:24 +02:00
Safihre
0fd3a2881f Correct redirect after ports change 2017-08-22 10:19:42 +02:00
Safihre
0c1f7633de Only discard really non-unique hashes from md5of16k 2017-08-22 09:43:33 +02:00
Safihre
b7d5d49c84 Show hover-title that the compress icon is Direct Unpack 2017-08-22 09:43:26 +02:00
Safihre
9911b93ece Add error when NZO creation fails 2017-08-22 09:43:11 +02:00
Safihre
eeaad00968 Also hide email-accounts in logging 2017-08-22 09:43:06 +02:00
Safihre
e1bb8459e3 Take the risk of allowing up to 5 bad articles in jobs without Par2 2017-08-22 09:42:47 +02:00
Safihre
65c3ac0cc0 Warn in case the password file has too many passwords 2017-08-22 09:42:16 +02:00
Safihre
413c02a80f Do not run get_new_id forever in case of problems
#984
2017-08-22 09:41:40 +02:00
Safihre
80f118f304 UnRar is required to read some RAR files 2017-08-21 08:23:10 +02:00
17 changed files with 94 additions and 64 deletions

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 2.2.1RC1
Summary: SABnzbd-2.2.1RC1
Version: 2.2.1RC2
Summary: SABnzbd-2.2.1RC2
Home-page: https://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,8 +1,15 @@
Release Notes - SABnzbd 2.2.1 Release Candidate 1
Release Notes - SABnzbd 2.2.1 Release Candidate 2
=========================================================
## Bugfixes since 2.2.0
- Some users were experiencing downloads being stuck at 99%
- Some users were experiencing downloads or pre-check being stuck at 99%
- Allow up to 5 bad articles for jobs with no or little par2
- Fixed RarFile error during unpacking
- Unpacking of many archives could fail
- Warn user when password-file is too large
- Remove email addresses settings from log export
- Block server longer on 'Download limit exceeded' errors
- Only auto-disconnect after first run of verification
## Upgrading from 2.1.x and older
- Finish queue

View File

@@ -232,7 +232,7 @@ function do_restart() {
var portsUnchanged = ($('#port').val() == $('#port').data('original')) && ($('#https_port').val() == $('#https_port').data('original'))
// Are we on settings page or did nothing change?
if(!$('body').hasClass('General') || (!switchedHTTPS && !portsUnchanged)) {
if(!$('body').hasClass('General') || (!switchedHTTPS && portsUnchanged)) {
// Same as before
var urlTotal = window.location.origin + urlPath
} else {

View File

@@ -95,7 +95,7 @@
<span data-bind="text: password"></span>
</small>
<!-- /ko -->
<div class="name-icons direct-unpack hover-button" data-bind="visible: direct_unpack">
<div class="name-icons direct-unpack hover-button" data-bind="visible: direct_unpack" title="$T('opt-direct_unpack')">
<span class="glyphicon glyphicon-compressed"></span> <span data-bind="text: direct_unpack"></span>
</div>
</div>

View File

@@ -103,7 +103,7 @@
glitterTranslate.status['Script'] = "$T('stage-script')";
glitterTranslate.status['Source'] = "$T('stage-source')";
glitterTranslate.status['Servers'] = "$T('stage-servers')";
glitterTranslate.status['INFO'] = "$T('log-info')".replace('+ ', '').toUpperCase();
glitterTranslate.status['INFO'] = "$T('log-info')".replace('+', '').toUpperCase();
glitterTranslate.status['WARNING'] = "$T('Glitter-warning')";
glitterTranslate.status['ERROR'] = "$T('Glitter-error')";

View File

@@ -12,7 +12,7 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=ASCII\n"
"Content-Transfer-Encoding: 7bit\n"
"POT-Creation-Date: 2017-08-16 13:33+W. Europe Daylight Time\n"
"POT-Creation-Date: 2017-08-19 16:05+W. Europe Daylight Time\n"
"Generated-By: pygettext.py 1.5\n"
@@ -712,6 +712,10 @@ msgstr ""
msgid "Error creating SSL key and certificate"
msgstr ""
#: sabnzbd/misc.py [Warning message]
msgid "Your password file contains more than 30 passwords, testing all these passwords takes a lot of time. Try to only list useful passwords."
msgstr ""
#: sabnzbd/misc.py [Error message]
msgid "Cannot change permissions of %s"
msgstr ""

View File

@@ -863,6 +863,7 @@ def get_new_id(prefix, folder, check_list=None):
except:
logging.error(T('Failure in tempfile.mkstemp'))
logging.info("Traceback: ", exc_info=True)
break
# Cannot create unique id, crash the process
raise IOError

View File

@@ -218,8 +218,9 @@ class Assembler(Thread):
table[name] = hash
if hash16k not in nzf.nzo.md5of16k:
nzf.nzo.md5of16k[hash16k] = name
else:
# Not unique, remove to avoid false-renames
elif nzf.nzo.md5of16k[hash16k] != name:
# Not unique and not already linked to this file
# Remove to avoid false-renames
duplicates16k.append(hash16k)
header = f.read(8)

View File

@@ -81,6 +81,7 @@ MAX_DECODE_QUEUE = 10
LIMIT_DECODE_QUEUE = 100
MAX_WARNINGS = 20
MAX_WIN_DFOLDER = 60
MAX_BAD_ARTICLES = 5
REPAIR_PRIORITY = 3
TOP_PRIORITY = 2

View File

@@ -222,6 +222,8 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
# Looks like an incomplete file, retry
return -2, nzo_ids
else:
# Something else is wrong, show error
logging.error(T('Error while adding %s, removing'), name, exc_info=True)
return -1, nzo_ids
if nzo:

View File

@@ -655,7 +655,7 @@ class Downloader(Thread):
logging.error(T('Failed login for server %s'), server.id)
penalty = _PENALTY_PERM
block = True
elif ecode == '502':
elif ecode in ('502', '482'):
# Cannot connect (other reasons), block this server
if server.active:
errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
@@ -795,11 +795,8 @@ class Downloader(Thread):
# Remove this server from try_list
article.fetcher = None
nzf = article.nzf
nzo = nzf.nzo
# Allow all servers to iterate over each nzo/nzf again ##
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
# Allow all servers to iterate over each nzo/nzf again
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article.nzf, article.nzf.nzo)
if destroy:
nw.terminate(quit=quit)
@@ -942,7 +939,8 @@ def clues_too_many(text):
""" Check for any "too many connections" clues in the response code """
text = text.lower()
for clue in ('exceed', 'connections', 'too many', 'threads', 'limit'):
if clue in text:
# Not 'download limit exceeded' error
if (clue in text) and ('download' not in text):
return True
return False
@@ -959,7 +957,7 @@ def clues_too_many_ip(text):
def clues_pay(text):
""" Check for messages about payments """
text = text.lower()
for clue in ('credits', 'paym', 'expired'):
for clue in ('credits', 'paym', 'expired', 'exceeded'):
if clue in text:
return True
return False

View File

@@ -2414,7 +2414,7 @@ LOG_API_RE = re.compile(r"(apikey|api)(=|:)[\w]+", re.I)
LOG_API_JSON_RE = re.compile(r"u'(apikey|api)': u'[\w]+'", re.I)
LOG_USER_RE = re.compile(r"(user|username)\s?=\s?[\S]+", re.I)
LOG_PASS_RE = re.compile(r"(password)\s?=\s?[\S]+", re.I)
LOG_INI_HIDE_RE = re.compile(r"(email_pwd|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I)
LOG_INI_HIDE_RE = re.compile(r"(email_pwd|email_account|email_to|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I)
LOG_HASH_RE = re.compile(r"([a-fA-F\d]{25})", re.I)
class Status(object):

View File

@@ -1359,6 +1359,7 @@ def get_all_passwords(nzo):
pw = nzo.nzo_info.get('password')
if pw:
meta_passwords.append(pw)
if meta_passwords:
if nzo.password == meta_passwords[0]:
# this nzo.password came from meta, so don't use it twice
@@ -1366,19 +1367,23 @@ def get_all_passwords(nzo):
else:
passwords.extend(meta_passwords)
logging.info('Read %s passwords from meta data in NZB: %s', len(meta_passwords), meta_passwords)
pw_file = cfg.password_file.get_path()
if pw_file:
try:
pwf = open(pw_file, 'r')
lines = pwf.read().split('\n')
with open(pw_file, 'r') as pwf:
lines = pwf.read().split('\n')
# Remove empty lines and space-only passwords and remove surrounding spaces
pws = [pw.strip('\r\n ') for pw in lines if pw.strip('\r\n ')]
logging.debug('Read these passwords from file: %s', pws)
passwords.extend(pws)
pwf.close()
logging.info('Read %s passwords from file %s', len(pws), pw_file)
except IOError:
logging.info('Failed to read the passwords file %s', pw_file)
except:
logging.warning('Failed to read the passwords file %s', pw_file)
# Check size
if len(passwords) > 30:
logging.warning(T('Your password file contains more than 30 passwords, testing all these passwords takes a lot of time. Try to only list useful passwords.'))
if nzo.password:
# If an explicit password was set, add a retry without password, just in case.

View File

@@ -606,6 +606,10 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, rename, '-ai', password_command,
'%s' % clip_path(rarfile_path), '%s\\' % extraction_path]
# The subprocess_fix requires time to clear the buffers to work,
# otherwise the inputs get send incorrectly and unrar breaks
time.sleep(0.5)
elif RAR_PROBLEM:
# Use only oldest options (specifically no "-or")
command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, password_command,
@@ -2064,10 +2068,13 @@ def rar_volumelist(rarfile_path, password, known_volumes):
""" Extract volumes that are part of this rarset
and merge them with existing list, removing duplicates
"""
# UnRar is required to read some RAR files
rarfile.UNRAR_TOOL = RAR_COMMAND
zf = rarfile.RarFile(rarfile_path)
# setpassword can fail due to bugs in RarFile
if password:
try:
# setpassword can fail due to bugs in RarFile
zf.setpassword(password)
except:
pass

View File

@@ -768,10 +768,6 @@ class NzbQueue(object):
def end_job(self, nzo):
""" Send NZO to the post-processing queue """
logging.info('Ending job %s', nzo.final_name)
if self.actives(grabs=False) < 2 and cfg.autodisconnect():
# This was the last job, close server connections
if sabnzbd.downloader.Downloader.do:
sabnzbd.downloader.Downloader.do.disconnect()
# Notify assembler to call postprocessor
if not nzo.deleted:
@@ -856,28 +852,11 @@ class NzbQueue(object):
ArticleCache.do.purge_articles(nzo.saved_articles)
def stop_idle_jobs(self):
""" Detect jobs that have zero files left or are stalled
and send them to post-processing
"""
nr_servers = len(sabnzbd.downloader.Downloader.do.servers)
""" Detect jobs that have zero files left and send them to post processing """
empty = []
for nzo in self.__nzo_list:
if not nzo.futuretype and nzo.status not in (Status.PAUSED, Status.GRABBING):
# Finished, but not yet ended
if not nzo.files:
empty.append(nzo)
continue
# Check if maybe stalled by checking if all files
# have all servers in their TryList indicating a lock-up
for file in nzo.files:
if file.try_list_size() < nr_servers:
# Not yet all tried
break
else:
# Only executed if all files are stuck
logging.info('Job %s seems stalled, resetting', nzo.final_name)
nzo.reset_all_try_lists()
if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
empty.append(nzo)
for nzo in empty:
self.end_job(nzo)

View File

@@ -41,7 +41,7 @@ import sabnzbd
from sabnzbd.constants import GIGI, ATTRIB_FILE, JOB_ADMIN, \
DEFAULT_PRIORITY, LOW_PRIORITY, NORMAL_PRIORITY, \
PAUSED_PRIORITY, TOP_PRIORITY, DUP_PRIORITY, REPAIR_PRIORITY, \
RENAMES_FILE, Status, PNFO
RENAMES_FILE, MAX_BAD_ARTICLES, Status, PNFO
from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
get_unique_path, get_admin_path, remove_all, sanitize_filename, globber_full, \
int_conv, set_permissions, format_time_string, long_path, trim_win_path, \
@@ -91,11 +91,6 @@ class TryList(object):
if server not in self.__try_list:
self.__try_list.append(server)
def try_list_size(self):
""" How many servers are listed as tried """
with TRYLIST_LOCK:
return len(self.__try_list)
def reset_try_list(self):
""" Clean the list """
with TRYLIST_LOCK:
@@ -312,13 +307,27 @@ class NzbFile(TryList):
self.blocks = int(blocks)
def get_article(self, server, servers):
""" Get next article to be downloaded """
""" Get next article to be downloaded from this server
Returns None when there are still articles to try
Returns False when all articles are tried
"""
# Make sure all articles have tried this server before
# adding to the NZF-TryList, otherwise there will be stalls!
tried_all_articles = True
for article in self.articles:
article = article.get_article(server, servers)
if article:
return article
article_return = article.get_article(server, servers)
if article_return:
return article_return
elif tried_all_articles and not article.server_in_try_list(server):
tried_all_articles = False
self.add_to_try_list(server)
# We are sure they are all tried
if tried_all_articles:
self.add_to_try_list(server)
return False
# Still articles left to try
return None
def reset_all_try_lists(self):
""" Clear all lists of visited servers """
@@ -1064,7 +1073,7 @@ class NzbObject(TryList):
self.prospective_add(nzf)
# Sometimes a few CRC errors are still fine, so we continue
if self.bad_articles > 5:
if self.bad_articles > MAX_BAD_ARTICLES:
self.abort_direct_unpacker()
post_done = False
@@ -1264,13 +1273,13 @@ class NzbObject(TryList):
while blocks_already < self.bad_articles and extrapars_sorted:
new_nzf = extrapars_sorted.pop()
# Reset NZF TryList, in case something was on it before it became extrapar
new_nzf.reset_try_list()
new_nzf.reset_all_try_lists()
self.add_parfile(new_nzf)
self.extrapars[parset] = extrapars_sorted
blocks_already = blocks_already + int_conv(new_nzf.blocks)
logging.info('Prospectively added %s repair blocks to %s', new_nzf.blocks, self.final_name)
# Reset NZO TryList
self.reset_all_try_lists()
self.reset_try_list()
def add_to_direct_unpacker(self, nzf):
""" Start or add to DirectUnpacker """
@@ -1287,6 +1296,12 @@ class NzbObject(TryList):
""" Determine amount of articles present on servers
and return (gross available, nett) bytes
"""
# Few missing articles in RAR-only job might still work
if self.bad_articles <= MAX_BAD_ARTICLES:
logging.debug('Download Quality: bad-articles=%s', self.bad_articles)
return True, 200
# Do the full check
need = 0L
pars = 0L
short = 0L
@@ -1373,6 +1388,7 @@ class NzbObject(TryList):
def get_article(self, server, servers):
article = None
nzf_remove_list = []
tried_all_articles = True
for nzf in self.files:
if nzf.deleted:
@@ -1396,6 +1412,9 @@ class NzbObject(TryList):
article = nzf.get_article(server, servers)
if article:
break
if article == None:
# None is returned by NZF when server is not tried for all articles
tried_all_articles = False
# Remove all files for which admin could not be read
for nzf in nzf_remove_list:
@@ -1406,7 +1425,8 @@ class NzbObject(TryList):
if nzf_remove_list and not self.files:
sabnzbd.NzbQueue.do.end_job(self)
if not article:
# Only add to trylist when server has been tried for all articles of all NZF's
if not article and tried_all_articles:
# No articles for this server, block for next time
self.add_to_try_list(server)
return article

View File

@@ -308,6 +308,11 @@ def process_job(nzo):
# Try to get more par files
return False
# If we don't need extra par2, we can disconnect
if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect():
# This was the last job, close server connections
sabnzbd.downloader.Downloader.do.disconnect()
# Sanitize the resulting files
if sabnzbd.WIN32:
sanitize_files_in_folder(workdir)