Compare commits

...

20 Commits

Author SHA1 Message Date
SABnzbd Automation
f13339d64d Automatic translation update 2017-09-05 19:53:42 +00:00
Safihre
fa5b44be99 Update text files for 2.3.0Alpha2 2017-09-05 16:36:12 +02:00
Safihre
08bc7c5b9d Use NZO_LOCK to limit possible dir conflicts when adding NZBs
Hopefully resolving problems with possible overlapping directory names and import errors.
2017-09-05 16:07:43 +02:00
Safihre
d9b5dd549a Revert "Only allow 1 NZB to be added at the same time"
Could result in deadlock for some reason.
This reverts commit e64df8ed60.
2017-09-05 16:05:48 +02:00
Safihre
8ec53a3bce Giving up on elegant solutions to prevent stalling, just use a check
This stupid nightmare still wasn't fixed.
The proper solution creates slowdown on many systems because it's not efficient enough. Instead just do a check every 90 seconds if there's stalled stuff.
2017-09-05 16:05:19 +02:00
Safihre
0aac9a5e5c Correct display of download percentage
No need to calculate in javascript, we get it from API
2017-09-05 16:03:58 +02:00
Safihre
11a880d040 MultiPar shows 'PAR File(s) Incomplete' on verification success
When there are Par2-files with very similar filenames in the folder.
2017-09-05 13:21:48 +02:00
Safihre
67b66beb13 Only count really extra files during Multipar 2017-09-04 22:43:59 +02:00
Safihre
1da633442b Correct byte counts when retrying 2017-09-04 22:43:01 +02:00
Safihre
13de40881e Correct display of Forced items 2017-09-04 21:44:00 +02:00
Safihre
1c6419ea65 Revert "Paused status for individual download trumps Force priority"
Oops, I was wrong. This reverts commit d06c11673f.
2017-09-04 20:51:05 +02:00
Safihre
a2adeffc1a Correctly count all bytes and drop 'missing' use 'mbmissing'
Reporting number of missing articles makes 0 sense, it's the MB that matters.
2017-09-04 20:39:15 +02:00
Safihre
71fa3c544a Also show scanning of extra files for par2cmdline/tbb 2017-09-04 14:54:41 +02:00
Safihre
b739fb7f07 Do not count overhead-bytes for NZO statistics 2017-09-04 14:18:55 +02:00
Safihre
860728beae Show counter when Multipar is scanning other files in the directory 2017-09-04 13:58:50 +02:00
Safihre
1bdbf1c6a8 Show different icon when priority is Force 2017-09-04 10:59:48 +02:00
Safihre
abbed4cd77 Show missing articles starting at 2% 2017-09-04 10:59:18 +02:00
Safihre
d06c11673f Paused status for individual download trumps Force priority 2017-09-04 10:57:02 +02:00
Safihre
67d67f5ff6 Correct typo in reject_duplicate_files
Closes #1021
2017-09-03 17:07:41 +02:00
Safihre
2386d65b84 Extrapars could be empty if not +Repair set for job 2017-09-03 16:54:07 +02:00
18 changed files with 154 additions and 106 deletions

View File

@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 2.3.0Alpha1
Version: 2.3.0Alpha2
Summary: SABnzbd-2.3.0Alpha1
Home-page: https://sabnzbd.org
Author: The SABnzbd Team

View File

@@ -1,6 +1,18 @@
Release Notes - SABnzbd 2.3.0 Alpha 1
Release Notes - SABnzbd 2.3.0 Alpha 2
=========================================================
## Changes and bugfixes since 2.3.0 Alpha 1
- Specials Config page could not be loaded
- Crash when adding new jobs
- Further stalling-detection improvements
- Crash when a job was set to only Download
- Display of download progress and missing data improved
- Retried jobs would show incorrect download progress
- Different icon for downloads with Force priority
- Show progress during verification of extra files
- API: 'missing' field removed, use 'mbmissing'
## Changes since 2.2.1
- Option to limit Servers to specific Categories removed
- Improved par2 handling and obfuscated files detection

View File

@@ -88,7 +88,7 @@
<tr class="queue-item">
<td>
<a href="#" data-bind="click: pauseToggle, attr: { 'title': pausedStatus() ? '$T('link-resume')' : '$T('link-pause')' }">
<span class="hover-button glyphicon" data-bind="css: { 'glyphicon-play': pausedStatus(), 'glyphicon-pause': !pausedStatus() }"></span>
<span class="hover-button glyphicon" data-bind="css: queueIcon"></span>
</a>
</td>
<td class="name">
@@ -121,8 +121,8 @@
</td>
<td class="progress-indicator">
<div class="progress">
<div class="progress-bar progress-bar-info" data-bind="attr: { 'style': 'width: ' + percentageRounded() + '; background-color: ' + progressColor() + ';' }">
<strong data-bind="text: percentageRounded"></strong>
<div class="progress-bar progress-bar-info" data-bind="attr: { 'style': 'width: ' + percentage() + '%; background-color: ' + progressColor() + ';' }">
<strong data-bind="text: percentage() + '%'"></strong>
<i data-bind="text: missingText"></i>
</div>
<span data-bind="text: progressText"></span>

View File

@@ -480,9 +480,10 @@ function QueueModel(parent, data) {
self.status = ko.observable(data.status);
self.isGrabbing = ko.observable(data.status == 'Grabbing' || data.avg_age == '-')
self.totalMB = ko.observable(parseFloat(data.mb));
self.remainingMB = ko.observable(parseFloat(data.mbleft));
self.remainingMB = ko.observable(parseFloat(data.mbleft))
self.missingMB = ko.observable(parseFloat(data.mbmissing))
self.percentage = ko.observable(parseInt(data.percentage))
self.avg_age = ko.observable(data.avg_age)
self.missing = ko.observable(parseFloat(data.mbmissing))
self.direct_unpack = ko.observable(data.direct_unpack)
self.category = ko.observable(data.cat);
self.priority = ko.observable(parent.priorityName[data.priority]);
@@ -502,8 +503,8 @@ function QueueModel(parent, data) {
if(self.status() == 'Checking') {
return '#58A9FA'
}
// Check for missing data, the value is arbitrary! (3%)
if(self.missing()/self.totalMB() > 0.03) {
// Check for missing data, the value is arbitrary! (2%)
if(self.missingMB()/self.totalMB() > 0.02) {
return '#F8A34E'
}
// Set to grey, only when not Force download
@@ -514,22 +515,16 @@ function QueueModel(parent, data) {
return '';
});
// MB's and percentages
self.downloadedMB = ko.computed(function() {
return(self.totalMB() - self.remainingMB()).toFixed(0);
});
self.percentageRounded = ko.pureComputed(function() {
return fixPercentages(((self.downloadedMB() / self.totalMB()) * 100).toFixed(2))
})
// MB's
self.progressText = ko.pureComputed(function() {
return self.downloadedMB() + " MB / " + (self.totalMB() * 1).toFixed(0) + " MB";
return (self.totalMB() - self.remainingMB()).toFixed(0) + " MB / " + (self.totalMB() * 1).toFixed(0) + " MB";
})
// Texts
self.missingText= ko.pureComputed(function() {
// Check for missing data, the value is arbitrary! (3%)
if(self.missing()/self.totalMB() > 0.03) {
return self.missing().toFixed(0) + ' MB ' + glitterTranslate.misingArt
// Check for missing data, the value is arbitrary! (1%)
if(self.missingMB()/self.totalMB() > 0.01) {
return self.missingMB().toFixed(0) + ' MB ' + glitterTranslate.misingArt
}
return;
})
@@ -546,6 +541,18 @@ function QueueModel(parent, data) {
return rewriteTime(self.timeLeft());
});
// Icon to better show force-priority
self.queueIcon = ko.computed(function() {
// Force comes first
if(self.priority() == 2) {
return 'glyphicon-forward'
}
if(self.pausedStatus()) {
return 'glyphicon-play'
}
return 'glyphicon-pause'
})
// Extra queue column
self.extraText = ko.pureComputed(function() {
// Picked anything?
@@ -581,8 +588,9 @@ function QueueModel(parent, data) {
self.isGrabbing(data.status == 'Grabbing' || data.avg_age == '-')
self.totalMB(parseFloat(data.mb));
self.remainingMB(parseFloat(data.mbleft));
self.missingMB(parseFloat(data.mbmissing))
self.percentage(parseInt(data.percentage))
self.avg_age(data.avg_age)
self.missing(parseFloat(data.mbmissing))
self.direct_unpack(data.direct_unpack)
self.category(data.cat);
self.priority(parent.priorityName[data.priority]);

View File

@@ -527,6 +527,7 @@ tbody>tr>td:last-child {
}
.hover-button.glyphicon-play,
.hover-button.glyphicon-forward,
.hover-button.glyphicon-stop {
opacity: 1;
color: #474747;

View File

@@ -56,7 +56,7 @@
</td>
<td class="download-title" <!--#if $rating_enable#-->style="width:35%"<!--#end if#-->>
<a href="nzb/$slot.nzo_id/" title="$T('status'): $T('post-'+$slot.status)<br/>$T('nzo-age'): $slot.avg_age<br/><!--#if $slot.missing#-->$T('missingArt'): $slot.missing<!--#end if#-->">$slot.filename.replace('.', '.&#8203;').replace('_', '_&#8203;')<!--#if $slot.password#--> / $slot.password<!--#end if#--></a>
<a href="nzb/$slot.nzo_id/" title="$T('status'): $T('post-'+$slot.status)<br/>$T('nzo-age'): $slot.avg_age<br/><!--#if $slot.mbmissing!="0.00"#-->$T('missingArt'): $slot.mbmissing $T('MB')<!--#end if#-->">$slot.filename.replace('.', '.&#8203;').replace('_', '_&#8203;')<!--#if $slot.password#--> / $slot.password<!--#end if#--></a>
</td>
<!--#if $rating_enable#-->

View File

@@ -12,7 +12,7 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=ASCII\n"
"Content-Transfer-Encoding: 7bit\n"
"POT-Creation-Date: 2017-09-02 15:39+W. Europe Daylight Time\n"
"POT-Creation-Date: 2017-09-04 13:27+W. Europe Daylight Time\n"
"Generated-By: pygettext.py 1.5\n"
@@ -987,6 +987,10 @@ msgstr ""
msgid "Checking"
msgstr ""
#: sabnzbd/newsunpack.py
msgid "Checking extra files"
msgstr ""
#: sabnzbd/newsunpack.py [Error message]
msgid "Python script \"%s\" does not have execute (+x) permission set"
msgstr ""

View File

@@ -8,13 +8,13 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2017-09-02 13:51+0000\n"
"PO-Revision-Date: 2017-05-23 11:46+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>\n"
"PO-Revision-Date: 2017-09-03 14:07+0000\n"
"Last-Translator: Steffen Bærø <steffen.baro@gmail.com>\n"
"Language-Team: Norwegian Bokmal <nb@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2017-09-03 05:49+0000\n"
"X-Launchpad-Export-Date: 2017-09-04 05:51+0000\n"
"X-Generator: Launchpad (build 18446)\n"
#: SABnzbd.py [Error message]
@@ -47,11 +47,11 @@ msgstr "par2-binærfil... IKKE funnet!"
#: SABnzbd.py [Error message] # SABnzbd.py [Error message]
msgid "Verification and repair will not be possible."
msgstr ""
msgstr "Verifikasjon og reparasjon vil ikke være mulig."
#: SABnzbd.py [Error message]
msgid "MultiPar binary... NOT found!"
msgstr ""
msgstr "MultiPar-binærfil... IKKE funnet!"
#: SABnzbd.py [Warning message]
msgid "Your UNRAR version is %s, we recommend version %s or higher.<br />"
@@ -60,7 +60,7 @@ msgstr ""
#: SABnzbd.py [Error message]
msgid "Downloads will not unpacked."
msgstr ""
msgstr "Nedlastinger vil ikke blir pakket ut."
#: SABnzbd.py [Error message]
msgid "unrar binary... NOT found"

View File

@@ -8,13 +8,13 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2017-09-02 13:51+0000\n"
"PO-Revision-Date: 2017-09-01 09:37+0000\n"
"PO-Revision-Date: 2017-09-04 13:02+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>\n"
"Language-Team: Dutch <nl@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2017-09-03 05:49+0000\n"
"X-Launchpad-Export-Date: 2017-09-05 04:59+0000\n"
"X-Generator: Launchpad (build 18446)\n"
#: SABnzbd.py [Error message]
@@ -1037,7 +1037,7 @@ msgstr ""
#: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
msgid "Fetching %s blocks..."
msgstr "Ophalen van %s blokken..."
msgstr "%s herstelblokken downloaden..."
#: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
msgid "Fetching"
@@ -1054,7 +1054,7 @@ msgstr "Repareren"
#: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
msgid "[%s] Repaired in %s"
msgstr "[%s] Reparatie in %s"
msgstr "[%s] Gerepareerd in %s"
#: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
#: sabnzbd/newsunpack.py
@@ -1815,7 +1815,7 @@ msgstr "Script uitvoeren..."
#: sabnzbd/skintext.py [PP status]
msgid "Fetching extra blocks..."
msgstr "Extra blokken ophalen..."
msgstr "Extra herstelblokken downloaden..."
#: sabnzbd/skintext.py [PP status]
msgid "Quick Check..."

View File

@@ -1342,7 +1342,6 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
slot['size'] = format_bytes(bytes)
slot['sizeleft'] = format_bytes(bytesleft)
slot['percentage'] = "%s" % (int(((mb - mbleft) / mb) * 100)) if mb != mbleft else '0'
slot['missing'] = pnfo.missing
slot['mbmissing'] = "%.2f" % (pnfo.bytes_missing / MEBI)
slot['direct_unpack'] = pnfo.direct_unpack
if not output:

View File

@@ -253,7 +253,7 @@ keep_awake = OptionBool('misc', 'keep_awake', True)
win_menu = OptionBool('misc', 'win_menu', True)
allow_incomplete_nzb = OptionBool('misc', 'allow_incomplete_nzb', False)
enable_bonjour = OptionBool('misc', 'enable_bonjour', True)
reject_duplicate_files = OptionBool('misc', 'recject_duplicate_files', False)
reject_duplicate_files = OptionBool('misc', 'reject_duplicate_files', False)
max_art_opt = OptionBool('misc', 'max_art_opt', False)
use_pickle = OptionBool('misc', 'use_pickle', False)
ipv6_hosting = OptionBool('misc', 'ipv6_hosting', False)

View File

@@ -27,7 +27,7 @@ REC_RAR_VERSION = 500
PNFO = namedtuple('PNFO', 'repair unpack delete script nzo_id filename password unpackstrht '
'msgid category url bytes_left bytes avg_stamp avg_date finished_files '
'active_files queued_files status priority missing bytes_missing direct_unpack')
'active_files queued_files status priority bytes_missing direct_unpack')
QNFO = namedtuple('QNFO', 'bytes bytes_left bytes_left_previous_page list q_size_list q_fullsize')

View File

@@ -20,7 +20,6 @@
##############################################################################
from threading import RLock, Condition
DIR_LOCK = RLock()
DOWNLOADER_CV = Condition(RLock())
def synchronized(lock):

View File

@@ -30,7 +30,6 @@ import threading
import sabnzbd
from sabnzbd.constants import SCAN_FILE_NAME, VALID_ARCHIVES
import sabnzbd.utils.rarfile as rarfile
from sabnzbd.decorators import synchronized, DIR_LOCK
from sabnzbd.encoding import platform_encode
from sabnzbd.newsunpack import is_sevenfile, SevenZip
import sabnzbd.nzbstuff as nzbstuff
@@ -341,7 +340,6 @@ class DirScanner(threading.Thread):
def scan(self):
""" Do one scan of the watched folder """
@synchronized(DIR_LOCK)
def run_dir(folder, catdir):
try:
files = os.listdir(folder)

View File

@@ -35,7 +35,7 @@ import stat
from urlparse import urlparse
import sabnzbd
from sabnzbd.decorators import synchronized, DIR_LOCK
from sabnzbd.decorators import synchronized
from sabnzbd.constants import DEFAULT_PRIORITY, FUTURE_Q_FOLDER, JOB_ADMIN, \
GIGI, MEBI, DEF_CACHE_LIMIT
import sabnzbd.config as config
@@ -864,6 +864,7 @@ def get_cache_limit():
##############################################################################
# Locked directory operations to avoid problems with simultaneous add/remove
##############################################################################
DIR_LOCK = threading.RLock()
@synchronized(DIR_LOCK)
def get_unique_path(dirpath, n=0, create_dir=True):

View File

@@ -1451,10 +1451,12 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
# Target files
m = TARGET_RE.match(line)
if m:
nzo.status = Status.VERIFYING
verifynum += 1
if verifytotal == 0 or verifynum < verifytotal:
verifynum += 1
nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal))
nzo.status = Status.VERIFYING
else:
nzo.set_action_line(T('Checking extra files'), '%02d' % verifynum)
# Remove redundant extra files that are just duplicates of original ones
if 'duplicate data blocks' in line:
@@ -1696,9 +1698,9 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
# It prints the filename couple of times, so we save it to check
# 'datafiles' will not contain all data-files in par-set, only the
# ones that got scanned, but it's ouput is never used!
nzo.status = Status.VERIFYING
if line.split()[1] in ('Damaged', 'Found'):
verifynum += 1
nzo.status = Status.VERIFYING
datafiles.append(TRANS(m.group(1)))
# Set old_name in case it was misnamed and found (not when we are joining)
@@ -1725,6 +1727,13 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
# Need to reset it to avoid collision
old_name = None
else:
# It's scanning extra files that don't belong to the set
# For damaged files it reports the filename twice, so only then start
verifynum += 1
if verifynum / 2 > verifytotal:
nzo.set_action_line(T('Checking extra files'), '%02d' % verifynum)
if joinables:
# Find out if a joinable file has been used for joining
uline = unicoder(line)
@@ -1755,8 +1764,10 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
nzo.status = Status.FAILED
# Result of verification
elif line.startswith('All Files Complete'):
elif line.startswith('All Files Complete') or line.endswith('PAR File(s) Incomplete'):
# Completed without damage!
# 'PAR File(s) Incomplete' is reported for success
# but when there are very similar filenames in the folder
msg = T('[%s] Verified in %s, all files correct') % (unicoder(setname), format_time_string(time.time() - start))
nzo.set_unpack_info('Repair', msg)
logging.info('Verified in %s, all files correct',

View File

@@ -817,8 +817,8 @@ class NzbQueue(object):
n = 0
for nzo in self.__nzo_list:
if nzo.status not in (Status.PAUSED, Status.CHECKING):
b_left = nzo.remaining()
if nzo.status not in (Status.PAUSED, Status.CHECKING) or nzo.priority == TOP_PRIORITY:
b_left = nzo.remaining
bytes_total += nzo.bytes
bytes_left += b_left
q_size += 1
@@ -840,7 +840,7 @@ class NzbQueue(object):
bytes_left = 0
for nzo in self.__nzo_list:
if nzo.status != 'Paused':
bytes_left += nzo.remaining()
bytes_left += nzo.remaining
return bytes_left
def is_empty(self):
@@ -864,6 +864,17 @@ class NzbQueue(object):
if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
empty.append(nzo)
# Stall prevention by checking if all servers are in the trylist
# This is a CPU-cheaper alternative to prevent stalling
if len(nzo.try_list) == sabnzbd.downloader.Downloader.do.server_nr:
# Maybe the NZF's need a reset too?
for nzf in nzo.files:
if len(nzf.try_list) == sabnzbd.downloader.Downloader.do.server_nr:
# We do not want to reset all article trylists, they are good
nzf.reset_try_list()
# Reset main trylist, minimal performance impact
nzo.reset_try_list()
for nzo in empty:
self.end_job(nzo)

View File

@@ -48,7 +48,7 @@ from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername
int_conv, set_permissions, format_time_string, long_path, trim_win_path, \
fix_unix_encoding, calc_age, is_obfuscated_filename, get_ext, get_filename, \
get_unique_filename, renamer
from sabnzbd.decorators import synchronized, DIR_LOCK
from sabnzbd.decorators import synchronized
import sabnzbd.config as config
import sabnzbd.cfg as cfg
from sabnzbd.encoding import unicoder, platform_encode
@@ -300,17 +300,7 @@ class NzbFile(TryList):
self.articles.remove(article)
if found:
self.bytes_left -= article.bytes
# To keep counter correct for pre-check
if self.nzo.precheck:
self.nzo.bytes_downloaded += article.bytes
# The parent trylist is filled to the top, maybe too soon
# This is a CPU-cheaper alternative to prevent stalling
if len(self.nzo.try_list) == sabnzbd.downloader.Downloader.do.server_nr:
self.reset_try_list()
self.nzo.reset_try_list()
self.nzo.bytes_tried += article.bytes
return (not self.articles)
def set_par2(self, setname, vol, blocks):
@@ -566,9 +556,9 @@ class NzbParser(xml.sax.handler.ContentHandler):
##############################################################################
NzbObjectSaver = (
'filename', 'work_name', 'final_name', 'created', 'bytes', 'bytes_downloaded', 'bytes_tried',
'repair', 'unpack', 'delete', 'script', 'cat', 'url', 'groups', 'avg_date', 'md5of16k',
'partable', 'extrapars', 'md5packs', 'files', 'files_table', 'finished_files', 'status',
'avg_bps_freq', 'avg_bps_total', 'priority', 'saved_articles', 'nzo_id',
'bytes_missing', 'repair', 'unpack', 'delete', 'script', 'cat', 'url', 'groups', 'avg_date',
'md5of16k', 'partable', 'extrapars', 'md5packs', 'files', 'files_table', 'finished_files',
'status', 'avg_bps_freq', 'avg_bps_total', 'priority', 'saved_articles', 'nzo_id',
'futuretype', 'deleted', 'parsed', 'action_line', 'unpack_info', 'fail_msg', 'nzo_info',
'custom_name', 'password', 'next_save', 'save_timeout', 'encrypted', 'bad_articles',
'duplicate', 'oversized', 'precheck', 'incomplete', 'reuse', 'meta',
@@ -581,7 +571,7 @@ NZO_LOCK = threading.RLock()
class NzbObject(TryList):
@synchronized(DIR_LOCK)
@synchronized(NZO_LOCK)
def __init__(self, filename, pp, script, nzb=None,
futuretype=False, cat=None, url=None,
priority=NORMAL_PRIORITY, nzbname=None, status="Queued", nzo_info=None,
@@ -626,6 +616,7 @@ class NzbObject(TryList):
self.bytes = 0 # Original bytesize
self.bytes_downloaded = 0 # Downloaded byte
self.bytes_tried = 0 # Which bytes did we try
self.bytes_missing = 0 # Bytes missing
self.bad_articles = 0 # How many bad (non-recoverable) articles
self.repair = r # True if we want to repair this set
self.unpack = u # True if we want to unpack this set
@@ -953,7 +944,6 @@ class NzbObject(TryList):
self.servercount[serverid] += bytes
else:
self.servercount[serverid] = bytes
self.bytes_downloaded += bytes
@synchronized(NZO_LOCK)
def remove_nzf(self, nzf):
@@ -990,10 +980,13 @@ class NzbObject(TryList):
# Don't postpone header-only-files, to extract all possible md5of16k
if setname and block and matcher(lparset, setname.lower()):
xnzf.set_par2(parset, vol, block)
# Don't postpone if all par2 are desired and should be kept
if not(cfg.enable_all_par() and not cfg.enable_par_cleanup()):
# Don't postpone if all par2 are desired and should be kept or not repairing
if self.repair and not(cfg.enable_all_par() and not cfg.enable_par_cleanup()):
self.extrapars[parset].append(xnzf)
self.files.remove(xnzf)
# Already count these bytes as done
self.bytes_tried += xnzf.bytes_left
# Sort the sets
for setname in self.extrapars:
self.extrapars[parset].sort(key=lambda x: x.blocks)
@@ -1017,8 +1010,7 @@ class NzbObject(TryList):
logging.debug('Got md5pack for set %s', nzf.setname)
self.md5packs[setname] = pack
# See if we need to postpone some pars
if self.repair:
self.postpone_pars(nzf, setname)
self.postpone_pars(nzf, setname)
else:
# Need to add this to the set, first need setname
for setname in self.md5packs:
@@ -1129,6 +1121,14 @@ class NzbObject(TryList):
if self.bad_articles > MAX_BAD_ARTICLES:
self.abort_direct_unpacker()
# Increase missing bytes counter
self.bytes_missing += article.bytes
else:
# Increase counter of actually finished bytes
self.bytes_downloaded += article.bytes
# All the bytes that were tried
self.bytes_tried += article.bytes
post_done = False
if not self.files:
post_done = True
@@ -1162,43 +1162,54 @@ class NzbObject(TryList):
# Looking for the longest name first, minimizes the chance on a mismatch
files.sort(lambda x, y: len(y) - len(x))
nzfs = self.files[:]
# The NZFs should be tried shortest first, to improve the chance on a proper match
nzfs = self.files[:]
nzfs.sort(lambda x, y: len(x.subject) - len(y.subject))
# Flag files from NZB that already exist as finished
for filename in files[:]:
for nzf in nzfs:
subject = sanitize_filename(name_extractor(nzf.subject))
filepath = os.path.join(wdir, filename)
if (nzf.filename == filename) or (subject == filename) or (filename in subject):
nzf.filename = filename
nzf.bytes_left = 0
if sabnzbd.par2file.is_parfile(filepath):
self.handle_par2(nzf, os.path.join(wdir, filename))
self.remove_nzf(nzf)
nzfs.remove(nzf)
files.remove(filename)
# Set bytes correctly
self.bytes_tried += nzf.bytes
self.bytes_downloaded += nzf.bytes
# Process par2 files
filepath = os.path.join(wdir, filename)
if sabnzbd.par2file.is_parfile(filepath):
self.handle_par2(nzf, filepath)
break
# Create an NZF for each remaining existing file
try:
# Create an NZF for each remaining existing file
for filename in files:
tup = os.stat(os.path.join(wdir, filename))
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
nzf = NzbFile(tm, '"%s"' % filename, [], tup.st_size, self)
self.files.append(nzf)
self.files_table[nzf.nzf_id] = nzf
self.bytes += nzf.bytes
nzf.filename = filename
nzf.bytes_left = 0
# Create NZB's using basic information
filepath = os.path.join(wdir, filename)
if sabnzbd.par2file.is_parfile(filepath):
self.handle_par2(nzf, filepath)
self.remove_nzf(nzf)
logging.info('File %s added to job', filename)
if os.path.exists(filepath):
tup = os.stat(filepath)
tm = datetime.datetime.fromtimestamp(tup.st_mtime)
nzf = NzbFile(tm, filename, [], tup.st_size, self)
self.files.append(nzf)
self.files_table[nzf.nzf_id] = nzf
nzf.filename = filename
self.remove_nzf(nzf)
# Set bytes correctly
self.bytes += nzf.bytes
self.bytes_tried += nzf.bytes
self.bytes_downloaded += nzf.bytes
# Process par2 files
if sabnzbd.par2file.is_parfile(filepath):
self.handle_par2(nzf, filepath)
logging.info('Existing file %s added to job', filename)
except:
logging.debug('Bad NZB handling')
logging.info("Traceback: ", exc_info=True)
@@ -1288,10 +1299,12 @@ class NzbObject(TryList):
def add_parfile(self, parfile):
""" Add parfile to the files to be downloaded
Resets trylist just to be sure
Adjust download-size accordingly
"""
if not parfile.completed and parfile not in self.files and parfile not in self.finished_files:
parfile.reset_all_try_lists()
self.files.append(parfile)
self.bytes_tried -= parfile.bytes_left
@synchronized(NZO_LOCK)
def remove_parset(self, setname):
@@ -1613,6 +1626,11 @@ class NzbObject(TryList):
else:
return None
@property
def remaining(self):
""" Return remaining bytes """
return self.bytes - self.bytes_tried
@synchronized(NZO_LOCK)
def purge_data(self, keep_basic=False, del_files=False):
""" Remove all admin info, 'keep_basic' preserves attribs and nzb """
@@ -1643,16 +1661,6 @@ class NzbObject(TryList):
except:
pass
def remaining(self):
""" Return remaining bytes """
bytes_par2 = 0
for _set in self.extrapars:
for nzf in self.extrapars[_set]:
bytes_par2 += nzf.bytes_left
# Subtract PAR2 sets and already downloaded bytes
bytes_left = self.bytes - self.bytes_tried - bytes_par2
return bytes_left
def gather_info(self, full=False):
queued_files = []
if full:
@@ -1662,17 +1670,11 @@ class NzbObject(TryList):
if not nzf.completed and nzf not in self.files:
queued_files.append(nzf)
return PNFO(self.repair, self.unpack, self.delete, self.script,
self.nzo_id, self.final_name_labeled, self.password, {},
'', self.cat, self.url,
self.remaining(), self.bytes, self.avg_stamp, self.avg_date,
self.finished_files if full else [],
self.files if full else [],
queued_files,
self.status, self.priority,
self.nzo_info.get('missing_articles', 0),
self.bytes_tried - self.bytes_downloaded,
self.direct_unpacker.get_formatted_stats() if self.direct_unpacker else 0)
return PNFO(self.repair, self.unpack, self.delete, self.script, self.nzo_id,
self.final_name_labeled, self.password, {}, '', self.cat, self.url, self.remaining,
self.bytes, self.avg_stamp, self.avg_date, self.finished_files if full else [],
self.files if full else [], queued_files, self.status, self.priority,
self.bytes_missing, self.direct_unpacker.get_formatted_stats() if self.direct_unpacker else 0)
def get_nzf_by_id(self, nzf_id):
if nzf_id in self.files_table:
@@ -1808,6 +1810,8 @@ class NzbObject(TryList):
self.renames = {}
if self.bad_articles is None:
self.bad_articles = 0
if self.bytes_missing is None:
self.bytes_missing = 0
if self.bytes_tried is None:
# Fill with old info
self.bytes_tried = 0