Compare commits

...

19 Commits

Author SHA1 Message Date
shypike
f33a952536 Update text files for 0.7.13 (again). 2013-06-13 21:35:14 +02:00
shypike
cc582b5321 Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved. 2013-06-13 21:33:00 +02:00
shypike
bdc526c91b Update text files for 0.7.13 2013-06-12 22:59:28 +02:00
shypike
52039c29b4 Accept partial par2 file when no others are available. 2013-06-12 21:03:29 +02:00
shypike
1dc4175f82 Add "special" option enable_recursion to control recursive unpacking. 2013-06-09 09:59:38 +02:00
shypike
92f70fc177 When post has just one par2-set, use full wildcard so that all files are repair and par candidates. 2013-06-01 11:21:00 +02:00
shypike
fd573208bd Fix encryption detection again. 2013-05-28 19:47:35 +02:00
shypike
ca9f10c12f Update text files for 0.7.12 2013-05-21 21:47:02 +02:00
shypike
49a72d0902 Update translations 2013-05-21 21:34:25 +02:00
shypike
6aafe3c531 Fix problem in encryption detection. 2013-05-07 21:17:06 +02:00
shypike
9e84696f96 Config and Wizard skins: fix problem with Unicode when using Chrome.
The Config skin and the Wizard were missing a proper Content-Type in <head>.
2013-04-14 12:02:33 +02:00
shypike
120c133d7a Implement robots.txt to keep web crawlers out.
This should not really be needed, because users should password-protect any
SABnzbd instance exposed to the internet.
2013-04-12 21:25:56 +02:00
shypike
cf9713a4b0 Don't try to join a set of just one file (e.g. IMAGE.000) and reduce memory usage when joining large segments.
When there is a single file called something like IMAGE.000, don't try to join it.
The joining procedure tries to read an entire segment file into memory, which may lead to a string overflow.
Use shutil.copyfileobj() with a 24 MB buffer instead.
2013-04-12 21:24:53 +02:00
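The buffered join this commit describes fits in a few lines of Python. The sketch below is illustrative only: join_segments() and BUFSIZE are stand-ins for the real file_join() logic in newsunpack.py, but the 24 MB buffer and the shutil.copyfileobj() call follow the commit.

```python
import shutil

BUFSIZE = 24 * 1024 * 1024  # 24 MB copy buffer, as described in the commit

def join_segments(segment_paths, target_path):
    """Append each segment to target_path in BUFSIZE chunks.
    Unlike f.read(), shutil.copyfileobj() never holds a whole
    segment in memory, so joining very large files stays cheap."""
    with open(target_path, 'ab') as joined_file:
        for segment in segment_paths:
            with open(segment, 'rb') as f:
                shutil.copyfileobj(f, joined_file, BUFSIZE)
```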
shypike
d12e9889e7 Make encryption detection more careful. 2013-04-09 19:30:25 +02:00
shypike
711a546989 Make name sorting of the queue case-insensitive. 2013-03-20 23:12:13 +01:00
shypike
7f78e6fac1 Save job admin to disk when setting password or changing other attributes. 2013-03-02 13:09:24 +01:00
shypike
72533eefa4 Plush: add "resume pp" entry to pulldown menu, when pause_pp event is scheduled.
The entry allows manually resuming post-processing that a schedule has paused.
2013-02-26 20:33:58 +01:00
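The same resume can also be triggered outside Plush through the resume_pp API mode added in this change set. A minimal sketch, assuming SABnzbd listens on localhost:8080 under the default /sabnzbd/ path and YOUR_API_KEY is replaced with the configured key; the mode and parameter names mirror the Plush AJAX call and the _api_resume_pp handler shown in the diffs below.

```python
import urllib
import urllib2

# Hypothetical host/port and API key; 'tapi' is the endpoint the Plush skin posts to.
data = urllib.urlencode({'mode': 'resume_pp', 'apikey': 'YOUR_API_KEY'})
response = urllib2.urlopen('http://localhost:8080/sabnzbd/tapi', data)
print response.read()
```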
shypike
d9643d9ea8 Improve RAR detection. 2013-02-25 22:08:26 +01:00
shypike
2de71bb96c Enable "abort if hopeless" for pre-check as well. 2013-02-13 20:40:31 +01:00
22 changed files with 144 additions and 62 deletions

View File

@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 0.7.11 ***
*** This is SABnzbd 0.7.13 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,

View File

@@ -1,3 +1,21 @@
-------------------------------------------------------------------------------
0.7.13Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Another encryption detection fix
- Special option "enable_recursion" to control recursive unpacking
- When post has just one par2 set, use wildcard so that all files are used
- Accept partial par2 file when only one is available
- Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved.
-------------------------------------------------------------------------------
0.7.12Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix issue in encryption detection
- Don't try to "join" a single X.000 file
- Fix memory overflow caused by very large files to be joined
- Make name sorting of the queue case-insensitive
- Save data to disk after changing job password or other attributes
- Add "resume_pp" entry to Plush pull-down menu when pause_pp event is scheduled
- Deploy "abort when completion not possible" method also in pre-download check
-------------------------------------------------------------------------------
0.7.11Final by The SABnzbd-Team
-------------------------------------------------------------------------------

View File

@@ -1,4 +1,4 @@
SABnzbd 0.7.11
SABnzbd 0.7.13
-------------------------------------------------------------------------------
0) LICENSE

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 0.7.11
Summary: SABnzbd-0.7.11
Version: 0.7.13
Summary: SABnzbd-0.7.13
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,11 +1,15 @@
Release Notes - SABnzbd 0.7.11
Release Notes - SABnzbd 0.7.13
================================
## Bug fixes
- Obfuscated file name support causes regular multi-set NZBs to verify (much) slower
- Bad articles from some servers are accepted as valid data
- Generic Sort fails to rename files when an extra folder level is present in the RAR files
- Another encryption detection fix
- When post has just one par2 set, use wildcard so that all files are used
- "addurl" api-call did not always accept nzbname parameter
## Features
- Special option "enable_recursion" to control recursive unpacking
## What's new in 0.7.0

View File

@@ -1,7 +1,7 @@
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>SABnzbd $version - $T('queued'): $mbleft $T('MB')</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />

View File

@@ -28,6 +28,7 @@
<!--#if $have_quota#--><li><a id="reset_quota_now" class="pointer">$T('link-resetQuota')</a></li><!--#end if#-->
<!--#if $have_rss_defined#--><li><a id="get_rss_now" class="pointer">$T('button-rssNow')</a></li><!--#end if#-->
<!--#if $have_watched_dir#--><li><a id="get_watched_now" class="pointer">$T('sch-scan_folder')</a></li><!--#end if#-->
<!--#if $pp_pause_event#--><li><a id="resume_pp" class="pointer">$T('sch-resume_post')</a></li><!--#end if#-->
<li><a id="topmenu_toggle" class="pointer">$T('Plush-topMenu')</a></li>
<li><a id="multiops_toggle" class="pointer">$T('Plush-multiOperations')</a></li>
<li>

View File

@@ -329,6 +329,17 @@ jQuery(function($){
});
});
// Resume Post Processing
$('#resume_pp').click(function() {
$.ajax({
headers: {"Cache-Control": "no-cache"},
type: "POST",
url: "tapi",
data: {mode:'resume_pp', apikey: $.plush.apikey},
success: $.plush.RefreshQueue
});
});
$('#multiops_toggle').click(function(){
if( $('#multiops_bar').is(':visible') ) { // hide
$('#multiops_bar').hide();

View File

@@ -1,5 +1,6 @@
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>$T('wizard-quickstart')</title>
<link rel="stylesheet" type="text/css" href="static/style.css"/>
<link rel="shortcut icon" href="static/images/favicon.ico" />

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-29 10:29+0000\n"
"PO-Revision-Date: 2013-03-25 10:29+0000\n"
"Last-Translator: shypike <Unknown>\n"
"Language-Team: Dutch <nl@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:43+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-26 05:02+0000\n"
"X-Generator: Launchpad (build 16540)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -691,11 +691,11 @@ msgstr "ERROR: schrijf fout (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uitpakken mislukt, bestandspad is te lang"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FOUT: bestandspad is te lang (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -3241,11 +3241,11 @@ msgstr "Zend meldingen naar NotifyOSD"
#: sabnzbd/skintext.py:560
msgid "Notification Center"
msgstr "Notification Center"
msgstr "Berichtencentrum"
#: sabnzbd/skintext.py:561
msgid "Send notifications to Notification Center"
msgstr "Stuur berichten naar Notification Center"
msgstr "Stuur berichten naar het Berichtencentrum"
#: sabnzbd/skintext.py:562
msgid "Notification classes"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-09-29 03:57+0000\n"
"PO-Revision-Date: 2013-02-11 19:34+0000\n"
"Last-Translator: lrrosa <Unknown>\n"
"Language-Team: Brazilian Portuguese <pt_BR@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-02-12 04:58+0000\n"
"X-Generator: Launchpad (build 16491)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -163,10 +163,11 @@ msgstr ""
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
"ATENÇÃO: Tarefa \"%s\" cancelada por causa de arquivo RAR criptografado"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
msgstr ""
msgstr "Cancelado, criptografia detectada"
#: sabnzbd/assembler.py:154
msgid "%s missing"
@@ -459,7 +460,7 @@ msgstr ""
#: sabnzbd/interface.py:889 [Abbreviation for bytes, as in GB]
msgid "B"
msgstr ""
msgstr "B"
#: sabnzbd/interface.py:1061 # sabnzbd/interface.py:1073
msgid "Initiating restart...<br />"
@@ -694,11 +695,11 @@ msgstr "ERRO: erro de escrita (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Descompactação falhou, o caminho é muito extenso"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "ERRO: caminho muito extenso (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -897,7 +898,7 @@ msgstr "Pausando NZB duplicado \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Cancelado, não é possível concluir"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -937,7 +938,7 @@ msgstr "%s artigos tinham duplicatas não-correspondentes"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artigos foram removidos"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1326,7 +1327,7 @@ msgstr "veja o arquivo de log"
#: sabnzbd/postproc.py:489
msgid "PostProcessing was aborted (%s)"
msgstr "O pós-processamento foi interrompido (%s)"
msgstr "O pós-processamento foi cancelado (%s)"
#: sabnzbd/postproc.py:521 [Error message]
msgid "Cleanup of %s failed."
@@ -2648,12 +2649,13 @@ msgstr ""
#: sabnzbd/skintext.py:383
msgid "Action when encrypted RAR is downloaded"
msgstr ""
msgstr "Ação quando RAR criptografado é baixado"
#: sabnzbd/skintext.py:384
msgid ""
"In case of \"Pause\", you'll need to set a password and resume the job."
msgstr ""
"Em caso de \"Pausa\", você precisa definir uma senha e retomar a tarefa."
#: sabnzbd/skintext.py:385
msgid "Detect Duplicate Downloads"
@@ -2677,7 +2679,7 @@ msgstr "Descartar"
#: sabnzbd/skintext.py:390 [Three way switch for encrypted posts]
msgid "Abort"
msgstr ""
msgstr "Cancelar"
#: sabnzbd/skintext.py:391
msgid "Enable SFV-based checks"
@@ -2956,13 +2958,15 @@ msgstr "Aplicar o máximo de tentativas somente com servidores opcionais"
#: sabnzbd/skintext.py:462
msgid "Abort jobs that cannot be completed"
msgstr ""
msgstr "Cancela tarefas que não podem ser concluídas"
#: sabnzbd/skintext.py:463
msgid ""
"When during download it becomes clear that too much data is missing, abort "
"the job"
msgstr ""
"Quando durante o download ficar claro que muitos dados estão faltando, "
"cancela a tarefa"
#: sabnzbd/skintext.py:467 [Caption]
msgid "Server configuration"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-28 11:03+0000\n"
"Last-Translator: Björn Lindh <probablyx@gmail.com>\n"
"PO-Revision-Date: 2013-03-17 20:50+0000\n"
"Last-Translator: Kristofer Norén <kristofer@shallowdreams.com>\n"
"Language-Team: Swedish <sv@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-18 05:01+0000\n"
"X-Generator: Launchpad (build 16532)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -159,7 +159,7 @@ msgstr "WARNING: Paused job \"%s\" because of encrypted RAR file"
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
msgstr "Varning: avbröt jobbet %s på grund av att RAR-filen är krypterad"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
@@ -689,11 +689,11 @@ msgstr "FEL: skrivningsfel (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uppackning misslyckades, sökvägen är för lång"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FEL: sökvägen är för lång (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -894,7 +894,7 @@ msgstr "Pausar dubblett för NZB \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Avbrutet, kan inte slutföras"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -934,7 +934,7 @@ msgstr "%s artiklar hade icke-matchande dubletter"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artiklar borttagna"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1543,7 +1543,7 @@ msgstr "Läs RSS-flöden"
#: sabnzbd/skintext.py:65 [Config->Scheduler]
msgid "Remove failed jobs"
msgstr ""
msgstr "Ta bort misslyckade jobb"
#: sabnzbd/skintext.py:70 [Speed indicator kilobytes/sec]
msgid "KB/s"

View File

@@ -632,6 +632,12 @@ def _api_watched_now(name, output, kwargs):
return report(output)
def _api_resume_pp(name, output, kwargs):
""" API: accepts output """
PostProcessor.do.paused = False
return report(output)
def _api_rss_now(name, output, kwargs):
""" API: accepts output """
# Run RSS scan async, because it can take a long time
@@ -795,6 +801,7 @@ _api_table = {
'rescan' : _api_rescan,
'eval_sort' : _api_eval_sort,
'watched_now' : _api_watched_now,
'resume_pp' : _api_resume_pp,
'rss_now' : _api_rss_now,
'browse' : _api_browse,
'reset_quota' : _api_reset_quota,
@@ -1594,6 +1601,7 @@ def build_header(prim, webdir=''):
header['quota'] = to_units(BPSMeter.do.quota)
header['have_quota'] = bool(BPSMeter.do.quota > 0.0)
header['left_quota'] = to_units(BPSMeter.do.left)
header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()
status = ''
if Downloader.do.paused or Downloader.do.postproc:

View File

@@ -290,9 +290,15 @@ def ParseFilePacket(f, header):
def is_cloaked(path, names):
""" Return True if this is likely to be a cloaked encrypted post """
fname = unicoder(os.path.split(path)[1]).lower()
fname = os.path.splitext(fname)[0]
for name in names:
name = unicoder(name.lower())
if fname == name or 'password' in name:
name = os.path.split(name.lower())[1]
name, ext = os.path.splitext(unicoder(name))
if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8:
logging.debug('File %s is probably encrypted due to RAR with same name inside this RAR', fname)
return True
elif 'password' in name:
logging.debug('RAR %s is probably encrypted: "password" in filename %s', fname, name)
return True
return False

View File

@@ -88,6 +88,7 @@ start_paused = OptionBool('misc', 'start_paused', False)
enable_unrar = OptionBool('misc', 'enable_unrar', True)
enable_unzip = OptionBool('misc', 'enable_unzip', True)
enable_recursive = OptionBool('misc', 'enable_recursive', True)
enable_filejoin = OptionBool('misc', 'enable_filejoin', True)
enable_tsjoin = OptionBool('misc', 'enable_tsjoin', True)
enable_par_cleanup = OptionBool('misc', 'enable_par_cleanup', True)

View File

@@ -461,6 +461,12 @@ class MainPage(object):
retry_job(kwargs.get('job'), kwargs.get('nzbfile'))
raise dcRaiser(self.__root, kwargs)
@cherrypy.expose
def robots_txt(self):
""" Keep web crawlers out """
cherrypy.response.headers['Content-Type'] = 'text/plain'
return 'User-agent: *\nDisallow: /\n'
#------------------------------------------------------------------------------
class NzoPage(object):
@@ -1218,7 +1224,8 @@ SPECIAL_BOOL_LIST = \
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb', 'enable_recursive'
)
SPECIAL_VALUE_LIST = \
( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',

View File

@@ -26,6 +26,7 @@ import subprocess
import logging
from time import time
import binascii
import shutil
import sabnzbd
from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
@@ -233,7 +234,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
nzo.set_action_line()
if rerun:
if rerun and (cfg.enable_recursive() or new_ts or new_joins):
z, y = unpack_magic(nzo, workdir, workdir_complete, dele, one_folder,
xjoinables, xzips, xrars, xts, depth)
if z:
@@ -289,7 +290,6 @@ def get_seq_number(name):
match, set, num = match_ts(name)
else:
num = tail[1:]
assert isinstance(num, str)
if num.isdigit():
return int(num)
else:
@@ -300,6 +300,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
when succesful, delete originals
"""
newfiles = []
bufsize = 24*1024*1024
# Create matching sets from the list of files
joinable_sets = {}
@@ -330,6 +331,11 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
# done, go to next set
continue
# Only join when there is more than one file
size = len(current)
if size < 2:
continue
# Prepare joined file
filename = joinable_set
if workdir_complete:
@@ -338,7 +344,6 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
joined_file = open(filename, 'ab')
# Join the segments
size = len(current)
n = get_seq_number(current[0])
seq_error = n > 1
for joinable in current:
@@ -348,7 +353,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
logging.debug("Processing %s", joinable)
nzo.set_action_line(T('Joining'), '%.0f%%' % perc)
f = open(joinable, 'rb')
joined_file.write(f.read())
shutil.copyfileobj(f, joined_file, bufsize)
f.close()
if delete:
logging.debug("Deleting %s", joinable)
@@ -789,7 +794,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
# PAR2 Functions
#------------------------------------------------------------------------------
def par2_repair(parfile_nzf, nzo, workdir, setname):
def par2_repair(parfile_nzf, nzo, workdir, setname, single):
""" Try to repair a set, return readd or correctness """
#set the current nzo status to "Repairing". Used in History
@@ -823,7 +828,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
joinables, zips, rars, ts = build_filelists(workdir, None, check_rar=False)
finished, readd, pars, datafiles, used_joinables, used_par2 = PAR_Verify(parfile, parfile_nzf, nzo,
setname, joinables)
setname, joinables, single=single)
if finished:
result = True
@@ -915,7 +920,7 @@ _RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
_RE_LOADING_PAR2 = re.compile('Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile('Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, single=False):
""" Run par2 on par-set """
if cfg.never_repair():
cmd = 'v'
@@ -949,7 +954,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
# Append the wildcard for this set
wildcard = '%s*' % os.path.join(os.path.split(parfile)[0], setname)
if len(globber(wildcard, None)) < 2:
if single or len(globber(wildcard, None)) < 2:
# Support bizarre naming conventions
wildcard = os.path.join(os.path.split(parfile)[0], '*')
command.append(wildcard)
@@ -1258,7 +1263,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
if retry_classic:
logging.debug('Retry PAR2-joining with par2-classic')
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True, single=single)
else:
return finished, readd, pars, datafiles, used_joinables, used_par2

View File

@@ -220,7 +220,7 @@ class NzbQueue(TryList):
if save_nzo is None or nzo is save_nzo:
sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
if not nzo.futuretype:
nzo.save_attribs()
nzo.save_to_disk()
sabnzbd.save_admin((QUEUE_VERSION, nzo_ids, []), QUEUE_FILE_NAME)
@@ -595,7 +595,7 @@ class NzbQueue(TryList):
return nzo_id_pos1
nzo.priority = priority
nzo.save_attribs()
nzo.save_to_disk()
if nzo_id_pos1 != -1:
del self.__nzo_list[nzo_id_pos1]
@@ -755,7 +755,7 @@ class NzbQueue(TryList):
if not nzo.deleted:
nzo.deleted = True
if nzo.precheck:
nzo.save_attribs()
nzo.save_to_disk()
# Check result
enough, ratio = nzo.check_quality()
if enough:
@@ -886,7 +886,7 @@ def _nzo_date_cmp(nzo1, nzo2):
return cmp(avg_date1, avg_date2)
def _nzo_name_cmp(nzo1, nzo2):
return cmp(nzo1.filename, nzo2.filename)
return cmp(nzo1.filename.lower(), nzo2.filename.lower())
def _nzo_size_cmp(nzo1, nzo2):
return cmp(nzo1.bytes, nzo2.bytes)

View File

@@ -889,8 +889,8 @@ class NzbObject(TryList):
head, vol, block = analyse_par2(fn)
## Is a par2file and repair mode activated
if head and (self.repair or cfg.allow_streaming()):
## Skip if mini-par2 is not complete
if not block and nzf.bytes_left:
## Skip if mini-par2 is not complete and there are more par2 files
if not block and nzf.bytes_left and self.extrapars.get(head):
return
nzf.set_par2(head, vol, block)
## Already got a parfile for this set?
@@ -934,7 +934,7 @@ class NzbObject(TryList):
if file_done:
self.remove_nzf(nzf)
if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
if not self.reuse and cfg.fail_hopeless() and not self.check_quality(99)[0]:
#set the nzo status to return "Queued"
self.status = Status.QUEUED
self.set_download_report()
@@ -1022,6 +1022,7 @@ class NzbObject(TryList):
def set_pp(self, value):
self.repair, self.unpack, self.delete = sabnzbd.pp_to_opts(value)
self.save_to_disk()
@property
def final_name_pw(self):
@@ -1054,7 +1055,7 @@ class NzbObject(TryList):
if isinstance(name, str):
name, self.password = scan_password(platform_encode(name))
self.final_name = sanitize_foldername(name)
self.save_attribs()
self.save_to_disk()
def pause(self):
self.status = 'Paused'
@@ -1412,6 +1413,12 @@ class NzbObject(TryList):
def repair_opts(self):
return self.repair, self.unpack, self.delete
def save_to_disk(self):
""" Save job's admin to disk """
self.save_attribs()
if self.nzo_id:
sabnzbd.save_data(self, self.nzo_id, self.workpath)
def save_attribs(self):
set_attrib_file(self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name_pw_clean, self.url))

View File

@@ -559,6 +559,7 @@ def parring(nzo, workdir):
re_add = False
par_error = False
single = len(repair_sets) == 1
if repair_sets:
for setname in repair_sets:
@@ -569,7 +570,7 @@ def parring(nzo, workdir):
parfile_nzf = par_table[setname]
if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
continue
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)

View File

@@ -40,7 +40,7 @@ __SCHED = None # Global pointer to Scheduler instance
RSSTASK_MINUTE = random.randint(0, 59)
SCHEDULE_GUARD_FLAG = False
PP_PAUSE_EVENT = False
def schedule_guard():
""" Set flag for scheduler restart """
@@ -53,6 +53,8 @@ def pp_pause():
def pp_resume():
PostProcessor.do.paused = False
def pp_pause_event():
return PP_PAUSE_EVENT
def init():
""" Create the scheduler and set all required events
@@ -275,6 +277,8 @@ def sort_schedules(all_events, now=None):
def analyse(was_paused=False):
""" Determine what pause/resume state we would have now.
"""
global PP_PAUSE_EVENT
PP_PAUSE_EVENT = False
paused = None
paused_all = False
pause_post = False
@@ -292,13 +296,16 @@ def analyse(was_paused=False):
paused = True
elif action == 'pause_all':
paused_all = True
PP_PAUSE_EVENT = True
elif action == 'resume':
paused = False
paused_all = False
elif action == 'pause_post':
pause_post = True
PP_PAUSE_EVENT = True
elif action == 'resume_post':
pause_post = False
PP_PAUSE_EVENT = True
elif action == 'speedlimit' and value!=None:
speedlimit = int(ev[2])
elif action == 'enable_server':

View File

@@ -217,7 +217,8 @@ class URLGrabber(Thread):
self.add(url, future_nzo, when)
# Check if a supported archive
else:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority, url=future_nzo.url)[0] == 0:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority,
nzbname=nzbname, url=future_nzo.url)[0] == 0:
NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
else:
# Not a supported filetype, not an nzb (text/html ect)