Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2025-12-24 16:19:31 -05:00)

Compare commits: 23 commits
| Author | SHA1 | Date |
|---|---|---|
| | bc9be3f92b | |
| | 2dc5c329c9 | |
| | 67817978f4 | |
| | e2ab8c6ce4 | |
| | f33a952536 | |
| | cc582b5321 | |
| | bdc526c91b | |
| | 52039c29b4 | |
| | 1dc4175f82 | |
| | 92f70fc177 | |
| | fd573208bd | |
| | ca9f10c12f | |
| | 49a72d0902 | |
| | 6aafe3c531 | |
| | 9e84696f96 | |
| | 120c133d7a | |
| | cf9713a4b0 | |
| | d12e9889e7 | |
| | 711a546989 | |
| | 7f78e6fac1 | |
| | 72533eefa4 | |
| | d9643d9ea8 | |
| | 2de71bb96c | |
@@ -1,5 +1,5 @@
 *******************************************
-*** This is SABnzbd 0.7.11 ***
+*** This is SABnzbd 0.7.14 ***
 *******************************************
 SABnzbd is an open-source cross-platform binary newsreader.
 It simplifies the process of downloading from Usenet dramatically,
@@ -1,3 +1,27 @@
+-------------------------------------------------------------------------------
+0.7.14Final by The SABnzbd-Team
+-------------------------------------------------------------------------------
+- Another encryption detection fix (special case)
+- Missing mini-par2 sometimes prevents the other par2 files from being downloaded.
+- Make sure even invalid RAR files are fed to unrar and handle its reporting.
+-------------------------------------------------------------------------------
+0.7.13Final by The SABnzbd-Team
+-------------------------------------------------------------------------------
+- Another encryption detection fix
+- Special option "enable_recursion" to control recursive unpacking
+- When post has just one par2 set, use wildcard so that all files are used
+- Accept partial par2 file when only one is available
+- Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved.
+-------------------------------------------------------------------------------
+0.7.12Final by The SABnzbd-Team
+-------------------------------------------------------------------------------
+- Fix issue in encryption detection
+- Don't try to "join" a single X.000 file
+- Fix memory overflow caused by very large files to be joined
+- Make name sorting of the queue case-insensitive
+- Save data to disk after changing job password or other attributes
+- Add "resume_pp" entry to Plush pull-down menu when pause_pp event is scheduled
+- Deploy "abort when completion not possible" method also in pre-download check
 -------------------------------------------------------------------------------
 0.7.11Final by The SABnzbd-Team
 -------------------------------------------------------------------------------
@@ -1,4 +1,4 @@
-SABnzbd 0.7.11
+SABnzbd 0.7.14

 -------------------------------------------------------------------------------
 0) LICENSE
PKG-INFO (4 changed lines)
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 0.7.11
-Summary: SABnzbd-0.7.11
+Version: 0.7.14
+Summary: SABnzbd-0.7.14
 Home-page: http://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
@@ -1,11 +1,12 @@
-Release Notes - SABnzbd 0.7.11
+Release Notes - SABnzbd 0.7.14
 ================================

 ## Bug fixes
-- Obfuscated file name support causes regular multi-set NZBs to verify (much) slower
-- Bad articles from some servers are accepted as valid data
-- Generic Sort fails to rename files when an extra folder level is present in the RAR files
+- Missing mini-par2 sometimes prevents the other par2 files from being downloaded
+- When unrar reports invalid RAR files, show a proper error message
+- Fix special case of unjustified encryption warning

 ## What's new in 0.7.0
@@ -1,7 +1,7 @@
 <!DOCTYPE HTML>
 <html>
 <head>
-<meta charset="utf-8" />
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
 <title>SABnzbd $version - $T('queued'): $mbleft $T('MB')</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
 <meta name="viewport" content="width=device-width, initial-scale=1.0" />
@@ -28,6 +28,7 @@
 <!--#if $have_quota#--><li><a id="reset_quota_now" class="pointer">$T('link-resetQuota')</a></li><!--#end if#-->
 <!--#if $have_rss_defined#--><li><a id="get_rss_now" class="pointer">$T('button-rssNow')</a></li><!--#end if#-->
 <!--#if $have_watched_dir#--><li><a id="get_watched_now" class="pointer">$T('sch-scan_folder')</a></li><!--#end if#-->
+<!--#if $pp_pause_event#--><li><a id="resume_pp" class="pointer">$T('sch-resume_post')</a></li><!--#end if#-->
 <li><a id="topmenu_toggle" class="pointer">$T('Plush-topMenu')</a></li>
 <li><a id="multiops_toggle" class="pointer">$T('Plush-multiOperations')</a></li>
 <li>
@@ -329,6 +329,17 @@ jQuery(function($){
         });
     });

+    // Resume Post Processing
+    $('#resume_pp').click(function() {
+        $.ajax({
+            headers: {"Cache-Control": "no-cache"},
+            type: "POST",
+            url: "tapi",
+            data: {mode:'resume_pp', apikey: $.plush.apikey},
+            success: $.plush.RefreshQueue
+        });
+    });
+
     $('#multiops_toggle').click(function(){
         if( $('#multiops_bar').is(':visible') ) { // hide
             $('#multiops_bar').hide();
@@ -1,5 +1,6 @@
 <html>
 <head>
+<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
 <title>$T('wizard-quickstart')</title>
 <link rel="stylesheet" type="text/css" href="static/style.css"/>
 <link rel="shortcut icon" href="static/images/favicon.ico" />
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2013-01-24 09:42+0000\n"
-"PO-Revision-Date: 2012-12-29 10:29+0000\n"
+"PO-Revision-Date: 2013-03-25 10:29+0000\n"
 "Last-Translator: shypike <Unknown>\n"
 "Language-Team: Dutch <nl@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2013-01-25 05:43+0000\n"
-"X-Generator: Launchpad (build 16445)\n"
+"X-Launchpad-Export-Date: 2013-03-26 05:02+0000\n"
+"X-Generator: Launchpad (build 16540)\n"

 #: SABnzbd.py:303 [Error message]
 msgid "Failed to start web-interface"
@@ -691,11 +691,11 @@ msgstr "ERROR: schrijf fout (%s)"

 #: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
 msgid "Unpacking failed, path is too long"
-msgstr ""
+msgstr "Uitpakken mislukt, bestandspad is te lang"

 #: sabnzbd/newsunpack.py:622 [Error message]
 msgid "ERROR: path too long (%s)"
-msgstr ""
+msgstr "FOUT: bestandspad is te lang (%s)"

 #: sabnzbd/newsunpack.py:631
 msgid "Unpacking failed, see log"
@@ -3241,11 +3241,11 @@ msgstr "Zend meldingen naar NotifyOSD"

 #: sabnzbd/skintext.py:560
 msgid "Notification Center"
-msgstr "Notification Center"
+msgstr "Berichtencentrum"

 #: sabnzbd/skintext.py:561
 msgid "Send notifications to Notification Center"
-msgstr "Stuur berichten naar Notification Center"
+msgstr "Stuur berichten naar het Berichtencentrum"

 #: sabnzbd/skintext.py:562
 msgid "Notification classes"
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2013-01-24 09:42+0000\n"
-"PO-Revision-Date: 2012-09-29 03:57+0000\n"
+"PO-Revision-Date: 2013-02-11 19:34+0000\n"
 "Last-Translator: lrrosa <Unknown>\n"
 "Language-Team: Brazilian Portuguese <pt_BR@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
-"X-Generator: Launchpad (build 16445)\n"
+"X-Launchpad-Export-Date: 2013-02-12 04:58+0000\n"
+"X-Generator: Launchpad (build 16491)\n"

 #: SABnzbd.py:303 [Error message]
 msgid "Failed to start web-interface"
@@ -163,10 +163,11 @@ msgstr ""
 #: sabnzbd/assembler.py:119 [Warning message]
 msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
 msgstr ""
+"ATENÇÃO: Tarefa \"%s\" cancelada por causa de arquivo RAR criptografado"

 #: sabnzbd/assembler.py:120
 msgid "Aborted, encryption detected"
-msgstr ""
+msgstr "Cancelado, criptografia detectada"

 #: sabnzbd/assembler.py:154
 msgid "%s missing"
@@ -459,7 +460,7 @@ msgstr ""

 #: sabnzbd/interface.py:889 [Abbreviation for bytes, as in GB]
 msgid "B"
-msgstr ""
+msgstr "B"

 #: sabnzbd/interface.py:1061 # sabnzbd/interface.py:1073
 msgid "Initiating restart...<br />"
@@ -694,11 +695,11 @@ msgstr "ERRO: erro de escrita (%s)"

 #: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
 msgid "Unpacking failed, path is too long"
-msgstr ""
+msgstr "Descompactação falhou, o caminho é muito extenso"

 #: sabnzbd/newsunpack.py:622 [Error message]
 msgid "ERROR: path too long (%s)"
-msgstr ""
+msgstr "ERRO: caminho muito extenso (%s)"

 #: sabnzbd/newsunpack.py:631
 msgid "Unpacking failed, see log"
@@ -897,7 +898,7 @@ msgstr "Pausando NZB duplicado \"%s\""

 #: sabnzbd/nzbstuff.py:941
 msgid "Aborted, cannot be completed"
-msgstr ""
+msgstr "Cancelado, não é possível concluir"

 #: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
 msgid "DUPLICATE"
@@ -937,7 +938,7 @@ msgstr "%s artigos tinham duplicatas não-correspondentes"

 #: sabnzbd/nzbstuff.py:1140
 msgid "%s articles were removed"
-msgstr ""
+msgstr "%s artigos foram removidos"

 #: sabnzbd/nzbstuff.py:1172 [Error message]
 msgid "Error importing %s"
@@ -1326,7 +1327,7 @@ msgstr "veja o arquivo de log"

 #: sabnzbd/postproc.py:489
 msgid "PostProcessing was aborted (%s)"
-msgstr "O pós-processamento foi interrompido (%s)"
+msgstr "O pós-processamento foi cancelado (%s)"

 #: sabnzbd/postproc.py:521 [Error message]
 msgid "Cleanup of %s failed."
@@ -2648,12 +2649,13 @@ msgstr ""

 #: sabnzbd/skintext.py:383
 msgid "Action when encrypted RAR is downloaded"
-msgstr ""
+msgstr "Ação quando RAR criptografado é baixado"

 #: sabnzbd/skintext.py:384
 msgid ""
 "In case of \"Pause\", you'll need to set a password and resume the job."
 msgstr ""
+"Em caso de \"Pausa\", você precisa definir uma senha e retomar a tarefa."

 #: sabnzbd/skintext.py:385
 msgid "Detect Duplicate Downloads"
@@ -2677,7 +2679,7 @@ msgstr "Descartar"

 #: sabnzbd/skintext.py:390 [Three way switch for encrypted posts]
 msgid "Abort"
-msgstr ""
+msgstr "Cancelar"

 #: sabnzbd/skintext.py:391
 msgid "Enable SFV-based checks"
@@ -2956,13 +2958,15 @@ msgstr "Aplicar o máximo de tentativas somente com servidores opcionais"

 #: sabnzbd/skintext.py:462
 msgid "Abort jobs that cannot be completed"
-msgstr ""
+msgstr "Cancela tarefas que não podem ser concluídas"

 #: sabnzbd/skintext.py:463
 msgid ""
 "When during download it becomes clear that too much data is missing, abort "
 "the job"
 msgstr ""
+"Quando durante o download ficar claro que muitos dados estão faltando, "
+"cancela a tarefa"

 #: sabnzbd/skintext.py:467 [Caption]
 msgid "Server configuration"
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2013-01-24 09:42+0000\n"
-"PO-Revision-Date: 2012-12-28 11:03+0000\n"
-"Last-Translator: Björn Lindh <probablyx@gmail.com>\n"
+"PO-Revision-Date: 2013-03-17 20:50+0000\n"
+"Last-Translator: Kristofer Norén <kristofer@shallowdreams.com>\n"
 "Language-Team: Swedish <sv@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
-"X-Generator: Launchpad (build 16445)\n"
+"X-Launchpad-Export-Date: 2013-03-18 05:01+0000\n"
+"X-Generator: Launchpad (build 16532)\n"

 #: SABnzbd.py:303 [Error message]
 msgid "Failed to start web-interface"
@@ -159,7 +159,7 @@ msgstr "WARNING: Paused job \"%s\" because of encrypted RAR file"

 #: sabnzbd/assembler.py:119 [Warning message]
 msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
-msgstr ""
+msgstr "Varning: avbröt jobbet %s på grund av att RAR-filen är krypterad"

 #: sabnzbd/assembler.py:120
 msgid "Aborted, encryption detected"
@@ -689,11 +689,11 @@ msgstr "FEL: skrivningsfel (%s)"

 #: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
 msgid "Unpacking failed, path is too long"
-msgstr ""
+msgstr "Uppackning misslyckades, sökvägen är för lång"

 #: sabnzbd/newsunpack.py:622 [Error message]
 msgid "ERROR: path too long (%s)"
-msgstr ""
+msgstr "FEL: sökvägen är för lång (%s)"

 #: sabnzbd/newsunpack.py:631
 msgid "Unpacking failed, see log"
@@ -894,7 +894,7 @@ msgstr "Pausar dubblett för NZB \"%s\""

 #: sabnzbd/nzbstuff.py:941
 msgid "Aborted, cannot be completed"
-msgstr ""
+msgstr "Avbrutet, kan inte slutföras"

 #: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
 msgid "DUPLICATE"
@@ -934,7 +934,7 @@ msgstr "%s artiklar hade icke-matchande dubletter"

 #: sabnzbd/nzbstuff.py:1140
 msgid "%s articles were removed"
-msgstr ""
+msgstr "%s artiklar borttagna"

 #: sabnzbd/nzbstuff.py:1172 [Error message]
 msgid "Error importing %s"
@@ -1543,7 +1543,7 @@ msgstr "Läs RSS-flöden"

 #: sabnzbd/skintext.py:65 [Config->Scheduler]
 msgid "Remove failed jobs"
-msgstr ""
+msgstr "Ta bort misslyckade jobb"

 #: sabnzbd/skintext.py:70 [Speed indicator kilobytes/sec]
 msgid "KB/s"
@@ -632,6 +632,12 @@ def _api_watched_now(name, output, kwargs):
     return report(output)


+def _api_resume_pp(name, output, kwargs):
+    """ API: accepts output """
+    PostProcessor.do.paused = False
+    return report(output)
+
+
 def _api_rss_now(name, output, kwargs):
     """ API: accepts output """
     # Run RSS scan async, because it can take a long time
@@ -795,6 +801,7 @@ _api_table = {
     'rescan' : _api_rescan,
     'eval_sort' : _api_eval_sort,
     'watched_now' : _api_watched_now,
+    'resume_pp' : _api_resume_pp,
     'rss_now' : _api_rss_now,
     'browse' : _api_browse,
     'reset_quota' : _api_reset_quota,
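Once registered in `_api_table` above, the new `resume_pp` mode is reachable through the regular HTTP API; the Plush skin posts it to the template alias `tapi` with the user's API key (see the JavaScript hunk earlier). A minimal client-side sketch, assuming a default host/port, a placeholder API key, and the usual `/sabnzbd/api` endpoint:

```python
# Hedged sketch only: host, port and API key are placeholders, and the
# /sabnzbd/api path is the conventional endpoint; the skin itself uses "tapi".
import urllib
import urllib2

params = urllib.urlencode({
    'mode': 'resume_pp',       # resumes post-processing via _api_resume_pp()
    'apikey': 'YOUR_API_KEY',  # placeholder
})
response = urllib2.urlopen('http://127.0.0.1:8080/sabnzbd/api', params)
print response.read()
```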
@@ -1594,6 +1601,7 @@ def build_header(prim, webdir=''):
     header['quota'] = to_units(BPSMeter.do.quota)
     header['have_quota'] = bool(BPSMeter.do.quota > 0.0)
     header['left_quota'] = to_units(BPSMeter.do.left)
+    header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()

     status = ''
     if Downloader.do.paused or Downloader.do.postproc:
@@ -290,9 +290,16 @@ def ParseFilePacket(f, header):
 def is_cloaked(path, names):
     """ Return True if this is likely to be a cloaked encrypted post """
     fname = unicoder(os.path.split(path)[1]).lower()
     fname = os.path.splitext(fname)[0]
     for name in names:
-        name = unicoder(name.lower())
-        if fname == name or 'password' in name:
+        name = os.path.split(name.lower())[1]
+        name, ext = os.path.splitext(unicoder(name))
+        if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and \
+           '.subs.' not in fname:
             logging.debug('File %s is probably encrypted due to RAR with same name inside this RAR', fname)
             return True
+        elif 'password' in name:
+            logging.debug('RAR %s is probably encrypted: "password" in filename %s', fname, name)
+            return True
     return False
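For illustration only, the cloaked-post heuristic above boils down to two checks: an inner `.rar` whose name is (almost) a prefix of the downloaded RAR's own name, or any inner name containing `password`. A simplified stand-alone sketch with made-up file names (the real `is_cloaked()` additionally runs names through `unicoder()` for encoding safety):

```python
# Simplified re-statement of the heuristic for illustration; file names are
# made up and the encoding handling of the real is_cloaked() is omitted.
import os

def looks_cloaked(rar_path, inner_names):
    fname = os.path.splitext(os.path.split(rar_path)[1])[0].lower()
    for name in inner_names:
        name = os.path.split(name.lower())[1]
        name, ext = os.path.splitext(name)
        if ext == '.rar' and fname.startswith(name) and \
           (len(fname) - len(name)) < 8 and '.subs.' not in fname:
            return True   # same-named RAR nested inside the RAR
        elif 'password' in name:
            return True   # "password" in an inner file name
    return False

print looks_cloaked('Example.Post.rar', ['example.post.rar'])          # True
print looks_cloaked('Example.Post.rar', ['password.txt'])              # True
print looks_cloaked('Example.Post.rar', ['readme.nfo', 'example.mkv']) # False
```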
@@ -88,6 +88,7 @@ start_paused = OptionBool('misc', 'start_paused', False)

 enable_unrar = OptionBool('misc', 'enable_unrar', True)
 enable_unzip = OptionBool('misc', 'enable_unzip', True)
+enable_recursive = OptionBool('misc', 'enable_recursive', True)
 enable_filejoin = OptionBool('misc', 'enable_filejoin', True)
 enable_tsjoin = OptionBool('misc', 'enable_tsjoin', True)
 enable_par_cleanup = OptionBool('misc', 'enable_par_cleanup', True)
@@ -461,6 +461,12 @@
         retry_job(kwargs.get('job'), kwargs.get('nzbfile'))
         raise dcRaiser(self.__root, kwargs)

+    @cherrypy.expose
+    def robots_txt(self):
+        """ Keep web crawlers out """
+        cherrypy.response.headers['Content-Type'] = 'text/plain'
+        return 'User-agent: *\nDisallow: /\n'
+

 #------------------------------------------------------------------------------
 class NzoPage(object):
@@ -1218,7 +1224,8 @@ SPECIAL_BOOL_LIST = \
     'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
     'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
     'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
-    'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
+    'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb', 'enable_recursive'
+
     )
 SPECIAL_VALUE_LIST = \
     ( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',
@@ -26,6 +26,7 @@ import subprocess
 import logging
 from time import time
 import binascii
+import shutil

 import sabnzbd
 from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
@@ -233,7 +234,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
         nzo.set_action_line()


-    if rerun:
+    if rerun and (cfg.enable_recursive() or new_ts or new_joins):
         z, y = unpack_magic(nzo, workdir, workdir_complete, dele, one_folder,
                             xjoinables, xzips, xrars, xts, depth)
         if z:
@@ -289,7 +290,6 @@ def get_seq_number(name):
         match, set, num = match_ts(name)
     else:
         num = tail[1:]
-    assert isinstance(num, str)
     if num.isdigit():
         return int(num)
     else:
@@ -300,6 +300,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
         when succesful, delete originals
     """
     newfiles = []
+    bufsize = 24*1024*1024

     # Create matching sets from the list of files
     joinable_sets = {}
@@ -330,6 +331,11 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
             # done, go to next set
             continue

+        # Only join when there is more than one file
+        size = len(current)
+        if size < 2:
+            continue
+
         # Prepare joined file
         filename = joinable_set
         if workdir_complete:
@@ -338,7 +344,6 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
         joined_file = open(filename, 'ab')

         # Join the segments
-        size = len(current)
         n = get_seq_number(current[0])
         seq_error = n > 1
         for joinable in current:
@@ -348,7 +353,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
             logging.debug("Processing %s", joinable)
             nzo.set_action_line(T('Joining'), '%.0f%%' % perc)
             f = open(joinable, 'rb')
-            joined_file.write(f.read())
+            shutil.copyfileobj(f, joined_file, bufsize)
             f.close()
             if delete:
                 logging.debug("Deleting %s", joinable)
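The switch from `joined_file.write(f.read())` to `shutil.copyfileobj()` is the memory-overflow fix named in the 0.7.12 changelog: each segment is streamed into the joined file in fixed-size chunks instead of being read into memory in one piece. A minimal stand-alone sketch with made-up file names:

```python
# Minimal sketch with placeholder file names; BUFSIZE mirrors the 24 MB
# buffer introduced in file_join() above.
import shutil

BUFSIZE = 24 * 1024 * 1024

with open('joined.bin', 'ab') as joined_file:
    with open('segment.001', 'rb') as f:
        # f.read() would load the whole segment into memory first;
        # copyfileobj() copies it in BUFSIZE chunks, keeping memory use bounded.
        shutil.copyfileobj(f, joined_file, BUFSIZE)
```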
@@ -649,6 +654,18 @@ def rar_extract_core(rarfile, numrars, one_folder, nzo, setname, extraction_path
             nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
             fail = 2

+        elif 'is not RAR archive' in line:
+            # Unrecognizable RAR file
+            m = re.search('(.+) is not RAR archive', line)
+            if m:
+                filename = TRANS(m.group(1)).strip()
+            else:
+                filename = '???'
+            nzo.fail_msg = T('Unusable RAR file')
+            msg = ('[%s][%s] '+ Ta('Unusable RAR file')) % (setname, latin1(filename))
+            nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
+            fail = 1
+
         else:
             m = re.search(r'^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$', line)
             if m:
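The new `elif` branch relies on a small regular expression to pull the file name out of unrar's "is not RAR archive" message. A quick stand-alone check with a made-up output line:

```python
# Made-up unrar output line, parsed the same way as in rar_extract_core().
import re

line = '/tmp/job/example.part01.rar is not RAR archive'
m = re.search('(.+) is not RAR archive', line)
filename = m.group(1).strip() if m else '???'
print filename   # -> /tmp/job/example.part01.rar
```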
@@ -789,7 +806,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
 # PAR2 Functions
 #------------------------------------------------------------------------------

-def par2_repair(parfile_nzf, nzo, workdir, setname):
+def par2_repair(parfile_nzf, nzo, workdir, setname, single):
     """ Try to repair a set, return readd or correctness """
     #set the current nzo status to "Repairing". Used in History
@@ -823,7 +840,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
     joinables, zips, rars, ts = build_filelists(workdir, None, check_rar=False)

     finished, readd, pars, datafiles, used_joinables, used_par2 = PAR_Verify(parfile, parfile_nzf, nzo,
-                                                                             setname, joinables)
+                                                                             setname, joinables, single=single)

     if finished:
         result = True
@@ -915,7 +932,7 @@ _RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
 _RE_LOADING_PAR2 = re.compile('Loading "([^"]+)"\.')
 _RE_LOADED_PAR2 = re.compile('Loaded (\d+) new packets')

-def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
+def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, single=False):
     """ Run par2 on par-set """
     if cfg.never_repair():
         cmd = 'v'
@@ -949,7 +966,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):

     # Append the wildcard for this set
     wildcard = '%s*' % os.path.join(os.path.split(parfile)[0], setname)
-    if len(globber(wildcard, None)) < 2:
+    if single or len(globber(wildcard, None)) < 2:
         # Support bizarre naming conventions
         wildcard = os.path.join(os.path.split(parfile)[0], '*')
     command.append(wildcard)
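The `single` flag implements the 0.7.13 changelog item "When post has just one par2 set, use wildcard so that all files are used": with only one set, par2 is pointed at every file in the directory rather than only at `<setname>*`. A sketch of that decision with placeholder paths (plain `glob.glob()` stands in for SABnzbd's own `globber()` helper):

```python
# Placeholder directory and set name; glob.glob() stands in for globber().
import glob
import os

workdir = '/tmp/job'       # placeholder download directory
setname = 'example.set'    # placeholder par2 set name
single = True              # the job has only one par2 set

wildcard = '%s*' % os.path.join(workdir, setname)
if single or len(glob.glob(wildcard)) < 2:
    # Let par2 consider every file in the directory, not just "<setname>*".
    wildcard = os.path.join(workdir, '*')
print wildcard
```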
@@ -1258,7 +1275,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):

     if retry_classic:
         logging.debug('Retry PAR2-joining with par2-classic')
-        return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
+        return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True, single=single)
     else:
         return finished, readd, pars, datafiles, used_joinables, used_par2
@@ -1367,10 +1384,7 @@ def build_filelists(workdir, workdir_complete, check_rar=True):

     zips = [f for f in filelist if ZIP_RE.search(f)]

-    if check_rar:
-        rars = [f for f in filelist if RAR_RE.search(f) and is_rarfile(f)]
-    else:
-        rars = [f for f in filelist if RAR_RE.search(f)]
+    rars = [f for f in filelist if RAR_RE.search(f)]

     ts = [f for f in filelist if TS_RE.search(f) and f not in joinables]
@@ -220,7 +220,7 @@ class NzbQueue(TryList):
             if save_nzo is None or nzo is save_nzo:
                 sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
                 if not nzo.futuretype:
-                    nzo.save_attribs()
+                    nzo.save_to_disk()

         sabnzbd.save_admin((QUEUE_VERSION, nzo_ids, []), QUEUE_FILE_NAME)
@@ -595,7 +595,7 @@ class NzbQueue(TryList):
             return nzo_id_pos1

         nzo.priority = priority
-        nzo.save_attribs()
+        nzo.save_to_disk()

         if nzo_id_pos1 != -1:
             del self.__nzo_list[nzo_id_pos1]
@@ -755,7 +755,7 @@ class NzbQueue(TryList):
         if not nzo.deleted:
             nzo.deleted = True
             if nzo.precheck:
-                nzo.save_attribs()
+                nzo.save_to_disk()
             # Check result
             enough, ratio = nzo.check_quality()
             if enough:
@@ -886,7 +886,7 @@ def _nzo_date_cmp(nzo1, nzo2):
     return cmp(avg_date1, avg_date2)

 def _nzo_name_cmp(nzo1, nzo2):
-    return cmp(nzo1.filename, nzo2.filename)
+    return cmp(nzo1.filename.lower(), nzo2.filename.lower())

 def _nzo_size_cmp(nzo1, nzo2):
     return cmp(nzo1.bytes, nzo2.bytes)
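Lower-casing both file names in `_nzo_name_cmp()` is the 0.7.12 "case-insensitive name sorting" fix; without it, every upper-case name sorts ahead of every lower-case one. A tiny illustration with plain strings standing in for `NzbObject` instances:

```python
# Plain strings stand in for NzbObject.filename values.
names = ['alpha.nzb', 'Beta.nzb', 'GAMMA.nzb']

print sorted(names)
# ['Beta.nzb', 'GAMMA.nzb', 'alpha.nzb']  (case-sensitive: capitals sort first)

print sorted(names, cmp=lambda a, b: cmp(a.lower(), b.lower()))
# ['alpha.nzb', 'Beta.nzb', 'GAMMA.nzb']  (case-insensitive, as in the fix)
```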
@@ -889,8 +889,8 @@ class NzbObject(TryList):
             head, vol, block = analyse_par2(fn)
             ## Is a par2file and repair mode activated
             if head and (self.repair or cfg.allow_streaming()):
-                ## Skip if mini-par2 is not complete
-                if not block and nzf.bytes_left:
+                ## Skip if mini-par2 is not complete and there are more par2 files
+                if not block and nzf.bytes_left and self.extrapars.get(head):
                     return
                 nzf.set_par2(head, vol, block)
                 ## Already got a parfile for this set?
@@ -934,7 +934,7 @@ class NzbObject(TryList):

         if file_done:
             self.remove_nzf(nzf)
-            if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
+            if not self.reuse and cfg.fail_hopeless() and not self.check_quality(99)[0]:
                 #set the nzo status to return "Queued"
                 self.status = Status.QUEUED
                 self.set_download_report()
@@ -1022,6 +1022,7 @@ class NzbObject(TryList):

     def set_pp(self, value):
         self.repair, self.unpack, self.delete = sabnzbd.pp_to_opts(value)
+        self.save_to_disk()

     @property
     def final_name_pw(self):
@@ -1054,7 +1055,7 @@ class NzbObject(TryList):
         if isinstance(name, str):
             name, self.password = scan_password(platform_encode(name))
             self.final_name = sanitize_foldername(name)
-            self.save_attribs()
+            self.save_to_disk()

     def pause(self):
         self.status = 'Paused'
@@ -1412,6 +1413,12 @@ class NzbObject(TryList):
     def repair_opts(self):
         return self.repair, self.unpack, self.delete

+    def save_to_disk(self):
+        """ Save job's admin to disk """
+        self.save_attribs()
+        if self.nzo_id:
+            sabnzbd.save_data(self, self.nzo_id, self.workpath)
+
     def save_attribs(self):
         set_attrib_file(self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name_pw_clean, self.url))
@@ -559,6 +559,7 @@ def parring(nzo, workdir):

     re_add = False
     par_error = False
+    single = len(repair_sets) == 1

     if repair_sets:
         for setname in repair_sets:
@@ -567,13 +568,14 @@
             if not verified.get(setname, False):
                 logging.info("Running repair on set %s", setname)
                 parfile_nzf = par_table[setname]
-                if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
-                    continue
-                need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
-                re_add = re_add or need_re_add
-                if not res and not need_re_add and cfg.sfv_check():
-                    res = try_sfv_check(nzo, workdir, setname)
-                verified[setname] = res
+                if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
+                    need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
+                    re_add = re_add or need_re_add
+                    if not res and not need_re_add and cfg.sfv_check():
+                        res = try_sfv_check(nzo, workdir, setname)
+                    verified[setname] = res
+                else:
+                    continue
                 par_error = par_error or not res
     else:
         logging.info("No par2 sets for %s", filename)
@@ -40,7 +40,7 @@ __SCHED = None # Global pointer to Scheduler instance

 RSSTASK_MINUTE = random.randint(0, 59)
 SCHEDULE_GUARD_FLAG = False

+PP_PAUSE_EVENT = False

 def schedule_guard():
     """ Set flag for scheduler restart """
@@ -53,6 +53,8 @@ def pp_pause():
 def pp_resume():
     PostProcessor.do.paused = False

+def pp_pause_event():
+    return PP_PAUSE_EVENT

 def init():
     """ Create the scheduler and set all required events
@@ -275,6 +277,8 @@ def sort_schedules(all_events, now=None):
 def analyse(was_paused=False):
     """ Determine what pause/resume state we would have now.
     """
+    global PP_PAUSE_EVENT
+    PP_PAUSE_EVENT = False
     paused = None
     paused_all = False
     pause_post = False
@@ -292,13 +296,16 @@
             paused = True
         elif action == 'pause_all':
             paused_all = True
+            PP_PAUSE_EVENT = True
         elif action == 'resume':
             paused = False
             paused_all = False
         elif action == 'pause_post':
             pause_post = True
+            PP_PAUSE_EVENT = True
         elif action == 'resume_post':
             pause_post = False
+            PP_PAUSE_EVENT = True
         elif action == 'speedlimit' and value!=None:
             speedlimit = int(ev[2])
         elif action == 'enable_server':
@@ -217,7 +217,8 @@ class URLGrabber(Thread):
                     self.add(url, future_nzo, when)
                 # Check if a supported archive
                 else:
-                    if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority, url=future_nzo.url)[0] == 0:
+                    if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority,
+                                                     nzbname=nzbname, url=future_nzo.url)[0] == 0:
                         NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
                     else:
                         # Not a supported filetype, not an nzb (text/html ect)
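Passing `nzbname` through to `ProcessArchiveFile()` is what makes the 0.7.13 changelog item work: a job name supplied with the "add url" API call is now honored even when the retrieved file turns out to be a ZIP archive. A hedged sketch of such a call, assuming the usual `addurl` mode and placeholder host, API key and URL:

```python
# Placeholders throughout: host, port, API key and the URL to fetch.
import urllib
import urllib2

params = urllib.urlencode({
    'mode': 'addurl',                            # the "add url" API call
    'name': 'http://example.com/some-post.zip',  # placeholder URL
    'nzbname': 'My job name',                    # now kept even for ZIP files
    'apikey': 'YOUR_API_KEY',                    # placeholder
})
print urllib2.urlopen('http://127.0.0.1:8080/sabnzbd/api?' + params).read()
```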