Compare commits

...

30 Commits

Author SHA1 Message Date
shypike
bc9be3f92b Update text files for 0.7.14 2013-07-07 13:12:15 +02:00
shypike
2dc5c329c9 Fix special case of unjustified encryption warning. 2013-07-07 13:11:01 +02:00
shypike
67817978f4 Missing mini-par2 sometimes prevents the other par2 files from being downloaded. 2013-06-27 20:41:57 +02:00
shypike
e2ab8c6ce4 Make sure even invalid RAR files are fed to unrar and handle its reporting. 2013-06-27 20:29:04 +02:00
shypike
f33a952536 Update text files for 0.7.13 (again). 2013-06-13 21:35:14 +02:00
shypike
cc582b5321 Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved. 2013-06-13 21:33:00 +02:00
shypike
bdc526c91b Update text files for 0.7.13 2013-06-12 22:59:28 +02:00
shypike
52039c29b4 Accept partial par2 file when no others are available. 2013-06-12 21:03:29 +02:00
shypike
1dc4175f82 Add "special" option enable_recursion to control recursive unpacking. 2013-06-09 09:59:38 +02:00
shypike
92f70fc177 When post has just one par2-set, use full wildcard so that all files are repair and par candidates. 2013-06-01 11:21:00 +02:00
shypike
fd573208bd Fix encryption detection again. 2013-05-28 19:47:35 +02:00
shypike
ca9f10c12f Update text files for 0.7.12 2013-05-21 21:47:02 +02:00
shypike
49a72d0902 Update translations 2013-05-21 21:34:25 +02:00
shypike
6aafe3c531 Fix problem in encryption detection. 2013-05-07 21:17:06 +02:00
shypike
9e84696f96 Config and Wizard skins: fix problem with Unicode when using Chrome.
The Config skin and the Wizard were missing a proper Content-Type in <head>.
2013-04-14 12:02:33 +02:00
shypike
120c133d7a Implement robots.txt to keep web crawlers out.
Should not really be needed, because users should password-protect any
SABnzbd instance exposed to internet.
2013-04-12 21:25:56 +02:00
shypike
cf9713a4b0 Don't try to join a set of just one file (e.g. IMAGE.000) and reduce memory usage when joining large segments.
When there is a single file called something like IMAGE.000, don't try to join it.
The joining procedure tries to read an entire segment file into memory, this may lead to a string overflow.
Use shutil.copyfileobj() with a 24 MB buffer instead.
2013-04-12 21:24:53 +02:00
shypike
d12e9889e7 Make encryption detection more careful. 2013-04-09 19:30:25 +02:00
shypike
711a546989 Make name sorting of the queue case-insensitive. 2013-03-20 23:12:13 +01:00
shypike
7f78e6fac1 Save job admin to disk when setting password or changing other attributes. 2013-03-02 13:09:24 +01:00
shypike
72533eefa4 Plush: add "resume pp" entry to pulldown menu, when pause_pp event is scheduled.
The option allows manual resume of a scheduled paused post-processing.
2013-02-26 20:33:58 +01:00
shypike
d9643d9ea8 Improve RAR detection. 2013-02-25 22:08:26 +01:00
shypike
2de71bb96c Enable "abort if hopeless" for pre-check as well. 2013-02-13 20:40:31 +01:00
shypike
07be241112 Update text files for 0.7.11 2013-02-07 20:21:59 +01:00
shypike
fbdd264653 Update translations 2013-02-07 19:38:48 +01:00
shypike
a8bc793132 Fix regression error that could result in slow verification of NZBs with multiple rar/par sets.
The detection of obfuscated files failed, causing each par2 run to parse all files.
2013-02-07 19:27:03 +01:00
shypike
6bce423f23 Fix "Sorting" file renaming for RAR files that contain an extra folder level.
collapse_folder() should convert the filename list according to the renaming it does.
It's also its task to remove the _unpack_ markers from those filenames.
Rename this function to rename_and_collapse().
2013-02-04 22:47:53 +01:00
shypike
51cabf85a1 Plush: default refresh-rate now 4 sec and multi-ops bar visible. 2013-02-04 21:43:12 +01:00
shypike
38fdc2c7c8 Show warning when decoder encounters I/O-errors. 2013-02-03 12:25:33 +01:00
shypike
b91a2af9df Some badly encoded articles can be accepted as valid data.
Regression error since 0.7.9 (commit ccfbb07).
decoder.decode() no longer ran into an exception when no valid data was found.
Solved now by using the "found" flag.
2013-02-02 14:57:28 +01:00
29 changed files with 217 additions and 105 deletions

View File

@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 0.7.10 ***
*** This is SABnzbd 0.7.14 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,

View File

@@ -1,3 +1,34 @@
-------------------------------------------------------------------------------
0.7.14Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Another encryption detection fix (special case)
- Missing mini-par2 sometimes prevents the other par2 files from being downloaded.
- Make sure even invalid RAR files are fed to unrar and handle its reporting.
-------------------------------------------------------------------------------
0.7.13Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Another encryption detection fix
- Special option "enable_recursion" to control recursive unpacking
- When post has just one par2 set, use wildcard so that all files are used
- Accept partial par2 file when only one is available
- Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved.
-------------------------------------------------------------------------------
0.7.12Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix issue in encryption detection
- Don't try to "join" a single X.000 file
- Fix memory overflow caused by very large files to be joined
- Make name sorting of the queue case-insensitive
- Save data to disk after changing job password or other attributes
- Add "resume_pp" entry to Plush pull-down menu when pause_pp event is scheduled
- Deploy "abort when completion not possible" method also in pre-download check
-------------------------------------------------------------------------------
0.7.11Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Bad articles from some servers were accepted as valid data
- Show warning when the decoder encounters I/O errors
- Generic Sort failed to rename files when an extra folder level was present in the RAR files
- Obfuscated file name support caused regular NZBs to verify slower
-------------------------------------------------------------------------------
0.7.10Final by The SABnzbd-Team
-------------------------------------------------------------------------------

View File

@@ -1,4 +1,4 @@
SABnzbd 0.7.10
SABnzbd 0.7.14
-------------------------------------------------------------------------------
0) LICENSE

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 0.7.10
Summary: SABnzbd-0.7.10
Version: 0.7.14
Summary: SABnzbd-0.7.14
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,18 +1,12 @@
Release Notes - SABnzbd 0.7.10
Release Notes - SABnzbd 0.7.14
================================
## Features
- Try to repair rar/par sets with obfuscated names
- Display next RSS scan moment in Cfg->RSS
## Bug fixes
- Disable obsolete newzbin bookmark readout
- Show speed when downloading in Forced mode while paused
- Plush History icons repair and unpack were swapped
- Reset "today" byte counters at midnight even when idle
- An email about a failed job should say that the download failed
- Report errors coming from fully encrypted rar files
- Accept %fn (as well as %fn.%ext) as end parameter in sorting strings.
- Missing mini-par2 sometimes prevents the other par2 files from being downloaded
- When unrar reports invalid RAR files, show a proper error message
- Fix special case of unjustified encryption warning
## What's new in 0.7.0

View File

@@ -1,5 +1,5 @@
#
# Copyright 2008-2012 The SABnzbd-Team <team@sabnzbd.org>
# Copyright 2008-2013 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License

View File

@@ -1,7 +1,7 @@
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>SABnzbd $version - $T('queued'): $mbleft $T('MB')</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />

View File

@@ -10,7 +10,7 @@
</table>
<div class="sabnzbd_logo main_sprite_container sprite_sabnzbdplus_logo"></div>
<p><strong>SABnzbd $T('version'):</strong> $version</p>
<p><small>Copyright (C) 2008-2012, The SABnzbd Team &lt;team@sabnzbd.org&gt;</small></p>
<p><small>Copyright (C) 2008-2013, The SABnzbd Team &lt;team@sabnzbd.org&gt;</small></p>
<p><small>$T('yourRights')</small></p>
</div>

View File

@@ -28,6 +28,7 @@
<!--#if $have_quota#--><li><a id="reset_quota_now" class="pointer">$T('link-resetQuota')</a></li><!--#end if#-->
<!--#if $have_rss_defined#--><li><a id="get_rss_now" class="pointer">$T('button-rssNow')</a></li><!--#end if#-->
<!--#if $have_watched_dir#--><li><a id="get_watched_now" class="pointer">$T('sch-scan_folder')</a></li><!--#end if#-->
<!--#if $pp_pause_event#--><li><a id="resume_pp" class="pointer">$T('sch-resume_post')</a></li><!--#end if#-->
<li><a id="topmenu_toggle" class="pointer">$T('Plush-topMenu')</a></li>
<li><a id="multiops_toggle" class="pointer">$T('Plush-multiOperations')</a></li>
<li>

View File

@@ -8,7 +8,7 @@ jQuery(function($){
// ***************************************************************
// Plush defaults
refreshRate: $.cookie('plushRefreshRate') ? $.cookie('plushRefreshRate') : 30, // refresh rate in seconds
refreshRate: $.cookie('plushRefreshRate') ? $.cookie('plushRefreshRate') : 4, // refresh rate in seconds
containerWidth: $.cookie('plushContainerWidth') ? $.cookie('plushContainerWidth') : '100%', // width of all elements on page
queuePerPage: $.cookie('plushQueuePerPage') ? $.cookie('plushQueuePerPage') : 5, // pagination - nzbs per page
histPerPage: $.cookie('plushHistPerPage') ? $.cookie('plushHistPerPage') : 5, // pagination - nzbs per page
@@ -16,7 +16,7 @@ jQuery(function($){
confirmDeleteHistory: $.cookie('plushConfirmDeleteHistory') == 0 ? false : true, // confirm history nzb removal
blockRefresh: $.cookie('plushBlockRefresh') == 0 ? false : true, // prevent refreshing when hovering queue
failedOnly: $.cookie('plushFailedOnly') == 1 ? 1 : 0, // prevent refreshing when hovering queue
multiOps: $.cookie('plushMultiOps') == 1 ? true : false, // is multi-operations menu visible in queue
multiOps: $.cookie('plushMultiOps') == 0 ? false : true, // is multi-operations menu visible in queue
noTopMenu: $.cookie('plushNoTopMenu') == 1 ? false : true, // is top menu visible
multiOpsChecks: null,
@@ -329,6 +329,17 @@ jQuery(function($){
});
});
// Resume Post Processing
$('#resume_pp').click(function() {
$.ajax({
headers: {"Cache-Control": "no-cache"},
type: "POST",
url: "tapi",
data: {mode:'resume_pp', apikey: $.plush.apikey},
success: $.plush.RefreshQueue
});
});
$('#multiops_toggle').click(function(){
if( $('#multiops_bar').is(':visible') ) { // hide
$('#multiops_bar').hide();

View File

@@ -1,5 +1,6 @@
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>$T('wizard-quickstart')</title>
<link rel="stylesheet" type="text/css" href="static/style.css"/>
<link rel="shortcut icon" href="static/images/favicon.ico" />

View File

@@ -14,8 +14,8 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-29 05:39+0000\n"
"X-Generator: Launchpad (build 16451)\n"
"X-Launchpad-Export-Date: 2013-01-30 04:51+0000\n"
"X-Generator: Launchpad (build 16455)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"

View File

@@ -14,8 +14,8 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-29 05:39+0000\n"
"X-Generator: Launchpad (build 16451)\n"
"X-Launchpad-Export-Date: 2013-01-30 04:51+0000\n"
"X-Generator: Launchpad (build 16455)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-29 10:29+0000\n"
"PO-Revision-Date: 2013-03-25 10:29+0000\n"
"Last-Translator: shypike <Unknown>\n"
"Language-Team: Dutch <nl@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:43+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-26 05:02+0000\n"
"X-Generator: Launchpad (build 16540)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -691,11 +691,11 @@ msgstr "ERROR: schrijf fout (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uitpakken mislukt, bestandspad is te lang"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FOUT: bestandspad is te lang (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -3241,11 +3241,11 @@ msgstr "Zend meldingen naar NotifyOSD"
#: sabnzbd/skintext.py:560
msgid "Notification Center"
msgstr "Notification Center"
msgstr "Berichtencentrum"
#: sabnzbd/skintext.py:561
msgid "Send notifications to Notification Center"
msgstr "Stuur berichten naar Notification Center"
msgstr "Stuur berichten naar het Berichtencentrum"
#: sabnzbd/skintext.py:562
msgid "Notification classes"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-09-29 03:57+0000\n"
"PO-Revision-Date: 2013-02-11 19:34+0000\n"
"Last-Translator: lrrosa <Unknown>\n"
"Language-Team: Brazilian Portuguese <pt_BR@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-02-12 04:58+0000\n"
"X-Generator: Launchpad (build 16491)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -163,10 +163,11 @@ msgstr ""
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
"ATENÇÃO: Tarefa \"%s\" cancelada por causa de arquivo RAR criptografado"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
msgstr ""
msgstr "Cancelado, criptografia detectada"
#: sabnzbd/assembler.py:154
msgid "%s missing"
@@ -459,7 +460,7 @@ msgstr ""
#: sabnzbd/interface.py:889 [Abbreviation for bytes, as in GB]
msgid "B"
msgstr ""
msgstr "B"
#: sabnzbd/interface.py:1061 # sabnzbd/interface.py:1073
msgid "Initiating restart...<br />"
@@ -694,11 +695,11 @@ msgstr "ERRO: erro de escrita (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Descompactação falhou, o caminho é muito extenso"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "ERRO: caminho muito extenso (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -897,7 +898,7 @@ msgstr "Pausando NZB duplicado \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Cancelado, não é possível concluir"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -937,7 +938,7 @@ msgstr "%s artigos tinham duplicatas não-correspondentes"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artigos foram removidos"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1326,7 +1327,7 @@ msgstr "veja o arquivo de log"
#: sabnzbd/postproc.py:489
msgid "PostProcessing was aborted (%s)"
msgstr "O pós-processamento foi interrompido (%s)"
msgstr "O pós-processamento foi cancelado (%s)"
#: sabnzbd/postproc.py:521 [Error message]
msgid "Cleanup of %s failed."
@@ -2648,12 +2649,13 @@ msgstr ""
#: sabnzbd/skintext.py:383
msgid "Action when encrypted RAR is downloaded"
msgstr ""
msgstr "Ação quando RAR criptografado é baixado"
#: sabnzbd/skintext.py:384
msgid ""
"In case of \"Pause\", you'll need to set a password and resume the job."
msgstr ""
"Em caso de \"Pausa\", você precisa definir uma senha e retomar a tarefa."
#: sabnzbd/skintext.py:385
msgid "Detect Duplicate Downloads"
@@ -2677,7 +2679,7 @@ msgstr "Descartar"
#: sabnzbd/skintext.py:390 [Three way switch for encrypted posts]
msgid "Abort"
msgstr ""
msgstr "Cancelar"
#: sabnzbd/skintext.py:391
msgid "Enable SFV-based checks"
@@ -2956,13 +2958,15 @@ msgstr "Aplicar o máximo de tentativas somente com servidores opcionais"
#: sabnzbd/skintext.py:462
msgid "Abort jobs that cannot be completed"
msgstr ""
msgstr "Cancela tarefas que não podem ser concluídas"
#: sabnzbd/skintext.py:463
msgid ""
"When during download it becomes clear that too much data is missing, abort "
"the job"
msgstr ""
"Quando durante o download ficar claro que muitos dados estão faltando, "
"cancela a tarefa"
#: sabnzbd/skintext.py:467 [Caption]
msgid "Server configuration"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-28 11:03+0000\n"
"Last-Translator: Björn Lindh <probablyx@gmail.com>\n"
"PO-Revision-Date: 2013-03-17 20:50+0000\n"
"Last-Translator: Kristofer Norén <kristofer@shallowdreams.com>\n"
"Language-Team: Swedish <sv@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-18 05:01+0000\n"
"X-Generator: Launchpad (build 16532)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -159,7 +159,7 @@ msgstr "WARNING: Paused job \"%s\" because of encrypted RAR file"
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
msgstr "Varning: avbröt jobbet %s på grund av att RAR-filen är krypterad"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
@@ -689,11 +689,11 @@ msgstr "FEL: skrivningsfel (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uppackning misslyckades, sökvägen är för lång"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FEL: sökvägen är för lång (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -894,7 +894,7 @@ msgstr "Pausar dubblett för NZB \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Avbrutet, kan inte slutföras"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -934,7 +934,7 @@ msgstr "%s artiklar hade icke-matchande dubletter"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artiklar borttagna"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1543,7 +1543,7 @@ msgstr "Läs RSS-flöden"
#: sabnzbd/skintext.py:65 [Config->Scheduler]
msgid "Remove failed jobs"
msgstr ""
msgstr "Ta bort misslyckade jobb"
#: sabnzbd/skintext.py:70 [Speed indicator kilobytes/sec]
msgid "KB/s"

View File

@@ -632,6 +632,12 @@ def _api_watched_now(name, output, kwargs):
return report(output)
def _api_resume_pp(name, output, kwargs):
""" API: accepts output """
PostProcessor.do.paused = False
return report(output)
def _api_rss_now(name, output, kwargs):
""" API: accepts output """
# Run RSS scan async, because it can take a long time
@@ -795,6 +801,7 @@ _api_table = {
'rescan' : _api_rescan,
'eval_sort' : _api_eval_sort,
'watched_now' : _api_watched_now,
'resume_pp' : _api_resume_pp,
'rss_now' : _api_rss_now,
'browse' : _api_browse,
'reset_quota' : _api_reset_quota,
@@ -1594,6 +1601,7 @@ def build_header(prim, webdir=''):
header['quota'] = to_units(BPSMeter.do.quota)
header['have_quota'] = bool(BPSMeter.do.quota > 0.0)
header['left_quota'] = to_units(BPSMeter.do.left)
header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()
status = ''
if Downloader.do.paused or Downloader.do.postproc:

View File

@@ -290,9 +290,16 @@ def ParseFilePacket(f, header):
def is_cloaked(path, names):
""" Return True if this is likely to be a cloaked encrypted post """
fname = unicoder(os.path.split(path)[1]).lower()
fname = os.path.splitext(fname)[0]
for name in names:
name = unicoder(name.lower())
if fname == name or 'password' in name:
name = os.path.split(name.lower())[1]
name, ext = os.path.splitext(unicoder(name))
if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and \
'.subs.' not in fname:
logging.debug('File %s is probably encrypted due to RAR with same name inside this RAR', fname)
return True
elif 'password' in name:
logging.debug('RAR %s is probably encrypted: "password" in filename %s', fname, name)
return True
return False

View File

@@ -88,6 +88,7 @@ start_paused = OptionBool('misc', 'start_paused', False)
enable_unrar = OptionBool('misc', 'enable_unrar', True)
enable_unzip = OptionBool('misc', 'enable_unzip', True)
enable_recursive = OptionBool('misc', 'enable_recursive', True)
enable_filejoin = OptionBool('misc', 'enable_filejoin', True)
enable_tsjoin = OptionBool('misc', 'enable_tsjoin', True)
enable_par_cleanup = OptionBool('misc', 'enable_par_cleanup', True)

View File

@@ -105,7 +105,9 @@ class Decoder(Thread):
found = True
except IOError, e:
logme = Ta('Decoding %s failed') % art_id
logging.info(logme)
logging.warning(logme)
logging.info("Traceback: ", exc_info = True)
sabnzbd.downloader.Downloader.do.pause()
article.fetcher = None
@@ -262,7 +264,10 @@ def decode(article, data):
except IndexError:
raise BadYenc()
decoded_data = '\r\n'.join(data)
if found:
decoded_data = '\r\n'.join(data)
else:
raise BadYenc()
#Deal with yenc encoded posts
elif (ybegin and yend):

View File

@@ -461,6 +461,12 @@ class MainPage(object):
retry_job(kwargs.get('job'), kwargs.get('nzbfile'))
raise dcRaiser(self.__root, kwargs)
@cherrypy.expose
def robots_txt(self):
""" Keep web crawlers out """
cherrypy.response.headers['Content-Type'] = 'text/plain'
return 'User-agent: *\nDisallow: /\n'
#------------------------------------------------------------------------------
class NzoPage(object):
@@ -1218,7 +1224,8 @@ SPECIAL_BOOL_LIST = \
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb', 'enable_recursive'
)
SPECIAL_VALUE_LIST = \
( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',

View File

@@ -73,8 +73,10 @@ def safe_lower(txt):
#------------------------------------------------------------------------------
def globber(path, pattern='*'):
""" Do a glob.glob(), disabling the [] pattern in 'path' """
return glob.glob(os.path.join(path, pattern).replace('[', '[[]'))
if pattern:
return glob.glob(os.path.join(path, pattern).replace('[', '[[]'))
else:
return glob.glob(path.replace('[', '[[]'))
#------------------------------------------------------------------------------
def cat_to_opts(cat, pp=None, script=None, priority=None):

View File

@@ -26,6 +26,7 @@ import subprocess
import logging
from time import time
import binascii
import shutil
import sabnzbd
from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
@@ -233,7 +234,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
nzo.set_action_line()
if rerun:
if rerun and (cfg.enable_recursive() or new_ts or new_joins):
z, y = unpack_magic(nzo, workdir, workdir_complete, dele, one_folder,
xjoinables, xzips, xrars, xts, depth)
if z:
@@ -289,7 +290,6 @@ def get_seq_number(name):
match, set, num = match_ts(name)
else:
num = tail[1:]
assert isinstance(num, str)
if num.isdigit():
return int(num)
else:
@@ -300,6 +300,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
when succesful, delete originals
"""
newfiles = []
bufsize = 24*1024*1024
# Create matching sets from the list of files
joinable_sets = {}
@@ -330,6 +331,11 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
# done, go to next set
continue
# Only join when there is more than one file
size = len(current)
if size < 2:
continue
# Prepare joined file
filename = joinable_set
if workdir_complete:
@@ -338,7 +344,6 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
joined_file = open(filename, 'ab')
# Join the segments
size = len(current)
n = get_seq_number(current[0])
seq_error = n > 1
for joinable in current:
@@ -348,7 +353,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
logging.debug("Processing %s", joinable)
nzo.set_action_line(T('Joining'), '%.0f%%' % perc)
f = open(joinable, 'rb')
joined_file.write(f.read())
shutil.copyfileobj(f, joined_file, bufsize)
f.close()
if delete:
logging.debug("Deleting %s", joinable)
@@ -649,6 +654,18 @@ def rar_extract_core(rarfile, numrars, one_folder, nzo, setname, extraction_path
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
fail = 2
elif 'is not RAR archive' in line:
# Unrecognizable RAR file
m = re.search('(.+) is not RAR archive', line)
if m:
filename = TRANS(m.group(1)).strip()
else:
filename = '???'
nzo.fail_msg = T('Unusable RAR file')
msg = ('[%s][%s] '+ Ta('Unusable RAR file')) % (setname, latin1(filename))
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
fail = 1
else:
m = re.search(r'^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$', line)
if m:
@@ -789,7 +806,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
# PAR2 Functions
#------------------------------------------------------------------------------
def par2_repair(parfile_nzf, nzo, workdir, setname):
def par2_repair(parfile_nzf, nzo, workdir, setname, single):
""" Try to repair a set, return readd or correctness """
#set the current nzo status to "Repairing". Used in History
@@ -823,7 +840,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
joinables, zips, rars, ts = build_filelists(workdir, None, check_rar=False)
finished, readd, pars, datafiles, used_joinables, used_par2 = PAR_Verify(parfile, parfile_nzf, nzo,
setname, joinables)
setname, joinables, single=single)
if finished:
result = True
@@ -915,7 +932,7 @@ _RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
_RE_LOADING_PAR2 = re.compile('Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile('Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, single=False):
""" Run par2 on par-set """
if cfg.never_repair():
cmd = 'v'
@@ -949,7 +966,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
# Append the wildcard for this set
wildcard = '%s*' % os.path.join(os.path.split(parfile)[0], setname)
if len(globber(wildcard)) < 2:
if single or len(globber(wildcard, None)) < 2:
# Support bizarre naming conventions
wildcard = os.path.join(os.path.split(parfile)[0], '*')
command.append(wildcard)
@@ -1258,7 +1275,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
if retry_classic:
logging.debug('Retry PAR2-joining with par2-classic')
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True, single=single)
else:
return finished, readd, pars, datafiles, used_joinables, used_par2
@@ -1367,10 +1384,7 @@ def build_filelists(workdir, workdir_complete, check_rar=True):
zips = [f for f in filelist if ZIP_RE.search(f)]
if check_rar:
rars = [f for f in filelist if RAR_RE.search(f) and is_rarfile(f)]
else:
rars = [f for f in filelist if RAR_RE.search(f)]
rars = [f for f in filelist if RAR_RE.search(f)]
ts = [f for f in filelist if TS_RE.search(f) and f not in joinables]

View File

@@ -220,7 +220,7 @@ class NzbQueue(TryList):
if save_nzo is None or nzo is save_nzo:
sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
if not nzo.futuretype:
nzo.save_attribs()
nzo.save_to_disk()
sabnzbd.save_admin((QUEUE_VERSION, nzo_ids, []), QUEUE_FILE_NAME)
@@ -595,7 +595,7 @@ class NzbQueue(TryList):
return nzo_id_pos1
nzo.priority = priority
nzo.save_attribs()
nzo.save_to_disk()
if nzo_id_pos1 != -1:
del self.__nzo_list[nzo_id_pos1]
@@ -755,7 +755,7 @@ class NzbQueue(TryList):
if not nzo.deleted:
nzo.deleted = True
if nzo.precheck:
nzo.save_attribs()
nzo.save_to_disk()
# Check result
enough, ratio = nzo.check_quality()
if enough:
@@ -886,7 +886,7 @@ def _nzo_date_cmp(nzo1, nzo2):
return cmp(avg_date1, avg_date2)
def _nzo_name_cmp(nzo1, nzo2):
return cmp(nzo1.filename, nzo2.filename)
return cmp(nzo1.filename.lower(), nzo2.filename.lower())
def _nzo_size_cmp(nzo1, nzo2):
return cmp(nzo1.bytes, nzo2.bytes)

View File

@@ -889,8 +889,8 @@ class NzbObject(TryList):
head, vol, block = analyse_par2(fn)
## Is a par2file and repair mode activated
if head and (self.repair or cfg.allow_streaming()):
## Skip if mini-par2 is not complete
if not block and nzf.bytes_left:
## Skip if mini-par2 is not complete and there are more par2 files
if not block and nzf.bytes_left and self.extrapars.get(head):
return
nzf.set_par2(head, vol, block)
## Already got a parfile for this set?
@@ -934,7 +934,7 @@ class NzbObject(TryList):
if file_done:
self.remove_nzf(nzf)
if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
if not self.reuse and cfg.fail_hopeless() and not self.check_quality(99)[0]:
#set the nzo status to return "Queued"
self.status = Status.QUEUED
self.set_download_report()
@@ -1022,6 +1022,7 @@ class NzbObject(TryList):
def set_pp(self, value):
self.repair, self.unpack, self.delete = sabnzbd.pp_to_opts(value)
self.save_to_disk()
@property
def final_name_pw(self):
@@ -1054,7 +1055,7 @@ class NzbObject(TryList):
if isinstance(name, str):
name, self.password = scan_password(platform_encode(name))
self.final_name = sanitize_foldername(name)
self.save_attribs()
self.save_to_disk()
def pause(self):
self.status = 'Paused'
@@ -1412,6 +1413,12 @@ class NzbObject(TryList):
def repair_opts(self):
return self.repair, self.unpack, self.delete
def save_to_disk(self):
""" Save job's admin to disk """
self.save_attribs()
if self.nzo_id:
sabnzbd.save_data(self, self.nzo_id, self.workpath)
def save_attribs(self):
set_attrib_file(self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name_pw_clean, self.url))

View File

@@ -390,14 +390,17 @@ def process_job(nzo):
if not nzb_list:
## Give destination its final name
if cfg.folder_rename() and tmp_workdir_complete and not one_folder:
if not all_ok:
if all_ok:
try:
newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
except:
logging.error(Ta('Error renaming "%s" to "%s"'), tmp_workdir_complete, workdir_complete)
logging.info('Traceback: ', exc_info = True)
# Better disable sorting because filenames are all off now
file_sorter.sort_file = None
else:
workdir_complete = tmp_workdir_complete.replace('_UNPACK_', '_FAILED_')
workdir_complete = get_unique_path(workdir_complete, n=0, create_dir=False)
try:
collapse_folder(tmp_workdir_complete, workdir_complete)
except:
logging.error(Ta('Error renaming "%s" to "%s"'), tmp_workdir_complete, workdir_complete)
logging.info("Traceback: ", exc_info = True)
if empty:
job_result = -1
@@ -556,6 +559,7 @@ def parring(nzo, workdir):
re_add = False
par_error = False
single = len(repair_sets) == 1
if repair_sets:
for setname in repair_sets:
@@ -564,13 +568,14 @@ def parring(nzo, workdir):
if not verified.get(setname, False):
logging.info("Running repair on set %s", setname)
parfile_nzf = par_table[setname]
if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)
verified[setname] = res
else:
continue
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)
verified[setname] = res
par_error = par_error or not res
else:
logging.info("No par2 sets for %s", filename)
@@ -750,9 +755,10 @@ def remove_samples(path):
#------------------------------------------------------------------------------
def collapse_folder(oldpath, newpath):
def rename_and_collapse_folder(oldpath, newpath, files):
""" Rename folder, collapsing when there's just a single subfolder
oldpath --> newpath OR oldpath/subfolder --> newpath
Modify list of filenames accordingly
"""
orgpath = oldpath
items = globber(oldpath)
@@ -763,11 +769,16 @@ def collapse_folder(oldpath, newpath):
logging.info('Collapsing %s', os.path.join(newpath, folder))
oldpath = folder_path
oldpath = os.path.normpath(oldpath)
newpath = os.path.normpath(newpath)
files = [os.path.normpath(f).replace(oldpath, newpath) for f in files]
renamer(oldpath, newpath)
try:
remove_dir(orgpath)
except:
pass
return files
#------------------------------------------------------------------------------

View File

@@ -40,7 +40,7 @@ __SCHED = None # Global pointer to Scheduler instance
RSSTASK_MINUTE = random.randint(0, 59)
SCHEDULE_GUARD_FLAG = False
PP_PAUSE_EVENT = False
def schedule_guard():
""" Set flag for scheduler restart """
@@ -53,6 +53,8 @@ def pp_pause():
def pp_resume():
PostProcessor.do.paused = False
def pp_pause_event():
return PP_PAUSE_EVENT
def init():
""" Create the scheduler and set all required events
@@ -275,6 +277,8 @@ def sort_schedules(all_events, now=None):
def analyse(was_paused=False):
""" Determine what pause/resume state we would have now.
"""
global PP_PAUSE_EVENT
PP_PAUSE_EVENT = False
paused = None
paused_all = False
pause_post = False
@@ -292,13 +296,16 @@ def analyse(was_paused=False):
paused = True
elif action == 'pause_all':
paused_all = True
PP_PAUSE_EVENT = True
elif action == 'resume':
paused = False
paused_all = False
elif action == 'pause_post':
pause_post = True
PP_PAUSE_EVENT = True
elif action == 'resume_post':
pause_post = False
PP_PAUSE_EVENT = True
elif action == 'speedlimit' and value!=None:
speedlimit = int(ev[2])
elif action == 'enable_server':

View File

@@ -403,9 +403,9 @@ class SeriesSorter(object):
def to_filepath(f, current_path):
if is_full_path(f):
filepath = f.replace('_UNPACK_', '')
filepath = os.path.normpath(f)
else:
filepath = os.path.join(current_path, f)
filepath = os.path.normpath(os.path.join(current_path, f))
return filepath
# Create a generator of filepaths, ignore sample files and excluded files (vobs etc.)
@@ -657,9 +657,9 @@ class GenericSorter(object):
logging.debug("Renaming Generic file")
def filter_files(_file, current_path):
if is_full_path(_file):
filepath = _file.replace('_UNPACK_', '')
filepath = os.path.normpath(_file)
else:
filepath = os.path.join(current_path, _file)
filepath = os.path.normpath(os.path.join(current_path, _file))
if os.path.exists(filepath):
size = os.stat(filepath).st_size
if size >= cfg.movie_rename_limit.get_int() and not RE_SAMPLE.search(_file) \
@@ -675,9 +675,9 @@ class GenericSorter(object):
if length == 1:
file = files[0]
if is_full_path(file):
filepath = file.replace('_UNPACK_', '')
filepath = os.path.normpath(file)
else:
filepath = os.path.join(current_path, file)
filepath = os.path.normpath(os.path.join(current_path, file))
if os.path.exists(filepath):
self.fname, ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, ext)
@@ -881,9 +881,9 @@ class DateSorter(object):
# Find the master file to rename
for file in files:
if is_full_path(file):
filepath = file.replace('_UNPACK_', '')
filepath = os.path.normpath(file)
else:
filepath = os.path.join(current_path, file)
filepath = os.path.normpath(os.path.join(current_path, file))
if os.path.exists(filepath):
size = os.stat(filepath).st_size

View File

@@ -217,7 +217,8 @@ class URLGrabber(Thread):
self.add(url, future_nzo, when)
# Check if a supported archive
else:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority, url=future_nzo.url)[0] == 0:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority,
nzbname=nzbname, url=future_nzo.url)[0] == 0:
NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
else:
# Not a supported filetype, not an nzb (text/html etc.)