Compare commits

...

35 Commits

Author SHA1 Message Date
shypike
06576baf5c Update text files for 0.7.15 2013-08-26 19:12:28 +02:00
shypike
7b5fcbe0af For Unix systems, expand wildcards for the par2 tool to prevent problems with some builds of par2cmdline. 2013-08-20 21:31:46 +02:00
shypike
003ee07dee Revert "Use ".admin" instead of "__ADMIN__" as job admin folder to support some non-standard Unix systems."
This reverts commit 1b05bc9ed2.
2013-08-20 21:29:01 +02:00
shypike
654b5e9a24 Remove "news" section in Config skin's main page.
Was never used and caused mixed mode https/http issues.
2013-08-15 19:23:26 +02:00
shypike
1b05bc9ed2 Use ".admin" instead of "__ADMIN__" as job admin folder to support some non-standard Unix systems.
".admin" will be treated as a hidden folder by non-Windows systems, avoiding a problem with
wildcard expansion for par2cmdline on some Unix systems.
2013-08-12 22:18:13 +02:00
shypike
dc328c545b Add password entry box to "File Details" page (Plush only).
Also extend api call "queue_rename" with a password parameter (value3).
2013-08-09 18:34:59 +02:00
shypike
823816ddc4 Prevent "special" sub-folders on file servers from being scanned during unpacking. 2013-07-28 14:00:14 +02:00
shypike
8979598f23 Add special option 'sanitize_safe' to remove bad Windows chars on other platforms. 2013-07-16 21:54:02 +02:00
shypike
f26bf9b21f Fix false positive encryption alarm for some posts. 2013-07-16 21:36:09 +02:00
shypike
5d3a0cc593 Merge pull request #104 from manandre/rss_guid
Add of GUID field in History and Queue RSS feeds
2013-07-16 12:32:02 -07:00
manandre
21d445b7a6 Add of GUID field in Queue RSS feed
The NZO id is used as unique id for the queue RSS feed to help some RSS
readers (like Thunderbird) to identify articles when the link field is
the same for all articles
2013-07-07 18:38:17 +02:00
manandre
9c0df30d34 Add of GUID field in History RSS feed
The NZO id is used as unique id for the history RSS feed to help some RSS readers (like Thunderbird) to identify articles when the link field is the same for all articles.
2013-07-07 18:16:24 +02:00
shypike
bc9be3f92b Update text files for 0.7.14 2013-07-07 13:12:15 +02:00
shypike
2dc5c329c9 Fix special case of unjustified encryption warning. 2013-07-07 13:11:01 +02:00
shypike
67817978f4 Missing mini-par2 sometimes prevents the other par2 files from being downloaded. 2013-06-27 20:41:57 +02:00
shypike
e2ab8c6ce4 Make sure even invalid RAR files are fed to unrar and handle its reporting. 2013-06-27 20:29:04 +02:00
shypike
f33a952536 Update text files for 0.7.13 (again). 2013-06-13 21:35:14 +02:00
shypike
cc582b5321 Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved. 2013-06-13 21:33:00 +02:00
shypike
bdc526c91b Update text files for 0.7.13 2013-06-12 22:59:28 +02:00
shypike
52039c29b4 Accept partial par2 file when no others are available. 2013-06-12 21:03:29 +02:00
shypike
1dc4175f82 Add "special" option enable_recursion to control recursive unpacking. 2013-06-09 09:59:38 +02:00
shypike
92f70fc177 When post has just one par2-set, use full wildcard so that all files are repair and par candidates. 2013-06-01 11:21:00 +02:00
shypike
fd573208bd Fix encryption detection again. 2013-05-28 19:47:35 +02:00
shypike
ca9f10c12f Update text files for 0.7.12 2013-05-21 21:47:02 +02:00
shypike
49a72d0902 Update translations 2013-05-21 21:34:25 +02:00
shypike
6aafe3c531 Fix problem in encryption detection. 2013-05-07 21:17:06 +02:00
shypike
9e84696f96 Config and Wizard skins: fix problem with Unicode when using Chrome.
The Config skin and the Wizard were missing a proper Content-Type in <head>.
2013-04-14 12:02:33 +02:00
shypike
120c133d7a Implement robots.txt to keep web crawlers out.
Should not really be needed, because users should password-protect any
SABnzbd instance exposed to internet.
2013-04-12 21:25:56 +02:00
shypike
cf9713a4b0 Don't try to join a set of just one file (e.g. IMAGE.000) and reduce memory usage when joining large segments.
When there is a single file called something like IMAGE.000, don't try to join it.
The joining procedure tries to read an entire segment file into memory, this may lead to a string overflow.
Use shutil.copyfileobj() with a 24 MB buffer instead.
2013-04-12 21:24:53 +02:00
shypike
d12e9889e7 Make encryption detection more careful. 2013-04-09 19:30:25 +02:00
shypike
711a546989 Make name sorting of the queue case-insensitive. 2013-03-20 23:12:13 +01:00
shypike
7f78e6fac1 Save job admin to disk when setting password or changing other attributes. 2013-03-02 13:09:24 +01:00
shypike
72533eefa4 Plush: add "resume pp" entry to pulldown menu, when pause_pp event is scheduled.
The option allows manual resume of a scheduled paused post-processing.
2013-02-26 20:33:58 +01:00
shypike
d9643d9ea8 Improve RAR detection. 2013-02-25 22:08:26 +01:00
shypike
2de71bb96c Enable "abort if hopeless" for pre-check as well. 2013-02-13 20:40:31 +01:00
25 changed files with 223 additions and 91 deletions

View File

@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 0.7.11 ***
*** This is SABnzbd 0.7.15 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,

View File

@@ -1,3 +1,36 @@
-------------------------------------------------------------------------------
0.7.15Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix false encryption alarms for some posts
- Add "password" dialog to Plush's job details page
- Add special "sanitize_safe" to remove bad Windows characters on other platforms
- Remove "news" section from Config skin
- Fix for faulty par2cmdline on some embedded Unix systems
- Add GUID fields to the History RSS feed.
-------------------------------------------------------------------------------
0.7.14Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Another encryption detection fix (special case)
- Missing mini-par2 sometimes prevents the other par2 files from being downloaded.
- Make sure even invalid RAR files are fed to unrar and handle its reporting.
-------------------------------------------------------------------------------
0.7.13Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Another encryption detection fix
- Special option "enable_recursion" to control recursive unpacking
- When post has just one par2 set, use wildcard so that all files are used
- Accept partial par2 file when only one is available
- Accept "nzbname" parameter in api-call "add url" even when a ZIP file is retrieved.
-------------------------------------------------------------------------------
0.7.12Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix issue in encryption detection
- Don't try to "join" a single X.000 file
- Fix memory overflow caused by very large files to be joined
- Make name sorting of the queue case-insensitive
- Save data to disk after changing job password or other attributes
- Add "resume_pp" entry to Plush pull-down menu when pause_pp event is scheduled
- Deploy "abort when completion not possible" method also in pre-download check
-------------------------------------------------------------------------------
0.7.11Final by The SABnzbd-Team
-------------------------------------------------------------------------------

View File

@@ -1,4 +1,4 @@
SABnzbd 0.7.11
SABnzbd 0.7.15
-------------------------------------------------------------------------------
0) LICENSE

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 0.7.11
Summary: SABnzbd-0.7.11
Version: 0.7.15
Summary: SABnzbd-0.7.15
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,11 +1,16 @@
Release Notes - SABnzbd 0.7.11
Release Notes - SABnzbd 0.7.15
================================
## Bug fixes
- Obfuscated file name support causes regular multi-set NZBs to verify (much) slower
- Bad articles from some servers are accepted as valid data
- Generic Sort fails to rename files when an extra folder level is present in the RAR files
- Fix false encryption alarms for some posts
- Fix for faulty par2cmdline on some embedded Unix systems
## Features
- Add "password" box to Plush's job details page
- Add special "sanitize_safe" to remove unsupported Windows characters on other platforms.
This solves issues when using NAS shares from Windows.
## What's new in 0.7.0

View File

@@ -1,7 +1,7 @@
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>SABnzbd $version - $T('queued'): $mbleft $T('MB')</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<meta name="viewport" content="width=device-width, initial-scale=1.0" />

View File

@@ -24,11 +24,6 @@
<h5 class="copyright">Copyright &copy; 2008-2013 The SABnzbd Team &lt;<span style="color: #0000ff;">team@sabnzbd.org</span>&gt;</h5>
<p class="copyright"><small>$T('yourRights')</small></p>
</div>
<!--#if $news_items#-->
<div class="padding">
<iframe frameborder=0 width=100% src="http://sabnzbdplus.sourceforge.net/version/news.html"></iframe>
</div>
<!--#end if#-->
</div>
<!--#include $webdir + "/_inc_footer_uc.tmpl"#-->

View File

@@ -28,6 +28,7 @@
<!--#if $have_quota#--><li><a id="reset_quota_now" class="pointer">$T('link-resetQuota')</a></li><!--#end if#-->
<!--#if $have_rss_defined#--><li><a id="get_rss_now" class="pointer">$T('button-rssNow')</a></li><!--#end if#-->
<!--#if $have_watched_dir#--><li><a id="get_watched_now" class="pointer">$T('sch-scan_folder')</a></li><!--#end if#-->
<!--#if $pp_pause_event#--><li><a id="resume_pp" class="pointer">$T('sch-resume_post')</a></li><!--#end if#-->
<li><a id="topmenu_toggle" class="pointer">$T('Plush-topMenu')</a></li>
<li><a id="multiops_toggle" class="pointer">$T('Plush-multiOperations')</a></li>
<li>

View File

@@ -6,7 +6,8 @@
<form action="save" method="post" class="nzo_save_form">
<input type="hidden" name="session" value="$session">
<input type="text" name="name" size="70" value="$slot.filename" />
<input type="text" name="name" size="70" value="$slot.filename_clean" />
<input type="text" name="password" size="15" value="$slot.password" placeholder="$T('srv-password')"/>
<div>
<select name="index"><optgroup label="$T('order')">

View File

@@ -329,6 +329,17 @@ jQuery(function($){
});
});
// Resume Post Processing
$('#resume_pp').click(function() {
$.ajax({
headers: {"Cache-Control": "no-cache"},
type: "POST",
url: "tapi",
data: {mode:'resume_pp', apikey: $.plush.apikey},
success: $.plush.RefreshQueue
});
});
$('#multiops_toggle').click(function(){
if( $('#multiops_bar').is(':visible') ) { // hide
$('#multiops_bar').hide();

View File

@@ -1,5 +1,6 @@
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>$T('wizard-quickstart')</title>
<link rel="stylesheet" type="text/css" href="static/style.css"/>
<link rel="shortcut icon" href="static/images/favicon.ico" />

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-29 10:29+0000\n"
"PO-Revision-Date: 2013-03-25 10:29+0000\n"
"Last-Translator: shypike <Unknown>\n"
"Language-Team: Dutch <nl@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:43+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-26 05:02+0000\n"
"X-Generator: Launchpad (build 16540)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -691,11 +691,11 @@ msgstr "ERROR: schrijf fout (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uitpakken mislukt, bestandspad is te lang"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FOUT: bestandspad is te lang (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -3241,11 +3241,11 @@ msgstr "Zend meldingen naar NotifyOSD"
#: sabnzbd/skintext.py:560
msgid "Notification Center"
msgstr "Notification Center"
msgstr "Berichtencentrum"
#: sabnzbd/skintext.py:561
msgid "Send notifications to Notification Center"
msgstr "Stuur berichten naar Notification Center"
msgstr "Stuur berichten naar het Berichtencentrum"
#: sabnzbd/skintext.py:562
msgid "Notification classes"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-09-29 03:57+0000\n"
"PO-Revision-Date: 2013-02-11 19:34+0000\n"
"Last-Translator: lrrosa <Unknown>\n"
"Language-Team: Brazilian Portuguese <pt_BR@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-02-12 04:58+0000\n"
"X-Generator: Launchpad (build 16491)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -163,10 +163,11 @@ msgstr ""
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
"ATENÇÃO: Tarefa \"%s\" cancelada por causa de arquivo RAR criptografado"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
msgstr ""
msgstr "Cancelado, criptografia detectada"
#: sabnzbd/assembler.py:154
msgid "%s missing"
@@ -459,7 +460,7 @@ msgstr ""
#: sabnzbd/interface.py:889 [Abbreviation for bytes, as in GB]
msgid "B"
msgstr ""
msgstr "B"
#: sabnzbd/interface.py:1061 # sabnzbd/interface.py:1073
msgid "Initiating restart...<br />"
@@ -694,11 +695,11 @@ msgstr "ERRO: erro de escrita (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Descompactação falhou, o caminho é muito extenso"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "ERRO: caminho muito extenso (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -897,7 +898,7 @@ msgstr "Pausando NZB duplicado \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Cancelado, não é possível concluir"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -937,7 +938,7 @@ msgstr "%s artigos tinham duplicatas não-correspondentes"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artigos foram removidos"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1326,7 +1327,7 @@ msgstr "veja o arquivo de log"
#: sabnzbd/postproc.py:489
msgid "PostProcessing was aborted (%s)"
msgstr "O pós-processamento foi interrompido (%s)"
msgstr "O pós-processamento foi cancelado (%s)"
#: sabnzbd/postproc.py:521 [Error message]
msgid "Cleanup of %s failed."
@@ -2648,12 +2649,13 @@ msgstr ""
#: sabnzbd/skintext.py:383
msgid "Action when encrypted RAR is downloaded"
msgstr ""
msgstr "Ação quando RAR criptografado é baixado"
#: sabnzbd/skintext.py:384
msgid ""
"In case of \"Pause\", you'll need to set a password and resume the job."
msgstr ""
"Em caso de \"Pausa\", você precisa definir uma senha e retomar a tarefa."
#: sabnzbd/skintext.py:385
msgid "Detect Duplicate Downloads"
@@ -2677,7 +2679,7 @@ msgstr "Descartar"
#: sabnzbd/skintext.py:390 [Three way switch for encrypted posts]
msgid "Abort"
msgstr ""
msgstr "Cancelar"
#: sabnzbd/skintext.py:391
msgid "Enable SFV-based checks"
@@ -2956,13 +2958,15 @@ msgstr "Aplicar o máximo de tentativas somente com servidores opcionais"
#: sabnzbd/skintext.py:462
msgid "Abort jobs that cannot be completed"
msgstr ""
msgstr "Cancela tarefas que não podem ser concluídas"
#: sabnzbd/skintext.py:463
msgid ""
"When during download it becomes clear that too much data is missing, abort "
"the job"
msgstr ""
"Quando durante o download ficar claro que muitos dados estão faltando, "
"cancela a tarefa"
#: sabnzbd/skintext.py:467 [Caption]
msgid "Server configuration"

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2013-01-24 09:42+0000\n"
"PO-Revision-Date: 2012-12-28 11:03+0000\n"
"Last-Translator: Björn Lindh <probablyx@gmail.com>\n"
"PO-Revision-Date: 2013-03-17 20:50+0000\n"
"Last-Translator: Kristofer Norén <kristofer@shallowdreams.com>\n"
"Language-Team: Swedish <sv@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2013-01-25 05:44+0000\n"
"X-Generator: Launchpad (build 16445)\n"
"X-Launchpad-Export-Date: 2013-03-18 05:01+0000\n"
"X-Generator: Launchpad (build 16532)\n"
#: SABnzbd.py:303 [Error message]
msgid "Failed to start web-interface"
@@ -159,7 +159,7 @@ msgstr "WARNING: Paused job \"%s\" because of encrypted RAR file"
#: sabnzbd/assembler.py:119 [Warning message]
msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file"
msgstr ""
msgstr "Varning: avbröt jobbet %s på grund av att RAR-filen är krypterad"
#: sabnzbd/assembler.py:120
msgid "Aborted, encryption detected"
@@ -689,11 +689,11 @@ msgstr "FEL: skrivningsfel (%s)"
#: sabnzbd/newsunpack.py:620 # sabnzbd/newsunpack.py:621
msgid "Unpacking failed, path is too long"
msgstr ""
msgstr "Uppackning misslyckades, sökvägen är för lång"
#: sabnzbd/newsunpack.py:622 [Error message]
msgid "ERROR: path too long (%s)"
msgstr ""
msgstr "FEL: sökvägen är för lång (%s)"
#: sabnzbd/newsunpack.py:631
msgid "Unpacking failed, see log"
@@ -894,7 +894,7 @@ msgstr "Pausar dubblett för NZB \"%s\""
#: sabnzbd/nzbstuff.py:941
msgid "Aborted, cannot be completed"
msgstr ""
msgstr "Avbrutet, kan inte slutföras"
#: sabnzbd/nzbstuff.py:1030 [Queue indicator for duplicate job]
msgid "DUPLICATE"
@@ -934,7 +934,7 @@ msgstr "%s artiklar hade icke-matchande dubletter"
#: sabnzbd/nzbstuff.py:1140
msgid "%s articles were removed"
msgstr ""
msgstr "%s artiklar borttagna"
#: sabnzbd/nzbstuff.py:1172 [Error message]
msgid "Error importing %s"
@@ -1543,7 +1543,7 @@ msgstr "Läs RSS-flöden"
#: sabnzbd/skintext.py:65 [Config->Scheduler]
msgid "Remove failed jobs"
msgstr ""
msgstr "Ta bort misslyckade jobb"
#: sabnzbd/skintext.py:70 [Speed indicator kilobytes/sec]
msgid "KB/s"

View File

@@ -177,10 +177,11 @@ def _api_queue_delete_nzf(output, value, kwargs):
def _api_queue_rename(output, value, kwargs):
""" API: accepts output, value(=old name), value2(=new name) """
""" API: accepts output, value(=old name), value2(=new name), value3(=password) """
value2 = kwargs.get('value2')
value3 = kwargs.get('value3')
if value and value2:
NzbQueue.do.change_name(value, special_fixer(value2))
NzbQueue.do.change_name(value, special_fixer(value2), special_fixer(value3))
return report(output)
else:
return report(output, _MSG_NO_VALUE2)
@@ -632,6 +633,12 @@ def _api_watched_now(name, output, kwargs):
return report(output)
def _api_resume_pp(name, output, kwargs):
""" API: accepts output """
PostProcessor.do.paused = False
return report(output)
def _api_rss_now(name, output, kwargs):
""" API: accepts output """
# Run RSS scan async, because it can take a long time
@@ -795,6 +802,7 @@ _api_table = {
'rescan' : _api_rescan,
'eval_sort' : _api_eval_sort,
'watched_now' : _api_watched_now,
'resume_pp' : _api_resume_pp,
'rss_now' : _api_rss_now,
'browse' : _api_browse,
'reset_quota' : _api_reset_quota,
@@ -1406,6 +1414,7 @@ def rss_qstatus():
bytes = pnfo[PNFO_BYTES_FIELD] / MEBI
mbleft = (bytesleft / MEBI)
mb = (bytes / MEBI)
nzo_id = pnfo[PNFO_NZO_ID_FIELD]
if mb == mbleft:
@@ -1423,6 +1432,8 @@ def rss_qstatus():
else:
item.link = "http://%s:%s/sabnzbd/history" % ( \
cfg.cherryhost(), cfg.cherryport() )
item.guid = nzo_id
status_line = []
status_line.append('<tr>')
#Total MB/MB left
@@ -1594,6 +1605,7 @@ def build_header(prim, webdir=''):
header['quota'] = to_units(BPSMeter.do.quota)
header['have_quota'] = bool(BPSMeter.do.quota > 0.0)
header['left_quota'] = to_units(BPSMeter.do.left)
header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()
status = ''
if Downloader.do.paused or Downloader.do.postproc:

View File

@@ -290,9 +290,16 @@ def ParseFilePacket(f, header):
def is_cloaked(path, names):
""" Return True if this is likely to be a cloaked encrypted post """
fname = unicoder(os.path.split(path)[1]).lower()
fname = os.path.splitext(fname)[0]
for name in names:
name = unicoder(name.lower())
if fname == name or 'password' in name:
name = os.path.split(name.lower())[1]
name, ext = os.path.splitext(unicoder(name))
if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and \
'.subs' not in fname:
logging.debug('File %s is probably encrypted due to RAR with same name inside this RAR', fname)
return True
elif 'password' in name:
logging.debug('RAR %s is probably encrypted: "password" in filename %s', fname, name)
return True
return False

View File

@@ -80,7 +80,6 @@ email_dir = OptionDir('misc', 'email_dir', create=True)
email_rss = OptionBool('misc', 'email_rss', False)
version_check = OptionNumber('misc', 'check_new_rel', 1)
news_items = OptionBool('misc', 'news_items', True)
autobrowser = OptionBool('misc', 'auto_browser', True)
replace_illegal = OptionBool('misc', 'replace_illegal', True)
pre_script = OptionStr('misc', 'pre_script', 'None')
@@ -88,6 +87,7 @@ start_paused = OptionBool('misc', 'start_paused', False)
enable_unrar = OptionBool('misc', 'enable_unrar', True)
enable_unzip = OptionBool('misc', 'enable_unzip', True)
enable_recursive = OptionBool('misc', 'enable_recursive', True)
enable_filejoin = OptionBool('misc', 'enable_filejoin', True)
enable_tsjoin = OptionBool('misc', 'enable_tsjoin', True)
enable_par_cleanup = OptionBool('misc', 'enable_par_cleanup', True)
@@ -179,6 +179,7 @@ password_file = OptionDir('misc', 'password_file', '', create=False)
fsys_type = OptionNumber('misc', 'fsys_type', 0, 0, 2)
wait_for_dfolder = OptionBool('misc', 'wait_for_dfolder', False)
warn_empty_nzb = OptionBool('misc', 'warn_empty_nzb', True)
sanitize_safe = OptionBool('misc', 'sanitize_safe', False)
cherryhost = OptionStr('misc', 'host', DEF_HOST)
if sabnzbd.WIN32:

View File

@@ -461,6 +461,12 @@ class MainPage(object):
retry_job(kwargs.get('job'), kwargs.get('nzbfile'))
raise dcRaiser(self.__root, kwargs)
@cherrypy.expose
def robots_txt(self):
""" Keep web crawlers out """
cherrypy.response.headers['Content-Type'] = 'text/plain'
return 'User-agent: *\nDisallow: /\n'
#------------------------------------------------------------------------------
class NzoPage(object):
@@ -534,12 +540,19 @@ class NzoPage(object):
cat = pnfo[PNFO_EXTRA_FIELD1]
if not cat:
cat = 'None'
filename = xml_name(nzo.final_name_pw_clean)
filename_pw = xml_name(nzo.final_name_pw_clean)
filename = xml_name(nzo.final_name)
if nzo.password:
password = xml_name(nzo.password)
else:
password = ''
priority = pnfo[PNFO_PRIORITY_FIELD]
slot['nzo_id'] = str(nzo_id)
slot['cat'] = cat
slot['filename'] = filename
slot['filename'] = filename_pw
slot['filename_clean'] = filename
slot['password'] = password or ''
slot['script'] = script
slot['priority'] = str(priority)
slot['unpackopts'] = str(unpackopts)
@@ -587,6 +600,7 @@ class NzoPage(object):
def save_details(self, nzo_id, args, kwargs):
index = kwargs.get('index', None)
name = kwargs.get('name', None)
password = kwargs.get('password', None)
pp = kwargs.get('pp', None)
script = kwargs.get('script', None)
cat = kwargs.get('cat', None)
@@ -596,7 +610,7 @@ class NzoPage(object):
if index != None:
NzbQueue.do.switch(nzo_id, index)
if name != None:
NzbQueue.do.change_name(nzo_id, special_fixer(name))
NzbQueue.do.change_name(nzo_id, special_fixer(name), password)
if cat != None:
NzbQueue.do.change_cat(nzo_id,cat)
if script != None:
@@ -1044,7 +1058,6 @@ class ConfigPage(object):
for svr in config.get_servers():
new[svr] = {}
conf['servers'] = new
conf['news_items'] = cfg.news_items()
conf['folders'] = sabnzbd.nzbqueue.scan_jobs(all=False, action=False)
@@ -1218,7 +1231,8 @@ SPECIAL_BOOL_LIST = \
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb', 'enable_recursive', 'sanitize_safe'
)
SPECIAL_VALUE_LIST = \
( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',
@@ -2729,6 +2743,7 @@ def rss_history(url, limit=50, search=None):
item.link = history['url_info']
else:
item.link = url
item.guid = history['nzo_id']
stageLine = []
for stage in history['stage_log']:

View File

@@ -238,6 +238,11 @@ def sanitize_foldername(name):
illegal = FL_ILLEGAL
legal = FL_LEGAL
if cfg.sanitize_safe():
# Remove all bad Windows chars too
illegal += r'\/<>?*|"'
legal += r'++{}!@#`'
repl = cfg.replace_illegal()
lst = []
for ch in name.strip():

View File

@@ -26,6 +26,7 @@ import subprocess
import logging
from time import time
import binascii
import shutil
import sabnzbd
from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
@@ -233,7 +234,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
nzo.set_action_line()
if rerun:
if rerun and (cfg.enable_recursive() or new_ts or new_joins):
z, y = unpack_magic(nzo, workdir, workdir_complete, dele, one_folder,
xjoinables, xzips, xrars, xts, depth)
if z:
@@ -289,7 +290,6 @@ def get_seq_number(name):
match, set, num = match_ts(name)
else:
num = tail[1:]
assert isinstance(num, str)
if num.isdigit():
return int(num)
else:
@@ -300,6 +300,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
when succesful, delete originals
"""
newfiles = []
bufsize = 24*1024*1024
# Create matching sets from the list of files
joinable_sets = {}
@@ -330,6 +331,11 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
# done, go to next set
continue
# Only join when there is more than one file
size = len(current)
if size < 2:
continue
# Prepare joined file
filename = joinable_set
if workdir_complete:
@@ -338,7 +344,6 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
joined_file = open(filename, 'ab')
# Join the segments
size = len(current)
n = get_seq_number(current[0])
seq_error = n > 1
for joinable in current:
@@ -348,7 +353,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
logging.debug("Processing %s", joinable)
nzo.set_action_line(T('Joining'), '%.0f%%' % perc)
f = open(joinable, 'rb')
joined_file.write(f.read())
shutil.copyfileobj(f, joined_file, bufsize)
f.close()
if delete:
logging.debug("Deleting %s", joinable)
@@ -649,6 +654,18 @@ def rar_extract_core(rarfile, numrars, one_folder, nzo, setname, extraction_path
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
fail = 2
elif 'is not RAR archive' in line:
# Unrecognizable RAR file
m = re.search('(.+) is not RAR archive', line)
if m:
filename = TRANS(m.group(1)).strip()
else:
filename = '???'
nzo.fail_msg = T('Unusable RAR file')
msg = ('[%s][%s] '+ Ta('Unusable RAR file')) % (setname, latin1(filename))
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
fail = 1
else:
m = re.search(r'^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$', line)
if m:
@@ -789,7 +806,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
# PAR2 Functions
#------------------------------------------------------------------------------
def par2_repair(parfile_nzf, nzo, workdir, setname):
def par2_repair(parfile_nzf, nzo, workdir, setname, single):
""" Try to repair a set, return readd or correctness """
#set the current nzo status to "Repairing". Used in History
@@ -823,7 +840,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
joinables, zips, rars, ts = build_filelists(workdir, None, check_rar=False)
finished, readd, pars, datafiles, used_joinables, used_par2 = PAR_Verify(parfile, parfile_nzf, nzo,
setname, joinables)
setname, joinables, single=single)
if finished:
result = True
@@ -915,7 +932,7 @@ _RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
_RE_LOADING_PAR2 = re.compile('Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile('Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, single=False):
""" Run par2 on par-set """
if cfg.never_repair():
cmd = 'v'
@@ -949,10 +966,14 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
# Append the wildcard for this set
wildcard = '%s*' % os.path.join(os.path.split(parfile)[0], setname)
if len(globber(wildcard, None)) < 2:
if single or len(globber(wildcard, None)) < 2:
# Support bizarre naming conventions
wildcard = os.path.join(os.path.split(parfile)[0], '*')
command.append(wildcard)
if sabnzbd.WIN32 or sabnzbd.DARWIN:
command.append(wildcard)
else:
flist = [item for item in globber(wildcard, None) if os.path.isfile(item)]
command.extend(flist)
stup, need_shell, command, creationflags = build_command(command)
logging.debug('Starting par2: %s', command)
@@ -1258,7 +1279,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
if retry_classic:
logging.debug('Retry PAR2-joining with par2-classic')
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True, single=single)
else:
return finished, readd, pars, datafiles, used_joinables, used_par2
@@ -1353,12 +1374,14 @@ def build_filelists(workdir, workdir_complete, check_rar=True):
if workdir_complete:
for root, dirs, files in os.walk(workdir_complete):
for _file in files:
filelist.append(os.path.join(root, _file))
if '.AppleDouble' not in root and '.DS_Store' not in root:
filelist.append(os.path.join(root, _file))
if workdir and not filelist:
for root, dirs, files in os.walk(workdir):
for _file in files:
filelist.append(os.path.join(root, _file))
if '.AppleDouble' not in root and '.DS_Store' not in root:
filelist.append(os.path.join(root, _file))
if check_rar:
joinables = [f for f in filelist if SPLITFILE_RE.search(f) and not is_rarfile(f)]
@@ -1367,10 +1390,7 @@ def build_filelists(workdir, workdir_complete, check_rar=True):
zips = [f for f in filelist if ZIP_RE.search(f)]
if check_rar:
rars = [f for f in filelist if RAR_RE.search(f) and is_rarfile(f)]
else:
rars = [f for f in filelist if RAR_RE.search(f)]
rars = [f for f in filelist if RAR_RE.search(f)]
ts = [f for f in filelist if TS_RE.search(f) and f not in joinables]

View File

@@ -220,7 +220,7 @@ class NzbQueue(TryList):
if save_nzo is None or nzo is save_nzo:
sabnzbd.save_data(nzo, nzo.nzo_id, nzo.workpath)
if not nzo.futuretype:
nzo.save_attribs()
nzo.save_to_disk()
sabnzbd.save_admin((QUEUE_VERSION, nzo_ids, []), QUEUE_FILE_NAME)
@@ -308,11 +308,11 @@ class NzbQueue(TryList):
self.set_priority(nzo_id, prio)
@synchronized(NZBQUEUE_LOCK)
def change_name(self, nzo_id, name):
def change_name(self, nzo_id, name, password=None):
if nzo_id in self.__nzo_table:
nzo = self.__nzo_table[nzo_id]
if not nzo.futuretype:
nzo.set_final_name_pw(name)
nzo.set_final_name_pw(name, password)
else:
# Reset url fetch wait time
nzo.wait = None
@@ -595,7 +595,7 @@ class NzbQueue(TryList):
return nzo_id_pos1
nzo.priority = priority
nzo.save_attribs()
nzo.save_to_disk()
if nzo_id_pos1 != -1:
del self.__nzo_list[nzo_id_pos1]
@@ -755,7 +755,7 @@ class NzbQueue(TryList):
if not nzo.deleted:
nzo.deleted = True
if nzo.precheck:
nzo.save_attribs()
nzo.save_to_disk()
# Check result
enough, ratio = nzo.check_quality()
if enough:
@@ -886,7 +886,7 @@ def _nzo_date_cmp(nzo1, nzo2):
return cmp(avg_date1, avg_date2)
def _nzo_name_cmp(nzo1, nzo2):
return cmp(nzo1.filename, nzo2.filename)
return cmp(nzo1.filename.lower(), nzo2.filename.lower())
def _nzo_size_cmp(nzo1, nzo2):
return cmp(nzo1.bytes, nzo2.bytes)

View File

@@ -889,8 +889,8 @@ class NzbObject(TryList):
head, vol, block = analyse_par2(fn)
## Is a par2file and repair mode activated
if head and (self.repair or cfg.allow_streaming()):
## Skip if mini-par2 is not complete
if not block and nzf.bytes_left:
## Skip if mini-par2 is not complete and there are more par2 files
if not block and nzf.bytes_left and self.extrapars.get(head):
return
nzf.set_par2(head, vol, block)
## Already got a parfile for this set?
@@ -934,7 +934,7 @@ class NzbObject(TryList):
if file_done:
self.remove_nzf(nzf)
if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
if not self.reuse and cfg.fail_hopeless() and not self.check_quality(99)[0]:
#set the nzo status to return "Queued"
self.status = Status.QUEUED
self.set_download_report()
@@ -1022,6 +1022,7 @@ class NzbObject(TryList):
def set_pp(self, value):
self.repair, self.unpack, self.delete = sabnzbd.pp_to_opts(value)
self.save_to_disk()
@property
def final_name_pw(self):
@@ -1050,11 +1051,15 @@ class NzbObject(TryList):
else:
return self.final_name
def set_final_name_pw(self, name):
def set_final_name_pw(self, name, password=None):
if isinstance(name, str):
name, self.password = scan_password(platform_encode(name))
if password:
name = platform_encode(name)
self.password = platform_encode(password)
else:
name, self.password = scan_password(platform_encode(name))
self.final_name = sanitize_foldername(name)
self.save_attribs()
self.save_to_disk()
def pause(self):
self.status = 'Paused'
@@ -1412,6 +1417,12 @@ class NzbObject(TryList):
def repair_opts(self):
return self.repair, self.unpack, self.delete
def save_to_disk(self):
""" Save job's admin to disk """
self.save_attribs()
if self.nzo_id:
sabnzbd.save_data(self, self.nzo_id, self.workpath)
def save_attribs(self):
set_attrib_file(self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name_pw_clean, self.url))

View File

@@ -559,6 +559,7 @@ def parring(nzo, workdir):
re_add = False
par_error = False
single = len(repair_sets) == 1
if repair_sets:
for setname in repair_sets:
@@ -567,13 +568,14 @@ def parring(nzo, workdir):
if not verified.get(setname, False):
logging.info("Running repair on set %s", setname)
parfile_nzf = par_table[setname]
if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
if os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)) or parfile_nzf.extrapars:
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname, single=single)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)
verified[setname] = res
else:
continue
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)
verified[setname] = res
par_error = par_error or not res
else:
logging.info("No par2 sets for %s", filename)

View File

@@ -40,7 +40,7 @@ __SCHED = None # Global pointer to Scheduler instance
RSSTASK_MINUTE = random.randint(0, 59)
SCHEDULE_GUARD_FLAG = False
PP_PAUSE_EVENT = False
def schedule_guard():
""" Set flag for scheduler restart """
@@ -53,6 +53,8 @@ def pp_pause():
def pp_resume():
PostProcessor.do.paused = False
def pp_pause_event():
return PP_PAUSE_EVENT
def init():
""" Create the scheduler and set all required events
@@ -275,6 +277,8 @@ def sort_schedules(all_events, now=None):
def analyse(was_paused=False):
""" Determine what pause/resume state we would have now.
"""
global PP_PAUSE_EVENT
PP_PAUSE_EVENT = False
paused = None
paused_all = False
pause_post = False
@@ -292,13 +296,16 @@ def analyse(was_paused=False):
paused = True
elif action == 'pause_all':
paused_all = True
PP_PAUSE_EVENT = True
elif action == 'resume':
paused = False
paused_all = False
elif action == 'pause_post':
pause_post = True
PP_PAUSE_EVENT = True
elif action == 'resume_post':
pause_post = False
PP_PAUSE_EVENT = True
elif action == 'speedlimit' and value!=None:
speedlimit = int(ev[2])
elif action == 'enable_server':

View File

@@ -217,7 +217,8 @@ class URLGrabber(Thread):
self.add(url, future_nzo, when)
# Check if a supported archive
else:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority, url=future_nzo.url)[0] == 0:
if dirscanner.ProcessArchiveFile(filename, fn, pp, script, cat, priority=priority,
nzbname=nzbname, url=future_nzo.url)[0] == 0:
NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
else:
# Not a supported filetype, not an nzb (text/html ect)