Compare commits

..

46 Commits

Author SHA1 Message Date
shypike
e4de2d626d Merge branch '0.7.x' 2013-01-30 22:39:25 +01:00
shypike
71ee5969a8 Update text files for 0.7.10 2013-01-30 21:04:24 +01:00
shypike
1b1c772b55 Update text files for 0.7.10 2013-01-30 20:34:13 +01:00
shypike
458eb3a417 Update translations 2013-01-29 19:31:57 +01:00
shypike
98b753f015 Change access to build share, even more improved. 2013-01-28 23:31:25 +01:00
shypike
a3284e12d2 Change access to build share, improved. 2013-01-28 22:51:35 +01:00
shypike
c85120bb4b Change access to build share. 2013-01-28 21:36:12 +01:00
shypike
903925e06b Update main POT file. 2013-01-24 10:35:27 +01:00
shypike
03196f25e4 Accept NNTP error 400 without "too many connection" clues as a transient error.
Previously it would file a warning and lock out the server for a few minutes.
Reduce the lockout to 6 seconds.
400 should report "too many connections" but some servers use it for temporary connectivity issues.
2013-01-24 10:34:18 +01:00
ShyPike
e3f3f962b6 Handle unrar error messages better (like "path too long").
In the case of fully encrypted RAR files, SABnzbd cannot check the resulting files.
Handling unrar errors explicitly is better anyway.
2013-01-23 22:23:11 +01:00
shypike
153f92e697 "Failed" message should also appear in email notifications. 2013-01-22 23:27:21 +01:00
shypike
c1dcafe953 Display next RSS scan moment in Config->RSS 2013-01-19 21:34:31 +01:00
ShyPike
b53d97732b Reset the "today" byte counters at midnight even when idle.
Set a scheduled event at midnight for resetting the "today" byte counters.
Otherwise, when idle, the bpsmeter isn't called at all.
2013-01-17 22:24:12 +01:00
ShyPike
8f47cce9c8 Try to process obfuscated rar/par sets as good as possible.
When detecting a main par2 file without extra pars, use full wildcard for par2-run.
During par2-run, register which par2 files contain matching blocks.
Remove matching par2 files after the repair.
Skip sets of which the main par2 file has been deleted (due to having been used
in another set).
2013-01-15 22:42:59 +01:00
ShyPike
3cf42a7f94 Accept %fn (next to %fn.%ext) as end parameter in sorting strings. 2013-01-14 20:52:37 +01:00
ShyPike
ae74370cfb Add IP address of unauthenticated API-call to warning. 2013-01-14 19:23:47 +01:00
ShyPike
2aaa283991 Plush: repair and unpack icons in History were swapped.
Fixing does mean that order changes too, but that was the only way to
make the icons correspond with the hover popups.
2013-01-11 21:55:14 +01:00
ShyPike
dca7a8ccdb Plush: show speed when forced job is running in Paused mode. 2013-01-11 19:48:17 +01:00
ShyPike
2b3b5b765a Plush: show speed when forced job is running in Paused mode. 2013-01-11 19:45:31 +01:00
shypike
3172d6e987 Disable scheduled task for newzbin bookmarks. 2013-01-10 21:36:43 +01:00
ShyPike
c237ddfef4 Update text files for 0.7.9 2013-01-06 20:11:33 +01:00
ShyPike
b543dcb5ac Fix text in dropdowns being hard to see in chrome. 2013-01-06 19:33:31 +01:00
ShyPike
ccfbb07333 Take servers that only support ARTICLE into account.
When only full articles are available, the decoder needs to scan more lines
to find the start of the payload.
2013-01-06 19:33:19 +01:00
ShyPike
256ccbd6a1 Prevent crash in decoder.py 2013-01-06 19:06:15 +01:00
ShyPike
d8d507f110 Update text files for 0.7.8 2013-01-03 19:24:27 +01:00
ShyPike
7b3309649f Cancel encryption detection if meta-data of NZB contains a password. 2013-01-03 18:44:31 +01:00
shypike
9a7a6652e8 Update text files for 0.7.8 2013-01-03 18:41:50 +01:00
shypike
db4891748f Update copyright year. 2013-01-03 18:40:49 +01:00
ShyPike
3dce2e8908 Support NZB 1.1 meta data; currently "category" and "password" are used.
"category" will trigger category conversion.
"password" value(s) will be used when an encrypted download is encountered.
The latter will also suppress the on-the-fly encryption detection.
2013-01-02 23:05:15 +01:00
ShyPike
c91291c315 Don't retry an empty but correct NZB retrieved from an indexer.
Also add special option "warn_empty_nzb" to control warning about empty NZBs.
2013-01-02 19:37:49 +01:00
shypike
a2a5a1f8e4 Make sure "Abort" error message ends up in download report. 2013-01-01 22:04:24 +01:00
shypike
7651f709ad API functions "addfile" and "addlocalfile" now support "nzbname" parameter for ZIP files with single NZB.
Also, prevent crash on calling "cat_convert" in those api functions (undefined).
2012-12-31 14:11:09 +01:00
shypike
a565077348 Update translations 2012-12-31 12:27:13 +01:00
shypike
6cf99e7d3a Add handling of an extra par2 error message. 2012-12-30 15:06:35 +01:00
shypike
f730a82005 Check for IPv6 connectivity should not use specific exceptions. 2012-12-30 13:16:46 +01:00
ShyPike
5449607c1d Update POT file. 2012-12-28 14:12:36 +01:00
ShyPike
c62415abfd Add "Abort" option to encryption detection.
The option pause_on_pwrar gets an extra value (2) which will
abort an encrypted job. Retrying the job will disable the check.
2012-12-28 14:10:53 +01:00
ShyPike
dcbea3057c Register removed articles and list in download report. 2012-12-28 13:23:34 +01:00
ShyPike
91642d16c8 Update POT file. 2012-12-27 22:26:33 +01:00
ShyPike
2f2773149d Fix missing Retry link for "Out of retention" jobs. 2012-12-27 22:26:05 +01:00
ShyPike
adaba03f50 Option to terminate download if too much data is missing.
Option 'fail_hopeless' Config->Switches.
On-the-fly check for possible completion after each file is processed.
Abort if it's no longer possible to download at least 99% of total data (payload + par2).
Don't do the check when retrying from History.
2012-12-27 21:56:00 +01:00
shypike
58a5e09540 Prevent web-watchdog from crashing when using Python 2.5 2012-12-27 11:28:06 +01:00
shypike
20dc906095 Support servers that don't support STAT and BODY commands.
When server sends error 500, use alternative "HEADER" and "ARTICLE" instead,
which are less efficient.
2012-12-24 20:12:01 +01:00
shypike
e2f41d3761 Add special "wait_for_dfolder", will wait for "temp download folder" at startup.
At startup, wait for the temporary download folder to come on line.
Supports situations where external drives are used, which do not mount before
SABnzbd starts up.
2012-12-23 12:11:18 +01:00
ShyPike
ab1372c7fc Prevent crash in DateSorter. 2012-12-20 18:46:45 +01:00
ShyPike
e305678cf4 In Sorting the %fn substitution sometimes fails to rename the file properly.
The Sorting code assumed that the file name used as the source for %fn
is always a base name. Sometimes it can be a full name. Deal with it.
2012-12-19 21:51:00 +01:00
49 changed files with 7398 additions and 6578 deletions

View File

@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 0.7.7 ***
*** This is SABnzbd 0.7.10 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,

View File

@@ -1,3 +1,36 @@
-------------------------------------------------------------------------------
0.7.10Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Disable obsolete newzbin bookmark readout
- Show speed when downloading in Forced mode while paused
- Plush History icons repair and unpack were swapped
- Try to repair rar/par sets with obfuscated names
- Reset "today" byte counters at midnight even when idle
- Display next RSS scan moment in Cfg->RSS
- An email about a failed job should say that the download failed
- Report errors coming from fully encrypted rar files
- Accept NNTP error 400 without "too many connection" clues as a transient error.
- Accept %fn (next to %fn.%ext) as end parameter in sorting strings.
-------------------------------------------------------------------------------
0.7.9Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix fatal error in decoder when encountering a malformed article
- Fix compatibility with free.xsusenet.com
- Small fix in smpl-black CSS
-------------------------------------------------------------------------------
0.7.8Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix problem with %fn substitution in Sorting
- Add special "wait_for_dfolder", enables waiting for external temp download folder
- Work-around for servers that do not support STAT command
- Removed articles are now listed separately in download report
- Add "abort" option to encryption detection
- Fix missing Retry link for "Out of retention" jobs.
- Option to abort download when it is clear that not enough data is available
- Support "nzbname" parameter in addfile/addlocalfile api calls for
ZIP files with a single NZB
- Support NZB-1.1 meta data "password" and "category"
- Don't retry an empty but correct NZB from an indexer
-------------------------------------------------------------------------------
0.7.7Final by The SABnzbd-Team
-------------------------------------------------------------------------------

View File

@@ -1,5 +1,5 @@
(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>
The SABnzbd-team is:

View File

@@ -1,10 +1,10 @@
SABnzbd 0.7.7
SABnzbd 0.7.10
-------------------------------------------------------------------------------
0) LICENSE
-------------------------------------------------------------------------------
(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License

View File

@@ -1,4 +1,4 @@
(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License

View File

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 0.7.7
Summary: SABnzbd-0.7.7
Version: 0.7.10
Summary: SABnzbd-0.7.10
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

View File

@@ -1,15 +1,18 @@
Release Notes - SABnzbd 0.7.7
===============================
Release Notes - SABnzbd 0.7.10
================================
## Features
- Updated unrar to 4.20 (OSX/Windows)
- Optional web-ui watchdog (diagnostic tool)
- Removed account support for defunct indexers
- Try to repair rar/par sets with obfuscated names
- Display next RSS scan moment in Cfg->RSS
## Bug fixes
- Fix some issues with orphaned items
- Generic sort didn't always rename media files in multi-part jobs properly
- Always show RSS items in the same order as the original RSS feed
- Disable obsolete newzbin bookmark readout
- Show speed when downloading in Forced mode while paused
- Plush History icons repair and unpack were swapped
- Reset "today" byte counters at midnight even when idle
- An email about a failed job should say that the download failed
- Report errors coming from fully encrypted rar files
- Accept %fn (as well as %fn.%ext) as end parameter in sorting strings.
## What's new in 0.7.0
@@ -39,7 +42,7 @@ Release Notes - SABnzbd 0.7.7
built-in post-processing options that automatically verify, repair,
extract and clean up posts downloaded from Usenet.
(c) Copyright 2007-2012 by "The SABnzbd-team" \<team@sabnzbd.org\>
(c) Copyright 2007-2013 by "The SABnzbd-team" \<team@sabnzbd.org\>
### IMPORTANT INFORMATION about release 0.7.x

View File

@@ -1,5 +1,5 @@
#!/usr/bin/python -OO
# Copyright 2008-2012 The SABnzbd-Team <team@sabnzbd.org>
# Copyright 2008-2013 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
@@ -260,7 +260,7 @@ def print_version():
print """
%s-%s
Copyright (C) 2008-2012, The SABnzbd-Team <team@sabnzbd.org>
Copyright (C) 2008-2013, The SABnzbd-Team <team@sabnzbd.org>
SABnzbd comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions. It is licensed under the

View File

@@ -21,7 +21,7 @@
</table>
</div>
<div class="padding alt">
<h5 class="copyright">Copyright &copy; 2008-2012 The SABnzbd Team &lt;<span style="color: #0000ff;">team@sabnzbd.org</span>&gt;</h5>
<h5 class="copyright">Copyright &copy; 2008-2013 The SABnzbd Team &lt;<span style="color: #0000ff;">team@sabnzbd.org</span>&gt;</h5>
<p class="copyright"><small>$T('yourRights')</small></p>
</div>
<!--#if $news_items#-->

View File

@@ -95,6 +95,7 @@
<label class="config narrow" for="rss_rate">$T('opt-rss_rate')</label>
<input type="number" name="rss_rate" id="rss_rate" value="$rss_rate" size="8" min="15" max="1440" />
<input type="submit" value="$T('button-save')" />
<span class="config narrow">&nbsp;&nbsp;$T('Next scan at:')&nbsp;$rss_next</span>
<span class="desc narrow">$T('explain-rss_rate')</span>
</div>
</fieldset>

View File

@@ -83,6 +83,11 @@
</div><!-- /col2 -->
<div class="col1">
<fieldset>
<div class="field-pair">
<label class="config" for="fail_hopeless">$T('opt-fail_hopeless')</label>
<input type="checkbox" name="fail_hopeless" id="fail_hopeless" value="1" <!--#if int($fail_hopeless) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-fail_hopeless')</span>
</div>
<div class="field-pair alt">
<label class="config" for="pre_check">$T('opt-pre_check')</label>
<input type="checkbox" name="pre_check" id="pre_check" value="1" <!--#if int($pre_check) > 0 then 'checked="checked"' else ""#--> />
@@ -104,7 +109,11 @@
</div>
<div class="field-pair">
<label class="config" for="pause_on_pwrar">$T('opt-pause_on_pwrar')</label>
<input type="checkbox" name="pause_on_pwrar" id="pause_on_pwrar" value="1" <!--#if int($pause_on_pwrar) > 0 then 'checked="checked"' else ""#--> />
<select name="pause_on_pwrar" id="pause_on_pwrar">
<option value="0" <!--#if int($pause_on_pwrar) == 0 then 'selected="selected" class="selected"' else ""#--> >$T('nodupes-off')</option>
<option value="1" <!--#if int($pause_on_pwrar) == 1 then 'selected="selected" class="selected"' else ""#--> >$T('nodupes-pause')</option>
<option value="2" <!--#if int($pause_on_pwrar) == 2 then 'selected="selected" class="selected"' else ""#--> >$T('abort')</option>
</select>
<span class="desc">$T('explain-pause_on_pwrar')</span>
</div>
<div class="field-pair alt">

View File

@@ -1242,7 +1242,7 @@ $.plush.histprevslots = $.plush.histnoofslots; // for the next refresh
SetQueueETAStats : function(speed,kbpersec,timeleft,eta) {
// ETA/speed stats at top of queue
if (kbpersec < 1 || $.plush.paused) {
if (kbpersec < 100 && $.plush.paused) {
$('#stats_eta').html('&mdash;');
$('#stats_speed').html('&mdash;');
$('#time-left').attr('title','&mdash;'); // Tooltip on "time left"

View File

@@ -401,12 +401,12 @@ body {
.sprite_hv_download{ background-position: 0 -167px; }
.sprite_hv_error{ background-position: 0 -193px; }
.sprite_hv_filejoin{ background-position: 0 -219px; }
.sprite_hv_repair{ background-position: 0 -245px; }
.sprite_hv_unpack{ background-position: 0 -245px; }
.sprite_hv_report{ background-position: 0 -271px; }
.sprite_hv_script{ background-position: 0 -297px; }
.sprite_hv_star{ background-position: 0 -323px; }
.sprite_hv_stats{ background-position: 0 -349px; }
.sprite_hv_unpack{ background-position: 0 -375px; }
.sprite_hv_repair{ background-position: 0 -375px; }
.sprite_progress_done{ background-position: 0 -401px; } /* queue.tmpl */
.sprite_progressbar_bg{ background-position: 0 -430px; }
.sprite_q_menu_pausefor{ background-position: 0 -459px; }

View File

@@ -24,15 +24,15 @@ border-top: 1px dotted #222;
}
#progressBar {
background-color: #fff;
border: 1px solid #000;
background-color: #fff;
border: 1px solid #000;
}
#progressBartop {
background-color: #fff;
border: 1px solid #ccc;
background-color: #fff;
border: 1px solid #ccc;
}
#percentageBar {
background-color: #4B4545;
background-color: #4B4545;
}
@@ -83,7 +83,7 @@ table{border-spacing:0;}
input, select {
input, select, option {
background-color:#232323;
border-color:#3a3a3a;
color:white;
@@ -110,4 +110,4 @@ span.unselected {
color: white;
background-color:#333;
border: 1px solid #555;
}
}

View File

@@ -39,9 +39,14 @@ build_folders = (
)
# Check presense of all builds
sharepath = os.environ.get('SHARE')
if not (sharepath and os.path.exists(sharepath)):
print 'Build share not defined or not found'
exit(1)
build_paths = []
for build in builds:
path = os.path.join(os.environ['HOME'], 'project/osx/%s-%s.cpio' % (prod, build))
path = os.path.join(sharepath,'%s-%s.cpio' % (prod, build))
if os.path.exists(path):
build_paths.append(path)
else:
@@ -100,4 +105,4 @@ print 'Make image internet-enabled'
os.system("hdiutil internet-enable %s" % fileDmg)
print 'Copy GZ file'
os.system('cp ~/project/osx/%s .' % fileOSr)
os.system('cp "%s" .' % os.path.join(sharepath, fileOSr))

View File

@@ -438,8 +438,8 @@ if target == 'app':
os.system('rm dist/SABnzbd.app/Contents/Resources/site.py')
# Add the SabNotifier app
if OSX_ML and os.path.exists('/project/sabnotifier/SABnzbd.app'):
os.system("cp -pR /project/sabnotifier/SABnzbd.app dist/SABnzbd.app/Contents/Resources/")
if OSX_ML and os.path.exists(os.path.join(os.environ['HOME'], 'sabnotifier/SABnzbd.app')):
os.system("cp -pR $HOME/sabnotifier/SABnzbd.app dist/SABnzbd.app/Contents/Resources/")
# Add License files
os.mkdir("dist/SABnzbd.app/Contents/Resources/licenses/")
@@ -448,19 +448,20 @@ if target == 'app':
os.system("sleep 5")
# Archive result to share
dest_path = '/Volumes/VMware Shared Folders/osx'
if not os.path.exists(dest_path):
dest_path = '$HOME/project/osx'
cpio_path = os.path.join(dest_path, prod) + '-' + postfix + '.cpio'
print 'Create CPIO file %s' % cpio_path
delete_files(cpio_path)
os.system('ditto -c -z dist/ "%s"' % cpio_path)
# Archive result to share, if present
dest_path = os.environ.get('SHARE')
if dest_path and os.path.exists(dest_path):
cpio_path = os.path.join(dest_path, prod) + '-' + postfix + '.cpio'
print 'Create CPIO file %s' % cpio_path
delete_files(cpio_path)
os.system('ditto -c -z dist/ "%s"' % cpio_path)
else:
print 'No SHARE variable set, build result not copied'
if OSX_ML:
print 'Create src %s' % fileOSr
delete_files(fileOSr)
os.system('tar -czf %s --exclude ".git*" --exclude "sab*.zip" --exclude "SAB*.tar.gz" --exclude "*.cmd" --exclude "*.pyc" '
os.system('tar -czf "%s" --exclude ".git*" --exclude "sab*.zip" --exclude "SAB*.tar.gz" --exclude "*.cmd" --exclude "*.pyc" '
'--exclude "*.sparseimage*" --exclude "dist" --exclude "build" --exclude "*.nsi" --exclude "win" --exclude "*.dmg" '
'./ >/dev/null' % os.path.join(dest_path, fileOSr) )

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2012-04-28 12:01+0000\n"
"PO-Revision-Date: 2011-06-26 10:50+0000\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"PO-Revision-Date: 2012-12-28 10:58+0000\n"
"Last-Translator: Thomas Lucke (Lucky) <Unknown>\n"
"Language-Team: German <de@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2012-04-29 05:17+0000\n"
"X-Generator: Launchpad (build 15149)\n"
"X-Launchpad-Export-Date: 2012-12-29 05:11+0000\n"
"X-Generator: Launchpad (build 16378)\n"
#: email/email.tmpl:1
msgid ""
@@ -189,3 +189,24 @@ msgid ""
"\n"
"Bye\n"
msgstr ""
"## Translation by Thomas Lucke (Lucky)\n"
"##\n"
"## Bad URL Fetch Email template for SABnzbd\n"
"## This a Cheetah template\n"
"## Documentation: http://sabnzbd.wikidot.com/email-templates\n"
"##\n"
"## Newlines and whitespace are significant!\n"
"##\n"
"## These are the email headers\n"
"To: $to\n"
"From: $from\n"
"Date: $date\n"
"Subject: SABnzbd konnte eine NZB-Datei nicht herunterladen\n"
"X-priority: 5\n"
"X-MS-priority: 5\n"
"## After this comes the body, the empty line is required!\n"
"\n"
"Hallo,\n"
"\n"
"SABnzbd konnte die NZB-Datei von $url nicht herrunterladen.\n"
"Die Fehlermeldung war: $msg\n"

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

File diff suppressed because it is too large Load Diff

View File

@@ -8,14 +8,14 @@ msgstr ""
"Project-Id-Version: sabnzbd\n"
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
"POT-Creation-Date: 2012-08-14 18:42+0000\n"
"PO-Revision-Date: 2012-05-03 03:22+0000\n"
"Last-Translator: Rene <Unknown>\n"
"PO-Revision-Date: 2012-12-28 11:02+0000\n"
"Last-Translator: Steffen Thomsen <urskov@gmail.com>\n"
"Language-Team: Danish <da@li.org>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Launchpad-Export-Date: 2012-08-15 05:11+0000\n"
"X-Generator: Launchpad (build 15801)\n"
"X-Launchpad-Export-Date: 2012-12-29 05:12+0000\n"
"X-Generator: Launchpad (build 16378)\n"
#: NSIS_Installer.nsi:425
msgid "Go to the SABnzbd Wiki"
@@ -27,11 +27,11 @@ msgstr "Vis udgivelsesbemærkninger"
#: NSIS_Installer.nsi:429
msgid "Support the project, Donate!"
msgstr "Støtte projektet, donere!"
msgstr "Støt projektet, donér!"
#: NSIS_Installer.nsi:431
msgid "Please close \"SABnzbd.exe\" first"
msgstr "Luk 'SABnzbd.exe' først"
msgstr "Luk venligst \"SABnzbd.exe\" først"
#: NSIS_Installer.nsi:433
msgid ""
@@ -52,7 +52,7 @@ msgstr "Kør ved opstart"
#: NSIS_Installer.nsi:439
msgid "Desktop Icon"
msgstr "Skrivebords ikon"
msgstr "Skrivebordsikon"
#: NSIS_Installer.nsi:441
msgid "NZB File association"
@@ -64,19 +64,19 @@ msgstr "Slet program"
#: NSIS_Installer.nsi:445
msgid "Delete Settings"
msgstr "Slet instillinger"
msgstr "Slet indstillinger"
#: NSIS_Installer.nsi:447
msgid ""
"This system requires the Microsoft runtime library VC90 to be installed "
"first. Do you want to do that now?"
msgstr ""
"Dette system kræver, at Microsoft runtime biblioteket VC90, der skal "
"installeres først. Ønsker du at gøre det nu?"
"Dette system kræver, at Microsoft runtime biblioteket VC90 skal installeres "
"først. Ønsker du at gøre det nu?"
#: NSIS_Installer.nsi:449
msgid "Downloading Microsoft runtime installer..."
msgstr "Downloading Microsoft runtime installer..."
msgstr "Downloader Microsoft runtime installationsfil..."
#: NSIS_Installer.nsi:451
msgid "Download error, retry?"
@@ -91,12 +91,13 @@ msgid ""
"You cannot overwrite an existing installation. \\n\\nClick `OK` to remove "
"the previous version or `Cancel` to cancel this upgrade."
msgstr ""
"Du kan ikke overskrive en eksisterende installation. Klik `OK` for at fjerne "
"den tidligere version eller `Annuller` for at annullere denne opgradering."
"Du kan ikke overskrive en eksisterende installation. \\n\\nKlik `OK` for at "
"fjerne den tidligere version eller `Annuller` for at annullere denne "
"opgradering."
#: NSIS_Installer.nsi:457
msgid "Your settings and data will be preserved."
msgstr "Dine indstillinger og data vil blive opbevaret."
msgstr "Dine indstillinger og data vil blive bevaret."
#~ msgid "Start SABnzbd (hidden)"
#~ msgstr "Start SABnzbd"

View File

@@ -220,6 +220,13 @@ def initialize(pause_downloader = False, clean_up = False, evalSched=False, repa
# New admin folder
misc.remove_all(cfg.admin_dir.get_path(), '*.sab')
### Optionally wait for "incomplete" to become online
if cfg.wait_for_dfolder():
wait_for_download_folder()
else:
cfg.download_dir.set(cfg.download_dir(), create=True)
cfg.download_dir.set_create(True)
### Set access rights for "incomplete" base folder
misc.set_permissions(cfg.download_dir.get_path(), recursive=False)
@@ -617,7 +624,7 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT
logging.info("Traceback: ", exc_info = True)
if ext.lower() in ('.zip', '.rar'):
return ProcessArchiveFile(filename, path, pp, script, cat, priority=priority)
return ProcessArchiveFile(filename, path, pp, script, cat, priority=priority, nzbname=nzbname)
else:
return ProcessSingleFile(filename, path, pp, script, cat, priority=priority, nzbname=nzbname, keep=keep, reuse=reuse)
@@ -1075,6 +1082,13 @@ def check_incomplete_vs_complete():
cfg.download_dir.set('incomplete')
def wait_for_download_folder():
""" Wait for download folder to become available """
while not cfg.download_dir.test_path():
logging.debug('Waiting for "incomplete" folder')
time.sleep(2.0)
# Required wrapper because nzbstuff.py cannot import downloader.py
def active_primaries():
return sabnzbd.downloader.Downloader.do.active_primaries()

View File

@@ -52,7 +52,7 @@ from sabnzbd.utils.json import JsonWriter
from sabnzbd.utils.pathbrowser import folders_at_path
from sabnzbd.misc import loadavg, to_units, diskfree, disktotal, get_ext, \
get_filename, int_conv, globber, time_format, remove_all, \
starts_with_path
starts_with_path, cat_convert
from sabnzbd.encoding import xml_name, unicoder, special_fixer, platform_encode, html_escape
from sabnzbd.postproc import PostProcessor
from sabnzbd.articlecache import ArticleCache
@@ -346,7 +346,7 @@ def _api_addlocalfile(name, output, kwargs):
if get_ext(name) in ('.zip', '.rar'):
res = sabnzbd.dirscanner.ProcessArchiveFile(\
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True)
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname)
elif get_ext(name) in ('.nzb', '.gz'):
res = sabnzbd.dirscanner.ProcessSingleFile(\
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname)
@@ -650,7 +650,7 @@ def _api_test_email(name, output, kwargs):
pack['unpack'] = ['action 1', 'action 2']
res = sabnzbd.emailer.endjob('I had a d\xe8ja vu', 123, 'unknown', True,
os.path.normpath(os.path.join(cfg.complete_dir.get_path(), '/unknown/I had a d\xe8ja vu')),
123*MEBI, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0)
123*MEBI, None, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0)
if res == 'Email succeeded':
res = None
return report(output, error=res)

View File

@@ -112,8 +112,14 @@ class Assembler(Thread):
logging.debug('Got md5pack for set %s', setname)
if check_encrypted_rar(nzo, filepath):
logging.warning(Ta('WARNING: Paused job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
nzo.pause()
if cfg.pause_on_pwrar() == 1:
logging.warning(Ta('WARNING: Paused job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
nzo.pause()
else:
logging.warning(Ta('WARNING: Aborted job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
nzo.fail_msg = T('Aborted, encryption detected')
import sabnzbd.nzbqueue
sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo)
nzf.completed = True
else:
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False)
@@ -294,11 +300,11 @@ def is_cloaked(path, names):
def check_encrypted_rar(nzo, filepath):
""" Check if file is rar and is encrypted """
encrypted = False
if not nzo.password and cfg.pause_on_pwrar() and is_rarfile(filepath):
if not nzo.password and not nzo.meta.get('password') and cfg.pause_on_pwrar() and is_rarfile(filepath):
try:
zf = RarFile(filepath, all_names=True)
encrypted = zf.encrypted or is_cloaked(filepath, zf.namelist())
if encrypted and int(nzo.encrypted) < 2:
if encrypted and int(nzo.encrypted) < 2 and not nzo.reuse:
nzo.encrypted = 1
else:
encrypted = False

View File

@@ -398,10 +398,20 @@ class BPSMeter(object):
return None, 0, 0
def midnight(self):
""" Midnight action: dummy update for all servers """
for server in self.day_total:
self.update(server)
def quota_handler():
""" To be called from scheduler """
logging.debug('Checking quota')
BPSMeter.do.reset_quota()
def midnight_action():
if BPSMeter.do:
BPSMeter.do.midnight()
BPSMeter()

View File

@@ -103,6 +103,7 @@ par2_multicore = OptionBool('misc', 'par2_multicore', True)
allow_64bit_tools = OptionBool('misc', 'allow_64bit_tools', True)
allow_streaming = OptionBool('misc', 'allow_streaming', False)
pre_check = OptionBool('misc', 'pre_check', False)
fail_hopeless = OptionBool('misc', 'fail_hopeless', False)
req_completion_rate = OptionNumber('misc', 'req_completion_rate', 100.2, 100, 200)
newzbin_username = OptionStr('newzbin', 'username')
@@ -162,7 +163,7 @@ xxx_apikey = OptionStr('nzbxxx', 'apikey')
configlock = OptionBool('misc', 'config_lock', 0)
umask = OptionStr('misc', 'permissions', '', validation=validate_octal)
download_dir = OptionDir('misc', 'download_dir', DEF_DOWNLOAD_DIR, validation=validate_safedir)
download_dir = OptionDir('misc', 'download_dir', DEF_DOWNLOAD_DIR, create=False, validation=validate_safedir)
download_free = OptionStr('misc', 'download_free')
complete_dir = OptionDir('misc', 'complete_dir', DEF_COMPLETE_DIR, create=False, \
apply_umask=True, validation=validate_notempty)
@@ -176,6 +177,8 @@ dirscan_speed = OptionNumber('misc', 'dirscan_speed', DEF_SCANRATE, 0, 3600)
size_limit = OptionStr('misc', 'size_limit', '0')
password_file = OptionDir('misc', 'password_file', '', create=False)
fsys_type = OptionNumber('misc', 'fsys_type', 0, 0, 2)
wait_for_dfolder = OptionBool('misc', 'wait_for_dfolder', False)
warn_empty_nzb = OptionBool('misc', 'warn_empty_nzb', True)
cherryhost = OptionStr('misc', 'host', DEF_HOST)
if sabnzbd.WIN32:

View File

@@ -186,6 +186,14 @@ class OptionDir(Option):
res, path = sabnzbd.misc.create_real_path(self.ident()[1], self.__root, value, self.__apply_umask)
return path
def test_path(self):
""" Return True if path exists """
value = self.get()
if value:
return os.path.exists(sabnzbd.misc.real_path(self.__root, value))
else:
return False
def set_root(self, root):
""" Set new root, is assumed to be valid """
self.__root = root
@@ -210,6 +218,10 @@ class OptionDir(Option):
self._Option__set(value)
return error
def set_create(self, value):
""" Set auto-creation value """
self.__create = value
class OptionList(Option):
""" List option class """

View File

@@ -84,6 +84,8 @@ class Decoder(Thread):
article, lines = art_tup
nzf = article.nzf
nzo = nzf.nzo
art_id = article.article
killed = False
data = None
@@ -96,13 +98,13 @@ class Decoder(Thread):
if nzo.precheck:
raise BadYenc
register = True
logging.debug("Decoding %s", article)
logging.debug("Decoding %s", art_id)
data = decode(article, lines)
nzf.article_count += 1
found = True
except IOError, e:
logme = Ta('Decoding %s failed') % article
logme = Ta('Decoding %s failed') % art_id
logging.info(logme)
sabnzbd.downloader.Downloader.do.pause()
@@ -113,7 +115,7 @@ class Decoder(Thread):
register = False
except CrcError, e:
logme = Ta('CRC Error in %s (%s -> %s)') % (article, e.needcrc, e.gotcrc)
logme = Ta('CRC Error in %s (%s -> %s)') % (art_id, e.needcrc, e.gotcrc)
logging.info(logme)
data = e.data
@@ -135,27 +137,32 @@ class Decoder(Thread):
# Examine headers (for precheck) or body (for download)
# And look for DMCA clues (while skipping "X-" headers)
for line in lines:
if not line.startswith('X-') and match_str(line.lower(), ('dmca', 'removed', 'cancel', 'blocked')):
logging.info('Article removed from server (%s)', article)
lline = line.lower()
if 'message-id:' in lline:
found = True
if not line.startswith('X-') and match_str(lline, ('dmca', 'removed', 'cancel', 'blocked')):
killed = True
break
if killed:
logme = 'Article removed from server (%s)'
logging.info(logme, art_id)
if nzo.precheck:
if found or not killed:
if found and not killed:
# Pre-check, proper article found, just register
logging.debug('Server has article %s', article)
logging.debug('Server has article %s', art_id)
register = True
elif not killed and not found:
logme = Ta('Badly formed yEnc article in %s') % article
logme = Ta('Badly formed yEnc article in %s') % art_id
logging.info(logme)
if not found:
if not found or killed:
new_server_found = self.__search_new_server(article)
if new_server_found:
register = False
logme = None
except:
logme = Ta('Unknown Error while decoding %s') % article
logme = Ta('Unknown Error while decoding %s') % art_id
logging.info(logme)
logging.info("Traceback: ", exc_info = True)
@@ -165,7 +172,10 @@ class Decoder(Thread):
logme = None
if logme:
article.nzf.nzo.inc_log('bad_art_log', logme)
if killed:
article.nzf.nzo.inc_log('killed_art_log', art_id)
else:
article.nzf.nzo.inc_log('bad_art_log', art_id)
else:
new_server_found = self.__search_new_server(article)
@@ -236,7 +246,7 @@ def decode(article, data):
if not ybegin:
found = False
try:
for i in xrange(10):
for i in xrange(min(40, len(data))):
if data[i].startswith('begin '):
nzf.filename = name_fixer(data[i].split(None, 2)[2])
nzf.type = 'uu'
@@ -300,7 +310,7 @@ def yCheck(data):
yend = None
## Check head
for i in xrange(40):
for i in xrange(min(40, len(data))):
try:
if data[i].startswith('=ybegin '):
splits = 3

View File

@@ -59,7 +59,8 @@ def CompareStat(tup1, tup2):
return True
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False, priority=None, url=''):
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
priority=None, url='', nzbname=None):
""" Analyse ZIP file and create job(s).
Accepts ZIP files with ONLY nzb/nfo/folder files in it.
returns (status, nzo_ids)
@@ -88,6 +89,7 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
status = 1
names = zf.namelist()
names.sort()
nzbcount = 0
for name in names:
name = name.lower()
if not (name.endswith('.nzb') or name.endswith('.nfo') or name.endswith('/')):
@@ -95,7 +97,10 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
break
elif name.endswith('.nzb'):
status = 0
nzbcount += 1
if status == 0:
if nzbcount != 1:
nzbname = None
for name in names:
if name.lower().endswith('.nzb'):
try:
@@ -108,7 +113,8 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
name = misc.sanitize_foldername(name)
if data:
try:
nzo = nzbstuff.NzbObject(name, 0, pp, script, data, cat=cat, url=url, priority=priority)
nzo = nzbstuff.NzbObject(name, 0, pp, script, data, cat=cat, url=url,
priority=priority, nzbname=nzbname)
except:
nzo = None
if nzo:
@@ -171,6 +177,9 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
except TypeError:
# Duplicate, ignore
nzo = None
except ValueError:
# Empty, but correct file
return -1, nzo_ids
except:
if data.find("<nzb") >= 0 and data.find("</nzb") < 0:
# Looks like an incomplete file, retry

View File

@@ -47,6 +47,8 @@ _PENALTY_SHARE = 10 # Account sharing detected
_PENALTY_TOOMANY = 10 # Too many connections
_PENALTY_PERM = 10 # Permanent error, like bad username/password
_PENALTY_SHORT = 1 # Minimal penalty when no_penalties is set
_PENALTY_VERYSHORT = 0.1 # Error 400 without cause clues
TIMER_LOCK = RLock()
@@ -77,7 +79,8 @@ class Server(object):
self.warning = ''
self.info = None # Will hold getaddrinfo() list
self.request = False # True if a getaddrinfo() request is pending
self.oddball = 'free.xsusenet.com' in host
self.have_body = 'free.xsusenet.com' not in host
self.have_stat = True # Assume server has "STAT", until proven otherwise
for i in range(threads):
self.idle_threads.append(NewsWrapper(self, i+1))
@@ -502,8 +505,7 @@ class Downloader(Thread):
ecode = msg[:3]
display_msg = ' [%s]' % msg
logging.debug('Server login problem: %s, %s', ecode, msg)
if ((ecode in ('502', '400')) and clues_too_many(msg)) or \
(ecode == '481' and clues_too_many(msg)):
if ecode in ('502', '481', '400') and clues_too_many(msg):
# Too many connections: remove this thread and reduce thread-setting for server
# Plan to go back to the full number after a penalty timeout
if server.active:
@@ -536,6 +538,12 @@ class Downloader(Thread):
else:
penalty = _PENALTY_502
block = True
elif ecode == '400':
# Temp connection problem?
if server.active:
logging.debug('Unspecified error 400 from server %s', server.host)
penalty = _PENALTY_VERYSHORT
block = True
else:
# Unknown error, just keep trying
if server.active:
@@ -599,6 +607,19 @@ class Downloader(Thread):
msg = T('Server %s requires user/password') % ('%s:%s' % (nw.server.host, nw.server.port))
self.__reset_nw(nw, msg, quit=True)
elif code == '500':
if nzo.precheck:
# Assume "STAT" command is not supported
server.have_stat = False
logging.debug('Server %s does not support STAT', server.host)
else:
# Assume "BODY" command is not supported
server.have_body = False
logging.debug('Server %s does not support BODY', server.host)
nw.lines = []
nw.data = ''
self.__request_article(nw)
if done:
server.bad_cons = 0  # Successful data, clear "bad" counter
if sabnzbd.LOG_ALL:

View File

@@ -187,12 +187,16 @@ def send_with_template(prefix, parm):
return ret
def endjob(filename, msgid, cat, status, path, bytes, stages, script, script_output, script_ret):
def endjob(filename, msgid, cat, status, path, bytes, fail_msg, stages, script, script_output, script_ret):
""" Send end-of-job email """
# Translate the stage names
tr = sabnzbd.api.Ttemplate
xstages = {}
if not status and fail_msg:
xstages = {tr('stage-fail'): (fail_msg,)}
else:
xstages = {}
for stage in stages:
lines = []
for line in stages[stage]:

View File

@@ -185,7 +185,7 @@ def check_apikey(kwargs, nokey=False):
Return None when OK, otherwise an error message
"""
def log_warning(txt):
txt = '%s %s' % (txt, cherrypy.request.headers.get('User-Agent', '??'))
txt = '%s %s>%s' % (txt, cherrypy.request.remote.ip, cherrypy.request.headers.get('User-Agent', '??'))
logging.warning('%s', txt)
output = kwargs.get('output')
@@ -1163,7 +1163,7 @@ SWITCH_LIST = \
'ignore_samples', 'pause_on_post_processing', 'quick_check', 'nice', 'ionice',
'ssl_type', 'pre_script', 'pause_on_pwrar', 'ampm', 'sfv_check', 'folder_rename',
'unpack_check', 'quota_size', 'quota_day', 'quota_resume', 'quota_period',
'pre_check', 'max_art_tries', 'max_art_opt'
'pre_check', 'max_art_tries', 'max_art_opt', 'fail_hopeless'
)
#------------------------------------------------------------------------------
@@ -1218,7 +1218,7 @@ SPECIAL_BOOL_LIST = \
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
'web_watchdog'
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
)
SPECIAL_VALUE_LIST = \
( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',
@@ -1676,6 +1676,7 @@ class ConfigRss(object):
active_feed = kwargs.get('feed', '')
conf['active_feed'] = active_feed
conf['rss'] = rss
conf['rss_next'] = time.strftime(time_format('%H:%M'),time.localtime(sabnzbd.rss.next_run()))
if active_feed:
readout = bool(self.__refresh_readout)

View File

@@ -32,7 +32,7 @@ from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
reliable_unpack_names, unicoder, latin1, platform_encode
from sabnzbd.utils.rarfile import RarFile, is_rarfile
from sabnzbd.misc import format_time_string, find_on_path, make_script_path, int_conv, \
flag_file, real_path
flag_file, real_path, globber
from sabnzbd.tvsort import SeriesSorter
import sabnzbd.cfg as cfg
from sabnzbd.constants import Status, QCHECK_FILE, RENAMES_FILE
@@ -471,13 +471,15 @@ def rar_extract(rarfile, numrars, one_folder, nzo, setname, extraction_path):
passwords = [nzo.password]
else:
passwords = []
# Append meta passwords, to prevent changing the original list
passwords.extend(nzo.meta.get('password', []))
pw_file = cfg.password_file.get_path()
if pw_file:
try:
pwf = open(pw_file, 'r')
passwords = pwf.read().split('\n')
lines = pwf.read().split('\n')
# Remove empty lines and space-only passwords and remove surrounding spaces
passwords = [pw.strip('\r\n ') for pw in passwords if pw.strip('\r\n ')]
passwords.extend([pw.strip('\r\n ') for pw in lines if pw.strip('\r\n ')])
pwf.close()
logging.info('Read the passwords file %s', pw_file)
except IOError:
@@ -609,7 +611,20 @@ def rar_extract_core(rarfile, numrars, one_folder, nzo, setname, extraction_path
nzo.fail_msg = T('Unpacking failed, write error or disk is full?')
msg = ('[%s] ' + Ta('Unpacking failed, write error or disk is full?')) % setname
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
logging.warning(Ta('ERROR: write error (%s)'), line[11:])
logging.error(Ta('ERROR: write error (%s)'), line[11:])
fail = 1
elif line.startswith('Cannot create'):
line2 = proc.readline()
if 'must not exceed 260' in line2:
nzo.fail_msg = T('Unpacking failed, path is too long')
msg = '[%s] %s: %s' % (Ta('Unpacking failed, path is too long'), setname, line[13:])
logging.error(Ta('ERROR: path too long (%s)'), line[13:])
else:
nzo.fail_msg = T('Unpacking failed, write error or disk is full?')
msg = '[%s] %s: %s' % (Ta('Unpacking failed, write error or disk is full?'), setname, line[13:])
logging.error(Ta('ERROR: write error (%s)'), line[13:])
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
fail = 1
elif line.startswith('ERROR: '):
@@ -781,8 +796,9 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
parfile = os.path.join(workdir, parfile_nzf.filename)
old_dir_content = os.listdir(workdir)
used_joinables = []
joinables = []
used_joinables = ()
joinables = ()
used_par2 = ()
setpars = pars_of_set(workdir, setname)
result = readd = False
@@ -806,8 +822,8 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
joinables, zips, rars, ts = build_filelists(workdir, None, check_rar=False)
finished, readd, pars, datafiles, used_joinables = PAR_Verify(parfile, parfile_nzf, nzo,
setname, joinables)
finished, readd, pars, datafiles, used_joinables, used_par2 = PAR_Verify(parfile, parfile_nzf, nzo,
setname, joinables)
if finished:
result = True
@@ -876,6 +892,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
if f in setpars:
deletables.append(os.path.join(workdir, f))
deletables.extend(used_joinables)
deletables.extend(used_par2)
for filepath in deletables:
if filepath in joinables:
joinables.remove(filepath)
@@ -895,6 +912,8 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
_RE_BLOCK_FOUND = re.compile('File: "([^"]+)" - found \d+ of \d+ data blocks from "([^"]+)"')
_RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
_RE_LOADING_PAR2 = re.compile('Loading "([^"]+)"\.')
_RE_LOADED_PAR2 = re.compile('Loaded (\d+) new packets')
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
""" Run par2 on par-set """
@@ -904,6 +923,8 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
cmd = 'r'
retry_classic = False
used_joinables = []
used_par2 = []
extra_par2_name = None
#set the current nzo status to "Verifying...". Used in History
nzo.status = Status.VERIFYING
start = time()
@@ -927,7 +948,11 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
command.append(joinable)
# Append the wildcard for this set
command.append('%s*' % os.path.join(os.path.split(parfile)[0], setname))
wildcard = '%s*' % os.path.join(os.path.split(parfile)[0], setname)
if len(globber(wildcard)) < 2:
# Support bizarre naming conventions
wildcard = os.path.join(os.path.split(parfile)[0], '*')
command.append(wildcard)
stup, need_shell, command, creationflags = build_command(command)
logging.debug('Starting par2: %s', command)
@@ -977,6 +1002,16 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
if 'Repairing:' not in line:
lines.append(line)
if extra_par2_name and line.startswith('Loading:') and line.endswith('%'):
continue
if extra_par2_name and line.startswith('Loaded '):
m = _RE_LOADED_PAR2.search(line)
if m and int(m.group(1)) > 0:
used_par2.append(os.path.join(nzo.downpath, extra_par2_name))
extra_par2_name = None
continue
extra_par2_name = None
if line.startswith('Invalid option specified'):
msg = T('[%s] PAR2 received incorrect options, check your Config->Switches settings') % unicoder(setname)
nzo.set_unpack_info('Repair', msg, set=setname)
@@ -997,6 +1032,12 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
start = time()
verified = 1
elif line.startswith('Loading "'):
# Found an extra par2 file. Only the next line will tell whether it's usable
m = _RE_LOADING_PAR2.search(line)
if m and m.group(1).lower().endswith('.par2'):
extra_par2_name = m.group(1)
elif line.startswith('Main packet not found') or 'The recovery file does not exist' in line:
## Initial parfile probably didn't decode properly,
logging.info(Ta('Main packet not found...'))
@@ -1161,6 +1202,13 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
logging.debug('PAR2 will rename "%s" to "%s"', old_name, new_name)
renames[new_name] = old_name
elif 'No details available for recoverable file' in line:
msg = unicoder(line.strip())
nzo.fail_msg = msg
msg = u'[%s] %s' % (unicoder(setname), msg)
nzo.set_unpack_info('Repair', msg, set=setname)
nzo.status = Status.FAILED
elif not verified:
if line.startswith('Verifying source files'):
nzo.set_action_line(T('Verifying'), '01/%02d' % verifytotal)
@@ -1212,7 +1260,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
logging.debug('Retry PAR2-joining with par2-classic')
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
else:
return finished, readd, pars, datafiles, used_joinables
return finished, readd, pars, datafiles, used_joinables, used_par2
#-------------------------------------------------------------------------------
@@ -1541,6 +1589,8 @@ def get_from_url(url, timeout=None):
p.wait()
else:
import urllib2
if sys.version_info < (2, 6):
timeout = 0
try:
if timeout:
s = urllib2.urlopen(url, timeout=timeout)

View File

@@ -320,11 +320,14 @@ class NewsWrapper(object):
def body(self, precheck):
self.timeout = time.time() + self.server.timeout
if precheck:
command = 'STAT <%s>\r\n' % (self.article.article)
elif self.server.oddball:
command = 'ARTICLE <%s>\r\n' % (self.article.article)
else:
if self.server.have_stat:
command = 'STAT <%s>\r\n' % (self.article.article)
else:
command = 'HEAD <%s>\r\n' % (self.article.article)
elif self.server.have_body:
command = 'BODY <%s>\r\n' % (self.article.article)
else:
command = 'ARTICLE <%s>\r\n' % (self.article.article)
self.nntp.sock.sendall(command)
def send_group(self, group):
@@ -433,17 +436,17 @@ def test_ipv6():
try:
info = socket.getaddrinfo('www.google.com', 80, socket.AF_INET6, socket.SOCK_STREAM,
socket.IPPROTO_IP, socket.AI_CANONNAME)
except socket.gaierror:
except:
return False
try:
af, socktype, proto, canonname, sa = info[0]
sock = socket.socket(af, socktype, proto)
sock.settimeout(4)
sock.settimeout(6)
sock.connect(sa[0:2])
sock.close()
return True
except socket.error:
except:
return False
_EXTERNAL_IPV6 = test_ipv6()

View File

@@ -322,6 +322,11 @@ class NzbParser(xml.sax.handler.ContentHandler):
self.in_group = False
self.in_segments = False
self.in_segment = False
self.in_head = False
self.in_meta = False
self.meta_type = ''
self.meta_types = {}
self.meta_content = []
self.filename = ''
self.avg_age = 0
self.valids = 0
@@ -374,6 +379,16 @@ class NzbParser(xml.sax.handler.ContentHandler):
elif name == 'groups' and self.in_nzb and self.in_file:
self.in_groups = True
elif name == 'head' and self.in_nzb:
self.in_head = True
elif name == 'meta' and self.in_nzb and self.in_head:
self.in_meta = True
meta_type = attrs.get('type')
if meta_type:
self.meta_type = meta_type.lower()
self.meta_content = []
elif name == 'nzb':
self.in_nzb = True
@@ -382,6 +397,8 @@ class NzbParser(xml.sax.handler.ContentHandler):
self.group_name.append(content)
elif self.in_segment:
self.article_id.append(content)
elif self.in_meta:
self.meta_content.append(content)
def endElement(self, name):
if name == 'group' and self.in_group:
@@ -436,12 +453,24 @@ class NzbParser(xml.sax.handler.ContentHandler):
sabnzbd.remove_data(nzf.nzf_id, self.nzo.workpath)
self.skipped_files += 1
elif name == 'head':
self.in_head = False
elif name == 'meta':
self.in_meta = False
if self.meta_type:
if self.meta_type not in self.meta_types:
self.meta_types[self.meta_type] = []
self.meta_types[self.meta_type].append(''.join(self.meta_content))
elif name == 'nzb':
self.in_nzb = False
def endDocument(self):
""" End of the file """
self.nzo.groups = self.groups
self.nzo.meta = self.meta_types
logging.debug('META-DATA = %s', self.nzo.meta)
files = max(1, self.valids)
self.nzo.avg_stamp = self.avg_age / files
self.nzo.avg_date = datetime.datetime.fromtimestamp(self.avg_age / files)
@@ -501,7 +530,9 @@ NzbObjectMapper = (
('oversized', 'oversized'), # Was detected as oversized
('create_group_folder', 'create_group_folder'),
('precheck', 'precheck'),
('incomplete', 'incomplete') # Was detected as incomplete
('incomplete', 'incomplete'), # Was detected as incomplete
('reuse', 'reuse'),
('meta', 'meta')                          # Meta-data from 1.1 type NZB
)
class NzbObject(TryList):
@@ -537,6 +568,7 @@ class NzbObject(TryList):
self.work_name = work_name
self.final_name = work_name
self.meta = {}
self.created = False # dirprefixes + work_name created
self.bytes = 0 # Original bytesize
self.bytes_downloaded = 0 # Downloaded byte
@@ -588,6 +620,7 @@ class NzbObject(TryList):
self.oversized = False
self.precheck = False
self.incomplete = False
self.reuse = reuse
if self.status == Status.QUEUED and not reuse:
self.precheck = cfg.pre_check()
if self.precheck:
@@ -695,12 +728,23 @@ class NzbObject(TryList):
if not self.files and not reuse:
self.purge_data(keep_basic=False)
if self.url:
logging.warning(Ta('Empty NZB file %s') + ' [%s]', filename, self.url)
if cfg.warn_empty_nzb():
mylog = logging.warning
else:
logging.warning(Ta('Empty NZB file %s'), filename)
mylog = logging.info
if self.url:
mylog(Ta('Empty NZB file %s') + ' [%s]', filename, self.url)
else:
mylog(Ta('Empty NZB file %s'), filename)
raise ValueError
if cat is None:
for metacat in self.meta.get('category', ()):
metacat = cat_convert(metacat)
if metacat:
cat = metacat
break
if cat is None:
for grp in self.groups:
cat = cat_convert(grp)
@@ -890,6 +934,14 @@ class NzbObject(TryList):
if file_done:
self.remove_nzf(nzf)
if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
#set the nzo status to return "Queued"
self.status = Status.QUEUED
self.set_download_report()
self.fail_msg = T('Aborted, cannot be completed')
self.set_unpack_info('Download', self.fail_msg, unique=False)
logging.debug('Abort job "%s", due to impossibility to complete it', self.final_name_pw_clean)
return True, True, True
if reset:
self.reset_try_list()
@@ -1030,7 +1082,7 @@ class NzbObject(TryList):
self.partable.pop(setname)
__re_quick_par2_check = re.compile('\.par2\W*', re.I)
def check_quality(self):
def check_quality(self, req_ratio=0):
""" Determine amount of articles present on servers
and return (gross available, nett) bytes
"""
@@ -1041,7 +1093,8 @@ class NzbObject(TryList):
for nzf_id in self.files_table:
nzf = self.files_table[nzf_id]
assert isinstance(nzf, NzbFile)
short += nzf.bytes_left
if nzf.deleted:
short += nzf.bytes_left
if self.__re_quick_par2_check.search(nzf.subject):
pars += nzf.bytes
anypars = True
@@ -1050,7 +1103,7 @@ class NzbObject(TryList):
have = need + pars - short
ratio = float(have) / float(max(1, need))
if anypars:
enough = ratio * 100.0 >= float(cfg.req_completion_rate())
enough = ratio * 100.0 >= (req_ratio or float(cfg.req_completion_rate()))
else:
enough = have >= need
logging.debug('Download Quality: enough=%s, have=%s, need=%s, ratio=%s', enough, have, need, ratio)
@@ -1074,15 +1127,18 @@ class NzbObject(TryList):
msg1 = T('Downloaded in %s at an average of %sB/s') % (complete_time, to_units(avg_bps*1024, dec_limit=1))
bad = self.nzo_info.get('bad_art_log', [])
miss = self.nzo_info.get('missing_art_log', [])
killed = self.nzo_info.get('killed_art_log', [])
dups = self.nzo_info.get('dup_art_log', [])
msg2 = msg3 = msg4 = ''
msg2 = msg3 = msg4 = msg5 = ''
if bad:
msg2 = ('<br/>' + T('%s articles were malformed')) % len(bad)
if miss:
msg3 = ('<br/>' + T('%s articles were missing')) % len(miss)
if dups:
msg4 = ('<br/>' + T('%s articles had non-matching duplicates')) % len(dups)
msg = ''.join((msg1, msg2, msg3, msg4,))
if killed:
msg5 = ('<br/>' + T('%s articles were removed')) % len(killed)
msg = ''.join((msg1, msg2, msg3, msg4, msg5, ))
self.set_unpack_info('Download', msg, unique=True)
if self.url:
self.set_unpack_info('Source', format_source_url(self.url), unique=True)
@@ -1395,6 +1451,8 @@ class NzbObject(TryList):
self.pp_active = False
self.avg_stamp = time.mktime(self.avg_date.timetuple())
self.wait = None
if self.meta is None:
self.meta = {}
TryList.__init__(self)

View File

@@ -234,6 +234,12 @@ def process_job(nzo):
nzo.save_attribs()
all_ok = False
if nzo.fail_msg: # Special case: aborted due to too many missing data
nzo.status = Status.FAILED
nzo.save_attribs()
all_ok = False
par_error = unpack_error = True
try:
# Get the folder containing the download result
@@ -241,7 +247,7 @@ def process_job(nzo):
tmp_workdir_complete = None
# if no files are present (except __admin__), fail the job
if len(globber(workdir)) < 2:
if all_ok and len(globber(workdir)) < 2:
if nzo.precheck:
enough, ratio = nzo.check_quality()
req_ratio = float(cfg.req_completion_rate()) / 100.0
@@ -272,7 +278,7 @@ def process_job(nzo):
filename, flag_repair, flag_unpack, flag_delete, script, cat)
## Par processing, if enabled
if flag_repair:
if all_ok and flag_repair:
par_error, re_add = parring(nzo, workdir)
if re_add:
# Try to get more par files
@@ -414,7 +420,7 @@ def process_job(nzo):
## Run the user script
script_path = make_script_path(script)
if all_ok and (not nzb_list) and script_path:
if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
#set the current nzo status to "Ext Script...". Used in History
nzo.status = Status.RUNNING
nzo.set_action_line(T('Running script'), unicoder(script))
@@ -437,7 +443,7 @@ def process_job(nzo):
if (not nzb_list) and cfg.email_endjob():
if (cfg.email_endjob() == 1) or (cfg.email_endjob() == 2 and (unpack_error or par_error)):
emailer.endjob(dirname, msgid, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
nzo.unpack_info, script, TRANS(script_log), script_ret)
nzo.fail_msg, nzo.unpack_info, script, TRANS(script_log), script_ret)
if script_output:
# Can do this only now, otherwise it would show up in the email
@@ -464,6 +470,9 @@ def process_job(nzo):
elif all_ok and isinstance(nzo.url, str):
sabnzbd.proxy_rm_bookmark(nzo.url)
## Force error for empty result
all_ok = all_ok and not empty
## Show final status in history
if all_ok:
growler.send_notification(T('Download Completed'), filename, 'complete')
@@ -482,11 +491,9 @@ def process_job(nzo):
nzo.status = Status.FAILED
par_error = True
all_ok = False
info = nzo.unpack_info.copy()
info['fail'] = [nzo.fail_msg]
if cfg.email_endjob():
emailer.endjob(dirname, msgid, cat, all_ok, workdir_complete, nzo.bytes_downloaded,
info, '', '', 0)
nzo.fail_msg, nzo.unpack_info, '', '', 0)
if all_ok:
@@ -557,6 +564,8 @@ def parring(nzo, workdir):
if not verified.get(setname, False):
logging.info("Running repair on set %s", setname)
parfile_nzf = par_table[setname]
if not os.path.exists(os.path.join(nzo.downpath, parfile_nzf.filename)):
continue
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():

View File

@@ -84,6 +84,16 @@ def run_method():
else:
return None
def next_run(t=None):
global __RSS
if __RSS:
if t:
__RSS.next_run = t
else:
return __RSS.next_run
else:
return time.time()
def save():
global __RSS
if __RSS: __RSS.save()
@@ -159,6 +169,8 @@ class RSSQueue(object):
return False
self.jobs = {}
self.next_run = time.time()
try:
defined = config.get_rss().keys()
feeds = sabnzbd.load_admin(RSS_FILE_NAME)
@@ -488,6 +500,8 @@ class RSSQueue(object):
"""
if not sabnzbd.PAUSED_ALL:
active = False
if self.next_run < time.time():
self.next_run = time.time() + cfg.rss_rate.get() * 60
feeds = config.get_rss()
for feed in feeds.keys():
try:

View File

@@ -136,6 +136,7 @@ def init():
interval = cfg.rss_rate()
delay = random.randint(0, interval-1)
logging.debug("Scheduling RSS interval task every %s min (delay=%s)", interval, delay)
sabnzbd.rss.next_run(time.time() + delay * 60)
__SCHED.add_interval_task(rss.run_method, "RSS", delay*60, interval*60,
kronos.method.sequential, None, None)
__SCHED.add_single_task(rss.run_method, 'RSS', 15, kronos.method.sequential, None, None)
@@ -151,7 +152,7 @@ def init():
kronos.method.sequential, [], None)
if cfg.newzbin_bookmarks():
if False: #cfg.newzbin_bookmarks():
interval = cfg.bookmark_rate()
delay = random.randint(0, interval-1)
logging.debug("Scheduling Bookmark interval task every %s min (delay=%s)", interval, delay)
@@ -166,6 +167,10 @@ def init():
__SCHED.add_daytime_task(action, 'quota_reset', range(1, 8), None, (hour, minute),
kronos.method.sequential, [], None)
logging.info('Setting schedule for midnight BPS reset')
__SCHED.add_daytime_task(sabnzbd.bpsmeter.midnight_action, 'midnight_bps', range(1, 8), None, (0, 0),
kronos.method.sequential, [], None)
# Subscribe to special schedule changes
cfg.newzbin_bookmarks.callback(schedule_guard)

View File

@@ -380,13 +380,14 @@ SKIN_TEXT = {
'explain-top_only' : TT('Enable for less memory usage. Disable to prevent slow jobs from blocking the queue.'),
'opt-safe_postproc' : TT('Post-Process Only Verified Jobs'),
'explain-safe_postproc' : TT('Only perform post-processing on jobs that passed all PAR2 checks.'),
'opt-pause_on_pwrar' : TT('Pause job when encrypted RAR is downloaded'),
'explain-pause_on_pwrar' : TT('You\'ll need to set a password and resume the job.'),
'opt-pause_on_pwrar' : TT('Action when encrypted RAR is downloaded'),
'explain-pause_on_pwrar' : TT('In case of "Pause", you\'ll need to set a password and resume the job.'),
'opt-no_dupes' : TT('Detect Duplicate Downloads'),
'explain-no_dupes' : TT('Detect identically named NZB files (requires NZB backup option) and duplicate titles across RSS feeds.'),
'nodupes-off' : TT('Off'), #: Three way switch for duplicates
'nodupes-ignore' : TT('Discard'), #: Three way switch for duplicates
'nodupes-pause' : TT('Pause'), #: Three way switch for duplicates
'abort' : TT('Abort'), #: Three way switch for encrypted posts
'opt-sfv_check' : TT('Enable SFV-based checks'),
'explain-sfv_check' : TT('Do an extra verification based on SFV files.'),
'opt-unpack_check' : TT('Check result of unpacking'),
@@ -458,6 +459,8 @@ SKIN_TEXT = {
'explain-max_art_tries' : TT('Maximum number of retries per server'),
'opt-max_art_opt' : TT('Only for optional servers'),
'explain-max_art_opt' : TT('Apply maximum retries only to optional servers'),
'opt-fail_hopeless' : TT('Abort jobs that cannot be completed'),
'explain-fail_hopeless' : TT('When during download it becomes clear that too much data is missing, abort the job'),
# Config->Server

View File

@@ -61,11 +61,12 @@ COUNTRY_REP = ('(US)', '(UK)', '(EU)', '(CA)', '(YU)', '(VE)', '(TR)', '(CH)', \
'(AW)', '(AR)', '(AL)', '(AF)')
_RE_ENDEXT = re.compile(r'\.%ext[{}]*$', re.I)
_RE_ENDFN = re.compile(r'%fn[{}]*$', re.I)
def endswith_ext(path):
""" Return True when path ends with '.%ext'
def ends_in_file(path):
""" Return True when path ends with '.%ext' or '%fn'
"""
return _RE_ENDEXT.search(path) is not None
return bool(_RE_ENDEXT.search(path) or _RE_ENDFN.search(path))
def move_to_parent_folder(workdir):
@@ -333,7 +334,7 @@ class SeriesSorter(object):
sorter = self.sort_string.replace('\\', '/')
mapping = []
if endswith_ext(sorter):
if ends_in_file(sorter):
extension = True
sorter = sorter.replace('.%ext', '')
else:
@@ -425,11 +426,10 @@ class SeriesSorter(object):
file, filepath, size = largest
# >20MB
if filepath and size > 20971520:
tmp, self.ext = os.path.splitext(file)
self.fname = tmp
self.fname, self.ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, self.ext)
# Replace %fn with the original filename
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
# Replace %ext with extension
newpath = newpath.replace('%ext', self.ext)
@@ -594,7 +594,7 @@ class GenericSorter(object):
sorter = self.sort_string.replace('\\', '/')
mapping = []
if endswith_ext(sorter):
if ends_in_file(sorter):
extension = True
sorter = sorter.replace(".%ext", '')
else:
@@ -679,10 +679,9 @@ class GenericSorter(object):
else:
filepath = os.path.join(current_path, file)
if os.path.exists(filepath):
tmp, ext = os.path.splitext(file)
self.fname = tmp
self.fname, ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, ext)
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
try:
logging.debug("Rename: %s to %s", filepath, newpath)
@@ -703,10 +702,9 @@ class GenericSorter(object):
for index, file in matched_files.iteritems():
filepath = os.path.join(current_path, file)
renamed.append(filepath)
tmp, ext = os.path.splitext(file)
self.fname = tmp
self.fname, ext = os.path.splitext(os.path.split(file)[1])
name = '%s%s' % (self.filename_set, self.extra)
name = name.replace('%1', str(index)).replace('%fn', tmp)
name = name.replace('%1', str(index)).replace('%fn', self.fname)
name = name + ext
newpath = os.path.join(current_path, name)
try:
@@ -807,7 +805,7 @@ class DateSorter(object):
sorter = self.sort_string.replace('\\', '/')
mapping = []
if endswith_ext(sorter):
if ends_in_file(sorter):
extension = True
sorter = sorter.replace(".%ext", '')
else:
@@ -891,10 +889,9 @@ class DateSorter(object):
size = os.stat(filepath).st_size
if size > cfg.movie_rename_limit.get_int():
if 'sample' not in file:
tmp, ext = os.path.splitext(file)
self.fname = tmp
self.fname, ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, ext)
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
if not os.path.exists(newpath):
try:
@@ -903,7 +900,7 @@ class DateSorter(object):
except:
logging.error(Ta('Failed to rename: %s to %s'), current_path, newpath)
logging.info("Traceback: ", exc_info = True)
rename_similar(current_path, ext, self.filename_set)
rename_similar(current_path, ext, self.filename_set, ())
break

View File

@@ -207,6 +207,10 @@ class URLGrabber(Thread):
if res == -2:
logging.info('Incomplete NZB, retry after 5 min %s', url)
when = 300
elif res == -1:
# Error, but no reason to retry. Warning is already given
NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
continue
else:
logging.info('Unknown error fetching NZB, retry after 2 min %s', url)
when = 120