Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-05 14:09:22 -05:00)
Compare commits
225 Commits (SHA1 only; the author and date columns of the original table are empty):

bcc4dd75cf, 97711ca82e, e782237f27, 52bb156c08, 4361d82ddd, 017cf8f285, 03cdf6ed5d, cf347a8e90, f06afe43e1,
fb301eb5c8, 1562c3560b, 9813bc237f, b39fe059c6, a56c770a8b, e3bf0edad8, e35d9e4db3, c617d4321a, 0fd3a2881f,
0c1f7633de, b7d5d49c84, 9911b93ece, eeaad00968, e1bb8459e3, 65c3ac0cc0, 413c02a80f, 80f118f304, 5c0a10e16b,
d9b32261e7, 8d8ce52193, 1cc2e25cda, 0dc2c6687d, b061e582b6, 690731fd79, 068b7ed7f5, aae2fdcd32, d3628a1eb7,
9cc8176d87, 27f83f21be, 5e31a31a21, a077012478, fed0e0f765, fbdbf7ab22, f013d38d00, 93b9c8a6da, 4605c3fd30,
ed7dc3f827, e69eeebdd8, 5da5f1adc1, f47e92dec0, a894ca5171, 5abe1140ae, d34e14370c, c4f4a3131c, dcbd9b57f3,
aad3b54a17, cde142a371, 8bfc98ffc6, e46f21d566, 0e45fdcdfd, eec7af16d7, 6532425902, 44b896522c, 1b16ee44cb,
d5f608c28c, 555d8418e7, 8c22e35da4, 95a7924b31, 5830bebd95, d32cf57c75, 6d9242ebc5, cbc4f6a964, 2a3b2b9556,
53a219f12b, 48519dcfa0, 92542c58fe, 7eafe730f9, 494e72a996, 84cc86f1d3, 64479e2e5d, 13b523d9bd, 181881a21b,
86d11095ac, 927ba3cd9d, 6296fc1762, 60fbe44724, 29e45da431, d82e69eef4, 8c7d557252, a56d6e5517, 7548d9e975,
b7e2bd9684, f0a243e3d3, 6e108c9ef2, 89edcc1924, 8a6aca47a1, d03e5780b8, 209d8f9b40, c257b1be3d, 2c48c8de2e,
a767ef6aed, ad61d1dd03, 33c3d187a0, 4eb486d4e2, bfb6c167a4, 44abf3bdf6, c950572592, 3999cb13fd, af65075f0c,
de2a2b465b, cd7a77f02d, f4a5394b63, 3fb6a8dedb, 50c8f84eba, 2c7ecdee92, 72390a793a, 04ad4e5d3e, 5ef9c6a433,
e6baffc839, e361eb25a5, 9b420e91c9, 3a4bf971b2, 1128691c5d, 15043aef3f, 2a3b4afa03, 00a98efa81, f013dd7f0d,
7b91b1c769, 5583cce322, b995c5f992, 214ac4a53d, fc7e87f0df, c0f2f59fc1, b90a847a6f, a58bb385f5, 9754baeb1c,
ffcd154966, 97cfe9488c, 374b6f616a, e2f51595b6, 04091a16aa, 9d9d2fd9a2, 5746115331, 42f1a4926c, 7d87fd461b,
1ba9976979, 659c199043, 81a3f53226, 1cbff28f67, 8e15acbf30, e07be60db6, 539c9662ff, b396014f8d, 1db32415b6,
b24629db6b, 9b5cdcf8fb, 4831415d14, a4c51f0b20, ec3ba1fb93, 61966f7036, 4f69e81841, d0d90581df, 8ea5c27633,
517500fdf3, c4c1c9b6ab, 2388889ede, 55cfe878d7, a2daaee468, 2c360e395e, 399cfee594, be646ae6ab, b470253d9f,
b83c493492, 991277bb01, 5626013b81, 2810d37758, c2f08f01e0, 17ff087e06, 77de565b7c, 54d238aa4d, 379d09f8cc,
00de72b127, f9c84fa7dd, c8e46691bb, df1bb636e5, ff886fad0d, 6dbee7a413, 3f8fcd7172, d94f7388e6, ad8b49fea8,
ce00270c12, 8c501f8f58, ce313ebc65, 887ad881a2, ce40827552, 2777d89482, 727b300a0e, 652b021a8e, fdf33acfbb,
b001bc9b6f, 8802cb1d8c, e19a2fbae7, 53e38f98f9, e783e227f6, f3dfbe4181, bcd8ca8bc4, 816d6a63cd, 88d3f25700,
80f69b11db, 81a11f20c8, 9e2a839953, 3cefcde270, 87a1eacfe7, 7cbc1a8419, 7b5570eb0b, 1a43a4dcf0, 2c2a6592c7,
f31de6ee4e, 8fcd1f6b6c, d7f3a473d7, ab2eb0c94e, e51f4fc45a, 65278120e2, 2eed355e9c, 018955f4d5, 12fd63c1cf
PKG-INFO (4)

```diff
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 2.2.0RC2
-Summary: SABnzbd-2.2.0RC2
+Version: 2.2.1
+Summary: SABnzbd-2.2.1
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
```
README.mkd (65)

```diff
@@ -1,4 +1,24 @@
-Release Notes - SABnzbd 2.2.0 Release Candidate 2
+Release Notes - SABnzbd 2.2.1
 =========================================================

+## Changes since 2.2.0
+- Allow up to 5 bad articles for jobs with no or little par2
+- Only auto-disconnect after first run of verification
+- Warning is shown when password-file is too large
+- Failure of recursive unpacking no longer fails whole job
+- Failure of unpacking of duplicate RAR no longer fails whole job
+
+## Bugfixes since 2.2.0
+- Some users were experiencing downloads or pre-check being stuck at 99%
+- Fixed RarFile error during unpacking
+- Remove email addresses settings from log export
+- Block server longer on 'Download limit exceeded' errors
+- Windows: If repair renamed a job the correct renamed file was deleted
+- Windows: Unpacking of downloads with many archives could fail
+- macOS: Adding jobs could fail without any error
+
+
+Release Notes - SABnzbd 2.2.0
+=========================================================
+
 NOTE: Due to changes in this release, the queue will be converted when 2.2.0
@@ -6,23 +26,6 @@ is started for the first time. Job order, settings and data will be
 preserved, but all jobs will be unpaused and URLs that did not finish
 fetching before the upgrade will be lost!

-## Changes since 2.2.0 Release Candidate 1
-- Not all RAR files were correctly removed for encrypted downloads
-- Better indication of verification process before and after repair
-- All par2 files are only downloaded when enabled, not on enable_par_cleanup
-- Disk-space is now checked before writing files
-- Server usage graphs did not always list all available months
-- Warning is shown when many files with duplicate filenames are discarded
-- Special characters like []!* in filenames could break repair
-- In some cases not all RAR-sets were unpacked
-- Categories with ' in them could result in SQL errors
-- Faulty pynotify could stop shutdown
-- Various CSS fixes in Glitter and the Config
-- macOS: Really catch "Protocol wrong type for socket" errors
-
-NOTE: Option to limit Servers to specific Categories is now scheduled
-to be removed in the next release.
-
 ## Changes since 2.1.0
 - Direct Unpack: Jobs will start unpacking during the download, reduces
 post-processing time but requires capable hard drive. Only works for jobs that
@@ -36,23 +39,28 @@ fetching before the upgrade will be lost!
 - New option "History Retention" to automatically purge jobs from History
 - Jobs outside server retention are processed faster
 - Obfuscated filenames are renamed during downloading, if possible
+- Disk-space is now checked before writing files
 - Add "Retry All Failed" button to Glitter
 - Smoother animations in Firefox (disabled previously due to FF high-CPU usage)
 - Show missing articles in MB instead of number of articles
+- Correct value in "Speed" Extra History Column
+- Better indication of verification process before and after repair
 - Remove video and audio rating icons from Queue
 - Show vote buttons instead of video and audio rating buttons in History
 - If enabled, replace dots in filenames also when there are spaces already
 - Handling of par2 files made more robust
+- All par2 files are only downloaded when enabled, not on enable_par_cleanup
 - Update GNTP bindings to 1.0.3
 - max_art_opt and replace_illegal moved from Switches to Specials
 - Removed Specials par2_multicore and allow_streaming
 - Windows: Full unicode support when calling repair and unpack
-- Windows: Move enable_MultiPar to Specials
+- Windows: Move enable_multipar to Specials
 - Windows: MultiPar verification of a job is skipped after blocks are fetched
 - Windows & macOS: removed par2cmdline in favor of par2tbb/MultiPar
 - Windows & macOS: Updated WinRAR to 5.5.0

 ## Bugfixes since 2.1.0
 - Shutdown/suspend did not work on some Linux systems
 - Standby/Hibernate was not working on Windows
 - Deleting a job could result in write errors
 - Display warning if "Extra par2 parameters" turn out to be wrong
 - RSS URLs with commas in the URL were broken
@@ -64,10 +72,17 @@ fetching before the upgrade will be lost!
 - Fix race-condition in Post-processing
 - History would not always show latest changes
 - Convert HTML in error messages
+- In some cases not all RAR-sets were unpacked
 - Fixed unicode error during Sorting
+- Faulty pynotify could stop shutdown
+- Categories with ' in them could result in SQL errors
+- Special characters like []!* in filenames could break repair
 - Wizard was always accessible, even with username and password set
 - Correct value in "Speed" Extra History Column
 - Not all texts were shown in the selected Language
+- Various CSS fixes in Glitter and the Config
 - Catch "error 0" when using HTTPS on some Linux platforms
+- Warning is shown when many files with duplicate filenames are discarded
 - Improve zeroconf/bonjour by sending HTTPS setting and auto-discover of IP
 - Windows: Fix error in MultiPar-code when first par2-file was damaged
 - macOS: Catch "Protocol wrong type for socket" errors
@@ -75,13 +90,21 @@ fetching before the upgrade will be lost!
 ## Translations
 - Added Hebrew translation by ION IL, many other languages updated.

-## Upgrading from 0.7.x and older
+## Depreciation notices
+- Option to limit Servers to specific Categories is now scheduled
+to be removed in the next release.
+
+## Upgrading from 2.1.x and older
 - Finish queue
 - Stop SABnzbd
 - Install new version
 - Start SABnzbd

+## Upgrade notices
+- Due to changes in this release, the queue will be converted when 2.2.x
+is started for the first time. Job order, settings and data will be
+preserved, but all jobs will be unpaused and URLs that did not finish
+fetching before the upgrade will be lost!
 - The organization of the download queue is different from 0.7.x releases.
 This version will not see the old queue, but you restore the jobs by going
 to Status page and use Queue Repair.
```
```diff
@@ -98,6 +98,12 @@ class BuiltinSSLAdapter(wsgiserver.SSLAdapter):
                 # The connection can safely be dropped.
                 return None, {}
             raise
+        except:
+            # Temporary fix for https://github.com/cherrypy/cherrypy/issues/1618
+            e = sys.exc_info()[1]
+            if e.args == (0, 'Error'):
+                return None, {}
+            raise
         return s, self.get_environ(s)

     # TODO: fill this out more with mod ssl env
```
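The added bare except is a stop-gap for cherrypy/cherrypy#1618, where a TLS handshake on some Linux platforms raises an exception whose args are exactly `(0, 'Error')` even though the connection can simply be dropped; this corresponds to the 'Catch "error 0" when using HTTPS' release note. A minimal standalone sketch of the same guard, where `wrap` is a hypothetical stand-in for the real handshake call:

```python
import sys

def wrap_quietly(wrap):
    """Run the TLS wrap call; swallow only the bogus (0, 'Error') case.

    Sketch of the workaround above: an exception whose args are exactly
    (0, 'Error') is treated as a dropped connection, everything else
    still propagates to the caller.
    """
    try:
        return wrap()
    except Exception:
        e = sys.exc_info()[1]
        if e.args == (0, 'Error'):
            return None, {}   # drop the connection silently
        raise
```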
```diff
@@ -140,10 +140,13 @@
 <!--
 We need to find how many months we have recorded so far, so we
 loop over all the dates to find the lowest value and then use
-the number of days passed as an estimate of the months we have.
+this to calculate the date-selector
 -->

+<!--#import json#-->
+<!--#import datetime#-->
+<!--#import sabnzbd.misc#-->
+
 <!--#def show_date_selector($server, $id)#-->
 <!--#set month_names = [$T('January'), $T('February'), $T('March'), $T('April'), $T('May'), $T('June'), $T('July'), $T('August'), $T('September'), $T('October'), $T('November'), $T('December')] #-->
 <!--#set min_date = datetime.date.today()#-->
@@ -151,10 +154,10 @@
 <!--#set split_date = $date.split('-')#-->
 <!--#set min_date = min(min_date, datetime.date(int(split_date[0]), int(split_date[1]), 1))#-->
 <!--#end for#-->
-<!--#set months_recorded = int((datetime.date.today()-min_date).days / (365/12))#-->
+<!--#set months_recorded = list(sabnzbd.misc.monthrange(min_date, datetime.date.today()))#-->
+<!--#$months_recorded.reverse()#-->
 <select class="chart-selector" name="chart-selector-${id}" id="chart-selector-${id}" data-id="${id}">
-<!--#for $i in range(months_recorded+1)#-->
-<!--#set cur_date = (datetime.date.today() - datetime.timedelta($i*365/12))#-->
+<!--#for $cur_date in months_recorded#-->
 <option value="<!--#echo '%d-%02d' % ($cur_date.year, $cur_date.month)#-->">$month_names[$cur_date.month-1] $cur_date.year</option>
 <!--#end for#-->
 </select>
@@ -300,7 +303,7 @@
 // Server data
 serverData[${cur}] = <!--#echo json.dumps($server['amounts'][4])#-->
 \$(document).ready(function() {
-    showChart(${cur})
+    showChart(${cur}, \$('#chart-selector-${cur}').val())
 })
 </script>
 <!--#end if#-->
@@ -317,13 +320,9 @@
 var thisDay = new Date()

 // What month are we doing?
-if(month) {
-    var inputDate = new Date(month+'-01')
-} else {
-    var inputDate = new Date()
-}
-var baseDate = new Date(inputDate.getFullYear(), inputDate.getMonth(), 1)
-var maxDaysInMonth = new Date(baseDate.getYear(), baseDate.getMonth()+1, 0).getDate()
+var inputDate = new Date(month+'-01')
+var baseDate = new Date(inputDate.getUTCFullYear(), inputDate.getUTCMonth(), 1)
+var maxDaysInMonth = new Date(baseDate.getFullYear(), baseDate.getMonth()+1, 0).getDate()

 // Fill the data array
 var data = {
```
```diff
@@ -232,7 +232,7 @@ function do_restart() {
 var portsUnchanged = ($('#port').val() == $('#port').data('original')) && ($('#https_port').val() == $('#https_port').data('original'))

 // Are we on settings page or did nothing change?
-if(!$('body').hasClass('General') || (!switchedHTTPS && !portsUnchanged)) {
+if(!$('body').hasClass('General') || (!switchedHTTPS && portsUnchanged)) {
     // Same as before
     var urlTotal = window.location.origin + urlPath
 } else {
```
```diff
@@ -95,7 +95,7 @@
 <span data-bind="text: password"></span>
 </small>
 <!-- /ko -->
-<div class="name-icons direct-unpack hover-button" data-bind="visible: direct_unpack">
+<div class="name-icons direct-unpack hover-button" data-bind="visible: direct_unpack" title="$T('opt-direct_unpack')">
 <span class="glyphicon glyphicon-compressed"></span> <span data-bind="text: direct_unpack"></span>
 </div>
 </div>
```
```diff
@@ -103,7 +103,7 @@
 glitterTranslate.status['Script'] = "$T('stage-script')";
 glitterTranslate.status['Source'] = "$T('stage-source')";
 glitterTranslate.status['Servers'] = "$T('stage-servers')";
-glitterTranslate.status['INFO'] = "$T('log-info')".replace('+ ', '').toUpperCase();
+glitterTranslate.status['INFO'] = "$T('log-info')".replace('+', '').toUpperCase();
 glitterTranslate.status['WARNING'] = "$T('Glitter-warning')";
 glitterTranslate.status['ERROR'] = "$T('Glitter-error')";
```
interfaces/smpl/templates/static/MochiKit/MochiKit.js (vendored, 16146): file diff suppressed because it is too large.

```diff
@@ -1,2 +1,2 @@
-dojo.hostenv.conditionalLoadModule({"common": ["MochiKit.MochiKit"]});
-dojo.hostenv.moduleLoaded("MochiKit.*");
+dojo.hostenv.conditionalLoadModule({"common": ["MochiKit.MochiKit"]});
+dojo.hostenv.moduleLoaded("MochiKit.*");
```
```diff
@@ -87,7 +87,7 @@
 </div>
 <div class="col-md-5">
 <div class="clearfix"></div>
-<iframe style="float: right; width: 315px; height: 315px;" frameborder="0" src="https://resources.sabnzbd.org/wizard/ad/$language"></iframe>
+<iframe style="float: right; width: 325px; height: 325px;" frameborder="0" src="https://sabnzbd.org/wizard#$language"></iframe>
 </div>
 </div>
 <input type="hidden" name="session" value="$session" />
```
```diff
@@ -1,11 +1,11 @@
-The original author of SABnzbd based his work on Pynewsleecher by Freddy@madcowdesease.org.
-
-Few parts of Pynewsleecher have survived the generations of SABnzbd in a
-recognizable form.
-Still, we wish to thank Freddy for his inspiration.
-
-The home of the Pynewsleecher project:
-http://www.madcowdisease.org/mcd/pynewsleecher
-
-The software does not carry any license information.
-
+The original author of SABnzbd based his work on Pynewsleecher by Freddy@madcowdesease.org.
+
+Few parts of Pynewsleecher have survived the generations of SABnzbd in a
+recognizable form.
+Still, we wish to thank Freddy for his inspiration.
+
+The home of the Pynewsleecher project:
+http://www.madcowdisease.org/mcd/pynewsleecher
+
+The software does not carry any license information.
+
```
```diff
@@ -1,8 +1,8 @@
-On http://www.brunningonline.net/simon/blog/archives/001835.html,
-the author licensed SysTrayIcon.py under a variant of the WTFPL:
-
-> Any road up, help yourself. Consider SysTrayIcon.py to be under an
-> "Aleister Crowley" style license - "Do what thou wilt shall be the
-> only law".
->
-> Err, but don't sue me if it doesn't work. ;-)
+On http://www.brunningonline.net/simon/blog/archives/001835.html,
+the author licensed SysTrayIcon.py under a variant of the WTFPL:
+
+> Any road up, help yourself. Consider SysTrayIcon.py to be under an
+> "Aleister Crowley" style license - "Do what thou wilt shall be the
+> only law".
+>
+> Err, but don't sue me if it doesn't work. ;-)
```
osx/unrar/unrar (BIN): binary file not shown.
```diff
@@ -5,14 +5,14 @@
 #
 msgid ""
 msgstr ""
-"Project-Id-Version: SABnzbd-2.2.0-develop\n"
+"Project-Id-Version: SABnzbd-2.3.0-develop\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: shypike@sabnzbd.org\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=ASCII\n"
 "Content-Transfer-Encoding: 7bit\n"
-"POT-Creation-Date: 2017-08-02 12:48+W. Europe Daylight Time\n"
+"POT-Creation-Date: 2017-08-25 09:18+W. Europe Daylight Time\n"
 "Generated-By: pygettext.py 1.5\n"
@@ -40,10 +40,22 @@ msgstr ""
 msgid "par2 binary... NOT found!"
 msgstr ""

 #: SABnzbd.py [Error message] # SABnzbd.py [Error message]
 msgid "Verification and repair will not be possible."
 msgstr ""

+#: SABnzbd.py [Error message]
+msgid "MultiPar binary... NOT found!"
+msgstr ""
+
+#: SABnzbd.py [Warning message]
+msgid "Your UNRAR version is %s, we recommend version %s or higher.<br />"
+msgstr ""
+
+#: SABnzbd.py [Error message]
+msgid "Downloads will not unpacked."
+msgstr ""
+
 #: SABnzbd.py [Error message]
 msgid "unrar binary... NOT found"
 msgstr ""
@@ -178,10 +190,6 @@ msgstr ""
 msgid "Trying to set status of non-existing server %s"
 msgstr ""

-#: sabnzbd/__init__.py [Warning message]
-msgid "Too little diskspace forcing PAUSE"
-msgstr ""
-
 #: sabnzbd/__init__.py [Error message]
 msgid "Failure in tempfile.mkstemp"
 msgstr ""
@@ -229,6 +237,10 @@ msgstr ""
 msgid "Failed to compile regex for search term: %s"
 msgstr ""

+#: sabnzbd/assembler.py [Warning message]
+msgid "Too little diskspace forcing PAUSE"
+msgstr ""
+
 #: sabnzbd/assembler.py [Error message]
 msgid "Disk full! Forcing Pause"
 msgstr ""
@@ -435,10 +447,14 @@ msgstr ""
 msgid "Error removing %s"
 msgstr ""

-#: sabnzbd/dirscanner.py [Warning message]
+#: sabnzbd/dirscanner.py [Warning message] # sabnzbd/rss.py [Warning message]
 msgid "Cannot read %s"
 msgstr ""

+#: sabnzbd/dirscanner.py [Error message]
+msgid "Error while adding %s, removing"
+msgstr ""
+
 #: sabnzbd/dirscanner.py [Error message] # sabnzbd/dirscanner.py [Error message]
 msgid "Cannot read Watched Folder %s"
 msgstr ""
@@ -664,7 +680,7 @@ msgstr ""
 msgid "Undefined server!"
 msgstr ""

-#: sabnzbd/interface.py
+#: sabnzbd/interface.py # sabnzbd/interface.py
 msgid "Incorrect parameter"
 msgstr ""
@@ -712,6 +728,10 @@ msgstr ""
 msgid "Error creating SSL key and certificate"
 msgstr ""

+#: sabnzbd/misc.py [Warning message]
+msgid "Your password file contains more than 30 passwords, testing all these passwords takes a lot of time. Try to only list useful passwords."
+msgstr ""
+
 #: sabnzbd/misc.py [Error message]
 msgid "Cannot change permissions of %s"
 msgstr ""
@@ -921,7 +941,7 @@ msgid "Main packet not found..."
 msgstr ""

 #: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
-msgid "Invalid par2 files, cannot verify or repair"
+msgid "Invalid par2 files or invalid PAR2 parameters, cannot verify or repair"
 msgstr ""

 #: sabnzbd/newsunpack.py # sabnzbd/newsunpack.py
@@ -1759,6 +1779,14 @@ msgstr ""
 msgid "Disable quota management"
 msgstr ""

+#: sabnzbd/skintext.py [Config->Scheduler]
+msgid "Pause jobs with category"
+msgstr ""
+
+#: sabnzbd/skintext.py [Config->Scheduler]
+msgid "Resume jobs with category"
+msgstr ""
+
 #: sabnzbd/skintext.py [Prowl priority] # sabnzbd/skintext.py [Prowl priority] # sabnzbd/skintext.py [Three way switch for duplicates]
 msgid "Off"
 msgstr ""
@@ -2965,6 +2993,14 @@ msgstr ""
 msgid "Detect identical episodes in series (based on \"name/season/episode\" of items in your History)"
 msgstr ""

+#: sabnzbd/skintext.py
+msgid "Allow proper releases"
+msgstr ""
+
+#: sabnzbd/skintext.py
+msgid "Bypass series duplicate detection if PROPER, REAL or REPACK is detected in the download name"
+msgstr ""
+
 #: sabnzbd/skintext.py [Four way switch for duplicates]
 msgid "Discard"
 msgstr ""
@@ -4573,6 +4609,14 @@ msgid ""
 "It is licensed under the GNU GENERAL PUBLIC LICENSE Version 2 or (at your option) any later version.\n"
 msgstr ""

+#: sabnzbd/skintext.py
+msgid "In order to download from usenet you will require access to a provider. Your ISP may provide you with access, however a premium provider is recommended."
+msgstr ""
+
+#: sabnzbd/skintext.py
+msgid "Don't have a usenet provider? We recommend trying %s."
+msgstr ""
+
 #: sabnzbd/tvsort.py [Error message]
 msgid "Error getting TV info (%s)"
 msgstr ""
```
```diff
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2017-08-06 09:51+0000\n"
-"PO-Revision-Date: 2017-08-06 20:37+0000\n"
-"Last-Translator: fox <Unknown>\n"
+"PO-Revision-Date: 2017-08-09 21:46+0000\n"
+"Last-Translator: Safihre <safihre@sabnzbd.org>\n"
 "Language-Team: German <de@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2017-08-07 05:53+0000\n"
-"X-Generator: Launchpad (build 18441)\n"
+"X-Launchpad-Export-Date: 2017-08-10 06:03+0000\n"
+"X-Generator: Launchpad (build 18446)\n"

 #: SABnzbd.py [Error message]
 msgid "Failed to start web-interface"
@@ -1235,7 +1235,7 @@ msgid ""
 "%d files with duplicate filenames were discared for \"%s\". Enable "
 "\"allow_duplicate_files\" to allow duplicate filenames."
 msgstr ""
-"%s Dateien mit doppelten Dateinamen wurden für \"%s\" verworfen. Aktiviere "
+"%d Dateien mit doppelten Dateinamen wurden für \"%s\" verworfen. Aktiviere "
 "\"allow_duplicate_files\" um doppelte Dateinamen zu erlauben."

 #: sabnzbd/nzbstuff.py
```

```diff
@@ -105,8 +105,8 @@ msgstr "Posts will be paused until they are at least this age. Setting job prior
 msgid "Support the project, Donate!"
 msgstr "Support the project, donate!"

-msgid "%d files with duplicate filenames were discared for "%s". Enable "allow_duplicate_files" to allow duplicate filenames."
-msgstr "%d files with duplicate filenames were discarded for "%s". Enable "allow_duplicate_files" to allow duplicate filenames."
+msgid "%d files with duplicate filenames were discared for \"%s\". Enable \"allow_duplicate_files\" to allow duplicate filenames."
+msgstr "%d files with duplicate filenames were discarded for \"%s\". Enable \"allow_duplicate_files\" to allow duplicate filenames."

 msgid "User script can flag job as failed"
 msgstr "Post-processing script can flag job as failed"
```

```diff
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2017-08-06 09:51+0000\n"
-"PO-Revision-Date: 2017-08-06 21:05+0000\n"
+"PO-Revision-Date: 2017-08-15 20:47+0000\n"
 "Last-Translator: ION IL <Unknown>\n"
 "Language-Team: Hebrew <he@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2017-08-07 05:53+0000\n"
-"X-Generator: Launchpad (build 18441)\n"
+"X-Launchpad-Export-Date: 2017-08-16 05:16+0000\n"
+"X-Generator: Launchpad (build 18446)\n"

 #: SABnzbd.py [Error message]
 msgid "Failed to start web-interface"
@@ -4210,7 +4210,7 @@ msgstr "ערכים"

 #: sabnzbd/skintext.py [Job details page]
 msgid "Edit NZB Details"
-msgstr "ערוך פרטי NZB"
+msgstr "NZB ערוך פרטי"

 #: sabnzbd/skintext.py [Job details page, delete button]
 msgid "Delete"
```

```diff
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2017-08-06 09:51+0000\n"
-"PO-Revision-Date: 2017-07-27 19:33+0000\n"
+"PO-Revision-Date: 2017-08-09 21:46+0000\n"
 "Last-Translator: Safihre <safihre@sabnzbd.org>\n"
 "Language-Team: Dutch <nl@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2017-08-07 05:53+0000\n"
-"X-Generator: Launchpad (build 18441)\n"
+"X-Launchpad-Export-Date: 2017-08-10 06:03+0000\n"
+"X-Generator: Launchpad (build 18446)\n"

 #: SABnzbd.py [Error message]
 msgid "Failed to start web-interface"
@@ -1216,6 +1216,9 @@ msgid ""
 "%d files with duplicate filenames were discared for \"%s\". Enable "
 "\"allow_duplicate_files\" to allow duplicate filenames."
 msgstr ""
+"Er zijn %d bestanden met dezelfde bestandsnaam niet toegevoegd aan opdracht "
+"\"%s\". Zet \"allow_duplicate_files\" aan om dubbele bestandsnamen toe te "
+"staan."

 #: sabnzbd/nzbstuff.py
 msgid "Aborted, cannot be completed"
```
```diff
@@ -598,27 +598,22 @@ def backup_nzb(filename, data):

 def save_compressed(folder, filename, data):
     """ Save compressed NZB file in folder """
-    # Need to go to the save folder to
-    # prevent the pathname being embedded in the GZ file
-    here = os.getcwd()
-    os.chdir(folder)
-
     if filename.endswith('.nzb'):
         filename += '.gz'
     else:
         filename += '.nzb.gz'
     logging.info("Backing up %s", os.path.join(folder, filename))
     try:
-        f = gzip.GzipFile(filename, 'wb')
-        f.write(data)
-        f.flush()
-        f.close()
+        # Have to get around the path being put inside the tgz
+        with open(os.path.join(folder, filename), 'wb') as tgz_file:
+            f = gzip.GzipFile(filename, fileobj=tgz_file)
+            f.write(data)
+            f.flush()
+            f.close()
     except:
         logging.error(T('Saving %s failed'), os.path.join(folder, filename))
         logging.info("Traceback: ", exc_info=True)
-
-    os.chdir(here)


 ##############################################################################
 # Unsynchronized methods
@@ -863,6 +858,7 @@ def get_new_id(prefix, folder, check_list=None):
     except:
         logging.error(T('Failure in tempfile.mkstemp'))
         logging.info("Traceback: ", exc_info=True)
+        break
     # Cannot create unique id, crash the process
     raise IOError
```
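The save_compressed rewrite drops the os.chdir() dance: gzip.GzipFile records its first argument in the gzip header's FNAME field, so passing only the bare filename while routing the bytes through an explicit fileobj keeps the absolute path out of the archive without touching the process-wide working directory (which was unsafe with other threads running). A small sketch of the same technique, using a hypothetical write_gz helper:

```python
import gzip
import os

def write_gz(folder, filename, data):
    """Write `data` gzip-compressed to folder/filename.

    Only the bare `filename` goes into the gzip FNAME header field;
    the output location is controlled entirely by the fileobj.
    """
    with open(os.path.join(folder, filename), 'wb') as raw:
        gz = gzip.GzipFile(filename, mode='wb', fileobj=raw)
        try:
            gz.write(data)
        finally:
            gz.close()
```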
```diff
@@ -30,13 +30,8 @@ from sabnzbd.constants import GIGI, ANFO

 ARTICLE_LOCK = threading.Lock()


 class ArticleCache(object):
-    """ Operations on lists/dicts are atomic enough that we
-        do not have to put locks. Only the cache-size needs
-        a lock since the integer needs to stay synced.
-        With less locking, the decoder and assembler do not
-        have to wait on each other.
-    """
     do = None

     def __init__(self):
@@ -47,9 +42,11 @@ class ArticleCache(object):
         self.__article_table = {}  # Dict of buffered articles
         ArticleCache.do = self

+    @synchronized(ARTICLE_LOCK)
     def cache_info(self):
         return ANFO(len(self.__article_list), abs(self.__cache_size), self.__cache_limit_org)

+    @synchronized(ARTICLE_LOCK)
     def new_limit(self, limit):
         """ Called when cache limit changes """
         self.__cache_limit_org = limit
@@ -59,28 +56,23 @@ class ArticleCache(object):
         self.__cache_limit = min(limit, GIGI)

     @synchronized(ARTICLE_LOCK)
-    def increase_cache_size(self, value):
-        self.__cache_size += value
-
-    @synchronized(ARTICLE_LOCK)
-    def decrease_cache_size(self, value):
-        self.__cache_size -= value
-
     def reserve_space(self, data):
         """ Is there space left in the set limit? """
         data_size = sys.getsizeof(data) * 64
-        self.increase_cache_size(data_size)
+        self.__cache_size += data_size
         if self.__cache_size + data_size > self.__cache_limit:
             return False
         else:
             return True

     @synchronized(ARTICLE_LOCK)
     def free_reserve_space(self, data):
         """ Remove previously reserved space """
         data_size = sys.getsizeof(data) * 64
-        self.decrease_cache_size(data_size)
+        self.__cache_size -= data_size
         return self.__cache_size + data_size < self.__cache_limit

     @synchronized(ARTICLE_LOCK)
     def save_article(self, article, data):
         nzf = article.nzf
         nzo = nzf.nzo
@@ -98,6 +90,7 @@ class ArticleCache(object):
         if self.__cache_limit:
             if self.__cache_limit < 0:
                 self.__add_to_cache(article, data)
+
             else:
                 data_size = len(data)
@@ -106,7 +99,7 @@ class ArticleCache(object):
                     # Flush oldest article in cache
                     old_article = self.__article_list.pop(0)
                     old_data = self.__article_table.pop(old_article)
-                    self.decrease_cache_size(len(old_data))
+                    self.__cache_size -= len(old_data)
                     # No need to flush if this is a refreshment article
                     if old_article != article:
                         self.__flush_article(old_article, old_data)
@@ -120,6 +113,7 @@ class ArticleCache(object):
         else:
             self.__flush_article(article, data)

+    @synchronized(ARTICLE_LOCK)
     def load_article(self, article):
         data = None
         nzo = article.nzf.nzo
@@ -127,7 +121,7 @@ class ArticleCache(object):
         if article in self.__article_list:
             data = self.__article_table.pop(article)
             self.__article_list.remove(article)
-            self.decrease_cache_size(len(data))
+            self.__cache_size -= len(data)
         elif article.art_id:
             data = sabnzbd.load_data(article.art_id, nzo.workpath, remove=True,
                                      do_pickle=False, silent=True)
@@ -137,19 +131,21 @@ class ArticleCache(object):

         return data

+    @synchronized(ARTICLE_LOCK)
     def flush_articles(self):
-        self.__cache_size = 0
         while self.__article_list:
             article = self.__article_list.pop(0)
             data = self.__article_table.pop(article)
             self.__flush_article(article, data)
+        self.__cache_size = 0

+    @synchronized(ARTICLE_LOCK)
     def purge_articles(self, articles):
         for article in articles:
             if article in self.__article_list:
                 self.__article_list.remove(article)
                 data = self.__article_table.pop(article)
-                self.decrease_cache_size(len(data))
+                self.__cache_size -= len(data)
             if article.art_id:
                 sabnzbd.remove_data(article.art_id, article.nzf.nzo.workpath)
@@ -172,12 +168,11 @@ class ArticleCache(object):

     def __add_to_cache(self, article, data):
         if article in self.__article_table:
-            self.decrease_cache_size(len(self.__article_table[article]))
+            self.__cache_size -= len(self.__article_table[article])
         else:
             self.__article_list.append(article)

         self.__article_table[article] = data
-        self.increase_cache_size(len(data))
+        self.__cache_size += len(data)


 # Create the instance
```
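This cache rework, tied to the "stuck at 99%" fix, replaces the separately locked increase/decrease helpers with plain integer updates inside methods that are each wrapped in @synchronized(ARTICLE_LOCK): every public operation now holds the lock exactly once, so the size counter can never be observed mid-update. A compact sketch of the pattern, with a synchronized decorator modeled on sabnzbd.decorators (an assumption about its shape, not its exact code):

```python
import threading

CACHE_LOCK = threading.Lock()

def synchronized(lock):
    """Decorator: run the wrapped function while holding `lock`."""
    def wrapper(func):
        def call(*args, **kwargs):
            with lock:
                return func(*args, **kwargs)
        return call
    return wrapper

class TinyCache(object):
    def __init__(self):
        self.size = 0
        self.items = {}

    @synchronized(CACHE_LOCK)
    def add(self, key, data):
        # Size bookkeeping is inlined rather than delegated to a
        # separately locked helper, so the method takes the lock once
        # and the dict update and counter update are atomic together.
        if key in self.items:
            self.size -= len(self.items[key])
        self.items[key] = data
        self.size += len(data)
```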
```diff
@@ -86,15 +86,14 @@ class Assembler(Thread):
                     continue

                 # Prepare filename
-                filename = sanitize_filename(nzf.filename)
-                nzf.filename = filename
-                dupe = nzo.check_for_dupe(nzf)
-                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)
+                nzo.verify_nzf_filename(nzf)
+                nzf.filename = sanitize_filename(nzf.filename)
+                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)

                 if filepath:
                     logging.info('Decoding %s %s', filepath, nzf.type)
                     try:
-                        filepath = self.assemble(nzf, filepath, dupe)
+                        filepath = self.assemble(nzf, filepath)
                     except IOError, (errno, strerror):
                         # If job was deleted, ignore error
                         if not nzo.is_gone():
@@ -110,19 +109,23 @@ class Assembler(Thread):
                     logging.error(T('Fatal error in Assembler'), exc_info=True)
                     break

                 # Clean-up admin data
                 nzf.remove_admin()
-                setname = nzf.setname
-                if nzf.is_par2 and (nzo.md5packs.get(setname) is None):
-                    pack = self.parse_par2_file(filepath, nzo.md5of16k)
-                    if pack:
-                        nzo.md5packs[setname] = pack
-                        logging.debug('Got md5pack for set %s', setname)
-                        # Valid md5pack, so use this par2-file as main par2 file for the set
-                        if setname in nzo.partable:
-                            # First copy the set of extrapars, we need them later
-                            nzf.extrapars = nzo.partable[setname].extrapars
-                        nzo.partable[setname] = nzf
+
+                # Parse par2 files
+                if nzf.is_par2:
+                    # Always parse par2 files to get new md5of16k info
+                    pack = self.parse_par2_file(nzf, filepath)
+                    if pack and (nzo.md5packs.get(nzf.setname) is None):
+                        nzo.md5packs[nzf.setname] = pack
+                        logging.debug('Got md5pack for set %s', nzf.setname)
+                        # Valid md5pack, so use this par2-file as main par2 file for the set
+                        if nzf.setname in nzo.partable:
+                            # First copy the set of extrapars, we need them later
+                            nzf.extrapars = nzo.partable[nzf.setname].extrapars
+                        nzo.partable[nzf.setname] = nzf

                 # Encryption and unwanted extension detection
                 rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
                 if rar_encrypted:
                     if cfg.pause_on_pwrar() == 1:
@@ -161,15 +164,8 @@ class Assembler(Thread):
             sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False)
             PostProcessor.do.process(nzo)

-    def assemble(self, nzf, path, dupe):
+    def assemble(self, nzf, path):
         """ Assemble a NZF from its table of articles """
-        if os.path.exists(path):
-            unique_path = get_unique_filename(path)
-            if dupe:
-                path = unique_path
-            else:
-                renamer(path, unique_path)
-
         md5 = hashlib.md5()
         fout = open(path, 'ab')
         decodetable = nzf.decodetable
@@ -200,7 +196,7 @@ class Assembler(Thread):

         return path

-    def parse_par2_file(self, fname, table16k):
+    def parse_par2_file(self, nzf, fname):
         """ Get the hash table and the first-16k hash table from a PAR2 file
             Return as dictionary, indexed on names or hashes for the first-16 table
             For a full description of the par2 specification, visit:
@@ -220,10 +216,11 @@ class Assembler(Thread):
                 name, hash, hash16k = parse_par2_file_packet(f, header)
                 if name:
                     table[name] = hash
-                    if hash16k not in table16k:
-                        table16k[hash16k] = name
-                    else:
-                        # Not unique, remove to avoid false-renames
+                    if hash16k not in nzf.nzo.md5of16k:
+                        nzf.nzo.md5of16k[hash16k] = name
+                    elif nzf.nzo.md5of16k[hash16k] != name:
+                        # Not unique and not already linked to this file
+                        # Remove to avoid false-renames
                         duplicates16k.append(hash16k)

                 header = f.read(8)
@@ -240,10 +237,18 @@ class Assembler(Thread):
             # Have to remove duplicates at the end to make sure
             # no trace is left in case of multi-duplicates
             for hash16k in duplicates16k:
-                if hash16k in table16k:
-                    old_name = table16k.pop(hash16k)
+                if hash16k in nzf.nzo.md5of16k:
+                    old_name = nzf.nzo.md5of16k.pop(hash16k)
                     logging.debug('Par2-16k signature of %s not unique, discarding', old_name)

+            # If the filename was changed (duplicate filename) check if we already have the set
+            base_fname = os.path.split(fname)[1]
+            if table and base_fname != nzf.filename and table not in nzf.nzo.md5packs.values():
+                # Re-parse this par2 file to create new set
+                nzf.filename = base_fname
+                nzf.is_par2 = False
+                nzf.nzo.handle_par2(nzf, True)
+
             return table
```
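parse_par2_file keeps the first-16kB hashes in a shared per-job dict and discards any hash that maps to more than one filename, so a non-unique signature can never trigger a wrong rename. A minimal sketch of that dedupe pattern, using a plain dict in place of nzo.md5of16k:

```python
def build_unique_hash_table(entries):
    """entries: iterable of (hash16k, filename) pairs.

    Returns a dict containing only hashes that map to exactly one
    filename; conflicting hashes are dropped entirely, mirroring the
    duplicates16k handling above.
    """
    table = {}
    duplicates = []
    for hash16k, name in entries:
        if hash16k not in table:
            table[hash16k] = name
        elif table[hash16k] != name:
            # Same 16k-hash seen for a different file: unsafe for renaming
            duplicates.append(hash16k)
    # Remove at the end so no trace is left even for multi-duplicates
    for hash16k in duplicates:
        table.pop(hash16k, None)
    return table
```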
```diff
@@ -81,6 +81,7 @@ MAX_DECODE_QUEUE = 10
 LIMIT_DECODE_QUEUE = 100
 MAX_WARNINGS = 20
 MAX_WIN_DFOLDER = 60
+MAX_BAD_ARTICLES = 5

 REPAIR_PRIORITY = 3
 TOP_PRIORITY = 2
```
```diff
@@ -31,8 +31,8 @@ from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE, SABY
 import sabnzbd.articlecache
 import sabnzbd.downloader
 import sabnzbd.nzbqueue
-from sabnzbd.encoding import yenc_name_fixer, platform_encode
-from sabnzbd.misc import match_str, is_obfuscated_filename
+from sabnzbd.encoding import yenc_name_fixer
+from sabnzbd.misc import match_str

 # Check for basic-yEnc
 try:
@@ -336,26 +336,8 @@
         if article.partnum == nzf.lowest_partnum:
             nzf.md5of16k = hashlib.md5(decoded_data[:16384]).digest()

-        # If we have the md5, use it to rename
-        if nzf.md5of16k:
-            # Don't check again, even if no match
-            nzf.filename_checked = True
-            # Find the match and rename
-            if nzf.md5of16k in nzf.nzo.md5of16k:
-                new_filename = platform_encode(nzf.nzo.md5of16k[nzf.md5of16k])
-                # Was it even new?
-                if new_filename != nzf.filename:
-                    logging.info('Detected filename based on par2: %s -> %s', nzf.filename, new_filename)
-                    nzf.nzo.renamed_file(new_filename, nzf.filename)
-                    nzf.filename = new_filename
-                return
-
-        # Fallback to yenc/nzb name (also when there is no partnum=1)
-        # We also keep the NZB name in case it ends with ".par2" (usually correct)
-        if yenc_filename != nzf.filename and not is_obfuscated_filename(yenc_filename) and not nzf.filename.endswith('.par2'):
-            logging.info('Detected filename from yenc: %s -> %s', nzf.filename, yenc_filename)
-            nzf.nzo.renamed_file(yenc_filename, nzf.filename)
-            nzf.filename = yenc_filename
+        # Try the rename
+        nzf.nzo.verify_nzf_filename(nzf, yenc_filename)


 def yCheck(data):
```
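The decoder now only computes the MD5 digest of the file's first 16 kB and delegates the actual rename decision to nzo.verify_nzf_filename. A minimal sketch of the underlying idea, matching a decoded file against the names announced in the par2 set by that digest (the table and helper names here are illustrative):

```python
import hashlib

def detect_filename(decoded_data, md5of16k_table):
    """Return the par2-announced filename whose first-16kB MD5 matches
    `decoded_data`, or None if there is no match.

    `md5of16k_table` maps digest -> filename, as built while parsing
    par2 packets (see parse_par2_file above).
    """
    digest = hashlib.md5(decoded_data[:16384]).digest()
    return md5of16k_table.get(digest)
```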
```diff
@@ -430,4 +430,4 @@ def test_disk_performance():
     else:
         logging.info('Direct Unpack was not enabled, incomplete folder disk speed below 40MB/s')
     cfg.direct_unpack_tested.set(True)
-    config.save_config()
+    sabnzbd.config.save_config()
```
```diff
@@ -73,6 +73,7 @@ def is_archive(path):
             zf = zipfile.ZipFile(path)
             return 0, zf, '.zip'
         except:
+            logging.info(T('Cannot read %s'), path, exc_info=True)
             return -1, None, ''
     elif rarfile.is_rarfile(path):
         try:
@@ -81,14 +82,17 @@ def is_archive(path):
             zf = rarfile.RarFile(path)
             return 0, zf, '.rar'
         except:
+            logging.info(T('Cannot read %s'), path, exc_info=True)
             return -1, None, ''
     elif is_sevenfile(path):
         try:
             zf = SevenZip(path)
             return 0, zf, '.7z'
         except:
+            logging.info(T('Cannot read %s'), path, exc_info=True)
             return -1, None, ''
     else:
         logging.info('Archive %s is not a real archive!', os.path.basename(path))
         return 1, None, ''
@@ -127,17 +131,24 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
         try:
             data = zf.read(name)
         except:
+            logging.error(T('Cannot read %s'), name, exc_info=True)
             zf.close()
             return -1, []
         name = os.path.basename(name)
         if data:
             nzo = None
             try:
                 nzo = nzbstuff.NzbObject(name, pp, script, data, cat=cat, url=url,
                                          priority=priority, nzbname=nzbname)
                 if not nzo.password:
                     nzo.password = password
+            except (TypeError, ValueError) as e:
+                # Duplicate or empty, ignore
+                pass
             except:
                 nzo = None
+                # Something else is wrong, show error
+                logging.error(T('Error while adding %s, removing'), name, exc_info=True)

         if nzo:
             if nzo_id:
                 # Re-use existing nzo_id, when a "future" job gets it payload
@@ -222,6 +233,8 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
             # Looks like an incomplete file, retry
             return -2, nzo_ids
         else:
+            # Something else is wrong, show error
+            logging.error(T('Error while adding %s, removing'), name, exc_info=True)
             return -1, nzo_ids

     if nzo:
```
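is_archive returns a (status, handle, extension) triple: 0 with an open handle on success, -1 when the file looks like an archive but cannot be read, and 1 when it is not an archive at all. A hedged sketch of a caller honoring that contract (namelist() is assumed to be available on all three handle types, as it is for zipfile and the rarfile wrapper):

```python
def scan_for_nzbs(path):
    """Illustrative caller for the is_archive() contract above."""
    status, zf, ext = is_archive(path)
    if status == 0:
        try:
            return [name for name in zf.namelist()
                    if name.lower().endswith('.nzb')]
        finally:
            zf.close()
    elif status == -1:
        return None   # unreadable archive: caller should report or retry
    else:
        return []     # not an archive: nothing to extract
```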
```diff
@@ -84,7 +84,7 @@ class Server(object):
         self.categories = categories

         # Temporary deprication warning
-        if len(categories) > 1 or 'Default' not in categories:
+        if categories and (len(categories) > 1 or 'Default' not in categories):
             logging.warning('[%s] Server specific categories option is scheduled to be removed in the next release of SABnzbd', self.host)

         self.busy_threads = []
@@ -655,7 +655,7 @@ class Downloader(Thread):
             logging.error(T('Failed login for server %s'), server.id)
             penalty = _PENALTY_PERM
             block = True
-        elif ecode == '502':
+        elif ecode in ('502', '482'):
             # Cannot connect (other reasons), block this server
             if server.active:
                 errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg)
@@ -795,11 +795,8 @@
         # Remove this server from try_list
         article.fetcher = None

-        nzf = article.nzf
-        nzo = nzf.nzo
-
-        # Allow all servers to iterate over each nzo/nzf again ##
-        sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
+        # Allow all servers to iterate over each nzo/nzf again
+        sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article.nzf, article.nzf.nzo)

     if destroy:
         nw.terminate(quit=quit)
@@ -942,7 +939,8 @@ def clues_too_many(text):
     """ Check for any "too many connections" clues in the response code """
     text = text.lower()
     for clue in ('exceed', 'connections', 'too many', 'threads', 'limit'):
-        if clue in text:
+        # Not 'download limit exceeded' error
+        if (clue in text) and ('download' not in text):
             return True
     return False
@@ -959,7 +957,7 @@
 def clues_pay(text):
     """ Check for messages about payments """
     text = text.lower()
-    for clue in ('credits', 'paym', 'expired'):
+    for clue in ('credits', 'paym', 'expired', 'exceeded'):
         if clue in text:
             return True
     return False
```
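With these two changes, 'Download limit exceeded' is no longer treated as a too-many-connections hint (which would only earn the server a short penalty) and is instead picked up by clues_pay via the new 'exceeded' keyword, matching the release note about blocking the server longer on such errors. A few hypothetical spot-checks, assuming the two functions exactly as shown above:

```python
# Hypothetical spot-checks for the clue scanners shown above
assert not clues_too_many("481 Download limit exceeded")  # excluded by 'download'
assert clues_pay("481 Download limit exceeded")           # caught by 'exceeded'
assert clues_too_many("482 too many connections")         # still detected
assert not clues_pay("482 too many connections")
```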
```diff
@@ -2414,7 +2414,7 @@ LOG_API_RE = re.compile(r"(apikey|api)(=|:)[\w]+", re.I)
 LOG_API_JSON_RE = re.compile(r"u'(apikey|api)': u'[\w]+'", re.I)
 LOG_USER_RE = re.compile(r"(user|username)\s?=\s?[\S]+", re.I)
 LOG_PASS_RE = re.compile(r"(password)\s?=\s?[\S]+", re.I)
-LOG_INI_HIDE_RE = re.compile(r"(email_pwd|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I)
+LOG_INI_HIDE_RE = re.compile(r"(email_pwd|email_account|email_to|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I)
 LOG_HASH_RE = re.compile(r"([a-fA-F\d]{25})", re.I)

 class Status(object):
```
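LOG_INI_HIDE_RE gains email_account and email_to, which is the "Remove email addresses settings from log export" fix from the release notes. A hedged sketch of how such a pattern is typically applied to scrub an INI-style log before export (the substitution helper is illustrative, not SABnzbd's exact code):

```python
import re

LOG_INI_HIDE_RE = re.compile(
    r"(email_pwd|email_account|email_to|rating_api_key|pushover_token|"
    r"pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|"
    r"growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I)

def scrub_log_line(line):
    # Replace the whole "key = value" match, keeping only the key name
    return LOG_INI_HIDE_RE.sub(r"\1 = <REMOVED>", line)

# e.g. scrub_log_line("email_to = user@example.com") -> "email_to = <REMOVED>"
```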
```diff
@@ -42,9 +42,11 @@ import sabnzbd.config as config
 import sabnzbd.cfg as cfg
 from sabnzbd.encoding import unicoder, special_fixer, gUTF

-RE_VERSION = re.compile(r'(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)')
-RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
 TAB_UNITS = ('', 'K', 'M', 'G', 'T', 'P')
+RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
+RE_VERSION = re.compile(r'(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)')
+RE_IP4 = re.compile(r'inet\s+(addr:\s*){0,1}(\d+\.\d+\.\d+\.\d+)')
+RE_IP6 = re.compile(r'inet6\s+(addr:\s*){0,1}([0-9a-f:]+)', re.I)

 # Check if strings are defined for AM and PM
 HAVE_AMPM = bool(time.strftime('%p', time.localtime()))
@@ -92,6 +94,15 @@ def calc_age(date, trans=False):
     return age


+def monthrange(start, finish):
+    """ Calculate months between 2 dates, used in the Config template """
+    months = (finish.year - start.year) * 12 + finish.month + 1
+    for i in xrange(start.month, months):
+        year = (i - 1) / 12 + start.year
+        month = (i - 1) % 12 + 1
+        yield datetime.date(year, month, 1)
+
+
 def safe_lower(txt):
     """ Return lowercased string. Return '' for None """
     if txt:
@@ -1348,6 +1359,7 @@ def get_all_passwords(nzo):
     pw = nzo.nzo_info.get('password')
     if pw:
         meta_passwords.append(pw)
+
     if meta_passwords:
         if nzo.password == meta_passwords[0]:
             # this nzo.password came from meta, so don't use it twice
@@ -1355,19 +1367,23 @@ def get_all_passwords(nzo):
         else:
             passwords.extend(meta_passwords)
         logging.info('Read %s passwords from meta data in NZB: %s', len(meta_passwords), meta_passwords)

     pw_file = cfg.password_file.get_path()
     if pw_file:
         try:
-            pwf = open(pw_file, 'r')
-            lines = pwf.read().split('\n')
+            with open(pw_file, 'r') as pwf:
+                lines = pwf.read().split('\n')
             # Remove empty lines and space-only passwords and remove surrounding spaces
             pws = [pw.strip('\r\n ') for pw in lines if pw.strip('\r\n ')]
             logging.debug('Read these passwords from file: %s', pws)
             passwords.extend(pws)
-            pwf.close()
             logging.info('Read %s passwords from file %s', len(pws), pw_file)
-        except IOError:
-            logging.info('Failed to read the passwords file %s', pw_file)
+        except:
+            logging.warning('Failed to read the passwords file %s', pw_file)
+
+    # Check size
+    if len(passwords) > 30:
+        logging.warning(T('Your password file contains more than 30 passwords, testing all these passwords takes a lot of time. Try to only list useful passwords.'))

     if nzo.password:
         # If an explicit password was set, add a retry without password, just in case.
@@ -1397,8 +1413,6 @@ def find_on_path(targets):
     return None


-_RE_IP4 = re.compile(r'inet\s+(addr:\s*){0,1}(\d+\.\d+\.\d+\.\d+)')
-_RE_IP6 = re.compile(r'inet6\s+(addr:\s*){0,1}([0-9a-f:]+)', re.I)
 def ip_extract():
     """ Return list of IP addresses of this system """
     ips = []
@@ -1425,9 +1439,9 @@ def ip_extract():
     output = p.stdout.read()
     p.wait()
     for line in output.split('\n'):
-        m = _RE_IP4.search(line)
+        m = RE_IP4.search(line)
         if not (m and m.group(2)):
-            m = _RE_IP6.search(line)
+            m = RE_IP6.search(line)
         if m and m.group(2):
             ips.append(m.group(2))
     return ips
```
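The new monthrange generator yields the first day of every month between two dates, inclusive, which is what the server-usage date-selector in the Config template iterates over after reversing; this replaced the old days/(365/12) estimate that could skip months. A small usage sketch (Python 2, matching the xrange and integer division above):

```python
import datetime

# monthrange as added above
def monthrange(start, finish):
    months = (finish.year - start.year) * 12 + finish.month + 1
    for i in xrange(start.month, months):
        year = (i - 1) / 12 + start.year
        month = (i - 1) % 12 + 1
        yield datetime.date(year, month, 1)

months = list(monthrange(datetime.date(2017, 5, 20), datetime.date(2017, 8, 25)))
months.reverse()  # newest month first, like the template's selector
# months == [2017-08-01, 2017-07-01, 2017-06-01, 2017-05-01]
options = ['%d-%02d' % (d.year, d.month) for d in months]
# options == ['2017-08', '2017-07', '2017-06', '2017-05']
```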
```diff
@@ -522,6 +522,23 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
         logging.debug('rar_unpack(): Newfiles: %s', newfiles)
         extracted_files.extend(newfiles)

+        # Do not fail if this was a recursive unpack
+        if fail and rarpath.startswith(workdir_complete):
+            # Do not delete the files, leave it to user!
+            logging.info('Ignoring failure to do recursive unpack of %s', rarpath)
+            fail = 0
+            success = True
+            newfiles = []
+
+        # Do not fail if this was maybe just some duplicate fileset
+        # Multipar and par2tbb will detect and log them, par2cmdline will not
+        if fail and rar_set.endswith(('.1', '.2')):
+            # Just in case, we leave the raw files
+            logging.info('Ignoring failure of unpack for possible duplicate file %s', rarpath)
+            fail = 0
+            success = True
+            newfiles = []
+
         # Delete the old files if we have to
         if success and delete and newfiles:
             for rar in rars:
@@ -606,6 +623,10 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
         command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, rename, '-ai', password_command,
                    '%s' % clip_path(rarfile_path), '%s\\' % extraction_path]

+        # The subprocess_fix requires time to clear the buffers to work,
+        # otherwise the inputs get send incorrectly and unrar breaks
+        time.sleep(0.5)
+
     elif RAR_PROBLEM:
         # Use only oldest options (specifically no "-or")
         command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, password_command,
@@ -1521,7 +1542,12 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False):
                     verifynum += 1
                     nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal))
                     nzo.status = Status.VERIFYING
-                    datafiles.append(TRANS(m.group(1)))
+
+                    # Remove redundant extra files that are just duplicates of original ones
+                    if 'duplicate data blocks' in line:
+                        used_for_repair.append(TRANS(m.group(1)))
+                    else:
+                        datafiles.append(TRANS(m.group(1)))
                     continue

                 # Verify done
@@ -1943,7 +1969,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False)
             if renames:
                 # If succes, we also remove the possibly previously renamed ones
                 if finished:
-                    reconstructed.extend(nzo.renames)
+                    reconstructed.extend(renames.values())

                 # Adding to the collection
                 nzo.renamed_file(renames)
@@ -2064,10 +2090,13 @@ def rar_volumelist(rarfile_path, password, known_volumes):
     """ Extract volumes that are part of this rarset
         and merge them with existing list, removing duplicates
     """
+    # UnRar is required to read some RAR files
+    rarfile.UNRAR_TOOL = RAR_COMMAND
     zf = rarfile.RarFile(rarfile_path)

-    # setpassword can fail due to bugs in RarFile
     if password:
         try:
+            # setpassword can fail due to bugs in RarFile
             zf.setpassword(password)
         except:
             pass
```
```diff
@@ -768,10 +768,6 @@ class NzbQueue(object):
     def end_job(self, nzo):
         """ Send NZO to the post-processing queue """
         logging.info('Ending job %s', nzo.final_name)
-        if self.actives(grabs=False) < 2 and cfg.autodisconnect():
-            # This was the last job, close server connections
-            if sabnzbd.downloader.Downloader.do:
-                sabnzbd.downloader.Downloader.do.disconnect()

         # Notify assembler to call postprocessor
         if not nzo.deleted:
@@ -861,6 +857,7 @@ class NzbQueue(object):
         for nzo in self.__nzo_list:
             if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
                 empty.append(nzo)
+
         for nzo in empty:
             self.end_job(nzo)
```
```diff
@@ -41,11 +41,11 @@ import sabnzbd
 from sabnzbd.constants import GIGI, ATTRIB_FILE, JOB_ADMIN, \
     DEFAULT_PRIORITY, LOW_PRIORITY, NORMAL_PRIORITY, \
     PAUSED_PRIORITY, TOP_PRIORITY, DUP_PRIORITY, REPAIR_PRIORITY, \
-    RENAMES_FILE, Status, PNFO
+    RENAMES_FILE, MAX_BAD_ARTICLES, Status, PNFO
 from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
     get_unique_path, get_admin_path, remove_all, sanitize_filename, globber_full, \
     int_conv, set_permissions, format_time_string, long_path, trim_win_path, \
-    fix_unix_encoding, calc_age
+    fix_unix_encoding, calc_age, is_obfuscated_filename
 from sabnzbd.decorators import synchronized
 import sabnzbd.config as config
 import sabnzbd.cfg as cfg
@@ -307,13 +307,27 @@ class NzbFile(TryList):
         self.blocks = int(blocks)

     def get_article(self, server, servers):
-        """ Get next article to be downloaded """
+        """ Get next article to be downloaded from this server
+            Returns None when there are still articles to try
+            Returns False when all articles are tried
+        """
+        # Make sure all articles have tried this server before
+        # adding to the NZF-TryList, otherwise there will be stalls!
+        tried_all_articles = True
         for article in self.articles:
-            article = article.get_article(server, servers)
-            if article:
-                return article
+            article_return = article.get_article(server, servers)
+            if article_return:
+                return article_return
+            elif tried_all_articles and not article.server_in_try_list(server):
+                tried_all_articles = False

-        self.add_to_try_list(server)
+        # We are sure they are all tried
+        if tried_all_articles:
+            self.add_to_try_list(server)
+            return False
+
+        # Still articles left to try
+        return None

     def reset_all_try_lists(self):
         """ Clear all lists of visited servers """
```
```diff
@@ -558,7 +572,7 @@ NzbObjectSaver = (
     'filename', 'work_name', 'final_name', 'created', 'bytes', 'bytes_downloaded', 'bytes_tried',
     'repair', 'unpack', 'delete', 'script', 'cat', 'url', 'groups', 'avg_date', 'md5of16k',
     'partable', 'extrapars', 'md5packs', 'files', 'files_table', 'finished_files', 'status',
-    'avg_bps_freq', 'avg_bps_total', 'priority', 'dupe_table', 'saved_articles', 'nzo_id',
+    'avg_bps_freq', 'avg_bps_total', 'priority', 'saved_articles', 'nzo_id',
     'futuretype', 'deleted', 'parsed', 'action_line', 'unpack_info', 'fail_msg', 'nzo_info',
     'custom_name', 'password', 'next_save', 'save_timeout', 'encrypted', 'bad_articles',
     'duplicate', 'oversized', 'precheck', 'incomplete', 'reuse', 'meta',
```
```diff
@@ -651,8 +665,6 @@ class NzbObject(TryList):
             priority = DEFAULT_PRIORITY
         self.priority = priority
 
-        self.dupe_table = {}
-
         self.saved_articles = []
 
         self.nzo_id = None
```
```diff
@@ -939,21 +951,6 @@ class NzbObject(TryList):
             # Raise error, so it's not added
             raise TypeError
 
-    def check_for_dupe(self, nzf):
-        filename = nzf.filename
-
-        dupe = False
-
-        if filename in self.dupe_table:
-            old_nzf = self.dupe_table[filename]
-            if nzf.article_count <= old_nzf.article_count:
-                dupe = True
-
-        if not dupe:
-            self.dupe_table[filename] = nzf
-
-        return dupe
-
     @synchronized(NZO_LOCK)
     def update_download_stats(self, bps, serverid, bytes):
         if bps:
```
```diff
@@ -995,7 +992,8 @@ class NzbObject(TryList):
             if head and matcher(lparset, head.lower()):
                 xnzf.set_par2(parset, vol, block)
                 # Don't postpone if all par2 are desired and should be kept
-                if not(cfg.enable_all_par() and not cfg.enable_par_cleanup()):
+                # Also don't postpone header-only-files, to extract all possible md5of16k
+                if not(cfg.enable_all_par() and not cfg.enable_par_cleanup()) and block:
                     self.extrapars[parset].append(xnzf)
                     self.files.remove(xnzf)
```
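The reworked condition encodes two rules: par2 volumes are postponed into extrapars unless the user wants all par2 fetched and kept, and header-only par2 files (block count 0) are never postponed, so their md5of16k data can still be extracted. A truth-table sketch with the cfg calls replaced by plain booleans:

```python
def should_postpone(enable_all_par, enable_par_cleanup, block):
    # Postpone only when par2 is not meant to be kept, and only real
    # par2 volumes (block > 0), never the header-only files
    return not (enable_all_par and not enable_par_cleanup) and bool(block)

assert should_postpone(False, True, 10)      # normal par2 volume: postponed
assert not should_postpone(False, True, 0)   # header-only file: kept in queue
assert not should_postpone(True, False, 10)  # keep-all-par2 setup: kept
```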
```diff
@@ -1075,7 +1073,7 @@ class NzbObject(TryList):
             self.prospective_add(nzf)
 
         # Sometimes a few CRC errors are still fine, so we continue
-        if self.bad_articles > 5:
+        if self.bad_articles > MAX_BAD_ARTICLES:
             self.abort_direct_unpacker()
 
         post_done = False
```
```diff
@@ -1275,7 +1273,7 @@ class NzbObject(TryList):
         while blocks_already < self.bad_articles and extrapars_sorted:
             new_nzf = extrapars_sorted.pop()
             # Reset NZF TryList, in case something was on it before it became extrapar
-            new_nzf.reset_try_list()
+            new_nzf.reset_all_try_lists()
             self.add_parfile(new_nzf)
             self.extrapars[parset] = extrapars_sorted
             blocks_already = blocks_already + int_conv(new_nzf.blocks)
```
```diff
@@ -1298,6 +1296,12 @@ class NzbObject(TryList):
         """ Determine amount of articles present on servers
             and return (gross available, nett) bytes
         """
+        # Few missing articles in RAR-only job might still work
+        if self.bad_articles <= MAX_BAD_ARTICLES:
+            logging.debug('Download Quality: bad-articles=%s', self.bad_articles)
+            return True, 200
+
+        # Do the full check
         need = 0L
         pars = 0L
         short = 0L
```
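The early return trades precision for speed: a job with only a handful of bad articles is assumed viable and reported at an optimistic 200%, skipping the per-article availability scan below. A sketch, assuming MAX_BAD_ARTICLES is the small constant that replaced the literal 5 in the earlier hunk:

```python
MAX_BAD_ARTICLES = 5  # assumption: matches the "> 5" literal replaced above

def check_quality_fast(bad_articles):
    """Return (ok, percent) early, or None to fall through to the full scan."""
    if bad_articles <= MAX_BAD_ARTICLES:
        return True, 200  # assume complete enough to attempt repair/unpack
    return None
```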
```diff
@@ -1384,6 +1388,7 @@ class NzbObject(TryList):
     def get_article(self, server, servers):
         article = None
         nzf_remove_list = []
+        tried_all_articles = True
 
         for nzf in self.files:
             if nzf.deleted:
```
```diff
@@ -1407,16 +1412,21 @@ class NzbObject(TryList):
                 article = nzf.get_article(server, servers)
                 if article:
                     break
+                if article == None:
+                    # None is returned by NZF when server is not tried for all articles
+                    tried_all_articles = False
 
         # Remove all files for which admin could not be read
         for nzf in nzf_remove_list:
             nzf.deleted = True
             self.files.remove(nzf)
 
+        # If cleanup emptied the active files list, end this job
+        if nzf_remove_list and not self.files:
+            sabnzbd.NzbQueue.do.end_job(self)
+
-        if not article:
+        # Only add to trylist when server has been tried for all articles of all NZF's
+        if not article and tried_all_articles:
             # No articles for this server, block for next time
             self.add_to_try_list(server)
         return article
```
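At the job level the tri-state value is consumed like this (a simplified, self-contained sketch, not the real NzbObject code): the server is only blocked for the whole job once no file reports None.

```python
# fetch() stands in for NzbFile.get_article: it yields an article,
# None (articles still pending on this server), or False (all tried)
def job_get_article(files, server, blocked_servers):
    tried_all_articles = True
    for f in files:
        result = f.fetch(server)
        if result not in (None, False):
            return result                  # something to download
        if result is None:
            tried_all_articles = False     # server not yet tried everywhere
    if tried_all_articles:
        blocked_servers.add(server)        # safe to block: no stalls possible
    return None
```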
```diff
@@ -1489,6 +1499,33 @@ class NzbObject(TryList):
                 self.files[pos + 1] = nzf
                 self.files[pos] = tmp_nzf
 
+    def verify_nzf_filename(self, nzf, yenc_filename=None):
+        """ Get filename from par2-info or from yenc """
+        # Already done?
+        if nzf.filename_checked:
+            return
+
+        # If we have the md5, use it to rename
+        if nzf.md5of16k and self.md5of16k:
+            # Don't check again, even if no match
+            nzf.filename_checked = True
+            # Find the match and rename
+            if nzf.md5of16k in self.md5of16k:
+                new_filename = platform_encode(self.md5of16k[nzf.md5of16k])
+                # Was it even new?
+                if new_filename != nzf.filename:
+                    logging.info('Detected filename based on par2: %s -> %s', nzf.filename, new_filename)
+                    self.renamed_file(new_filename, nzf.filename)
+                    nzf.filename = new_filename
+                return
+
+        # Fallback to yenc/nzb name (also when there is no partnum=1)
+        # We also keep the NZB name in case it ends with ".par2" (usually correct)
+        if yenc_filename and yenc_filename != nzf.filename and not is_obfuscated_filename(yenc_filename) and not nzf.filename.endswith('.par2'):
+            logging.info('Detected filename from yenc: %s -> %s', nzf.filename, yenc_filename)
+            self.renamed_file(yenc_filename, nzf.filename)
+            nzf.filename = yenc_filename
+
     @synchronized(NZO_LOCK)
     def renamed_file(self, name_set, old_name=None):
         """ Save renames at various stages (Download/PP)
```
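The new verify_nzf_filename prefers the name recovered from par2 metadata: the job carries a map from the MD5 of a file's first 16 kB to its intended filename, which defeats obfuscated subject/yenc names. A minimal sketch of that lookup, with illustrative names and an assumed digest-keyed dict:

```python
import hashlib

def filename_from_par2(md5of16k_map, first_16k, current_name):
    # md5of16k_map: {md5 digest of first 16 kB: real filename} (assumed shape)
    digest = hashlib.md5(first_16k).digest()
    real_name = md5of16k_map.get(digest)
    if real_name and real_name != current_name:
        return real_name   # rename detected via par2 info
    return current_name
```

When no par2 match exists, the hunk falls back to the yenc name, but only if that name is not obfuscated and the current name does not already end in ".par2".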
```diff
@@ -308,6 +308,11 @@ def process_job(nzo):
             # Try to get more par files
             return False
 
+    # If we don't need extra par2, we can disconnect
+    if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect():
+        # This was the last job, close server connections
+        sabnzbd.downloader.Downloader.do.disconnect()
+
     # Sanitize the resulting files
     if sabnzbd.WIN32:
         sanitize_files_in_folder(workdir)
```
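Read together with the NzbQueue hunk above (which drops the disconnect from end_job), the auto-disconnect decision now happens during post-processing, after it is known that no extra par2 files must be fetched. A rough sketch of the resulting ordering; all names here are hypothetical stand-ins, not the real SABnzbd API:

```python
def post_process(job, queue, config, downloader):
    # Hypothetical stand-ins for NzbQueue.actives(), cfg.autodisconnect()
    # and Downloader.disconnect(); only the ordering mirrors the hunk
    if job.needs_more_par2:
        return False              # keep connections: more downloading ahead
    if queue.active_downloads == 0 and config.autodisconnect:
        downloader.disconnect()   # this really was the last active job
    return True
```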
```diff
@@ -28,12 +28,28 @@ import time
 ##############################################################################
 # Power management for Windows
 ##############################################################################
+try:
+    import win32security
+    import win32api
+    import ntsecuritycon
+except ImportError:
+    pass
+
+
+def win_power_privileges():
+    """ To do any power-options, the process needs higher privileges """
+    flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
+    htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
+    id_ = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
+    newPrivileges = [(id_, ntsecuritycon.SE_PRIVILEGE_ENABLED)]
+    win32security.AdjustTokenPrivileges(htoken, 0, newPrivileges)
+
 
 def win_hibernate():
     """ Hibernate Windows system, returns after wakeup """
     try:
-        subprocess.Popen("rundll32 powrprof.dll,SetSuspendState Hibernate")
-        time.sleep(10)
+        win_power_privileges()
+        win32api.SetSystemPowerState(False, True)
     except:
         logging.error(T('Failed to hibernate system'))
         logging.info("Traceback: ", exc_info=True)
```
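A standalone sketch of the privilege dance above (Windows with the pywin32 package only): SeShutdownPrivilege is disabled by default, so it must be enabled on the process token before SetSystemPowerState or InitiateSystemShutdown will succeed.

```python
import ntsecuritycon
import win32api
import win32security

def enable_shutdown_privilege():
    # Open our own process token with rights to adjust privileges
    flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
    htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
    priv_id = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
    win32security.AdjustTokenPrivileges(htoken, 0, [(priv_id, ntsecuritycon.SE_PRIVILEGE_ENABLED)])

if __name__ == "__main__":
    enable_shutdown_privilege()
    # First argument: True = standby, False = hibernate; second forces it
    win32api.SetSystemPowerState(True, True)
```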
```diff
@@ -42,8 +58,8 @@ def win_hibernate():
 def win_standby():
     """ Standby Windows system, returns after wakeup """
     try:
-        subprocess.Popen("rundll32 powrprof.dll,SetSuspendState Standby")
-        time.sleep(10)
+        win_power_privileges()
+        win32api.SetSystemPowerState(True, True)
     except:
         logging.error(T('Failed to standby system'))
         logging.info("Traceback: ", exc_info=True)
```
```diff
@@ -52,15 +68,7 @@ def win_standby():
 def win_shutdown():
     """ Shutdown Windows system, never returns """
     try:
-        import win32security
-        import win32api
-        import ntsecuritycon
-
-        flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
-        htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
-        id_ = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
-        newPrivileges = [(id_, ntsecuritycon.SE_PRIVILEGE_ENABLED)]
-        win32security.AdjustTokenPrivileges(htoken, 0, newPrivileges)
+        win_power_privileges()
         win32api.InitiateSystemShutdown("", "", 30, 1, 0)
     finally:
         os._exit(0)
```
```diff
@@ -993,5 +993,6 @@ SKIN_TEXT = {
     SABnzbd comes with ABSOLUTELY NO WARRANTY.
     This is free software, and you are welcome to redistribute it under certain conditions.
     It is licensed under the GNU GENERAL PUBLIC LICENSE Version 2 or (at your option) any later version.
-    ''')
+    '''),
+    'wizard-ad': TT('In order to download from Usenet you will require access to a provider. Your ISP may provide you with access, however a premium provider is recommended. Don\'t have a Usenet provider? We recommend trying %s.''')
 }
```
```diff
@@ -86,17 +86,20 @@ def CreateProcess(executable, args, _p_attr, _t_attr,
     Python implementation of CreateProcess using CreateProcessW for Win32
 
     """
 
     si = STARTUPINFOW(
         dwFlags=startup_info.dwFlags,
         wShowWindow=startup_info.wShowWindow,
         cb=sizeof(STARTUPINFOW),
-        ## XXXvlab: not sure of the casting here to ints.
-        hStdInput=int(startup_info.hStdInput),
-        hStdOutput=int(startup_info.hStdOutput),
-        hStdError=int(startup_info.hStdError),
     )
 
+    # Only cast to ints when it's given
+    if startup_info.hStdInput:
+        si.hStdInput = int(startup_info.hStdInput)
+    if startup_info.hStdOutput:
+        si.hStdOutput = int(startup_info.hStdOutput)
+    if startup_info.hStdError:
+        si.hStdError = int(startup_info.hStdError)
+
     wenv = None
     if env is not None:
         ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar
```
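The rewritten block fixes a concrete failure mode: `int(None)` raises TypeError, so standard handles that were never set must be skipped rather than cast. A tiny, runnable illustration of the guard, with hypothetical handle values rather than the real STARTUPINFOW fields:

```python
def cast_handles(h_stdin, h_stdout, h_stderr):
    si = {}
    for name, handle in (("hStdInput", h_stdin),
                         ("hStdOutput", h_stdout),
                         ("hStdError", h_stderr)):
        if handle:                 # only cast to int when it's given
            si[name] = int(handle)
    return si

print(cast_handles(None, 7, None))  # {'hStdOutput': 7}
```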
```diff
@@ -4,5 +4,5 @@
 
 # You MUST use double quotes (so " and not ')
 
-__version__ = "2.2.0-develop"
-__baseline__ = "unknown"
+__version__ = "2.2.0"
+__baseline__ = "b061e582b63491f5a7dca02277b1ccd490bbd388"
```
```diff
@@ -1,15 +1,15 @@
-@echo off
-rem Example of a post processing script for SABnzbd
-
-echo.
-echo Running in directory "%~d0%~p0"
-echo.
-echo The first parameter (result-dir) = %1
-echo The second parameter (nzb-name) = %2
-echo The third parameter (nice name) = %3
-echo The fourth parameter (newzbin #) = %4
-echo The fifth parameter (category) = %5
-echo The sixth parameter (group) = %6
-echo The seventh parameter (status) = %7
-echo The eight parameter (failure_url)= %8
-echo.
+@echo off
+rem Example of a post processing script for SABnzbd
+
+echo.
+echo Running in directory "%~d0%~p0"
+echo.
+echo The first parameter (result-dir) = %1
+echo The second parameter (nzb-name) = %2
+echo The third parameter (nice name) = %3
+echo The fourth parameter (newzbin #) = %4
+echo The fifth parameter (category) = %5
+echo The sixth parameter (group) = %6
+echo The seventh parameter (status) = %7
+echo The eight parameter (failure_url)= %8
+echo.
```
Binary file not shown.
Binary file not shown.