Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-05 22:20:21 -05:00)

Compare commits: 7 commits
| SHA1 |
|---|
| 27f83f21be |
| 5e31a31a21 |
| a077012478 |
| fed0e0f765 |
| fbdbf7ab22 |
| f013d38d00 |
| 93b9c8a6da |
PKG-INFO (4 changed lines)

```diff
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 2.2.0RC2
-Summary: SABnzbd-2.2.0RC2
+Version: 2.2.0RC3
+Summary: SABnzbd-2.2.0RC3
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
```
```diff
@@ -1,4 +1,4 @@
-Release Notes - SABnzbd 2.2.0 Release Candidate 2
+Release Notes - SABnzbd 2.2.0 Release Candidate 3
 =========================================================

 NOTE: Due to changes in this release, the queue will be converted when 2.2.0
@@ -6,6 +6,11 @@ is started for the first time. Job order, settings and data will be
 preserved, but all jobs will be unpaused and URLs that did not finish
 fetching before the upgrade will be lost!

+## Changes since 2.2.0 Release Candidate 2
+- Handling of par2 files made more robust
+- Standby/Hibernate was not working on Windows
+- Server graphs did not work in all time zones
+
 ## Changes since 2.2.0 Release Candidate 1
 - Not all RAR files were correctly removed for encrypted downloads
 - Better indication of verification process before and after repair
```
```diff
@@ -140,10 +140,13 @@
 <!--
     We need to find how many months we have recorded so far, so we
     loop over all the dates to find the lowest value and then use
-    the number of days passed as an estimate of the months we have.
+    this to calculate the date-selector
 -->

+<!--#import json#-->
+<!--#import datetime#-->
+<!--#import sabnzbd.misc#-->

 <!--#def show_date_selector($server, $id)#-->
 <!--#set month_names = [$T('January'), $T('February'), $T('March'), $T('April'), $T('May'), $T('June'), $T('July'), $T('August'), $T('September'), $T('October'), $T('November'), $T('December')] #-->
 <!--#set min_date = datetime.date.today()#-->
@@ -151,10 +154,10 @@
 <!--#set split_date = $date.split('-')#-->
 <!--#set min_date = min(min_date, datetime.date(int(split_date[0]), int(split_date[1]), 1))#-->
 <!--#end for#-->
-<!--#set months_recorded = int((datetime.date.today()-min_date).days / (365/12))#-->
+<!--#set months_recorded = list(sabnzbd.misc.monthrange(min_date, datetime.date.today()))#-->
+<!--#$months_recorded.reverse()#-->
 <select class="chart-selector" name="chart-selector-${id}" id="chart-selector-${id}" data-id="${id}">
-<!--#for $i in range(months_recorded+1)#-->
-<!--#set cur_date = (datetime.date.today() - datetime.timedelta($i*365/12))#-->
+<!--#for $cur_date in months_recorded#-->
 <option value="<!--#echo '%d-%02d' % ($cur_date.year, $cur_date.month)#-->">$month_names[$cur_date.month-1] $cur_date.year</option>
 <!--#end for#-->
 </select>
@@ -300,7 +303,7 @@
 // Server data
 serverData[${cur}] = <!--#echo json.dumps($server['amounts'][4])#-->
 \$(document).ready(function() {
-    showChart(${cur})
+    showChart(${cur}, \$('#chart-selector-${cur}').val())
 })
 </script>
 <!--#end if#-->
@@ -317,13 +320,9 @@
 var thisDay = new Date()

 // What month are we doing?
-if(month) {
-    var inputDate = new Date(month+'-01')
-} else {
-    var inputDate = new Date()
-}
-var baseDate = new Date(inputDate.getFullYear(), inputDate.getMonth(), 1)
-var maxDaysInMonth = new Date(baseDate.getYear(), baseDate.getMonth()+1, 0).getDate()
+var inputDate = new Date(month+'-01')
+var baseDate = new Date(inputDate.getUTCFullYear(), inputDate.getUTCMonth(), 1)
+var maxDaysInMonth = new Date(baseDate.getFullYear(), baseDate.getMonth()+1, 0).getDate()

 // Fill the data array
 var data = {
```
```diff
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2017-08-06 09:51+0000\n"
-"PO-Revision-Date: 2017-08-06 20:37+0000\n"
-"Last-Translator: fox <Unknown>\n"
+"PO-Revision-Date: 2017-08-09 21:46+0000\n"
+"Last-Translator: Safihre <safihre@sabnzbd.org>\n"
 "Language-Team: German <de@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2017-08-07 05:53+0000\n"
-"X-Generator: Launchpad (build 18441)\n"
+"X-Launchpad-Export-Date: 2017-08-10 06:03+0000\n"
+"X-Generator: Launchpad (build 18446)\n"

 #: SABnzbd.py [Error message]
 msgid "Failed to start web-interface"
@@ -1235,7 +1235,7 @@ msgid ""
 "%d files with duplicate filenames were discared for \"%s\". Enable "
 "\"allow_duplicate_files\" to allow duplicate filenames."
 msgstr ""
-"%s Dateien mit doppelten Dateinamen wurden für \"%s\" verworfen. Aktiviere "
+"%d Dateien mit doppelten Dateinamen wurden für \"%s\" verworfen. Aktiviere "
 "\"allow_duplicate_files\" um doppelte Dateinamen zu erlauben."

 #: sabnzbd/nzbstuff.py
```
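The German change above only swaps %s for %d so that the msgstr's conversion specifiers line up with the msgid's (%d, then %s). A mismatch like this is what `msgfmt --check-format` flags; the standalone Python sketch below (not part of SABnzbd, all names ours) shows one way to compare the placeholder sequences of the two strings:

```python
import re

# %-style conversion specifiers such as %d, %s or %02d (literal %% is ignored)
PLACEHOLDER = re.compile(r'%[#0\- +]*\d*(?:\.\d+)?[diouxXeEfFgGcrs]')

def placeholders(text):
    """Return the ordered list of conversion specifiers in a gettext string."""
    return PLACEHOLDER.findall(text.replace('%%', ''))

msgid = '%d files with duplicate filenames were discared for "%s".'
old_msgstr = '%s Dateien mit doppelten Dateinamen wurden für "%s" verworfen.'
new_msgstr = '%d Dateien mit doppelten Dateinamen wurden für "%s" verworfen.'

print(placeholders(msgid) == placeholders(old_msgstr))  # False: %s vs %d
print(placeholders(msgid) == placeholders(new_msgstr))  # True
```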
```diff
@@ -105,8 +105,8 @@ msgstr "Posts will be paused until they are at least this age. Setting job prior
 msgid "Support the project, Donate!"
 msgstr "Support the project, donate!"

-msgid "%d files with duplicate filenames were discared for "%s". Enable "allow_duplicate_files" to allow duplicate filenames."
-msgstr "%d files with duplicate filenames were discarded for "%s". Enable "allow_duplicate_files" to allow duplicate filenames."
+msgid "%d files with duplicate filenames were discared for \"%s\". Enable \"allow_duplicate_files\" to allow duplicate filenames."
+msgstr "%d files with duplicate filenames were discarded for \"%s\". Enable \"allow_duplicate_files\" to allow duplicate filenames."

 msgid "User script can flag job as failed"
 msgstr "Post-processing script can flag job as failed"
```
```diff
@@ -8,14 +8,14 @@ msgstr ""
 "Project-Id-Version: sabnzbd\n"
 "Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
 "POT-Creation-Date: 2017-08-06 09:51+0000\n"
-"PO-Revision-Date: 2017-07-27 19:33+0000\n"
+"PO-Revision-Date: 2017-08-09 21:46+0000\n"
 "Last-Translator: Safihre <safihre@sabnzbd.org>\n"
 "Language-Team: Dutch <nl@li.org>\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
 "Content-Transfer-Encoding: 8bit\n"
-"X-Launchpad-Export-Date: 2017-08-07 05:53+0000\n"
-"X-Generator: Launchpad (build 18441)\n"
+"X-Launchpad-Export-Date: 2017-08-10 06:03+0000\n"
+"X-Generator: Launchpad (build 18446)\n"

 #: SABnzbd.py [Error message]
 msgid "Failed to start web-interface"
@@ -1216,6 +1216,9 @@ msgid ""
 "%d files with duplicate filenames were discared for \"%s\". Enable "
 "\"allow_duplicate_files\" to allow duplicate filenames."
 msgstr ""
+"Er zijn %d bestanden met dezelfde bestandsnaam niet toegevoegd aan opdracht "
+"\"%s\". Zet \"allow_duplicate_files\" aan om dubbele bestandsnamen toe te "
+"staan."

 #: sabnzbd/nzbstuff.py
 msgid "Aborted, cannot be completed"
```
```diff
@@ -86,15 +86,14 @@ class Assembler(Thread):
                     continue

                 # Prepare filename
-                filename = sanitize_filename(nzf.filename)
-                nzf.filename = filename
-                dupe = nzo.check_for_dupe(nzf)
-                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, filename)
+                nzo.verify_nzf_filename(nzf)
+                nzf.filename = sanitize_filename(nzf.filename)
+                filepath = get_filepath(long_path(cfg.download_dir.get_path()), nzo, nzf.filename)

                 if filepath:
                     logging.info('Decoding %s %s', filepath, nzf.type)
                     try:
-                        filepath = self.assemble(nzf, filepath, dupe)
+                        filepath = self.assemble(nzf, filepath)
                     except IOError, (errno, strerror):
                         # If job was deleted, ignore error
                         if not nzo.is_gone():
@@ -110,19 +109,23 @@ class Assembler(Thread):
                         logging.error(T('Fatal error in Assembler'), exc_info=True)
                         break

                 # Clean-up admin data
                 nzf.remove_admin()
-                setname = nzf.setname
-                if nzf.is_par2 and (nzo.md5packs.get(setname) is None):
-                    pack = self.parse_par2_file(filepath, nzo.md5of16k)
-                    if pack:
-                        nzo.md5packs[setname] = pack
-                        logging.debug('Got md5pack for set %s', setname)
-                        # Valid md5pack, so use this par2-file as main par2 file for the set
-                        if setname in nzo.partable:
-                            # First copy the set of extrapars, we need them later
-                            nzf.extrapars = nzo.partable[setname].extrapars
-                        nzo.partable[setname] = nzf
+
+                # Parse par2 files
+                if nzf.is_par2:
+                    # Always parse par2 files to get new md5of16k info
+                    pack = self.parse_par2_file(nzf, filepath)
+                    if pack and (nzo.md5packs.get(nzf.setname) is None):
+                        nzo.md5packs[nzf.setname] = pack
+                        logging.debug('Got md5pack for set %s', nzf.setname)
+                        # Valid md5pack, so use this par2-file as main par2 file for the set
+                        if nzf.setname in nzo.partable:
+                            # First copy the set of extrapars, we need them later
+                            nzf.extrapars = nzo.partable[nzf.setname].extrapars
+                        nzo.partable[nzf.setname] = nzf

                 # Encryption and unwanted extension detection
                 rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath)
                 if rar_encrypted:
                     if cfg.pause_on_pwrar() == 1:
@@ -161,15 +164,8 @@
                 sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False)
                 PostProcessor.do.process(nzo)

-    def assemble(self, nzf, path, dupe):
+    def assemble(self, nzf, path):
         """ Assemble a NZF from its table of articles """
-        if os.path.exists(path):
-            unique_path = get_unique_filename(path)
-            if dupe:
-                path = unique_path
-            else:
-                renamer(path, unique_path)
-
         md5 = hashlib.md5()
         fout = open(path, 'ab')
         decodetable = nzf.decodetable
@@ -200,7 +196,7 @@

         return path

-    def parse_par2_file(self, fname, table16k):
+    def parse_par2_file(self, nzf, fname):
         """ Get the hash table and the first-16k hash table from a PAR2 file
             Return as dictionary, indexed on names or hashes for the first-16 table
             For a full description of the par2 specification, visit:
@@ -220,8 +216,8 @@
                 name, hash, hash16k = parse_par2_file_packet(f, header)
                 if name:
                     table[name] = hash
-                    if hash16k not in table16k:
-                        table16k[hash16k] = name
+                    if hash16k not in nzf.nzo.md5of16k:
+                        nzf.nzo.md5of16k[hash16k] = name
                     else:
                         # Not unique, remove to avoid false-renames
                         duplicates16k.append(hash16k)
@@ -240,10 +236,18 @@
         # Have to remove duplicates at the end to make sure
         # no trace is left in case of multi-duplicates
         for hash16k in duplicates16k:
-            if hash16k in table16k:
-                old_name = table16k.pop(hash16k)
+            if hash16k in nzf.nzo.md5of16k:
+                old_name = nzf.nzo.md5of16k.pop(hash16k)
                 logging.debug('Par2-16k signature of %s not unique, discarding', old_name)

+        # If the filename was changed (duplicate filename) check if we already have the set
+        base_fname = os.path.split(fname)[1]
+        if table and base_fname != nzf.filename and table not in nzf.nzo.md5packs.values():
+            # Re-parse this par2 file to create new set
+            nzf.filename = base_fname
+            nzf.is_par2 = False
+            nzf.nzo.handle_par2(nzf, True)
+
         return table
```
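The assembler changes above revolve around the par2 md5-of-16k idea: each file in a par2 set is identified by the MD5 of its first 16 KiB, so a renamed or obfuscated download can still be matched to the name declared in the par2 packets and stored on the job. Below is a minimal standalone sketch of that lookup, with hypothetical names (md5of16k_table, verify_filename) standing in for the nzf/nzo plumbing:

```python
import hashlib

def md5_of_first_16k(path):
    """MD5 digest of a file's first 16 KiB, the key used to match par2 info to files."""
    with open(path, 'rb') as f:
        return hashlib.md5(f.read(16384)).digest()

# Hypothetical table filled while parsing a par2 file: digest -> declared filename
md5of16k_table = {}

def verify_filename(path, current_name):
    """Return the par2-declared name when the 16k digest matches, else keep the current name."""
    return md5of16k_table.get(md5_of_first_16k(path), current_name)
```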
```diff
@@ -31,8 +31,8 @@ from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE, SABY
 import sabnzbd.articlecache
 import sabnzbd.downloader
 import sabnzbd.nzbqueue
-from sabnzbd.encoding import yenc_name_fixer, platform_encode
-from sabnzbd.misc import match_str, is_obfuscated_filename
+from sabnzbd.encoding import yenc_name_fixer
+from sabnzbd.misc import match_str

 # Check for basic-yEnc
 try:
@@ -336,26 +336,8 @@
         if article.partnum == nzf.lowest_partnum:
             nzf.md5of16k = hashlib.md5(decoded_data[:16384]).digest()

-        # If we have the md5, use it to rename
-        if nzf.md5of16k:
-            # Don't check again, even if no match
-            nzf.filename_checked = True
-            # Find the match and rename
-            if nzf.md5of16k in nzf.nzo.md5of16k:
-                new_filename = platform_encode(nzf.nzo.md5of16k[nzf.md5of16k])
-                # Was it even new?
-                if new_filename != nzf.filename:
-                    logging.info('Detected filename based on par2: %s -> %s', nzf.filename, new_filename)
-                    nzf.nzo.renamed_file(new_filename, nzf.filename)
-                    nzf.filename = new_filename
-                return
-
-        # Fallback to yenc/nzb name (also when there is no partnum=1)
-        # We also keep the NZB name in case it ends with ".par2" (usually correct)
-        if yenc_filename != nzf.filename and not is_obfuscated_filename(yenc_filename) and not nzf.filename.endswith('.par2'):
-            logging.info('Detected filename from yenc: %s -> %s', nzf.filename, yenc_filename)
-            nzf.nzo.renamed_file(yenc_filename, nzf.filename)
-            nzf.filename = yenc_filename
+        # Try the rename
+        nzf.nzo.verify_nzf_filename(nzf, yenc_filename)


 def yCheck(data):
```
```diff
@@ -42,9 +42,11 @@ import sabnzbd.config as config
 import sabnzbd.cfg as cfg
 from sabnzbd.encoding import unicoder, special_fixer, gUTF

-RE_VERSION = re.compile(r'(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)')
-RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
 TAB_UNITS = ('', 'K', 'M', 'G', 'T', 'P')
+RE_UNITS = re.compile(r'(\d+\.*\d*)\s*([KMGTP]{0,1})', re.I)
+RE_VERSION = re.compile(r'(\d+)\.(\d+)\.(\d+)([a-zA-Z]*)(\d*)')
+RE_IP4 = re.compile(r'inet\s+(addr:\s*){0,1}(\d+\.\d+\.\d+\.\d+)')
+RE_IP6 = re.compile(r'inet6\s+(addr:\s*){0,1}([0-9a-f:]+)', re.I)

 # Check if strings are defined for AM and PM
 HAVE_AMPM = bool(time.strftime('%p', time.localtime()))
@@ -92,6 +94,15 @@ def calc_age(date, trans=False):
     return age


+def monthrange(start, finish):
+    """ Calculate months between 2 dates, used in the Config template """
+    months = (finish.year - start.year) * 12 + finish.month + 1
+    for i in xrange(start.month, months):
+        year = (i - 1) / 12 + start.year
+        month = (i - 1) % 12 + 1
+        yield datetime.date(year, month, 1)
+
+
 def safe_lower(txt):
     """ Return lowercased string. Return '' for None """
     if txt:
@@ -1397,8 +1408,6 @@ def find_on_path(targets):
     return None


-_RE_IP4 = re.compile(r'inet\s+(addr:\s*){0,1}(\d+\.\d+\.\d+\.\d+)')
-_RE_IP6 = re.compile(r'inet6\s+(addr:\s*){0,1}([0-9a-f:]+)', re.I)
 def ip_extract():
     """ Return list of IP addresses of this system """
     ips = []
@@ -1425,9 +1434,9 @@ def ip_extract():
         output = p.stdout.read()
         p.wait()
         for line in output.split('\n'):
-            m = _RE_IP4.search(line)
+            m = RE_IP4.search(line)
             if not (m and m.group(2)):
-                m = _RE_IP6.search(line)
+                m = RE_IP6.search(line)
             if m and m.group(2):
                 ips.append(m.group(2))
     return ips
```
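The monthrange() generator added above replaces the template's old days/(365/12) estimate with a calendar-accurate walk over month starts, which is what keeps the server-graph date selector consistent across time zones. A small standalone sketch of the helper and of how the Config template consumes it (the dates are illustrative; // keeps the division integral on Python 3, matching the Python 2 original):

```python
import datetime

def monthrange(start, finish):
    """ Calculate months between 2 dates, used in the Config template """
    months = (finish.year - start.year) * 12 + finish.month + 1
    for i in range(start.month, months):  # xrange in the Python 2 original
        year = (i - 1) // 12 + start.year
        month = (i - 1) % 12 + 1
        yield datetime.date(year, month, 1)

# Roughly what the date selector does: list every recorded month, newest first.
recorded = list(monthrange(datetime.date(2017, 3, 14), datetime.date(2017, 8, 9)))
recorded.reverse()
for first_of_month in recorded:
    print('%d-%02d' % (first_of_month.year, first_of_month.month))  # 2017-08 ... 2017-03
```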
```diff
@@ -45,7 +45,7 @@ from sabnzbd.constants import GIGI, ATTRIB_FILE, JOB_ADMIN, \
 from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
     get_unique_path, get_admin_path, remove_all, sanitize_filename, globber_full, \
     int_conv, set_permissions, format_time_string, long_path, trim_win_path, \
-    fix_unix_encoding, calc_age
+    fix_unix_encoding, calc_age, is_obfuscated_filename
 from sabnzbd.decorators import synchronized
 import sabnzbd.config as config
 import sabnzbd.cfg as cfg
@@ -558,7 +558,7 @@ NzbObjectSaver = (
     'filename', 'work_name', 'final_name', 'created', 'bytes', 'bytes_downloaded', 'bytes_tried',
     'repair', 'unpack', 'delete', 'script', 'cat', 'url', 'groups', 'avg_date', 'md5of16k',
     'partable', 'extrapars', 'md5packs', 'files', 'files_table', 'finished_files', 'status',
-    'avg_bps_freq', 'avg_bps_total', 'priority', 'dupe_table', 'saved_articles', 'nzo_id',
+    'avg_bps_freq', 'avg_bps_total', 'priority', 'saved_articles', 'nzo_id',
     'futuretype', 'deleted', 'parsed', 'action_line', 'unpack_info', 'fail_msg', 'nzo_info',
     'custom_name', 'password', 'next_save', 'save_timeout', 'encrypted', 'bad_articles',
     'duplicate', 'oversized', 'precheck', 'incomplete', 'reuse', 'meta',
@@ -651,8 +651,6 @@ class NzbObject(TryList):
             priority = DEFAULT_PRIORITY
         self.priority = priority

-        self.dupe_table = {}
-
         self.saved_articles = []

         self.nzo_id = None
@@ -939,21 +937,6 @@
             # Raise error, so it's not added
             raise TypeError

-    def check_for_dupe(self, nzf):
-        filename = nzf.filename
-
-        dupe = False
-
-        if filename in self.dupe_table:
-            old_nzf = self.dupe_table[filename]
-            if nzf.article_count <= old_nzf.article_count:
-                dupe = True
-
-        if not dupe:
-            self.dupe_table[filename] = nzf
-
-        return dupe
-
     @synchronized(NZO_LOCK)
     def update_download_stats(self, bps, serverid, bytes):
         if bps:
@@ -995,7 +978,8 @@
             if head and matcher(lparset, head.lower()):
                 xnzf.set_par2(parset, vol, block)
                 # Don't postpone if all par2 are desired and should be kept
-                if not(cfg.enable_all_par() and not cfg.enable_par_cleanup()):
+                # Also don't postpone header-only-files, to extract all possible md5of16k
+                if not(cfg.enable_all_par() and not cfg.enable_par_cleanup()) and block:
                     self.extrapars[parset].append(xnzf)
                     self.files.remove(xnzf)
@@ -1489,6 +1473,33 @@
             self.files[pos + 1] = nzf
             self.files[pos] = tmp_nzf

+    def verify_nzf_filename(self, nzf, yenc_filename=None):
+        """ Get filename from par2-info or from yenc """
+        # Already done?
+        if nzf.filename_checked:
+            return
+
+        # If we have the md5, use it to rename
+        if nzf.md5of16k and self.md5of16k:
+            # Don't check again, even if no match
+            nzf.filename_checked = True
+            # Find the match and rename
+            if nzf.md5of16k in self.md5of16k:
+                new_filename = platform_encode(self.md5of16k[nzf.md5of16k])
+                # Was it even new?
+                if new_filename != nzf.filename:
+                    logging.info('Detected filename based on par2: %s -> %s', nzf.filename, new_filename)
+                    self.renamed_file(new_filename, nzf.filename)
+                    nzf.filename = new_filename
+                return
+
+        # Fallback to yenc/nzb name (also when there is no partnum=1)
+        # We also keep the NZB name in case it ends with ".par2" (usually correct)
+        if yenc_filename and yenc_filename != nzf.filename and not is_obfuscated_filename(yenc_filename) and not nzf.filename.endswith('.par2'):
+            logging.info('Detected filename from yenc: %s -> %s', nzf.filename, yenc_filename)
+            self.renamed_file(yenc_filename, nzf.filename)
+            nzf.filename = yenc_filename
+
     @synchronized(NZO_LOCK)
     def renamed_file(self, name_set, old_name=None):
         """ Save renames at various stages (Download/PP)
```
```diff
@@ -28,12 +28,28 @@ import time
 ##############################################################################
 # Power management for Windows
 ##############################################################################
+try:
+    import win32security
+    import win32api
+    import ntsecuritycon
+except ImportError:
+    pass
+
+
+def win_power_privileges():
+    """ To do any power-options, the process needs higher privileges """
+    flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
+    htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
+    id_ = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
+    newPrivileges = [(id_, ntsecuritycon.SE_PRIVILEGE_ENABLED)]
+    win32security.AdjustTokenPrivileges(htoken, 0, newPrivileges)
+
+
 def win_hibernate():
     """ Hibernate Windows system, returns after wakeup """
     try:
-        subprocess.Popen("rundll32 powrprof.dll,SetSuspendState Hibernate")
-        time.sleep(10)
+        win_power_privileges()
+        win32api.SetSystemPowerState(False, True)
     except:
         logging.error(T('Failed to hibernate system'))
         logging.info("Traceback: ", exc_info=True)
@@ -42,8 +58,8 @@ def win_hibernate():
 def win_standby():
     """ Standby Windows system, returns after wakeup """
     try:
-        subprocess.Popen("rundll32 powrprof.dll,SetSuspendState Standby")
-        time.sleep(10)
+        win_power_privileges()
+        win32api.SetSystemPowerState(True, True)
     except:
         logging.error(T('Failed to standby system'))
         logging.info("Traceback: ", exc_info=True)
@@ -52,15 +68,7 @@ def win_standby():
 def win_shutdown():
     """ Shutdown Windows system, never returns """
     try:
-        import win32security
-        import win32api
-        import ntsecuritycon
-
-        flags = ntsecuritycon.TOKEN_ADJUST_PRIVILEGES | ntsecuritycon.TOKEN_QUERY
-        htoken = win32security.OpenProcessToken(win32api.GetCurrentProcess(), flags)
-        id_ = win32security.LookupPrivilegeValue(None, ntsecuritycon.SE_SHUTDOWN_NAME)
-        newPrivileges = [(id_, ntsecuritycon.SE_PRIVILEGE_ENABLED)]
-        win32security.AdjustTokenPrivileges(htoken, 0, newPrivileges)
+        win_power_privileges()
         win32api.InitiateSystemShutdown("", "", 30, 1, 0)
     finally:
         os._exit(0)
```
```diff
@@ -86,17 +86,20 @@ def CreateProcess(executable, args, _p_attr, _t_attr,
     Python implementation of CreateProcess using CreateProcessW for Win32

     """

     si = STARTUPINFOW(
         dwFlags=startup_info.dwFlags,
         wShowWindow=startup_info.wShowWindow,
         cb=sizeof(STARTUPINFOW),
-        ## XXXvlab: not sure of the casting here to ints.
-        hStdInput=int(startup_info.hStdInput),
-        hStdOutput=int(startup_info.hStdOutput),
-        hStdError=int(startup_info.hStdError),
     )

+    # Only cast to ints when it's given
+    if startup_info.hStdInput:
+        si.hStdInput = int(startup_info.hStdInput)
+    if startup_info.hStdOutput:
+        si.hStdOutput = int(startup_info.hStdOutput)
+    if startup_info.hStdError:
+        si.hStdError = int(startup_info.hStdError)
+
     wenv = None
     if env is not None:
         ## LPCWSTR seems to be c_wchar_p, so let's say CWSTR is c_wchar
```
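The final hunk replaces unconditional int() casts of the standard handles with guarded assignments, presumably because the handles are unset (falsy) when no redirection is requested and int(None) raises TypeError. A tiny illustration of the guarded-cast pattern (safe_handle is our name, not SABnzbd's):

```python
def safe_handle(value):
    """Cast a Windows handle-like value to int only when one was actually given."""
    return int(value) if value else None

print(safe_handle(None))  # None, instead of TypeError from int(None)
print(safe_handle(7))     # 7
```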