Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-05 22:20:21 -05:00)

Compare commits: 2.3.5...2.3.6Beta1 (18 commits)
Commits (SHA1):

7e5c6d1c04
96b140dee0
2e098b641f
6678cb9d56
4b67405d16
7463a4abdc
163523048b
4892bc18f3
217b2436f2
a9247ba934
8b2a6ef825
320495671b
5ab872afa0
7ecb31805e
e8ebeb843c
3840678913
da7082b17e
6198f95e1e
@@ -1,5 +1,5 @@
 *******************************************
-*** This is SABnzbd 2.3.5 ***
+*** This is SABnzbd 2.3.6 ***
 *******************************************
 SABnzbd is an open-source cross-platform binary newsreader.
 It simplifies the process of downloading from Usenet dramatically,
@@ -1,4 +1,4 @@
-SABnzbd 2.3.5
+SABnzbd 2.3.6

 -------------------------------------------------------------------------------
 0) LICENSE
PKG-INFO (4 changes)
@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 2.3.5
-Summary: SABnzbd-2.3.5
+Version: 2.3.6Beta1
+Summary: SABnzbd-2.3.6Beta1
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
README.mkd (32 changes)
@@ -1,24 +1,20 @@
-Release Notes - SABnzbd 2.3.5
+Release Notes - SABnzbd 2.3.6 Beta 1
 =========================================================

-## Bug fixes since 2.3.4
-- Reworked Deobfuscate.py script for much faster renaming
-- All scripts can now receive input through environment variables
-- Unable to set only one Indexer Category per category
-- Could falsely report not enough blocks are available for repair
-- Failures in un-(7)zip or file-joining would not fail the job
-- Direct Unpack could abort unnecessarily
-- Rare crash during file assembly
-- Server hostname is now used in warnings and logs
-- Improved disk performance measurement
-- Overall improvements in stability and reliability
-- Windows: MultiPar repair of joinable files could fail
-- Windows: Tray icon also shows remaining size when paused
-- Windows: Wizard would not default to installer language
-- Windows: Update MultiPar to 1.3.0.1
-- Windows and macOS: Update UnRar to 5.60
+## Improvements and bug fixes since 2.3.5
+- RSS source icon on all tabs of feed overview
+- RSS source icon now links to feed details page (if available)
+- RSS feed URL's with commas would be wrongly escaped
+- Common RSS login problems will show more appropriate error
+- Added API-call to modify RSS-filters
+- Exceeding disk space could result in endless retry-loop
+- History Retry All would not retry failed NZB URL-fetches
+- API-call of Retry could result in an error
+- Assume correct SSL/certificate setup if test-host was disabled
+- Better logging of par2-file creator
+- Windows and macOS: Update UnRar to 5.61

-Looking for help with SABnzbd development:
+Looking for help with SABnzbd (Python 3) development:
 https://www.reddit.com/r/usenet/918nxv/

 ## Upgrading from 2.2.x and older
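One item above, scripts receiving job details through environment variables, is easiest to see with a small example. A hedged sketch of a post-processing script; the SAB_* variable names are assumptions based on the SABnzbd documentation, not part of this diff:

import os

# Hedged sketch: SABnzbd exports job details to scripts as SAB_*
# environment variables; the exact names used here are assumptions.
complete_dir = os.environ.get('SAB_COMPLETE_DIR', '')
category = os.environ.get('SAB_CAT', '')
final_name = os.environ.get('SAB_FINAL_NAME', '')

if complete_dir:
    print('Job "%s" finished in %s (category: %s)' % (final_name, complete_dir, category))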
@@ -390,9 +390,10 @@
 <th class="no-sort">$T('link-download')</th>
 <th>$T('rss-filter')</th>
 <th>$T('size')</th>
-<th width="65%">$T('sort-title')</th>
+<th width="60%">$T('sort-title')</th>
 <th>$T('category')</th>
 <th class="default-sort">$T('nzo-age')</th>
+<th>$T('source')</th>
 </tr>
 </thead>
 <!--#for $job in $matched#-->
@@ -411,6 +412,13 @@
 <td>$job['title']</td>
 <td>$job['cat']</td>
 <td data-sort-value="$job['age_ms']">$job['age']</td>
+<td data-sort-value="$job['baselink']" title="$job['baselink']">
+<!--#if not $job['infourl']#-->
+<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
+<!--#else#-->
+<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
+<!--#end if#-->
+</td>
 </tr>
 <!--#end for#-->
 </table>
@@ -426,9 +434,10 @@
 <th class="no-sort">$T('link-download')</th>
 <th>$T('rss-filter')</th>
 <th>$T('size')</th>
-<th width="65%">$T('sort-title')</th>
+<th width="60%">$T('sort-title')</th>
 <th>$T('category')</th>
 <th class="default-sort">$T('nzo-age')</th>
+<th>$T('source')</th>
 </tr>
 </thead>
 <!--#for $job in $unmatched#-->
@@ -447,6 +456,13 @@
 <td>$job['title']</td>
 <td>$job['cat']</td>
 <td data-sort-value="$job['age_ms']">$job['age']</td>
+<td data-sort-value="$job['baselink']" title="$job['baselink']">
+<!--#if not $job['infourl']#-->
+<div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
+<!--#else#-->
+<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
+<!--#end if#-->
+</td>
 </tr>
 <!--#end for#-->
 </table>
@@ -476,8 +492,10 @@
 <td>$job['title']</td>
 <td>$job['cat']</td>
 <td data-sort-value="$job['baselink']" title="$job['baselink']">
 <!--#if $job['baselink']#-->
+<!--#if not $job['infourl']#-->
 <div class="favicon source-icon" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></div>
+<!--#else#-->
+<a class="favicon source-icon" href="$job['infourl']" target="_blank" style="background-image: url(//$job['baselink']/favicon.ico);" data-domain="$job['baselink']"></a>
+<!--#end if#-->
 </td>
 </tr>
@@ -573,6 +573,7 @@ h2.activeRSS {
 float: left;
 margin: 0 6px 0 2px;
 text-align: center;
+color: black !important;
 }
 .source-icon span {
 top: -3px;
@@ -53,7 +53,7 @@ the various releases.
 2.4.2 2.4.1 2005 PSF yes
 2.4.3 2.4.2 2006 PSF yes
 2.5 2.4 2006 PSF yes
-2.5.1 2.5 2007 PSF yes
+2.7 2.6 2010 PSF yes

 Footnotes:
@@ -89,9 +89,9 @@ license to reproduce, analyze, test, perform and/or display publicly,
 prepare derivative works, distribute, and otherwise use Python
 alone or in any derivative version, provided, however, that PSF's
 License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
-2001, 2002, 2003, 2004, 2005, 2006, 2007 Python Software Foundation;
-All Rights Reserved" are retained in Python alone or in any derivative
-version prepared by Licensee.
+2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights
+Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.

 3. In the event Licensee prepares a derivative work that is based on
 or incorporates Python or any part thereof, and wants to make
BIN osx/unrar/unrar (binary file not shown)
The following translation files changed; their diffs were suppressed because they are too large:

po/main/da.po (344)
po/main/de.po (354)
po/main/es.po (344)
po/main/fi.po (344)
po/main/fr.po (346)
po/main/he.po (400)
po/main/nb.po (344)
po/main/nl.po (344)
po/main/pl.po (344)
po/main/pt_BR.po (344)
po/main/ro.po (344)
po/main/ru.po (348)
po/main/sr.po (344)
po/main/sv.po (344)
po/main/zh_CN.po (344)
@@ -202,7 +202,7 @@ def sig_handler(signum=None, frame=None):
 INIT_LOCK = Lock()


-def connect_db(thread_index=0):
+def get_db_connection(thread_index=0):
     # Create a connection and store it in the current thread
     if not (hasattr(cherrypy.thread_data, 'history_db') and cherrypy.thread_data.history_db):
         cherrypy.thread_data.history_db = sabnzbd.database.HistoryDB()
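The renamed helper caches one HistoryDB connection per CherryPy worker thread (cherrypy.thread_data is a thread-local object). A minimal sketch of the same per-thread caching pattern using only the standard library; SABnzbd's HistoryDB wraps sqlite3 as well, but this factory and path are illustrative assumptions:

import sqlite3
import threading

_local = threading.local()

def get_db_connection(db_path=':memory:'):
    # Reuse the connection this thread already opened, if any;
    # sqlite3 connections must not be shared between threads.
    if getattr(_local, 'history_db', None) is None:
        _local.history_db = sqlite3.connect(db_path)
    return _local.history_db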
@@ -223,7 +223,7 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
     __SHUTTING_DOWN__ = False

     # Set global database connection for Web-UI threads
-    cherrypy.engine.subscribe('start_thread', connect_db)
+    cherrypy.engine.subscribe('start_thread', get_db_connection)

     # Paused?
     pause_downloader = pause_downloader or cfg.start_paused()
@@ -1195,6 +1195,10 @@ def test_cert_checking():
     On systems with at least Python > 2.7.9
     """
     if sabnzbd.HAVE_SSL_CONTEXT:
+        # User disabled the test, assume proper SSL certificates
+        if not cfg.selftest_host():
+            return True
+
         # Try a connection to our test-host
         try:
             import ssl
             ctx = ssl.create_default_context()
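For context, a self-contained sketch of the kind of check this function performs: open a TLS connection to a test host with a default context, which fails if certificate validation is broken on the system. The host name below is a placeholder, not SABnzbd's actual selftest host:

import socket
import ssl

def can_validate_certificates(test_host='example.com', timeout=10):
    # ssl.create_default_context() enables certificate and hostname
    # verification; the handshake fails if the CA store is unusable.
    ctx = ssl.create_default_context()
    try:
        sock = socket.create_connection((test_host, 443), timeout=timeout)
        wrapped = ctx.wrap_socket(sock, server_hostname=test_host)
        wrapped.close()
        return True
    except (ssl.SSLError, socket.error):
        return False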
@@ -501,7 +501,7 @@ def _api_history(name, output, kwargs):
     special = value.lower()
     del_files = bool(int_conv(kwargs.get('del_files')))
     if special in ('all', 'failed', 'completed'):
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         if special in ('all', 'failed'):
             if del_files:
                 del_job_files(history_db.get_failed_paths(search))
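As a usage illustration (not part of the diff), the special values handled above correspond to History API calls along these lines; host, port and API key are placeholders and the parameter set is a hedged reading of the handler:

import requests

# Delete all failed history entries, including their files on disk.
response = requests.get('http://127.0.0.1:8080/api', params={
    'mode': 'history',
    'name': 'delete',
    'value': 'failed',   # one of the special values checked above
    'del_files': 1,
    'apikey': 'YOUR_API_KEY',
    'output': 'json',
})
print(response.json())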
@@ -1149,6 +1149,24 @@ def handle_rss_api(output, kwargs):
         feed.set_dict(kwargs)
     else:
         config.ConfigRSS(name, kwargs)

+    action = kwargs.get('filter_action')
+    if action in ('add', 'update'):
+        # Use the general function, but catch the redirect-raise
+        try:
+            kwargs['feed'] = name
+            sabnzbd.interface.ConfigRss('/').internal_upd_rss_filter(**kwargs)
+        except cherrypy.HTTPRedirect:
+            pass
+
+    elif action == 'delete':
+        # Use the general function, but catch the redirect-raise
+        try:
+            kwargs['feed'] = name
+            sabnzbd.interface.ConfigRss('/').internal_del_rss_filter(**kwargs)
+        except cherrypy.HTTPRedirect:
+            pass
+
     return name
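The additions above reuse the web-UI handlers, whose normal exit is a cherrypy.HTTPRedirect raise aimed at the browser. A minimal sketch of that call-and-swallow pattern; the handler argument is hypothetical:

import cherrypy

def call_for_side_effect(handler, **kwargs):
    # Page handlers end by raising HTTPRedirect to send the browser
    # back to the config page; when invoked from the API we only want
    # the configuration change, so the redirect is swallowed.
    try:
        handler(**kwargs)
    except cherrypy.HTTPRedirect:
        pass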
@@ -1509,16 +1527,17 @@ def options_list(output):
     })


-def retry_job(job, new_nzb, password):
+def retry_job(job, new_nzb=None, password=None):
     """ Re enter failed job in the download queue """
     if job:
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         futuretype, url, pp, script, cat = history_db.get_other(job)
         if futuretype:
             if pp == 'X':
                 pp = None
-            sabnzbd.add_url(url, pp, script, cat)
+            nzo_id = sabnzbd.add_url(url, pp, script, cat)
             history_db.remove_history(job)
+            return nzo_id
         else:
             path = history_db.get_path(job)
             if path:
@@ -1530,8 +1549,13 @@ def retry_job(job, new_nzb, password):

 def retry_all_jobs():
     """ Re enter all failed jobs in the download queue """
-    history_db = sabnzbd.connect_db()
-    return NzbQueue.do.retry_all_jobs(history_db)
+    # Fetch all retryable folders from History
+    items = sabnzbd.api.build_history()[0]
+    nzo_ids = []
+    for item in items:
+        if item['retry']:
+            nzo_ids.append(retry_job(item['nzo_id']))
+    return nzo_ids


 def del_job_files(job_paths):
@@ -1548,7 +1572,7 @@ def del_hist_job(job, del_files):
     if path:
         PostProcessor.do.delete(job, del_files=del_files)
     else:
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         path = history_db.get_path(job)
         history_db.remove_history(job)
@@ -1759,7 +1783,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear

     # Aquire the db instance
     try:
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         close_db = False
     except:
         # Required for repairs at startup because Cherrypy isn't active yet
@@ -81,11 +81,6 @@ class Assembler(Thread):
                 # Abort all direct unpackers, just to be sure
                 sabnzbd.directunpacker.abort_all()

-                # Place job back in queue and wait 30 seconds to hope it gets resolved
-                self.process(job)
-                sleep(30)
-                continue
-
             # Prepare filename
             nzo.verify_nzf_filename(nzf)
             nzf.filename = sanitize_filename(nzf.filename)
@@ -990,7 +990,7 @@ def get_rss():
     for feed_uri in feed.uri():
         if new_feed_uris and not urlparse(feed_uri).scheme and urlparse(new_feed_uris[-1]).scheme:
             # Current one has no scheme but previous one does, append to previous
-            new_feed_uris[-1] += '%2C' + feed_uri
+            new_feed_uris[-1] += ',' + feed_uri
             have_new_uri = True
             continue
         # Add full working URL
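This is the fix for the release-note item about comma-escaped feed URLs: a feed URL that itself contains a comma gets split when the setting is stored as a comma-separated list, and a fragment without a scheme must be glued back with a literal comma, not the %2C escape. A small self-contained sketch of the re-joining logic; the sample URL is made up:

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2

def rejoin_uris(stored):
    # A stored value like 'https://a.example/rss?cats=1,2' was split
    # on the comma; '2' has no scheme, so append it to the previous URI.
    uris = []
    for fragment in stored.split(','):
        if uris and not urlparse(fragment).scheme and urlparse(uris[-1]).scheme:
            uris[-1] += ',' + fragment
        else:
            uris.append(fragment)
    return uris

print(rejoin_uris('https://a.example/rss?cats=1,2'))
# ['https://a.example/rss?cats=1,2']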
@@ -400,7 +400,7 @@ class HistoryDB(object):
         return name

     def get_path(self, nzo_id):
-        """ Return the `incomplete` path of the job `nzo_id` """
+        """ Return the `incomplete` path of the job `nzo_id` if it is still there """
         t = (nzo_id,)
         path = ''
         if self.execute('SELECT path FROM history WHERE nzo_id=?', t):
@@ -408,7 +408,9 @@ class HistoryDB(object):
             path = self.c.fetchone().get('path')
         except AttributeError:
             pass
-        return path
+        if os.path.exists(path):
+            return path
+        return None

     def get_other(self, nzo_id):
         """ Return additional data for job `nzo_id` """
@@ -421,9 +423,10 @@ class HistoryDB(object):
             pp = items.get('pp')
             script = items.get('script')
             cat = items.get('category')
+            return dtype, url, pp, script, cat
         except (AttributeError, IndexError):
-            return '', '', '', '', ''
-        return dtype, url, pp, script, cat
+            pass
+        return '', '', '', '', ''


 def dict_factory(cursor, row):
@@ -499,7 +499,7 @@ class MainPage(object):
         # No session key check, due to fixed URLs
         name = kwargs.get('name')
         if name:
-            history_db = sabnzbd.connect_db()
+            history_db = sabnzbd.get_db_connection()
             return ShowString(history_db.get_name(name), history_db.get_script_log(name))
         else:
             raise Raiser(self.__root)
@@ -1106,7 +1106,7 @@ class HistoryPage(object):

     @secured_expose(check_session_key=True)
     def purge(self, **kwargs):
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         history_db.remove_history()
         raise queueRaiser(self.__root, kwargs)
@@ -1133,7 +1133,7 @@ class HistoryPage(object):
     @secured_expose(check_session_key=True)
     def purge_failed(self, **kwargs):
         del_files = bool(int_conv(kwargs.get('del_files')))
-        history_db = sabnzbd.connect_db()
+        history_db = sabnzbd.get_db_connection()
         if del_files:
             del_job_files(history_db.get_failed_paths())
         history_db.remove_failed()
@@ -1173,7 +1173,7 @@ class HistoryPage(object):
         # No session key check, due to fixed URLs
         name = kwargs.get('name')
         if name:
-            history_db = sabnzbd.connect_db()
+            history_db = sabnzbd.get_db_connection()
             return ShowString(history_db.get_name(name), history_db.get_script_log(name))
         else:
             raise Raiser(self.__root)
@@ -1877,9 +1877,13 @@ class ConfigRss(object):

     @secured_expose(check_session_key=True, check_configlock=True)
     def upd_rss_filter(self, **kwargs):
+        """ Wrapper, so we can call from api.py """
+        self.internal_upd_rss_filter(**kwargs)
+
+    def internal_upd_rss_filter(self, **kwargs):
         """ Save updated filter definition """
         try:
-            cfg = config.get_rss()[kwargs.get('feed')]
+            feed_cfg = config.get_rss()[kwargs.get('feed')]
         except KeyError:
             raise rssRaiser(self.__root, kwargs)
@@ -1893,14 +1897,14 @@ class ConfigRss(object):
         enabled = kwargs.get('enabled', '0')

         if filt:
-            cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'),
-                               platform_encode(filt), prio, enabled))
+            feed_cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'),
+                                    platform_encode(filt), prio, enabled))

             # Move filter if requested
             index = int_conv(kwargs.get('index', ''))
             new_index = kwargs.get('new_index', '')
             if new_index and int_conv(new_index) != index:
-                cfg.filters.move(int(index), int_conv(new_index))
+                feed_cfg.filters.move(int(index), int_conv(new_index))

         config.save_config()
         self.__evaluate = False
@@ -1918,13 +1922,17 @@ class ConfigRss(object):

     @secured_expose(check_session_key=True, check_configlock=True)
     def del_rss_filter(self, **kwargs):
+        """ Wrapper, so we can call from api.py """
+        self.internal_del_rss_filter(**kwargs)
+
+    def internal_del_rss_filter(self, **kwargs):
         """ Remove one RSS filter """
         try:
-            cfg = config.get_rss()[kwargs.get('feed')]
+            feed_cfg = config.get_rss()[kwargs.get('feed')]
         except KeyError:
             raise rssRaiser(self.__root, kwargs)

-        cfg.filters.delete(int(kwargs.get('index', 0)))
+        feed_cfg.filters.delete(int(kwargs.get('index', 0)))
         config.save_config()
         self.__evaluate = False
         self.__show_eval_button = True
@@ -2522,6 +2530,7 @@ def GetRssLog(feed):
         # These fields could be empty
         job['cat'] = job.get('cat', '')
         job['size'] = job.get('size', '')
+        job['infourl'] = job.get('infourl', '')

         # Auto-fetched jobs didn't have these fields set
         if job.get('url'):
@@ -181,22 +181,6 @@ class NzbQueue(object):
             logging.info('Skipping repair for job %s', folder)
         return result

-    def retry_all_jobs(self, history_db):
-        """ Retry all retryable jobs in History """
-        result = []
-
-        # Retryable folders from History
-        items = sabnzbd.api.build_history()[0]
-        registered = [(platform_encode(os.path.basename(item['path'])),
-                       item['nzo_id'])
-                      for item in items if item['retry']]
-
-        for job in registered:
-            logging.info('Repairing job %s', job[0])
-            result.append(self.repair_job(job[0]))
-            history_db.remove_history(job[1])
-        return bool(result)
-
     def repair_job(self, folder, new_nzb=None, password=None):
         """ Reconstruct admin for a single job folder, optionally with new NZB """
         def all_verified(path):
@@ -165,8 +165,9 @@ def parse_par2_file_packet(f, header):
         filename = data[offset + 72:].strip('\0')
         return filename, hash, hash16k
     elif data[offset:offset + 15] == PAR_CREATOR_ID:
-        # Here untill the end is the creator-text
-        # Usefull in case of bugs in the par2-creating software
-        logging.debug('Par2-creator of %s is: %s', os.path.basename(f.name), data[offset+16:])
+        # From here until the end is the creator-text
+        # Useful in case of bugs in the par2-creating software
+        par2creator = data[offset+16:].strip('\0')  # Remove any trailing \0
+        logging.debug('Par2-creator of %s is: %s', os.path.basename(f.name), par2creator)

     return nothing
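For orientation, a self-contained sketch of pulling the creator string out of a PAR2 creator packet. The 15-byte marker matches the PAR 2.0 specification's "PAR 2.0\0Creator" type (padded to 16 bytes with a NUL); the offsets mirror the code above but are assumptions, and the sample packet body is made up:

PAR_CREATOR_ID = b'PAR 2.0\x00Creator'  # 15 bytes

def extract_par2_creator(packet_body, offset=0):
    # The packet-type field is 16 bytes ("PAR 2.0\0Creator" plus one
    # padding NUL); the creator text fills the rest, NUL-padded.
    if packet_body[offset:offset + 15] == PAR_CREATOR_ID:
        return packet_body[offset + 16:].rstrip(b'\x00').decode('ascii', 'replace')
    return None

print(extract_par2_creator(b'PAR 2.0\x00Creator\x00QuickPar 0.9\x00\x00'))
# QuickPar 0.9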
@@ -287,7 +287,7 @@ class RSSQueue(object):

             status = feed_parsed.get('status', 999)
             if status in (401, 402, 403):
-                msg = T('Do not have valid authentication for feed %s') % feed
+                msg = T('Do not have valid authentication for feed %s') % uri
                 logging.info(msg)

             if 500 <= status <= 599:
@@ -301,11 +301,14 @@ class RSSQueue(object):
                 msg = T('Server %s uses an untrusted HTTPS certificate') % get_urlbase(uri)
                 msg += ' - https://sabnzbd.org/certificate-errors'
                 logging.error(msg)
+            elif feed_parsed['href'] != uri and 'login' in feed_parsed['href']:
+                # Redirect to login page!
+                msg = T('Do not have valid authentication for feed %s') % uri
             else:
                 msg = T('Failed to retrieve RSS from %s: %s') % (uri, xml_name(msg))
             logging.info(msg)

-        if not entries:
+        if not entries and not msg:
             msg = T('RSS Feed %s was empty') % uri
             logging.info(msg)
         all_entries.extend(entries)
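The login-redirect heuristic relies on feedparser exposing the final URL after redirects as href. A standalone sketch of the same check; the feed URL passed in would be a real feed:

import feedparser

def feed_auth_problem(uri):
    # True when the server rejected us outright or quietly redirected
    # the request to a login page instead of serving the feed.
    parsed = feedparser.parse(uri)
    status = parsed.get('status', 999)
    if status in (401, 402, 403):
        return True
    href = parsed.get('href', uri)
    return href != uri and 'login' in href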
@@ -330,7 +333,7 @@ class RSSQueue(object):
             if readout:
                 try:
-                    link, category, size, age, season, episode = _get_link(entry)
+                    link, infourl, category, size, age, season, episode = _get_link(entry)
                 except (AttributeError, IndexError):
                     logging.info(T('Incompatible feed') + ' ' + uri)
                     logging.info("Traceback: ", exc_info=True)
@@ -350,6 +353,7 @@ class RSSQueue(object):
                     continue
             else:
                 link = entry
+                infourl = jobs[link].get('infourl', '')
                 category = jobs[link].get('orgcat', '')
                 if category in ('', '*'):
                     category = None
@@ -478,13 +482,13 @@ class RSSQueue(object):
                     else:
                         star = first
                     if result:
-                        _HandleLink(jobs, feed, link, title, size, age, season, episode, 'G', category, myCat, myPP,
-                                    myScript, act, star, priority=myPrio, rule=str(n))
+                        _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'G', category, myCat,
+                                    myPP, myScript, act, star, priority=myPrio, rule=str(n))
                         if act:
                             new_downloads.append(title)
                     else:
-                        _HandleLink(jobs, feed, link, title, size, age, season, episode, 'B', category, myCat, myPP,
-                                    myScript, False, star, priority=myPrio, rule=str(n))
+                        _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, 'B', category, myCat,
+                                    myPP, myScript, False, star, priority=myPrio, rule=str(n))

             # Send email if wanted and not "forced"
             if new_downloads and cfg.email_rss() and not force:
@@ -584,7 +588,7 @@ class RSSQueue(object):
         return ''


-def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgcat, cat, pp, script,
+def _HandleLink(jobs, feed, link, infourl, title, size, age, season, episode, flag, orgcat, cat, pp, script,
                 download, star, priority=NORMAL_PRIORITY, rule=0):
     """ Process one link """
     if script == '':
@@ -595,6 +599,7 @@ def _HandleLink(jobs, feed, link, title, size, age, season, episode, flag, orgca
     jobs[link] = {}
     jobs[link]['title'] = title
     jobs[link]['url'] = link
+    jobs[link]['infourl'] = infourl
     jobs[link]['cat'] = cat
     jobs[link]['pp'] = pp
     jobs[link]['script'] = script
@@ -641,6 +646,11 @@ def _get_link(entry):
     except:
         pass

+    # GUID usually has URL to result on page
+    infourl = None
+    if entry.id and entry.id != link and entry.id.startswith('http'):
+        infourl = entry.id
+
     if size == 0L:
         _RE_SIZE1 = re.compile(r'Size:\s*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
         _RE_SIZE2 = re.compile(r'\W*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I)
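The entry objects here come from feedparser, where id carries the item GUID; many indexers put the details-page URL there rather than the NZB link. A hedged standalone sketch (the feed URL is a placeholder):

import feedparser

parsed = feedparser.parse('https://indexer.example/rss')  # placeholder URL
for entry in parsed.entries:
    link = entry.get('link')
    infourl = None
    # GUIDs are often the human-readable details page, not the download
    if entry.get('id') and entry.id != link and entry.id.startswith('http'):
        infourl = entry.id
    print('%s -> %s' % (link, infourl))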
@@ -690,10 +700,10 @@ def _get_link(entry):
         except:
             category = ''

-        return link, category, size, age, season, episode
+        return link, infourl, category, size, age, season, episode
     else:
         logging.warning(T('Empty RSS entry found (%s)'), link)
-        return None, '', 0L, None, 0, 0
+        return None, None, '', 0L, None, 0, 0


 def special_rss_site(url):
Binary file not shown.
Binary file not shown.