Mirror of https://github.com/sabnzbd/sabnzbd.git
Synced 2026-01-06 06:28:45 -05:00

Compare commits: 2.0.0Beta1 ... 1.2.2 (10 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 555d8418e7 |  |
|  | 8c22e35da4 |  |
|  | d32cf57c75 |  |
|  | 6d9242ebc5 |  |
|  | cbc4f6a964 |  |
|  | 2a3b2b9556 |  |
|  | 53a219f12b |  |
|  | 48519dcfa0 |  |
|  | 92542c58fe |  |
|  | 7eafe730f9 |  |
@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 2.0.0 ***
*** This is SABnzbd 1.2.x ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,
@@ -1,4 +1,4 @@
SABnzbd 2.0.0
SABnzbd 1.2.1

-------------------------------------------------------------------------------
0) LICENSE

@@ -68,13 +68,14 @@ Windows
Essential modules
cheetah-2.0.1+ use "pip install cheetah"
par2cmdline >= 0.4 http://parchive.sourceforge.net/
Note: https://sabnzbd.org/wiki/configuration/2.0/switches#par2cmdline
Note: https://sabnzbd.org/wiki/configuration/1.2/switches#par2cmdline
unrar >= 5.00+ http://www.rarlab.com/rar_add.htm

Optional modules
unzip >= 6.00 http://www.info-zip.org/
7zip >= 9.20 http://www.7zip.org/
sabyenc >= 2.7.0 use "pip install sabyenc" - https://sabnzbd.org/sabyenc
yenc module >= 0.4 use "pip install yenc"
https://sabnzbd.org/wiki/installation/yenc-0.4_py2.7.rar (Win32-only)
openssl => 1.0.0 http://www.openssl.org/
v0.9.8 will work, but limits certificate validation
cryptography >= 1.0 use "pip install cryptography"
@@ -24,13 +24,13 @@
For these the server blocking method is not very favourable.
There is an INI-only option that will limit blocks to 1 minute.
no_penalties = 1
See: https://sabnzbd.org/wiki/configuration/2.0/special
See: https://sabnzbd.org/wiki/configuration/1.2/special

- Some third-party utilties try to probe SABnzbd API in such a way that you will
often see warnings about unauthenticated access.
If you are sure these probes are harmless, you can suppress the warnings by
setting the option "api_warnings" to 0.
See: https://sabnzbd.org/wiki/configuration/2.0/special
See: https://sabnzbd.org/wiki/configuration/1.2/special

- On OSX you may encounter downloaded files with foreign characters.
The par2 repair may fail when the files were created on a Windows system.

@@ -41,7 +41,7 @@
You will see this only when downloaded files contain accented characters.
You need to fix it yourself by running the convmv utility (available for most Linux platforms).
Possible the file system override setting 'fsys_type' might be solve things:
See: https://sabnzbd.org/wiki/configuration/2.0/special
See: https://sabnzbd.org/wiki/configuration/1.2/special

- The "Watched Folder" sometimes fails to delete the NZB files it has
processed. This happens when other software still accesses these files.

@@ -81,4 +81,4 @@
- Squeeze Linux
There is a "special" option that will allow you to select an alternative library.
use_pickle = 1
See: https://sabnzbd.org/wiki/configuration/2.0/special
See: https://sabnzbd.org/wiki/configuration/1.2/special
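The special switches quoted in the hunks above (`no_penalties`, `api_warnings`, `use_pickle`) are plain key/value entries in `sabnzbd.ini`. A minimal sketch only: the option names are taken verbatim from the notes above, but placing them under a `[misc]` section is an assumption about the INI layout, so check the linked special-settings wiki page before editing.

```ini
# Sketch, not a verified config: [misc] as the section name is an assumption,
# the option names and values come from the notes above.
[misc]
no_penalties = 1
api_warnings = 0
use_pickle = 1
```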
PKG-INFO (4)

@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 2.0.0Beta1
Summary: SABnzbd-2.0.0Beta1
Version: 1.2.2
Summary: SABnzbd-1.2.2
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org
README.md (24)

@@ -1,25 +1,37 @@
SABnzbd - The automated Usenet download tool
============================================

This Unicode release is not compatible with 0.7.x queues!

There is also an issue with upgrading of the "sabnzbd.ini" file.
Make sure that you have a backup!

Saved queues may not be compatible after updates.

----

SABnzbd is an Open Source Binary Newsreader written in Python.

It's totally free, incredibly easy to use, and works practically everywhere.
SABnzbd makes Usenet as simple and streamlined as possible by automating everything we can. All you have to do is add an `.nzb`. SABnzbd takes over from there, where it will be automatically downloaded, verified, repaired, extracted and filed away with zero human interaction.

SABnzbd makes Usenet as simple and streamlined as possible by automating everything we can. All you have to do is add an .nzb. SABnzbd takes over from there, where it will be automatically downloaded, verified, repaired, extracted and filed away with zero human interaction.

If you want to know more you can head over to our website: http://sabnzbd.org.

## Resolving Dependencies

SABnzbd has a good deal of dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:
SABnzbd has a good deal of dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages floating around (Ubuntu, Debian, Fedora, etc), then you likely already have all the needed dependencies. If not, here's what you're looking for:

- `python` (only 2.7.x and higher, but not 3.x.x)
- `python-cheetah`
- `python-support`
- `par2` (Multi-threaded par2 installation guide can be found [here](https://forums.sabnzbd.org/viewtopic.php?f=16&t=18793#p99702))
- `unrar` (Make sure you get the "official" non-free version of unrar)
- `sabyenc` (via `pip`) or `python-yenc`

Optional:

- `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download)
- `python-yenc`
- `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish)
- `7zip`
- `unzip`

@@ -31,13 +43,13 @@ Your package manager should supply these. If not, we've got links in our more in
Once you've sorted out all the dependencies, simply run:

```
python -OO SABnzbd.py
python SABnzbd.py
```

Or, if you want to run in the background:

```
python -OO SABnzbd.py -d -f /path/to/sabnzbd.ini
python SABnzbd.py -d -f /path/to/sabnzbd.ini
```

If you want multi-language support, run:

@@ -56,7 +68,7 @@ Basically:
- `develop` is the target for integration and is **not** intended for end-users.
- `1.1.x` is a release and maintenance branch for 1.1.x (1.1.0 -> 1.1.1 -> 1.1.2) and is **not** intended for end-users.
- `feature/my_feature` is a temporary feature branch based on `develop`.
- `bugfix/my_bugfix` is an optional temporary branch for bugfix(es) based on `develop`.
- `hotfix/my_hotfix` is an optional temporary branch for bugfix(es) based on `develop`.

Conditions:
- Merging of a stable release into `master` will be simple: the release branch is always right.
README.mkd (119)

@@ -1,81 +1,64 @@
Release Notes - SABnzbd 2.0.0 Beta 1
=========================================================
Release Notes - SABnzbd 1.2.2
==============================================

## Changes since Alpha 1
- Windows and macOS releases now also come in 64bit versions.
The installers will install the appropriate version automatically.
Therefore, on 64bit Windows the installation directory will change to
'Program Files' instead of 'Program Files (x86)'.
On Windows our tests showed an additional 5-10% gain in download speed
when using 64bit SABnzbd on 64bit Windows.
- Linux: Detect if Multicore Par2 is installed.
Multicore Par2 is now easily available through the PPA and other channels:
https://sabnzbd.org/wiki/installation/multicore-par2
## Bug fix in 1.2.2
- Windows: job-directory incorrectly passed to PostProcessing-script

## Bug fixes since Alpha 1
- Not all servers with same priority were tried
- Unzip on Windows was not working
- Unrar on Windows could hang
- Restarting SABnzbd sometimes failed
- Reported 'Unknown encoding' warnings for some downloads
- Error on renaming a job in Plush
- Always show Queue search box when multi-editing
- SABnzbd password not displayed in plain text in the Config
## What's new in 1.2.1
- QuickCheck will perform fast rename of obfuscated posts
- RSS Downloaded page now shows icon to indicate source
- HTML tags are filtered from single-line script output
- New self-signed certificates now list local IP in SAN-list
- Handle jobs on Windows with forbidden names (Con.*, Aux.*,..)

## Bug fixes in 1.2.1
- Fix crashing Assembler
- 'Only Download Top of Queue' was broken for a long time
- Cloaked files (RAR within RAR) were not detected anymore
- Incorrectly labeled some downloads as Encrypted
- Passwords were not parsed correctly from filenames
- RSS reading could fail on missing attributes
- Multi-feed RSS will not stop if only 1 feed is not functioning
- Duplicate detection set to Fail would not work for RSS feeds
- Incorrectly marking jobs with folders inside as failed
- Categories were not matched properly if a list of tags was set
- PostProcessing-script was not called on Accept&Fail or Dupe detect
- Support for newer par2cmdline(-mt) versions that need -B parameter
- Some newsservers would timeout when connecting
- More robust detection of execute permissions for scripts
- CPU type reporting on Windows and macOS
- Failed to start with some localhost configs
- Removed some more stalling issues
- Retry rename 3x before falling back to copy during "Moving"
- Catch several SSL errors of the webserver
- Disk-space information is now only checked every 10 seconds

## New in 2.0.0: SABYenc
To improve SABnzbd's performance on systems where CPU power is limiting
download speed, we developed a new C-module called SABYenc to accelerate the
decoding of usenet articles. Depending on the hardware, download speed can
greatly increase.
The Windows and macOS releases automatically include this module, for other
platforms you can read more on: https://sabnzbd.org/sabyenc
If you experience issues, please report them on our Forums!
The module is not mandatory, the _yenc module will continue to work and
its performance will be similar.
## Translations
- Many translations updated, thanks to our translators!

## What's new in 2.0.0
- Post-processing scripts now get additional job information via SAB_*
environment variables - See: https://github.com/sabnzbd/sabnzbd/issues/785
- Certificate Validation set to Strict for newly added newsservers
- Schedule items can now be enabled and disabled
- Remove Secondary Web Interface option
- HTTP-redirects in interface are now relative URL's
- Moved some lesser used settings to Config->Specials
- Cache usage is now updated continuously in the Status Window
- On macOS SABnzbd was set to have low IO-priority, this is now set to normal
## About
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,
thanks to its web-based user interface and advanced
built-in post-processing options that automatically verify, repair,
extract and clean up posts downloaded from Usenet.

## Bug fixes in 2.0.0
- Warn in case encoding is not set to UTF-8
- Retry ADMIN-data saving 3x before giving error
(c) Copyright 2007-2017 by "The SABnzbd-team" \<team@sabnzbd.org\>

## Upgrade notices
- Windows: When starting the Post-Processing script, the path to the job folder
is no longer in short-path notation but includes the full path. To support
long paths (>255), you might need to alter them to long-path notation (\\?\).
- Schedule items are converted when upgrading to 2.x.x and will break when
reverted back to pre-2.x.x releases.
- The organization of the download queue is different from 0.7.x releases.
So 2.x.x will not see the existing queue, but you can go to Status->QueueRepair
and "Repair" the old queue.
### IMPORTANT INFORMATION about release 1.x.x
<https://sabnzbd.org/wiki/new-features-and-changes>

## Upgrading from 0.7.x and older
### Known problems and solutions
- Read the file "ISSUES.txt"

### Upgrading from 0.7.x and older
- Finish queue
- Stop SABnzbd
- Install new version
- Start SABnzbd

## IMPORTANT INFORMATION about release 2.x.x
<https://sabnzbd.org/wiki/new-features-and-changes>

## Known problems and solutions
- Read the file "ISSUES.txt"

## About
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically, thanks
to its web-based user interface and advanced built-in post-processing options
that automatically verify, repair, extract and clean up posts downloaded
from Usenet.

(c) Copyright 2007-2017 by "The SABnzbd-team" \<team@sabnzbd.org\>
The organization of the download queue is different from older versions.
1.x.x will not see the existing queue, but you can go to
Status->QueueRepair and "Repair" the old queue.
Also, your sabnzbd.ini file will be upgraded, making it
incompatible with releases older than 0.7.9
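The "What's new in 2.0.0" entry about `SAB_*` environment variables can be illustrated with a minimal post-processing script. This is a sketch only: it relies purely on the documented `SAB_` prefix and assumes nothing about the individual variable names (the linked issue #785 lists the actual set).

```python
#!/usr/bin/env python
# Sketch of a post-processing script: print every SAB_* environment variable
# that SABnzbd 2.0.0+ exports for the job (exact names: see issue 785).
import os

for name in sorted(os.environ):
    if name.startswith('SAB_'):
        print '%s = %s' % (name, os.environ[name])
```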
SABnzbd.py (156)

@@ -130,6 +130,24 @@ def guard_loglevel():
LOG_FLAG = True


class FilterCP3:
# Filter out all CherryPy3-Access logging that we receive,
# because we have the root logger

def __init__(self):
pass

def filter(self, record):
_cplogging = record.module == '_cplogging'
# Python2.4 fix
# record has no attribute called funcName under python 2.4
if hasattr(record, 'funcName'):
access = record.funcName == 'access'
else:
access = True
return not (_cplogging and access)


class guiHandler(logging.Handler):
""" Logging handler collects the last warnings/errors/exceptions
to be displayed in the web-gui

@@ -187,7 +205,7 @@ def print_help():
print " -2 --template2 <templ> Secondary template dir [*]"
print
print " -l --logging <0..2> Set logging level (-1=off, 0= least, 2= most) [*]"
print " -w --weblogging Enable cherrypy access logging"
print " -w --weblogging <0..2> Set cherrypy logging (0= off, 1= on, 2= file-only) [*]"
print
print " -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]"
if sabnzbd.WIN32:

@@ -286,6 +304,9 @@ def Web_Template(key, defweb, wdir):
logging.info("Web dir is %s", full_dir)

if not os.path.exists(full_main):
# Temporarily fix that allows missing Config
if defweb == DEF_STDCONFIG:
return ''
# end temp fix
logging.warning(T('Cannot find web template: %s, trying standard template'), full_main)
full_dir = real_path(sabnzbd.DIR_INTERFACES, DEF_STDINTF)

@@ -295,6 +316,8 @@ def Web_Template(key, defweb, wdir):
panic_tmpl(full_dir)
exit_sab(1)

# sabnzbd.lang.install_language(real_path(full_dir, DEF_INT_LANGUAGE), sabnzbd.cfg.language(), wdir)

return real_path(full_dir, "templates")


@@ -405,15 +428,10 @@ def GetProfileInfo(vista_plus):

def print_modules():
""" Log all detected optional or external modules """
if sabnzbd.decoder.HAVE_SABYENC:
logging.info("SABYenc module (v%s)... found!", sabnzbd.constants.SABYENC_VERSION)
if sabnzbd.decoder.HAVE_YENC:
logging.info("_yenc module... found!")
else:
logging.error("SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc", sabnzbd.constants.SABYENC_VERSION)
# Only now we care about old-yEnc
if sabnzbd.decoder.HAVE_YENC:
logging.info("_yenc module... found!")
else:
logging.error(T('_yenc module... NOT found!'))
logging.warning(T('_yenc module... NOT found!'))

if sabnzbd.HAVE_CRYPTOGRAPHY:
logging.info('Cryptography module (v%s)... found!', sabnzbd.HAVE_CRYPTOGRAPHY)

@@ -762,7 +780,7 @@ def commandline_handler(frozen=True):
try:
opts, args = getopt.getopt(info, "phdvncw:l:s:f:t:b:2:",
['pause', 'help', 'daemon', 'nobrowser', 'clean', 'logging=',
'weblogging', 'server=', 'templates', 'ipv6_hosting=',
'weblogging=', 'server=', 'templates', 'ipv6_hosting=',
'template2', 'browser=', 'config-file=', 'force',
'version', 'https=', 'autorestarted', 'repair', 'repair-all',
'log-all', 'no-login', 'pid=', 'new', 'console', 'pidfile=',

@@ -827,6 +845,7 @@ def main():
clean_up = False
logging_level = None
web_dir = None
web_dir2 = None
vista_plus = False
vista64 = False
force_web = False

@@ -860,6 +879,8 @@ def main():
exit_sab(0)
elif opt in ('-t', '--templates'):
web_dir = arg
elif opt in ('-2', '--template2'):
web_dir2 = arg
elif opt in ('-s', '--server'):
(cherryhost, cherryport) = split_host(arg)
elif opt in ('-n', '--nobrowser'):

@@ -874,7 +895,13 @@ def main():
elif opt in ('-c', '--clean'):
clean_up = True
elif opt in ('-w', '--weblogging'):
cherrypylogging = True
try:
cherrypylogging = int(arg)
except:
cherrypylogging = -1
if cherrypylogging < 0 or cherrypylogging > 2:
print_help()
exit_sab(1)
elif opt in ('-l', '--logging'):
try:
logging_level = int(arg)

@@ -1112,6 +1139,7 @@ def main():

logformat = '%(asctime)s::%(levelname)s::[%(module)s:%(lineno)d] %(message)s'
rollover_log.setFormatter(logging.Formatter(logformat))
rollover_log.addFilter(FilterCP3())
sabnzbd.LOGHANDLER = rollover_log
logger.addHandler(rollover_log)
logger.setLevel(LOGLEVELS[logging_level + 1])

@@ -1141,6 +1169,7 @@ def main():

if consoleLogging:
console = logging.StreamHandler()
console.addFilter(FilterCP3())
console.setLevel(LOGLEVELS[logging_level + 1])
console.setFormatter(logging.Formatter(logformat))
logger.addHandler(console)

@@ -1169,18 +1198,11 @@ def main():
logging.info('Platform = %s', os.name)
logging.info('Python-version = %s', sys.version)
logging.info('Arguments = %s', sabnzbd.CMDLINE)

# Find encoding; relevant for unrar activities
try:
preferredencoding = locale.getpreferredencoding()
logging.info('Preferred encoding = %s', preferredencoding)
logging.info('Preferred encoding = %s', locale.getpreferredencoding())
except:
logging.info('Preferred encoding = ERROR')
preferredencoding = ''

# On Linux/FreeBSD/Unix "UTF-8" is strongly, strongly adviced:
if not sabnzbd.WIN32 and not sabnzbd.DARWIN and not ('utf' in preferredencoding.lower() and '8' in preferredencoding.lower()):
logging.warning(T("SABnzbd was started with encoding %s, this should be UTF-8. Expect problems with Unicoded file and directory names in downloads.") % preferredencoding)

if sabnzbd.cfg.log_level() > 1:
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6

@@ -1214,6 +1236,17 @@ def main():
if cpumodel:
logging.debug('CPU model name is %s', cpumodel)

# OSX 10.5 I/O priority setting
if sabnzbd.DARWIN:
logging.info('[osx] IO priority setting')
try:
from ctypes import cdll
libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
boolSetResult = libc.setiopolicy_np(0, 1, 3)
logging.info('[osx] IO priority set to throttle for process scope')
except:
logging.info('[osx] IO priority setting not supported')

logging.info('Read INI file %s', inifile)

if autobrowser is not None:

@@ -1229,12 +1262,21 @@ def main():

os.chdir(sabnzbd.DIR_PROG)

sabnzbd.WEB_DIR = Web_Template(sabnzbd.cfg.web_dir, DEF_STDINTF, fix_webname(web_dir))
sabnzbd.WEB_DIR_CONFIG = Web_Template(None, DEF_STDCONFIG, '')
sabnzbd.WIZARD_DIR = os.path.join(sabnzbd.DIR_INTERFACES, 'wizard')
web_dir = Web_Template(sabnzbd.cfg.web_dir, DEF_STDINTF, fix_webname(web_dir))
web_dir2 = Web_Template(sabnzbd.cfg.web_dir2, '', fix_webname(web_dir2))
web_dirc = Web_Template(None, DEF_STDCONFIG, '')

sabnzbd.WEB_COLOR = CheckColor(sabnzbd.cfg.web_color(), sabnzbd.WEB_DIR)
wizard_dir = os.path.join(sabnzbd.DIR_INTERFACES, 'wizard')

sabnzbd.WEB_DIR = web_dir
sabnzbd.WEB_DIR2 = web_dir2
sabnzbd.WEB_DIRC = web_dirc
sabnzbd.WIZARD_DIR = wizard_dir

sabnzbd.WEB_COLOR = CheckColor(sabnzbd.cfg.web_color(), web_dir)
sabnzbd.cfg.web_color.set(sabnzbd.WEB_COLOR)
sabnzbd.WEB_COLOR2 = CheckColor(sabnzbd.cfg.web_color2(), web_dir2)
sabnzbd.cfg.web_color2.set(sabnzbd.WEB_COLOR2)

if fork and not sabnzbd.WIN32:
daemonize()

@@ -1262,6 +1304,23 @@ def main():
logging.info("SSL version %s", sabnzbd.utils.sslinfo.ssl_version())
logging.info("SSL supported protocols %s", str(sabnzbd.utils.sslinfo.ssl_protocols_labels()))

cherrylogtoscreen = False
sabnzbd.WEBLOGFILE = None

if cherrypylogging:
if logdir:
sabnzbd.WEBLOGFILE = os.path.join(logdir, DEF_LOG_CHERRY)
# Define our custom logger for cherrypy errors
cherrypy_logging(sabnzbd.WEBLOGFILE, logging.handlers.RotatingFileHandler)
if not fork:
try:
x = sys.stderr.fileno
x = sys.stdout.fileno
if cherrypylogging == 1:
cherrylogtoscreen = True
except:
pass

https_cert = sabnzbd.cfg.https_cert.get_path()
https_key = sabnzbd.cfg.https_key.get_path()
https_chain = sabnzbd.cfg.https_chain.get_path()

@@ -1328,45 +1387,52 @@ def main():
'server.socket_host': cherryhost,
'server.socket_port': cherryport,
'server.shutdown_timeout': 0,
'log.screen': False,
'log.screen': cherrylogtoscreen,
'engine.autoreload.on': False,
'tools.encode.on': True,
'tools.gzip.on': True,
'tools.gzip.mime_types': mime_gzip,
'request.show_tracebacks': True,
'checker.check_localhost': bool(consoleLogging),
'error_page.401': sabnzbd.panic.error_page_401,
'error_page.404': sabnzbd.panic.error_page_404
})


# Do we want CherryPy Logging? Cannot be done via the config
if cherrypylogging:
sabnzbd.WEBLOGFILE = os.path.join(logdir, DEF_LOG_CHERRY)
cherrypy.log.screen = True
cherrypy.log.access_log.propagate = True
cherrypy.log.access_file = str(sabnzbd.WEBLOGFILE)
else:
cherrypy.log.access_log.propagate = False

# Force mimetypes (OS might overwrite them)
forced_mime_types = {'css': 'text/css', 'js': 'application/javascript'}
static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dir, 'static'), 'tools.staticdir.content_types': forced_mime_types}
staticcfg = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dirc, 'staticcfg'), 'tools.staticdir.content_types': forced_mime_types}
wizard_static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(wizard_dir, 'static'), 'tools.staticdir.content_types': forced_mime_types}

static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types}
staticcfg = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg'), 'tools.staticdir.content_types': forced_mime_types}
wizard_static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WIZARD_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types}

appconfig = {'/api': {'tools.basic_auth.on': False},
appconfig = {'/sabnzbd/api': {'tools.basic_auth.on': False},
'/api': {'tools.basic_auth.on': False},
'/m/api': {'tools.basic_auth.on': False},
'/rss': {'tools.basic_auth.on': False},
'/sabnzbd/rss': {'tools.basic_auth.on': False},
'/m/rss': {'tools.basic_auth.on': False},
'/sabnzbd/shutdown': {'streamResponse': True},
'/sabnzbd/static': static,
'/static': static,
'/sabnzbd/wizard/static': wizard_static,
'/wizard/static': wizard_static,
'/favicon.ico': {'tools.staticfile.on': True, 'tools.staticfile.filename': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg', 'ico', 'favicon.ico')},
'/favicon.ico': {'tools.staticfile.on': True, 'tools.staticfile.filename': os.path.join(web_dirc, 'staticcfg', 'ico', 'favicon.ico')},
'/sabnzbd/staticcfg': staticcfg,
'/staticcfg': staticcfg
}

# Make available from both URLs
main_page = sabnzbd.interface.MainPage()
cherrypy.tree.mount(main_page, '/', config=appconfig)
cherrypy.tree.mount(main_page, '/sabnzbd/', config=appconfig)
if web_dir2:
static2 = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dir2, 'static'), 'tools.staticdir.content_types': forced_mime_types}
appconfig['/sabnzbd/m/api'] = {'tools.basic_auth.on': False}
appconfig['/sabnzbd/m/rss'] = {'tools.basic_auth.on': False}
appconfig['/sabnzbd/m/shutdown'] = {'streamResponse': True}
appconfig['/sabnzbd/m/static'] = static2
appconfig['/m/static'] = static2
appconfig['/sabnzbd/m/wizard/static'] = wizard_static
appconfig['/m/wizard/static'] = wizard_static
appconfig['/sabnzbd/m/staticcfg'] = staticcfg
appconfig['/m/staticcfg'] = staticcfg

login_page = sabnzbd.interface.MainPage(web_dir, '/', web_dir2, '/m/', web_dirc, first=2)
cherrypy.tree.mount(login_page, '/', config=appconfig)

# Set authentication for CherryPy
sabnzbd.interface.set_auth(cherrypy.config)
@@ -214,7 +214,18 @@ class HTTPRedirect(CherryPyException):
if isinstance(urls, text_or_bytes):
urls = [urls]

self.urls = [tonative(url, encoding or self.encoding) for url in urls]
abs_urls = []
for url in urls:
url = tonative(url, encoding or self.encoding)

# Note that urljoin will "do the right thing" whether url is:
# 1. a complete URL with host (e.g. "http://www.example.com/test")
# 2. a URL relative to root (e.g. "/dummy")
# 3. a URL relative to the current path
# Note that any query string in cherrypy.request is discarded.
url = _urljoin(cherrypy.url(), url)
abs_urls.append(url)
self.urls = abs_urls

# RFC 2616 indicates a 301 response code fits our goal; however,
# browser support for 301 is quite messy. Do 302/303 instead. See

@@ -230,7 +241,7 @@ class HTTPRedirect(CherryPyException):
raise ValueError('status must be between 300 and 399.')

self.status = status
CherryPyException.__init__(self, self.urls, status)
CherryPyException.__init__(self, abs_urls, status)

def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent
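The `HTTPRedirect` hunk above contrasts the two variants: the 2.0.0 side keeps redirect URLs relative, while the 1.2.x side resolves each one against the current request URL via `_urljoin`. A small Python sketch of the three cases named in the comment, using the standard-library `urljoin` (assumed here to behave like the wrapped `_urljoin`) with a hypothetical base URL:

```python
# Python 2 sketch: how urljoin resolves the three kinds of redirect targets.
from urlparse import urljoin

base = 'http://localhost:8080/sabnzbd/queue'        # hypothetical request URL
print urljoin(base, 'http://www.example.com/test')  # complete URL with host -> kept as-is
print urljoin(base, '/dummy')                       # relative to root -> http://localhost:8080/dummy
print urljoin(base, 'details')                      # relative to current path -> http://localhost:8080/sabnzbd/details
```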
@@ -85,7 +85,7 @@ class BuiltinSSLAdapter(wsgiserver.SSLAdapter):

# Check if it's one of the known errors
# Errors that are caught by PyOpenSSL, but thrown by built-in ssl
_block_errors = ('unknown protocol', 'unknown ca', 'unknown_ca', 'unknown error',
_block_errors = ('unknown protocol', 'unknown ca', 'unknown_ca',
'inappropriate fallback', 'wrong version number',
'no shared cipher', 'certificate unknown', 'ccs received early')
for error_text in _block_errors:
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Config"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/configure"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/configure"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<!--#from locale import getpreferredencoding#-->
|
||||
@@ -34,7 +34,12 @@
|
||||
<tr>
|
||||
<th scope="row">OpenSSL:</th>
|
||||
<td>
|
||||
$ssl_version [$ssl_protocols]
|
||||
<!--#if $have_ssl#-->
|
||||
$ssl_version [$ssl_protocols]
|
||||
<!--#else#-->
|
||||
<span class="label label-danger">$T('notAvailable')</span>
|
||||
<a href="$helpuri$help_uri#no_ssl" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a>
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#if not $have_ssl_context#-->
|
||||
@@ -45,15 +50,6 @@
|
||||
</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#if not $have_mt_par2#-->
|
||||
<tr>
|
||||
<th scope="row">Multicore Par2</th>
|
||||
<td>
|
||||
<span class="label label-warning">$T('notAvailable')</span> $T('explain-getpar2mt')
|
||||
<a href="${helpuri}installation/multicore-par2" target="_blank">${helpuri}installation/multicore-par2</a>
|
||||
</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#if not $have_cryptography #-->
|
||||
<tr>
|
||||
<th scope="row">Python Cryptography:</th>
|
||||
@@ -63,7 +59,7 @@
|
||||
</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#if not $have_yenc and not $have_sabyenc#-->
|
||||
<!--#if not $have_yenc#-->
|
||||
<tr>
|
||||
<th scope="row">yEnc:</th>
|
||||
<td>
|
||||
@@ -72,15 +68,6 @@
|
||||
</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#if not $have_sabyenc#-->
|
||||
<tr>
|
||||
<th scope="row">SABYenc:</th>
|
||||
<td>
|
||||
<span class="label label-danger">$T('notAvailable')</span>
|
||||
<a href="$helpuri$help_uri#no_sabyenc" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a>
|
||||
</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#if not $have_unzip #-->
|
||||
<tr>
|
||||
<th scope="row">$T('opt-enable_unzip'):</th>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Categories"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/categories"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/categories"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
<div class="colmask">
|
||||
<div class="section">
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Folders"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/folders"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/folders"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="General"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/general"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/general"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
@@ -23,9 +23,9 @@
|
||||
<input type="number" name="port" id="port" value="$port" size="8" data-original="$port" />
|
||||
<span class="desc">$T('explain-port')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
|
||||
<label class="config" for="enable_https">$T('opt-enable_https')</label>
|
||||
<input type="checkbox" name="enable_https" id="enable_https" value="1" <!--#if int($enable_https) > 0 then 'checked="checked"' else ""#-->/>
|
||||
<input type="checkbox" name="enable_https" id="enable_https" value="1" <!--#if int($enable_https) > 0 then 'checked="checked"' else ""#--> <!--#if int($have_ssl) == 0 then "disabled" else ""#--> />
|
||||
<span class="desc">$T('explain-enable_https')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
@@ -39,7 +39,21 @@
|
||||
<!--#end if#-->
|
||||
<!--#end for#-->
|
||||
</select>
|
||||
<span class="desc">$T('explain-web_dir') <a href="$caller_url">$caller_url</a></span>
|
||||
<span class="desc">$T('explain-web_dir') <a href="$caller_url1">$caller_url1</a></span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="web_dir2">$T('opt-web_dir2')</label>
|
||||
<select name="web_dir2" id="web_dir2">
|
||||
<option value="None" selected="selected">$T("None")</option>
|
||||
<!--#for $webline in $web_list#-->
|
||||
<!--#if $webline.lower() == $web_dir2.lower()#-->
|
||||
<option value="$webline" selected="selected">$webline</option>
|
||||
<!--#else#-->
|
||||
<option value="$webline">$webline</option>
|
||||
<!--#end if#-->
|
||||
<!--#end for#-->
|
||||
</select>
|
||||
<span class="desc">$T('explain-web_dir2') <a href="$caller_url2">$caller_url2</a></span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="language">$T('opt-language')</label>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Email"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/notifications"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/notifications"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<!--#def show_notify_checkboxes($section_label)#-->
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="RSS"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/rss"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/rss"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
<div class="colmask">
|
||||
<!--#if not $active_feed#-->
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Scheduling"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/scheduling"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/scheduling"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<%
|
||||
@@ -80,20 +80,21 @@ else:
|
||||
<div class="col1">
|
||||
<fieldset>
|
||||
<!--#if $schedlines#-->
|
||||
<!--#set $schednum = 0#-->
|
||||
<!--#set $odd = True#-->
|
||||
<!--#for $schednum, $line in enumerate($schedlines)#-->
|
||||
<!--#for $line in $schedlines#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<form action="delSchedule" method="post">
|
||||
<input type="hidden" name="session" value="$session"/>
|
||||
<input type="hidden" name="line" id="line" value="$line"/>
|
||||
<div class="field-pair infoTableSeperator <!--#if $odd then "" else " alt"#-->">
|
||||
<input type="checkbox" name="schedenabled" value="$line" <!--#if int($taskinfo[$schednum][5]) > 0 then 'checked="checked"' else ""#-->>
|
||||
<button class="btn btn-default float-left"><span class="glyphicon glyphicon-trash"></span></button>
|
||||
<div class="scheduleEntry">
|
||||
<span class="time">$taskinfo[$schednum][1]:$taskinfo[$schednum][2]</span><span class="frequency">$taskinfo[$schednum][3]</span> <span class="darkred">$taskinfo[$schednum][4]</span>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
<!--#set $schednum = $schednum+1#-->
|
||||
<!--#end for#-->
|
||||
<!--#else#-->
|
||||
<div class="field-pair">
|
||||
@@ -125,18 +126,5 @@ else:
|
||||
\$('#hidden_arguments').show()
|
||||
}*/
|
||||
})
|
||||
|
||||
\$('[name="schedenabled"]').click(function() {
|
||||
\$.ajax({
|
||||
type: "POST",
|
||||
url: "toggleSchedule",
|
||||
data: {line: \$(this).val(), session: "$session" }
|
||||
}).done(function() {
|
||||
// Let us leave!
|
||||
formWasSubmitted = true;
|
||||
formHasChanged = false;
|
||||
location.reload();
|
||||
});
|
||||
});
|
||||
</script>
|
||||
<!--#include $webdir + "/_inc_footer_uc.tmpl"#-->
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Servers"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/servers"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/servers"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
@@ -29,9 +29,9 @@
|
||||
<label class="config" for="port">$T('srv-port')</label>
|
||||
<input type="number" name="port" id="port" size="8" value="119" />
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
|
||||
<label class="config" for="ssl">$T('srv-ssl')</label>
|
||||
<input type="checkbox" name="ssl" id="ssl" value="1" />
|
||||
<input type="checkbox" name="ssl" id="ssl" value="1" <!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
|
||||
<span class="desc">$T('explain-ssl')</span>
|
||||
</div>
|
||||
<!-- Tricks to avoid browser auto-fill, fixed on-submit with javascript -->
|
||||
@@ -63,8 +63,8 @@
|
||||
<label class="config" for="ssl_verify">$T('opt-ssl_verify')</label>
|
||||
<select name="ssl_verify" id="ssl_verify" <!--#if int($have_ssl_context) == 0 then "disabled=\"disabled\"" else ""#-->>
|
||||
<option value="0">$T('ssl_verify-disabled')</option>
|
||||
<option value="1">$T('ssl_verify-normal')</option>
|
||||
<option value="2" selected>$T('ssl_verify-strict')</option>
|
||||
<option value="1" selected>$T('ssl_verify-normal')</option>
|
||||
<option value="2">$T('ssl_verify-strict')</option>
|
||||
</select>
|
||||
<span class="desc">$T('explain-ssl_verify').replace('. ', '.<br/>')</span>
|
||||
</div>
|
||||
@@ -149,9 +149,9 @@
|
||||
<label class="config" for="port$cur">$T('srv-port')</label>
|
||||
<input type="number" name="port" id="port$cur" value="$server['port']" size="8" />
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
|
||||
<label class="config" for="ssl$cur">$T('srv-ssl')</label>
|
||||
<input type="checkbox" name="ssl" id="ssl$cur" value="1" <!--#if int($server['ssl']) != 0 then 'checked="checked"' else ""#--> />
|
||||
<input type="checkbox" name="ssl" id="ssl$cur" value="1" <!--#if int($server['ssl']) != 0 and int($have_ssl) == 1 then 'checked="checked"' else ""#--> <!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
|
||||
<span class="desc">$T('explain-ssl')</span>
|
||||
</div>
|
||||
<!-- Tricks to avoid browser auto-fill, fixed on-submit with javascript -->
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Sorting"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/sorting"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/sorting"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Special"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/special"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/special"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<!--#set global $pane="Switches"#-->
|
||||
<!--#set global $help_uri="configuration/2.0/switches"#-->
|
||||
<!--#set global $help_uri="configuration/1.2/switches"#-->
|
||||
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
|
||||
|
||||
<div class="colmask">
|
||||
@@ -20,9 +20,9 @@
|
||||
</select>
|
||||
<span class="desc">$T('explain-load_balancing')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
|
||||
<label class="config" for="ssl_ciphers">$T('opt-ssl_ciphers')</label>
|
||||
<input type="text" name="ssl_ciphers" id="ssl_ciphers" value="$ssl_ciphers" />
|
||||
<input type="text" name="ssl_ciphers" id="ssl_ciphers" value="$ssl_ciphers"<!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
|
||||
<span class="desc">$T('explain-ssl_ciphers') <br>$T('readwiki')
|
||||
<a href="${helpuri}advanced/ssl-ciphers" target="_blank">${helpuri}advanced/ssl-ciphers</a></span>
|
||||
</div>
|
||||
@@ -159,6 +159,11 @@
|
||||
<input type="checkbox" name="enable_all_par" id="enable_all_par" value="1" <!--#if int($enable_all_par) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-enable_all_par').replace('. ', '.<br/>')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="quick_check">$T('opt-quick_check')</label>
|
||||
<input type="checkbox" name="quick_check" id="quick_check" value="1" <!--#if int($quick_check) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-quick_check')</span>
|
||||
</div>
|
||||
<!--#if $have_multicore#-->
|
||||
<div class="field-pair">
|
||||
<label class="config" for="par2_multicore">$T('opt-par2_multicore')</label>
|
||||
@@ -191,6 +196,11 @@
|
||||
<input type="checkbox" name="flat_unpack" id="flat_unpack" value="1" <!--#if int($flat_unpack) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-flat_unpack')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="overwrite_files">$T('opt-overwrite_files')</label>
|
||||
<input type="checkbox" name="overwrite_files" id="overwrite_files" value="1" <!--#if int($overwrite_files) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-overwrite_files')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="script_can_fail">$T('opt-script_can_fail')</label>
|
||||
<input type="checkbox" name="script_can_fail" id="script_can_fail" value="1" <!--#if int($script_can_fail) > 0 then 'checked="checked"' else ""#--> />
|
||||
@@ -263,6 +273,11 @@
|
||||
<span class="desc">$T('explain-sanitize_safe')</span>
|
||||
</div>
|
||||
<!--#end if#-->
|
||||
<div class="field-pair">
|
||||
<label class="config" for="enable_meta">$T('opt-enable_meta')</label>
|
||||
<input type="checkbox" name="enable_meta" id="enable_meta" value="1" <!--#if int($enable_meta) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-enable_meta')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<button class="btn btn-default saveButton"><span class="glyphicon glyphicon-ok"></span> $T('button-saveChanges')</button>
|
||||
<button class="btn btn-default restoreDefaults"><span class="glyphicon glyphicon-asterisk"></span> $T('button-restoreDefaults')</button>
|
||||
@@ -316,7 +331,16 @@
|
||||
<div class="field-pair">
|
||||
<label class="config" for="rating_enable">$T('opt-rating_enable')</label>
|
||||
<input type="checkbox" name="rating_enable" id="rating_enable" value="1" <!--#if int($rating_enable) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-rating_enable').replace('. ', '.<br/>')</span>
|
||||
<span class="desc">$T('explain-rating_enable')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="rating_feedback">$T('opt-rating_feedback')</label>
|
||||
<input type="checkbox" name="rating_feedback" id="rating_feedback" value="1" <!--#if int($rating_feedback) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-rating_feedback')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="rating_host">$T('opt-rating_host')</label>
|
||||
<input type="text" name="rating_host" id="rating_host" value="$rating_host" />
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="rating_api_key">$T('opt-rating_api_key')</label>
|
||||
|
||||
@@ -865,17 +865,11 @@ input[type="checkbox"] {
|
||||
max-width: 150px !important;
|
||||
}
|
||||
|
||||
.Scheduling form[action="addSchedule"] input[type="checkbox"] {
|
||||
.Scheduling input[type="checkbox"] {
|
||||
margin-top: 0px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
|
||||
.Scheduling form[action="delSchedule"] input[type="checkbox"] {
|
||||
position: initial;
|
||||
float: left;
|
||||
margin: 9px 10px 0px 5px;
|
||||
}
|
||||
|
||||
.navbar .container {
|
||||
padding-right: 0;
|
||||
}
|
||||
|
||||
@@ -273,7 +273,7 @@ function do_restart() {
|
||||
error: function(status, text) {
|
||||
failureCounter = failureCounter+1;
|
||||
// Too many failuers and we give up
|
||||
if(failureCounter >= 6) {
|
||||
if(failureCounter >= 7) {
|
||||
// If the port has changed 'Access-Control-Allow-Origin' header will not allow
|
||||
// us to check if the server is back up. So after 7 failures we redirect
|
||||
// anyway in the hopes it works anyway..
|
||||
@@ -281,7 +281,7 @@ function do_restart() {
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 4000)
|
||||
}, 3000)
|
||||
|
||||
// Exception if we go from HTTPS to HTTP
|
||||
// (this is not allowed by browsers and all of the above will be ignored)
|
||||
|
||||
@@ -112,36 +112,37 @@
|
||||
<hr/>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('cache')</div>
|
||||
<div class="col-sm-6">
|
||||
<span data-bind="text: cacheSize"></span> (<span data-bind="text: cacheArticles"></span> $T('Glitter-articles'))
|
||||
<div class="col-sm-6" data-bind="visible: hasStatusInfo">
|
||||
<span data-bind="text: statusInfo.cache_size"></span> (<span data-bind="text: statusInfo.cache_art"></span> $T('Glitter-articles'))
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-systemPerformance')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasStatusInfo">
|
||||
<span data-bind="text: statusInfo.pystone"></span>
|
||||
<a href="#" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<a href="#" data-bind="click: loadStatusInfo" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small data-bind="truncatedText: statusInfo.cpumodel, length: 25, attr: { 'data-original-title': statusInfo.cpumodel }" data-tooltip="true"></small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-downloadDirSpeed')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasDiskStatusInfo">
|
||||
<span data-bind="text: statusInfo.downloaddirspeed()"></span> MB/s
|
||||
<a href="#" class="diskspeed-button" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small>(<span data-bind="truncatedText: statusInfo.downloaddir, length: 24, attr: { 'data-original-title': statusInfo.downloaddir }" data-tooltip="true"></span>)</small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasDiskStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-completeDirSpeed')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasDiskStatusInfo">
|
||||
<span data-bind="text: statusInfo.completedirspeed()"></span> MB/s
|
||||
<a href="#" class="diskspeed-button" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small>(<span data-bind="truncatedText: statusInfo.completedir, length: 24, attr: { 'data-original-title': statusInfo.completedir }" data-tooltip="true"></span>)</small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasDiskStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="row options-function-box">
|
||||
|
||||
@@ -40,8 +40,6 @@ function ViewModel() {
|
||||
self.quotaLimit = ko.observable();
|
||||
self.quotaLimitLeft = ko.observable();
|
||||
self.systemLoad = ko.observable();
|
||||
self.cacheSize = ko.observable();
|
||||
self.cacheArticles = ko.observable();
|
||||
self.nrWarnings = ko.observable(0);
|
||||
self.allWarnings = ko.observableArray([]);
|
||||
self.allMessages = ko.observableArray([]);
|
||||
@@ -50,7 +48,7 @@ function ViewModel() {
|
||||
|
||||
// Statusinfo container
|
||||
self.hasStatusInfo = ko.observable(false);
|
||||
self.hasPerformanceInfo = ko.observable(false);
|
||||
self.hasDiskStatusInfo = ko.observable(false);
|
||||
self.statusInfo = {};
|
||||
self.statusInfo.folders = ko.observableArray([]);
|
||||
self.statusInfo.servers = ko.observableArray([]);
|
||||
@@ -61,6 +59,8 @@ function ViewModel() {
|
||||
self.statusInfo.pystone = ko.observable();
|
||||
self.statusInfo.cpumodel = ko.observable();
|
||||
self.statusInfo.loglevel = ko.observable();
|
||||
self.statusInfo.cache_size = ko.observable();
|
||||
self.statusInfo.cache_art = ko.observable();
|
||||
self.statusInfo.downloaddir = ko.observable();
|
||||
self.statusInfo.downloaddirspeed = ko.observable();
|
||||
self.statusInfo.completedir = ko.observable();
|
||||
@@ -183,10 +183,6 @@ function ViewModel() {
|
||||
// System load
|
||||
self.systemLoad(response.queue.loadavg)
|
||||
|
||||
// Cache
|
||||
self.cacheSize(response.queue.cache_size)
|
||||
self.cacheArticles(response.queue.cache_art)
|
||||
|
||||
// Warnings (new warnings will trigger an update of allMessages)
|
||||
self.nrWarnings(response.queue.have_warnings)
|
||||
|
||||
@@ -753,6 +749,8 @@ function ViewModel() {
|
||||
callAPI({ mode: 'fullstatus', skip_dashboard: (!statusFullRefresh)*1 }).then(function(data) {
|
||||
// Update basic
|
||||
self.statusInfo.loglevel(data.status.loglevel)
|
||||
self.statusInfo.cache_art(data.status.cache_art)
|
||||
self.statusInfo.cache_size(data.status.cache_size)
|
||||
self.statusInfo.folders(data.status.folders)
|
||||
|
||||
// Update the full set
|
||||
@@ -768,7 +766,7 @@ function ViewModel() {
|
||||
self.statusInfo.publicipv4(data.status.publicipv4)
|
||||
self.statusInfo.ipv6(data.status.ipv6 || glitterTranslate.noneText)
|
||||
// Loaded disk info
|
||||
self.hasPerformanceInfo(true)
|
||||
self.hasDiskStatusInfo(true)
|
||||
}
|
||||
|
||||
// Update the servers
|
||||
@@ -818,7 +816,7 @@ function ViewModel() {
|
||||
|
||||
// Do a disk-speedtest
|
||||
self.testDiskSpeed = function(item, event) {
|
||||
self.hasPerformanceInfo(false)
|
||||
self.hasDiskStatusInfo(false)
|
||||
|
||||
// Run it and then display it
|
||||
callSpecialAPI('./status/dashrefresh/').then(function() {
|
||||
|
||||
@@ -171,7 +171,7 @@ function QueueListModel(parent) {
|
||||
|
||||
// Do we show search box. So it doesn't dissapear when nothing is found
|
||||
self.hasQueueSearch = ko.pureComputed(function() {
|
||||
return (self.pagination.hasPagination() || self.searchTerm() || (self.parent.hasQueue() && self.isMultiEditing()))
|
||||
return (self.pagination.hasPagination() || self.searchTerm())
|
||||
})
|
||||
|
||||
// Searching in queue (rate-limited in decleration)
|
||||
|
||||
@@ -26,9 +26,9 @@
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="${path}staticcfg/ico/apple-touch-icon-76x76-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="${path}staticcfg/ico/apple-touch-icon-120x120-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="152x152" href="${path}staticcfg/ico/apple-touch-icon-152x152-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${path}staticcfg/ico/apple-touch-icon-180x180-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${path}staticcfg/ico/apple-touch-icon-180x180-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="192x192" href="${path}staticcfg/ico/android-192x192.png" />
|
||||
|
||||
|
||||
<script type="text/javascript" src="${path}static/javascripts/lib.js?$version"></script>
|
||||
|
||||
#if $pane=="Main"#
|
||||
|
||||
@@ -85,7 +85,7 @@
|
||||
<ul>
|
||||
<li>
|
||||
$T('Plush-maxSpeed'):
|
||||
<input type="text" id="maxSpeed-option" size="4" />
|
||||
<input type="text" id="maxSpeed-option" size="4" />
|
||||
<select id="maxSpeed-label">
|
||||
<option value="%">%</option>
|
||||
<option value="K">KB/s</option>
|
||||
|
||||
@@ -125,15 +125,15 @@
|
||||
|
||||
<div id="tabs-dashboard">
|
||||
<table class="rssTable">
|
||||
<tr>
|
||||
<th colspan="2">$T('dashboard-title')</th>
|
||||
<tr>
|
||||
<th colspan="2">$T('dashboard-title')</th>
|
||||
</tr>
|
||||
<!--#set $odd = False#-->
|
||||
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-localIP4')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $localipv4#-->
|
||||
$localipv4
|
||||
<!--#else#-->
|
||||
@@ -141,10 +141,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-publicIP4')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $publicipv4#-->
|
||||
$publicipv4
|
||||
<!--#else#-->
|
||||
@@ -152,10 +152,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-IP6')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $ipv6#-->
|
||||
$ipv6
|
||||
<!--#else#-->
|
||||
@@ -163,10 +163,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-NameserverDNS')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $dnslookup#-->
|
||||
$dnslookup
|
||||
<!--#else#-->
|
||||
@@ -178,33 +178,27 @@
<!--#end if#-->
</td>
</tr>
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('dashboard-systemPerformance')</td>
<td>
<!--#if $pystone > 0 #-->
$pystone
<!--#elif $pystone == 0 #-->
$T('dashboard-clickToStart')
<!--#end if#-->
</td>
<td>$pystone</td>
</tr>
<!--#if $cpumodel#-->
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('dashboard-cpuModel')</td>
<td>$cpumodel</td>
</tr>
<!--#end if#-->
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('opt-download_dir')</td>
<td>$downloaddir</td>
</tr>
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('dashboard-writingSpeed')</td>
<td>
<!--#if $downloaddirspeed > 0 #-->
$downloaddirspeed MB/s
<!--#elif $downloaddirspeed == 0 #-->
@@ -214,15 +208,15 @@
<!--#end if#-->
</td>
</tr>
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('opt-complete_dir')</td>
<td>$completedir</td>
</tr>
<!--#set $odd = not $odd#-->
<tr class="<!--#if $odd then "odd" else "even"#-->">
<td>$T('dashboard-writingSpeed')</td>
<td>
<!--#if $completedirspeed > 0 #-->
$completedirspeed MB/s
<!--#elif $completedirspeed == 0 #-->

@@ -40,7 +40,9 @@
$T('srv-ssl')
</label>
<div class="col-sm-8 input-checkbox">
<input type="checkbox" id="ssl" name="ssl" value="1" <!--#if $ssl == 1 then 'checked' else ''#--> data-toggle="tooltip" data-placement="right" title="$T('wizard-server-ssl-explain')"/>
<input type="checkbox" id="ssl" name="ssl" value="1" <!--#if $have_ssl then '' else 'disabled'#--><!--#if $ssl == 1 then 'checked' else ''#--> data-toggle="tooltip" data-placement="right" title="$T('wizard-server-ssl-explain')"/>
<!--#if not $have_ssl then '<span class="label label-warning">OpenSSL '+$T('opt-notInstalled')+'</span>' else ''#-->
<small></small>
</div>
</div>
<div class="form-group">
@@ -64,23 +66,13 @@
<input type="number" class="form-control" name="connections" id="connections" value="<!--#if $connections then $connections else '8'#-->" data-toggle="tooltip" data-placement="right" title="$T('wizard-server-con-explain') $T('wizard-server-con-eg')" />
</div>
</div>
<div class="form-group">
<label for="ssl_verify" class="col-sm-4 control-label">$T('opt-ssl_verify')</label>
<div class="col-sm-8">
<select name="ssl_verify" id="ssl_verify" class="form-control" <!--#if int($have_ssl_context) == 0 then "disabled=\"disabled\"" else ""#-->>
<option value="0" <!--#if $ssl_verify == 0 then 'selected="selected"' else ""#--> >$T('ssl_verify-disabled')</option>
<option value="1" <!--#if $ssl_verify == 1 then 'selected="selected"' else ""#--> >$T('ssl_verify-normal')</option>
<option value="2" <!--#if $ssl_verify == 2 then 'selected="selected"' else ""#--> >$T('ssl_verify-strict')</option>
</select>
</div>
</div>
</div>
<div class="row">
<div class="col-sm-4">
<button id="serverTest" class="btn btn-default"><span class="glyphicon glyphicon-sort"></span> $T('wizard-button-testServer')</button>
</div>
<div class="col-sm-8">
<div id="serverResponse" class="well well-sm">$T('wizard-server-text')</div>
<div id="serverQuote" class="btn btn-default disabled"><span id="serverResponse">$T('wizard-server-text')</span></div>
</div>
</div>


@@ -24,7 +24,7 @@ $(document).ready(function() {
} else {
r = '<span class="failed"><span class="glyphicon glyphicon-minus-sign"></span> ' + result.value.message + '</span>';
}
r = r.replace('https://sabnzbd.org/certificate-errors', '<a href="https://sabnzbd.org/certificate-errors" class="failed" target="_blank">https://sabnzbd.org/certificate-errors</a>')

$('#serverResponse').html(r);
}
);

@@ -62,7 +62,7 @@ a[target="_blank"] {
color: #00cc22;
}
.failed {
color: red !important;
color: red;
}
#rightGreyText {
color: #ccc;
@@ -164,12 +164,16 @@ label {
text-decoration: line-through;
color: #ccc;
}
#serverResponse {
padding: 6px 10px;
#serverQuote {
opacity: 0.8;
box-shadow: none !important;
white-space: normal;
width: 100%;
}
#host-tip {
margin-bottom: 5px;
}

.error-text {
display: inline;
color: red;
@@ -188,8 +192,7 @@ label {
#content a,
#content a:hover,
#content a:active,
#content a:visited,
#serverResponse {
#content a:visited {
color: #555;
}
.btn {

2371 po/main/SABnzbd.pot
File diff suppressed because it is too large
@@ -120,6 +120,15 @@ msgstr "Web interface"
msgid "Script returned exit code %s and output \"%s\""
msgstr "Notification script returned exit code %s and output \"%s\""

#: sabnzbd/skintext.py:521
msgid ""
"Enhanced functionality including ratings and extra status information is "
"available when connected to OZnzb indexer."
msgstr ""
"Indexers can supply information when a job is added <strong>or</strong> "
"using the settings below to provide ratings and extra status information. "
"<br>The Server address and API key settings can be left blank, depending on your indexer. "

#: sabnzbd/skintext.py:333
msgid "If empty, the standard port will only listen to HTTPS."
msgstr "If empty, the SABnzbd Port set above will listen to HTTPS."

@@ -1,11 +1,11 @@
#
# SABnzbd Translation Template file NSIS
# Copyright 2011-2017 The SABnzbd-Team
# Copyright (C) 2011-2015 by the SABnzbd Team
# team@sabnzbd.org
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-develop\n"
"Project-Id-Version: SABnzbd-0.8.x\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: shypike@sabnzbd.org\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@@ -13,71 +13,67 @@ msgstr ""
|
||||
"Content-Type: text/plain; charset=ASCII\n"
|
||||
"Content-Transfer-Encoding: 7bit\n"
|
||||
|
||||
#: NSIS_Installer.nsi:450
|
||||
#: NSIS_Installer.nsi:416
|
||||
msgid "Go to the SABnzbd Wiki"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:452
|
||||
#: NSIS_Installer.nsi:418
|
||||
msgid "Show Release Notes"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:454
|
||||
#: NSIS_Installer.nsi:420
|
||||
msgid "Support the project, Donate!"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:456
|
||||
#: NSIS_Installer.nsi:422
|
||||
msgid "Please close \"SABnzbd.exe\" first"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:458
|
||||
msgid "The installation directory has changed (now in \"Program Files\"). \\nIf you run SABnzbd as a service, you need to update the service settings."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:460
|
||||
#: NSIS_Installer.nsi:424
|
||||
msgid "This will uninstall SABnzbd from your system"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:462
|
||||
#: NSIS_Installer.nsi:426
|
||||
msgid "Run at startup"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:464
|
||||
#: NSIS_Installer.nsi:428
|
||||
msgid "Desktop Icon"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:466
|
||||
#: NSIS_Installer.nsi:430
|
||||
msgid "NZB File association"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:468
|
||||
#: NSIS_Installer.nsi:432
|
||||
msgid "Delete Program"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:470
|
||||
#: NSIS_Installer.nsi:434
|
||||
msgid "Delete Settings"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:472
|
||||
#: NSIS_Installer.nsi:436
|
||||
msgid "This system requires the Microsoft runtime library VC90 to be installed first. Do you want to do that now?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:474
|
||||
#: NSIS_Installer.nsi:438
|
||||
msgid "Downloading Microsoft runtime installer..."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:476
|
||||
#: NSIS_Installer.nsi:440
|
||||
msgid "Download error, retry?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:478
|
||||
#: NSIS_Installer.nsi:442
|
||||
msgid "Cannot install without runtime library, retry?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:480
|
||||
#: NSIS_Installer.nsi:444
|
||||
msgid "You cannot overwrite an existing installation. \\n\\nClick `OK` to remove the previous version or `Cancel` to cancel this upgrade."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:482
|
||||
#: NSIS_Installer.nsi:446
|
||||
msgid "Your settings and data will be preserved."
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -79,13 +79,19 @@ else:
##############################################################################
# SSL CHECKS
##############################################################################
import ssl
HAVE_SSL_CONTEXT = None
HAVE_SSL = None
try:
# Test availability of SSLContext (python 2.7.9+)
ssl.SSLContext
HAVE_SSL_CONTEXT = True
import ssl
HAVE_SSL = True
try:
# Test availability of SSLContext (python 2.7.9+)
ssl.SSLContext
HAVE_SSL_CONTEXT = True
except:
HAVE_SSL_CONTEXT = False
except:
HAVE_SSL = False
HAVE_SSL_CONTEXT = False

try:
@@ -154,9 +160,11 @@ BROWSER_URL = None
CMDLINE = '' # Rendering of original command line arguments

WEB_DIR = None
WEB_DIR_CONFIG = None
WEB_DIR2 = None
WEB_DIRC = None
WIZARD_DIR = None
WEB_COLOR = None
WEB_COLOR2 = None
SABSTOP = False
RESTART_REQ = False
PAUSED_ALL = False
@@ -169,11 +177,6 @@ LAST_ERROR = None
EXTERNAL_IPV6 = False
LAST_HISTORY_UPDATE = time.time()

# Performance measure for dashboard
PYSTONE_SCORE = 0
DOWNLOAD_DIR_SPEED = 0
COMPLETE_DIR_SPEED = 0

__INITIALIZED__ = False
__SHUTTING_DOWN__ = False

@@ -217,6 +220,7 @@ def connect_db(thread_index=0):
return cherrypy.thread_data.history_db



@synchronized(INIT_LOCK)
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
global __INITIALIZED__, __SHUTTING_DOWN__,\
@@ -262,7 +266,9 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
cfg.cherryhost.callback(guard_restart)
cfg.cherryport.callback(guard_restart)
cfg.web_dir.callback(guard_restart)
cfg.web_dir2.callback(guard_restart)
cfg.web_color.callback(guard_restart)
cfg.web_color2.callback(guard_restart)
cfg.username.callback(guard_restart)
cfg.password.callback(guard_restart)
cfg.log_dir.callback(guard_restart)
@@ -311,12 +317,8 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
else:
newsched.append(sched)
cfg.schedules.set(newsched)
cfg.sched_converted.set(1)
cfg.sched_converted.set(True)

# Second time schedule conversion
if cfg.sched_converted() != 2:
cfg.schedules.set(['%s %s' % (1, schedule) for schedule in cfg.schedules()])
cfg.sched_converted.set(2)

if check_repair_request():
repair = 2
@@ -440,6 +442,7 @@ def halt():
except:
logging.error(T('Fatal error at saving state'), exc_info=True)


# The Scheduler cannot be stopped when the stop was scheduled.
# Since all warm-restarts have been removed, it's not longer
# needed to stop the scheduler.
@@ -451,13 +454,8 @@ def halt():
__INITIALIZED__ = False


def trigger_restart(timeout=None):
def trigger_restart():
""" Trigger a restart by setting a flag an shutting down CP """
# Sometimes we need to wait a bit to send good-bye to the browser
if timeout:
time.sleep(timeout)

# Add extra arguments
if sabnzbd.downloader.Downloader.do.paused:
sabnzbd.RESTART_ARGS.append('-p')
sys.argv = sabnzbd.RESTART_ARGS
@@ -520,7 +518,6 @@ def guard_fsys_type():
""" Callback for change of file system naming type """
sabnzbd.encoding.change_fsys(cfg.fsys_type())


def set_https_verification(value):
prev = False
try:
@@ -899,25 +896,25 @@ def save_data(data, _id, path, do_pickle=True, silent=False):
logging.debug("Saving data for %s in %s", _id, path)
path = os.path.join(path, _id)

# We try 3 times, to avoid any dict or access problems
for t in xrange(3):
try:
with open(path, 'wb') as data_file:
if do_pickle:
if cfg.use_pickle():
cPickle.dump(data, data_file)
else:
pickle.dump(data, data_file)
else:
data_file.write(data)
break
except:
if t == 2:
logging.error(T('Saving %s failed'), path)
logging.info("Traceback: ", exc_info=True)
try:
_f = open(path, 'wb')
if do_pickle:
if cfg.use_pickle():
pickler = pickle.Pickler(_f, 2)
else:
# Wait a tiny bit before trying again
time.sleep(0.1)
pickler = cPickle.Pickler(_f, 2)
pickler.dump(data)
_f.flush()
_f.close()
pickler.clear_memo()
del pickler
else:
_f.write(data)
_f.flush()
_f.close()
except:
logging.error(T('Saving %s failed'), path)
logging.info("Traceback: ", exc_info=True)


@synchronized(IO_LOCK)
@@ -933,14 +930,15 @@ def load_data(_id, path, remove=True, do_pickle=True, silent=False):
|
||||
logging.debug("Loading data for %s from %s", _id, path)
|
||||
|
||||
try:
|
||||
with open(path, 'rb') as data_file:
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(data_file)
|
||||
else:
|
||||
data = cPickle.load(data_file)
|
||||
_f = open(path, 'rb')
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(_f)
|
||||
else:
|
||||
data = data_file.read()
|
||||
data = cPickle.load(_f)
|
||||
else:
|
||||
data = _f.read()
|
||||
_f.close()
|
||||
|
||||
if remove:
|
||||
os.remove(path)
|
||||
@@ -965,31 +963,31 @@ def remove_data(_id, path):
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def save_admin(data, _id):
|
||||
def save_admin(data, _id, do_pickle=True):
|
||||
""" Save data in admin folder in specified format """
|
||||
path = os.path.join(cfg.admin_dir.get_path(), _id)
|
||||
logging.info("Saving data for %s in %s", _id, path)
|
||||
|
||||
# We try 3 times, to avoid any dict or access problems
|
||||
for t in xrange(3):
|
||||
try:
|
||||
with open(path, 'wb') as data_file:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.dump(data, data_file)
|
||||
else:
|
||||
data = cPickle.dump(data, data_file)
|
||||
break
|
||||
except:
|
||||
if t == 2:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
else:
|
||||
# Wait a tiny bit before trying again
|
||||
time.sleep(0.1)
|
||||
try:
|
||||
_f = open(path, 'wb')
|
||||
if do_pickle:
|
||||
pickler = cPickle.Pickler(_f, 2)
|
||||
pickler.dump(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
pickler.clear_memo()
|
||||
del pickler
|
||||
else:
|
||||
_f.write(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
except:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def load_admin(_id, remove=False, silent=False):
|
||||
def load_admin(_id, remove=False, do_pickle=True, silent=False):
|
||||
""" Read data in admin folder in specified format """
|
||||
path = os.path.join(cfg.admin_dir.get_path(), _id)
|
||||
logging.info("Loading data for %s from %s", _id, path)
|
||||
@@ -999,11 +997,13 @@ def load_admin(_id, remove=False, silent=False):
|
||||
return None
|
||||
|
||||
try:
|
||||
with open(path, 'rb') as data_file:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(data_file)
|
||||
else:
|
||||
data = cPickle.load(data_file)
|
||||
f = open(path, 'rb')
|
||||
if do_pickle:
|
||||
data = cPickle.load(f)
|
||||
else:
|
||||
data = f.read()
|
||||
f.close()
|
||||
|
||||
if remove:
|
||||
os.remove(path)
|
||||
except:
|
||||
@@ -1152,12 +1152,11 @@ def wait_for_download_folder():
logging.debug('Waiting for "incomplete" folder')
time.sleep(2.0)


def check_old_queue():
""" Check for old queue (when a new queue is not present) """
old = False
if not os.path.exists(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME)):
for ver in (QUEUE_VERSION - 1, QUEUE_VERSION - 2, QUEUE_VERSION - 3):
for ver in (QUEUE_VERSION -1 , QUEUE_VERSION - 2, QUEUE_VERSION - 3):
data = load_admin(QUEUE_FILE_TMPL % str(ver))
if data:
break
@@ -1165,7 +1164,8 @@ def check_old_queue():
old = bool(data and isinstance(data, tuple) and len(data[1]))
except (TypeError, IndexError):
pass
if old and sabnzbd.WIN32 and ver < 10 and sabnzbd.DIR_LCLDATA != sabnzbd.DIR_HOME and misc.is_relative_path(cfg.download_dir()):
if old and sabnzbd.WIN32 and ver < 10 and sabnzbd.DIR_LCLDATA != sabnzbd.DIR_HOME \
and misc.is_relative_path(cfg.download_dir()):
# For Windows and when version < 10: adjust old default location
cfg.download_dir.set('Documents/' + cfg.download_dir())
return old

104 sabnzbd/api.py
@@ -28,7 +28,6 @@ import json
|
||||
import cherrypy
|
||||
import locale
|
||||
import socket
|
||||
from threading import Thread
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, "")
|
||||
except:
|
||||
@@ -55,7 +54,6 @@ from sabnzbd.utils.json import JsonWriter
|
||||
|
||||
from sabnzbd.utils.rsslib import RSS, Item
|
||||
from sabnzbd.utils.pathbrowser import folders_at_path
|
||||
from sabnzbd.utils.getperformance import getcpu
|
||||
from sabnzbd.misc import loadavg, to_units, diskspace, get_ext, \
|
||||
get_filename, int_conv, globber, globber_full, time_format, remove_all, \
|
||||
starts_with_path, cat_convert, clip_path, create_https_certificates, calc_age
|
||||
@@ -98,7 +96,6 @@ else:
|
||||
# Flag for using the fast json encoder, unless it fails
|
||||
FAST_JSON = True
|
||||
|
||||
|
||||
def api_handler(kwargs):
|
||||
""" API Dispatcher """
|
||||
mode = kwargs.get('mode', '')
|
||||
@@ -107,7 +104,7 @@ def api_handler(kwargs):
|
||||
callback = kwargs.get('callback', '')
|
||||
|
||||
# Extend the timeout of API calls to 10minutes
|
||||
cherrypy.response.timeout = 600
|
||||
cherrypy.response.timeout = 60*10
|
||||
|
||||
if isinstance(mode, list):
|
||||
mode = mode[0]
|
||||
@@ -141,7 +138,6 @@ def _api_set_config(name, output, kwargs):
|
||||
res, data = config.get_dconfig(kwargs.get('section'), kwargs.get('keyword'))
|
||||
return report(output, keyword='config', data=data)
|
||||
|
||||
|
||||
def _api_set_config_default(name, output, kwargs):
|
||||
""" API: Reset requested config variables back to defaults. Currently only for misc-section """
|
||||
keywords = kwargs.get('keyword', [])
|
||||
@@ -388,7 +384,6 @@ def _api_retry(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_cancel_pp(name, output, kwargs):
|
||||
""" API: accepts name, output, value(=nzo_id) """
|
||||
nzo_id = kwargs.get('value')
|
||||
@@ -397,7 +392,6 @@ def _api_cancel_pp(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_addlocalfile(name, output, kwargs):
|
||||
""" API: accepts name, output, pp, script, cat, priority, nzbname """
|
||||
if name and isinstance(name, list):
|
||||
@@ -538,7 +532,7 @@ def _api_history(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_VALUE)
|
||||
elif not name:
|
||||
history = build_header()
|
||||
history = build_header(prim=True)
|
||||
if 'noofslots_total' in history:
|
||||
del history['noofslots_total']
|
||||
grand, month, week, day = BPSMeter.do.get_sums()
|
||||
@@ -612,7 +606,6 @@ def _api_resume(name, output, kwargs):
|
||||
|
||||
def _api_shutdown(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
logging.info('Shutdown requested by API')
|
||||
sabnzbd.halt()
|
||||
cherrypy.engine.exit()
|
||||
sabnzbd.SABSTOP = True
|
||||
@@ -666,15 +659,12 @@ def _api_auth(name, output, kwargs):
|
||||
|
||||
def _api_restart(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
logging.info('Restart requested by API')
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
Thread(target=sabnzbd.trigger_restart, kwargs={'timeout': 1}).start()
|
||||
sabnzbd.trigger_restart()
|
||||
return report(output)
|
||||
|
||||
|
||||
def _api_restart_repair(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
logging.info('Queue repair requested by API')
|
||||
sabnzbd.request_repair()
|
||||
sabnzbd.trigger_restart()
|
||||
return report(output)
|
||||
@@ -762,14 +752,12 @@ def _api_test_email(name, output, kwargs):
|
||||
res = None
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_windows(name, output, kwargs):
|
||||
""" API: send a test to Windows, return result """
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_windows('SABnzbd', T('Test Notification'), 'other')
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_notif(name, output, kwargs):
|
||||
""" API: send a test to Notification Center, return result """
|
||||
logging.info("Sending test notification")
|
||||
@@ -811,14 +799,12 @@ def _api_test_pushbullet(name, output, kwargs):
|
||||
res = sabnzbd.notifier.send_pushbullet('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs)
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_nscript(name, output, kwargs):
|
||||
""" API: execute a test notification script, return result """
|
||||
logging.info("Executing notification script")
|
||||
res = sabnzbd.notifier.send_nscript('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs)
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_undefined(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
return report(output, _MSG_NOT_IMPLEMENTED)
|
||||
@@ -860,10 +846,14 @@ def _api_config_get_speedlimit(output, kwargs):
|
||||
|
||||
|
||||
def _api_config_set_colorscheme(output, kwargs):
|
||||
""" API: accepts output"""
|
||||
""" API: accepts output, value(=color for primary), value2(=color for secondary) """
|
||||
value = kwargs.get('value')
|
||||
value2 = kwargs.get('value2')
|
||||
if value:
|
||||
cfg.web_color.set(value)
|
||||
if value2:
|
||||
cfg.web_color2.set(value2)
|
||||
if value or value2:
|
||||
return report(output)
|
||||
else:
|
||||
return report(output, _MSG_NO_VALUE)
|
||||
@@ -1198,9 +1188,9 @@ def handle_cat_api(output, kwargs):
|
||||
return name
|
||||
|
||||
|
||||
def build_status(skip_dashboard=False, output=None):
|
||||
def build_status(web_dir=None, root=None, prim=True, skip_dashboard=False, output=None):
|
||||
# build up header full of basic information
|
||||
info = build_header()
|
||||
info = build_header(prim, web_dir)
|
||||
|
||||
info['logfile'] = sabnzbd.LOGFILE
|
||||
info['weblogfile'] = sabnzbd.WEBLOGFILE
|
||||
@@ -1208,19 +1198,7 @@ def build_status(skip_dashboard=False, output=None):
|
||||
info['folders'] = [xml_name(item) for item in sabnzbd.nzbqueue.scan_jobs(all=False, action=False)]
|
||||
info['configfn'] = xml_name(config.get_filename())
|
||||
|
||||
# Dashboard: Speed of System
|
||||
info['cpumodel'] = getcpu()
|
||||
info['pystone'] = sabnzbd.PYSTONE_SCORE
|
||||
|
||||
# Dashboard: Speed of Download directory:
|
||||
info['downloaddir'] = sabnzbd.cfg.download_dir.get_path()
|
||||
info['downloaddirspeed'] = sabnzbd.DOWNLOAD_DIR_SPEED
|
||||
|
||||
# Dashboard: Speed of Complete directory:
|
||||
info['completedir'] = sabnzbd.cfg.complete_dir.get_path()
|
||||
info['completedirspeed'] = sabnzbd.COMPLETE_DIR_SPEED
|
||||
|
||||
# Dashboard: Connection information
|
||||
# Dashboard: Begin
|
||||
if not int_conv(skip_dashboard):
|
||||
info['localipv4'] = localipv4()
|
||||
info['publicipv4'] = publicipv4()
|
||||
@@ -1232,6 +1210,33 @@ def build_status(skip_dashboard=False, output=None):
|
||||
except:
|
||||
info['dnslookup'] = None
|
||||
|
||||
# Dashboard: Speed of System
|
||||
from sabnzbd.utils.getperformance import getpystone, getcpu
|
||||
info['pystone'] = getpystone()
|
||||
info['cpumodel'] = getcpu()
|
||||
# Dashboard: Speed of Download directory:
|
||||
info['downloaddir'] = sabnzbd.cfg.download_dir.get_path()
|
||||
try:
|
||||
sabnzbd.downloaddirspeed # The persistent var
|
||||
except:
|
||||
# does not yet exist, so create it:
|
||||
sabnzbd.downloaddirspeed = 0 # 0 means ... not yet determined
|
||||
info['downloaddirspeed'] = sabnzbd.downloaddirspeed
|
||||
# Dashboard: Speed of Complete directory:
|
||||
info['completedir'] = sabnzbd.cfg.complete_dir.get_path()
|
||||
try:
|
||||
sabnzbd.completedirspeed # The persistent var
|
||||
except:
|
||||
# does not yet exist, so create it:
|
||||
sabnzbd.completedirspeed = 0 # 0 means ... not yet determined
|
||||
info['completedirspeed'] = sabnzbd.completedirspeed
|
||||
|
||||
try:
|
||||
sabnzbd.dashrefreshcounter # The persistent var @UndefinedVariable
|
||||
except:
|
||||
sabnzbd.dashrefreshcounter = 0
|
||||
info['dashrefreshcounter'] = sabnzbd.dashrefreshcounter
|
||||
|
||||
info['servers'] = []
|
||||
servers = sorted(Downloader.do.servers[:], key=lambda svr: '%02d%s' % (svr.priority, svr.displayname.lower()))
|
||||
for server in servers:
|
||||
@@ -1305,15 +1310,14 @@ def build_status(skip_dashboard=False, output=None):
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def build_queue(start=0, limit=0, trans=False, output=None, search=None):
|
||||
def build_queue(web_dir=None, root=None, prim=True, webdir='', start=0, limit=0, trans=False, output=None, search=None):
|
||||
if output:
|
||||
converter = unicoder
|
||||
else:
|
||||
converter = xml_name
|
||||
|
||||
# build up header full of basic information
|
||||
info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header(search=search, start=start, limit=limit)
|
||||
info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header(prim, webdir, search=search, start=start, limit=limit)
|
||||
|
||||
datestart = datetime.datetime.now()
|
||||
priorities = {TOP_PRIORITY: 'Force', REPAIR_PRIORITY: 'Repair', HIGH_PRIORITY: 'High', NORMAL_PRIORITY: 'Normal', LOW_PRIORITY: 'Low'}
|
||||
@@ -1377,8 +1381,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
|
||||
slot['status'] = Status.DOWNLOADING
|
||||
else:
|
||||
# ensure compatibility of API status
|
||||
if status in (Status.DELETED, ):
|
||||
status = Status.DOWNLOADING
|
||||
if status in (Status.DELETED, ): status = Status.DOWNLOADING
|
||||
slot['status'] = "%s" % (status)
|
||||
|
||||
if (Downloader.do.paused or Downloader.do.postproc or is_propagating or \
|
||||
@@ -1528,7 +1531,6 @@ def build_file_list(nzo_id):
|
||||
|
||||
return jobs
|
||||
|
||||
|
||||
def rss_qstatus():
|
||||
""" Return a RSS feed with the queue status """
|
||||
qnfo = NzbQueue.do.queue_info()
|
||||
@@ -1598,7 +1600,8 @@ def options_list(output):
|
||||
'zip': sabnzbd.newsunpack.ZIP_COMMAND,
|
||||
'7zip': sabnzbd.newsunpack.SEVEN_COMMAND,
|
||||
'nice': sabnzbd.newsunpack.NICE_COMMAND,
|
||||
'ionice': sabnzbd.newsunpack.IONICE_COMMAND
|
||||
'ionice': sabnzbd.newsunpack.IONICE_COMMAND,
|
||||
'ssl': sabnzbd.HAVE_SSL
|
||||
})
|
||||
|
||||
|
||||
@@ -1608,8 +1611,7 @@ def retry_job(job, new_nzb, password):
|
||||
history_db = sabnzbd.connect_db()
|
||||
futuretype, url, pp, script, cat = history_db.get_other(job)
|
||||
if futuretype:
|
||||
if pp == 'X':
|
||||
pp = None
|
||||
if pp == 'X': pp = None
|
||||
sabnzbd.add_url(url, pp, script, cat)
|
||||
history_db.remove_history(job)
|
||||
else:
|
||||
@@ -1682,17 +1684,24 @@ def clear_trans_cache():
|
||||
sabnzbd.WEBUI_READY = True
|
||||
|
||||
|
||||
def build_header(webdir=''):
|
||||
def build_header(prim, webdir=''):
|
||||
""" Build the basic header """
|
||||
try:
|
||||
uptime = calc_age(sabnzbd.START)
|
||||
except:
|
||||
uptime = "-"
|
||||
|
||||
if prim:
|
||||
color = sabnzbd.WEB_COLOR
|
||||
else:
|
||||
color = sabnzbd.WEB_COLOR2
|
||||
if not color:
|
||||
color = ''
|
||||
|
||||
header = {'T': Ttemplate, 'Tspec': Tspec, 'Tx': Ttemplate, 'version': sabnzbd.__version__,
|
||||
'paused': Downloader.do.paused or Downloader.do.postproc,
|
||||
'pause_int': scheduler.pause_int(), 'paused_all': sabnzbd.PAUSED_ALL,
|
||||
'uptime': uptime, 'color_scheme': sabnzbd.WEB_COLOR or ''}
|
||||
'uptime': uptime, 'color_scheme': color}
|
||||
speed_limit = Downloader.do.get_limit()
|
||||
if speed_limit <= 0:
|
||||
speed_limit = 100
|
||||
@@ -1721,7 +1730,7 @@ def build_header(webdir=''):
|
||||
header['my_lcldata'] = sabnzbd.DIR_LCLDATA
|
||||
header['my_home'] = sabnzbd.DIR_HOME
|
||||
|
||||
header['webdir'] = webdir or sabnzbd.WEB_DIR
|
||||
header['webdir'] = webdir
|
||||
header['pid'] = os.getpid()
|
||||
|
||||
header['finishaction'] = sabnzbd.QUEUECOMPLETE
|
||||
@@ -1751,10 +1760,11 @@ def build_header(webdir=''):
|
||||
return header
|
||||
|
||||
|
||||
def build_queue_header(search=None, start=0, limit=0):
|
||||
|
||||
def build_queue_header(prim, webdir='', search=None, start=0, limit=0):
|
||||
""" Build full queue header """
|
||||
|
||||
header = build_header()
|
||||
header = build_header(prim, webdir)
|
||||
|
||||
bytespersec = BPSMeter.do.get_bps()
|
||||
qnfo = NzbQueue.do.queue_info(search=search, start=start, limit=limit)
|
||||
|
||||
@@ -25,7 +25,7 @@ import threading
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.constants import GIGI, ANFO
|
||||
from sabnzbd.constants import GIGI, ANFO, Status
|
||||
|
||||
|
||||
ARTICLE_LOCK = threading.Lock()
|
||||
@@ -59,7 +59,7 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def reserve_space(self, data):
|
||||
""" Is there space left in the set limit? """
|
||||
data_size = sys.getsizeof(data) * 64
|
||||
data_size = sys.getsizeof(data)*64
|
||||
self.__cache_size += data_size
|
||||
if self.__cache_size + data_size > self.__cache_limit:
|
||||
return False
|
||||
@@ -69,10 +69,11 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def free_reserve_space(self, data):
|
||||
""" Remove previously reserved space """
|
||||
data_size = sys.getsizeof(data) * 64
|
||||
data_size = sys.getsizeof(data)*64
|
||||
self.__cache_size -= data_size
|
||||
return self.__cache_size + data_size < self.__cache_limit
|
||||
|
||||
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def save_article(self, article, data):
|
||||
nzf = article.nzf
|
||||
@@ -147,7 +148,7 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def purge_articles(self, articles):
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Purgeable articles -> %s", articles)
|
||||
logging.debug("Purgable articles -> %s", articles)
|
||||
for article in articles:
|
||||
if article in self.__article_list:
|
||||
self.__article_list.remove(article)
|
||||
|
||||
@@ -26,7 +26,12 @@ import struct
|
||||
import re
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
import hashlib
|
||||
try:
|
||||
import hashlib
|
||||
new_md5 = hashlib.md5
|
||||
except:
|
||||
import md5
|
||||
new_md5 = md5.new
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.misc import get_filepath, sanitize_filename, get_unique_filename, renamer, \
|
||||
@@ -60,6 +65,7 @@ class Assembler(Thread):
|
||||
self.queue.put(job)
|
||||
|
||||
def run(self):
|
||||
import sabnzbd.nzbqueue
|
||||
while 1:
|
||||
job = self.queue.get()
|
||||
if not job:
|
||||
@@ -154,7 +160,7 @@ def _assemble(nzf, path, dupe):
|
||||
fout = open(path, 'ab')
|
||||
|
||||
if cfg.quick_check():
|
||||
md5 = hashlib.md5()
|
||||
md5 = new_md5()
|
||||
else:
|
||||
md5 = None
|
||||
|
||||
@@ -166,7 +172,7 @@ def _assemble(nzf, path, dupe):
|
||||
break
|
||||
|
||||
# Sleep to allow decoder/assembler switching
|
||||
sleep(0.0001)
|
||||
sleep(0.001)
|
||||
article = decodetable[articlenum]
|
||||
|
||||
data = ArticleCache.do.load_article(article)
|
||||
@@ -259,7 +265,7 @@ def ParseFilePacket(f, header):
|
||||
|
||||
# Read and check the data
|
||||
data = f.read(len - 32)
|
||||
md5 = hashlib.md5()
|
||||
md5 = new_md5()
|
||||
md5.update(data)
|
||||
if md5sum != md5.digest():
|
||||
return nothing
|
||||
|
||||
@@ -22,6 +22,7 @@ sabnzbd.bpsmeter - bpsmeter
|
||||
import time
|
||||
import logging
|
||||
import re
|
||||
from math import floor
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import BYTES_FILE_NAME, BYTES_FILE_NAME_OLD, KIBI
|
||||
@@ -332,15 +333,15 @@ class BPSMeter(object):
|
||||
return None
|
||||
|
||||
# Calculate the variance in the speed
|
||||
avg = sum(self.bps_list[-timespan:]) / timespan
|
||||
avg = sum(self.bps_list[-timespan:])/timespan
|
||||
vari = 0
|
||||
for bps in self.bps_list[-timespan:]:
|
||||
vari += abs(bps - avg)
|
||||
vari = vari / timespan
|
||||
vari = vari/timespan
|
||||
|
||||
try:
|
||||
# See if the variance is less than 5%
|
||||
if (vari / (self.bps / KIBI)) < 0.05:
|
||||
if (vari / (self.bps/KIBI)) < 0.05:
|
||||
return avg
|
||||
else:
|
||||
return False
|
||||
@@ -349,6 +350,7 @@ class BPSMeter(object):
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def reset_quota(self, force=False):
|
||||
""" Check if it's time to reset the quota, optionally resuming
|
||||
Return True, when still paused
|
||||
|
||||
@@ -64,7 +64,6 @@ else:
|
||||
##############################################################################
|
||||
quick_check = OptionBool('misc', 'quick_check', True)
|
||||
sfv_check = OptionBool('misc', 'sfv_check', True)
|
||||
quick_check_ext_ignore = OptionList('misc', 'quick_check_ext_ignore', ['nfo', 'sfv', 'srr'])
|
||||
|
||||
email_server = OptionStr('misc', 'email_server', validation=validate_server)
|
||||
email_to = OptionList('misc', 'email_to', validation=validate_email)
|
||||
@@ -113,6 +112,7 @@ req_completion_rate = OptionNumber('misc', 'req_completion_rate', 100.2, 100, 20
|
||||
rating_enable = OptionBool('misc', 'rating_enable', False)
|
||||
rating_host = OptionStr('misc', 'rating_host', 'api.oznzb.com')
|
||||
rating_api_key = OptionStr('misc', 'rating_api_key')
|
||||
rating_feedback = OptionBool('misc', 'rating_feedback', True)
|
||||
rating_filter_enable = OptionBool('misc', 'rating_filter_enable', False)
|
||||
rating_filter_abort_audio = OptionNumber('misc', 'rating_filter_abort_audio', 0)
|
||||
rating_filter_abort_video = OptionNumber('misc', 'rating_filter_abort_video', 0)
|
||||
@@ -212,7 +212,9 @@ refresh_rate = OptionNumber('misc', 'refresh_rate', 0)
|
||||
rss_rate = OptionNumber('misc', 'rss_rate', 60, 15, 24 * 60)
|
||||
cache_limit = OptionStr('misc', 'cache_limit')
|
||||
web_dir = OptionStr('misc', 'web_dir', DEF_STDINTF)
|
||||
web_dir2 = OptionStr('misc', 'web_dir2')
|
||||
web_color = OptionStr('misc', 'web_color', '')
|
||||
web_color2 = OptionStr('misc', 'web_color2')
|
||||
cleanup_list = OptionList('misc', 'cleanup_list')
|
||||
warned_old_queue = OptionBool('misc', 'warned_old_queue9', False)
|
||||
notified_new_skin = OptionNumber('misc', 'notified_new_skin', 0)
|
||||
@@ -391,9 +393,6 @@ allow_duplicate_files = OptionBool('misc', 'allow_duplicate_files', False)
|
||||
warn_dupl_jobs = OptionBool('misc', 'warn_dupl_jobs', True)
|
||||
new_nzb_on_failure = OptionBool('misc', 'new_nzb_on_failure', False)
|
||||
|
||||
# TEMP
|
||||
nr_decoders = OptionNumber('misc', 'nr_decoders', 2)
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Set root folders for Folder config-items
|
||||
|
||||
@@ -377,7 +377,7 @@ class ConfigServer(object):
|
||||
self.password = OptionPassword(name, 'password', '', add=False)
|
||||
self.connections = OptionNumber(name, 'connections', 1, 0, 100, add=False)
|
||||
self.ssl = OptionBool(name, 'ssl', False, add=False)
|
||||
self.ssl_verify = OptionNumber(name, 'ssl_verify', 2, add=False) # 0=No, 1=Normal, 2=Strict (hostname verification)
|
||||
self.ssl_verify = OptionNumber(name, 'ssl_verify', 1, add=False) # 0=No, 1=Normal, 2=Strict (hostname verification)
|
||||
self.enable = OptionBool(name, 'enable', True, add=False)
|
||||
self.optional = OptionBool(name, 'optional', False, add=False)
|
||||
self.retention = OptionNumber(name, 'retention', add=False)
|
||||
@@ -813,11 +813,9 @@ def save_config(force=False):
|
||||
except KeyError:
|
||||
CFG[sec] = {}
|
||||
value = database[section][option]()
|
||||
# bool is a subclass of int, check first
|
||||
if isinstance(value, bool):
|
||||
# convert bool to int when saving so we store 0 or 1
|
||||
if type(value) == type(True):
|
||||
CFG[sec][kw] = str(int(value))
|
||||
elif isinstance(value, int):
|
||||
elif type(value) == type(0):
|
||||
CFG[sec][kw] = str(value)
|
||||
else:
|
||||
CFG[sec][kw] = value
|
||||
|
||||
@@ -52,8 +52,6 @@ RENAMES_FILE = '__renames__'
|
||||
ATTRIB_FILE = 'SABnzbd_attrib'
|
||||
REPAIR_REQUEST = 'repair-all.sab'
|
||||
|
||||
SABYENC_VERSION = '2.7.0'
|
||||
|
||||
DB_HISTORY_VERSION = 1
|
||||
DB_QUEUE_VERSION = 1
|
||||
|
||||
@@ -102,7 +100,7 @@ PAUSED_PRIORITY = -2
|
||||
DUP_PRIORITY = -3
|
||||
STOP_PRIORITY = -4
|
||||
|
||||
STAGES = {'Source': 0, 'Download': 1, 'Servers': 2, 'Repair': 3, 'Filejoin': 4, 'Unpack': 5, 'Script': 6}
|
||||
STAGES = { 'Source' : 0, 'Download' : 1, 'Servers' : 2, 'Repair' : 3, 'Filejoin' : 4, 'Unpack' : 5, 'Script' : 6 }
|
||||
|
||||
VALID_ARCHIVES = ('.zip', '.rar', '.7z')
|
||||
|
||||
@@ -134,7 +132,7 @@ class Status():
|
||||
COMPLETED = 'Completed' # PP: Job is finished
|
||||
CHECKING = 'Checking' # Q: Pre-check is running
|
||||
DOWNLOADING = 'Downloading' # Q: Normal downloading
|
||||
EXTRACTING = 'Extracting' # PP: Archives are being extracted
|
||||
EXTRACTING = 'Extracting' # PP: Archives are being extraced
|
||||
FAILED = 'Failed' # PP: Job has failed, now in History
|
||||
FETCHING = 'Fetching' # Q: Job is downloading extra par2 files
|
||||
GRABBING = 'Grabbing' # Q: Getting an NZB from an external site
|
||||
|
||||
@@ -25,30 +25,21 @@ import logging
|
||||
import re
|
||||
from time import sleep
|
||||
from threading import Thread
|
||||
try:
|
||||
import _yenc
|
||||
HAVE_YENC = True
|
||||
|
||||
except ImportError:
|
||||
HAVE_YENC = False
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE, SABYENC_VERSION
|
||||
import sabnzbd.articlecache
|
||||
from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE
|
||||
from sabnzbd.articlecache import ArticleCache
|
||||
import sabnzbd.downloader
|
||||
import sabnzbd.nzbqueue
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.encoding import yenc_name_fixer
|
||||
from sabnzbd.misc import match_str
|
||||
|
||||
try:
|
||||
import _yenc
|
||||
HAVE_YENC = True
|
||||
except ImportError:
|
||||
HAVE_YENC = False
|
||||
|
||||
try:
|
||||
import sabyenc
|
||||
HAVE_SABYENC = True
|
||||
# Verify version
|
||||
if sabyenc.__version__ != SABYENC_VERSION:
|
||||
raise ImportError
|
||||
except ImportError:
|
||||
HAVE_SABYENC = False
|
||||
|
||||
class CrcError(Exception):
|
||||
|
||||
@@ -67,26 +58,33 @@ class BadYenc(Exception):
|
||||
|
||||
class Decoder(Thread):
|
||||
|
||||
def __init__(self, servers, queue):
|
||||
def __init__(self, servers):
|
||||
Thread.__init__(self)
|
||||
|
||||
self.queue = queue
|
||||
self.queue = Queue.Queue()
|
||||
self.servers = servers
|
||||
|
||||
def decode(self, article, lines):
|
||||
self.queue.put((article, lines))
|
||||
# See if there's space left in cache, pause otherwise
|
||||
# But do allow some articles to enter queue, in case of full cache
|
||||
qsize = self.queue.qsize()
|
||||
if (not ArticleCache.do.reserve_space(lines) and qsize > MAX_DECODE_QUEUE) or (qsize > LIMIT_DECODE_QUEUE):
|
||||
sabnzbd.downloader.Downloader.do.delay()
|
||||
|
||||
def stop(self):
|
||||
# Put multiple to stop all decoders
|
||||
self.queue.put(None)
|
||||
self.queue.put(None)
|
||||
|
||||
def run(self):
|
||||
from sabnzbd.nzbqueue import NzbQueue
|
||||
while 1:
|
||||
# Sleep to allow decoder/assembler switching
|
||||
sleep(0.0001)
|
||||
sleep(0.001)
|
||||
art_tup = self.queue.get()
|
||||
if not art_tup:
|
||||
break
|
||||
|
||||
article, lines, raw_data = art_tup
|
||||
article, lines = art_tup
|
||||
nzf = article.nzf
|
||||
nzo = nzf.nzo
|
||||
art_id = article.article
|
||||
@@ -94,8 +92,7 @@ class Decoder(Thread):
|
||||
|
||||
# Check if the space that's now free can let us continue the queue?
|
||||
qsize = self.queue.qsize()
|
||||
if (sabnzbd.articlecache.ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and \
|
||||
(qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
|
||||
if (ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and (qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
|
||||
sabnzbd.downloader.Downloader.do.undelay()
|
||||
|
||||
data = None
|
||||
@@ -103,14 +100,14 @@ class Decoder(Thread):
|
||||
found = False # Proper article found
|
||||
logme = None
|
||||
|
||||
if lines or raw_data:
|
||||
if lines:
|
||||
try:
|
||||
if nzo.precheck:
|
||||
raise BadYenc
|
||||
register = True
|
||||
logging.debug("Decoding %s", art_id)
|
||||
|
||||
data = decode(article, lines, raw_data)
|
||||
data = decode(article, lines)
|
||||
nzf.article_count += 1
|
||||
found = True
|
||||
|
||||
@@ -121,7 +118,7 @@ class Decoder(Thread):
|
||||
|
||||
sabnzbd.downloader.Downloader.do.pause()
|
||||
article.fetcher = None
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
register = False
|
||||
|
||||
except MemoryError, e:
|
||||
@@ -133,7 +130,7 @@ class Decoder(Thread):
|
||||
|
||||
sabnzbd.downloader.Downloader.do.pause()
|
||||
article.fetcher = None
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
register = False
|
||||
|
||||
except CrcError, e:
|
||||
@@ -142,18 +139,17 @@ class Decoder(Thread):
|
||||
|
||||
data = e.data
|
||||
|
||||
except (BadYenc, ValueError):
|
||||
except BadYenc:
|
||||
# Handles precheck and badly formed articles
|
||||
killed = False
|
||||
found = False
|
||||
data_to_check = lines or raw_data
|
||||
if nzo.precheck and data_to_check and data_to_check[0].startswith('223 '):
|
||||
if nzo.precheck and lines and lines[0].startswith('223 '):
|
||||
# STAT was used, so we only get a status code
|
||||
found = True
|
||||
else:
|
||||
# Examine headers (for precheck) or body (for download)
|
||||
# And look for DMCA clues (while skipping "X-" headers)
|
||||
for line in data_to_check:
|
||||
for line in lines:
|
||||
lline = line.lower()
|
||||
if 'message-id:' in lline:
|
||||
found = True
|
||||
@@ -166,14 +162,14 @@ class Decoder(Thread):
|
||||
if nzo.precheck:
|
||||
if found and not killed:
|
||||
# Pre-check, proper article found, just register
|
||||
logging.debug('Server %s has article %s', article.fetcher, art_id)
|
||||
logging.debug('Server has article %s', art_id)
|
||||
register = True
|
||||
elif not killed and not found:
|
||||
logme = T('Badly formed yEnc article in %s') % art_id
|
||||
logging.info(logme)
|
||||
|
||||
if not found or killed:
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
new_server_found = self.__search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
logme = None
|
||||
@@ -182,7 +178,8 @@ class Decoder(Thread):
|
||||
logme = T('Unknown Error while decoding %s') % art_id
|
||||
logging.info(logme)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
|
||||
new_server_found = self.__search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
logme = None
|
||||
@@ -194,39 +191,66 @@ class Decoder(Thread):
|
||||
nzo.inc_log('bad_art_log', art_id)
|
||||
|
||||
else:
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
new_server_found = self.__search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
elif nzo.precheck:
|
||||
found = False
|
||||
|
||||
if logme or not found:
|
||||
# Add extra parfiles when there was a damaged article
|
||||
if cfg.prospective_par_download() and nzo.extrapars:
|
||||
nzo.prospective_add(nzf)
|
||||
|
||||
if data:
|
||||
sabnzbd.articlecache.ArticleCache.do.save_article(article, data)
|
||||
ArticleCache.do.save_article(article, data)
|
||||
|
||||
if register:
|
||||
sabnzbd.nzbqueue.NzbQueue.do.register_article(article, found)
|
||||
NzbQueue.do.register_article(article, found)
|
||||
|
||||
def __search_new_server(self, article):
|
||||
from sabnzbd.nzbqueue import NzbQueue
|
||||
article.add_to_try_list(article.fetcher)
|
||||
|
||||
nzf = article.nzf
|
||||
nzo = nzf.nzo
|
||||
|
||||
new_server_found = False
|
||||
fill_server_found = False
|
||||
|
||||
for server in self.servers:
|
||||
if server.active and not article.server_in_try_list(server):
|
||||
if not sabnzbd.highest_server(server):
|
||||
fill_server_found = True
|
||||
else:
|
||||
new_server_found = True
|
||||
break
|
||||
|
||||
# Only found one (or more) fill server(s)
|
||||
if not new_server_found and fill_server_found:
|
||||
article.allow_fill_server = True
|
||||
new_server_found = True
|
||||
|
||||
if new_server_found:
|
||||
article.fetcher = None
|
||||
article.tries = 0
|
||||
|
||||
# Allow all servers to iterate over this nzo and nzf again
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('%s => found at least one untested server', article)
|
||||
|
||||
else:
|
||||
msg = T('%s => missing from all servers, discarding') % article
|
||||
logging.info(msg)
|
||||
article.nzf.nzo.inc_log('missing_art_log', msg)
|
||||
|
||||
return new_server_found
|
||||
|
||||
|
||||
YDEC_TRANS = ''.join([chr((i + 256 - 42) % 256) for i in xrange(256)])
|
||||
def decode(article, data, raw_data):
|
||||
# Do we have SABYenc? Let it do all the work
|
||||
if sabnzbd.decoder.HAVE_SABYENC:
|
||||
decoded_data, output_filename, crc, crc_expected, crc_correct = sabyenc.decode_usenet_chunks(raw_data, article.bytes)
|
||||
|
||||
# Assume it is yenc
|
||||
article.nzf.type = 'yenc'
|
||||
|
||||
# Only set the name if it was found
|
||||
if output_filename:
|
||||
article.nzf.filename = output_filename
|
||||
|
||||
# CRC check
|
||||
if not crc_correct:
|
||||
raise CrcError(crc_expected, crc, decoded_data)
|
||||
|
||||
return decoded_data
|
||||
|
||||
# Continue for _yenc or Python-yEnc
|
||||
def decode(article, data):
|
||||
# Filter out empty ones
|
||||
data = filter(None, data)
|
||||
# No point in continuing if we don't have any data left
|
||||
|
||||
@@ -27,11 +27,9 @@ from nntplib import NNTPPermanentError
|
||||
import socket
|
||||
import random
|
||||
import sys
|
||||
import Queue
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized, synchronized_CV, CV
|
||||
from sabnzbd.constants import MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE
|
||||
from sabnzbd.decoder import Decoder
|
||||
from sabnzbd.newswrapper import NewsWrapper, request_server_info
|
||||
from sabnzbd.articlecache import ArticleCache
|
||||
@@ -40,7 +38,7 @@ import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.bpsmeter import BPSMeter
|
||||
import sabnzbd.scheduler
|
||||
from sabnzbd.misc import from_units, nntp_to_msg
|
||||
from sabnzbd.misc import from_units
|
||||
from sabnzbd.utils.happyeyeballs import happyeyeballs
|
||||
|
||||
|
||||
@@ -90,7 +88,7 @@ class Server(object):
|
||||
self.errormsg = ''
|
||||
self.warning = ''
|
||||
self.info = None # Will hold getaddrinfo() list
|
||||
self.ssl_info = '' # Will hold the type and cipher of SSL connection
|
||||
self.ssl_info = '' # Will hold the type and cipher of SSL connection
|
||||
self.request = False # True if a getaddrinfo() request is pending
|
||||
self.have_body = 'free.xsusenet.com' not in host
|
||||
self.have_stat = True # Assume server has "STAT", until proven otherwise
|
||||
@@ -107,7 +105,7 @@ class Server(object):
|
||||
2 - and self.info has more than 1 entry (read: IP address): Return the quickest IP based on the happyeyeballs algorithm
|
||||
In case of problems: return the host name itself
|
||||
"""
|
||||
# Check if already a successful ongoing connection
|
||||
# Check if already a succesfull ongoing connection
|
||||
if self.busy_threads and self.busy_threads[0].nntp:
|
||||
# Re-use that IP
|
||||
logging.debug('%s: Re-using address %s', self.host, self.busy_threads[0].nntp.host)
|
||||
@@ -198,14 +196,7 @@ class Downloader(Thread):
|
||||
for server in config.get_servers():
|
||||
self.init_server(None, server)
|
||||
|
||||
self.decoder_queue = Queue.Queue()
|
||||
|
||||
# Initialize decoders, only 1 for non-SABYenc
|
||||
self.decoder_workers = []
|
||||
nr_decoders = cfg.nr_decoders() if sabnzbd.decoder.HAVE_SABYENC else 1
|
||||
for i in range(nr_decoders):
|
||||
self.decoder_workers.append(Decoder(self.servers, self.decoder_queue))
|
||||
|
||||
self.decoder = Decoder(self.servers)
|
||||
Downloader.do = self
|
||||
|
||||
def init_server(self, oldserver, newserver):
|
||||
@@ -226,7 +217,7 @@ class Downloader(Thread):
|
||||
timeout = srv.timeout()
|
||||
threads = srv.connections()
|
||||
priority = srv.priority()
|
||||
ssl = srv.ssl()
|
||||
ssl = srv.ssl() and sabnzbd.HAVE_SSL
|
||||
ssl_verify = srv.ssl_verify()
|
||||
username = srv.username()
|
||||
password = srv.password()
|
||||
@@ -385,14 +376,6 @@ class Downloader(Thread):
|
||||
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
|
||||
|
||||
def decode(self, article, lines, raw_data):
|
||||
self.decoder_queue.put((article, lines, raw_data))
|
||||
# See if there's space left in cache, pause otherwise
|
||||
# But do allow some articles to enter queue, in case of full cache
|
||||
qsize = self.decoder_queue.qsize()
|
||||
if (not ArticleCache.do.reserve_space(lines) and qsize > MAX_DECODE_QUEUE) or (qsize > LIMIT_DECODE_QUEUE):
|
||||
sabnzbd.downloader.Downloader.do.delay()
|
||||
|
||||
def run(self):
|
||||
# First check IPv6 connectivity
|
||||
sabnzbd.EXTERNAL_IPV6 = sabnzbd.test_ipv6()
|
||||
@@ -414,9 +397,8 @@ class Downloader(Thread):
|
||||
sabnzbd.HAVE_SSL_CONTEXT = False
|
||||
logging.debug('SSL verification test: %s', sabnzbd.HAVE_SSL_CONTEXT)
|
||||
|
||||
# Start decoders
|
||||
for decoder in self.decoder_workers:
|
||||
decoder.start()
|
||||
# Start decoder
|
||||
self.decoder.start()
|
||||
|
||||
# Kick BPS-Meter to check quota
|
||||
BPSMeter.do.update()
|
||||
@@ -476,7 +458,7 @@ class Downloader(Thread):
|
||||
# Article too old for the server, treat as missing
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('Article %s too old for %s', article.article, server.id)
|
||||
self.decode(article, None, None)
|
||||
self.decoder.decode(article, None)
|
||||
break
|
||||
|
||||
server.idle_threads.remove(nw)
|
||||
@@ -488,7 +470,8 @@ class Downloader(Thread):
|
||||
self.__request_article(nw)
|
||||
else:
|
||||
try:
|
||||
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.id)
|
||||
logging.info("%s@%s: Initiating connection",
|
||||
nw.thrdnum, server.id)
|
||||
nw.init_connect(self.write_fds)
|
||||
except:
|
||||
logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.id, sys.exc_info()[1])
|
||||
@@ -503,10 +486,8 @@ class Downloader(Thread):
|
||||
break
|
||||
|
||||
if empty:
|
||||
# Start decoders
|
||||
for decoder in self.decoder_workers:
|
||||
decoder.stop()
|
||||
decoder.join()
|
||||
self.decoder.stop()
|
||||
self.decoder.join()
|
||||
|
||||
for server in self.servers:
|
||||
server.stop(self.read_fds, self.write_fds)
|
||||
@@ -531,17 +512,17 @@ class Downloader(Thread):
|
||||
if readkeys or writekeys:
|
||||
read, write, error = select.select(readkeys, writekeys, (), 1.0)
|
||||
|
||||
# Why check so often when so few things happened?
|
||||
# Why check so often when so few things happend?
|
||||
if self.can_be_slowed and len(readkeys) >= 8 and len(read) <= 2:
|
||||
time.sleep(0.05)
|
||||
time.sleep(0.01)
|
||||
|
||||
# Need to initialize the check during first 20 seconds
|
||||
# Need to initalize the check during first 20 seconds
|
||||
if self.can_be_slowed is None or self.can_be_slowed_timer:
|
||||
# Wait for stable speed to start testing
|
||||
if not self.can_be_slowed_timer and BPSMeter.do.get_stable_speed(timespan=10):
|
||||
self.can_be_slowed_timer = time.time()
|
||||
|
||||
# Check 10 seconds after enabling slowdown
|
||||
# Check 10 seconds after enabeling slowdown
|
||||
if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10:
|
||||
# Now let's check if it was stable in the last 10 seconds
|
||||
self.can_be_slowed = (BPSMeter.do.get_stable_speed(timespan=10) > 0)
|
||||
@@ -613,17 +594,17 @@ class Downloader(Thread):
|
||||
if nzo:
|
||||
nzo.update_download_stats(BPSMeter.do.get_bps(), server.id, bytes)
|
||||
|
||||
to_decoder = True
|
||||
if not done and nw.status_code != '222':
|
||||
if not nw.connected or nw.status_code == '480':
|
||||
if len(nw.lines) == 1:
|
||||
code = nw.lines[0][:3]
|
||||
if not nw.connected or code == '480':
|
||||
done = False
|
||||
|
||||
try:
|
||||
nw.finish_connect(nw.status_code)
|
||||
nw.finish_connect(code)
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
|
||||
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nw.lines[0])
|
||||
nw.lines = []
|
||||
nw.data = []
|
||||
nw.data = ''
|
||||
except NNTPPermanentError, error:
|
||||
# Handle login problems
|
||||
block = False
|
||||
@@ -697,7 +678,7 @@ class Downloader(Thread):
|
||||
continue
|
||||
except:
|
||||
logging.error(T('Connecting %s@%s failed, message=%s'),
|
||||
nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
|
||||
nw.thrdnum, nw.server.id, nw.lines[0])
|
||||
# No reset-warning needed, above logging is sufficient
|
||||
self.__reset_nw(nw, None, warn=False)
|
||||
|
||||
@@ -705,28 +686,30 @@ class Downloader(Thread):
|
||||
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.id)
|
||||
self.__request_article(nw)
|
||||
|
||||
elif nw.status_code == '223':
|
||||
elif code == '223':
|
||||
done = True
|
||||
logging.debug('Article <%s> is present', article.article)
|
||||
self.decoder.decode(article, nw.lines)
|
||||
|
||||
elif nw.status_code == '211':
|
||||
elif code == '211':
|
||||
done = False
|
||||
logging.debug("group command ok -> %s", nntp_to_msg(nw.data))
|
||||
|
||||
logging.debug("group command ok -> %s",
|
||||
nw.lines)
|
||||
nw.group = nw.article.nzf.nzo.group
|
||||
nw.lines = []
|
||||
nw.data = []
|
||||
nw.data = ''
|
||||
self.__request_article(nw)
|
||||
|
||||
elif nw.status_code in ('411', '423', '430'):
|
||||
elif code in ('411', '423', '430'):
|
||||
done = True
|
||||
to_decoder = False
|
||||
logging.debug('Thread %s@%s: Article %s missing (error=%s)',
|
||||
nw.thrdnum, nw.server.id, article.article, nw.status_code)
|
||||
# Search for new article
|
||||
if not self.search_new_server(article):
|
||||
sabnzbd.nzbqueue.NzbQueue.do.register_article(article, False)
|
||||
nw.lines = None
|
||||
|
||||
elif nw.status_code == '480':
|
||||
logging.info('Thread %s@%s: Article ' +
|
||||
'%s missing (error=%s)',
|
||||
nw.thrdnum, nw.server.id, article.article, code)
|
||||
|
||||
elif code == '480':
|
||||
if server.active:
|
||||
server.active = False
|
||||
server.errormsg = T('Server %s requires user/password') % ''
|
||||
@@ -735,7 +718,7 @@ class Downloader(Thread):
|
||||
msg = T('Server %s requires user/password') % nw.server.id
|
||||
self.__reset_nw(nw, msg, quit=True)
|
||||
|
||||
elif nw.status_code == '500':
|
||||
elif code == '500':
|
||||
if nzo.precheck:
|
||||
# Assume "STAT" command is not supported
|
||||
server.have_stat = False
|
||||
@@ -745,7 +728,7 @@ class Downloader(Thread):
|
||||
server.have_body = False
|
||||
logging.debug('Server %s does not support BODY', server.id)
|
||||
nw.lines = []
|
||||
nw.data = []
|
||||
nw.data = ''
|
||||
self.__request_article(nw)
|
||||
|
||||
if done:
|
||||
@@ -753,10 +736,7 @@ class Downloader(Thread):
|
||||
server.errormsg = server.warning = ''
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.id, article.article)
|
||||
|
||||
# Missing articles are not decoded
|
||||
if to_decoder:
|
||||
self.decode(article, nw.lines, nw.data)
|
||||
self.decoder.decode(article, nw.lines)
|
||||
|
||||
nw.soft_reset()
|
||||
server.busy_threads.remove(nw)
|
||||
@@ -804,7 +784,7 @@ class Downloader(Thread):
|
||||
if article:
|
||||
if article.tries > cfg.max_art_tries() and (article.fetcher.optional or not cfg.max_art_opt()):
|
||||
# Too many tries on this server, consider article missing
|
||||
self.decode(article, None, None)
|
||||
self.decoder.decode(article, None)
|
||||
else:
|
||||
# Remove this server from try_list
|
||||
article.fetcher = None
|
||||
@@ -847,23 +827,6 @@ class Downloader(Thread):
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
self.__reset_nw(nw, "server broke off connection", quit=False)
|
||||
|
||||
def search_new_server(self, article):
|
||||
# Search new server
|
||||
article.add_to_try_list(article.fetcher)
|
||||
for server in self.servers:
|
||||
if server.active and not article.server_in_try_list(server):
|
||||
if server.priority >= article.fetcher.priority:
|
||||
article.fetcher = None
|
||||
article.tries = 0
|
||||
# Allow all servers for this nzo and nzf again (but not for this article)
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article.nzf, article.nzf.nzo)
|
||||
return True
|
||||
|
||||
msg = T('%s => missing from all servers, discarding') % article
|
||||
logging.debug(msg)
|
||||
article.nzf.nzo.inc_log('missing_art_log', msg)
|
||||
return False
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Timed restart of servers admin.
|
||||
# For each server all planned events are kept in a list.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -780,7 +780,7 @@ def exit_sab(value):
|
||||
sys.stdout.flush()
|
||||
if getattr(sys, 'frozen', None) == 'macosx_app':
|
||||
sabnzbd.SABSTOP = True
|
||||
from PyObjCTools import AppHelper
|
||||
from PyObjCTools import AppHelper # @UnresolvedImport
|
||||
AppHelper.stopEventLoop()
|
||||
sys.exit(value)
|
||||
|
||||
@@ -1427,6 +1427,15 @@ def is_writable(path):
|
||||
return True
|
||||
|
||||
|
||||
def format_source_url(url):
|
||||
""" Format URL suitable for 'Source' stage """
|
||||
if sabnzbd.HAVE_SSL:
|
||||
prot = 'https'
|
||||
else:
|
||||
prot = 'http:'
|
||||
return url
|
||||
|
||||
|
||||
def get_base_url(url):
|
||||
""" Return only the true root domain for the favicon, so api.oznzb.com -> oznzb.com
|
||||
But also api.althub.co.za -> althub.co.za
|
||||
@@ -1540,11 +1549,3 @@ def get_urlbase(url):
|
||||
""" Return the base URL (like http://server.domain.com/) """
|
||||
parsed_uri = urlparse(url)
|
||||
return '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
|
||||
|
||||
|
||||
def nntp_to_msg(text):
|
||||
""" Format raw NNTP data for display """
|
||||
if isinstance(text, list):
|
||||
text = text[0]
|
||||
lines = text.split('\r\n')
|
||||
return lines[0]
|
||||
|
||||
@@ -34,7 +34,7 @@ from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, \
|
||||
import sabnzbd.utils.rarfile as rarfile
|
||||
from sabnzbd.misc import format_time_string, find_on_path, make_script_path, int_conv, \
|
||||
flag_file, real_path, globber, globber_full, get_all_passwords, renamer, clip_path, \
|
||||
has_win_device, calc_age
|
||||
has_win_device
|
||||
from sabnzbd.tvsort import SeriesSorter
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import Status, QCHECK_FILE, RENAMES_FILE
|
||||
@@ -80,7 +80,6 @@ ZIP_COMMAND = None
|
||||
SEVEN_COMMAND = None
|
||||
IONICE_COMMAND = None
|
||||
RAR_PROBLEM = False
|
||||
PAR2_MT = True
|
||||
RAR_VERSION = 0
|
||||
|
||||
|
||||
@@ -145,7 +144,6 @@ def find_programs(curdir):
|
||||
sabnzbd.newsunpack.PAR2C_COMMAND = sabnzbd.newsunpack.PAR2_COMMAND
|
||||
|
||||
if not (sabnzbd.WIN32 or sabnzbd.DARWIN):
|
||||
# Run check on rar version
|
||||
version, original = unrar_check(sabnzbd.newsunpack.RAR_COMMAND)
|
||||
sabnzbd.newsunpack.RAR_PROBLEM = not original or version < 380
|
||||
sabnzbd.newsunpack.RAR_VERSION = version
|
||||
@@ -153,38 +151,19 @@ def find_programs(curdir):
|
||||
if sabnzbd.newsunpack.RAR_PROBLEM:
|
||||
logging.info('Problematic UNRAR')
|
||||
|
||||
# Run check on par2-multicore
|
||||
sabnzbd.newsunpack.PAR2_MT = par2_mt_check(sabnzbd.newsunpack.PAR2_COMMAND)
|
||||
|
||||
|
||||
ENV_NZO_FIELDS = ['bytes', 'bytes_downloaded', 'bytes_tried', 'cat', 'duplicate', 'encrypted',
|
||||
'fail_msg', 'filename', 'final_name', 'group', 'nzo_id', 'oversized', 'password', 'pp',
|
||||
'priority', 'repair', 'script', 'status', 'unpack', 'unwanted_ext', 'url']
|
||||
|
||||
def external_processing(extern_proc, nzo, complete_dir, nicename, status):
|
||||
def external_processing(extern_proc, complete_dir, filename, nicename, cat, group, status, failure_url):
|
||||
""" Run a user postproc script, return console output and exit value """
|
||||
command = [str(extern_proc), str(complete_dir), str(nzo.filename),
|
||||
str(nicename), '', str(nzo.cat), str(nzo.group), str(status)]
|
||||
command = [str(extern_proc), str(complete_dir), str(filename),
|
||||
str(nicename), '', str(cat), str(group), str(status)]
|
||||
|
||||
failure_url = nzo.nzo_info.get('failure', '')
|
||||
if failure_url:
|
||||
command.append(str(failure_url))
|
||||
|
||||
# Fields not in the NZO directly
|
||||
extra_env_fields = {'failure_url': failure_url,
|
||||
'complete_dir': complete_dir,
|
||||
'pp_status': status,
|
||||
'download_time': nzo.nzo_info.get('download_time', ''),
|
||||
'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq),
|
||||
'age': calc_age(nzo.avg_date),
|
||||
'version': sabnzbd.__version__}
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = create_env(nzo, extra_env_fields)
|
||||
|
||||
env = fix_env()
|
||||
logging.info('Running external script %s(%s, %s, %s, %s, %s, %s, %s, %s)',
|
||||
extern_proc, complete_dir, nzo.filename, nicename, '', nzo.cat, nzo.group, status, failure_url)
|
||||
extern_proc, complete_dir, filename, nicename, '', cat, group, status, failure_url)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
startupinfo=stup, env=env, creationflags=creationflags)
|
||||
@@ -203,7 +182,7 @@ def external_script(script, p1, p2, p3=None, p4=None):
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = create_env()
|
||||
env = fix_env()
|
||||
logging.info('Running user script %s(%s, %s)', script, p1, p2)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
@@ -667,8 +646,6 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
|
||||
elif line.startswith('Cannot create') and sabnzbd.WIN32 and extraction_path.startswith('\\\\?\\'):
|
||||
# Can be due to Unicode problems on Windows, let's retry
|
||||
fail = 4
|
||||
# Kill the process (can stay in endless loop on Windows Server)
|
||||
p.kill()
|
||||
|
||||
elif line.startswith('Cannot create'):
|
||||
line2 = proc.readline()
|
||||
@@ -827,11 +804,12 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips):
|
||||
|
||||
def ZIP_Extract(zipfile, extraction_path, one_folder):
|
||||
""" Unzip single zip set 'zipfile' to 'extraction_path' """
|
||||
command = ['%s' % ZIP_COMMAND, '-o', '-Pnone', '%s' % clip_path(zipfile),
|
||||
'-d%s' % extraction_path]
|
||||
|
||||
if one_folder or cfg.flat_unpack():
|
||||
command.insert(3, '-j') # Unpack without folders
|
||||
option = '-j' # Unpack without folders
|
||||
else:
|
||||
option = '-qq' # Dummy option
|
||||
command = ['%s' % ZIP_COMMAND, '-o', '-qq', option, '-Pnone', '%s' % zipfile,
|
||||
'-d%s' % extraction_path]
|
||||
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
logging.debug('Starting unzip: %s', command)
|
||||
@@ -840,7 +818,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
|
||||
startupinfo=stup, creationflags=creationflags)
|
||||
|
||||
output = p.stdout.read()
|
||||
logging.debug('unzip output: \n%s', output)
|
||||
logging.debug('unzip output: %s', output)
|
||||
|
||||
ret = p.wait()
|
||||
|
||||
@@ -1549,46 +1527,19 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, sin
|
||||
return finished, readd, pars, datafiles, used_joinables, used_par2
|
||||
|
||||
|
||||
def create_env(nzo=None, extra_env_fields=None):
|
||||
""" Modify the environment for pp-scripts with extra information
|
||||
OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
|
||||
def fix_env():
|
||||
""" OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
|
||||
other: return None
|
||||
"""
|
||||
env = os.environ.copy()
|
||||
|
||||
# Are we adding things?
|
||||
if nzo:
|
||||
for field in ENV_NZO_FIELDS:
|
||||
try:
|
||||
field_value = getattr(nzo, field)
|
||||
# Special filters for Python types
|
||||
if field_value is None:
|
||||
env['SAB_' + field.upper()] = ''
|
||||
elif isinstance(field_value, bool):
|
||||
env['SAB_' + field.upper()] = str(field_value*1)
|
||||
else:
|
||||
env['SAB_' + field.upper()] = str(deunicode(field_value))
|
||||
except:
|
||||
# Catch key/unicode errors
|
||||
pass
|
||||
|
||||
for field in extra_env_fields:
|
||||
try:
|
||||
env['SAB_' + field.upper()] = str(deunicode(extra_env_fields[field]))
|
||||
except:
|
||||
# Catch key/unicode errors
|
||||
pass
|
||||
|
||||
if sabnzbd.DARWIN:
|
||||
env = os.environ.copy()
|
||||
if 'PYTHONPATH' in env:
|
||||
del env['PYTHONPATH']
|
||||
if 'PYTHONHOME' in env:
|
||||
del env['PYTHONHOME']
|
||||
elif not nzo:
|
||||
# No modification
|
||||
return env
|
||||
else:
|
||||
return None
|
||||
return env
|
||||
|
||||
|
||||
def userxbit(filename):
|
||||
# Returns boolean if the x-bit for user is set on the given file
|
||||
@@ -1763,16 +1714,7 @@ def QuickCheck(set, nzo):
|
||||
nzf_list = nzo.finished_files
|
||||
renames = {}
|
||||
|
||||
# Files to ignore
|
||||
ignore_ext = cfg.quick_check_ext_ignore()
|
||||
|
||||
for file in md5pack:
|
||||
# Ignore these files
|
||||
if os.path.splitext(file)[1].lower().replace('.', '') in ignore_ext:
|
||||
logging.debug('Quick-check ignoring file %s', file)
|
||||
result = True
|
||||
continue
|
||||
|
||||
found = False
|
||||
file_platform = platform_encode(file)
|
||||
for nzf in nzf_list:
|
||||
@@ -1849,18 +1791,6 @@ def unrar_check(rar):
|
||||
return version, original
|
||||
|
||||
|
||||
def par2_mt_check(par2_path):
|
||||
""" Detect if we have multicore par2 variants """
|
||||
try:
|
||||
par2_version = run_simple([par2_path, '-V'])
|
||||
# We look either for par2-tbb or par2-mt
|
||||
if 'par2cmdline-mt' in par2_version or 'Thread Building Blocks' in par2_version:
|
||||
return True
|
||||
except:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def sfv_check(sfv_path):
|
||||
""" Verify files using SFV file,
|
||||
input: full path of sfv, file are assumed to be relative to sfv
|
||||
@@ -1943,7 +1873,7 @@ def pre_queue(name, pp, cat, script, priority, size, groups):
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = create_env()
|
||||
env = fix_env()
|
||||
logging.info('Running pre-queue script %s', command)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
|
||||
@@ -27,29 +27,44 @@ import time
|
||||
import logging
|
||||
import re
|
||||
import select
|
||||
import ssl
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import *
|
||||
import sabnzbd.cfg
|
||||
from sabnzbd.misc import nntp_to_msg
|
||||
|
||||
# Have to make errors available under Python <2.7.9
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
WantReadError = ssl.SSLWantReadError
|
||||
CertificateError = ssl.CertificateError
|
||||
import threading
|
||||
_RLock = threading.RLock
|
||||
del threading
|
||||
|
||||
# Import SSL if available
|
||||
if sabnzbd.HAVE_SSL:
|
||||
import ssl
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
WantReadError = ssl.SSLWantReadError
|
||||
CertificateError = ssl.CertificateError
|
||||
else:
|
||||
WantReadError = ssl.SSLError
|
||||
CertificateError = ssl.SSLError
|
||||
else:
|
||||
WantReadError = ssl.SSLError
|
||||
CertificateError = ssl.SSLError
|
||||
# Dummy class so this exception is ignored by clients without ssl installed
|
||||
class WantReadError(Exception):
|
||||
def __init__(self, value):
|
||||
self.parameter = value
|
||||
def __str__(self):
|
||||
return repr(self.parameter)
|
||||
class CertificateError(Exception):
|
||||
def __init__(self, value):
|
||||
self.parameter = value
|
||||
def __str__(self):
|
||||
return repr(self.parameter)
|
||||
|
||||
|
||||
# Set pre-defined socket timeout
|
||||
socket.setdefaulttimeout(DEF_TIMEOUT)
|
||||
|
||||
# getaddrinfo() can be very slow. In some situations this can lead
|
||||
# to delayed starts and timeouts on connections.
|
||||
# Because of this, the results will be cached in the server object.
|
||||
|
||||
|
||||
def _retrieve_info(server):
|
||||
""" Async attempt to run getaddrinfo() for specified server """
|
||||
info = GetServerParms(server.host, server.port)
|
||||
@@ -118,7 +133,7 @@ def con(sock, host, port, sslenabled, write_fds, nntp):
|
||||
try:
|
||||
sock.connect((host, port))
|
||||
sock.setblocking(0)
|
||||
if sslenabled:
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
# Log SSL/TLS info
|
||||
logging.info("%s@%s: Connected using %s (%s)",
|
||||
nntp.nw.thrdnum, nntp.nw.server.host, get_ssl_version(sock), sock.cipher()[0])
|
||||
@@ -149,6 +164,8 @@ def con(sock, host, port, sslenabled, write_fds, nntp):
|
||||
nntp.error(e)
|
||||
|
||||
|
||||
|
||||
|
||||
def probablyipv4(ip):
|
||||
if ip.count('.') == 3 and re.sub('[0123456789.]', '', ip) == '':
|
||||
return True
|
||||
@@ -187,7 +204,7 @@ class NNTP(object):
|
||||
if probablyipv6(host):
|
||||
af = socket.AF_INET6
|
||||
|
||||
if sslenabled:
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
# Use context or just wrapper
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
# Setup the SSL socket
|
||||
@@ -211,6 +228,10 @@ class NNTP(object):
|
||||
ciphers = sabnzbd.cfg.ssl_ciphers() if sabnzbd.cfg.ssl_ciphers() else None
|
||||
# Use a regular wrapper, no certificate validation
|
||||
self.sock = ssl.wrap_socket(socket.socket(af, socktype, proto), ciphers=ciphers)
|
||||
|
||||
elif sslenabled and not sabnzbd.HAVE_SSL:
|
||||
logging.error(T('Error importing OpenSSL module. Connecting with NON-SSL'))
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
else:
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
|
||||
@@ -223,7 +244,7 @@ class NNTP(object):
|
||||
# if blocking (server test) only wait for 15 seconds during connect until timeout
|
||||
self.sock.settimeout(15)
|
||||
self.sock.connect((self.host, self.port))
|
||||
if sslenabled:
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
# Log SSL/TLS info
|
||||
logging.info("%s@%s: Connected using %s (%s)",
|
||||
self.nw.thrdnum, self.nw.server.host, get_ssl_version(self.sock), self.sock.cipher()[0])
|
||||
@@ -247,6 +268,7 @@ class NNTP(object):
|
||||
finally:
|
||||
self.error(e)
|
||||
|
||||
|
||||
def error(self, error):
|
||||
if 'SSL23_GET_SERVER_HELLO' in str(error) or 'SSL3_GET_RECORD' in str(error):
|
||||
error = T('This server does not allow SSL on this port')
|
||||
@@ -279,7 +301,7 @@ class NewsWrapper(object):
|
||||
|
||||
self.timeout = None
|
||||
self.article = None
|
||||
self.data = []
|
||||
self.data = ''
|
||||
self.lines = []
|
||||
|
||||
self.nntp = None
|
||||
@@ -296,14 +318,6 @@ class NewsWrapper(object):
|
||||
self.pass_ok = False
|
||||
self.force_login = False
|
||||
|
||||
@property
|
||||
def status_code(self):
|
||||
""" Shorthand to get the code """
|
||||
try:
|
||||
return self.data[0][:3]
|
||||
except:
|
||||
return ''
|
||||
|
||||
def init_connect(self, write_fds):
|
||||
self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl,
|
||||
self.server.send_group, self, self.server.username, self.server.password,
|
||||
@@ -323,7 +337,7 @@ class NewsWrapper(object):
|
||||
if code in ('501',) and self.user_sent:
|
||||
# Change to a sensible text
|
||||
code = '481'
|
||||
self.data[0] = T('Authentication failed, check username/password.')
|
||||
self.lines[0] = T('Authentication failed, check username/password.')
|
||||
self.user_ok = True
|
||||
self.pass_sent = True
|
||||
|
||||
@@ -336,11 +350,10 @@ class NewsWrapper(object):
|
||||
self.pass_ok = False
|
||||
|
||||
if code in ('400', '502'):
|
||||
raise NNTPPermanentError(nntp_to_msg(self.data))
|
||||
raise NNTPPermanentError(self.lines[0])
|
||||
elif not self.user_sent:
|
||||
command = 'authinfo user %s\r\n' % self.server.username
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
self.user_sent = True
|
||||
elif not self.user_ok:
|
||||
if code == '381':
|
||||
@@ -355,12 +368,11 @@ class NewsWrapper(object):
|
||||
if self.user_ok and not self.pass_sent:
|
||||
command = 'authinfo pass %s\r\n' % self.server.password
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
self.pass_sent = True
|
||||
elif self.user_ok and not self.pass_ok:
|
||||
if code != '281':
|
||||
# Assume that login failed (code 481 or other)
|
||||
raise NNTPPermanentError(nntp_to_msg(self.data))
|
||||
raise NNTPPermanentError(self.lines[0])
|
||||
else:
|
||||
self.connected = True
|
||||
|
||||
@@ -378,13 +390,11 @@ class NewsWrapper(object):
|
||||
else:
|
||||
command = 'ARTICLE <%s>\r\n' % (self.article.article)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
|
||||
def send_group(self, group):
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
command = 'GROUP %s\r\n' % (group)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
|
||||
def recv_chunk(self, block=False):
|
||||
""" Receive data, return #bytes, done, skip """
|
||||
@@ -412,54 +422,32 @@ class NewsWrapper(object):
|
||||
else:
|
||||
return (0, False, True)
|
||||
|
||||
# Data is processed differently depending on C-yEnc version
|
||||
if sabnzbd.decoder.HAVE_SABYENC:
|
||||
# Append so we can do 1 join(), much faster than multiple!
|
||||
self.data.append(chunk)
|
||||
self.data += chunk
|
||||
new_lines = self.data.split('\r\n')
|
||||
# See if incorrect newline-only was used
|
||||
# Do this as a special case to prevent using extra memory
|
||||
# for normal articles
|
||||
if len(new_lines) == 1 and '\r' not in self.data:
|
||||
new_lines = self.data.split('\n')
|
||||
|
||||
# Official end-of-article is ".\r\n" but sometimes it can get lost between 2 chunks
|
||||
chunk_len = len(chunk)
|
||||
if chunk[-5:] == '\r\n.\r\n':
|
||||
return (chunk_len, True, False)
|
||||
elif chunk_len < 5 and len(self.data) > 1:
|
||||
# We need to make sure the end is not split over 2 chunks
|
||||
# This is faster than join()
|
||||
combine_chunk = self.data[-2][-5:] + chunk
|
||||
if combine_chunk[-5:] == '\r\n.\r\n':
|
||||
return (chunk_len, True, False)
|
||||
self.data = new_lines.pop()
|
||||
|
||||
# Still in middle of data, so continue!
|
||||
return (chunk_len, False, False)
|
||||
# Already remove the starting dots
|
||||
for i in xrange(len(new_lines)):
|
||||
if new_lines[i][:2] == '..':
|
||||
new_lines[i] = new_lines[i][1:]
|
||||
self.lines.extend(new_lines)
|
||||
|
||||
if self.lines and self.lines[-1] == '.':
|
||||
self.lines = self.lines[1:-1]
|
||||
return (len(chunk), True, False)
|
||||
else:
|
||||
# Perform manditory splitting
|
||||
new_lines = chunk.split('\n')
|
||||
|
||||
# Already remove the starting dots
|
||||
for i in xrange(len(new_lines)):
|
||||
if new_lines[i][:2] == '..':
|
||||
new_lines[i] = new_lines[i][1:]
|
||||
# Old Yenc can't handle newlines in it's data
|
||||
if not sabnzbd.decoder.HAVE_YENC and new_lines[i]:
|
||||
if new_lines[i][0] == '\n':
|
||||
new_lines[i] = new_lines[i][1:]
|
||||
if new_lines[i][-1] == '\r':
|
||||
new_lines[i] = new_lines[i][:-1]
|
||||
|
||||
self.lines.extend(new_lines)
|
||||
|
||||
# For status-code purposes
|
||||
if not self.data:
|
||||
self.data.append(chunk)
|
||||
|
||||
if self.lines and (self.lines[-1] == '.' or self.lines[-2] == '.\r' or self.lines[-2] == '.'):
|
||||
return (len(chunk), True, False)
|
||||
else:
|
||||
return (len(chunk), False, False)
|
||||
return (len(chunk), False, False)
|
||||
|
||||
def soft_reset(self):
|
||||
self.timeout = None
|
||||
self.article = None
|
||||
self.data = []
|
||||
self.data = ''
|
||||
self.lines = []
|
||||
|
||||
def hard_reset(self, wait=True, quit=True):
|
||||
|
||||
@@ -120,7 +120,6 @@ def check_classes(gtype, section):
|
||||
logging.debug('Incorrect Notify option %s:%s_prio_%s', section, section, gtype)
|
||||
return False
|
||||
|
||||
|
||||
def get_prio(gtype, section):
|
||||
""" Check if `gtype` is enabled in `section` """
|
||||
try:
|
||||
@@ -178,7 +177,6 @@ def send_notification(title, msg, gtype):
|
||||
if have_ntfosd() and sabnzbd.cfg.ntfosd_enable() and check_classes(gtype, 'ntfosd'):
|
||||
send_notify_osd(title, msg)
|
||||
|
||||
|
||||
def reset_growl():
|
||||
""" Reset Growl (after changing language) """
|
||||
global _GROWL, _GROWL_REG
|
||||
@@ -519,7 +517,6 @@ def send_nscript(title, msg, gtype, force=False, test=None):
|
||||
return T('Notification script "%s" does not exist') % script_path
|
||||
return ''
|
||||
|
||||
|
||||
def send_windows(title, msg, gtype):
|
||||
if sabnzbd.WINTRAY and not sabnzbd.WINTRAY.terminate:
|
||||
try:
|
||||
|
||||
@@ -440,7 +440,7 @@ class NzbQueue:
|
||||
if nzo_id in self.__nzo_table:
|
||||
nzo = self.__nzo_table.pop(nzo_id)
|
||||
nzo.deleted = True
|
||||
if cleanup and nzo.is_gone():
|
||||
if cleanup and nzo.status not in (Status.COMPLETED, Status.FAILED):
|
||||
nzo.status = Status.DELETED
|
||||
self.__nzo_list.remove(nzo)
|
||||
|
||||
@@ -900,7 +900,7 @@ class NzbQueue:
|
||||
bytes_left_previous_page += b_left
|
||||
|
||||
if (not search) or search in nzo.final_name_pw_clean.lower():
|
||||
if (not limit) or (start <= n < start + limit):
|
||||
if (not limit) or (start <= n < start+limit):
|
||||
pnfo_list.append(nzo.gather_info())
|
||||
n += 1
|
||||
|
||||
|
||||
@@ -28,12 +28,16 @@ import datetime
|
||||
import xml.sax
|
||||
import xml.sax.handler
|
||||
import xml.sax.xmlreader
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except ImportError:
|
||||
from StringIO import StringIO
|
||||
try:
|
||||
import hashlib
|
||||
new_md5 = hashlib.md5
|
||||
except:
|
||||
import md5
|
||||
new_md5 = md5.new
|
||||
|
||||
# SABnzbd modules
|
||||
import sabnzbd
|
||||
@@ -42,9 +46,10 @@ from sabnzbd.constants import sample_match, GIGI, ATTRIB_FILE, JOB_ADMIN, \
|
||||
PAUSED_PRIORITY, TOP_PRIORITY, DUP_PRIORITY, REPAIR_PRIORITY, \
|
||||
RENAMES_FILE, Status, PNFO
|
||||
from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
|
||||
get_unique_path, get_admin_path, remove_all, sanitize_filename, globber_full, \
|
||||
sanitize_foldername, int_conv, set_permissions, format_time_string, long_path, \
|
||||
trim_win_path, fix_unix_encoding, calc_age
|
||||
get_unique_path, get_admin_path, remove_all, format_source_url, \
|
||||
sanitize_filename, globber_full, sanitize_foldername, int_conv, \
|
||||
set_permissions, format_time_string, long_path, trim_win_path, \
|
||||
fix_unix_encoding, calc_age
|
||||
from sabnzbd.decorators import synchronized, IO_LOCK
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
@@ -77,6 +82,8 @@ class Article(TryList):
|
||||
TryList.__init__(self)
|
||||
|
||||
self.fetcher = None
|
||||
self.allow_fill_server = False
|
||||
|
||||
self.article = article
|
||||
self.art_id = None
|
||||
self.bytes = bytes
|
||||
@@ -168,6 +175,7 @@ class Article(TryList):
|
||||
TryList.__init__(self)
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
self.allow_fill_server = False
|
||||
self.tries = 0
|
||||
|
||||
def __repr__(self):
|
||||
@@ -362,7 +370,7 @@ class NzbParser(xml.sax.handler.ContentHandler):
|
||||
self.skipped_files = 0
|
||||
self.nzf_list = []
|
||||
self.groups = []
|
||||
self.md5 = hashlib.md5()
|
||||
self.md5 = new_md5()
|
||||
self.filter = remove_samples
|
||||
self.now = time.time()
|
||||
|
||||
@@ -909,6 +917,7 @@ class NzbObject(TryList):
|
||||
# Raise error, so it's not added
|
||||
raise TypeError
|
||||
|
||||
|
||||
def check_for_dupe(self, nzf):
|
||||
filename = nzf.filename
|
||||
|
||||
@@ -935,6 +944,7 @@ class NzbObject(TryList):
|
||||
self.servercount[serverid] = bytes
|
||||
self.bytes_downloaded += bytes
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def remove_nzf(self, nzf):
|
||||
if nzf in self.files:
|
||||
@@ -1032,18 +1042,13 @@ class NzbObject(TryList):
|
||||
# set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
self.fail_msg = T('Aborted, cannot be completed') + ' - https://sabnzbd.org/not-complete'
|
||||
self.fail_msg = T('Aborted, cannot be completed') + ' - https://sabnzbd.org/not-complete'
|
||||
self.set_unpack_info('Download', self.fail_msg, unique=False)
|
||||
logging.debug('Abort job "%s", due to impossibility to complete it', self.final_name_pw_clean)
|
||||
# Update the last check time
|
||||
sabnzbd.LAST_HISTORY_UPDATE = time.time()
|
||||
return True, True, True
|
||||
|
||||
if not found:
|
||||
# Add extra parfiles when there was a damaged article and not pre-checking
|
||||
if cfg.prospective_par_download() and self.extrapars and not self.precheck:
|
||||
self.prospective_add(nzf)
|
||||
|
||||
if reset:
|
||||
self.reset_try_list()
|
||||
|
||||
@@ -1151,8 +1156,8 @@ class NzbObject(TryList):
|
||||
if dif > 0:
|
||||
prefix += T('WAIT %s sec') % dif + ' / ' # : Queue indicator for waiting URL fetch
|
||||
if (self.avg_stamp + float(cfg.propagation_delay() * 60)) > time.time() and self.priority != TOP_PRIORITY:
|
||||
wait_time = int((self.avg_stamp + float(cfg.propagation_delay() * 60) - time.time()) / 60 + 0.5)
|
||||
prefix += T('PROPAGATING %s min') % wait_time + ' / ' # : Queue indicator while waiting for propagation of post
|
||||
wait_time = int((self.avg_stamp + float(cfg.propagation_delay() * 60) - time.time())/60 + 0.5)
|
||||
prefix += T('PROPAGATING %s min') % wait_time + ' / ' # : Queue indicator while waiting for propagtion of post
|
||||
return '%s%s' % (prefix, self.final_name)
|
||||
|
||||
@property
|
||||
@@ -1220,6 +1225,7 @@ class NzbObject(TryList):
|
||||
|
||||
__re_quick_par2_check = re.compile(r'\.par2\W*', re.I)
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def prospective_add(self, nzf):
|
||||
""" Add par2 files to compensate for missing articles
|
||||
@@ -1255,6 +1261,7 @@ class NzbObject(TryList):
|
||||
# Reset all try lists
|
||||
self.reset_all_try_lists()
|
||||
|
||||
|
||||
def check_quality(self, req_ratio=0):
|
||||
""" Determine amount of articles present on servers
|
||||
and return (gross available, nett) bytes
|
||||
@@ -1317,7 +1324,7 @@ class NzbObject(TryList):
|
||||
msg = u''.join((msg1, msg2, msg3, msg4, msg5, ))
|
||||
self.set_unpack_info('Download', msg, unique=True)
|
||||
if self.url:
|
||||
self.set_unpack_info('Source', self.url, unique=True)
|
||||
self.set_unpack_info('Source', format_source_url(self.url), unique=True)
|
||||
servers = config.get_servers()
|
||||
if len(self.servercount) > 0:
|
||||
msgs = ['%s=%sB' % (servers[server].displayname(), to_units(self.servercount[server])) for server in self.servercount if server in servers]
|
||||
@@ -1813,7 +1820,7 @@ def scan_password(name):
|
||||
# Is it maybe in 'name / password' notation?
|
||||
if slash == name.find(' / ') + 1:
|
||||
# Remove the extra space after name and before password
|
||||
return name[:slash - 1].strip('. '), name[slash + 2:]
|
||||
return name[:slash-1].strip('. '), name[slash + 2:]
|
||||
return name[:slash].strip('. '), name[slash + 1:]
|
||||
|
||||
# Look for "name password=password"
|
||||
|
||||
@@ -721,7 +721,6 @@ class SABnzbdDelegate(NSObject):
|
||||
|
||||
def restartAction_(self, sender):
|
||||
self.setMenuTitle_("\n\n%s\n" % (T('Stopping...')))
|
||||
logging.info('Restart requested by tray')
|
||||
sabnzbd.trigger_restart()
|
||||
self.setMenuTitle_("\n\n%s\n" % (T('Stopping...')))
|
||||
|
||||
|
||||
@@ -467,12 +467,20 @@ def process_job(nzo):
|
||||
# Run the user script
|
||||
script_path = make_script_path(script)
|
||||
if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
|
||||
# Set the current nzo status to "Ext Script...". Used in History
|
||||
# For windows, we use Short-Paths until 2.0.0 for compatibility
|
||||
if sabnzbd.WIN32:
|
||||
import win32api
|
||||
workdir_complete = clip_path(workdir_complete)
|
||||
if len(workdir_complete) > 259:
|
||||
workdir_complete = win32api.GetShortPathName(workdir_complete)
|
||||
|
||||
# set the current nzo status to "Ext Script...". Used in History
|
||||
nzo.status = Status.RUNNING
|
||||
nzo.set_action_line(T('Running script'), unicoder(script))
|
||||
nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
|
||||
script_log, script_ret = external_processing(script_path, nzo, clip_path(workdir_complete),
|
||||
dirname, job_result)
|
||||
script_log, script_ret = external_processing(script_path, workdir_complete, nzo.filename,
|
||||
dirname, cat, nzo.group, job_result,
|
||||
nzo.nzo_info.get('failure', ''))
|
||||
script_line = get_last_line(script_log)
|
||||
if script_log:
|
||||
script_output = nzo.nzo_id
|
||||
@@ -513,6 +521,7 @@ def process_job(nzo):
|
||||
# No '(more)' button needed
|
||||
nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, script_line), unique=True)
|
||||
|
||||
|
||||
# Cleanup again, including NZB files
|
||||
if all_ok:
|
||||
cleanup_list(workdir_complete, False)
|
||||
@@ -678,7 +687,7 @@ def parring(nzo, workdir):
|
||||
par2_filename = nzf_path
|
||||
|
||||
# Rename so handle_par2() picks it up
|
||||
newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol + 1)
|
||||
newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol+1)
|
||||
renamer(nzf_path, newpath)
|
||||
nzf_try.filename = os.path.split(newpath)[1]
|
||||
|
||||
@@ -791,7 +800,7 @@ def try_rar_check(nzo, workdir, setname):
|
||||
return True
|
||||
except rarfile.Error as e:
|
||||
nzo.fail_msg = T('RAR files failed to verify')
|
||||
msg = T('[%s] RAR-based verification failed: %s') % (unicoder(os.path.basename(rars[0])), unicoder(e.message.replace('\r\n', ' ')))
|
||||
msg = T('[%s] RAR-based verification failed: %s') % (unicoder(os.path.basename(rars[0])), unicoder(e.message.replace('\r\n',' ')))
|
||||
nzo.set_unpack_info('Repair', msg, set=setname)
|
||||
logging.info(msg)
|
||||
return False
|
||||
|
||||
@@ -133,6 +133,9 @@ class Rating(Thread):
|
||||
self.ratings = {}
|
||||
self.nzo_indexer_map = {}
|
||||
Thread.__init__(self)
|
||||
if not sabnzbd.HAVE_SSL:
|
||||
logging.warning(T('Ratings server requires secure connection'))
|
||||
self.stop()
|
||||
|
||||
def stop(self):
|
||||
self.shutdown = True
|
||||
@@ -233,7 +236,7 @@ class Rating(Thread):
|
||||
|
||||
@synchronized(RATING_LOCK)
|
||||
def update_auto_flag(self, nzo_id, flag, flag_detail=None):
|
||||
if not flag or not cfg.rating_enable() or (nzo_id not in self.nzo_indexer_map):
|
||||
if not flag or not cfg.rating_enable() or not cfg.rating_feedback() or (nzo_id not in self.nzo_indexer_map):
|
||||
return
|
||||
logging.debug('Updating auto flag (%s: %s)', nzo_id, flag)
|
||||
indexer_id = self.nzo_indexer_map[nzo_id]
|
||||
|
||||
@@ -131,7 +131,6 @@ class SABTrayThread(SysTrayIconThread):
|
||||
# menu handler
|
||||
def restart(self, icon):
|
||||
self.hover_text = self.txt_restart
|
||||
logging.info('Restart requested by tray')
|
||||
sabnzbd.trigger_restart()
|
||||
|
||||
# menu handler
|
||||
@@ -158,7 +157,6 @@ class SABTrayThread(SysTrayIconThread):
|
||||
# menu handler - adapted from interface.py
|
||||
def shutdown(self, icon):
|
||||
self.hover_text = self.txt_shutdown
|
||||
logging.info('Shutdown requested by tray')
|
||||
sabnzbd.halt()
|
||||
cherrypy.engine.exit()
|
||||
sabnzbd.SABSTOP = True
|
||||
|
||||
@@ -70,15 +70,10 @@ def init():
|
||||
for schedule in cfg.schedules():
|
||||
arguments = []
|
||||
argument_list = None
|
||||
|
||||
try:
|
||||
enabled, m, h, d, action_name = schedule.split()
|
||||
m, h, d, action_name = schedule.split()
|
||||
except:
|
||||
try:
|
||||
enabled, m, h, d, action_name, argument_list = schedule.split(None, 5)
|
||||
except:
|
||||
continue # Bad schedule, ignore
|
||||
|
||||
m, h, d, action_name, argument_list = schedule.split(None, 4)
|
||||
if argument_list:
|
||||
arguments = argument_list.split()
|
||||
|
||||
@@ -157,12 +152,10 @@ def init():
|
||||
logging.warning(T('Unknown action: %s'), action_name)
|
||||
continue
|
||||
|
||||
if enabled == '1':
|
||||
logging.debug("Scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
__SCHED.add_daytime_task(action, action_name, d, None, (h, m),
|
||||
kronos.method.sequential, arguments, None)
|
||||
else:
|
||||
logging.debug("Skipping %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
logging.debug("scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
|
||||
__SCHED.add_daytime_task(action, action_name, d, None, (h, m),
|
||||
kronos.method.sequential, arguments, None)
|
||||
|
||||
# Set Guardian interval to 30 seconds
|
||||
__SCHED.add_interval_task(sched_guardian, "Guardian", 15, 30,
|
||||
@@ -266,10 +259,10 @@ def sort_schedules(all_events, now=None):
|
||||
for schedule in cfg.schedules():
|
||||
parms = None
|
||||
try:
|
||||
enabled, m, h, dd, action, parms = schedule.split(None, 5)
|
||||
m, h, dd, action, parms = schedule.split(None, 4)
|
||||
except:
|
||||
try:
|
||||
enabled, m, h, dd, action = schedule.split(None, 4)
|
||||
m, h, dd, action = schedule.split(None, 3)
|
||||
except:
|
||||
continue # Bad schedule, ignore
|
||||
action = action.strip()
|
||||
@@ -284,7 +277,7 @@ def sort_schedules(all_events, now=None):
|
||||
# Expired event will occur again after a week
|
||||
dif = dif + week_min
|
||||
|
||||
events.append((dif, action, parms, schedule, enabled))
|
||||
events.append((dif, action, parms, schedule))
|
||||
if not all_events:
|
||||
break
|
||||
|
||||
@@ -309,11 +302,6 @@ def analyse(was_paused=False, priority=None):
|
||||
for ev in sort_schedules(all_events=True):
|
||||
if priority is None:
|
||||
logging.debug('Schedule check result = %s', ev)
|
||||
|
||||
# Skip if disabled
|
||||
if ev[4] == '0':
|
||||
continue
|
||||
|
||||
action = ev[1]
|
||||
try:
|
||||
value = ev[2]
|
||||
|
||||
@@ -312,7 +312,6 @@ SKIN_TEXT = {
|
||||
'opt-enable_unzip' : TT('Enable Unzip'),
|
||||
'opt-enable_7zip' : TT('Enable 7zip'),
|
||||
'explain-nosslcontext' : TT('Secure (SSL) connections from SABnzbd to newsservers and HTTPS websites will be encrypted, however, validating a server\'s identity using its certificates is not possible. Python 2.7.9 or above, OpenSSL 1.0.2 or above and up-to-date local CA certificates are required.'),
|
||||
'explain-getpar2mt': TT('Speed up repairs by installing multicore Par2, it is available for many platforms.'),
|
||||
'version' : TT('Version'),
|
||||
'uptime' : TT('Uptime'),
|
||||
'backup' : TT('Backup'), #: Indicates that server is Backup server in Status page
|
||||
@@ -328,6 +327,8 @@ SKIN_TEXT = {
|
||||
'explain-port' : TT('Port SABnzbd should listen on.'),
|
||||
'opt-web_dir' : TT('Web Interface'),
|
||||
'explain-web_dir' : TT('Choose a skin.'),
|
||||
'opt-web_dir2' : TT('Secondary Web Interface'),
|
||||
'explain-web_dir2' : TT('Activate an alternative skin.'),
|
||||
'opt-web_username' : TT('SABnzbd Username'),
|
||||
'explain-web_username' : TT('Optional authentication username.'),
|
||||
'opt-web_password' : TT('SABnzbd Password'),
|
||||
@@ -413,12 +414,16 @@ SKIN_TEXT = {
|
||||
'base-folder' : TT('Default Base Folder'),
|
||||
|
||||
# Config->Switches
|
||||
'opt-quick_check' : TT('Enable Quick Check'),
|
||||
'explain-quick_check' : TT('Skip par2 checking when files are 100% valid.'),
|
||||
'opt-enable_all_par' : TT('Download all par2 files'),
|
||||
'explain-enable_all_par' : TT('This prevents multiple repair runs by downloading all par2 files when needed.'),
|
||||
'explain-enable_all_par' : TT('This prevents multiple repair runs. QuickCheck on: download all par2 files when needed. QuickCheck off: always download all par2 files.'),
|
||||
'opt-enable_recursive' : TT('Enable recursive unpacking'),
|
||||
'explain-enable_recursive' : TT('Unpack archives (rar, zip, 7z) within archives.'),
|
||||
'opt-flat_unpack' : TT('Ignore any folders inside archives'),
|
||||
'explain-flat_unpack' : TT('All files will go into a single folder.'),
|
||||
'opt-overwrite_files' : TT('When unpacking, overwrite existing files'),
|
||||
'explain-overwrite_files' : TT('This will overwrite existing files instead of creating an alternative name.'),
|
||||
'opt-top_only' : TT('Only Get Articles for Top of Queue'),
|
||||
'explain-top_only' : TT('Enable for less memory usage. Disable to prevent slow jobs from blocking the queue.'),
|
||||
'opt-safe_postproc' : TT('Post-Process Only Verified Jobs'),
|
||||
@@ -444,6 +449,8 @@ SKIN_TEXT = {
|
||||
'explain-script_can_fail' : TT('When the user script returns a non-zero exit code, the job will be flagged as failed.'),
|
||||
'opt-new_nzb_on_failure' : TT('On failure, try alternative NZB'),
|
||||
'explain-new_nzb_on_failure' : TT('Some servers provide an alternative NZB when a download fails.'),
|
||||
'opt-enable_meta' : TT('Use tags from indexer'),
|
||||
'explain-enable_meta' : TT('Use tags from indexer for title, season, episode, etc. Otherwise all naming is derived from the NZB name.'),
|
||||
'opt-folder_rename' : TT('Enable folder rename'),
|
||||
'explain-folder_rename' : TT('Use temporary names during post processing. Disable when your system doesn\'t handle that properly.'),
|
||||
'opt-pre_script' : TT('Pre-queue user script'),
|
||||
@@ -509,9 +516,12 @@ SKIN_TEXT = {
|
||||
'opt-fail_hopeless_jobs' : TT('Abort jobs that cannot be completed'),
|
||||
'explain-fail_hopeless_jobs' : TT('When during download it becomes clear that too much data is missing, abort the job'),
|
||||
'opt-rating_enable' : TT('Enable Indexer Integration'),
|
||||
'explain-rating_enable' : TT('Indexers can supply rating information when a job is added and SABnzbd can report to the indexer if a job couldn\'t be completed. Depending on your indexer, the API key setting can be left blank.'),
|
||||
'explain-rating_enable' : TT('Enhanced functionality including ratings and extra status information is available when connected to OZnzb indexer.'),
|
||||
'opt-rating_api_key' : TT('API Key'),
|
||||
'opt-rating_host' : TT('Server address'),
|
||||
'explain-rating_api_key' : TT('This key provides identity to indexer. Check your profile on the indexer\'s website.'),
|
||||
'opt-rating_feedback' : TT('Automatic Feedback'),
|
||||
'explain-rating_feedback' : TT('Send automatically calculated validation results for downloads to indexer.'),
|
||||
'opt-rating_filter_enable' : TT('Enable Filtering'),
|
||||
'explain-rating_filter_enable' : TT('Action downloads according to filtering rules.'),
|
||||
'opt-rating_filter_abort_if' : TT('Abort If'),
|
||||
|
||||
@@ -24,7 +24,7 @@ import sys
|
||||
import select
|
||||
|
||||
from sabnzbd.newswrapper import NewsWrapper
|
||||
from sabnzbd.downloader import Server, clues_login, clues_too_many, nntp_to_msg
|
||||
from sabnzbd.downloader import Server, clues_login, clues_too_many
|
||||
from sabnzbd.config import get_servers
|
||||
from sabnzbd.misc import int_conv
|
||||
|
||||
@@ -83,13 +83,12 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
|
||||
nw.init_connect(None)
|
||||
while not nw.connected:
|
||||
nw.lines = []
|
||||
nw.data = []
|
||||
nw.recv_chunk(block=True)
|
||||
#more ssl related: handle 1/n-1 splitting to prevent Rizzo/Duong-Beast
|
||||
read_sockets, _, _ = select.select([nw.nntp.sock], [], [], 0.1)
|
||||
if read_sockets:
|
||||
nw.recv_chunk(block=True)
|
||||
nw.finish_connect(nw.status_code)
|
||||
nw.finish_connect(nw.lines[0][:3])
|
||||
|
||||
except socket.timeout, e:
|
||||
if port != 119 and not ssl:
|
||||
@@ -118,25 +117,31 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
|
||||
nw.nntp.sock.sendall('ARTICLE <test@home>\r\n')
|
||||
try:
|
||||
nw.lines = []
|
||||
nw.data = []
|
||||
nw.recv_chunk(block=True)
|
||||
except:
|
||||
return False, unicode(sys.exc_info()[1])
|
||||
|
||||
if nw.status_code == '480':
|
||||
# Could do with making a function for return codes to be used by downloader
|
||||
try:
|
||||
code = nw.lines[0][:3]
|
||||
except IndexError:
|
||||
code = ''
|
||||
nw.lines.append('')
|
||||
|
||||
if code == '480':
|
||||
return False, T('Server requires username and password.')
|
||||
|
||||
elif nw.status_code == '100' or nw.status_code.startswith('2') or nw.status_code.startswith('4'):
|
||||
elif code == '100' or code.startswith('2') or code.startswith('4'):
|
||||
return True, T('Connection Successful!')
|
||||
|
||||
elif nw.status_code == '502' or clues_login(nntp_to_msg(nw.data)):
|
||||
elif code == '502' or clues_login(nw.lines[0]):
|
||||
return False, T('Authentication failed, check username/password.')
|
||||
|
||||
elif clues_too_many(nw.lines[0]):
|
||||
return False, T('Too many connections, please pause downloading or try again later')
|
||||
|
||||
else:
|
||||
return False, T('Could not determine connection result (%s)') % nntp_to_msg(nw.data)
|
||||
return False, T('Could not determine connection result (%s)') % nw.lines[0]
|
||||
|
||||
# Close the connection
|
||||
nw.terminate(quit=True)
|
||||
|
||||
@@ -36,10 +36,11 @@ import sabnzbd.cfg as cfg
|
||||
|
||||
class Wizard(object):
|
||||
|
||||
def __init__(self, root):
|
||||
def __init__(self, web_dir, root, prim):
|
||||
self.__root = root
|
||||
# Get the path for the folder named wizard
|
||||
self.__web_dir = sabnzbd.WIZARD_DIR
|
||||
self.__prim = prim
|
||||
self.info = {'webdir': sabnzbd.WIZARD_DIR,
|
||||
'steps': 2,
|
||||
'version': sabnzbd.__version__,
|
||||
@@ -95,7 +96,7 @@ class Wizard(object):
|
||||
info['language'] = cfg.language()
|
||||
info['active_lang'] = info['language']
|
||||
info['T'] = Ttemplate
|
||||
info['have_ssl_context'] = sabnzbd.HAVE_SSL_CONTEXT
|
||||
info['have_ssl'] = bool(sabnzbd.HAVE_SSL)
|
||||
|
||||
servers = config.get_servers()
|
||||
if not servers:
|
||||
@@ -105,7 +106,6 @@ class Wizard(object):
|
||||
info['password'] = ''
|
||||
info['connections'] = ''
|
||||
info['ssl'] = 0
|
||||
info['ssl_verify'] = 2
|
||||
else:
|
||||
for server in servers:
|
||||
# If there are multiple servers, just use the first enabled one
|
||||
@@ -115,8 +115,8 @@ class Wizard(object):
|
||||
info['username'] = s.username()
|
||||
info['password'] = s.password.get_stars()
|
||||
info['connections'] = s.connections()
|
||||
|
||||
info['ssl'] = s.ssl()
|
||||
info['ssl_verify'] = s.ssl_verify()
|
||||
if s.enable():
|
||||
break
|
||||
template = Template(file=os.path.join(self.__web_dir, 'one.html'),
|
||||
|
||||
@@ -130,16 +130,15 @@ if not os.path.exists(PO_DIR):
|
||||
path, exe = os.path.split(sys.executable)
|
||||
if os.name == 'nt':
|
||||
TOOL = os.path.join(path, r'Tools\i18n\pygettext.py')
|
||||
TOOL = 'python ' + TOOL
|
||||
else:
|
||||
TOOL = os.path.join(path, 'pygettext.py')
|
||||
if not os.path.exists(TOOL):
|
||||
TOOL = 'pygettext'
|
||||
if not os.path.exists(TOOL):
|
||||
TOOL = 'pygettext'
|
||||
|
||||
|
||||
cmd = '%s %s %s' % (TOOL, PARMS, FILES)
|
||||
print 'Create POT file'
|
||||
#print cmd
|
||||
# print cmd
|
||||
os.system(cmd)
|
||||
|
||||
print 'Post-process the POT file'
|
||||
|
||||
Reference in New Issue
Block a user