Mirror of https://github.com/sabnzbd/sabnzbd.git, synced 2026-01-06 14:39:41 -05:00
Compare commits
163 Commits
SHA1 of the 163 commits in this comparison:
840b03c875, 325f876010, 91606a24b8, f001d8b749, 31c0c239f9, 918c4dbfce, f587319ef0, b4dd942899,
97a6720fba, f05c1ef9e8, 91c1ea97fd, 74faa159e7, 7e9892bb8d, 0a9e54e5c5, 0f0d16a104, 7d7ee6ca6a,
0d96cd3fe8, 596244543c, 237d6b9414, a7c42779f8, c49e5f2054, 6ca6037aa0, cc7f360c04, 75991bffea,
afd1b1968c, 746e9d2a6d, 36b5b5d0f3, dd603cfcc8, cefce9913a, 0b29f27fcd, f3f3e27bfe, 519c44a72a,
7e28da0530, af70d98b50, 3f0b84ea22, e58abd45ec, abeee263f0, 30f68bd7b9, f13394d27f, 88e0617429,
ab94ffc055, 265ab99cc7, 738adbe38e, 4e6862cef9, 7aff60b24d, 8819e38073, 2bbac91436, cf4dea432b,
bd70df1f05, b2638c1fac, 4aa9409f5d, dfa863a54a, 626c04df48, 6026fa57f0, 0db28fb5e2, c9bf8ced99,
18a55db245, efb9664761, 3ee412c7a5, 5afc00a502, e5ca0e6415, 98f121258c, b2474c51fd, 1bd6ebdb41,
20ef99326d, 171a1b9ae3, cd2c9d151a, 8fbfe9a76a, 63cf0d4f97, faa98126f8, 0dd70249b9, aadd99cac3,
5bede842ba, 1f2ac77b5e, 3f4f35c6d1, 9575ddbdb4, 3ed918d98d, 89d5af3372, fa8f40eee3, 8f06035500,
8f7d969099, 5422785feb, 911f82c00b, fbff8c991b, f1b139d55d, 9c7f196e20, 4d8a37006e, bc9d3d561f,
fd9e80bdf5, aee2f71170, 102160d651, e33f26d33c, 57113fa02f, 2be7575f98, c5647b46e1, 3450cff92f,
13a11411a9, 6e7eb9dec4, c74eed7c0e, 2fc6811495, 57e0dac45b, 537e31000e, e0872f4536, 9dddf6dd2e,
7a0c5feed3, 42d154f0b7, 44b0ab2203, 4af59b50ad, b309099f0b, affea99cb1, 9bc35a3026, 57e9d499fb,
d53cf598a4, bdaca2bd37, 53e3af9b30, 9cfef895f8, 56fa9644a5, 8eff51a96b, d130a1d44a, 98316fd282,
59f1ea3073, 270757f3bd, 843c6b36a8, f71a2a8fc2, 63fc763958, 9bc0aac63d, ff529da874, ff40944f00,
1d0ac46c7e, d62bb1e5b6, 89f91e46b7, bc2daa5f8b, 0fbf240a58, 83d57b33f7, 59ae23e315, df26adfe89,
d1a92aeb36, c09f1b2f1c, a70a1e6290, dde5258b59, dc438e6eb7, 37a9a97f4f, 4fb6e3fe7b, 7884848c78,
2ece328e50, b8ac3cd22f, 9b4bd7a3f0, facefc5c58, 5d6a6b1af7, 916e0ead99, aff7b07f33, 8267b429ca,
5759bee1df, f2648ec85c, e083722f0b, f03e63fa54, f33c3e30eb, c9ee0b0fcb, 7aaa8036bc, 00f2410d2d,
be77a494db, 787a95bdd2, 720ce591b7

@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 1.2.x ***
*** This is SABnzbd 2.0.0 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,
@@ -12,9 +12,6 @@ and offers a complete API for third-party applications to hook into.
There is an extensive Wiki on the use of SABnzbd.
https://sabnzbd.org/wiki/

IMPORTANT INFORMATION about release 1.x.x:
https://sabnzbd.org/wiki/new-features-and-changes

Please also read the file "ISSUES.txt"

The organization of the download queue is different from 0.7.x (and older).

23 INSTALL.txt
@@ -1,4 +1,4 @@
SABnzbd 1.2.3
SABnzbd 2.0.0

-------------------------------------------------------------------------------
0) LICENSE
@@ -21,7 +21,7 @@ along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

-------------------------------------------------------------------------------
1) INSTALL with the Win32 installer
1) INSTALL with the Windows installer
-------------------------------------------------------------------------------
Just run the downloaded EXE file and the installer will start.
It's just a simple standard installer.
@@ -32,7 +32,7 @@ Use the "Help" button in the web-interface to be directed to the Help Wiki.


-------------------------------------------------------------------------------
2) INSTALL pre-built Win32 binaries
2) INSTALL pre-built Windows binaries
-------------------------------------------------------------------------------
Unzip pre-built version to any folder of your liking.
Start the SABnzbd.exe program.
@@ -41,11 +41,11 @@ Use the "Help" button in the web-interface to be directed to the Help Wiki.


-------------------------------------------------------------------------------
3) INSTALL pre-built OSX binaries
3) INSTALL pre-built macOS binaries
-------------------------------------------------------------------------------
Download the DMG file, mount and drag the SABnzbd icon to Programs.
Just like you do with so many apps.
Make sure you pick the right folder, depending on your OSX version.
Make sure you pick the right folder, depending on your macOS version.


-------------------------------------------------------------------------------
@@ -55,11 +55,11 @@ Make sure you pick the right folder, depending on your OSX version.
You need to have Python installed plus some non-standard Python modules
and a few tools.

Unix/Linux/OSX
Unix/Linux/macOS
Python-2.7.latest http://www.python.org (2.7.9+ recommended)

OSX Mavericks or newer
Apple Python 2.7 Included in OSX (default)
macOS Mavericks or newer
Apple Python 2.7 Included in macOS (default)

Windows
Python-2.7.latest http://www.python.org (2.7.9+ recommended)
@@ -68,20 +68,19 @@ Windows
Essential modules
cheetah-2.0.1+ use "pip install cheetah"
par2cmdline >= 0.4 http://parchive.sourceforge.net/
Note: https://sabnzbd.org/wiki/configuration/1.2/switches#par2cmdline
Note: https://sabnzbd.org/wiki/configuration/2.0/switches#par2cmdline
unrar >= 5.00+ http://www.rarlab.com/rar_add.htm

Optional modules
unzip >= 6.00 http://www.info-zip.org/
7zip >= 9.20 http://www.7zip.org/
yenc module >= 0.4 use "pip install yenc"
https://sabnzbd.org/wiki/installation/yenc-0.4_py2.7.rar (Win32-only)
sabyenc >= 2.7.0 use "pip install sabyenc" - https://sabnzbd.org/sabyenc
openssl => 1.0.0 http://www.openssl.org/
v0.9.8 will work, but limits certificate validation
cryptography >= 1.0 use "pip install cryptography"
Enables certificate generation and detection of encrypted RAR-files

Optional modules Unix/Linux/OSX
Optional modules Unix/Linux/macOS
pynotify Should be part of GTK for Python support on Debian/Ubuntu
If not, you cannot use the NotifyOSD feature.
python-dbus Enable option to Shutdown/Restart/Standby PC on queue finish.

@@ -24,13 +24,13 @@
For these the server blocking method is not very favourable.
There is an INI-only option that will limit blocks to 1 minute.
no_penalties = 1
See: https://sabnzbd.org/wiki/configuration/1.2/special
See: https://sabnzbd.org/wiki/configuration/2.0/special

- Some third-party utilities try to probe the SABnzbd API in such a way that you will
often see warnings about unauthenticated access.
If you are sure these probes are harmless, you can suppress the warnings by
setting the option "api_warnings" to 0.
See: https://sabnzbd.org/wiki/configuration/1.2/special
See: https://sabnzbd.org/wiki/configuration/2.0/special

- On OSX you may encounter downloaded files with foreign characters.
The par2 repair may fail when the files were created on a Windows system.
@@ -41,7 +41,7 @@
You will see this only when downloaded files contain accented characters.
You need to fix it yourself by running the convmv utility (available for most Linux platforms).
Possibly the file system override setting 'fsys_type' might solve things:
See: https://sabnzbd.org/wiki/configuration/1.2/special
See: https://sabnzbd.org/wiki/configuration/2.0/special

- The "Watched Folder" sometimes fails to delete the NZB files it has
processed. This happens when other software still accesses these files.
@@ -81,4 +81,4 @@
- Squeeze Linux
There is a "special" option that will allow you to select an alternative library.
use_pickle = 1
See: https://sabnzbd.org/wiki/configuration/1.2/special
See: https://sabnzbd.org/wiki/configuration/2.0/special

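The options mentioned in these hunks (no_penalties, api_warnings, use_pickle, fsys_type) are INI-only: they have no switch in the web interface and are added to sabnzbd.ini by hand while SABnzbd is stopped. A rough sketch of what such an entry might look like — the [misc] section name is an assumption; place the line wherever the other general options already live in your own sabnzbd.ini:

    [misc]
    no_penalties = 1
    api_warnings = 0
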
4 PKG-INFO
@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 1.2.3
Summary: SABnzbd-1.2.3
Version: 2.0.0RC3
Summary: SABnzbd-2.0.0RC3
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org

26 README.md
@@ -1,37 +1,25 @@
SABnzbd - The automated Usenet download tool
============================================

This Unicode release is not compatible with 0.7.x queues!

There is also an issue with upgrading of the "sabnzbd.ini" file.
Make sure that you have a backup!

Saved queues may not be compatible after updates.

----

SABnzbd is an Open Source Binary Newsreader written in Python.

It's totally free, incredibly easy to use, and works practically everywhere.

SABnzbd makes Usenet as simple and streamlined as possible by automating everything we can. All you have to do is add an .nzb. SABnzbd takes over from there, where it will be automatically downloaded, verified, repaired, extracted and filed away with zero human interaction.

SABnzbd makes Usenet as simple and streamlined as possible by automating everything we can. All you have to do is add an `.nzb`. SABnzbd takes over from there, where it will be automatically downloaded, verified, repaired, extracted and filed away with zero human interaction.
If you want to know more you can head over to our website: http://sabnzbd.org.

## Resolving Dependencies

SABnzbd has a good deal of dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages floating around (Ubuntu, Debian, Fedora, etc), then you likely already have all the needed dependencies. If not, here's what you're looking for:
SABnzbd has a good deal of dependencies you'll need before you can get running. If you've previously run SABnzbd from one of the various Linux packages, then you likely already have all the needed dependencies. If not, here's what you're looking for:

- `python` (only 2.7.x and higher, but not 3.x.x)
- `python-cheetah`
- `python-support`
- `par2` (Multi-threaded par2 installation guide can be found [here](https://forums.sabnzbd.org/viewtopic.php?f=16&t=18793#p99702))
- `par2` (Multi-threaded par2 installation guide can be found [here](https://sabnzbd.org/wiki/installation/multicore-par2))
- `unrar` (Make sure you get the "official" non-free version of unrar)
- `sabyenc` (installation guide can be found [here](https://sabnzbd.org/sabyenc))

Optional:

- `python-cryptography` (enables certificate generation and detection of encrypted RAR-files during download)
- `python-yenc`
- `python-dbus` (enable option to Shutdown/Restart/Standby PC on queue finish)
- `7zip`
- `unzip`
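
Before starting SABnzbd it can be handy to confirm that the Python modules behind the packages listed above are importable. A minimal sketch — the module names are my reading of the lists above (`Cheetah` for python-cheetah, `_yenc` for python-yenc), and everything beyond Cheetah is optional:

```python
# check_deps.py -- rough check that the Python modules listed above are importable
import importlib

required = ["Cheetah"]                                  # provided by python-cheetah / "pip install cheetah"
optional = ["cryptography", "sabyenc", "_yenc", "dbus"]

for name in required + optional:
    try:
        importlib.import_module(name)
        print("%-14s found" % name)
    except ImportError:
        kind = "REQUIRED" if name in required else "optional"
        print("%-14s missing (%s)" % (name, kind))
```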
@@ -43,13 +31,13 @@ Your package manager should supply these. If not, we've got links in our more in
Once you've sorted out all the dependencies, simply run:

```
python SABnzbd.py
python -OO SABnzbd.py
```

Or, if you want to run in the background:

```
python SABnzbd.py -d -f /path/to/sabnzbd.ini
python -OO SABnzbd.py -d -f /path/to/sabnzbd.ini
```

If you want multi-language support, run:
@@ -68,7 +56,7 @@ Basically:
- `develop` is the target for integration and is **not** intended for end-users.
- `1.1.x` is a release and maintenance branch for 1.1.x (1.1.0 -> 1.1.1 -> 1.1.2) and is **not** intended for end-users.
- `feature/my_feature` is a temporary feature branch based on `develop`.
- `hotfix/my_hotfix` is an optional temporary branch for bugfix(es) based on `develop`.
- `bugfix/my_bugfix` is an optional temporary branch for bugfix(es) based on `develop`.

Conditions:
- Merging of a stable release into `master` will be simple: the release branch is always right.

147 README.mkd
@@ -1,73 +1,92 @@
Release Notes - SABnzbd 1.2.3
==============================================
Release Notes - SABnzbd 2.0.0RC3
=========================================================

## Bug fixes in 1.2.3
- Fix crashing Downloader when a Quota was set
- Fix broken webserver HTTPS for extra bound ports (e.g. IPv6)
- Windows: Fix hanging Unrar
- Windows: Unzip was not working
- Windows: Incomplete folders would sometimes end in a dot
- Starting with the "-w" parameter would fail
## Bugfixes since RC2
- SABYenc incorrectly gave CRC errors on some articles, fixed in SABYenc v3.0.2
- par2cmdline would fail to repair jobs with split posts (.001, etc)
- The insecure Certificate Verification level 'Default' is now called 'Minimal'
- Show warning if job is paused because it appears cloaked
- Fixed QuickCheck renaming issues
- Unexpected characters in the CRC part of an article could crash the Decoder


## New in 2.0.0: SABYenc
- To improve SABnzbd's performance on systems where CPU power limits download
speed, we developed a new C-module called SABYenc that accelerates the decoding
of Usenet articles, can use multiple threads and is more efficient.
Not only low-powered systems like NAS's or Raspberry Pi's benefit; with this
new module speeds can increase up to 2x compared to 1.x.x releases on any
system where the connection/newsserver capacity was not fully used.
The Windows and macOS releases automatically include this module; for other
platforms an installation guide can be found here: https://sabnzbd.org/sabyenc

## Changes/improvements in 2.0.0:
- Windows and macOS releases now also come in 64bit versions.
The installers will install the appropriate version automatically.
Therefore, on 64bit Windows the installation directory will change to
'Program Files' instead of 'Program Files (x86)'.
On Windows our tests showed an additional 5-10% gain in download speed
when using 64bit SABnzbd on 64bit Windows.
- Linux: Detect if Multicore Par2 is installed.
Multicore Par2 is now easily available through the PPA and other channels:
https://sabnzbd.org/wiki/installation/multicore-par2
- Post-processing scripts now get additional job information via SAB environment
variables (see the sketch after this list). https://sabnzbd.org/wiki/scripts/post-processing-scripts
- Certificate Validation set to Strict for newly added newsservers
In case of problems, see: https://sabnzbd.org/certificate-errors
- Removed Secondary Web Interface option

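To illustrate the environment-variable change above, here is a minimal sketch of a post-processing script that reads a few of them. The variable names used (`SAB_COMPLETE_DIR`, `SAB_CAT`, `SAB_PP_STATUS`) are examples; check the wiki page linked in that item for the authoritative list.

```python
#!/usr/bin/env python
# Sketch of a post-processing script that reads SAB_* environment variables.
# Variable names are illustrative; see the post-processing-scripts wiki page.
import os
import sys

job_dir = os.environ.get('SAB_COMPLETE_DIR', '')   # folder holding the finished job
category = os.environ.get('SAB_CAT', '')           # category assigned to the job
pp_status = os.environ.get('SAB_PP_STATUS', '')    # post-processing result code

print('Job folder: %s (category: %s, status: %s)' % (job_dir, category, pp_status))
sys.exit(0)  # a non-zero exit code would mark the script step as failed
```
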
## Smaller changes/improvements in 2.0.0
- Schedule items can now be enabled and disabled
- HTTP-redirects in the interface are now relative URLs
- Moved some lesser-used settings to Config->Specials
- Cache usage is now updated continuously in the Status Window
- On macOS SABnzbd was set to have low IO-priority; this is now set to normal
- Previously set password is now shown on Retry
- Removed listquote module dependency
- Warn if Complete folder is on FAT filesystem (4GB size limit)

## Bug fixes in 2.0.0
- Malformed articles could break the Downloader
- Retry ADMIN-data saving 3x before giving an error
- Checking for encryption during downloading could fail
- QuickCheck could crash when renaming already renamed files
- `skip_dashboard` set to 1 by default in `fullstatus` API-call
- Top-only switch now really only downloads the top job
- Unblock Server button did not work
- Linux: Warn in case encoding is not set to UTF-8
- Windows: Incomplete folders would sometimes end in a dot

## Bug fix in 1.2.2
- Windows: job-directory incorrectly passed to PostProcessing-script
## Upgrade notices
- Windows: When starting the Post-Processing script, the path to the job folder
is no longer in short-path notation but includes the full path. To support
long paths (>255), you might need to alter them to long-path notation (\\?\);
see the sketch after this list.
- Schedule items are converted when upgrading to 2.x.x and will break when
reverted back to pre-2.x.x releases.
- The organization of the download queue is different from 0.7.x releases.
So 2.x.x will not see the existing queue, but you can go to Status->QueueRepair
and "Repair" the old queue.

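For the Windows long-path note above, a rough sketch of how a script could convert the job path it receives into long-path notation; this is illustrative and not part of SABnzbd itself:

```python
# Sketch: turn an absolute Windows path into long-path (\\?\) notation so that
# file operations keep working past the ~255-character limit. Illustrative only.
import os

def to_long_path(path):
    path = os.path.abspath(path)
    if os.name == 'nt' and not path.startswith('\\\\?\\'):
        if path.startswith('\\\\'):        # UNC share -> \\?\UNC\server\share\...
            return '\\\\?\\UNC' + path[1:]
        return '\\\\?\\' + path
    return path

print(to_long_path(r'C:\Downloads\complete\some.very.long.job.name'))
```
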
## What's new in 1.2.1
- QuickCheck will perform fast rename of obfuscated posts
- RSS Downloaded page now shows icon to indicate source
- HTML tags are filtered from single-line script output
- New self-signed certificates now list local IP in SAN-list
- Handle jobs on Windows with forbidden names (Con.*, Aux.*, ...)

## Bug fixes in 1.2.1
- Fix crashing Assembler
- 'Only Download Top of Queue' was broken for a long time
- Cloaked files (RAR within RAR) were not detected anymore
- Incorrectly labeled some downloads as Encrypted
- Passwords were not parsed correctly from filenames
- RSS reading could fail on missing attributes
- Multi-feed RSS will not stop if only 1 feed is not functioning
- Duplicate detection set to Fail would not work for RSS feeds
- Incorrectly marking jobs with folders inside as failed
- Categories were not matched properly if a list of tags was set
- PostProcessing-script was not called on Accept&Fail or Dupe detect
- Support for newer par2cmdline(-mt) versions that need the -B parameter
- Some newsservers would time out when connecting
- More robust detection of execute permissions for scripts
- CPU type reporting on Windows and macOS
- Failed to start with some localhost configs
- Removed some more stalling issues
- Retry rename 3x before falling back to copy during "Moving"
- Catch several SSL errors of the webserver
- Disk-space information is now only checked every 10 seconds

## Translations
- Many translations updated, thanks to our translators!

## About
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,
thanks to its web-based user interface and advanced
built-in post-processing options that automatically verify, repair,
extract and clean up posts downloaded from Usenet.

(c) Copyright 2007-2017 by "The SABnzbd-team" \<team@sabnzbd.org\>

### IMPORTANT INFORMATION about release 1.x.x
<https://sabnzbd.org/wiki/new-features-and-changes>

### Known problems and solutions
- Read the file "ISSUES.txt"

### Upgrading from 0.7.x and older
## Upgrading from 0.7.x and older
- Finish queue
- Stop SABnzbd
- Install new version
- Start SABnzbd

The organization of the download queue is different from older versions.
1.x.x will not see the existing queue, but you can go to
Status->QueueRepair and "Repair" the old queue.
Also, your sabnzbd.ini file will be upgraded, making it
incompatible with releases older than 0.7.9
## IMPORTANT INFORMATION about release 2.x.x
<https://sabnzbd.org/wiki/new-features-and-changes>

## Known problems and solutions
- Read the file "ISSUES.txt"

## Translations
- Numerous translations updated, thanks to our translators!

## About
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically, thanks
to its web-based user interface and advanced built-in post-processing options
that automatically verify, repair, extract and clean up posts downloaded
from Usenet.

(c) Copyright 2007-2017 by "The SABnzbd-team" \<team@sabnzbd.org\>

175 SABnzbd.py
@@ -130,24 +130,6 @@ def guard_loglevel():
LOG_FLAG = True


class FilterCP3:
# Filter out all CherryPy3-Access logging that we receive,
# because we have the root logger

def __init__(self):
pass

def filter(self, record):
_cplogging = record.module == '_cplogging'
# Python2.4 fix
# record has no attribute called funcName under python 2.4
if hasattr(record, 'funcName'):
access = record.funcName == 'access'
else:
access = True
return not (_cplogging and access)


class guiHandler(logging.Handler):
""" Logging handler collects the last warnings/errors/exceptions
to be displayed in the web-gui
@@ -205,7 +187,7 @@ def print_help():
print " -2 --template2 <templ> Secondary template dir [*]"
print
print " -l --logging <0..2> Set logging level (-1=off, 0= least, 2= most) [*]"
print " -w --weblogging <0..2> Set cherrypy logging (0= off, 1= on, 2= file-only) [*]"
print " -w --weblogging Enable cherrypy access logging"
print
print " -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]"
if sabnzbd.WIN32:
@@ -304,9 +286,6 @@ def Web_Template(key, defweb, wdir):
logging.info("Web dir is %s", full_dir)

if not os.path.exists(full_main):
# Temporarily fix that allows missing Config
if defweb == DEF_STDCONFIG:
return ''
# end temp fix
logging.warning(T('Cannot find web template: %s, trying standard template'), full_main)
full_dir = real_path(sabnzbd.DIR_INTERFACES, DEF_STDINTF)
@@ -316,8 +295,6 @@ def Web_Template(key, defweb, wdir):
panic_tmpl(full_dir)
exit_sab(1)

# sabnzbd.lang.install_language(real_path(full_dir, DEF_INT_LANGUAGE), sabnzbd.cfg.language(), wdir)

return real_path(full_dir, "templates")


@@ -428,10 +405,23 @@ def GetProfileInfo(vista_plus):

def print_modules():
""" Log all detected optional or external modules """
if sabnzbd.decoder.HAVE_YENC:
logging.info("_yenc module... found!")
if sabnzbd.decoder.SABYENC_ENABLED:
# Yes, we have SABYenc, and it's the correct version, so it's enabled
logging.info("SABYenc module (v%s)... found!", sabnzbd.constants.SABYENC_VERSION_REQUIRED)
else:
logging.warning(T('_yenc module... NOT found!'))
# Something wrong with SABYenc, so let's determine and print what:
if sabnzbd.decoder.SABYENC_VERSION:
# We have a VERSION, thus a SABYenc module, but it's not the correct version
logging.warning(T("SABYenc disabled: no correct version found! (Found v%s, expecting v%s)") % (sabnzbd.decoder.SABYENC_VERSION, sabnzbd.constants.SABYENC_VERSION_REQUIRED))
else:
# No SABYenc module at all
logging.warning(T("SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc") % sabnzbd.constants.SABYENC_VERSION_REQUIRED)

# No correct SABYenc version or no SABYenc at all, so now we care about old-yEnc
if sabnzbd.decoder.HAVE_YENC:
logging.info("_yenc module... found!")
else:
logging.error(T('_yenc module... NOT found!'))

if sabnzbd.HAVE_CRYPTOGRAPHY:
logging.info('Cryptography module (v%s)... found!', sabnzbd.HAVE_CRYPTOGRAPHY)
@@ -449,8 +439,15 @@ def print_modules():
if sabnzbd.newsunpack.RAR_COMMAND:
logging.info("UNRAR binary... found (%s)", sabnzbd.newsunpack.RAR_COMMAND)

# Report problematic unrar
if sabnzbd.newsunpack.RAR_PROBLEM and not sabnzbd.cfg.ignore_wrong_unrar():
have_str = '%.2f' % (float(sabnzbd.newsunpack.RAR_VERSION) / 100)
want_str = '%.2f' % (float(sabnzbd.constants.REC_RAR_VERSION) / 100)
logging.warning(T('Your UNRAR version is %s, we recommend version %s or higher.<br />') % (have_str, want_str))
elif not (sabnzbd.WIN32 or sabnzbd.DARWIN):
logging.debug('UNRAR binary version %.2f', (float(sabnzbd.newsunpack.RAR_VERSION) / 100))
else:
logging.warning(T('unrar binary... NOT found'))
logging.error(T('unrar binary... NOT found'))

if sabnzbd.newsunpack.ZIP_COMMAND:
logging.info("unzip binary... found (%s)", sabnzbd.newsunpack.ZIP_COMMAND)
@@ -780,7 +777,7 @@ def commandline_handler(frozen=True):
try:
opts, args = getopt.getopt(info, "phdvncwl:s:f:t:b:2:",
['pause', 'help', 'daemon', 'nobrowser', 'clean', 'logging=',
'weblogging=', 'server=', 'templates', 'ipv6_hosting=',
'weblogging', 'server=', 'templates', 'ipv6_hosting=',
'template2', 'browser=', 'config-file=', 'force',
'version', 'https=', 'autorestarted', 'repair', 'repair-all',
'log-all', 'no-login', 'pid=', 'new', 'console', 'pidfile=',
@@ -845,7 +842,6 @@ def main():
clean_up = False
logging_level = None
web_dir = None
web_dir2 = None
vista_plus = False
vista64 = False
force_web = False
@@ -879,8 +875,6 @@ def main():
exit_sab(0)
elif opt in ('-t', '--templates'):
web_dir = arg
elif opt in ('-2', '--template2'):
web_dir2 = arg
elif opt in ('-s', '--server'):
(cherryhost, cherryport) = split_host(arg)
elif opt in ('-n', '--nobrowser'):
@@ -895,13 +889,7 @@ def main():
elif opt in ('-c', '--clean'):
clean_up = True
elif opt in ('-w', '--weblogging'):
try:
cherrypylogging = int(arg)
except:
cherrypylogging = -1
if cherrypylogging < 0 or cherrypylogging > 2:
print_help()
exit_sab(1)
cherrypylogging = True
elif opt in ('-l', '--logging'):
try:
logging_level = int(arg)
@@ -1139,7 +1127,6 @@ def main():

logformat = '%(asctime)s::%(levelname)s::[%(module)s:%(lineno)d] %(message)s'
rollover_log.setFormatter(logging.Formatter(logformat))
rollover_log.addFilter(FilterCP3())
sabnzbd.LOGHANDLER = rollover_log
logger.addHandler(rollover_log)
logger.setLevel(LOGLEVELS[logging_level + 1])
@@ -1169,7 +1156,6 @@ def main():

if consoleLogging:
console = logging.StreamHandler()
console.addFilter(FilterCP3())
console.setLevel(LOGLEVELS[logging_level + 1])
console.setFormatter(logging.Formatter(logformat))
logger.addHandler(console)
@@ -1198,11 +1184,18 @@ def main():
logging.info('Platform = %s', os.name)
logging.info('Python-version = %s', sys.version)
logging.info('Arguments = %s', sabnzbd.CMDLINE)

# Find encoding; relevant for unrar activities
try:
logging.info('Preferred encoding = %s', locale.getpreferredencoding())
preferredencoding = locale.getpreferredencoding()
logging.info('Preferred encoding = %s', preferredencoding)
except:
logging.info('Preferred encoding = ERROR')
preferredencoding = ''

# On Linux/FreeBSD/Unix "UTF-8" is strongly, strongly adviced:
if not sabnzbd.WIN32 and not sabnzbd.DARWIN and not ('utf' in preferredencoding.lower() and '8' in preferredencoding.lower()):
logging.warning(T("SABnzbd was started with encoding %s, this should be UTF-8. Expect problems with Unicoded file and directory names in downloads.") % preferredencoding)

if sabnzbd.cfg.log_level() > 1:
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6
@@ -1236,17 +1229,6 @@ def main():
if cpumodel:
logging.debug('CPU model name is %s', cpumodel)

# OSX 10.5 I/O priority setting
if sabnzbd.DARWIN:
logging.info('[osx] IO priority setting')
try:
from ctypes import cdll
libc = cdll.LoadLibrary('/usr/lib/libc.dylib')
boolSetResult = libc.setiopolicy_np(0, 1, 3)
logging.info('[osx] IO priority set to throttle for process scope')
except:
logging.info('[osx] IO priority setting not supported')

logging.info('Read INI file %s', inifile)

if autobrowser is not None:
@@ -1262,21 +1244,12 @@ def main():

os.chdir(sabnzbd.DIR_PROG)

web_dir = Web_Template(sabnzbd.cfg.web_dir, DEF_STDINTF, fix_webname(web_dir))
web_dir2 = Web_Template(sabnzbd.cfg.web_dir2, '', fix_webname(web_dir2))
web_dirc = Web_Template(None, DEF_STDCONFIG, '')
sabnzbd.WEB_DIR = Web_Template(sabnzbd.cfg.web_dir, DEF_STDINTF, fix_webname(web_dir))
sabnzbd.WEB_DIR_CONFIG = Web_Template(None, DEF_STDCONFIG, '')
sabnzbd.WIZARD_DIR = os.path.join(sabnzbd.DIR_INTERFACES, 'wizard')

wizard_dir = os.path.join(sabnzbd.DIR_INTERFACES, 'wizard')

sabnzbd.WEB_DIR = web_dir
sabnzbd.WEB_DIR2 = web_dir2
sabnzbd.WEB_DIRC = web_dirc
sabnzbd.WIZARD_DIR = wizard_dir

sabnzbd.WEB_COLOR = CheckColor(sabnzbd.cfg.web_color(), web_dir)
sabnzbd.WEB_COLOR = CheckColor(sabnzbd.cfg.web_color(), sabnzbd.WEB_DIR)
sabnzbd.cfg.web_color.set(sabnzbd.WEB_COLOR)
sabnzbd.WEB_COLOR2 = CheckColor(sabnzbd.cfg.web_color2(), web_dir2)
sabnzbd.cfg.web_color2.set(sabnzbd.WEB_COLOR2)

if fork and not sabnzbd.WIN32:
daemonize()
@@ -1304,23 +1277,6 @@ def main():
logging.info("SSL version %s", sabnzbd.utils.sslinfo.ssl_version())
logging.info("SSL supported protocols %s", str(sabnzbd.utils.sslinfo.ssl_protocols_labels()))

cherrylogtoscreen = False
sabnzbd.WEBLOGFILE = None

if cherrypylogging:
if logdir:
sabnzbd.WEBLOGFILE = os.path.join(logdir, DEF_LOG_CHERRY)
# Define our custom logger for cherrypy errors
cherrypy_logging(sabnzbd.WEBLOGFILE, logging.handlers.RotatingFileHandler)
if not fork:
try:
x = sys.stderr.fileno
x = sys.stdout.fileno
if cherrypylogging == 1:
cherrylogtoscreen = True
except:
pass

https_cert = sabnzbd.cfg.https_cert.get_path()
https_key = sabnzbd.cfg.https_key.get_path()
https_chain = sabnzbd.cfg.https_chain.get_path()
@@ -1387,52 +1343,45 @@ def main():
'server.socket_host': cherryhost,
'server.socket_port': cherryport,
'server.shutdown_timeout': 0,
'log.screen': cherrylogtoscreen,
'log.screen': False,
'engine.autoreload.on': False,
'tools.encode.on': True,
'tools.gzip.on': True,
'tools.gzip.mime_types': mime_gzip,
'request.show_tracebacks': True,
'checker.check_localhost': bool(consoleLogging),
'error_page.401': sabnzbd.panic.error_page_401,
'error_page.404': sabnzbd.panic.error_page_404
})

forced_mime_types = {'css': 'text/css', 'js': 'application/javascript'}
static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dir, 'static'), 'tools.staticdir.content_types': forced_mime_types}
staticcfg = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dirc, 'staticcfg'), 'tools.staticdir.content_types': forced_mime_types}
wizard_static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(wizard_dir, 'static'), 'tools.staticdir.content_types': forced_mime_types}

appconfig = {'/sabnzbd/api': {'tools.basic_auth.on': False},
'/api': {'tools.basic_auth.on': False},
'/m/api': {'tools.basic_auth.on': False},
# Do we want CherryPy Logging? Cannot be done via the config
if cherrypylogging:
sabnzbd.WEBLOGFILE = os.path.join(logdir, DEF_LOG_CHERRY)
cherrypy.log.screen = True
cherrypy.log.access_log.propagate = True
cherrypy.log.access_file = str(sabnzbd.WEBLOGFILE)
else:
cherrypy.log.access_log.propagate = False

# Force mimetypes (OS might overwrite them)
forced_mime_types = {'css': 'text/css', 'js': 'application/javascript'}

static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types}
staticcfg = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg'), 'tools.staticdir.content_types': forced_mime_types}
wizard_static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WIZARD_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types}

appconfig = {'/api': {'tools.basic_auth.on': False},
'/rss': {'tools.basic_auth.on': False},
'/sabnzbd/rss': {'tools.basic_auth.on': False},
'/m/rss': {'tools.basic_auth.on': False},
'/sabnzbd/shutdown': {'streamResponse': True},
'/sabnzbd/static': static,
'/static': static,
'/sabnzbd/wizard/static': wizard_static,
'/wizard/static': wizard_static,
'/favicon.ico': {'tools.staticfile.on': True, 'tools.staticfile.filename': os.path.join(web_dirc, 'staticcfg', 'ico', 'favicon.ico')},
'/sabnzbd/staticcfg': staticcfg,
'/favicon.ico': {'tools.staticfile.on': True, 'tools.staticfile.filename': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg', 'ico', 'favicon.ico')},
'/staticcfg': staticcfg
}

if web_dir2:
static2 = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(web_dir2, 'static'), 'tools.staticdir.content_types': forced_mime_types}
appconfig['/sabnzbd/m/api'] = {'tools.basic_auth.on': False}
appconfig['/sabnzbd/m/rss'] = {'tools.basic_auth.on': False}
appconfig['/sabnzbd/m/shutdown'] = {'streamResponse': True}
appconfig['/sabnzbd/m/static'] = static2
appconfig['/m/static'] = static2
appconfig['/sabnzbd/m/wizard/static'] = wizard_static
appconfig['/m/wizard/static'] = wizard_static
appconfig['/sabnzbd/m/staticcfg'] = staticcfg
appconfig['/m/staticcfg'] = staticcfg

login_page = sabnzbd.interface.MainPage(web_dir, '/', web_dir2, '/m/', web_dirc, first=2)
cherrypy.tree.mount(login_page, '/', config=appconfig)
# Make available from both URLs
main_page = sabnzbd.interface.MainPage()
cherrypy.tree.mount(main_page, '/', config=appconfig)
cherrypy.tree.mount(main_page, '/sabnzbd/', config=appconfig)

# Set authentication for CherryPy
sabnzbd.interface.set_auth(cherrypy.config)

@@ -214,18 +214,7 @@ class HTTPRedirect(CherryPyException):
if isinstance(urls, text_or_bytes):
urls = [urls]

abs_urls = []
for url in urls:
url = tonative(url, encoding or self.encoding)

# Note that urljoin will "do the right thing" whether url is:
# 1. a complete URL with host (e.g. "http://www.example.com/test")
# 2. a URL relative to root (e.g. "/dummy")
# 3. a URL relative to the current path
# Note that any query string in cherrypy.request is discarded.
url = _urljoin(cherrypy.url(), url)
abs_urls.append(url)
self.urls = abs_urls
self.urls = [tonative(url, encoding or self.encoding) for url in urls]

# RFC 2616 indicates a 301 response code fits our goal; however,
# browser support for 301 is quite messy. Do 302/303 instead. See
@@ -241,7 +230,7 @@ class HTTPRedirect(CherryPyException):
raise ValueError('status must be between 300 and 399.')

self.status = status
CherryPyException.__init__(self, abs_urls, status)
CherryPyException.__init__(self, self.urls, status)

def set_response(self):
"""Modify cherrypy.response status, headers, and body to represent

@@ -85,8 +85,8 @@ class BuiltinSSLAdapter(wsgiserver.SSLAdapter):

# Check if it's one of the known errors
# Errors that are caught by PyOpenSSL, but thrown by built-in ssl
_block_errors = ('unknown protocol', 'unknown ca', 'unknown_ca',
'inappropriate fallback', 'wrong version number',
_block_errors = ('unknown protocol', 'unknown ca', 'unknown_ca', 'unknown error',
'https proxy request', 'inappropriate fallback', 'wrong version number',
'no shared cipher', 'certificate unknown', 'ccs received early')
for error_text in _block_errors:
if error_text in e.args[1].lower():

@@ -1,5 +1,5 @@
<!--#set global $pane="Config"#-->
<!--#set global $help_uri="configuration/1.2/configure"#-->
<!--#set global $help_uri="configuration/2.0/configure"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<!--#from locale import getpreferredencoding#-->
@@ -34,12 +34,7 @@
<tr>
<th scope="row">OpenSSL:</th>
<td>
<!--#if $have_ssl#-->
$ssl_version [$ssl_protocols]
<!--#else#-->
<span class="label label-danger">$T('notAvailable')</span>
<a href="$helpuri$help_uri#no_ssl" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a>
<!--#end if#-->
$ssl_version [$ssl_protocols]
</td>
</tr>
<!--#if not $have_ssl_context#-->
@@ -50,6 +45,15 @@
</td>
</tr>
<!--#end if#-->
<!--#if not $have_mt_par2#-->
<tr>
<th scope="row">Multicore Par2</th>
<td>
<span class="label label-warning">$T('notAvailable')</span> $T('explain-getpar2mt')
<a href="${helpuri}installation/multicore-par2" target="_blank">${helpuri}installation/multicore-par2</a>
</td>
</tr>
<!--#end if#-->
<!--#if not $have_cryptography #-->
<tr>
<th scope="row">Python Cryptography:</th>
@@ -59,7 +63,7 @@
</td>
</tr>
<!--#end if#-->
<!--#if not $have_yenc#-->
<!--#if not $have_yenc and not $have_sabyenc#-->
<tr>
<th scope="row">yEnc:</th>
<td>
@@ -68,6 +72,15 @@
</td>
</tr>
<!--#end if#-->
<!--#if not $have_sabyenc#-->
<tr>
<th scope="row">SABYenc:</th>
<td>
<span class="label label-danger">$T('notAvailable')</span>
<a href="$helpuri$help_uri#no_sabyenc" target="_blank"><span class="glyphicon glyphicon-question-sign"></span></a>
</td>
</tr>
<!--#end if#-->
<!--#if not $have_unzip #-->
<tr>
<th scope="row">$T('opt-enable_unzip'):</th>

@@ -1,5 +1,5 @@
<!--#set global $pane="Categories"#-->
<!--#set global $help_uri="configuration/1.2/categories"#-->
<!--#set global $help_uri="configuration/2.0/categories"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
<div class="colmask">
<div class="section">
@@ -17,7 +17,7 @@
<th>$T('category')</th>
<th>$T('priority')</th>
<th>$T('mode')</th>
<!--#if $script_list#-->
<!--#if $scripts#-->
<th>$T('script')</th>
<!--#end if#-->
<th>$T('catFolderPath')</th>
@@ -61,10 +61,10 @@
<option value="3" <!--#if $slot.pp=="3" then 'selected="selected"' else ""#-->>$T('pp-delete')</option>
</select>
</td>
<!--#if $script_list#-->
<!--#if $scripts#-->
<td>
<select name="script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<!--#if not ($sc == 'Default' and $slot.name == '*')#-->
<option value="$sc" <!--#if $slot.script.lower()==$sc.lower() then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end if#-->

@@ -1,5 +1,5 @@
<!--#set global $pane="Folders"#-->
<!--#set global $help_uri="configuration/1.2/folders"#-->
<!--#set global $help_uri="configuration/2.0/folders"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">

@@ -1,5 +1,5 @@
<!--#set global $pane="General"#-->
<!--#set global $help_uri="configuration/1.2/general"#-->
<!--#set global $help_uri="configuration/2.0/general"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">
@@ -23,9 +23,9 @@
<input type="number" name="port" id="port" value="$port" size="8" data-original="$port" />
<span class="desc">$T('explain-port')</span>
</div>
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
<div class="field-pair">
<label class="config" for="enable_https">$T('opt-enable_https')</label>
<input type="checkbox" name="enable_https" id="enable_https" value="1" <!--#if int($enable_https) > 0 then 'checked="checked"' else ""#--> <!--#if int($have_ssl) == 0 then "disabled" else ""#--> />
<input type="checkbox" name="enable_https" id="enable_https" value="1" <!--#if int($enable_https) > 0 then 'checked="checked"' else ""#-->/>
<span class="desc">$T('explain-enable_https')</span>
</div>
<div class="field-pair">
@@ -39,21 +39,7 @@
<!--#end if#-->
<!--#end for#-->
</select>
<span class="desc">$T('explain-web_dir') <a href="$caller_url1">$caller_url1</a></span>
</div>
<div class="field-pair">
<label class="config" for="web_dir2">$T('opt-web_dir2')</label>
<select name="web_dir2" id="web_dir2">
<option value="None" selected="selected">$T("None")</option>
<!--#for $webline in $web_list#-->
<!--#if $webline.lower() == $web_dir2.lower()#-->
<option value="$webline" selected="selected">$webline</option>
<!--#else#-->
<option value="$webline">$webline</option>
<!--#end if#-->
<!--#end for#-->
</select>
<span class="desc">$T('explain-web_dir2') <a href="$caller_url2">$caller_url2</a></span>
<span class="desc">$T('explain-web_dir') <a href="$caller_url">$caller_url</a></span>
</div>
<div class="field-pair">
<label class="config" for="language">$T('opt-language')</label>

@@ -1,5 +1,5 @@
<!--#set global $pane="Email"#-->
<!--#set global $help_uri="configuration/1.2/notifications"#-->
<!--#set global $help_uri="configuration/2.0/notifications"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<!--#def show_notify_checkboxes($section_label)#-->
@@ -340,7 +340,7 @@
<div class="field-pair">
<label class="config" for="nscript_script">$T('opt-nscript_script')</label>
<select name="nscript_script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $nscript_script == $sc then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#-->
</select>

@@ -1,5 +1,5 @@
<!--#set global $pane="RSS"#-->
<!--#set global $help_uri="configuration/1.2/rss"#-->
<!--#set global $help_uri="configuration/2.0/rss"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->
<div class="colmask">
<!--#if not $active_feed#-->
@@ -163,7 +163,7 @@
<!--#if $rss[$feed]['pick_cat']#-->
<td>
<select name="cat">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct==$rss[$feed]['cat'] then 'selected="selected"' else ""#-->>$Tspec($ct)</option>
<!--#end for#-->
</select>
@@ -191,7 +191,7 @@
<!--#if $rss[$feed]['pick_script']#-->
<td>
<select name="script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $sc==$rss[$feed]['script'] then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#-->
</select>
@@ -236,7 +236,7 @@
<!--#if $rss[$feed]['pick_cat']#-->
<td>
<select name="cat">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct=='Default' then 'selected="selected"' else ""#-->>$Tspec($ct)</option>
<!--#end for#-->
</select>
@@ -264,7 +264,7 @@
<!--#if $rss[$feed]['pick_script']#-->
<td>
<select name="script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $sc=='Default' then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#-->
</select>
@@ -312,7 +312,7 @@
<!--#if $rss[$feed]['pick_cat']#-->
<td>
<select name="cat">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct==$filter[0] then 'selected="selected"' else ""#-->>$Tspec($ct)</option>
<!--#end for#-->
</select>
@@ -340,7 +340,7 @@
<!--#if $rss[$feed]['pick_script']#-->
<td>
<select name="script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<option value="$sc" <!--#if $sc==$filter[2] then 'selected="selected"' else ""#-->>$Tspec($sc)</option>
<!--#end for#-->
</select>

@@ -1,5 +1,5 @@
<!--#set global $pane="Scheduling"#-->
<!--#set global $help_uri="configuration/1.2/scheduling"#-->
<!--#set global $help_uri="configuration/2.0/scheduling"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<%
@@ -80,21 +80,20 @@ else:
<div class="col1">
<fieldset>
<!--#if $schedlines#-->
<!--#set $schednum = 0#-->
<!--#set $odd = True#-->
<!--#for $line in $schedlines#-->
<!--#for $schednum, $line in enumerate($schedlines)#-->
<!--#set $odd = not $odd#-->
<form action="delSchedule" method="post">
<input type="hidden" name="session" value="$session"/>
<input type="hidden" name="line" id="line" value="$line"/>
<div class="field-pair infoTableSeperator <!--#if $odd then "" else " alt"#-->">
<input type="checkbox" name="schedenabled" value="$line" <!--#if int($taskinfo[$schednum][5]) > 0 then 'checked="checked"' else ""#-->>
<button class="btn btn-default float-left"><span class="glyphicon glyphicon-trash"></span></button>
<div class="scheduleEntry">
<span class="time">$taskinfo[$schednum][1]:$taskinfo[$schednum][2]</span><span class="frequency">$taskinfo[$schednum][3]</span> <span class="darkred">$taskinfo[$schednum][4]</span>
</div>
</div>
</form>
<!--#set $schednum = $schednum+1#-->
<!--#end for#-->
<!--#else#-->
<div class="field-pair">
@@ -126,5 +125,18 @@ else:
\$('#hidden_arguments').show()
}*/
})

\$('[name="schedenabled"]').click(function() {
\$.ajax({
type: "POST",
url: "toggleSchedule",
data: {line: \$(this).val(), session: "$session" }
}).done(function() {
// Let us leave!
formWasSubmitted = true;
formHasChanged = false;
location.reload();
});
});
</script>
<!--#include $webdir + "/_inc_footer_uc.tmpl"#-->

@@ -1,5 +1,5 @@
<!--#set global $pane="Servers"#-->
<!--#set global $help_uri="configuration/1.2/servers"#-->
<!--#set global $help_uri="configuration/2.0/servers"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">
@@ -29,9 +29,9 @@
<label class="config" for="port">$T('srv-port')</label>
<input type="number" name="port" id="port" size="8" value="119" />
</div>
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
<div class="field-pair">
<label class="config" for="ssl">$T('srv-ssl')</label>
<input type="checkbox" name="ssl" id="ssl" value="1" <!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
<input type="checkbox" name="ssl" id="ssl" value="1" />
<span class="desc">$T('explain-ssl')</span>
</div>
<!-- Tricks to avoid browser auto-fill, fixed on-submit with javascript -->
@@ -62,9 +62,9 @@
<div class="field-pair <!--#if int($have_ssl_context) == 0 then "disabled" else ""#--> advanced-settings">
<label class="config" for="ssl_verify">$T('opt-ssl_verify')</label>
<select name="ssl_verify" id="ssl_verify" <!--#if int($have_ssl_context) == 0 then "disabled=\"disabled\"" else ""#-->>
<option value="0">$T('ssl_verify-disabled')</option>
<option value="1" selected>$T('ssl_verify-normal')</option>
<option value="2">$T('ssl_verify-strict')</option>
<option value="2" selected>$T('ssl_verify-strict')</option>
<option value="1">$T('ssl_verify-normal')</option>
<option value="0">$T('ssl_verify-disabled')</option>
</select>
<span class="desc">$T('explain-ssl_verify').replace('. ', '.<br/>')</span>
</div>
@@ -149,9 +149,9 @@
<label class="config" for="port$cur">$T('srv-port')</label>
<input type="number" name="port" id="port$cur" value="$server['port']" size="8" />
</div>
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
<div class="field-pair">
<label class="config" for="ssl$cur">$T('srv-ssl')</label>
<input type="checkbox" name="ssl" id="ssl$cur" value="1" <!--#if int($server['ssl']) != 0 and int($have_ssl) == 1 then 'checked="checked"' else ""#--> <!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
<input type="checkbox" name="ssl" id="ssl$cur" value="1" <!--#if int($server['ssl']) != 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-ssl')</span>
</div>
<!-- Tricks to avoid browser auto-fill, fixed on-submit with javascript -->
@@ -183,9 +183,9 @@
<div class="field-pair <!--#if int($have_ssl_context) == 0 then "disabled" else ""#--> advanced-settings">
<label class="config" for="ssl_verify$cur">$T('opt-ssl_verify')</label>
<select name="ssl_verify" id="ssl_verify$cur" <!--#if int($have_ssl_context) == 0 then "disabled=\"disabled\"" else ""#-->>
<option value="0" <!--#if $server['ssl_verify'] == 0 then 'selected="selected"' else ""#--> >$T('ssl_verify-disabled')</option>
<option value="1" <!--#if $server['ssl_verify'] == 1 then 'selected="selected"' else ""#--> >$T('ssl_verify-normal')</option>
<option value="2" <!--#if $server['ssl_verify'] == 2 then 'selected="selected"' else ""#--> >$T('ssl_verify-strict')</option>
<option value="2" <!--#if $server['ssl_verify'] == 2 then 'selected="selected"' else ""#--> >$T('ssl_verify-strict')</option>
<option value="1" <!--#if $server['ssl_verify'] == 1 then 'selected="selected"' else ""#--> >$T('ssl_verify-normal')</option>
<option value="0" <!--#if $server['ssl_verify'] == 0 then 'selected="selected"' else ""#--> >$T('ssl_verify-disabled')</option>
</select>
<span class="desc">$T('explain-ssl_verify').replace('. ', '.<br/>')</span>
</div>

@@ -1,5 +1,5 @@
<!--#set global $pane="Sorting"#-->
<!--#set global $help_uri="configuration/1.2/sorting"#-->
<!--#set global $help_uri="configuration/2.0/sorting"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">
@@ -12,7 +12,7 @@
<p>
<b>$T('affectedCat')</b><br/>
<select name="tv_cat" multiple="multiple" class="multiple_cats">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct in $tv_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
<!--#end for#-->
</select>
@@ -165,7 +165,7 @@
<p>
<b>$T('affectedCat')</b><br/>
<select name="movie_cat" multiple="multiple" class="multiple_cats">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct in $movie_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
<!--#end for#-->
</select>
@@ -303,7 +303,7 @@
<p>
<b>$T('affectedCat')</b><br/>
<select name="date_cat" multiple="multiple" class="multiple_cats">
<!--#for $ct in $cat_list#-->
<!--#for $ct in $categories#-->
<option value="$ct" <!--#if $ct in $date_categories then 'selected="selected"' else ""#--> >$Tspec($ct)</option>
<!--#end for#-->
</select>

@@ -1,5 +1,5 @@
<!--#set global $pane="Special"#-->
<!--#set global $help_uri="configuration/1.2/special"#-->
<!--#set global $help_uri="configuration/2.0/special"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">

@@ -1,5 +1,5 @@
<!--#set global $pane="Switches"#-->
<!--#set global $help_uri="configuration/1.2/switches"#-->
<!--#set global $help_uri="configuration/2.0/switches"#-->
<!--#include $webdir + "/_inc_header_uc.tmpl"#-->

<div class="colmask">
@@ -20,9 +20,9 @@
</select>
<span class="desc">$T('explain-load_balancing')</span>
</div>
<div class="field-pair <!--#if int($have_ssl) == 0 then "disabled" else ""#-->">
<div class="field-pair">
<label class="config" for="ssl_ciphers">$T('opt-ssl_ciphers')</label>
<input type="text" name="ssl_ciphers" id="ssl_ciphers" value="$ssl_ciphers"<!--#if int($have_ssl) == 0 then "disabled=\"disabled\"" else ""#--> />
<input type="text" name="ssl_ciphers" id="ssl_ciphers" value="$ssl_ciphers" />
<span class="desc">$T('explain-ssl_ciphers') <br>$T('readwiki')
<a href="${helpuri}advanced/ssl-ciphers" target="_blank">${helpuri}advanced/ssl-ciphers</a></span>
</div>
@@ -58,7 +58,7 @@
<div class="field-pair">
<label class="config" for="pre_script">$T('opt-pre_script')</label>
<select name="pre_script" id="pre_script">
<!--#for $sc in $script_list#-->
<!--#for $sc in $scripts#-->
<!--#if $sc.lower() == $pre_script.lower()#-->
<option value="$sc" selected="selected">$Tspec($sc)</option>
<!--#else#-->
@@ -159,11 +159,6 @@
<input type="checkbox" name="enable_all_par" id="enable_all_par" value="1" <!--#if int($enable_all_par) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-enable_all_par').replace('. ', '.<br/>')</span>
</div>
<div class="field-pair">
<label class="config" for="quick_check">$T('opt-quick_check')</label>
<input type="checkbox" name="quick_check" id="quick_check" value="1" <!--#if int($quick_check) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-quick_check')</span>
</div>
<!--#if $have_multicore#-->
<div class="field-pair">
<label class="config" for="par2_multicore">$T('opt-par2_multicore')</label>
@@ -196,11 +191,6 @@
<input type="checkbox" name="flat_unpack" id="flat_unpack" value="1" <!--#if int($flat_unpack) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-flat_unpack')</span>
</div>
<div class="field-pair">
<label class="config" for="overwrite_files">$T('opt-overwrite_files')</label>
<input type="checkbox" name="overwrite_files" id="overwrite_files" value="1" <!--#if int($overwrite_files) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-overwrite_files')</span>
</div>
<div class="field-pair">
<label class="config" for="script_can_fail">$T('opt-script_can_fail')</label>
<input type="checkbox" name="script_can_fail" id="script_can_fail" value="1" <!--#if int($script_can_fail) > 0 then 'checked="checked"' else ""#--> />
@@ -273,11 +263,6 @@
<span class="desc">$T('explain-sanitize_safe')</span>
</div>
<!--#end if#-->
<div class="field-pair">
<label class="config" for="enable_meta">$T('opt-enable_meta')</label>
<input type="checkbox" name="enable_meta" id="enable_meta" value="1" <!--#if int($enable_meta) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-enable_meta')</span>
</div>
<div class="field-pair">
<button class="btn btn-default saveButton"><span class="glyphicon glyphicon-ok"></span> $T('button-saveChanges')</button>
<button class="btn btn-default restoreDefaults"><span class="glyphicon glyphicon-asterisk"></span> $T('button-restoreDefaults')</button>
@@ -331,16 +316,7 @@
<div class="field-pair">
<label class="config" for="rating_enable">$T('opt-rating_enable')</label>
<input type="checkbox" name="rating_enable" id="rating_enable" value="1" <!--#if int($rating_enable) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-rating_enable')</span>
</div>
<div class="field-pair">
<label class="config" for="rating_feedback">$T('opt-rating_feedback')</label>
<input type="checkbox" name="rating_feedback" id="rating_feedback" value="1" <!--#if int($rating_feedback) > 0 then 'checked="checked"' else ""#--> />
<span class="desc">$T('explain-rating_feedback')</span>
</div>
<div class="field-pair">
<label class="config" for="rating_host">$T('opt-rating_host')</label>
<input type="text" name="rating_host" id="rating_host" value="$rating_host" />
<span class="desc">$T('explain-rating_enable').replace('. ', '.<br/>')</span>
</div>
<div class="field-pair">
<label class="config" for="rating_api_key">$T('opt-rating_api_key')</label>

@@ -865,11 +865,17 @@ input[type="checkbox"] {
max-width: 150px !important;
}

.Scheduling input[type="checkbox"] {
.Scheduling form[action="addSchedule"] input[type="checkbox"] {
|
||||
margin-top: 0px;
|
||||
margin-left: -20px;
|
||||
}
|
||||
|
||||
.Scheduling form[action="delSchedule"] input[type="checkbox"] {
|
||||
position: initial;
|
||||
float: left;
|
||||
margin: 9px 10px 0px 5px;
|
||||
}
|
||||
|
||||
.navbar .container {
|
||||
padding-right: 0;
|
||||
}
|
||||
|
||||
@@ -273,7 +273,7 @@ function do_restart() {
|
||||
error: function(status, text) {
|
||||
failureCounter = failureCounter+1;
|
||||
// Too many failures and we give up
|
||||
if(failureCounter >= 7) {
|
||||
if(failureCounter >= 6) {
|
||||
// If the port has changed 'Access-Control-Allow-Origin' header will not allow
|
||||
// us to check if the server is back up. So after 7 failures we redirect
|
||||
// anyway in the hopes it works anyway..
|
||||
@@ -281,7 +281,7 @@ function do_restart() {
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 3000)
|
||||
}, 4000)
|
||||
|
||||
// Exception if we go from HTTPS to HTTP
|
||||
// (this is not allowed by browsers and all of the above will be ignored)
|
||||
|
||||
@@ -112,37 +112,36 @@
|
||||
<hr/>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('cache')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasStatusInfo">
|
||||
<span data-bind="text: statusInfo.cache_size"></span> (<span data-bind="text: statusInfo.cache_art"></span> $T('Glitter-articles'))
|
||||
<div class="col-sm-6">
|
||||
<span data-bind="text: cacheSize"></span> (<span data-bind="text: cacheArticles"></span> $T('Glitter-articles'))
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-systemPerformance')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasStatusInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<span data-bind="text: statusInfo.pystone"></span>
|
||||
<a href="#" data-bind="click: loadStatusInfo" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<a href="#" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small data-bind="truncatedText: statusInfo.cpumodel, length: 25, attr: { 'data-original-title': statusInfo.cpumodel }" data-tooltip="true"></small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-downloadDirSpeed')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasDiskStatusInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<span data-bind="text: statusInfo.downloaddirspeed()"></span> MB/s
|
||||
<a href="#" class="diskspeed-button" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small>(<span data-bind="truncatedText: statusInfo.downloaddir, length: 24, attr: { 'data-original-title': statusInfo.downloaddir }" data-tooltip="true"></span>)</small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasDiskStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-6">$T('dashboard-completeDirSpeed')</div>
|
||||
<div class="col-sm-6" data-bind="visible: hasDiskStatusInfo">
|
||||
<div class="col-sm-6" data-bind="visible: hasPerformanceInfo">
|
||||
<span data-bind="text: statusInfo.completedirspeed()"></span> MB/s
|
||||
<a href="#" class="diskspeed-button" data-bind="click: testDiskSpeed" data-tooltip="true" data-placement="right" title="$T('dashboard-repeatTest')"><span class="glyphicon glyphicon-repeat"></span></a>
|
||||
<small>(<span data-bind="truncatedText: statusInfo.completedir, length: 24, attr: { 'data-original-title': statusInfo.completedir }" data-tooltip="true"></span>)</small>
|
||||
</div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasDiskStatusInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
<div class="col-sm-6 col-loading" data-bind="visible: !hasPerformanceInfo()">$T('Glitter-loading')<span class="loader-dot-one">.</span><span class="loader-dot-two">.</span><span class="loader-dot-three">.</span></div>
|
||||
</div>
|
||||
<hr />
|
||||
<div class="row options-function-box">
|
||||
|
||||
@@ -152,7 +152,7 @@
|
||||
</li>
|
||||
<li title="$T('eoq-scripts')" data-tooltip="true" data-placement="left">
|
||||
<span class="glyphicon glyphicon-flash"></span>
|
||||
<select name="Post-processing" class="form-control" data-bind="options: parent.scriptsList, value: script, event: { change: changeScript }, enable: (parent.scriptsList().length > 0)"></select>
|
||||
<select name="Post-processing" class="form-control" data-bind="options: parent.scriptsList, value: script, event: { change: changeScript }, enable: (parent.scriptsList().length > 1)"></select>
|
||||
</li>
|
||||
</ul>
|
||||
<!-- /ko -->
|
||||
|
||||
@@ -55,15 +55,10 @@ function Fileslisting(parent) {
|
||||
$.each(response.files, function(index, slot) {
|
||||
// Existing or updating?
|
||||
var existingItem = ko.utils.arrayFirst(self.fileItems(), function(i) {
|
||||
return i.filename() == slot.filename;
|
||||
return i.nzf_id() == slot.nzf_id;
|
||||
});
|
||||
|
||||
if(existingItem) {
|
||||
// We skip queued files!
|
||||
// They cause problems because they can have the same filename
|
||||
// as files that we do want to be updated.. The slot.id is not unique!
|
||||
if(slot.status == "queued") return false;
|
||||
|
||||
// Update the rest
|
||||
existingItem.updateFromData(slot);
|
||||
} else {
|
||||
@@ -203,7 +198,7 @@ function FileslistingModel(parent, data) {
|
||||
self.file_age = ko.observable(data.age);
|
||||
self.mb = ko.observable(data.mb);
|
||||
self.percentage = ko.observable(fixPercentages((100 - (data.mbleft / data.mb * 100)).toFixed(0)));
|
||||
self.canselect = ko.observable(data.nzf_id !== undefined);
|
||||
self.canselect = ko.observable(data.status != "finished" && data.status != "queued");
|
||||
self.isdone = ko.observable(data.status == "finished");
|
||||
|
||||
// Update internally
|
||||
@@ -213,7 +208,7 @@ function FileslistingModel(parent, data) {
|
||||
self.file_age(data.age)
|
||||
self.mb(data.mb)
|
||||
self.percentage(fixPercentages((100 - (data.mbleft / data.mb * 100)).toFixed(0)));
|
||||
self.canselect(data.nzf_id !== undefined)
|
||||
self.canselect(data.status != "finished" && data.status != "queued")
|
||||
self.isdone(data.status == "finished")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,10 +356,17 @@ function HistoryModel(parent, data) {
|
||||
return displayDateTime(self.completed(), parent.parent.dateFormat(), 'X')
|
||||
});
|
||||
|
||||
// Subscribe to retryEvent so we can load the password
|
||||
self.canRetry.subscribe(function() {
|
||||
self.updateAllHistory = true;
|
||||
})
|
||||
|
||||
// Re-try button
|
||||
self.retry = function() {
|
||||
// Set JOB-id
|
||||
$('#modal-retry-job input[name="retry_job_id"]').val(self.nzo_id)
|
||||
// Set password
|
||||
$('#retry_job_password').val(self.historyStatus.password())
|
||||
// Open modal
|
||||
$('#modal-retry-job').modal("show")
|
||||
};
|
||||
|
||||
@@ -40,6 +40,8 @@ function ViewModel() {
|
||||
self.quotaLimit = ko.observable();
|
||||
self.quotaLimitLeft = ko.observable();
|
||||
self.systemLoad = ko.observable();
|
||||
self.cacheSize = ko.observable();
|
||||
self.cacheArticles = ko.observable();
|
||||
self.nrWarnings = ko.observable(0);
|
||||
self.allWarnings = ko.observableArray([]);
|
||||
self.allMessages = ko.observableArray([]);
|
||||
@@ -48,7 +50,7 @@ function ViewModel() {
|
||||
|
||||
// Statusinfo container
|
||||
self.hasStatusInfo = ko.observable(false);
|
||||
self.hasDiskStatusInfo = ko.observable(false);
|
||||
self.hasPerformanceInfo = ko.observable(false);
|
||||
self.statusInfo = {};
|
||||
self.statusInfo.folders = ko.observableArray([]);
|
||||
self.statusInfo.servers = ko.observableArray([]);
|
||||
@@ -59,8 +61,6 @@ function ViewModel() {
|
||||
self.statusInfo.pystone = ko.observable();
|
||||
self.statusInfo.cpumodel = ko.observable();
|
||||
self.statusInfo.loglevel = ko.observable();
|
||||
self.statusInfo.cache_size = ko.observable();
|
||||
self.statusInfo.cache_art = ko.observable();
|
||||
self.statusInfo.downloaddir = ko.observable();
|
||||
self.statusInfo.downloaddirspeed = ko.observable();
|
||||
self.statusInfo.completedir = ko.observable();
|
||||
@@ -183,6 +183,10 @@ function ViewModel() {
|
||||
// System load
|
||||
self.systemLoad(response.queue.loadavg)
|
||||
|
||||
// Cache
|
||||
self.cacheSize(response.queue.cache_size)
|
||||
self.cacheArticles(response.queue.cache_art)
|
||||
|
||||
// Warnings (new warnings will trigger an update of allMessages)
|
||||
self.nrWarnings(response.queue.have_warnings)
|
||||
|
||||
@@ -749,8 +753,6 @@ function ViewModel() {
|
||||
callAPI({ mode: 'fullstatus', skip_dashboard: (!statusFullRefresh)*1 }).then(function(data) {
|
||||
// Update basic
|
||||
self.statusInfo.loglevel(data.status.loglevel)
|
||||
self.statusInfo.cache_art(data.status.cache_art)
|
||||
self.statusInfo.cache_size(data.status.cache_size)
|
||||
self.statusInfo.folders(data.status.folders)
|
||||
|
||||
// Update the full set
|
||||
@@ -766,7 +768,7 @@ function ViewModel() {
|
||||
self.statusInfo.publicipv4(data.status.publicipv4)
|
||||
self.statusInfo.ipv6(data.status.ipv6 || glitterTranslate.noneText)
|
||||
// Loaded disk info
|
||||
self.hasDiskStatusInfo(true)
|
||||
self.hasPerformanceInfo(true)
|
||||
}
|
||||
|
||||
// Update the servers
|
||||
@@ -816,7 +818,7 @@ function ViewModel() {
|
||||
|
||||
// Do a disk-speedtest
|
||||
self.testDiskSpeed = function(item, event) {
|
||||
self.hasDiskStatusInfo(false)
|
||||
self.hasPerformanceInfo(false)
|
||||
|
||||
// Run it and then display it
|
||||
callSpecialAPI('./status/dashrefresh/').then(function() {
|
||||
|
||||
@@ -171,7 +171,7 @@ function QueueListModel(parent) {
|
||||
|
||||
// Do we show search box. So it doesn't disappear when nothing is found
|
||||
self.hasQueueSearch = ko.pureComputed(function() {
|
||||
return (self.pagination.hasPagination() || self.searchTerm())
|
||||
return (self.pagination.hasPagination() || self.searchTerm() || (self.parent.hasQueue() && self.isMultiEditing()))
|
||||
})
|
||||
|
||||
// Searching in queue (rate-limited in declaration)
|
||||
@@ -652,7 +652,7 @@ function QueueModel(parent, data) {
|
||||
}
|
||||
self.changeScript = function(item) {
|
||||
// Not on empty handlers
|
||||
if(!item.script()) return;
|
||||
if(!item.script() || parent.scriptsList().length <= 1) return;
|
||||
callAPI({
|
||||
mode: 'change_script',
|
||||
value: item.id,
|
||||
|
||||
@@ -26,9 +26,9 @@
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="${path}staticcfg/ico/apple-touch-icon-76x76-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="${path}staticcfg/ico/apple-touch-icon-120x120-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="152x152" href="${path}staticcfg/ico/apple-touch-icon-152x152-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${path}staticcfg/ico/apple-touch-icon-180x180-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="${path}staticcfg/ico/apple-touch-icon-180x180-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="192x192" href="${path}staticcfg/ico/android-192x192.png" />
|
||||
|
||||
|
||||
<script type="text/javascript" src="${path}static/javascripts/lib.js?$version"></script>
|
||||
|
||||
#if $pane=="Main"#
|
||||
@@ -102,7 +102,6 @@
|
||||
<span id="warning_box"><b><a href="${path}status/#tabs-warnings" id="last_warning" title="#echo $last_warning.replace("\n"," ").replace('"',"'") #"><span id="have_warnings">$have_warnings</span> $T('warnings')</a></b></span>
|
||||
#if $pane=="Main"#
|
||||
#if $new_release#⋅ <a href="$new_rel_url" id="new_release" target="_blank">$T('Plush-updateAvailable').replace(' ',' ')</a>#end if#
|
||||
#if $warning#⋅ <a id="warning_message">$warning.replace(' ',' ')</a>#end if#
|
||||
#end if#
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -84,10 +84,10 @@ $T('Plush-containerWidth'):
|
||||
<div id="add_nzb_hr"><hr></div>
|
||||
<i>$T('pp'):</i>
|
||||
<table id="add_nzb_pp">
|
||||
#if $cat_list#
|
||||
#if $categories#
|
||||
<tr><td style="text-align:right">$T('category'):</td>
|
||||
<td><select id="addID_cat" name="cat">
|
||||
#for $ct in $cat_list#
|
||||
#for $ct in $categories#
|
||||
<option value="$ct">$Tspec($ct)</option>
|
||||
#end for#
|
||||
</select>
|
||||
@@ -111,10 +111,10 @@ $T('Plush-containerWidth'):
|
||||
<option value="3">$T('pp-delete')</option>
|
||||
</select>
|
||||
</td></tr>
|
||||
#if $script_list#
|
||||
#if $scripts#
|
||||
<tr><td style="text-align:right">$T('script'):</td>
|
||||
<td><select id="addID_script" name="script">
|
||||
#for $sc in $script_list#
|
||||
#for $sc in $scripts#
|
||||
<option value="$sc">$Tspec($sc)</option>
|
||||
#end for#
|
||||
</select>
|
||||
@@ -186,7 +186,7 @@ $T('Plush-containerWidth'):
|
||||
<option value='www.newshosting.com'>NewsHosting</option>
|
||||
<option value='www.readnews.com'>Readnews</option>
|
||||
<option value='www.supernews.com'>SuperNews</option>
|
||||
<option value='www.thundernews.com'>ThunderNews</option>
|
||||
<option value='www.thundernews.com'>ThunderNews</option>
|
||||
<option value='www.tweaknews.eu'>Tweaknews</option>
|
||||
<option value='www.usenetserver.com'>UsenetServer</option>
|
||||
<option value='www.xentech.net'>XenTech</option>
|
||||
|
||||
@@ -46,9 +46,9 @@
|
||||
<option value="hibernate_pc" <!--#if $finishaction == 'hibernate_pc' then 'selected' else ''#-->>$T('hibernatePc')</option>
|
||||
<!--#end if#-->
|
||||
</optgroup>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<optgroup label="$T('eoq-scripts')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<!--#if $sc != 'None'#-->
|
||||
<option value="script_$sc" <!--#if $finishaction == 'script_'+$sc then 'selected' else ''#-->>$sc</option>
|
||||
<!--#end if#-->
|
||||
@@ -85,7 +85,7 @@
|
||||
<ul>
|
||||
<li>
|
||||
$T('Plush-maxSpeed'):
|
||||
<input type="text" id="maxSpeed-option" size="4" />
|
||||
<input type="text" id="maxSpeed-option" size="4" />
|
||||
<select id="maxSpeed-label">
|
||||
<option value="%">%</option>
|
||||
<option value="K">KB/s</option>
|
||||
@@ -136,10 +136,10 @@
|
||||
<option value="pause">$T('sch-pause')</option>
|
||||
<option value="resume">$T('sch-resume')</option>
|
||||
</optgroup></select>
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select id="multi_cat"><optgroup label="$T('category')">
|
||||
<option value="">$T('category')</option>
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct">$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
@@ -158,10 +158,10 @@
|
||||
<option value="2">$T('pp-unpack')</option>
|
||||
<option value="3">$T('pp-delete')</option>
|
||||
</optgroup></select>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select id="multi_script"><optgroup label="$T('script')">
|
||||
<option value="">$T('script')</option>
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<!--#if $sc != "Default"#--><option value="$sc">$Tspec($sc)</option><!--#end if#-->
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
|
||||
@@ -15,9 +15,9 @@
|
||||
<option value=$i <!--#if $i == $index then "selected" else ""#-->>$i</option>
|
||||
<!--#end for#-->
|
||||
</select>
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select name="cat"><optgroup label="$T('category')">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct" <!--#if $slot.cat.lower() == $ct.lower() then "selected" else ""#-->>$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
@@ -35,9 +35,9 @@
|
||||
<option value="2" <!--#if $slot.unpackopts == "2" then "selected" else ""#-->>$T('pp-unpack')</option>
|
||||
<option value="3" <!--#if $slot.unpackopts == "3" then "selected" else ""#-->>$T('pp-delete')</option>
|
||||
</optgroup></select>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select name="script"><optgroup label="$T('script')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc" <!--#if $slot.script.lower() == $sc.lower() then "selected" else ""#-->>$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
|
||||
@@ -75,7 +75,7 @@
|
||||
<td></td>
|
||||
<!--#end if#-->
|
||||
<!--#end if#-->
|
||||
|
||||
|
||||
<td>
|
||||
<div class="main_sprite_container sprite_progressbar_bg">
|
||||
<div class="main_sprite_container sprite_progress_done" style="background-position: -<!--#if $slot.mb == "0.00" then "120" else int(120 - 120.0 / 100.0 * int(100 - float($slot.mbleft) / float($slot.mb) * 100))#-->px -401px">
|
||||
@@ -94,9 +94,9 @@
|
||||
</td>
|
||||
|
||||
<td class="options nowrap">
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select class="change_cat"><optgroup label="$T('category')">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct" <!--#if $slot.cat.lower() == $ct.lower() then "selected" else ""#-->>$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
@@ -114,9 +114,9 @@
|
||||
<option value="2" <!--#if $slot.unpackopts == "2" then "selected " else ""#-->>$T('pp-unpack')</option>
|
||||
<option value="3" <!--#if $slot.unpackopts == "3" then "selected " else ""#-->>$T('pp-delete')</option>
|
||||
</optgroup></select>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select class="change_script"><optgroup label="$T('script')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc" <!--#if $slot.script == $sc then "selected" else ""#-->>$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup></select>
|
||||
|
||||
@@ -125,15 +125,15 @@
|
||||
|
||||
<div id="tabs-dashboard">
|
||||
<table class="rssTable">
|
||||
<tr>
|
||||
<th colspan="2">$T('dashboard-title')</th>
|
||||
<tr>
|
||||
<th colspan="2">$T('dashboard-title')</th>
|
||||
</tr>
|
||||
<!--#set $odd = False#-->
|
||||
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-localIP4')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $localipv4#-->
|
||||
$localipv4
|
||||
<!--#else#-->
|
||||
@@ -141,10 +141,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-publicIP4')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $publicipv4#-->
|
||||
$publicipv4
|
||||
<!--#else#-->
|
||||
@@ -152,10 +152,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-IP6')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $ipv6#-->
|
||||
$ipv6
|
||||
<!--#else#-->
|
||||
@@ -163,10 +163,10 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-NameserverDNS')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $dnslookup#-->
|
||||
$dnslookup
|
||||
<!--#else#-->
|
||||
@@ -178,27 +178,33 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-systemPerformance')</td>
|
||||
<td>$pystone</td>
|
||||
<td>
|
||||
<!--#if $pystone > 0 #-->
|
||||
$pystone
|
||||
<!--#elif $pystone == 0 #-->
|
||||
$T('dashboard-clickToStart')
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#if $cpumodel#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-cpuModel')</td>
|
||||
<td>$cpumodel</td>
|
||||
</tr>
|
||||
<!--#end if#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('opt-download_dir')</td>
|
||||
<td>$downloaddir</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-writingSpeed')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $downloaddirspeed > 0 #-->
|
||||
$downloaddirspeed MB/s
|
||||
<!--#elif $downloaddirspeed == 0 #-->
|
||||
@@ -208,15 +214,15 @@
|
||||
<!--#end if#-->
|
||||
</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('opt-complete_dir')</td>
|
||||
<td>$completedir</td>
|
||||
</tr>
|
||||
<!--#set $odd = not $odd#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<tr class="<!--#if $odd then "odd" else "even"#-->">
|
||||
<td>$T('dashboard-writingSpeed')</td>
|
||||
<td>
|
||||
<td>
|
||||
<!--#if $completedirspeed > 0 #-->
|
||||
$completedirspeed MB/s
|
||||
<!--#elif $completedirspeed == 0 #-->
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
<!--#end if#-->
|
||||
<link href="rss?mode=history&apikey=$session" rel="alternate" type="application/rss+xml" title="SABnzbd History" />
|
||||
<link rel="shortcut icon" href="./staticcfg/ico/favicon.ico?v=1.1.0" />
|
||||
|
||||
|
||||
<meta name="apple-mobile-web-app-title" content="SABnzbd" />
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="./staticcfg/ico/apple-touch-icon-76x76-precomposed.png" />
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="./staticcfg/ico/apple-touch-icon-120x120-precomposed.png" />
|
||||
@@ -973,14 +973,14 @@ function loadingJSON(){
|
||||
clearTimeout ( jsontimeout );
|
||||
jsontimeout = setTimeout("loadingJSON();", RefreshTime*1000);
|
||||
}
|
||||
var url = "tapi?mode=qstatus&output=json&_dc="+Math.random()+"&session="+session;
|
||||
var url = "tapi?mode=queue&output=json&_dc="+Math.random()+"&session="+session;
|
||||
var d = loadJSONDoc(url);
|
||||
var gotMetadata = function (info)
|
||||
{
|
||||
//alert(info["kbpersec"]);
|
||||
|
||||
info = info["queue"]
|
||||
speed = info["speed"];
|
||||
kbpersec = info["kbpersec"].toFixed(0);
|
||||
kbpersec = info["kbpersec"];
|
||||
var paused = info["paused"];
|
||||
var pause_int = info["pause_int"];
|
||||
//if (paused==true) speed = '';
|
||||
@@ -1017,10 +1017,10 @@ function loadingJSON(){
|
||||
}
|
||||
|
||||
document.getElementById("speed").innerHTML = speed;
|
||||
document.getElementById("mbleft").innerHTML = info["mbleft"].toFixed(2);
|
||||
document.getElementById("mbtotal").innerHTML = info["mb"].toFixed(2);
|
||||
document.getElementById("ds1").innerHTML = info["diskspace1"].toFixed(2);
|
||||
document.getElementById("ds2").innerHTML = info["diskspace2"].toFixed(2);
|
||||
document.getElementById("mbleft").innerHTML = info["mbleft"];
|
||||
document.getElementById("mbtotal").innerHTML = info["mb"];
|
||||
document.getElementById("ds1").innerHTML = info["diskspace1"];
|
||||
document.getElementById("ds2").innerHTML = info["diskspace2"];
|
||||
document.getElementById("have_warnings").innerHTML = info["have_warnings"];
|
||||
load = document.getElementById("loadavg")
|
||||
if (load) load.innerHTML = info["loadavg"];
|
||||
@@ -1155,10 +1155,10 @@ function loadingJSON(){
|
||||
<div id="addNew" class="centerLinks" style="overflow: hidden; display: none;">
|
||||
<form action="addID" method="get">
|
||||
<input type="text" style="width:218px;" name="id" value="$T('enterURL')" onfocus="clearForm(this, 'Enter URL')" onblur="setForm(this, 'Enter URL')">
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select name="cat" >
|
||||
<optgroup label="$T('category')">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct">$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
@@ -1166,17 +1166,17 @@ function loadingJSON(){
|
||||
<!--#end if#-->
|
||||
<select name="pp">
|
||||
<optgroup label="$T('pp')">
|
||||
<option value="-1" <!--#if $cat_list then "selected" else ""#-->>$T('default')</option>
|
||||
<option value="-1" <!--#if $categories then "selected" else ""#-->>$T('default')</option>
|
||||
<option value="0">$T('none')</option>
|
||||
<option value="1">$T('pp-repair')</option>
|
||||
<option value="2">$T('pp-unpack')</option>
|
||||
<option value="3" <!--#if $cat_list then "" else "selected"#-->>$T('pp-delete')</option>
|
||||
<option value="3" <!--#if $categories then "" else "selected"#-->>$T('pp-delete')</option>
|
||||
</optgroup>
|
||||
</select>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select name="script">
|
||||
<optgroup label="$T('script')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc">$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
@@ -1196,10 +1196,10 @@ function loadingJSON(){
|
||||
</form>
|
||||
<form action="addFile" method="post" enctype="multipart/form-data">
|
||||
<input type="file" style="width:221px" name="nzbfile">
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select name="cat">
|
||||
<optgroup label="$T('category')">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct">$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
@@ -1207,17 +1207,17 @@ function loadingJSON(){
|
||||
<!--#end if#-->
|
||||
<select name="pp">
|
||||
<optgroup label="$T('pp')">
|
||||
<option value="-1" <!--#if $cat_list then "selected" else ""#-->>$T('default')</option>
|
||||
<option value="-1" <!--#if $categories then "selected" else ""#-->>$T('default')</option>
|
||||
<option value="0">$T('none')</option>
|
||||
<option value="1">$T('pp-repair')</option>
|
||||
<option value="2">$T('pp-unpack')</option>
|
||||
<option value="3" <!--#if $cat_list then "" else "selected"#-->>$T('pp-delete')</option>
|
||||
<option value="3" <!--#if $categories then "" else "selected"#-->>$T('pp-delete')</option>
|
||||
</optgroup>
|
||||
</select>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select name="script">
|
||||
<optgroup label="$T('script')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc">$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
@@ -1255,10 +1255,6 @@ function loadingJSON(){
|
||||
<option value="0" >$T("none")</option>
|
||||
</select>
|
||||
<br />SABnzbd $T('version'): $version | smpl skin</a></p>
|
||||
<!--#if $warning#-->
|
||||
<h2>$T('ft-warning')</h2>
|
||||
<b>$warning</b><br />
|
||||
<!--#end if#-->
|
||||
<!--#if $new_release#-->
|
||||
<!--#set $msg=$T('ft-newRelease@1')%($new_release)#-->
|
||||
<b>$msg <a href="$new_rel_url" target="_blank">SF.net</a></b><br/>
|
||||
|
||||
@@ -30,20 +30,20 @@
|
||||
</select>
|
||||
<!--#end if#-->
|
||||
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<br class="clear" />
|
||||
<label class="label">$T('script'):</label>
|
||||
<select name="script">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc" <!--#if $slot.script.lower() == $sc.lower() then "selected" else ""#-->>$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</select>
|
||||
<!--#end if#-->
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<br class="clear" />
|
||||
<label class="label">$T('category'):</label>
|
||||
<select name="cat">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct" <!--#if $slot.cat.lower() == $ct.lower() then "selected" else ""#-->>$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</select>
|
||||
|
||||
@@ -14,9 +14,9 @@ $T('onQueueFinish'):
|
||||
<!--#end if#-->
|
||||
<option value="shutdown_program" <!--#if $finishaction == 'shutdown_program' then 'selected' else ''#-->>Shutdown SABnzbd</option>
|
||||
</optgroup>
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<optgroup label="$T('eoq-scripts')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<!--#if $sc != 'None'#-->
|
||||
<option value="script_$sc" <!--#if $finishaction == 'script_'+$sc then 'selected' else ''#-->>$sc</option>
|
||||
<!--#end if#-->
|
||||
@@ -90,20 +90,20 @@ $T('smpl-timeleft'): <strong>$timeleft</strong> $T('eta'): <strong>$eta</strong>
|
||||
</optgroup>
|
||||
</select>
|
||||
<!--#end if#-->
|
||||
<!--#if $script_list#-->
|
||||
<!--#if $scripts#-->
|
||||
<select onfocus="pauseQueueDeferer(this.parentNode.parentNode.id)" onblur="javascript:lr('queue/','limit=$limit&start=$start', 1,-1, this.parentNode.parentNode.id);" onchange="javascript:changequeuedetails('queue/change_script?nzo_id=$slot.nzo_id&script='+this.options[this.selectedIndex].value, 'limit=$limit&start=$start');">
|
||||
<optgroup label="$T('script')">
|
||||
<!--#for $sc in $script_list#-->
|
||||
<!--#for $sc in $scripts#-->
|
||||
<option value="$sc" <!--#if $slot.script.lower() == $sc.lower() then "selected" else ""#-->>$Tspec($sc)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
</select>
|
||||
<!--#end if#-->
|
||||
|
||||
<!--#if $cat_list#-->
|
||||
<!--#if $categories#-->
|
||||
<select onfocus="pauseQueueDeferer(this.parentNode.parentNode.id)" onblur="javascript:lr('queue/','limit=$limit&start=$start', 1,-1, this.parentNode.parentNode.id);" onchange="javascript:changequeuedetails('queue/change_cat?nzo_id=$slot.nzo_id&cat='+this.options[this.selectedIndex].value, 'limit=$limit&start=$start');">
|
||||
<optgroup label="$T('category')">
|
||||
<!--#for $ct in $cat_list#-->
|
||||
<!--#for $ct in $categories#-->
|
||||
<option value="$ct" <!--#if $slot.cat.lower() == $ct.lower() then "selected" else ""#-->>$Tspec($ct)</option>
|
||||
<!--#end for#-->
|
||||
</optgroup>
|
||||
|
||||
@@ -40,9 +40,7 @@
|
||||
$T('srv-ssl')
|
||||
</label>
|
||||
<div class="col-sm-8 input-checkbox">
|
||||
<input type="checkbox" id="ssl" name="ssl" value="1" <!--#if $have_ssl then '' else 'disabled'#--><!--#if $ssl == 1 then 'checked' else ''#--> data-toggle="tooltip" data-placement="right" title="$T('wizard-server-ssl-explain')"/>
|
||||
<!--#if not $have_ssl then '<span class="label label-warning">OpenSSL '+$T('opt-notInstalled')+'</span>' else ''#-->
|
||||
<small></small>
|
||||
<input type="checkbox" id="ssl" name="ssl" value="1" <!--#if $ssl == 1 then 'checked' else ''#--> data-toggle="tooltip" data-placement="right" title="$T('wizard-server-ssl-explain')"/>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
@@ -66,13 +64,23 @@
|
||||
<input type="number" class="form-control" name="connections" id="connections" value="<!--#if $connections then $connections else '8'#-->" data-toggle="tooltip" data-placement="right" title="$T('wizard-server-con-explain') $T('wizard-server-con-eg')" />
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="ssl_verify" class="col-sm-4 control-label">$T('opt-ssl_verify')</label>
|
||||
<div class="col-sm-8">
|
||||
<select name="ssl_verify" id="ssl_verify" class="form-control" <!--#if int($have_ssl_context) == 0 then "disabled=\"disabled\"" else ""#-->>
|
||||
<option value="2" <!--#if $ssl_verify == 2 then 'selected="selected"' else ""#--> >$T('ssl_verify-strict')</option>
|
||||
<option value="1" <!--#if $ssl_verify == 1 then 'selected="selected"' else ""#--> >$T('ssl_verify-normal')</option>
|
||||
<option value="0" <!--#if $ssl_verify == 0 then 'selected="selected"' else ""#--> >$T('ssl_verify-disabled')</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="col-sm-4">
|
||||
<button id="serverTest" class="btn btn-default"><span class="glyphicon glyphicon-sort"></span> $T('wizard-button-testServer')</button>
|
||||
</div>
|
||||
<div class="col-sm-8">
|
||||
<div id="serverQuote" class="btn btn-default disabled"><span id="serverResponse">$T('wizard-server-text')</span></div>
|
||||
<div id="serverResponse" class="well well-sm">$T('wizard-server-text')</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ $(document).ready(function() {
|
||||
} else {
|
||||
r = '<span class="failed"><span class="glyphicon glyphicon-minus-sign"></span> ' + result.value.message + '</span>';
|
||||
}
|
||||
|
||||
r = r.replace('https://sabnzbd.org/certificate-errors', '<a href="https://sabnzbd.org/certificate-errors" class="failed" target="_blank">https://sabnzbd.org/certificate-errors</a>')
|
||||
$('#serverResponse').html(r);
|
||||
}
|
||||
);
|
||||
|
||||
@@ -62,7 +62,7 @@ a[target="_blank"] {
|
||||
color: #00cc22;
|
||||
}
|
||||
.failed {
|
||||
color: red;
|
||||
color: red !important;
|
||||
}
|
||||
#rightGreyText {
|
||||
color: #ccc;
|
||||
@@ -164,16 +164,12 @@ label {
|
||||
text-decoration: line-through;
|
||||
color: #ccc;
|
||||
}
|
||||
#serverQuote {
|
||||
opacity: 0.8;
|
||||
box-shadow: none !important;
|
||||
white-space: normal;
|
||||
width: 100%;
|
||||
#serverResponse {
|
||||
padding: 6px 10px;
|
||||
}
|
||||
#host-tip {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.error-text {
|
||||
display: inline;
|
||||
color: red;
|
||||
@@ -192,7 +188,8 @@ label {
|
||||
#content a,
|
||||
#content a:hover,
|
||||
#content a:active,
|
||||
#content a:visited {
|
||||
#content a:visited,
|
||||
#serverResponse {
|
||||
color: #555;
|
||||
}
|
||||
.btn {
|
||||
|
||||
@@ -1,41 +0,0 @@
|
||||
The module listquote.py is written by Michael Foord.
|
||||
|
||||
Home of the module:
|
||||
http://www.voidspace.org.uk/python/configobj-api/pythonutils.listquote-pysrc.html
|
||||
|
||||
It is covered by the following license.
|
||||
|
||||
-------------------------------------------------------------------------
|
||||
Copyright (c) 2003-2007, Michael Foord
|
||||
All rights reserved.
|
||||
E-mail : fuzzyman AT voidspace DOT org DOT uk
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following
|
||||
disclaimer in the documentation and/or other materials provided
|
||||
with the distribution.
|
||||
|
||||
* Neither the name of Michael Foord nor the name of Voidspace
|
||||
may be used to endorse or promote products derived from this
|
||||
software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
-------------------------------------------------------------------------
|
||||
1912  po/main/SABnzbd.pot  (file diff suppressed because it is too large)
1960  po/main/da.po        (file diff suppressed because it is too large)
1908  po/main/de.po        (file diff suppressed because it is too large)
@@ -120,15 +120,6 @@ msgstr "Web interface"
|
||||
msgid "Script returned exit code %s and output \"%s\""
|
||||
msgstr "Notification script returned exit code %s and output \"%s\""
|
||||
|
||||
#: sabnzbd/skintext.py:521
|
||||
msgid ""
|
||||
"Enhanced functionality including ratings and extra status information is "
|
||||
"available when connected to OZnzb indexer."
|
||||
msgstr ""
|
||||
"Indexers can supply information when a job is added <strong>or</strong> "
|
||||
"using the settings below to provide ratings and extra status information. "
|
||||
"<br>The Server address and API key settings can be left blank, depending on your indexer. "
|
||||
|
||||
#: sabnzbd/skintext.py:333
|
||||
msgid "If empty, the standard port will only listen to HTTPS."
|
||||
msgstr "If empty, the SABnzbd Port set above will listen to HTTPS."
|
||||
|
||||
1899  po/main/es.po     (file diff suppressed because it is too large)
1897  po/main/fi.po     (file diff suppressed because it is too large)
1908  po/main/fr.po     (file diff suppressed because it is too large)
1896  po/main/nb.po     (file diff suppressed because it is too large)
1915  po/main/nl.po     (file diff suppressed because it is too large)
1904  po/main/pl.po     (file diff suppressed because it is too large)
1901  po/main/pt_BR.po  (file diff suppressed because it is too large)
1903  po/main/ro.po     (file diff suppressed because it is too large)
1872  po/main/ru.po     (file diff suppressed because it is too large)
1895  po/main/sr.po     (file diff suppressed because it is too large)
1898  po/main/sv.po     (file diff suppressed because it is too large)
1892  po/main/zh_CN.po  (file diff suppressed because it is too large)
@@ -1,11 +1,11 @@
|
||||
#
|
||||
# SABnzbd Translation Template file NSIS
|
||||
# Copyright (C) 2011-2015 by the SABnzbd Team
|
||||
# Copyright 2011-2017 The SABnzbd-Team
|
||||
# team@sabnzbd.org
|
||||
#
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: SABnzbd-0.8.x\n"
|
||||
"Project-Id-Version: SABnzbd-develop\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: shypike@sabnzbd.org\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
@@ -13,67 +13,71 @@ msgstr ""
|
||||
"Content-Type: text/plain; charset=ASCII\n"
|
||||
"Content-Transfer-Encoding: 7bit\n"
|
||||
|
||||
#: NSIS_Installer.nsi:416
|
||||
msgid "Go to the SABnzbd Wiki"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:418
|
||||
#: NSIS_Installer.nsi:473
|
||||
msgid "Show Release Notes"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:420
|
||||
#: NSIS_Installer.nsi:475
|
||||
msgid "Start SABnzbd"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:477
|
||||
msgid "Support the project, Donate!"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:422
|
||||
#: NSIS_Installer.nsi:479
|
||||
msgid "Please close \"SABnzbd.exe\" first"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:424
|
||||
#: NSIS_Installer.nsi:481
|
||||
msgid "The installation directory has changed (now in \"Program Files\"). \\nIf you run SABnzbd as a service, you need to update the service settings."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:483
|
||||
msgid "This will uninstall SABnzbd from your system"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:426
|
||||
#: NSIS_Installer.nsi:485
|
||||
msgid "Run at startup"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:428
|
||||
#: NSIS_Installer.nsi:487
|
||||
msgid "Desktop Icon"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:430
|
||||
#: NSIS_Installer.nsi:489
|
||||
msgid "NZB File association"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:432
|
||||
#: NSIS_Installer.nsi:491
|
||||
msgid "Delete Program"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:434
|
||||
#: NSIS_Installer.nsi:493
|
||||
msgid "Delete Settings"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:436
|
||||
#: NSIS_Installer.nsi:495
|
||||
msgid "This system requires the Microsoft runtime library VC90 to be installed first. Do you want to do that now?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:438
|
||||
#: NSIS_Installer.nsi:497
|
||||
msgid "Downloading Microsoft runtime installer..."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:440
|
||||
#: NSIS_Installer.nsi:499
|
||||
msgid "Download error, retry?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:442
|
||||
#: NSIS_Installer.nsi:501
|
||||
msgid "Cannot install without runtime library, retry?"
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:444
|
||||
#: NSIS_Installer.nsi:503
|
||||
msgid "You cannot overwrite an existing installation. \\n\\nClick `OK` to remove the previous version or `Cancel` to cancel this upgrade."
|
||||
msgstr ""
|
||||
|
||||
#: NSIS_Installer.nsi:446
|
||||
#: NSIS_Installer.nsi:505
|
||||
msgid "Your settings and data will be preserved."
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -79,19 +79,13 @@ else:
|
||||
##############################################################################
|
||||
# SSL CHECKS
|
||||
##############################################################################
|
||||
import ssl
|
||||
HAVE_SSL_CONTEXT = None
|
||||
HAVE_SSL = None
|
||||
try:
|
||||
import ssl
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
# Test availability of SSLContext (python 2.7.9+)
|
||||
ssl.SSLContext
|
||||
HAVE_SSL_CONTEXT = True
|
||||
except:
|
||||
HAVE_SSL_CONTEXT = False
|
||||
# Test availability of SSLContext (python 2.7.9+)
|
||||
ssl.SSLContext
|
||||
HAVE_SSL_CONTEXT = True
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
HAVE_SSL_CONTEXT = False
|
||||
|
||||
try:
|
||||
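Note on the hunk above: with the removed and added lines interleaved it is hard to read, so here is a minimal consolidated sketch of what the new module-level check appears to do. Assuming the Python 2 era this release targets, ssl is now imported unconditionally and only SSLContext (added in Python 2.7.9) is probed; the bare except mirrors the diff:

import ssl

# ssl itself is assumed to be importable; only SSLContext needs probing
HAVE_SSL_CONTEXT = None
try:
    # Test availability of SSLContext (Python 2.7.9+)
    ssl.SSLContext
    HAVE_SSL_CONTEXT = True
except:
    HAVE_SSL_CONTEXT = False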
@@ -134,7 +128,7 @@ START = datetime.datetime.now()
|
||||
MY_NAME = None
|
||||
MY_FULLNAME = None
|
||||
RESTART_ARGS = []
|
||||
NEW_VERSION = None
|
||||
NEW_VERSION = (None, None)
|
||||
DIR_HOME = None
|
||||
DIR_APPDATA = None
|
||||
DIR_LCLDATA = None
|
||||
@@ -160,11 +154,9 @@ BROWSER_URL = None
|
||||
CMDLINE = '' # Rendering of original command line arguments
|
||||
|
||||
WEB_DIR = None
|
||||
WEB_DIR2 = None
|
||||
WEB_DIRC = None
|
||||
WEB_DIR_CONFIG = None
|
||||
WIZARD_DIR = None
|
||||
WEB_COLOR = None
|
||||
WEB_COLOR2 = None
|
||||
SABSTOP = False
|
||||
RESTART_REQ = False
|
||||
PAUSED_ALL = False
|
||||
@@ -177,6 +169,11 @@ LAST_ERROR = None
|
||||
EXTERNAL_IPV6 = False
|
||||
LAST_HISTORY_UPDATE = time.time()
|
||||
|
||||
# Performance measure for dashboard
|
||||
PYSTONE_SCORE = 0
|
||||
DOWNLOAD_DIR_SPEED = 0
|
||||
COMPLETE_DIR_SPEED = 0
|
||||
|
||||
__INITIALIZED__ = False
|
||||
__SHUTTING_DOWN__ = False
|
||||
|
||||
@@ -220,7 +217,6 @@ def connect_db(thread_index=0):
|
||||
return cherrypy.thread_data.history_db
|
||||
|
||||
|
||||
|
||||
@synchronized(INIT_LOCK)
|
||||
def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0):
|
||||
global __INITIALIZED__, __SHUTTING_DOWN__,\
|
||||
@@ -266,9 +262,7 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
|
||||
cfg.cherryhost.callback(guard_restart)
|
||||
cfg.cherryport.callback(guard_restart)
|
||||
cfg.web_dir.callback(guard_restart)
|
||||
cfg.web_dir2.callback(guard_restart)
|
||||
cfg.web_color.callback(guard_restart)
|
||||
cfg.web_color2.callback(guard_restart)
|
||||
cfg.username.callback(guard_restart)
|
||||
cfg.password.callback(guard_restart)
|
||||
cfg.log_dir.callback(guard_restart)
|
||||
@@ -317,8 +311,12 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
|
||||
else:
|
||||
newsched.append(sched)
|
||||
cfg.schedules.set(newsched)
|
||||
cfg.sched_converted.set(True)
|
||||
cfg.sched_converted.set(1)
|
||||
|
||||
# Second time schedule conversion
|
||||
if cfg.sched_converted() != 2:
|
||||
cfg.schedules.set(['%s %s' % (1, schedule) for schedule in cfg.schedules()])
|
||||
cfg.sched_converted.set(2)
|
||||
|
||||
if check_repair_request():
|
||||
repair = 2
|
||||
@@ -442,7 +440,6 @@ def halt():
|
||||
except:
|
||||
logging.error(T('Fatal error at saving state'), exc_info=True)
|
||||
|
||||
|
||||
# The Scheduler cannot be stopped when the stop was scheduled.
|
||||
# Since all warm-restarts have been removed, it's no longer
|
||||
# needed to stop the scheduler.
|
||||
@@ -454,8 +451,13 @@ def halt():
|
||||
__INITIALIZED__ = False
|
||||
|
||||
|
||||
def trigger_restart():
|
||||
def trigger_restart(timeout=None):
|
||||
""" Trigger a restart by setting a flag an shutting down CP """
|
||||
# Sometimes we need to wait a bit to send good-bye to the browser
|
||||
if timeout:
|
||||
time.sleep(timeout)
|
||||
|
||||
# Add extra arguments
|
||||
if sabnzbd.downloader.Downloader.do.paused:
|
||||
sabnzbd.RESTART_ARGS.append('-p')
|
||||
sys.argv = sabnzbd.RESTART_ARGS
|
||||
@@ -518,6 +520,7 @@ def guard_fsys_type():
|
||||
""" Callback for change of file system naming type """
|
||||
sabnzbd.encoding.change_fsys(cfg.fsys_type())
|
||||
|
||||
|
||||
def set_https_verification(value):
|
||||
prev = False
|
||||
try:
|
||||
@@ -896,25 +899,25 @@ def save_data(data, _id, path, do_pickle=True, silent=False):
|
||||
logging.debug("Saving data for %s in %s", _id, path)
|
||||
path = os.path.join(path, _id)
|
||||
|
||||
try:
|
||||
_f = open(path, 'wb')
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
pickler = pickle.Pickler(_f, 2)
|
||||
# We try 3 times, to avoid any dict or access problems
|
||||
for t in xrange(3):
|
||||
try:
|
||||
with open(path, 'wb') as data_file:
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
cPickle.dump(data, data_file)
|
||||
else:
|
||||
pickle.dump(data, data_file)
|
||||
else:
|
||||
data_file.write(data)
|
||||
break
|
||||
except:
|
||||
if t == 2:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
else:
|
||||
pickler = cPickle.Pickler(_f, 2)
|
||||
pickler.dump(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
pickler.clear_memo()
|
||||
del pickler
|
||||
else:
|
||||
_f.write(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
except:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
# Wait a tiny bit before trying again
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
@@ -930,15 +933,14 @@ def load_data(_id, path, remove=True, do_pickle=True, silent=False):
|
||||
logging.debug("Loading data for %s from %s", _id, path)
|
||||
|
||||
try:
|
||||
_f = open(path, 'rb')
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(_f)
|
||||
with open(path, 'rb') as data_file:
|
||||
if do_pickle:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(data_file)
|
||||
else:
|
||||
data = cPickle.load(data_file)
|
||||
else:
|
||||
data = cPickle.load(_f)
|
||||
else:
|
||||
data = _f.read()
|
||||
_f.close()
|
||||
data = data_file.read()
|
||||
|
||||
if remove:
|
||||
os.remove(path)
|
||||
@@ -963,31 +965,31 @@ def remove_data(_id, path):
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def save_admin(data, _id, do_pickle=True):
|
||||
def save_admin(data, _id):
|
||||
""" Save data in admin folder in specified format """
|
||||
path = os.path.join(cfg.admin_dir.get_path(), _id)
|
||||
logging.info("Saving data for %s in %s", _id, path)
|
||||
|
||||
try:
|
||||
_f = open(path, 'wb')
|
||||
if do_pickle:
|
||||
pickler = cPickle.Pickler(_f, 2)
|
||||
pickler.dump(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
pickler.clear_memo()
|
||||
del pickler
|
||||
else:
|
||||
_f.write(data)
|
||||
_f.flush()
|
||||
_f.close()
|
||||
except:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
# We try 3 times, to avoid any dict or access problems
|
||||
for t in xrange(3):
|
||||
try:
|
||||
with open(path, 'wb') as data_file:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.dump(data, data_file)
|
||||
else:
|
||||
data = cPickle.dump(data, data_file)
|
||||
break
|
||||
except:
|
||||
if t == 2:
|
||||
logging.error(T('Saving %s failed'), path)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
else:
|
||||
# Wait a tiny bit before trying again
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def load_admin(_id, remove=False, do_pickle=True, silent=False):
|
||||
def load_admin(_id, remove=False, silent=False):
|
||||
""" Read data in admin folder in specified format """
|
||||
path = os.path.join(cfg.admin_dir.get_path(), _id)
|
||||
logging.info("Loading data for %s from %s", _id, path)
|
||||
@@ -997,13 +999,11 @@ def load_admin(_id, remove=False, do_pickle=True, silent=False):
|
||||
return None
|
||||
|
||||
try:
|
||||
f = open(path, 'rb')
|
||||
if do_pickle:
|
||||
data = cPickle.load(f)
|
||||
else:
|
||||
data = f.read()
|
||||
f.close()
|
||||
|
||||
with open(path, 'rb') as data_file:
|
||||
if cfg.use_pickle():
|
||||
data = pickle.load(data_file)
|
||||
else:
|
||||
data = cPickle.load(data_file)
|
||||
if remove:
|
||||
os.remove(path)
|
||||
except:
|
||||
@@ -1152,11 +1152,12 @@ def wait_for_download_folder():
|
||||
logging.debug('Waiting for "incomplete" folder')
|
||||
time.sleep(2.0)
|
||||
|
||||
|
||||
def check_old_queue():
|
||||
""" Check for old queue (when a new queue is not present) """
|
||||
old = False
|
||||
if not os.path.exists(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME)):
|
||||
for ver in (QUEUE_VERSION -1 , QUEUE_VERSION - 2, QUEUE_VERSION - 3):
|
||||
for ver in (QUEUE_VERSION - 1, QUEUE_VERSION - 2, QUEUE_VERSION - 3):
|
||||
data = load_admin(QUEUE_FILE_TMPL % str(ver))
|
||||
if data:
|
||||
break
|
||||
@@ -1164,8 +1165,7 @@ def check_old_queue():
|
||||
old = bool(data and isinstance(data, tuple) and len(data[1]))
|
||||
except (TypeError, IndexError):
|
||||
pass
|
||||
if old and sabnzbd.WIN32 and ver < 10 and sabnzbd.DIR_LCLDATA != sabnzbd.DIR_HOME \
|
||||
and misc.is_relative_path(cfg.download_dir()):
|
||||
if old and sabnzbd.WIN32 and ver < 10 and sabnzbd.DIR_LCLDATA != sabnzbd.DIR_HOME and misc.is_relative_path(cfg.download_dir()):
|
||||
# For Windows and when version < 10: adjust old default location
|
||||
cfg.download_dir.set('Documents/' + cfg.download_dir())
|
||||
return old
|
||||
|
||||
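The save_data()/save_admin() and load_data()/load_admin() hunks above are awkward to follow with both versions interleaved; the pattern they appear to converge on is a with-block plus up to three retries. A minimal, self-contained sketch of that save path (simplified: the cfg.use_pickle()/cPickle switch is dropped and plain pickle is used, so this is an illustration, not the project's exact helper):

import logging
import pickle
import time

def save_data(data, path, do_pickle=True, tries=3):
    # Retry a few times to ride out transient access problems, as the diff above does
    for attempt in range(tries):
        try:
            with open(path, 'wb') as data_file:
                if do_pickle:
                    pickle.dump(data, data_file)
                else:
                    data_file.write(data)
            break
        except Exception:
            if attempt == tries - 1:
                logging.error('Saving %s failed', path)
                logging.info('Traceback: ', exc_info=True)
            else:
                # Wait a tiny bit before trying again
                time.sleep(0.1)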
270  sabnzbd/api.py
@@ -28,6 +28,7 @@ import json
|
||||
import cherrypy
|
||||
import locale
|
||||
import socket
|
||||
from threading import Thread
|
||||
try:
|
||||
locale.setlocale(locale.LC_ALL, "")
|
||||
except:
|
||||
@@ -54,6 +55,7 @@ from sabnzbd.utils.json import JsonWriter
|
||||
|
||||
from sabnzbd.utils.rsslib import RSS, Item
|
||||
from sabnzbd.utils.pathbrowser import folders_at_path
|
||||
from sabnzbd.utils.getperformance import getcpu
|
||||
from sabnzbd.misc import loadavg, to_units, diskspace, get_ext, \
|
||||
get_filename, int_conv, globber, globber_full, time_format, remove_all, \
|
||||
starts_with_path, cat_convert, clip_path, create_https_certificates, calc_age
|
||||
@@ -96,6 +98,7 @@ else:
|
||||
# Flag for using the fast json encoder, unless it fails
|
||||
FAST_JSON = True
|
||||
|
||||
|
||||
def api_handler(kwargs):
|
||||
""" API Dispatcher """
|
||||
mode = kwargs.get('mode', '')
|
||||
@@ -104,7 +107,7 @@ def api_handler(kwargs):
|
||||
callback = kwargs.get('callback', '')
|
||||
|
||||
# Extend the timeout of API calls to 10 minutes
|
||||
cherrypy.response.timeout = 60*10
|
||||
cherrypy.response.timeout = 600
|
||||
|
||||
if isinstance(mode, list):
|
||||
mode = mode[0]
|
||||
@@ -138,6 +141,7 @@ def _api_set_config(name, output, kwargs):
|
||||
res, data = config.get_dconfig(kwargs.get('section'), kwargs.get('keyword'))
|
||||
return report(output, keyword='config', data=data)
|
||||
|
||||
|
||||
def _api_set_config_default(name, output, kwargs):
|
||||
""" API: Reset requested config variables back to defaults. Currently only for misc-section """
|
||||
keywords = kwargs.get('keyword', [])
|
||||
@@ -167,7 +171,8 @@ def _api_qstatus(name, output, kwargs):
|
||||
keyword = ''
|
||||
else:
|
||||
keyword = 'queue'
|
||||
return report(output, keyword=keyword, data=qstatus_data())
|
||||
info, pnfo_list, bytespersec = build_queue()
|
||||
return report(output, keyword='', data=remove_callable(info))
|
||||
|
||||
|
||||
def _api_queue(name, output, kwargs):
|
||||
@@ -270,21 +275,12 @@ def _api_queue_sort(output, value, kwargs):
|
||||
|
||||
def _api_queue_default(output, value, kwargs):
|
||||
""" API: accepts output, sort, dir, start, limit """
|
||||
sort = kwargs.get('sort')
|
||||
direction = kwargs.get('dir', '')
|
||||
start = int_conv(kwargs.get('start'))
|
||||
limit = int_conv(kwargs.get('limit'))
|
||||
trans = kwargs.get('trans')
|
||||
search = kwargs.get('search')
|
||||
|
||||
if output in ('xml', 'json'):
|
||||
if sort and sort != 'index':
|
||||
reverse = direction.lower() == 'desc'
|
||||
sort_queue(sort, reverse)
|
||||
|
||||
info, pnfo_list, bytespersec = build_queue(start=start, limit=limit, output=output, trans=trans, search=search)
|
||||
info['categories'] = info.pop('cat_list')
|
||||
info['scripts'] = info.pop('script_list')
|
||||
info, pnfo_list, bytespersec = build_queue(start=start, limit=limit, output=output, search=search)
|
||||
return report(output, keyword='queue', data=remove_callable(info))
|
||||
elif output == 'rss':
|
||||
return rss_qstatus()
|
||||
@@ -384,6 +380,7 @@ def _api_retry(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_cancel_pp(name, output, kwargs):
|
||||
""" API: accepts name, output, value(=nzo_id) """
|
||||
nzo_id = kwargs.get('value')
|
||||
@@ -392,6 +389,7 @@ def _api_cancel_pp(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_ITEM)
|
||||
|
||||
|
||||
def _api_addlocalfile(name, output, kwargs):
|
||||
""" API: accepts name, output, pp, script, cat, priority, nzbname """
|
||||
if name and isinstance(name, list):
|
||||
@@ -484,7 +482,7 @@ def _api_change_opts(name, output, kwargs):
|
||||
|
||||
def _api_fullstatus(name, output, kwargs):
|
||||
""" API: full history status"""
|
||||
status = build_status(skip_dashboard=kwargs.get('skip_dashboard'), output=output)
|
||||
status = build_status(skip_dashboard=kwargs.get('skip_dashboard', 1), output=output)
|
||||
return report(output, keyword='status', data=remove_callable(status))
|
||||
|
||||
|
||||
@@ -532,9 +530,7 @@ def _api_history(name, output, kwargs):
|
||||
else:
|
||||
return report(output, _MSG_NO_VALUE)
|
||||
elif not name:
|
||||
history = build_header(prim=True)
|
||||
if 'noofslots_total' in history:
|
||||
del history['noofslots_total']
|
||||
history = {}
|
||||
grand, month, week, day = BPSMeter.do.get_sums()
|
||||
history['total_size'], history['month_size'], history['week_size'], history['day_size'] = \
|
||||
to_units(grand), to_units(month), to_units(week), to_units(day)
|
||||
@@ -544,6 +540,7 @@ def _api_history(name, output, kwargs):
|
||||
categories=categories,
|
||||
output=output)
|
||||
history['last_history_update'] = int(sabnzbd.LAST_HISTORY_UPDATE)
|
||||
history['version'] = sabnzbd.__version__
|
||||
return report(output, keyword='history', data=remove_callable(history))
|
||||
else:
|
||||
return report(output, _MSG_NOT_IMPLEMENTED)
|
||||
@@ -606,6 +603,7 @@ def _api_resume(name, output, kwargs):
|
||||
|
||||
def _api_shutdown(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
logging.info('Shutdown requested by API')
|
||||
sabnzbd.halt()
|
||||
cherrypy.engine.exit()
|
||||
sabnzbd.SABSTOP = True
|
||||
@@ -659,12 +657,15 @@ def _api_auth(name, output, kwargs):
|
||||
|
||||
def _api_restart(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
sabnzbd.trigger_restart()
|
||||
logging.info('Restart requested by API')
|
||||
# Do the shutdown async to still send goodbye to browser
|
||||
Thread(target=sabnzbd.trigger_restart, kwargs={'timeout': 1}).start()
|
||||
return report(output)
|
||||
|
||||
|
||||
def _api_restart_repair(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
logging.info('Queue repair requested by API')
|
||||
sabnzbd.request_repair()
|
||||
sabnzbd.trigger_restart()
|
||||
return report(output)
|
||||
@@ -752,12 +753,14 @@ def _api_test_email(name, output, kwargs):
|
||||
res = None
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_windows(name, output, kwargs):
|
||||
""" API: send a test to Windows, return result """
|
||||
logging.info("Sending test notification")
|
||||
res = sabnzbd.notifier.send_windows('SABnzbd', T('Test Notification'), 'other')
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_notif(name, output, kwargs):
|
||||
""" API: send a test to Notification Center, return result """
|
||||
logging.info("Sending test notification")
|
||||
@@ -799,12 +802,14 @@ def _api_test_pushbullet(name, output, kwargs):
|
||||
res = sabnzbd.notifier.send_pushbullet('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs)
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_test_nscript(name, output, kwargs):
|
||||
""" API: execute a test notification script, return result """
|
||||
logging.info("Executing notification script")
|
||||
res = sabnzbd.notifier.send_nscript('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs)
|
||||
return report(output, error=res)
|
||||
|
||||
|
||||
def _api_undefined(name, output, kwargs):
|
||||
""" API: accepts output """
|
||||
return report(output, _MSG_NOT_IMPLEMENTED)
|
||||
@@ -846,14 +851,10 @@ def _api_config_get_speedlimit(output, kwargs):
|
||||
|
||||
|
||||
def _api_config_set_colorscheme(output, kwargs):
|
||||
""" API: accepts output, value(=color for primary), value2(=color for secondary) """
|
||||
""" API: accepts output"""
|
||||
value = kwargs.get('value')
|
||||
value2 = kwargs.get('value2')
|
||||
if value:
|
||||
cfg.web_color.set(value)
|
||||
if value2:
|
||||
cfg.web_color2.set(value2)
|
||||
if value or value2:
|
||||
return report(output)
|
||||
else:
|
||||
return report(output, _MSG_NO_VALUE)
|
||||
@@ -1188,9 +1189,9 @@ def handle_cat_api(output, kwargs):
|
||||
return name
|
||||
|
||||
|
||||
def build_status(web_dir=None, root=None, prim=True, skip_dashboard=False, output=None):
|
||||
def build_status(skip_dashboard=False, output=None):
|
||||
# build up header full of basic information
|
||||
info = build_header(prim, web_dir)
|
||||
info = build_header()
|
||||
|
||||
info['logfile'] = sabnzbd.LOGFILE
|
||||
info['weblogfile'] = sabnzbd.WEBLOGFILE
|
||||
@@ -1198,7 +1199,19 @@ def build_status(web_dir=None, root=None, prim=True, skip_dashboard=False, outpu
|
||||
info['folders'] = [xml_name(item) for item in sabnzbd.nzbqueue.scan_jobs(all=False, action=False)]
|
||||
info['configfn'] = xml_name(config.get_filename())
|
||||
|
||||
# Dashboard: Begin
|
||||
# Dashboard: Speed of System
|
||||
info['cpumodel'] = getcpu()
|
||||
info['pystone'] = sabnzbd.PYSTONE_SCORE
|
||||
|
||||
# Dashboard: Speed of Download directory:
|
||||
info['downloaddir'] = sabnzbd.cfg.download_dir.get_path()
|
||||
info['downloaddirspeed'] = sabnzbd.DOWNLOAD_DIR_SPEED
|
||||
|
||||
# Dashboard: Speed of Complete directory:
|
||||
info['completedir'] = sabnzbd.cfg.complete_dir.get_path()
|
||||
info['completedirspeed'] = sabnzbd.COMPLETE_DIR_SPEED
|
||||
|
||||
# Dashboard: Connection information
|
||||
if not int_conv(skip_dashboard):
|
||||
info['localipv4'] = localipv4()
|
||||
info['publicipv4'] = publicipv4()
|
||||
@@ -1210,33 +1223,6 @@ def build_status(web_dir=None, root=None, prim=True, skip_dashboard=False, outpu
|
||||
except:
|
||||
info['dnslookup'] = None
|
||||
|
||||
# Dashboard: Speed of System
|
||||
from sabnzbd.utils.getperformance import getpystone, getcpu
|
||||
info['pystone'] = getpystone()
|
||||
info['cpumodel'] = getcpu()
|
||||
# Dashboard: Speed of Download directory:
|
||||
info['downloaddir'] = sabnzbd.cfg.download_dir.get_path()
|
||||
try:
|
||||
sabnzbd.downloaddirspeed # The persistent var
|
||||
except:
|
||||
# does not yet exist, so create it:
|
||||
sabnzbd.downloaddirspeed = 0 # 0 means ... not yet determined
|
||||
info['downloaddirspeed'] = sabnzbd.downloaddirspeed
|
||||
# Dashboard: Speed of Complete directory:
|
||||
info['completedir'] = sabnzbd.cfg.complete_dir.get_path()
|
||||
try:
|
||||
sabnzbd.completedirspeed # The persistent var
|
||||
except:
|
||||
# does not yet exist, so create it:
|
||||
sabnzbd.completedirspeed = 0 # 0 means ... not yet determined
|
||||
info['completedirspeed'] = sabnzbd.completedirspeed
|
||||
|
||||
try:
|
||||
sabnzbd.dashrefreshcounter # The persistent var @UndefinedVariable
|
||||
except:
|
||||
sabnzbd.dashrefreshcounter = 0
|
||||
info['dashrefreshcounter'] = sabnzbd.dashrefreshcounter
|
||||
|
||||
info['servers'] = []
|
||||
servers = sorted(Downloader.do.servers[:], key=lambda svr: '%02d%s' % (svr.priority, svr.displayname.lower()))
|
||||
for server in servers:
|
||||
@@ -1310,14 +1296,15 @@ def build_status(web_dir=None, root=None, prim=True, skip_dashboard=False, outpu
|
||||
|
||||
return info
|
||||
|
||||
def build_queue(web_dir=None, root=None, prim=True, webdir='', start=0, limit=0, trans=False, output=None, search=None):
|
||||
|
||||
def build_queue(start=0, limit=0, trans=False, output=None, search=None):
|
||||
if output:
|
||||
converter = unicoder
|
||||
else:
|
||||
converter = xml_name
|
||||
|
||||
# build up header full of basic information
|
||||
info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header(prim, webdir, search=search, start=start, limit=limit)
|
||||
info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header(search=search, start=start, limit=limit, output=output)
|
||||
|
||||
datestart = datetime.datetime.now()
|
||||
priorities = {TOP_PRIORITY: 'Force', REPAIR_PRIORITY: 'Repair', HIGH_PRIORITY: 'High', NORMAL_PRIORITY: 'Normal', LOW_PRIORITY: 'Low'}
|
||||
@@ -1325,8 +1312,8 @@ def build_queue(web_dir=None, root=None, prim=True, webdir='', start=0, limit=0,
|
||||
start = int_conv(start)
|
||||
|
||||
info['refresh_rate'] = str(cfg.refresh_rate()) if cfg.refresh_rate() > 0 else ''
|
||||
info['script_list'] = list_scripts()
|
||||
info['cat_list'] = list_cats(output is None)
|
||||
info['scripts'] = list_scripts()
|
||||
info['categories'] = list_cats(output is None)
|
||||
info['rating_enable'] = bool(cfg.rating_enable())
|
||||
info['noofslots'] = q_size
|
||||
info['start'] = start
|
||||
@@ -1381,7 +1368,8 @@ def build_queue(web_dir=None, root=None, prim=True, webdir='', start=0, limit=0,
|
||||
slot['status'] = Status.DOWNLOADING
|
||||
else:
|
||||
# ensure compatibility of API status
|
||||
if status in (Status.DELETED, ): status = Status.DOWNLOADING
|
||||
if status in (Status.DELETED, ):
|
||||
status = Status.DOWNLOADING
|
||||
slot['status'] = "%s" % (status)
|
||||
|
||||
if (Downloader.do.paused or Downloader.do.postproc or is_propagating or \
|
||||
@@ -1430,60 +1418,6 @@ def fast_queue():
|
||||
return paused, bytes_left, bpsnow, time_left
|
||||
|
||||
|
||||
def qstatus_data():
|
||||
""" Build up the queue status as a nested object and output as a JSON object """
|
||||
|
||||
qnfo = NzbQueue.do.queue_info()
|
||||
pnfo_list = qnfo.list
|
||||
|
||||
jobs = []
|
||||
bytesleftprogess = 0
|
||||
bpsnow = BPSMeter.do.get_bps()
|
||||
for pnfo in pnfo_list:
|
||||
filename = pnfo.filename
|
||||
bytesleft = pnfo.bytes_left / MEBI
|
||||
bytesleftprogess += pnfo.bytes_left
|
||||
bytes = pnfo.bytes / MEBI
|
||||
nzo_id = pnfo.nzo_id
|
||||
jobs.append({"id": nzo_id,
|
||||
"mb": bytes,
|
||||
"mbleft": bytesleft,
|
||||
"filename": unicoder(filename),
|
||||
"timeleft": calc_timeleft(bytesleftprogess, bpsnow)})
|
||||
|
||||
state = "IDLE"
|
||||
if Downloader.do.paused:
|
||||
state = Status.PAUSED
|
||||
elif qnfo.bytes_left / MEBI > 0:
|
||||
state = Status.DOWNLOADING
|
||||
|
||||
speed_limit = Downloader.do.get_limit()
|
||||
if speed_limit <= 0:
|
||||
speed_limit = 100
|
||||
|
||||
status = {
|
||||
"state": state,
|
||||
"pp_active": not PostProcessor.do.empty(),
|
||||
"paused": Downloader.do.paused,
|
||||
"pause_int": scheduler.pause_int(),
|
||||
"kbpersec": bpsnow / KIBI,
|
||||
"speed": to_units(bpsnow, dec_limit=1),
|
||||
"mbleft": qnfo.bytes_left / MEBI,
|
||||
"mb": qnfo.bytes / MEBI,
|
||||
"noofslots": len(pnfo_list),
|
||||
"noofslots_total": qnfo.q_fullsize,
|
||||
"have_warnings": str(sabnzbd.GUIHANDLER.count()),
|
||||
"diskspace1": diskspace(cfg.download_dir.get_path())[1],
|
||||
"diskspace2": diskspace(cfg.complete_dir.get_path())[1],
|
||||
"timeleft": calc_timeleft(qnfo.bytes_left, bpsnow),
|
||||
"loadavg": loadavg(),
|
||||
"speedlimit": "{1:0.{0}f}".format(int(speed_limit % 1 > 0), speed_limit),
|
||||
"speedlimit_abs": str(Downloader.do.get_limit_abs() or ''),
|
||||
"jobs": jobs
|
||||
}
|
||||
return status
|
||||
|
||||
|
||||
def build_file_list(nzo_id):
|
||||
""" Build file lists for specified job
|
||||
"""
|
||||
@@ -1496,27 +1430,23 @@ def build_file_list(nzo_id):
|
||||
active_files = pnfo.active_files
|
||||
queued_files = pnfo.queued_files
|
||||
|
||||
n = 0
|
||||
for nzf in finished_files:
|
||||
jobs.append({'filename': xml_name(nzf.filename if nzf.filename else nzf.subject),
|
||||
'mbleft': "%.2f" % (nzf.bytes_left / MEBI),
|
||||
'mb': "%.2f" % (nzf.bytes / MEBI),
|
||||
'bytes': "%.2f" % nzf.bytes,
|
||||
'age': calc_age(nzf.date),
|
||||
'id': str(n),
|
||||
'nzf_id': nzf.nzf_id,
|
||||
'status': 'finished'})
|
||||
n += 1
|
||||
|
||||
for nzf in active_files:
|
||||
jobs.append({'filename': xml_name(nzf.filename if nzf.filename else nzf.subject),
|
||||
'mbleft': "%.2f" % (nzf.bytes_left / MEBI),
|
||||
'mb': "%.2f" % (nzf.bytes / MEBI),
|
||||
'bytes': "%.2f" % nzf.bytes,
|
||||
'nzf_id': nzf.nzf_id,
|
||||
'age': calc_age(nzf.date),
|
||||
'id': str(n),
|
||||
'nzf_id': nzf.nzf_id,
|
||||
'status': 'active'})
|
||||
n += 1
|
||||
|
||||
for nzf in queued_files:
|
||||
jobs.append({'filename': xml_name(nzf.filename if nzf.filename else nzf.subject),
|
||||
@@ -1525,12 +1455,12 @@ def build_file_list(nzo_id):
|
||||
'mb': "%.2f" % (nzf.bytes / MEBI),
|
||||
'bytes': "%.2f" % nzf.bytes,
|
||||
'age': calc_age(nzf.date),
|
||||
'id': str(n),
|
||||
'nzf_id': nzf.nzf_id,
|
||||
'status': 'queued'})
|
||||
n += 1
|
||||
|
||||
return jobs
|
||||
|
||||
|
||||
def rss_qstatus():
|
||||
""" Return a RSS feed with the queue status """
|
||||
qnfo = NzbQueue.do.queue_info()
|
||||
@@ -1600,8 +1530,7 @@ def options_list(output):
|
||||
'zip': sabnzbd.newsunpack.ZIP_COMMAND,
|
||||
'7zip': sabnzbd.newsunpack.SEVEN_COMMAND,
|
||||
'nice': sabnzbd.newsunpack.NICE_COMMAND,
|
||||
'ionice': sabnzbd.newsunpack.IONICE_COMMAND,
|
||||
'ssl': sabnzbd.HAVE_SSL
|
||||
'ionice': sabnzbd.newsunpack.IONICE_COMMAND
|
||||
})
|
||||
|
||||
|
||||
@@ -1611,7 +1540,8 @@ def retry_job(job, new_nzb, password):
|
||||
history_db = sabnzbd.connect_db()
|
||||
futuretype, url, pp, script, cat = history_db.get_other(job)
|
||||
if futuretype:
|
||||
if pp == 'X': pp = None
|
||||
if pp == 'X':
|
||||
pp = None
|
||||
sabnzbd.add_url(url, pp, script, cat)
|
||||
history_db.remove_history(job)
|
||||
else:
|
||||
@@ -1684,24 +1614,13 @@ def clear_trans_cache():
|
||||
sabnzbd.WEBUI_READY = True
|
||||
|
||||
|
||||
def build_header(prim, webdir=''):
|
||||
def build_header(webdir='', output=None):
|
||||
""" Build the basic header """
|
||||
try:
|
||||
uptime = calc_age(sabnzbd.START)
|
||||
except:
|
||||
uptime = "-"
|
||||
|
||||
if prim:
|
||||
color = sabnzbd.WEB_COLOR
|
||||
else:
|
||||
color = sabnzbd.WEB_COLOR2
|
||||
if not color:
|
||||
color = ''
|
||||
|
||||
header = {'T': Ttemplate, 'Tspec': Tspec, 'Tx': Ttemplate, 'version': sabnzbd.__version__,
|
||||
'paused': Downloader.do.paused or Downloader.do.postproc,
|
||||
'pause_int': scheduler.pause_int(), 'paused_all': sabnzbd.PAUSED_ALL,
|
||||
'uptime': uptime, 'color_scheme': color}
|
||||
speed_limit = Downloader.do.get_limit()
|
||||
if speed_limit <= 0:
|
||||
speed_limit = 100
|
||||
@@ -1712,7 +1631,42 @@ def build_header(prim, webdir=''):
|
||||
disk_total1, disk_free1 = diskspace(cfg.download_dir.get_path())
|
||||
disk_total2, disk_free2 = diskspace(cfg.complete_dir.get_path())
|
||||
|
||||
header['helpuri'] = 'https://sabnzbd.org/wiki/'
|
||||
header = {}
|
||||
|
||||
# We don't output everything for API
|
||||
if not output:
|
||||
header['T'] = Ttemplate
|
||||
header['Tspec'] = Tspec
|
||||
header['Tx'] = Ttemplate
|
||||
header['uptime'] = uptime
|
||||
header['color_scheme'] = sabnzbd.WEB_COLOR or ''
|
||||
header['helpuri'] = 'https://sabnzbd.org/wiki/'
|
||||
|
||||
header['restart_req'] = sabnzbd.RESTART_REQ
|
||||
header['pid'] = os.getpid()
|
||||
|
||||
header['last_warning'] = sabnzbd.GUIHANDLER.last().replace('WARNING', ('WARNING:')).replace('ERROR', T('ERROR:'))
|
||||
header['active_lang'] = cfg.language()
|
||||
|
||||
header['my_lcldata'] = sabnzbd.DIR_LCLDATA
|
||||
header['my_home'] = sabnzbd.DIR_HOME
|
||||
header['webdir'] = webdir or sabnzbd.WEB_DIR
|
||||
|
||||
header['nt'] = sabnzbd.WIN32
|
||||
header['darwin'] = sabnzbd.DARWIN
|
||||
|
||||
header['power_options'] = sabnzbd.WIN32 or sabnzbd.DARWIN or sabnzbd.LINUX_POWER
|
||||
header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()
|
||||
|
||||
header['session'] = cfg.api_key()
|
||||
header['new_release'], header['new_rel_url'] = sabnzbd.NEW_VERSION
|
||||
|
||||
|
||||
header['version'] = sabnzbd.__version__
|
||||
header['paused'] = Downloader.do.paused or Downloader.do.postproc
|
||||
header['pause_int'] = scheduler.pause_int()
|
||||
header['paused_all'] = sabnzbd.PAUSED_ALL
|
||||
|
||||
header['diskspace1'] = "%.2f" % disk_free1
|
||||
header['diskspace2'] = "%.2f" % disk_free2
|
||||
header['diskspace1_norm'] = to_units(disk_free1 * GIGI)
|
||||
@@ -1720,25 +1674,11 @@ def build_header(prim, webdir=''):
|
||||
header['diskspacetotal1'] = "%.2f" % disk_total1
|
||||
header['diskspacetotal2'] = "%.2f" % disk_total2
|
||||
header['loadavg'] = loadavg()
|
||||
# Special formatting so only decimal points when needed
|
||||
header['speedlimit'] = "{1:0.{0}f}".format(int(speed_limit % 1 > 0), speed_limit)
|
||||
header['speedlimit_abs'] = "%s" % speed_limit_abs
|
||||
header['restart_req'] = sabnzbd.RESTART_REQ
|
||||
|
||||
header['have_warnings'] = str(sabnzbd.GUIHANDLER.count())
|
||||
header['last_warning'] = sabnzbd.GUIHANDLER.last().replace('WARNING', ('WARNING:')).replace('ERROR', T('ERROR:'))
|
||||
header['active_lang'] = cfg.language()
|
||||
header['my_lcldata'] = sabnzbd.DIR_LCLDATA
|
||||
header['my_home'] = sabnzbd.DIR_HOME
|
||||
|
||||
header['webdir'] = webdir
|
||||
header['pid'] = os.getpid()
|
||||
|
||||
header['finishaction'] = sabnzbd.QUEUECOMPLETE
|
||||
header['nt'] = sabnzbd.WIN32
|
||||
header['darwin'] = sabnzbd.DARWIN
|
||||
header['power_options'] = sabnzbd.WIN32 or sabnzbd.DARWIN or sabnzbd.LINUX_POWER
|
||||
|
||||
header['session'] = cfg.api_key()
|
||||
|
||||
header['quota'] = to_units(BPSMeter.do.quota)
|
||||
header['have_quota'] = bool(BPSMeter.do.quota > 0.0)
|
||||
@@ -1749,22 +1689,13 @@ def build_header(prim, webdir=''):
|
||||
header['cache_size'] = format_bytes(anfo.cache_size)
|
||||
header['cache_max'] = str(anfo.cache_limit)
|
||||
|
||||
header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event()
|
||||
|
||||
if sabnzbd.NEW_VERSION:
|
||||
header['new_release'], header['new_rel_url'] = sabnzbd.NEW_VERSION
|
||||
else:
|
||||
header['new_release'] = ''
|
||||
header['new_rel_url'] = ''
|
||||
|
||||
return header
|
||||
|
||||
|
||||
|
||||
def build_queue_header(prim, webdir='', search=None, start=0, limit=0):
|
||||
def build_queue_header(search=None, start=0, limit=0, output=None):
|
||||
""" Build full queue header """
|
||||
|
||||
header = build_header(prim, webdir)
|
||||
header = build_header(output=output)
|
||||
|
||||
bytespersec = BPSMeter.do.get_bps()
|
||||
qnfo = NzbQueue.do.queue_info(search=search, start=start, limit=limit)
|
||||
@@ -1958,21 +1889,6 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear
|
||||
return (items, fetched_items, total_items)
|
||||
|
||||
|
||||
def format_history_for_queue():
|
||||
""" Retrieves the information on currently active history items, and formats them for displaying in the queue """
|
||||
slotinfo = []
|
||||
history_items = get_active_history()
|
||||
|
||||
for item in history_items:
|
||||
slot = {'nzo_id': item['nzo_id'],
|
||||
'bookmark': '', 'filename': xml_name(item['name']), 'loaded': False,
|
||||
'stages': item['stage_log'], 'status': item['status'], 'bytes': item['bytes'],
|
||||
'size': item['size']}
|
||||
slotinfo.append(slot)
|
||||
|
||||
return slotinfo
|
||||
|
||||
|
||||
def get_active_history(queue=None, items=None):
|
||||
""" Get the currently in progress and active history queue. """
|
||||
if items is None:
|
||||
@@ -1987,7 +1903,7 @@ def get_active_history(queue=None, items=None):
|
||||
item['url'], item['status'], item['nzo_id'], item['storage'], item['path'], item['script_log'], \
|
||||
item['script_line'], item['download_time'], item['postproc_time'], item['stage_log'], \
|
||||
item['downloaded'], item['completeness'], item['fail_message'], item['url_info'], item['bytes'], \
|
||||
dummy, dummy = history
|
||||
dummy, dummy, item['password'] = history
|
||||
item['action_line'] = nzo.action_line
|
||||
item = unpack_history_info(item)
|
||||
|
||||
|
||||
@@ -25,7 +25,7 @@ import threading
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.constants import GIGI, ANFO, Status
|
||||
from sabnzbd.constants import GIGI, ANFO
|
||||
|
||||
|
||||
ARTICLE_LOCK = threading.Lock()
|
||||
@@ -59,7 +59,7 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def reserve_space(self, data):
|
||||
""" Is there space left in the set limit? """
|
||||
data_size = sys.getsizeof(data)*64
|
||||
data_size = sys.getsizeof(data) * 64
|
||||
self.__cache_size += data_size
|
||||
if self.__cache_size + data_size > self.__cache_limit:
|
||||
return False
|
||||
@@ -69,11 +69,10 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def free_reserve_space(self, data):
|
||||
""" Remove previously reserved space """
|
||||
data_size = sys.getsizeof(data)*64
|
||||
data_size = sys.getsizeof(data) * 64
|
||||
self.__cache_size -= data_size
|
||||
return self.__cache_size + data_size < self.__cache_limit
|
||||
|
||||
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def save_article(self, article, data):
|
||||
nzf = article.nzf
|
||||
@@ -148,7 +147,7 @@ class ArticleCache(object):
|
||||
@synchronized(ARTICLE_LOCK)
|
||||
def purge_articles(self, articles):
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("Purgable articles -> %s", articles)
|
||||
logging.debug("Purgeable articles -> %s", articles)
|
||||
for article in articles:
|
||||
if article in self.__article_list:
|
||||
self.__article_list.remove(article)
|
||||
|
||||
@@ -26,12 +26,7 @@ import struct
|
||||
import re
|
||||
from threading import Thread
|
||||
from time import sleep
|
||||
try:
|
||||
import hashlib
|
||||
new_md5 = hashlib.md5
|
||||
except:
|
||||
import md5
|
||||
new_md5 = md5.new
|
||||
import hashlib
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.misc import get_filepath, sanitize_filename, get_unique_filename, renamer, \
|
||||
@@ -65,7 +60,6 @@ class Assembler(Thread):
|
||||
self.queue.put(job)
|
||||
|
||||
def run(self):
|
||||
import sabnzbd.nzbqueue
|
||||
while 1:
|
||||
job = self.queue.get()
|
||||
if not job:
|
||||
@@ -160,7 +154,7 @@ def _assemble(nzf, path, dupe):
|
||||
fout = open(path, 'ab')
|
||||
|
||||
if cfg.quick_check():
|
||||
md5 = new_md5()
|
||||
md5 = hashlib.md5()
|
||||
else:
|
||||
md5 = None
|
||||
|
||||
@@ -172,7 +166,7 @@ def _assemble(nzf, path, dupe):
|
||||
break
|
||||
|
||||
# Sleep to allow decoder/assembler switching
|
||||
sleep(0.001)
|
||||
sleep(0.0001)
|
||||
article = decodetable[articlenum]
|
||||
|
||||
data = ArticleCache.do.load_article(article)
|
||||
@@ -265,7 +259,7 @@ def ParseFilePacket(f, header):
|
||||
|
||||
# Read and check the data
|
||||
data = f.read(len - 32)
|
||||
md5 = new_md5()
|
||||
md5 = hashlib.md5()
|
||||
md5.update(data)
|
||||
if md5sum != md5.digest():
|
||||
return nothing
|
||||
@@ -289,7 +283,7 @@ def ParseFilePacket(f, header):
|
||||
|
||||
|
||||
RE_SUBS = re.compile(r'\W+sub|subs|subpack|subtitle|subtitles(?![a-z])', re.I)
|
||||
def is_cloaked(path, names):
|
||||
def is_cloaked(nzo, path, names):
|
||||
""" Return True if this is likely to be a cloaked encrypted post """
|
||||
fname = unicoder(os.path.split(path)[1]).lower()
|
||||
fname = os.path.splitext(fname)[0]
|
||||
@@ -297,10 +291,16 @@ def is_cloaked(path, names):
|
||||
name = os.path.split(name.lower())[1]
|
||||
name, ext = os.path.splitext(unicoder(name))
|
||||
if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and len(names) < 3 and not RE_SUBS.search(fname):
|
||||
logging.debug('File %s is probably encrypted due to RAR with same name inside this RAR', fname)
|
||||
# Only warn once
|
||||
if nzo.encrypted == 0:
|
||||
logging.warning(T('Job "%s" is probably encrypted due to RAR with same name inside this RAR'), nzo.final_name)
|
||||
nzo.encrypted = 1
|
||||
return True
|
||||
elif 'password' in name:
|
||||
logging.debug('RAR %s is probably encrypted: "password" in filename %s', fname, name)
|
||||
# Only warn once
|
||||
if nzo.encrypted == 0:
|
||||
logging.warning(T('Job "%s" is probably encrypted: "password" in filename "%s"'), nzo.final_name, name)
|
||||
nzo.encrypted = 1
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -319,16 +319,17 @@ def check_encrypted_and_unwanted_files(nzo, filepath):
|
||||
|
||||
# Is it even a rarfile?
|
||||
if rarfile.is_rarfile(filepath):
|
||||
# Open the rar
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
zf = rarfile.RarFile(filepath, all_names=True)
|
||||
|
||||
# Check for encryption
|
||||
if nzo.encrypted == 0 and cfg.pause_on_pwrar() and (zf.needs_password() or is_cloaked(filepath, zf.namelist())):
|
||||
if nzo.encrypted == 0 and cfg.pause_on_pwrar() and (zf.needs_password() or is_cloaked(nzo, filepath, zf.namelist())):
|
||||
# Load all passwords
|
||||
passwords = get_all_passwords(nzo)
|
||||
|
||||
# Cloaked job?
|
||||
if is_cloaked(filepath, zf.namelist()):
|
||||
nzo.encrypted = 1
|
||||
if is_cloaked(nzo, filepath, zf.namelist()):
|
||||
encrypted = True
|
||||
elif not sabnzbd.HAVE_CRYPTOGRAPHY and not passwords:
|
||||
# if no cryptography installed, only error when no password was set
|
||||
@@ -339,7 +340,6 @@ def check_encrypted_and_unwanted_files(nzo, filepath):
|
||||
elif sabnzbd.HAVE_CRYPTOGRAPHY:
|
||||
# Lets test if any of the password work
|
||||
password_hit = False
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
|
||||
for password in passwords:
|
||||
if password:
|
||||
|
||||
@@ -22,7 +22,6 @@ sabnzbd.bpsmeter - bpsmeter
|
||||
import time
|
||||
import logging
|
||||
import re
|
||||
from math import floor
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import BYTES_FILE_NAME, BYTES_FILE_NAME_OLD, KIBI
|
||||
@@ -333,15 +332,15 @@ class BPSMeter(object):
|
||||
return None
|
||||
|
||||
# Calculate the variance in the speed
|
||||
avg = sum(self.bps_list[-timespan:])/timespan
|
||||
avg = sum(self.bps_list[-timespan:]) / timespan
|
||||
vari = 0
|
||||
for bps in self.bps_list[-timespan:]:
|
||||
vari += abs(bps - avg)
|
||||
vari = vari/timespan
|
||||
vari = vari / timespan
|
||||
|
||||
try:
|
||||
# See if the variance is less than 5%
|
||||
if (vari / (self.bps/KIBI)) < 0.05:
|
||||
if (vari / (self.bps / KIBI)) < 0.05:
|
||||
return avg
|
||||
else:
|
||||
return False
|
||||
@@ -350,7 +349,6 @@ class BPSMeter(object):
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def reset_quota(self, force=False):
|
||||
""" Check if it's time to reset the quota, optionally resuming
|
||||
Return True, when still paused
|
||||
|
||||
@@ -64,6 +64,7 @@ else:
|
||||
##############################################################################
|
||||
quick_check = OptionBool('misc', 'quick_check', True)
|
||||
sfv_check = OptionBool('misc', 'sfv_check', True)
|
||||
quick_check_ext_ignore = OptionList('misc', 'quick_check_ext_ignore', ['nfo', 'sfv', 'srr'])
|
||||
|
||||
email_server = OptionStr('misc', 'email_server', validation=validate_server)
|
||||
email_to = OptionList('misc', 'email_to', validation=validate_email)
|
||||
@@ -112,7 +113,6 @@ req_completion_rate = OptionNumber('misc', 'req_completion_rate', 100.2, 100, 20
|
||||
rating_enable = OptionBool('misc', 'rating_enable', False)
|
||||
rating_host = OptionStr('misc', 'rating_host', 'api.oznzb.com')
|
||||
rating_api_key = OptionStr('misc', 'rating_api_key')
|
||||
rating_feedback = OptionBool('misc', 'rating_feedback', True)
|
||||
rating_filter_enable = OptionBool('misc', 'rating_filter_enable', False)
|
||||
rating_filter_abort_audio = OptionNumber('misc', 'rating_filter_abort_audio', 0)
|
||||
rating_filter_abort_video = OptionNumber('misc', 'rating_filter_abort_video', 0)
|
||||
@@ -212,9 +212,7 @@ refresh_rate = OptionNumber('misc', 'refresh_rate', 0)
|
||||
rss_rate = OptionNumber('misc', 'rss_rate', 60, 15, 24 * 60)
|
||||
cache_limit = OptionStr('misc', 'cache_limit')
|
||||
web_dir = OptionStr('misc', 'web_dir', DEF_STDINTF)
|
||||
web_dir2 = OptionStr('misc', 'web_dir2')
|
||||
web_color = OptionStr('misc', 'web_color', '')
|
||||
web_color2 = OptionStr('misc', 'web_color2')
|
||||
cleanup_list = OptionList('misc', 'cleanup_list')
|
||||
warned_old_queue = OptionBool('misc', 'warned_old_queue9', False)
|
||||
notified_new_skin = OptionNumber('misc', 'notified_new_skin', 0)
|
||||
@@ -393,6 +391,9 @@ allow_duplicate_files = OptionBool('misc', 'allow_duplicate_files', False)
|
||||
warn_dupl_jobs = OptionBool('misc', 'warn_dupl_jobs', True)
|
||||
new_nzb_on_failure = OptionBool('misc', 'new_nzb_on_failure', False)
|
||||
|
||||
# TEMP
|
||||
nr_decoders = OptionNumber('misc', 'nr_decoders', 2)
|
||||
|
||||
|
||||
##############################################################################
|
||||
# Set root folders for Folder config-items
|
||||
|
||||
@@ -26,7 +26,6 @@ import threading
|
||||
import shutil
|
||||
import sabnzbd.misc
|
||||
from sabnzbd.constants import CONFIG_VERSION, NORMAL_PRIORITY, DEFAULT_PRIORITY, MAX_WIN_DFOLDER
|
||||
from sabnzbd.utils import listquote
|
||||
from sabnzbd.utils import configobj
|
||||
from sabnzbd.decorators import synchronized
|
||||
|
||||
@@ -43,6 +42,8 @@ database = {} # Holds the option dictionary
|
||||
modified = False # Signals a change in option dictionary
|
||||
# Should be reset after saving to settings file
|
||||
|
||||
paramfinder = re.compile(r'''(?:'.*?')|(?:".*?")|(?:[^'",\s][^,]*)''')
|
||||
|
||||
|
||||
class Option(object):
|
||||
""" Basic option class, basic fields """
|
||||
@@ -260,7 +261,7 @@ class OptionList(Option):
|
||||
if '"' not in value and ',' not in value:
|
||||
value = value.split()
|
||||
else:
|
||||
value = listquote.simplelist(value)
|
||||
value = paramfinder.findall(value)
|
||||
if self.__validation:
|
||||
error, value = self.__validation(value)
|
||||
if not error:
|
||||
@@ -275,6 +276,14 @@ class OptionList(Option):
|
||||
else:
|
||||
return ', '.join(lst)
|
||||
|
||||
def default_string(self):
|
||||
""" Return the default list as a comma-separated string """
|
||||
lst = self.default()
|
||||
if isinstance(lst, basestring):
|
||||
return lst
|
||||
else:
|
||||
return ', '.join(lst)
|
||||
|
||||
|
||||
class OptionStr(Option):
|
||||
""" String class """
|
||||
@@ -377,7 +386,7 @@ class ConfigServer(object):
|
||||
self.password = OptionPassword(name, 'password', '', add=False)
|
||||
self.connections = OptionNumber(name, 'connections', 1, 0, 100, add=False)
|
||||
self.ssl = OptionBool(name, 'ssl', False, add=False)
|
||||
self.ssl_verify = OptionNumber(name, 'ssl_verify', 1, add=False) # 0=No, 1=Normal, 2=Strict (hostname verification)
|
||||
self.ssl_verify = OptionNumber(name, 'ssl_verify', 2, add=False) # 0=No, 1=Normal, 2=Strict (hostname verification)
|
||||
self.enable = OptionBool(name, 'enable', True, add=False)
|
||||
self.optional = OptionBool(name, 'optional', False, add=False)
|
||||
self.retention = OptionNumber(name, 'retention', add=False)
|
||||
@@ -542,7 +551,7 @@ class OptionFilters(Option):
|
||||
if isinstance(val, list):
|
||||
filters.append(val)
|
||||
else:
|
||||
filters.append(listquote.simplelist(val))
|
||||
filters.append(paramfinder.findall(val))
|
||||
while len(filters[-1]) < 7:
|
||||
filters[-1].append('1')
|
||||
if not filters[-1][6]:
|
||||
@@ -813,9 +822,11 @@ def save_config(force=False):
|
||||
except KeyError:
|
||||
CFG[sec] = {}
|
||||
value = database[section][option]()
|
||||
if type(value) == type(True):
|
||||
# bool is a subclass of int, check first
|
||||
if isinstance(value, bool):
|
||||
# convert bool to int when saving so we store 0 or 1
|
||||
CFG[sec][kw] = str(int(value))
|
||||
elif type(value) == type(0):
|
||||
elif isinstance(value, int):
|
||||
CFG[sec][kw] = str(value)
|
||||
else:
|
||||
CFG[sec][kw] = value
|
||||
|
||||
@@ -52,6 +52,8 @@ RENAMES_FILE = '__renames__'
|
||||
ATTRIB_FILE = 'SABnzbd_attrib'
|
||||
REPAIR_REQUEST = 'repair-all.sab'
|
||||
|
||||
SABYENC_VERSION_REQUIRED = '3.0.2'
|
||||
|
||||
DB_HISTORY_VERSION = 1
|
||||
DB_QUEUE_VERSION = 1
|
||||
|
||||
@@ -100,7 +102,7 @@ PAUSED_PRIORITY = -2
|
||||
DUP_PRIORITY = -3
|
||||
STOP_PRIORITY = -4
|
||||
|
||||
STAGES = { 'Source' : 0, 'Download' : 1, 'Servers' : 2, 'Repair' : 3, 'Filejoin' : 4, 'Unpack' : 5, 'Script' : 6 }
|
||||
STAGES = {'Source': 0, 'Download': 1, 'Servers': 2, 'Repair': 3, 'Filejoin': 4, 'Unpack': 5, 'Script': 6}
|
||||
|
||||
VALID_ARCHIVES = ('.zip', '.rar', '.7z')
|
||||
|
||||
@@ -132,7 +134,7 @@ class Status():
|
||||
COMPLETED = 'Completed' # PP: Job is finished
|
||||
CHECKING = 'Checking' # Q: Pre-check is running
|
||||
DOWNLOADING = 'Downloading' # Q: Normal downloading
|
||||
EXTRACTING = 'Extracting' # PP: Archives are being extraced
|
||||
EXTRACTING = 'Extracting' # PP: Archives are being extracted
|
||||
FAILED = 'Failed' # PP: Job has failed, now in History
|
||||
FETCHING = 'Fetching' # Q: Job is downloading extra par2 files
|
||||
GRABBING = 'Grabbing' # Q: Getting an NZB from an external site
|
||||
|
||||
@@ -40,6 +40,7 @@ from sabnzbd.constants import DB_HISTORY_NAME, STAGES
|
||||
from sabnzbd.encoding import unicoder
|
||||
from sabnzbd.bpsmeter import this_week, this_month
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.misc import get_all_passwords
|
||||
|
||||
DB_LOCK = threading.RLock()
|
||||
|
||||
@@ -111,6 +112,12 @@ class HistoryDB(object):
|
||||
_ = self.execute('PRAGMA user_version = 1;') and \
|
||||
self.execute('ALTER TABLE "history" ADD COLUMN series TEXT;') and \
|
||||
self.execute('ALTER TABLE "history" ADD COLUMN md5sum TEXT;')
|
||||
if version < 2:
|
||||
# Add any missing columns added since second DB version
|
||||
# Use "and" to stop when database has been reset due to corruption
|
||||
_ = self.execute('PRAGMA user_version = 2;') and \
|
||||
self.execute('ALTER TABLE "history" ADD COLUMN password TEXT;')
|
||||
|
||||
|
||||
def execute(self, command, args=(), save=False):
|
||||
''' Wrapper for executing SQL commands '''
|
||||
@@ -184,10 +191,11 @@ class HistoryDB(object):
|
||||
"bytes" INTEGER,
|
||||
"meta" TEXT,
|
||||
"series" TEXT,
|
||||
"md5sum" TEXT
|
||||
"md5sum" TEXT,
|
||||
"password" TEXT
|
||||
)
|
||||
""")
|
||||
self.execute('PRAGMA user_version = 1;')
|
||||
self.execute('PRAGMA user_version = 2;')
|
||||
|
||||
def save(self):
|
||||
""" Save database to disk """
|
||||
@@ -244,8 +252,8 @@ class HistoryDB(object):
|
||||
|
||||
if self.execute("""INSERT INTO history (completed, name, nzb_name, category, pp, script, report,
|
||||
url, status, nzo_id, storage, path, script_log, script_line, download_time, postproc_time, stage_log,
|
||||
downloaded, completeness, fail_message, url_info, bytes, series, md5sum)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", t):
|
||||
downloaded, completeness, fail_message, url_info, bytes, series, md5sum, password)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""", t):
|
||||
self.save()
|
||||
|
||||
def fetch_history(self, start=None, limit=None, search=None, failed_only=0, categories=None):
|
||||
@@ -469,9 +477,15 @@ def build_history_info(nzo, storage='', downpath='', postproc_time=0, script_out
|
||||
if seriesname and season and episode:
|
||||
series = u'%s/%s/%s' % (seriesname.lower(), season, episode)
|
||||
|
||||
# See whatever the first password was, for the Retry
|
||||
password = ''
|
||||
passwords = get_all_passwords(nzo)
|
||||
if passwords:
|
||||
password = passwords[0]
|
||||
|
||||
return (completed, name, nzb_name, category, pp, script, report, url, status, nzo_id, storage, path,
|
||||
script_log, script_line, download_time, postproc_time, stage_log, downloaded, completeness,
|
||||
fail_message, url_info, bytes, series, nzo.md5sum)
|
||||
fail_message, url_info, bytes, series, nzo.md5sum, password)
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -25,21 +25,31 @@ import logging
|
||||
import re
|
||||
from time import sleep
|
||||
from threading import Thread
|
||||
try:
|
||||
import _yenc
|
||||
HAVE_YENC = True
|
||||
|
||||
except ImportError:
|
||||
HAVE_YENC = False
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE
|
||||
from sabnzbd.articlecache import ArticleCache
|
||||
from sabnzbd.constants import Status, MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE, SABYENC_VERSION_REQUIRED
|
||||
import sabnzbd.articlecache
|
||||
import sabnzbd.downloader
|
||||
import sabnzbd.nzbqueue
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.encoding import yenc_name_fixer
|
||||
from sabnzbd.misc import match_str
|
||||
|
||||
try:
|
||||
import _yenc
|
||||
HAVE_YENC = True
|
||||
except ImportError:
|
||||
HAVE_YENC = False
|
||||
|
||||
try:
|
||||
import sabyenc
|
||||
SABYENC_ENABLED = True
|
||||
SABYENC_VERSION = sabyenc.__version__
|
||||
# Verify version
|
||||
if SABYENC_VERSION != SABYENC_VERSION_REQUIRED:
|
||||
raise ImportError
|
||||
except ImportError:
|
||||
SABYENC_ENABLED = False
|
||||
|
||||
class CrcError(Exception):
|
||||
|
||||
@@ -58,33 +68,26 @@ class BadYenc(Exception):
|
||||
|
||||
class Decoder(Thread):
|
||||
|
||||
def __init__(self, servers):
|
||||
def __init__(self, servers, queue):
|
||||
Thread.__init__(self)
|
||||
|
||||
self.queue = Queue.Queue()
|
||||
self.queue = queue
|
||||
self.servers = servers
|
||||
|
||||
def decode(self, article, lines):
|
||||
self.queue.put((article, lines))
|
||||
# See if there's space left in cache, pause otherwise
|
||||
# But do allow some articles to enter queue, in case of full cache
|
||||
qsize = self.queue.qsize()
|
||||
if (not ArticleCache.do.reserve_space(lines) and qsize > MAX_DECODE_QUEUE) or (qsize > LIMIT_DECODE_QUEUE):
|
||||
sabnzbd.downloader.Downloader.do.delay()
|
||||
|
||||
def stop(self):
|
||||
# Put multiple to stop all decoders
|
||||
self.queue.put(None)
|
||||
self.queue.put(None)
|
||||
|
||||
def run(self):
|
||||
from sabnzbd.nzbqueue import NzbQueue
|
||||
while 1:
|
||||
# Sleep to allow decoder/assembler switching
|
||||
sleep(0.001)
|
||||
sleep(0.0001)
|
||||
art_tup = self.queue.get()
|
||||
if not art_tup:
|
||||
break
|
||||
|
||||
article, lines = art_tup
|
||||
article, lines, raw_data = art_tup
|
||||
nzf = article.nzf
|
||||
nzo = nzf.nzo
|
||||
art_id = article.article
|
||||
@@ -92,7 +95,8 @@ class Decoder(Thread):
|
||||
|
||||
# Check if the space that's now free can let us continue the queue?
|
||||
qsize = self.queue.qsize()
|
||||
if (ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and (qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
|
||||
if (sabnzbd.articlecache.ArticleCache.do.free_reserve_space(lines) or qsize < MAX_DECODE_QUEUE) and \
|
||||
(qsize < LIMIT_DECODE_QUEUE) and sabnzbd.downloader.Downloader.do.delayed:
|
||||
sabnzbd.downloader.Downloader.do.undelay()
|
||||
|
||||
data = None
|
||||
@@ -100,14 +104,14 @@ class Decoder(Thread):
|
||||
found = False # Proper article found
|
||||
logme = None
|
||||
|
||||
if lines:
|
||||
if lines or raw_data:
|
||||
try:
|
||||
if nzo.precheck:
|
||||
raise BadYenc
|
||||
register = True
|
||||
logging.debug("Decoding %s", art_id)
|
||||
|
||||
data = decode(article, lines)
|
||||
data = decode(article, lines, raw_data)
|
||||
nzf.article_count += 1
|
||||
found = True
|
||||
|
||||
@@ -118,7 +122,7 @@ class Decoder(Thread):
|
||||
|
||||
sabnzbd.downloader.Downloader.do.pause()
|
||||
article.fetcher = None
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
register = False
|
||||
|
||||
except MemoryError, e:
|
||||
@@ -130,7 +134,7 @@ class Decoder(Thread):
|
||||
|
||||
sabnzbd.downloader.Downloader.do.pause()
|
||||
article.fetcher = None
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
register = False
|
||||
|
||||
except CrcError, e:
|
||||
@@ -139,17 +143,18 @@ class Decoder(Thread):
|
||||
|
||||
data = e.data
|
||||
|
||||
except BadYenc:
|
||||
except (BadYenc, ValueError):
|
||||
# Handles precheck and badly formed articles
|
||||
killed = False
|
||||
found = False
|
||||
if nzo.precheck and lines and lines[0].startswith('223 '):
|
||||
data_to_check = lines or raw_data
|
||||
if nzo.precheck and data_to_check and data_to_check[0].startswith('223 '):
|
||||
# STAT was used, so we only get a status code
|
||||
found = True
|
||||
else:
|
||||
# Examine headers (for precheck) or body (for download)
|
||||
# And look for DMCA clues (while skipping "X-" headers)
|
||||
for line in lines:
|
||||
for line in data_to_check:
|
||||
lline = line.lower()
|
||||
if 'message-id:' in lline:
|
||||
found = True
|
||||
@@ -162,14 +167,14 @@ class Decoder(Thread):
|
||||
if nzo.precheck:
|
||||
if found and not killed:
|
||||
# Pre-check, proper article found, just register
|
||||
logging.debug('Server has article %s', art_id)
|
||||
logging.debug('Server %s has article %s', article.fetcher, art_id)
|
||||
register = True
|
||||
elif not killed and not found:
|
||||
logme = T('Badly formed yEnc article in %s') % art_id
|
||||
logging.info(logme)
|
||||
|
||||
if not found or killed:
|
||||
new_server_found = self.__search_new_server(article)
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
logme = None
|
||||
@@ -178,8 +183,7 @@ class Decoder(Thread):
|
||||
logme = T('Unknown Error while decoding %s') % art_id
|
||||
logging.info(logme)
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
|
||||
new_server_found = self.__search_new_server(article)
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
logme = None
|
||||
@@ -191,66 +195,39 @@ class Decoder(Thread):
|
||||
nzo.inc_log('bad_art_log', art_id)
|
||||
|
||||
else:
|
||||
new_server_found = self.__search_new_server(article)
|
||||
new_server_found = sabnzbd.downloader.Downloader.do.search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
elif nzo.precheck:
|
||||
found = False
|
||||
|
||||
if logme or not found:
|
||||
# Add extra parfiles when there was a damaged article
|
||||
if cfg.prospective_par_download() and nzo.extrapars:
|
||||
nzo.prospective_add(nzf)
|
||||
|
||||
if data:
|
||||
ArticleCache.do.save_article(article, data)
|
||||
sabnzbd.articlecache.ArticleCache.do.save_article(article, data)
|
||||
|
||||
if register:
|
||||
NzbQueue.do.register_article(article, found)
|
||||
|
||||
def __search_new_server(self, article):
|
||||
from sabnzbd.nzbqueue import NzbQueue
|
||||
article.add_to_try_list(article.fetcher)
|
||||
|
||||
nzf = article.nzf
|
||||
nzo = nzf.nzo
|
||||
|
||||
new_server_found = False
|
||||
fill_server_found = False
|
||||
|
||||
for server in self.servers:
|
||||
if server.active and not article.server_in_try_list(server):
|
||||
if not sabnzbd.highest_server(server):
|
||||
fill_server_found = True
|
||||
else:
|
||||
new_server_found = True
|
||||
break
|
||||
|
||||
# Only found one (or more) fill server(s)
|
||||
if not new_server_found and fill_server_found:
|
||||
article.allow_fill_server = True
|
||||
new_server_found = True
|
||||
|
||||
if new_server_found:
|
||||
article.fetcher = None
|
||||
article.tries = 0
|
||||
|
||||
# Allow all servers to iterate over this nzo and nzf again
|
||||
NzbQueue.do.reset_try_lists(nzf, nzo)
|
||||
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('%s => found at least one untested server', article)
|
||||
|
||||
else:
|
||||
msg = T('%s => missing from all servers, discarding') % article
|
||||
logging.info(msg)
|
||||
article.nzf.nzo.inc_log('missing_art_log', msg)
|
||||
|
||||
return new_server_found
|
||||
sabnzbd.nzbqueue.NzbQueue.do.register_article(article, found)
|
||||
|
||||
|
||||
YDEC_TRANS = ''.join([chr((i + 256 - 42) % 256) for i in xrange(256)])
|
||||
def decode(article, data):
|
||||
def decode(article, data, raw_data):
|
||||
# Do we have SABYenc? Let it do all the work
|
||||
if sabnzbd.decoder.SABYENC_ENABLED:
|
||||
decoded_data, output_filename, crc, crc_expected, crc_correct = sabyenc.decode_usenet_chunks(raw_data, article.bytes)
|
||||
|
||||
# Assume it is yenc
|
||||
article.nzf.type = 'yenc'
|
||||
|
||||
# Only set the name if it was found
|
||||
if output_filename:
|
||||
article.nzf.filename = output_filename
|
||||
|
||||
# CRC check
|
||||
if not crc_correct:
|
||||
raise CrcError(crc_expected, crc, decoded_data)
|
||||
|
||||
return decoded_data
|
||||
|
||||
# Continue for _yenc or Python-yEnc
|
||||
# Filter out empty ones
|
||||
data = filter(None, data)
|
||||
# No point in continuing if we don't have any data left
|
||||
@@ -308,7 +285,7 @@ def decode(article, data):
|
||||
crcname = 'crc32'
|
||||
|
||||
if crcname in yend:
|
||||
_partcrc = '0' * (8 - len(yend[crcname])) + yend[crcname].upper()
|
||||
_partcrc = yenc_name_fixer('0' * (8 - len(yend[crcname])) + yend[crcname].upper())
|
||||
else:
|
||||
_partcrc = None
|
||||
logging.debug("Corrupt header detected => yend: %s", yend)
|
||||
|
||||
@@ -76,9 +76,9 @@ def is_archive(path):
|
||||
return -1, None, ''
|
||||
elif rarfile.is_rarfile(path):
|
||||
try:
|
||||
zf = rarfile.RarFile(path)
|
||||
# Set path to tool to open it
|
||||
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
|
||||
zf = rarfile.RarFile(path)
|
||||
return 0, zf, '.rar'
|
||||
except:
|
||||
return -1, None, ''
|
||||
|
||||
@@ -27,9 +27,11 @@ from nntplib import NNTPPermanentError
|
||||
import socket
|
||||
import random
|
||||
import sys
|
||||
import Queue
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized, synchronized_CV, CV
|
||||
from sabnzbd.constants import MAX_DECODE_QUEUE, LIMIT_DECODE_QUEUE
|
||||
from sabnzbd.decoder import Decoder
|
||||
from sabnzbd.newswrapper import NewsWrapper, request_server_info
|
||||
from sabnzbd.articlecache import ArticleCache
|
||||
@@ -38,7 +40,7 @@ import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.bpsmeter import BPSMeter
|
||||
import sabnzbd.scheduler
|
||||
from sabnzbd.misc import from_units
|
||||
from sabnzbd.misc import from_units, nntp_to_msg
|
||||
from sabnzbd.utils.happyeyeballs import happyeyeballs
|
||||
|
||||
|
||||
@@ -88,7 +90,7 @@ class Server(object):
|
||||
self.errormsg = ''
|
||||
self.warning = ''
|
||||
self.info = None # Will hold getaddrinfo() list
|
||||
self.ssl_info = '' # Will hold the type and cipher of SSL connection
|
||||
self.ssl_info = '' # Will hold the type and cipher of SSL connection
|
||||
self.request = False # True if a getaddrinfo() request is pending
|
||||
self.have_body = 'free.xsusenet.com' not in host
|
||||
self.have_stat = True # Assume server has "STAT", until proven otherwise
|
||||
@@ -105,7 +107,7 @@ class Server(object):
|
||||
2 - and self.info has more than 1 entry (read: IP address): Return the quickest IP based on the happyeyeballs algorithm
|
||||
In case of problems: return the host name itself
|
||||
"""
|
||||
# Check if already a succesfull ongoing connection
|
||||
# Check if already a successful ongoing connection
|
||||
if self.busy_threads and self.busy_threads[0].nntp:
|
||||
# Re-use that IP
|
||||
logging.debug('%s: Re-using address %s', self.host, self.busy_threads[0].nntp.host)
|
||||
@@ -196,7 +198,14 @@ class Downloader(Thread):
|
||||
for server in config.get_servers():
|
||||
self.init_server(None, server)
|
||||
|
||||
self.decoder = Decoder(self.servers)
|
||||
self.decoder_queue = Queue.Queue()
|
||||
|
||||
# Initialize decoders, only 1 for non-SABYenc
|
||||
self.decoder_workers = []
|
||||
nr_decoders = cfg.nr_decoders() if sabnzbd.decoder.SABYENC_ENABLED else 1
|
||||
for i in range(nr_decoders):
|
||||
self.decoder_workers.append(Decoder(self.servers, self.decoder_queue))
|
||||
|
||||
Downloader.do = self
|
||||
|
||||
def init_server(self, oldserver, newserver):
|
||||
@@ -217,7 +226,7 @@ class Downloader(Thread):
|
||||
timeout = srv.timeout()
|
||||
threads = srv.connections()
|
||||
priority = srv.priority()
|
||||
ssl = srv.ssl() and sabnzbd.HAVE_SSL
|
||||
ssl = srv.ssl()
|
||||
ssl_verify = srv.ssl_verify()
|
||||
username = srv.username()
|
||||
password = srv.password()
|
||||
@@ -376,6 +385,14 @@ class Downloader(Thread):
|
||||
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
|
||||
|
||||
def decode(self, article, lines, raw_data):
|
||||
self.decoder_queue.put((article, lines, raw_data))
|
||||
# See if there's space left in cache, pause otherwise
|
||||
# But do allow some articles to enter queue, in case of full cache
|
||||
qsize = self.decoder_queue.qsize()
|
||||
if (not ArticleCache.do.reserve_space(lines) and qsize > MAX_DECODE_QUEUE) or (qsize > LIMIT_DECODE_QUEUE):
|
||||
sabnzbd.downloader.Downloader.do.delay()
|
||||
|
||||
def run(self):
|
||||
# First check IPv6 connectivity
|
||||
sabnzbd.EXTERNAL_IPV6 = sabnzbd.test_ipv6()
|
||||
@@ -397,8 +414,9 @@ class Downloader(Thread):
|
||||
sabnzbd.HAVE_SSL_CONTEXT = False
|
||||
logging.debug('SSL verification test: %s', sabnzbd.HAVE_SSL_CONTEXT)
|
||||
|
||||
# Start decoder
|
||||
self.decoder.start()
|
||||
# Start decoders
|
||||
for decoder in self.decoder_workers:
|
||||
decoder.start()
|
||||
|
||||
# Kick BPS-Meter to check quota
|
||||
BPSMeter.do.update()
|
||||
@@ -458,7 +476,7 @@ class Downloader(Thread):
|
||||
# Article too old for the server, treat as missing
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('Article %s too old for %s', article.article, server.id)
|
||||
self.decoder.decode(article, None)
|
||||
self.decode(article, None, None)
|
||||
break
|
||||
|
||||
server.idle_threads.remove(nw)
|
||||
@@ -470,8 +488,7 @@ class Downloader(Thread):
|
||||
self.__request_article(nw)
|
||||
else:
|
||||
try:
|
||||
logging.info("%s@%s: Initiating connection",
|
||||
nw.thrdnum, server.id)
|
||||
logging.info("%s@%s: Initiating connection", nw.thrdnum, server.id)
|
||||
nw.init_connect(self.write_fds)
|
||||
except:
|
||||
logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.id, sys.exc_info()[1])
|
||||
@@ -486,8 +503,10 @@ class Downloader(Thread):
|
||||
break
|
||||
|
||||
if empty:
|
||||
self.decoder.stop()
|
||||
self.decoder.join()
|
||||
# Start decoders
|
||||
for decoder in self.decoder_workers:
|
||||
decoder.stop()
|
||||
decoder.join()
|
||||
|
||||
for server in self.servers:
|
||||
server.stop(self.read_fds, self.write_fds)
|
||||
@@ -512,17 +531,17 @@ class Downloader(Thread):
|
||||
if readkeys or writekeys:
|
||||
read, write, error = select.select(readkeys, writekeys, (), 1.0)
|
||||
|
||||
# Why check so often when so few things happend?
|
||||
# Why check so often when so few things happened?
|
||||
if self.can_be_slowed and len(readkeys) >= 8 and len(read) <= 2:
|
||||
time.sleep(0.01)
|
||||
time.sleep(0.05)
|
||||
|
||||
# Need to initalize the check during first 20 seconds
|
||||
# Need to initialize the check during first 20 seconds
|
||||
if self.can_be_slowed is None or self.can_be_slowed_timer:
|
||||
# Wait for stable speed to start testing
|
||||
if not self.can_be_slowed_timer and BPSMeter.do.get_stable_speed(timespan=10):
|
||||
self.can_be_slowed_timer = time.time()
|
||||
|
||||
# Check 10 seconds after enabeling slowdown
|
||||
# Check 10 seconds after enabling slowdown
|
||||
if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10:
|
||||
# Now let's check if it was stable in the last 10 seconds
|
||||
self.can_be_slowed = (BPSMeter.do.get_stable_speed(timespan=10) > 0)
|
||||
@@ -594,17 +613,16 @@ class Downloader(Thread):
|
||||
if nzo:
|
||||
nzo.update_download_stats(BPSMeter.do.get_bps(), server.id, bytes)
|
||||
|
||||
if len(nw.lines) == 1:
|
||||
code = nw.lines[0][:3]
|
||||
if not nw.connected or code == '480':
|
||||
to_decoder = True
|
||||
if not done and nw.status_code != '222':
|
||||
if not nw.connected or nw.status_code == '480':
|
||||
done = False
|
||||
|
||||
try:
|
||||
nw.finish_connect(code)
|
||||
nw.finish_connect(nw.status_code)
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nw.lines[0])
|
||||
nw.lines = []
|
||||
nw.data = ''
|
||||
logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
|
||||
nw.clear_data()
|
||||
except NNTPPermanentError, error:
|
||||
# Handle login problems
|
||||
block = False
|
||||
@@ -678,7 +696,7 @@ class Downloader(Thread):
|
||||
continue
|
||||
except:
|
||||
logging.error(T('Connecting %s@%s failed, message=%s'),
|
||||
nw.thrdnum, nw.server.id, nw.lines[0])
|
||||
nw.thrdnum, nw.server.id, nntp_to_msg(nw.data))
|
||||
# No reset-warning needed, above logging is sufficient
|
||||
self.__reset_nw(nw, None, warn=False)
|
||||
|
||||
@@ -686,30 +704,27 @@ class Downloader(Thread):
|
||||
logging.info("Connecting %s@%s finished", nw.thrdnum, nw.server.id)
|
||||
self.__request_article(nw)
|
||||
|
||||
elif code == '223':
|
||||
elif nw.status_code == '223':
|
||||
done = True
|
||||
logging.debug('Article <%s> is present', article.article)
|
||||
self.decoder.decode(article, nw.lines)
|
||||
|
||||
elif code == '211':
|
||||
elif nw.status_code == '211':
|
||||
done = False
|
||||
|
||||
logging.debug("group command ok -> %s",
|
||||
nw.lines)
|
||||
logging.debug("group command ok -> %s", nntp_to_msg(nw.data))
|
||||
nw.group = nw.article.nzf.nzo.group
|
||||
nw.lines = []
|
||||
nw.data = ''
|
||||
nw.clear_data()
|
||||
self.__request_article(nw)
|
||||
|
||||
elif code in ('411', '423', '430'):
|
||||
elif nw.status_code in ('411', '423', '430'):
|
||||
done = True
|
||||
nw.lines = None
|
||||
to_decoder = False
|
||||
logging.debug('Thread %s@%s: Article %s missing (error=%s)',
|
||||
nw.thrdnum, nw.server.id, article.article, nw.status_code)
|
||||
# Search for new article
|
||||
if not self.search_new_server(article):
|
||||
sabnzbd.nzbqueue.NzbQueue.do.register_article(article, False)
|
||||
|
||||
logging.info('Thread %s@%s: Article ' +
|
||||
'%s missing (error=%s)',
|
||||
nw.thrdnum, nw.server.id, article.article, code)
|
||||
|
||||
elif code == '480':
|
||||
elif nw.status_code == '480':
|
||||
if server.active:
|
||||
server.active = False
|
||||
server.errormsg = T('Server %s requires user/password') % ''
|
||||
@@ -718,7 +733,7 @@ class Downloader(Thread):
|
||||
msg = T('Server %s requires user/password') % nw.server.id
|
||||
self.__reset_nw(nw, msg, quit=True)
|
||||
|
||||
elif code == '500':
|
||||
elif nw.status_code == '500':
|
||||
if nzo.precheck:
|
||||
# Assume "STAT" command is not supported
|
||||
server.have_stat = False
|
||||
@@ -727,8 +742,7 @@ class Downloader(Thread):
|
||||
# Assume "BODY" command is not supported
|
||||
server.have_body = False
|
||||
logging.debug('Server %s does not support BODY', server.id)
|
||||
nw.lines = []
|
||||
nw.data = ''
|
||||
nw.clear_data()
|
||||
self.__request_article(nw)
|
||||
|
||||
if done:
|
||||
@@ -736,7 +750,10 @@ class Downloader(Thread):
|
||||
server.errormsg = server.warning = ''
|
||||
if sabnzbd.LOG_ALL:
|
||||
logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.id, article.article)
|
||||
self.decoder.decode(article, nw.lines)
|
||||
|
||||
# Missing articles are not decoded
|
||||
if to_decoder:
|
||||
self.decode(article, nw.lines, nw.data)
|
||||
|
||||
nw.soft_reset()
|
||||
server.busy_threads.remove(nw)
|
||||
@@ -784,7 +801,7 @@ class Downloader(Thread):
|
||||
if article:
|
||||
if article.tries > cfg.max_art_tries() and (article.fetcher.optional or not cfg.max_art_opt()):
|
||||
# Too many tries on this server, consider article missing
|
||||
self.decoder.decode(article, None)
|
||||
self.decode(article, None, None)
|
||||
else:
|
||||
# Remove this server from try_list
|
||||
article.fetcher = None
|
||||
@@ -827,6 +844,23 @@ class Downloader(Thread):
|
||||
logging.info("Traceback: ", exc_info=True)
|
||||
self.__reset_nw(nw, "server broke off connection", quit=False)
|
||||
|
||||
def search_new_server(self, article):
|
||||
# Search new server
|
||||
article.add_to_try_list(article.fetcher)
|
||||
for server in self.servers:
|
||||
if server.active and not article.server_in_try_list(server):
|
||||
if server.priority >= article.fetcher.priority:
|
||||
article.fetcher = None
|
||||
article.tries = 0
|
||||
# Allow all servers for this nzo and nzf again (but not for this article)
|
||||
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article.nzf, article.nzf.nzo)
|
||||
return True
|
||||
|
||||
msg = T('%s => missing from all servers, discarding') % article
|
||||
logging.debug(msg)
|
||||
article.nzf.nzo.inc_log('missing_art_log', msg)
|
||||
return False
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Timed restart of servers admin.
|
||||
# For each server all planned events are kept in a list.
|
||||
|
||||
File diff suppressed because it is too large
@@ -330,20 +330,20 @@ def sanitize_foldername(name, limit=True):
|
||||
else:
|
||||
lst.append(ch)
|
||||
name = ''.join(lst)
|
||||
|
||||
name = name.strip()
|
||||
if name != '.' and name != '..':
|
||||
name = name.rstrip('.')
|
||||
if not name:
|
||||
name = 'unknown'
|
||||
|
||||
if sabnzbd.WIN32 or cfg.sanitize_safe():
|
||||
name = replace_win_devices(name)
|
||||
|
||||
maxlen = cfg.folder_max_length()
|
||||
if limit and len(name) > maxlen:
|
||||
# Folders can't end on a dot in Windows
|
||||
name = name[:maxlen].strip('.')
|
||||
name = name[:maxlen]
|
||||
|
||||
# And finally, make sure it doesn't end in a dot
|
||||
if name != '.' and name != '..':
|
||||
name = name.rstrip('.')
|
||||
if not name:
|
||||
name = 'unknown'
|
||||
|
||||
return name
|
||||
|
||||
@@ -781,7 +781,7 @@ def exit_sab(value):
|
||||
sys.stdout.flush()
|
||||
if getattr(sys, 'frozen', None) == 'macosx_app':
|
||||
sabnzbd.SABSTOP = True
|
||||
from PyObjCTools import AppHelper # @UnresolvedImport
|
||||
from PyObjCTools import AppHelper
|
||||
AppHelper.stopEventLoop()
|
||||
sys.exit(value)
|
||||
|
||||
@@ -1428,15 +1428,6 @@ def is_writable(path):
|
||||
return True
|
||||
|
||||
|
||||
def format_source_url(url):
|
||||
""" Format URL suitable for 'Source' stage """
|
||||
if sabnzbd.HAVE_SSL:
|
||||
prot = 'https'
|
||||
else:
|
||||
prot = 'http:'
|
||||
return url
|
||||
|
||||
|
||||
def get_base_url(url):
|
||||
""" Return only the true root domain for the favicon, so api.oznzb.com -> oznzb.com
|
||||
But also api.althub.co.za -> althub.co.za
|
||||
@@ -1550,3 +1541,11 @@ def get_urlbase(url):
|
||||
""" Return the base URL (like http://server.domain.com/) """
|
||||
parsed_uri = urlparse(url)
|
||||
return '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
|
||||
|
||||
|
||||
def nntp_to_msg(text):
|
||||
""" Format raw NNTP data for display """
|
||||
if isinstance(text, list):
|
||||
text = text[0]
|
||||
lines = text.split('\r\n')
|
||||
return lines[0]
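The nntp_to_msg() helper added above collapses a raw NNTP response (either a string or a list of received chunks) to its first line for display. A minimal sketch of its behaviour, using hypothetical response strings:

    # Illustration only; the response values below are made up
    nntp_to_msg('200 news.example.com NNRP Service Ready\r\n')
    # -> '200 news.example.com NNRP Service Ready'
    nntp_to_msg(['480 Authentication required\r\n', 'remainder of the data'])
    # -> '480 Authentication required'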
|
||||
|
||||
@@ -34,7 +34,7 @@ from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, \
|
||||
import sabnzbd.utils.rarfile as rarfile
|
||||
from sabnzbd.misc import format_time_string, find_on_path, make_script_path, int_conv, \
|
||||
flag_file, real_path, globber, globber_full, get_all_passwords, renamer, clip_path, \
|
||||
has_win_device
|
||||
has_win_device, calc_age
|
||||
from sabnzbd.tvsort import SeriesSorter
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.constants import Status, QCHECK_FILE, RENAMES_FILE
|
||||
@@ -80,6 +80,7 @@ ZIP_COMMAND = None
|
||||
SEVEN_COMMAND = None
|
||||
IONICE_COMMAND = None
|
||||
RAR_PROBLEM = False
|
||||
PAR2_MT = True
|
||||
RAR_VERSION = 0
|
||||
|
||||
|
||||
@@ -144,26 +145,43 @@ def find_programs(curdir):
|
||||
sabnzbd.newsunpack.PAR2C_COMMAND = sabnzbd.newsunpack.PAR2_COMMAND
|
||||
|
||||
if not (sabnzbd.WIN32 or sabnzbd.DARWIN):
|
||||
# Run check on rar version
|
||||
version, original = unrar_check(sabnzbd.newsunpack.RAR_COMMAND)
|
||||
sabnzbd.newsunpack.RAR_PROBLEM = not original or version < 380
|
||||
sabnzbd.newsunpack.RAR_PROBLEM = not original or version < sabnzbd.constants.REC_RAR_VERSION
|
||||
sabnzbd.newsunpack.RAR_VERSION = version
|
||||
logging.debug('UNRAR binary version %.2f', (float(version) / 100))
|
||||
if sabnzbd.newsunpack.RAR_PROBLEM:
|
||||
logging.info('Problematic UNRAR')
|
||||
|
||||
def external_processing(extern_proc, complete_dir, filename, nicename, cat, group, status, failure_url):
|
||||
# Run check on par2-multicore
|
||||
sabnzbd.newsunpack.PAR2_MT = par2_mt_check(sabnzbd.newsunpack.PAR2_COMMAND)
|
||||
|
||||
|
||||
ENV_NZO_FIELDS = ['bytes', 'bytes_downloaded', 'bytes_tried', 'cat', 'duplicate', 'encrypted',
|
||||
'fail_msg', 'filename', 'final_name', 'group', 'nzo_id', 'oversized', 'password', 'pp',
|
||||
'priority', 'repair', 'script', 'status', 'unpack', 'unwanted_ext', 'url']
|
||||
|
||||
def external_processing(extern_proc, nzo, complete_dir, nicename, status):
|
||||
""" Run a user postproc script, return console output and exit value """
|
||||
command = [str(extern_proc), str(complete_dir), str(filename),
|
||||
str(nicename), '', str(cat), str(group), str(status)]
|
||||
command = [str(extern_proc), str(complete_dir), str(nzo.filename),
|
||||
str(nicename), '', str(nzo.cat), str(nzo.group), str(status)]
|
||||
|
||||
failure_url = nzo.nzo_info.get('failure', '')
|
||||
if failure_url:
|
||||
command.append(str(failure_url))
|
||||
|
||||
# Fields not in the NZO directly
|
||||
extra_env_fields = {'failure_url': failure_url,
|
||||
'complete_dir': complete_dir,
|
||||
'pp_status': status,
|
||||
'download_time': nzo.nzo_info.get('download_time', ''),
|
||||
'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq),
|
||||
'age': calc_age(nzo.avg_date),
|
||||
'version': sabnzbd.__version__}
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = fix_env()
|
||||
env = create_env(nzo, extra_env_fields)
|
||||
|
||||
logging.info('Running external script %s(%s, %s, %s, %s, %s, %s, %s, %s)',
|
||||
extern_proc, complete_dir, filename, nicename, '', cat, group, status, failure_url)
|
||||
extern_proc, complete_dir, nzo.filename, nicename, '', nzo.cat, nzo.group, status, failure_url)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
startupinfo=stup, env=env, creationflags=creationflags)
|
||||
@@ -182,7 +200,7 @@ def external_script(script, p1, p2, p3=None, p4=None):
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = fix_env()
|
||||
env = create_env()
|
||||
logging.info('Running user script %s(%s, %s)', script, p1, p2)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
@@ -636,6 +654,14 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
|
||||
logging.warning(T('ERROR: CRC failed in "%s"'), setname)
|
||||
fail = 2 # Older unrar versions report a wrong password as a CRC error
|
||||
|
||||
elif line.startswith('File too large'):
|
||||
nzo.fail_msg = T('Unpacking failed, file too large for filesystem (FAT?)')
|
||||
msg = (u'[%s] ' + T('Unpacking failed, file too large for filesystem (FAT?)')) % setname
|
||||
nzo.set_unpack_info('Unpack', unicoder(msg), set=setname)
|
||||
# ERROR: File too large for file system (bigfile-5000MB)
|
||||
logging.error(T('ERROR: File too large for filesystem (%s)'), setname)
|
||||
fail = 1
|
||||
|
||||
elif line.startswith('Write error'):
|
||||
nzo.fail_msg = T('Unpacking failed, write error or disk is full?')
|
||||
msg = (u'[%s] ' + T('Unpacking failed, write error or disk is full?')) % setname
|
||||
@@ -819,7 +845,7 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
|
||||
startupinfo=stup, creationflags=creationflags)
|
||||
|
||||
output = p.stdout.read()
|
||||
logging.debug('unzip output: %s', output)
|
||||
logging.debug('unzip output: \n%s', output)
|
||||
|
||||
ret = p.wait()
|
||||
|
||||
@@ -1319,18 +1345,29 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, sin
|
||||
nzo.status = Status.FAILED
|
||||
|
||||
elif line.startswith('You need'):
|
||||
# Because par2cmdline doesn't handle split files correctly
|
||||
# if there are joinables, let's join them first and try again
|
||||
# Only do this when the par2 detection also reported just one output file
|
||||
if joinables and len(datafiles) == 1:
|
||||
error, newf = file_join(nzo, parfolder, parfolder, True, joinables)
|
||||
# Only do it again if we had a good join
|
||||
if newf:
|
||||
retry_classic = True
|
||||
# Save the renames in case of retry
|
||||
for jn in joinables:
|
||||
renames[datafiles[0]] = os.path.split(jn)[1]
|
||||
joinables = []
|
||||
# Need to set it to 1 so the renames get saved
|
||||
finished = 1
|
||||
break
|
||||
|
||||
chunks = line.split()
|
||||
|
||||
needed_blocks = int(chunks[2])
|
||||
|
||||
avail_blocks = 0
|
||||
logging.info('Need to fetch %s more blocks, checking blocks', needed_blocks)
|
||||
|
||||
avail_blocks = 0
|
||||
|
||||
extrapars = parfile_nzf.extrapars
|
||||
|
||||
block_table = {}
|
||||
|
||||
for nzf in extrapars:
|
||||
# Don't count extrapars that are completed already
|
||||
if nzf.completed:
|
||||
@@ -1430,8 +1467,15 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, sin
|
||||
reconstructed.append(os.path.join(workdir, old_name))
|
||||
|
||||
elif 'Could not write' in line and 'at offset 0:' in line and not classic:
|
||||
# Hit a bug in par2-tbb, retry with par2-classic
|
||||
retry_classic = sabnzbd.WIN32
|
||||
# If there are joinables, this error will only happen in case of 100% complete files
|
||||
# We can just skip the retry, because par2cmdline will fail in those cases
|
||||
# because it refuses to scan the ".001" file
|
||||
if joinables:
|
||||
finished = 1
|
||||
used_joinables = []
|
||||
else:
|
||||
# Hit a bug in par2-tbb, retry with par2-classic
|
||||
retry_classic = sabnzbd.WIN32
|
||||
|
||||
elif ' cannot be renamed to ' in line:
|
||||
if not classic and sabnzbd.WIN32:
|
||||
@@ -1528,19 +1572,46 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False, sin
|
||||
return finished, readd, pars, datafiles, used_joinables, used_par2
|
||||
|
||||
|
||||
def fix_env():
|
||||
""" OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
|
||||
def create_env(nzo=None, extra_env_fields=None):
|
||||
""" Modify the environment for pp-scripts with extra information
|
||||
OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
|
||||
other: return None
|
||||
"""
|
||||
env = os.environ.copy()
|
||||
|
||||
# Are we adding things?
|
||||
if nzo:
|
||||
for field in ENV_NZO_FIELDS:
|
||||
try:
|
||||
field_value = getattr(nzo, field)
|
||||
# Special filters for Python types
|
||||
if field_value is None:
|
||||
env['SAB_' + field.upper()] = ''
|
||||
elif isinstance(field_value, bool):
|
||||
env['SAB_' + field.upper()] = str(field_value*1)
|
||||
else:
|
||||
env['SAB_' + field.upper()] = str(deunicode(field_value))
|
||||
except:
|
||||
# Catch key/unicode errors
|
||||
pass
|
||||
|
||||
for field in extra_env_fields:
|
||||
try:
|
||||
env['SAB_' + field.upper()] = str(deunicode(extra_env_fields[field]))
|
||||
except:
|
||||
# Catch key/unicode errors
|
||||
pass
|
||||
|
||||
if sabnzbd.DARWIN:
|
||||
env = os.environ.copy()
|
||||
if 'PYTHONPATH' in env:
|
||||
del env['PYTHONPATH']
|
||||
if 'PYTHONHOME' in env:
|
||||
del env['PYTHONHOME']
|
||||
return env
|
||||
else:
|
||||
elif not nzo:
|
||||
# No modification
|
||||
return None
|
||||
return env
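Once create_env() has run, every attribute listed in ENV_NZO_FIELDS, plus the extra fields supplied by the caller, is exposed to post-processing scripts as a SAB_-prefixed environment variable. A minimal sketch of a user script reading a few of them (field names taken from the code above; the values are whatever SABnzbd passes at runtime):

    import os

    # Illustrative sketch of a post-processing script consuming SAB_* variables
    final_name = os.environ.get('SAB_FINAL_NAME', '')
    pp_status = os.environ.get('SAB_PP_STATUS', '')
    complete_dir = os.environ.get('SAB_COMPLETE_DIR', '')
    print 'Finished %s (status %s) in %s' % (final_name, pp_status, complete_dir)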
|
||||
|
||||
|
||||
def userxbit(filename):
|
||||
# Returns boolean if the x-bit for user is set on the given file
|
||||
@@ -1715,9 +1786,13 @@ def QuickCheck(set, nzo):
|
||||
nzf_list = nzo.finished_files
|
||||
renames = {}
|
||||
|
||||
# Files to ignore
|
||||
ignore_ext = cfg.quick_check_ext_ignore()
|
||||
|
||||
for file in md5pack:
|
||||
found = False
|
||||
file_platform = platform_encode(file)
|
||||
file_to_ignore = os.path.splitext(file_platform)[1].lower().replace('.', '') in ignore_ext
|
||||
for nzf in nzf_list:
|
||||
# Do a simple filename based check
|
||||
if file_platform == nzf.filename:
|
||||
@@ -1725,6 +1800,10 @@ def QuickCheck(set, nzo):
|
||||
if (nzf.md5sum is not None) and nzf.md5sum == md5pack[file]:
|
||||
logging.debug('Quick-check of file %s OK', file)
|
||||
result = True
|
||||
elif file_to_ignore:
|
||||
# We don't care about these files
|
||||
logging.debug('Quick-check ignoring file %s', file)
|
||||
result = True
|
||||
else:
|
||||
logging.info('Quick-check of file %s failed!', file)
|
||||
return False # When any file fails, just stop
|
||||
@@ -1732,15 +1811,24 @@ def QuickCheck(set, nzo):
|
||||
|
||||
# Now lets do obfuscation check
|
||||
if nzf.md5sum == md5pack[file]:
|
||||
renames[file_platform] = nzf.filename
|
||||
logging.debug('Quick-check renamed %s to %s', nzf.filename, file_platform)
|
||||
renamer(os.path.join(nzo.downpath, nzf.filename), os.path.join(nzo.downpath, file_platform))
|
||||
nzf.filename = file_platform
|
||||
result = True
|
||||
found = True
|
||||
break
|
||||
try:
|
||||
logging.debug('Quick-check will rename %s to %s', nzf.filename, file_platform)
|
||||
renamer(os.path.join(nzo.downpath, nzf.filename), os.path.join(nzo.downpath, file_platform))
|
||||
renames[file_platform] = nzf.filename
|
||||
nzf.filename = file_platform
|
||||
result = True
|
||||
found = True
|
||||
break
|
||||
except IOError:
|
||||
# Rename failed for some reason, probably already done
|
||||
break
|
||||
|
||||
if not found:
|
||||
if file_to_ignore:
|
||||
# We don't care about these files
|
||||
logging.debug('Quick-check ignoring missing file %s', file)
|
||||
continue
|
||||
|
||||
logging.info('Cannot Quick-check missing file %s!', file)
|
||||
return False # Missing file is failure
|
||||
|
||||
@@ -1792,6 +1880,18 @@ def unrar_check(rar):
|
||||
return version, original
|
||||
|
||||
|
||||
def par2_mt_check(par2_path):
|
||||
""" Detect if we have multicore par2 variants """
|
||||
try:
|
||||
par2_version = run_simple([par2_path, '-h'])
|
||||
# Look for a threads option
|
||||
if '-t<' in par2_version:
|
||||
return True
|
||||
except:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def sfv_check(sfv_path):
|
||||
""" Verify files using SFV file,
|
||||
input: full path of sfv, file are assumed to be relative to sfv
|
||||
@@ -1874,7 +1974,7 @@ def pre_queue(name, pp, cat, script, priority, size, groups):
|
||||
|
||||
try:
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
env = fix_env()
|
||||
env = create_env()
|
||||
logging.info('Running pre-queue script %s', command)
|
||||
p = subprocess.Popen(command, shell=need_shell, stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
||||
|
||||
@@ -27,44 +27,29 @@ import time
|
||||
import logging
|
||||
import re
|
||||
import select
|
||||
import ssl
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import *
|
||||
import sabnzbd.cfg
|
||||
from sabnzbd.misc import nntp_to_msg
|
||||
|
||||
import threading
|
||||
_RLock = threading.RLock
|
||||
del threading
|
||||
|
||||
# Import SSL if available
|
||||
if sabnzbd.HAVE_SSL:
|
||||
import ssl
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
WantReadError = ssl.SSLWantReadError
|
||||
CertificateError = ssl.CertificateError
|
||||
else:
|
||||
WantReadError = ssl.SSLError
|
||||
CertificateError = ssl.SSLError
|
||||
# Have to make errors available under Python <2.7.9
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
WantReadError = ssl.SSLWantReadError
|
||||
CertificateError = ssl.CertificateError
|
||||
else:
|
||||
# Dummy class so this exception is ignored by clients without ssl installed
|
||||
class WantReadError(Exception):
|
||||
def __init__(self, value):
|
||||
self.parameter = value
|
||||
def __str__(self):
|
||||
return repr(self.parameter)
|
||||
class CertificateError(Exception):
|
||||
def __init__(self, value):
|
||||
self.parameter = value
|
||||
def __str__(self):
|
||||
return repr(self.parameter)
|
||||
|
||||
WantReadError = ssl.SSLError
|
||||
CertificateError = ssl.SSLError
|
||||
|
||||
# Set pre-defined socket timeout
|
||||
socket.setdefaulttimeout(DEF_TIMEOUT)
|
||||
|
||||
# getaddrinfo() can be very slow. In some situations this can lead
|
||||
# to delayed starts and timeouts on connections.
|
||||
# Because of this, the results will be cached in the server object.
|
||||
|
||||
|
||||
def _retrieve_info(server):
|
||||
""" Async attempt to run getaddrinfo() for specified server """
|
||||
info = GetServerParms(server.host, server.port)
|
||||
@@ -133,7 +118,7 @@ def con(sock, host, port, sslenabled, write_fds, nntp):
|
||||
try:
|
||||
sock.connect((host, port))
|
||||
sock.setblocking(0)
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
if sslenabled:
|
||||
# Log SSL/TLS info
|
||||
logging.info("%s@%s: Connected using %s (%s)",
|
||||
nntp.nw.thrdnum, nntp.nw.server.host, get_ssl_version(sock), sock.cipher()[0])
|
||||
@@ -164,8 +149,6 @@ def con(sock, host, port, sslenabled, write_fds, nntp):
|
||||
nntp.error(e)
|
||||
|
||||
|
||||
|
||||
|
||||
def probablyipv4(ip):
|
||||
if ip.count('.') == 3 and re.sub('[0123456789.]', '', ip) == '':
|
||||
return True
|
||||
@@ -204,7 +187,7 @@ class NNTP(object):
|
||||
if probablyipv6(host):
|
||||
af = socket.AF_INET6
|
||||
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
if sslenabled:
|
||||
# Use context or just wrapper
|
||||
if sabnzbd.HAVE_SSL_CONTEXT:
|
||||
# Setup the SSL socket
|
||||
@@ -228,10 +211,6 @@ class NNTP(object):
|
||||
ciphers = sabnzbd.cfg.ssl_ciphers() if sabnzbd.cfg.ssl_ciphers() else None
|
||||
# Use a regular wrapper, no certificate validation
|
||||
self.sock = ssl.wrap_socket(socket.socket(af, socktype, proto), ciphers=ciphers)
|
||||
|
||||
elif sslenabled and not sabnzbd.HAVE_SSL:
|
||||
logging.error(T('Error importing OpenSSL module. Connecting with NON-SSL'))
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
else:
|
||||
self.sock = socket.socket(af, socktype, proto)
|
||||
|
||||
@@ -244,7 +223,7 @@ class NNTP(object):
|
||||
# if blocking (server test) only wait for 15 seconds during connect until timeout
|
||||
self.sock.settimeout(15)
|
||||
self.sock.connect((self.host, self.port))
|
||||
if sslenabled and sabnzbd.HAVE_SSL:
|
||||
if sslenabled:
|
||||
# Log SSL/TLS info
|
||||
logging.info("%s@%s: Connected using %s (%s)",
|
||||
self.nw.thrdnum, self.nw.server.host, get_ssl_version(self.sock), self.sock.cipher()[0])
|
||||
@@ -268,7 +247,6 @@ class NNTP(object):
|
||||
finally:
|
||||
self.error(e)
|
||||
|
||||
|
||||
def error(self, error):
|
||||
if 'SSL23_GET_SERVER_HELLO' in str(error) or 'SSL3_GET_RECORD' in str(error):
|
||||
error = T('This server does not allow SSL on this port')
|
||||
@@ -301,8 +279,9 @@ class NewsWrapper(object):
|
||||
|
||||
self.timeout = None
|
||||
self.article = None
|
||||
self.data = ''
|
||||
self.data = []
|
||||
self.lines = []
|
||||
self.last_line = ''
|
||||
|
||||
self.nntp = None
|
||||
self.recv = None
|
||||
@@ -318,6 +297,14 @@ class NewsWrapper(object):
|
||||
self.pass_ok = False
|
||||
self.force_login = False
|
||||
|
||||
@property
|
||||
def status_code(self):
|
||||
""" Shorthand to get the code """
|
||||
try:
|
||||
return self.data[0][:3]
|
||||
except:
|
||||
return ''
|
||||
|
||||
def init_connect(self, write_fds):
|
||||
self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl,
|
||||
self.server.send_group, self, self.server.username, self.server.password,
|
||||
@@ -337,7 +324,7 @@ class NewsWrapper(object):
|
||||
if code in ('501',) and self.user_sent:
|
||||
# Change to a sensible text
|
||||
code = '481'
|
||||
self.lines[0] = T('Authentication failed, check username/password.')
|
||||
self.data[0] = T('Authentication failed, check username/password.')
|
||||
self.user_ok = True
|
||||
self.pass_sent = True
|
||||
|
||||
@@ -350,10 +337,11 @@ class NewsWrapper(object):
|
||||
self.pass_ok = False
|
||||
|
||||
if code in ('400', '502'):
|
||||
raise NNTPPermanentError(self.lines[0])
|
||||
raise NNTPPermanentError(nntp_to_msg(self.data))
|
||||
elif not self.user_sent:
|
||||
command = 'authinfo user %s\r\n' % self.server.username
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
self.user_sent = True
|
||||
elif not self.user_ok:
|
||||
if code == '381':
|
||||
@@ -368,11 +356,12 @@ class NewsWrapper(object):
|
||||
if self.user_ok and not self.pass_sent:
|
||||
command = 'authinfo pass %s\r\n' % self.server.password
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
self.pass_sent = True
|
||||
elif self.user_ok and not self.pass_ok:
|
||||
if code != '281':
|
||||
# Assume that login failed (code 481 or other)
|
||||
raise NNTPPermanentError(self.lines[0])
|
||||
raise NNTPPermanentError(nntp_to_msg(self.data))
|
||||
else:
|
||||
self.connected = True
|
||||
|
||||
@@ -390,11 +379,13 @@ class NewsWrapper(object):
|
||||
else:
|
||||
command = 'ARTICLE <%s>\r\n' % (self.article.article)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
|
||||
def send_group(self, group):
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
command = 'GROUP %s\r\n' % (group)
|
||||
self.nntp.sock.sendall(command)
|
||||
self.data = []
|
||||
|
||||
def recv_chunk(self, block=False):
|
||||
""" Receive data, return #bytes, done, skip """
|
||||
@@ -422,33 +413,60 @@ class NewsWrapper(object):
|
||||
else:
|
||||
return (0, False, True)
|
||||
|
||||
self.data += chunk
|
||||
new_lines = self.data.split('\r\n')
|
||||
# See if incorrect newline-only was used
|
||||
# Do this as a special case to prevent using extra memory
|
||||
# for normal articles
|
||||
if len(new_lines) == 1 and '\r' not in self.data:
|
||||
new_lines = self.data.split('\n')
|
||||
# Data is processed differently depending on C-yEnc version
|
||||
if sabnzbd.decoder.SABYENC_ENABLED:
|
||||
# Append so we can do 1 join(), much faster than multiple!
|
||||
self.data.append(chunk)
|
||||
|
||||
self.data = new_lines.pop()
|
||||
# Official end-of-article is ".\r\n" but sometimes it can get lost between 2 chunks
|
||||
chunk_len = len(chunk)
|
||||
if chunk[-5:] == '\r\n.\r\n':
|
||||
return (chunk_len, True, False)
|
||||
elif chunk_len < 5 and len(self.data) > 1:
|
||||
# We need to make sure the end is not split over 2 chunks
|
||||
# This is faster than join()
|
||||
combine_chunk = self.data[-2][-5:] + chunk
|
||||
if combine_chunk[-5:] == '\r\n.\r\n':
|
||||
return (chunk_len, True, False)
|
||||
|
||||
# Already remove the starting dots
|
||||
for i in xrange(len(new_lines)):
|
||||
if new_lines[i][:2] == '..':
|
||||
new_lines[i] = new_lines[i][1:]
|
||||
self.lines.extend(new_lines)
|
||||
|
||||
if self.lines and self.lines[-1] == '.':
|
||||
self.lines = self.lines[1:-1]
|
||||
return (len(chunk), True, False)
|
||||
# Still in middle of data, so continue!
|
||||
return (chunk_len, False, False)
|
||||
else:
|
||||
return (len(chunk), False, False)
|
||||
self.last_line += chunk
|
||||
new_lines = self.last_line.split('\r\n')
|
||||
# See if incorrect newline-only was used
|
||||
# Do this as a special case to prevent using extra memory
|
||||
# for normal articles
|
||||
if len(new_lines) == 1 and '\r' not in self.last_line:
|
||||
new_lines = self.last_line.split('\n')
|
||||
|
||||
self.last_line = new_lines.pop()
|
||||
|
||||
# Already remove the starting dots
|
||||
for i in xrange(len(new_lines)):
|
||||
if new_lines[i][:2] == '..':
|
||||
new_lines[i] = new_lines[i][1:]
|
||||
self.lines.extend(new_lines)
|
||||
|
||||
# For status-code purposes
|
||||
if not self.data:
|
||||
self.data.append(chunk)
|
||||
|
||||
if self.lines and self.lines[-1] == '.':
|
||||
self.lines = self.lines[1:-1]
|
||||
return (len(chunk), True, False)
|
||||
else:
|
||||
return (len(chunk), False, False)
|
||||
|
||||
def soft_reset(self):
|
||||
self.timeout = None
|
||||
self.article = None
|
||||
self.data = ''
|
||||
self.clear_data()
|
||||
|
||||
def clear_data(self):
|
||||
self.data = []
|
||||
self.lines = []
|
||||
self.last_line = ''
|
||||
|
||||
def hard_reset(self, wait=True, quit=True):
|
||||
if self.nntp:
|
||||
|
||||
@@ -120,6 +120,7 @@ def check_classes(gtype, section):
|
||||
logging.debug('Incorrect Notify option %s:%s_prio_%s', section, section, gtype)
|
||||
return False
|
||||
|
||||
|
||||
def get_prio(gtype, section):
|
||||
""" Check if `gtype` is enabled in `section` """
|
||||
try:
|
||||
@@ -177,6 +178,7 @@ def send_notification(title, msg, gtype):
|
||||
if have_ntfosd() and sabnzbd.cfg.ntfosd_enable() and check_classes(gtype, 'ntfosd'):
|
||||
send_notify_osd(title, msg)
|
||||
|
||||
|
||||
def reset_growl():
|
||||
""" Reset Growl (after changing language) """
|
||||
global _GROWL, _GROWL_REG
|
||||
@@ -517,6 +519,7 @@ def send_nscript(title, msg, gtype, force=False, test=None):
|
||||
return T('Notification script "%s" does not exist') % script_path
|
||||
return ''
|
||||
|
||||
|
||||
def send_windows(title, msg, gtype):
|
||||
if sabnzbd.WINTRAY and not sabnzbd.WINTRAY.terminate:
|
||||
try:
|
||||
|
||||
@@ -431,7 +431,7 @@ class NzbQueue:
|
||||
if not (quiet or nzo.status in ('Fetching',)):
|
||||
notifier.send_notification(T('NZB added to queue'), nzo.filename, 'download')
|
||||
|
||||
if cfg.auto_sort():
|
||||
if not quiet and cfg.auto_sort():
|
||||
self.sort_by_avg_age()
|
||||
return nzo.nzo_id
|
||||
|
||||
@@ -440,7 +440,7 @@ class NzbQueue:
|
||||
if nzo_id in self.__nzo_table:
|
||||
nzo = self.__nzo_table.pop(nzo_id)
|
||||
nzo.deleted = True
|
||||
if cleanup and nzo.status not in (Status.COMPLETED, Status.FAILED):
|
||||
if cleanup and not nzo.is_gone():
|
||||
nzo.status = Status.DELETED
|
||||
self.__nzo_list.remove(nzo)
|
||||
|
||||
@@ -791,13 +791,13 @@ class NzbQueue:
|
||||
# Check if past propagation delay, or forced
|
||||
if not cfg.propagation_delay() or nzo.priority == TOP_PRIORITY or (nzo.avg_stamp + float(cfg.propagation_delay() * 60)) < time.time():
|
||||
# Don't try to get an article if server is in try_list of nzo and category allowed by server
|
||||
if nzo.server_allowed(server) and not nzo.server_in_try_list(server):
|
||||
article = nzo.get_article(server, servers)
|
||||
if article:
|
||||
return article
|
||||
if nzo.server_allowed(server):
|
||||
if not nzo.server_in_try_list(server):
|
||||
article = nzo.get_article(server, servers)
|
||||
if article:
|
||||
return article
|
||||
# Stop after first job that wasn't paused/propagating/etc
|
||||
# But make sure not to get stuck behind bad category
|
||||
if self.__top_only and not nzo.server_allowed(server):
|
||||
if self.__top_only:
|
||||
return
|
||||
|
||||
@synchronized(NZBQUEUE_LOCK)
|
||||
@@ -809,7 +809,7 @@ class NzbQueue:
|
||||
logging.debug("Discarding article %s, no longer in queue", article.article)
|
||||
return
|
||||
|
||||
file_done, post_done, reset = nzo.remove_article(article, found)
|
||||
file_done, post_done = nzo.remove_article(article, found)
|
||||
|
||||
filename = nzf.filename
|
||||
|
||||
@@ -900,7 +900,7 @@ class NzbQueue:
|
||||
bytes_left_previous_page += b_left
|
||||
|
||||
if (not search) or search in nzo.final_name_pw_clean.lower():
|
||||
if (not limit) or (start <= n < start+limit):
|
||||
if (not limit) or (start <= n < start + limit):
|
||||
pnfo_list.append(nzo.gather_info())
|
||||
n += 1
|
||||
|
||||
|
||||
@@ -28,16 +28,12 @@ import datetime
|
||||
import xml.sax
|
||||
import xml.sax.handler
|
||||
import xml.sax.xmlreader
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except ImportError:
|
||||
from StringIO import StringIO
|
||||
try:
|
||||
import hashlib
|
||||
new_md5 = hashlib.md5
|
||||
except:
|
||||
import md5
|
||||
new_md5 = md5.new
|
||||
|
||||
# SABnzbd modules
|
||||
import sabnzbd
|
||||
@@ -46,10 +42,9 @@ from sabnzbd.constants import sample_match, GIGI, ATTRIB_FILE, JOB_ADMIN, \
|
||||
PAUSED_PRIORITY, TOP_PRIORITY, DUP_PRIORITY, REPAIR_PRIORITY, \
|
||||
RENAMES_FILE, Status, PNFO
|
||||
from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
|
||||
get_unique_path, get_admin_path, remove_all, format_source_url, \
|
||||
sanitize_filename, globber_full, sanitize_foldername, int_conv, \
|
||||
set_permissions, format_time_string, long_path, trim_win_path, \
|
||||
fix_unix_encoding, calc_age
|
||||
get_unique_path, get_admin_path, remove_all, sanitize_filename, globber_full, \
|
||||
sanitize_foldername, int_conv, set_permissions, format_time_string, long_path, \
|
||||
trim_win_path, fix_unix_encoding, calc_age
|
||||
from sabnzbd.decorators import synchronized, IO_LOCK
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
@@ -82,8 +77,6 @@ class Article(TryList):
|
||||
TryList.__init__(self)
|
||||
|
||||
self.fetcher = None
|
||||
self.allow_fill_server = False
|
||||
|
||||
self.article = article
|
||||
self.art_id = None
|
||||
self.bytes = bytes
|
||||
@@ -175,7 +168,6 @@ class Article(TryList):
|
||||
TryList.__init__(self)
|
||||
self.fetcher = None
|
||||
self.fetcher_priority = 0
|
||||
self.allow_fill_server = False
|
||||
self.tries = 0
|
||||
|
||||
def __repr__(self):
|
||||
@@ -241,7 +233,6 @@ class NzbFile(TryList):
|
||||
def finish_import(self):
|
||||
""" Load the article objects from disk """
|
||||
logging.debug("Finishing import on %s", self.subject)
|
||||
|
||||
article_db = sabnzbd.load_data(self.nzf_id, self.nzo.workpath, remove=False)
|
||||
if article_db:
|
||||
for partnum in article_db:
|
||||
@@ -253,8 +244,6 @@ class NzbFile(TryList):
|
||||
self.articles.append(article)
|
||||
self.decodetable[partnum] = article
|
||||
|
||||
# Look for article with lowest number
|
||||
self.initial_article = self.decodetable[self.lowest_partnum]
|
||||
self.import_finished = True
|
||||
|
||||
def remove_article(self, article, found):
|
||||
@@ -265,18 +254,7 @@ class NzbFile(TryList):
|
||||
self.bytes_left -= article.bytes
|
||||
self.nzo.bytes_tried += article.bytes
|
||||
|
||||
reset = False
|
||||
if article.partnum == self.lowest_partnum and self.articles:
|
||||
# Issue reset
|
||||
self.initial_article = None
|
||||
self.reset_try_list()
|
||||
reset = True
|
||||
|
||||
done = True
|
||||
if self.articles:
|
||||
done = False
|
||||
|
||||
return (done, reset)
|
||||
return (not self.articles)
|
||||
|
||||
def set_par2(self, setname, vol, blocks):
|
||||
""" Designate this this file as a par2 file """
|
||||
@@ -287,17 +265,11 @@ class NzbFile(TryList):
|
||||
|
||||
def get_article(self, server, servers):
|
||||
""" Get next article to be downloaded """
|
||||
if self.initial_article:
|
||||
article = self.initial_article.get_article(server, servers)
|
||||
for article in self.articles:
|
||||
article = article.get_article(server, servers)
|
||||
if article:
|
||||
return article
|
||||
|
||||
else:
|
||||
for article in self.articles:
|
||||
article = article.get_article(server, servers)
|
||||
if article:
|
||||
return article
|
||||
|
||||
self.add_to_try_list(server)
|
||||
|
||||
def reset_all_try_lists(self):
|
||||
@@ -311,11 +283,6 @@ class NzbFile(TryList):
|
||||
""" Is this file completed? """
|
||||
return self.import_finished and not bool(self.articles)
|
||||
|
||||
@property
|
||||
def lowest_partnum(self):
|
||||
""" Get lowest article number of this file """
|
||||
return min(self.decodetable)
|
||||
|
||||
def remove_admin(self):
|
||||
""" Remove article database from disk (sabnzbd_nzf_<id>)"""
|
||||
try:
|
||||
@@ -370,7 +337,7 @@ class NzbParser(xml.sax.handler.ContentHandler):
|
||||
self.skipped_files = 0
|
||||
self.nzf_list = []
|
||||
self.groups = []
|
||||
self.md5 = new_md5()
|
||||
self.md5 = hashlib.md5()
|
||||
self.filter = remove_samples
|
||||
self.now = time.time()
|
||||
|
||||
@@ -917,7 +884,6 @@ class NzbObject(TryList):
|
||||
# Raise error, so it's not added
|
||||
raise TypeError
|
||||
|
||||
|
||||
def check_for_dupe(self, nzf):
|
||||
filename = nzf.filename
|
||||
|
||||
@@ -944,7 +910,6 @@ class NzbObject(TryList):
|
||||
self.servercount[serverid] = bytes
|
||||
self.bytes_downloaded += bytes
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def remove_nzf(self, nzf):
|
||||
if nzf in self.files:
|
||||
@@ -1034,7 +999,7 @@ class NzbObject(TryList):
|
||||
@synchronized(IO_LOCK)
|
||||
def remove_article(self, article, found):
|
||||
nzf = article.nzf
|
||||
file_done, reset = nzf.remove_article(article, found)
|
||||
file_done = nzf.remove_article(article, found)
|
||||
|
||||
if file_done:
|
||||
self.remove_nzf(nzf)
|
||||
@@ -1042,15 +1007,17 @@ class NzbObject(TryList):
|
||||
# set the nzo status to return "Queued"
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
self.fail_msg = T('Aborted, cannot be completed') + ' - https://sabnzbd.org/not-complete'
|
||||
self.fail_msg = T('Aborted, cannot be completed') + ' - https://sabnzbd.org/not-complete'
|
||||
self.set_unpack_info('Download', self.fail_msg, unique=False)
|
||||
logging.debug('Abort job "%s", due to impossibility to complete it', self.final_name_pw_clean)
|
||||
# Update the last check time
|
||||
sabnzbd.LAST_HISTORY_UPDATE = time.time()
|
||||
return True, True, True
|
||||
return True, True
|
||||
|
||||
if reset:
|
||||
self.reset_try_list()
|
||||
if not found:
|
||||
# Add extra parfiles when there was a damaged article and not pre-checking
|
||||
if cfg.prospective_par_download() and self.extrapars and not self.precheck:
|
||||
self.prospective_add(nzf)
|
||||
|
||||
if file_done:
|
||||
self.handle_par2(nzf, file_done)
|
||||
@@ -1062,7 +1029,7 @@ class NzbObject(TryList):
|
||||
self.status = Status.QUEUED
|
||||
self.set_download_report()
|
||||
|
||||
return (file_done, post_done, reset)
|
||||
return (file_done, post_done)
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def remove_saved_article(self, article):
|
||||
@@ -1156,8 +1123,8 @@ class NzbObject(TryList):
|
||||
if dif > 0:
|
||||
prefix += T('WAIT %s sec') % dif + ' / ' # : Queue indicator for waiting URL fetch
|
||||
if (self.avg_stamp + float(cfg.propagation_delay() * 60)) > time.time() and self.priority != TOP_PRIORITY:
|
||||
wait_time = int((self.avg_stamp + float(cfg.propagation_delay() * 60) - time.time())/60 + 0.5)
|
||||
prefix += T('PROPAGATING %s min') % wait_time + ' / ' # : Queue indicator while waiting for propagtion of post
|
||||
wait_time = int((self.avg_stamp + float(cfg.propagation_delay() * 60) - time.time()) / 60 + 0.5)
|
||||
prefix += T('PROPAGATING %s min') % wait_time + ' / ' # : Queue indicator while waiting for propagation of post
|
||||
return '%s%s' % (prefix, self.final_name)
|
||||
|
||||
@property
|
||||
@@ -1225,7 +1192,6 @@ class NzbObject(TryList):
|
||||
|
||||
__re_quick_par2_check = re.compile(r'\.par2\W*', re.I)
|
||||
|
||||
|
||||
@synchronized(IO_LOCK)
|
||||
def prospective_add(self, nzf):
|
||||
""" Add par2 files to compensate for missing articles
|
||||
@@ -1261,7 +1227,6 @@ class NzbObject(TryList):
|
||||
# Reset all try lists
|
||||
self.reset_all_try_lists()
|
||||
|
||||
|
||||
def check_quality(self, req_ratio=0):
|
||||
""" Determine amount of articles present on servers
|
||||
and return (gross available, nett) bytes
|
||||
@@ -1324,7 +1289,7 @@ class NzbObject(TryList):
|
||||
msg = u''.join((msg1, msg2, msg3, msg4, msg5, ))
|
||||
self.set_unpack_info('Download', msg, unique=True)
|
||||
if self.url:
|
||||
self.set_unpack_info('Source', format_source_url(self.url), unique=True)
|
||||
self.set_unpack_info('Source', self.url, unique=True)
|
||||
servers = config.get_servers()
|
||||
if len(self.servercount) > 0:
|
||||
msgs = ['%s=%sB' % (servers[server].displayname(), to_units(self.servercount[server])) for server in self.servercount if server in servers]
|
||||
@@ -1820,7 +1785,7 @@ def scan_password(name):
|
||||
# Is it maybe in 'name / password' notation?
|
||||
if slash == name.find(' / ') + 1:
|
||||
# Remove the extra space after name and before password
|
||||
return name[:slash-1].strip('. '), name[slash + 2:]
|
||||
return name[:slash - 1].strip('. '), name[slash + 2:]
|
||||
return name[:slash].strip('. '), name[slash + 1:]
|
||||
|
||||
# Look for "name password=password"
|
||||
|
||||
@@ -721,6 +721,7 @@ class SABnzbdDelegate(NSObject):
|
||||
|
||||
def restartAction_(self, sender):
|
||||
self.setMenuTitle_("\n\n%s\n" % (T('Stopping...')))
|
||||
logging.info('Restart requested by tray')
|
||||
sabnzbd.trigger_restart()
|
||||
self.setMenuTitle_("\n\n%s\n" % (T('Stopping...')))
|
||||
|
||||
|
||||
@@ -50,6 +50,7 @@ import sabnzbd.nzbqueue
|
||||
import sabnzbd.database as database
|
||||
import sabnzbd.notifier as notifier
|
||||
import sabnzbd.utils.rarfile as rarfile
|
||||
import sabnzbd.utils.checkdir
|
||||
|
||||
|
||||
class PostProcessor(Thread):
|
||||
@@ -164,9 +165,16 @@ class PostProcessor(Thread):
|
||||
return None
|
||||
|
||||
def run(self):
|
||||
""" Actual processing """
|
||||
check_eoq = False
|
||||
""" Postprocessor loop """
|
||||
# First we do a dircheck
|
||||
complete_dir = sabnzbd.cfg.complete_dir.get_path()
|
||||
if sabnzbd.utils.checkdir.isFAT(complete_dir):
|
||||
logging.warning(T('Completed Download Folder %s is on FAT file system, limiting maximum file size to 4GB') % complete_dir)
|
||||
else:
|
||||
logging.info("Completed Download Folder %s is not on FAT", complete_dir)
|
||||
|
||||
# Start looping
|
||||
check_eoq = False
|
||||
while not self.__stop:
|
||||
self.__busy = False
|
||||
|
||||
@@ -467,20 +475,12 @@ def process_job(nzo):
|
||||
# Run the user script
|
||||
script_path = make_script_path(script)
|
||||
if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
|
||||
# For windows, we use Short-Paths until 2.0.0 for compatibility
|
||||
if sabnzbd.WIN32:
|
||||
import win32api
|
||||
workdir_complete = clip_path(workdir_complete)
|
||||
if len(workdir_complete) > 259:
|
||||
workdir_complete = win32api.GetShortPathName(workdir_complete)
|
||||
|
||||
# set the current nzo status to "Ext Script...". Used in History
|
||||
# Set the current nzo status to "Ext Script...". Used in History
|
||||
nzo.status = Status.RUNNING
|
||||
nzo.set_action_line(T('Running script'), unicoder(script))
|
||||
nzo.set_unpack_info('Script', T('Running user script %s') % unicoder(script), unique=True)
|
||||
script_log, script_ret = external_processing(script_path, workdir_complete, nzo.filename,
|
||||
dirname, cat, nzo.group, job_result,
|
||||
nzo.nzo_info.get('failure', ''))
|
||||
script_log, script_ret = external_processing(script_path, nzo, clip_path(workdir_complete),
|
||||
dirname, job_result)
|
||||
script_line = get_last_line(script_log)
|
||||
if script_log:
|
||||
script_output = nzo.nzo_id
|
||||
@@ -521,7 +521,6 @@ def process_job(nzo):
|
||||
# No '(more)' button needed
|
||||
nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, script_line), unique=True)
|
||||
|
||||
|
||||
# Cleanup again, including NZB files
|
||||
if all_ok:
|
||||
cleanup_list(workdir_complete, False)
|
||||
@@ -687,7 +686,7 @@ def parring(nzo, workdir):
|
||||
par2_filename = nzf_path
|
||||
|
||||
# Rename so handle_par2() picks it up
|
||||
newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol+1)
|
||||
newpath = '%s.vol%d+%d.par2' % (par2_filename, par2_vol, par2_vol + 1)
|
||||
renamer(nzf_path, newpath)
|
||||
nzf_try.filename = os.path.split(newpath)[1]
|
||||
|
||||
@@ -800,7 +799,7 @@ def try_rar_check(nzo, workdir, setname):
|
||||
return True
|
||||
except rarfile.Error as e:
|
||||
nzo.fail_msg = T('RAR files failed to verify')
|
||||
msg = T('[%s] RAR-based verification failed: %s') % (unicoder(os.path.basename(rars[0])), unicoder(e.message.replace('\r\n',' ')))
|
||||
msg = T('[%s] RAR-based verification failed: %s') % (unicoder(os.path.basename(rars[0])), unicoder(e.message.replace('\r\n', ' ')))
|
||||
nzo.set_unpack_info('Repair', msg, set=setname)
|
||||
logging.info(msg)
|
||||
return False
|
||||
|
||||
@@ -133,9 +133,6 @@ class Rating(Thread):
|
||||
self.ratings = {}
|
||||
self.nzo_indexer_map = {}
|
||||
Thread.__init__(self)
|
||||
if not sabnzbd.HAVE_SSL:
|
||||
logging.warning(T('Ratings server requires secure connection'))
|
||||
self.stop()
|
||||
|
||||
def stop(self):
|
||||
self.shutdown = True
|
||||
@@ -236,7 +233,7 @@ class Rating(Thread):
|
||||
|
||||
@synchronized(RATING_LOCK)
|
||||
def update_auto_flag(self, nzo_id, flag, flag_detail=None):
|
||||
if not flag or not cfg.rating_enable() or not cfg.rating_feedback() or (nzo_id not in self.nzo_indexer_map):
|
||||
if not flag or not cfg.rating_enable() or (nzo_id not in self.nzo_indexer_map):
|
||||
return
|
||||
logging.debug('Updating auto flag (%s: %s)', nzo_id, flag)
|
||||
indexer_id = self.nzo_indexer_map[nzo_id]
|
||||
|
||||
@@ -131,6 +131,7 @@ class SABTrayThread(SysTrayIconThread):
|
||||
# menu handler
|
||||
def restart(self, icon):
|
||||
self.hover_text = self.txt_restart
|
||||
logging.info('Restart requested by tray')
|
||||
sabnzbd.trigger_restart()
|
||||
|
||||
# menu handler
|
||||
@@ -157,6 +158,7 @@ class SABTrayThread(SysTrayIconThread):
|
||||
# menu handler - adapted from interface.py
|
||||
def shutdown(self, icon):
|
||||
self.hover_text = self.txt_shutdown
|
||||
logging.info('Shutdown requested by tray')
|
||||
sabnzbd.halt()
|
||||
cherrypy.engine.exit()
|
||||
sabnzbd.SABSTOP = True
|
||||
|
||||
@@ -70,10 +70,15 @@ def init():
|
||||
for schedule in cfg.schedules():
|
||||
arguments = []
|
||||
argument_list = None
|
||||
|
||||
try:
|
||||
m, h, d, action_name = schedule.split()
|
||||
enabled, m, h, d, action_name = schedule.split()
|
||||
except:
|
||||
m, h, d, action_name, argument_list = schedule.split(None, 4)
|
||||
try:
|
||||
enabled, m, h, d, action_name, argument_list = schedule.split(None, 5)
|
||||
except:
|
||||
continue # Bad schedule, ignore
|
||||
|
||||
if argument_list:
|
||||
arguments = argument_list.split()
|
||||
|
||||
@@ -152,10 +157,12 @@ def init():
|
||||
logging.warning(T('Unknown action: %s'), action_name)
|
||||
continue
|
||||
|
||||
logging.debug("scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
|
||||
__SCHED.add_daytime_task(action, action_name, d, None, (h, m),
|
||||
kronos.method.sequential, arguments, None)
|
||||
if enabled == '1':
|
||||
logging.debug("Scheduling %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
__SCHED.add_daytime_task(action, action_name, d, None, (h, m),
|
||||
kronos.method.sequential, arguments, None)
|
||||
else:
|
||||
logging.debug("Skipping %s(%s) on days %s at %02d:%02d", action_name, arguments, d, h, m)
|
||||
|
||||
# Set Guardian interval to 30 seconds
|
||||
__SCHED.add_interval_task(sched_guardian, "Guardian", 15, 30,
|
||||
@@ -259,10 +266,10 @@ def sort_schedules(all_events, now=None):
|
||||
for schedule in cfg.schedules():
|
||||
parms = None
|
||||
try:
|
||||
m, h, dd, action, parms = schedule.split(None, 4)
|
||||
enabled, m, h, dd, action, parms = schedule.split(None, 5)
|
||||
except:
|
||||
try:
|
||||
m, h, dd, action = schedule.split(None, 3)
|
||||
enabled, m, h, dd, action = schedule.split(None, 4)
|
||||
except:
|
||||
continue # Bad schedule, ignore
|
||||
action = action.strip()
|
||||
@@ -277,7 +284,7 @@ def sort_schedules(all_events, now=None):
|
||||
# Expired event will occur again after a week
|
||||
dif = dif + week_min
|
||||
|
||||
events.append((dif, action, parms, schedule))
|
||||
events.append((dif, action, parms, schedule, enabled))
|
||||
if not all_events:
|
||||
break
|
||||
|
||||
@@ -302,6 +309,11 @@ def analyse(was_paused=False, priority=None):
|
||||
for ev in sort_schedules(all_events=True):
|
||||
if priority is None:
|
||||
logging.debug('Schedule check result = %s', ev)
|
||||
|
||||
# Skip if disabled
|
||||
if ev[4] == '0':
|
||||
continue
|
||||
|
||||
action = ev[1]
|
||||
try:
|
||||
value = ev[2]
|
||||
|
||||
@@ -312,6 +312,7 @@ SKIN_TEXT = {
|
||||
'opt-enable_unzip' : TT('Enable Unzip'),
|
||||
'opt-enable_7zip' : TT('Enable 7zip'),
|
||||
'explain-nosslcontext' : TT('Secure (SSL) connections from SABnzbd to newsservers and HTTPS websites will be encrypted, however, validating a server\'s identity using its certificates is not possible. Python 2.7.9 or above, OpenSSL 1.0.2 or above and up-to-date local CA certificates are required.'),
|
||||
'explain-getpar2mt': TT('Speed up repairs by installing multicore Par2, it is available for many platforms.'),
|
||||
'version' : TT('Version'),
|
||||
'uptime' : TT('Uptime'),
|
||||
'backup' : TT('Backup'), #: Indicates that server is Backup server in Status page
|
||||
@@ -327,8 +328,6 @@ SKIN_TEXT = {
|
||||
'explain-port' : TT('Port SABnzbd should listen on.'),
|
||||
'opt-web_dir' : TT('Web Interface'),
|
||||
'explain-web_dir' : TT('Choose a skin.'),
|
||||
'opt-web_dir2' : TT('Secondary Web Interface'),
|
||||
'explain-web_dir2' : TT('Activate an alternative skin.'),
|
||||
'opt-web_username' : TT('SABnzbd Username'),
|
||||
'explain-web_username' : TT('Optional authentication username.'),
|
||||
'opt-web_password' : TT('SABnzbd Password'),
|
||||
@@ -414,16 +413,12 @@ SKIN_TEXT = {
|
||||
'base-folder' : TT('Default Base Folder'),
|
||||
|
||||
# Config->Switches
|
||||
'opt-quick_check' : TT('Enable Quick Check'),
|
||||
'explain-quick_check' : TT('Skip par2 checking when files are 100% valid.'),
|
||||
'opt-enable_all_par' : TT('Download all par2 files'),
|
||||
'explain-enable_all_par' : TT('This prevents multiple repair runs. QuickCheck on: download all par2 files when needed. QuickCheck off: always download all par2 files.'),
|
||||
'explain-enable_all_par' : TT('This prevents multiple repair runs by downloading all par2 files when needed.'),
|
||||
'opt-enable_recursive' : TT('Enable recursive unpacking'),
|
||||
'explain-enable_recursive' : TT('Unpack archives (rar, zip, 7z) within archives.'),
|
||||
'opt-flat_unpack' : TT('Ignore any folders inside archives'),
|
||||
'explain-flat_unpack' : TT('All files will go into a single folder.'),
|
||||
'opt-overwrite_files' : TT('When unpacking, overwrite existing files'),
|
||||
'explain-overwrite_files' : TT('This will overwrite existing files instead of creating an alternative name.'),
|
||||
'opt-top_only' : TT('Only Get Articles for Top of Queue'),
|
||||
'explain-top_only' : TT('Enable for less memory usage. Disable to prevent slow jobs from blocking the queue.'),
|
||||
'opt-safe_postproc' : TT('Post-Process Only Verified Jobs'),
|
||||
@@ -449,8 +444,6 @@ SKIN_TEXT = {
|
||||
'explain-script_can_fail' : TT('When the user script returns a non-zero exit code, the job will be flagged as failed.'),
|
||||
'opt-new_nzb_on_failure' : TT('On failure, try alternative NZB'),
|
||||
'explain-new_nzb_on_failure' : TT('Some servers provide an alternative NZB when a download fails.'),
|
||||
'opt-enable_meta' : TT('Use tags from indexer'),
|
||||
'explain-enable_meta' : TT('Use tags from indexer for title, season, episode, etc. Otherwise all naming is derived from the NZB name.'),
|
||||
'opt-folder_rename' : TT('Enable folder rename'),
|
||||
'explain-folder_rename' : TT('Use temporary names during post processing. Disable when your system doesn\'t handle that properly.'),
|
||||
'opt-pre_script' : TT('Pre-queue user script'),
|
||||
@@ -516,12 +509,9 @@ SKIN_TEXT = {
|
||||
'opt-fail_hopeless_jobs' : TT('Abort jobs that cannot be completed'),
|
||||
'explain-fail_hopeless_jobs' : TT('When during download it becomes clear that too much data is missing, abort the job'),
|
||||
'opt-rating_enable' : TT('Enable Indexer Integration'),
|
||||
'explain-rating_enable' : TT('Enhanced functionality including ratings and extra status information is available when connected to OZnzb indexer.'),
|
||||
'explain-rating_enable' : TT('Indexers can supply rating information when a job is added and SABnzbd can report to the indexer if a job couldn\'t be completed. Depending on your indexer, the API key setting can be left blank.'),
|
||||
'opt-rating_api_key' : TT('API Key'),
|
||||
'opt-rating_host' : TT('Server address'),
|
||||
'explain-rating_api_key' : TT('This key provides identity to indexer. Check your profile on the indexer\'s website.'),
|
||||
'opt-rating_feedback' : TT('Automatic Feedback'),
|
||||
'explain-rating_feedback' : TT('Send automatically calculated validation results for downloads to indexer.'),
|
||||
'opt-rating_filter_enable' : TT('Enable Filtering'),
|
||||
'explain-rating_filter_enable' : TT('Action downloads according to filtering rules.'),
|
||||
'opt-rating_filter_abort_if' : TT('Abort If'),
|
||||
@@ -553,9 +543,9 @@ SKIN_TEXT = {
|
||||
'srv-ssl' : TT('SSL'), #: Server SSL tickbox
|
||||
'explain-ssl' : TT('Secure connection to server'), #: Server SSL tickbox
|
||||
'opt-ssl_verify' : TT('Certificate verification'),
|
||||
'explain-ssl_verify' : TT('Default: when SSL is enabled, verify the identity of the server using its certificates. Strict: verify and enforce matching hostname.'),
|
||||
'explain-ssl_verify' : TT('Minimal: when SSL is enabled, verify the identity of the server using its certificates. Strict: verify and enforce matching hostname.'),
|
||||
'ssl_verify-disabled' : TT('Disabled'),
|
||||
'ssl_verify-normal' : TT('Default'),
|
||||
'ssl_verify-normal' : TT('Minimal'),
|
||||
'ssl_verify-strict' : TT('Strict'),
|
||||
'srv-priority' : TT('Priority'), #: Server priority
|
||||
'explain-svrprio' : TT('0 is highest priority, 100 is the lowest priority'), #: Explain server priority
|
||||
|
||||
sabnzbd/utils/checkdir.py (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""
|
||||
Functions to check if the path filesystem uses FAT
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
debug = False
|
||||
|
||||
|
||||
def isFAT(dir):
|
||||
|
||||
# Check if "dir" is on FAT. FAT considered harmful (for big files)
|
||||
# Works for Linux, Windows, MacOS
|
||||
# NB: On Windows, full path with drive letter is needed!
|
||||
|
||||
FAT = False # default: not FAT
|
||||
# We're dealing with OS calls, so put everything in a try/except, just in case:
|
||||
try:
|
||||
if 'linux' in sys.platform:
|
||||
# On Linux:
|
||||
# df -T /home/sander/weg
|
||||
|
||||
'''
|
||||
Example output of a 500GB external USB drive formatted with FAT:
|
||||
$ df -T /media/sander/INTENSO
|
||||
Filesystem Type 1K-blocks Used Available Use% Mounted on
|
||||
/dev/sda1 vfat 488263616 163545248 324718368 34% /media/sander/INTENSO
|
||||
'''
|
||||
|
||||
cmd = "df -T " + dir + " 2>&1"
|
||||
for thisline in os.popen(cmd).readlines():
|
||||
#print thisline
|
||||
if thisline.find('/') == 0:
|
||||
# Starts with /, so a real, local device
|
||||
fstype = thisline.split()[1]
|
||||
if debug: print "File system type:", fstype
|
||||
if fstype.lower().find('fat') >= 0:
|
||||
FAT = True
|
||||
if debug: print "FAT found"
|
||||
break
|
||||
elif 'win32' in sys.platform:
|
||||
import win32api
|
||||
if '?' in dir:
|
||||
# Remove \\?\ or \\?\UNC\ prefix from Windows path
|
||||
dir = dir.replace(u'\\\\?\\UNC\\', u'\\\\', 1).replace(u'\\\\?\\', u'', 1)
|
||||
try:
|
||||
result = win32api.GetVolumeInformation(os.path.splitdrive(dir)[0])
|
||||
if debug: print result
|
||||
if(result[4].startswith("FAT")):
|
||||
FAT = True
|
||||
except:
|
||||
pass
|
||||
elif 'darwin' in sys.platform:
|
||||
# MacOS formerly known as OSX
|
||||
'''
|
||||
MacOS needs a two-step approach:
|
||||
|
||||
# First: directory => device
|
||||
server:~ sander$ df /Volumes/CARTUNES/Tuna/
|
||||
Filesystem 512-blocks Used Available Capacity iused ifree %iused Mounted on
|
||||
/dev/disk9s1 120815744 108840000 11975744 91% 0 0 100% /Volumes/CARTUNES
|
||||
|
||||
# Then: device => filesystem type
|
||||
server:~ sander$ mount | grep /dev/disk9s1
|
||||
/dev/disk9s1 on /Volumes/CARTUNES (msdos, local, nodev, nosuid, noowners)
|
||||
|
||||
|
||||
'''
|
||||
dfcmd = "df " + dir
|
||||
device = ''
|
||||
for thisline in os.popen(dfcmd).readlines():
|
||||
if thisline.find('/')==0:
|
||||
if debug: print thisline
|
||||
# Starts with /, so a real, local device
|
||||
device = thisline.split()[0]
|
||||
mountcmd = "mount | grep " + device
|
||||
mountoutput = os.popen(mountcmd).readline().strip()
|
||||
if debug: print mountoutput
|
||||
if 'msdos' in mountoutput.split('(')[1]:
|
||||
FAT = True
|
||||
break
|
||||
|
||||
except:
|
||||
pass
|
||||
return FAT
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if debug: print sys.platform
|
||||
try:
|
||||
dir = sys.argv[1]
|
||||
except:
|
||||
print "Specify dir on the command line"
|
||||
sys.exit(0)
|
||||
if isFAT(dir):
|
||||
print dir, "is on FAT"
|
||||
else:
|
||||
print dir, "is not on FAT"
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ def diskspeedmeasure(dirname):
|
||||
# On Windows, this crazy action is needed to
|
||||
# avoid a "permission denied" error
|
||||
try:
|
||||
os.system('echo Hi >%s' % filename)
|
||||
os.popen('echo Hi >%s' % filename)
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
@@ -3,8 +3,7 @@ import platform, subprocess
|
||||
|
||||
def getcpu():
|
||||
# find the CPU name (which needs a different method per OS), and return it
|
||||
# return None if none found
|
||||
# works on Linux, MacOS (aka OSX), Windows
|
||||
# If none found, return platform.platform().
|
||||
|
||||
cputype = None
|
||||
|
||||
@@ -25,15 +24,17 @@ def getcpu():
|
||||
# model name : Intel(R) Xeon(R) CPU E5335 @ 2.00GHz
|
||||
cputype = myline.split(":", 1)[1] # get everything after the first ":"
|
||||
break # we're done
|
||||
|
||||
except:
|
||||
# An exception, maybe due to a subprocess call gone wrong
|
||||
cputype = "Unknown"
|
||||
pass
|
||||
|
||||
if cputype:
|
||||
# remove unnneeded space:
|
||||
# OK, found. Remove unneeded spaces:
|
||||
cputype = " ".join(cputype.split())
|
||||
else:
|
||||
# Not found, so let's fall back to platform()
|
||||
cputype = platform.platform()
|
||||
|
||||
return cputype
|
||||
|
||||
|
||||
|
||||
@@ -1,856 +0,0 @@
# 2005/08/28
# v1.4.0
# listquote.py

# Lists 'n' Quotes
# Handling lists and quoted strings
# Can be used for parsing/creating lists - or lines in a CSV file
# And also quoting or unquoting elements.

# Homepage : http://www.voidspace.org.uk/python/modules.shtml

# Copyright Michael Foord, 2004 & 2005.
# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml

# For information about bugfixes, updates and support, please join the Pythonutils mailing list.
# http://groups.google.com/group/pythonutils/
# Comments, suggestions and bug reports welcome.
# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# E-mail fuzzyman@voidspace.org.uk

"""
Having written modules to handle turning a string representation of a list back
into a list (including nested lists) and also a very simple CSV parser, I
realised I needed a more solid set of functions for handling lists (comma
delimited lines) and quoting/unquoting elements of lists.

The test stuff provides useful examples of how the functions work.
"""

# Pre-2.3 workaround for basestring.
try:
basestring
except NameError:
basestring = (str, unicode)

import re
inquotes = re.compile(r'''\s*(".*?"|'.*?')(.*)''')
badchars = re.compile(r'''^[^'," \[\]\(\)#]+$''')
##commented_line = re.compile(r'''\s*([^#]*)\s*(#.*)''')
paramfinder = re.compile(r'''(?:'.*?')|(?:".*?")|(?:[^'",\s][^,]*)''')
unquoted = re.compile(r'''
([^\#,"'\(\)\[\]][^\#,\]\)]*) # value
\s* # whitespace - XXX not caught
([\#,\)\]].*)? # rest of the line
$''', re.VERBOSE)

__all__ = [
'elem_quote',
'unquote',
'ListQuoteError',
'QuoteError',
'UnQuoteError',
'BadLineError',
'CommentError',
'quote_escape',
'quote_unescape',
'simplelist',
'LineParser',
'lineparse',
'csvread',
'csvwrite',
'list_stringify',
'makelist'
]

class ListQuoteError(SyntaxError):
"""Base class for errors raised by the listquote module."""

class QuoteError(ListQuoteError):
"""This value can't be quoted."""

class UnQuoteError(ListQuoteError):
"""The value is badly quoted."""

class BadLineError(ListQuoteError):
"""A line is badly built."""

class CommentError(BadLineError):
"""A line contains a disallowed comment."""

class CSVError(ListQuoteError):
"""The CSV File contained errors."""

#################################################################
# functions for quoting and unquoting

def elem_quote(member, nonquote=True, stringify=False, encoding=None):
"""
Simple method to add the most appropriate quote to an element - either single
quotes or double quotes.

If member contains ``\n`` a ``QuoteError`` is raised - multiline values
can't be quoted by elem_quote.

If ``nonquote`` is set to ``True`` (the default), then if member contains none
of ``'," []()#;`` then it isn't quoted at all.

If member contains both single quotes *and* double quotes then all double
quotes (``"``) will be escaped as ``&mjf-quot;`` and member will then be quoted
with double quotes.

If ``stringify`` is set to ``True`` (the default is ``False``) then non string
(unicode or byte-string) values will be first converted to strings using the
``str`` function. Otherwise elem_quote raises a ``TypeError``.

If ``encoding`` is not ``None`` and member is a byte string, then it will be
decoded into unicode using this encoding.

>>> elem_quote('hello')
'hello'
>>> elem_quote('hello', nonquote=False)
'"hello"'
>>> elem_quote('"hello"')
'\\'"hello"\\''
>>> elem_quote(3)
Traceback (most recent call last):
TypeError: Can only quote strings. "3"
>>> elem_quote(3, stringify=True)
'3'
>>> elem_quote('hello', encoding='ascii')
u'hello'
>>> elem_quote('\\n')
Traceback (most recent call last):
QuoteError: Multiline values can't be quoted.
"
"
"""
if not isinstance(member, basestring):
if stringify:
member = str(member)
else:
# FIXME: is this the appropriate error message ?
raise TypeError('Can only quote strings. "%s"' % str(member))
if encoding and isinstance(member, str):
# from string to unicode
member = unicode(member, encoding)
if '\n' in member:
raise QuoteError('Multiline values can\'t be quoted.\n"%s"' % str(member))
#
if nonquote and badchars.match(member) is not None:
return member
# this ordering of tests determines which quote character will be used in
# preference - here we have \" first...
elif member.find('"') == -1:
return '"%s"' % member
# but we will use either... which may not suit some people
elif member.find("'") == -1:
return "'%s'" % member
else:
raise QuoteError('Value can\'t be quoted : "%s"' % member)

def unquote(inline, fullquote=True, retain=False):
"""
Unquote a value.

If the value isn't quoted it returns the value.

If the value is badly quoted it raises ``UnQuoteError``.

If retain is ``True`` (default is ``False``) then the quotes are left
around the value (but leading or trailing whitespace will have been
removed).

If fullquote is ``False`` (default is ``True``) then unquote will only
unquote the first part of the ``inline``. If there is anything after the
quoted element, this will be returned as well (instead of raising an
error).

In this case the return value is ``(value, rest)``.

>>> unquote('hello')
'hello'
>>> unquote('"hello"')
'hello'
>>> unquote('"hello')
Traceback (most recent call last):
UnQuoteError: Value is badly quoted: ""hello"
>>> unquote('"hello" fish')
Traceback (most recent call last):
UnQuoteError: Value is badly quoted: ""hello" fish"
>>> unquote("'hello'", retain=True)
"'hello'"
>>> unquote('"hello" fish', fullquote=False)
('hello', ' fish')
"""
mat = inquotes.match(inline)
if mat is None:
if inline.strip()[0] not in '\'\"': # not quoted
return inline
else:
# badly quoted
raise UnQuoteError('Value is badly quoted: "%s"' % inline)
quoted, rest = mat.groups()
if fullquote and rest.strip():
# badly quoted
raise UnQuoteError('Value is badly quoted: "%s"' % inline)
if not retain:
quoted = quoted[1:-1]
if not fullquote:
return quoted, rest
else:
return quoted

def quote_escape(value, lf='&mjf-lf;', quot='&mjf-quot;'):
"""
Escape a string so that it can safely be quoted. You should use this if the
value to be quoted *may* contain line-feeds or both single quotes and double
quotes.

If the value contains ``\n`` then it will be escaped using ``lf``. By
default this is ``&mjf-lf;``.

If the value contains single quotes *and* double quotes, then all double
quotes will be escaped using ``quot``. By default this is ``&mjf-quot;``.

>>> quote_escape('hello')
'hello'
>>> quote_escape('hello\\n')
'hello&mjf-lf;'
>>> quote_escape('hello"')
'hello"'
>>> quote_escape('hello"\\'')
"hello&mjf-quot;'"
>>> quote_escape('hello"\\'\\n', '&fish;', '&wobble;')
"hello&wobble;'&fish;"
"""
if '\n' in value:
value = value.replace('\n', lf)
if '\'' in value and '\"' in value:
value = value.replace('"', quot)
return value

def quote_unescape(value, lf='&mjf-lf;', quot='&mjf-quot;'):
"""
Unescape a string escaped by ``quote_escape``.

If it was escaped using anything other than the defaults for ``lf`` and
``quot`` you must pass them to this function.

>>> quote_unescape("hello&wobble;'&fish;", '&fish;', '&wobble;')
'hello"\\'\\n'
>>> quote_unescape('hello')
'hello'
>>> quote_unescape('hello&mjf-lf;')
'hello\\n'
>>> quote_unescape("'hello'")
"'hello'"
>>> quote_unescape('hello"')
'hello"'
>>> quote_unescape("hello&mjf-quot;'")
'hello"\\''
>>> quote_unescape("hello&wobble;'&fish;", '&fish;', '&wobble;')
'hello"\\'\\n'
"""
return value.replace(lf, '\n').replace(quot, '"')

def simplelist(inline):
"""
Parse a string to a list.

A simple regex that extracts quoted items from a list.

It retains quotes around elements. (So unquote each element)

>>> simplelist('''hello, goodbye, 'title', "name", "I can't"''')
['hello', 'goodbye', "'title'", '"name"', '"I can\\'t"']

FIXME: This doesn't work fully (allows some badly formed lists):
e.g.
>>> simplelist('hello, fish, "wobble" bottom hooray')
['hello', 'fish', '"wobble"', 'bottom hooray']
"""
return paramfinder.findall(inline)

##############################################
# LineParser - a multi purpose line parser
# handles lines with comma separated values on it, followed by a comment
# correctly handles quoting
# *and* can handle nested lists - marked between '[...]' or '(...)'
# See the docstring for how this works
# by default it returns a (list, comment) tuple !
# There are several keyword arguments that control how LineParser works.

class LineParser(object):
"""An object to parse nested lists from strings."""

liststart = { '[' : ']', '(' : ')' }
quotes = ['\'', '"']

def __init__(self, options=None, **keywargs):
"""Initialise the LineParser."""
self.reset(options, **keywargs)

def reset(self, options=None, **keywargs):
"""Reset the parser with the specified options."""
if options is None:
options = {}
options.update(keywargs)
#
defaults = {
'recursive': True,
'comment': True,
'retain': False,
'force_list': False,
'csv': False
}
defaults.update(options)
if defaults['csv']:
defaults.update({
'recursive': False,
'force_list': True,
'comment': False,
})
# check all the options are valid
for entry in defaults.keys():
if entry not in ['comment',
'retain',
'csv',
'recursive',
'force_list']:
raise TypeError, ("'%s' is an invalid keyword argument for "
"this function" % entry)
#
self.recursive = defaults['recursive']
self.comment = defaults['comment']
self.retain = defaults['retain']
self.force_list = defaults['force_list']

def feed(self, inline, endchar=None):
"""
Parse a single line (or fragment).

Uses the options set in the parser object.

Can parse lists - including nested lists. (If ``recursive`` is
``False`` then nested lists will cause a ``BadLineError``).

Return value depends on options.

If ``comment`` is ``False`` it returns ``outvalue``

If ``comment`` is ``True`` it returns ``(outvalue, comment)``. (Even if
comment is just ``''``).

If ``force_list`` is ``False`` then ``outvalue`` may be a list or a
single item.

If ``force_list`` is ``True`` then ``outvalue`` will always be a list -
even if it has just one member.

List syntax :

* Comma separated lines ``a, b, c, d``
* Lists can optionally be between square or ordinary brackets
- ``[a, b, c, d]``
- ``(a, b, c, d)``
* Nested lists *must* be between brackets - ``a, [a, b, c, d], c``
* A single element list can be shown by a trailing quote - ``a,``
* An empty list is shown by ``()`` or ``[]``

Elements can be quoted with single or double quotes (but can't contain
both).

The line can optionally end with a comment (preeded by a '#').
This depends on the ``comment`` attribute.

If the line is badly built then this method will raise one of : ::

CommentError, BadLineError, UnQuoteError

Using the ``csv`` option is the same as setting : ::

'recursive': False
'force_list': True
'comment': False
"""
# preserve the original line
# for error messages
if endchar is None:
self.origline = inline
inline = inline.lstrip()
#
outlist = []
comma_needed = False
found_comma = False
while inline:
# NOTE: this sort of operation would be quicker
# with lists - but then can't use regexes
thischar = inline[0]
if thischar == '#':
# reached a comment
# end of the line...
break
#
if thischar == endchar:
return outlist, inline[1:]
#
if comma_needed:
if thischar == ',':
inline = inline[1:].lstrip()
comma_needed = False
found_comma = True
continue
raise BadLineError('Line is badly built :\n%s' % self.origline)
#
try:
# the character that marks the end of the list
listend = self.liststart[thischar]
except KeyError:
pass
else:
if not self.recursive and endchar is not None:
raise BadLineError('Line is badly built :\n%s' % self.origline)
newlist, inline = self.feed(inline[1:], endchar=listend)
outlist.append(newlist)
inline = inline.lstrip()
comma_needed = True
continue
#
if thischar in self.quotes:
# this might raise an error
# FIXME: trap the error and raise a more appropriate one ?
element, inline = unquote(inline, fullquote=False,
retain=self.retain)
inline = inline.lstrip()
outlist.append(element)
comma_needed = True
continue
#
# must be an unquoted element
mat = unquoted.match(inline)
if mat is not None:
# FIXME: if the regex was better we wouldn't need an rstrip
element = mat.group(1).rstrip()
# group 2 will be ``None`` if we reach the end of the line
inline = mat.group(2) or ''
outlist.append(element)
comma_needed = True
continue
# or it's a badly built line
raise BadLineError('Line is badly built :\n%s' % self.origline)
#
# if we've been called recursively
# we shouldn't have got this far
if endchar is not None:
raise BadLineError('Line is badly built :\n%s' % self.origline)
#
if not found_comma:
# if we didn't find a comma
# the value could be a nested list
if outlist:
outlist = outlist[0]
else:
outlist = ''
if self.force_list and not isinstance(outlist, list):
if outlist:
outlist = [outlist]
else:
outlist = []
if not self.comment:
if inline:
raise CommentError('Comment not allowed :\n%s' % self.origline)
return outlist
return outlist, inline

def lineparse(inline, options=None, **keywargs):
"""
A compatibility function that mimics the old lineparse.

Also more convenient for single line use.

Note: It still uses the new ``LineParser`` - and so takes the same
keyword arguments as that.

>>> lineparse('''"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment''')
(['hello', 'goodbye', "I can't do that", 'You "can" !'], '# a comment')
>>> lineparse('''"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment''', comment=False)
Traceback (most recent call last):
CommentError: Comment not allowed :
"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment
>>> lineparse('''"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment''', recursive=False)
(['hello', 'goodbye', "I can't do that", 'You "can" !'], '# a comment')
>>> lineparse('''"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment''', csv=True)
Traceback (most recent call last):
CommentError: Comment not allowed :
"hello", 'goodbye', "I can't do that", 'You "can" !' # a comment
>>> lineparse('''"hello", 'goodbye', "I can't do that", 'You "can" !' ''', comment=False)
['hello', 'goodbye', "I can't do that", 'You "can" !']
>>> lineparse('')
('', '')
>>> lineparse('', force_list=True)
([], '')
>>> lineparse('[]')
([], '')
>>> lineparse('()')
([], '')
>>> lineparse('()', force_list=True)
([], '')
>>> lineparse('1,')
(['1'], '')
>>> lineparse('"Yo"')
('Yo', '')
>>> lineparse('"Yo"', force_list=True)
(['Yo'], '')
>>> lineparse('''h, i, j, (h, i, ['hello', "f"], [], ([]),), k''')
(['h', 'i', 'j', ['h', 'i', ['hello', 'f'], [], [[]]], 'k'], '')
>>> lineparse('''h, i, j, (h, i, ['hello', "f"], [], ([]),), k''', recursive=False)
Traceback (most recent call last):
BadLineError: Line is badly built :
h, i, j, (h, i, ['hello', "f"], [], ([]),), k
>>> lineparse('fish#dog')
('fish', '#dog')
>>> lineparse('"fish"#dog')
('fish', '#dog')
>>> lineparse('(((())))')
([[[[]]]], '')
>>> lineparse('((((,))))')
Traceback (most recent call last):
BadLineError: Line is badly built :
((((,))))
>>> lineparse('hi, ()')
(['hi', []], '')
>>> lineparse('"hello", "",')
(['hello', ''], '')
>>> lineparse('"hello", ,')
Traceback (most recent call last):
BadLineError: Line is badly built :
"hello", ,
>>> lineparse('"hello", ["hi", ""], ""')
(['hello', ['hi', ''], ''], '')
>>> lineparse('''"member 1", "member 2", ["nest 1", ("nest 2", 'nest 2b', ['nest 3', 'value'], nest 2c), nest1b]''')
(['member 1', 'member 2', ['nest 1', ['nest 2', 'nest 2b', ['nest 3', 'value'], 'nest 2c'], 'nest1b']], '')
>>> lineparse('''"member 1", "member 2", ["nest 1", ("nest 2", 'nest 2b', ['nest 3', 'value'], nest 2c), nest1b]]''')
Traceback (most recent call last):
BadLineError: Line is badly built :
"member 1", "member 2", ["nest 1", ("nest 2", 'nest 2b', ['nest 3', 'value'], nest 2c), nest1b]]
"""
p = LineParser(options, **keywargs)
return p.feed(inline)

############################################################################
# a couple of functions to help build lists

def list_stringify(inlist):
"""
Recursively rebuilds a list - making sure all the members are strings.

Can take any iterable or a sequence as the argument and always
returns a list.

Useful before writing out lists.

Used by makelist if stringify is set.

Uses the ``str`` function for stringification.

Every element will be a string or a unicode object.

Doesn't handle decoding strings into unicode objects (or vice-versa).

>>> list_stringify([2, 2, 2, 2, (3, 3, 2.9)])
['2', '2', '2', '2', ['3', '3', '2.9']]
>>> list_stringify(None)
Traceback (most recent call last):
TypeError: 'NoneType' object is not iterable
>>> list_stringify([])
[]

FIXME: can receive any iterable - e.g. a sequence
>>> list_stringify('')
[]
>>> list_stringify('Hello There')
['H', 'e', 'l', 'l', 'o', ' ', 'T', 'h', 'e', 'r', 'e']
"""
outlist = []
for item in inlist:
if not isinstance(item, (tuple, list)):
if not isinstance(item, basestring):
item = str(item)
else:
item = list_stringify(item)
outlist.append(item)
return outlist


def makelist(inlist, listchar='', stringify=False, escape=False, encoding=None):
"""
Given a list - turn it into a string that represents that list. (Suitable
for parsing by ``LineParser``).

listchar should be ``'['``, ``'('`` or ``''``. This is the type of bracket
used to enclose the list. (``''`` meaning no bracket of course).

If you have nested lists and listchar is ``''``, makelist will
automatically use ``'['`` for the nested lists.

If stringify is ``True`` (default is ``False``) makelist will stringify the
inlist first (using ``list_stringify``).

If ``escape`` is ``True`` (default is ``False``) makelist will call
``quote_escape`` on each element before passing them to ``elem_quote`` to
be quoted.

If encoding keyword is not ``None``, all strings are decoded to unicode
with the specified encoding. Each item will then be a unicode object
instead of a string.

>>> makelist([])
'[]'
>>> makelist(['a', 'b', 'I can\\'t do it', 'Yes you "can" !'])
'a, b, "I can\\'t do it", \\'Yes you "can" !\\''
>>> makelist([3, 4, 5, [6, 7, 8]], stringify=True)
'3, 4, 5, [6, 7, 8]'
>>> makelist([3, 4, 5, [6, 7, 8]])
Traceback (most recent call last):
TypeError: Can only quote strings. "3"
>>> makelist(['a', 'b', 'c', ('d', 'e'), ('f', 'g')], listchar='(')
'(a, b, c, (d, e), (f, g))'
>>> makelist(['hi\\n', 'Quote "heck\\''], escape=True)
'hi&mjf-lf;, "Quote &mjf-quot;heck\\'"'
>>> makelist(['a', 'b', 'c', ('d', 'e'), ('f', 'g')], encoding='UTF8')
u'a, b, c, [d, e], [f, g]'
"""
if stringify:
inlist = list_stringify(inlist)
listdict = {'[' : '[%s]', '(' : '(%s)', '' : '%s'}
outline = []
# this makes '[' the default for empty or single value lists
if len(inlist) < 2:
listchar = listchar or '['
for item in inlist:
if not isinstance(item, (list, tuple)):
if escape:
item = quote_escape(item)
outline.append(elem_quote(item, encoding=encoding))
else:
# recursive for nested lists
outline.append(makelist(item, listchar or '[',
stringify, escape, encoding))
return listdict[listchar] % (', '.join(outline))

############################################################################
# CSV functions
# csvread, csvwrite

def csvread(infile):
"""
Given an infile as an iterable, return the CSV as a list of lists.

infile can be an open file object or a list of lines.

If any of the lines are badly built then a ``CSVError`` will be raised.
This has a ``csv`` attribute - which is a reference to the parsed CSV.
Every line that couldn't be parsed will have ``[]`` for it's entry.

The error *also* has an ``errors`` attribute. This is a list of all the
errors raised. Error in this will have an ``index`` attribute, which is the
line number, and a ``line`` attribute - which is the actual line that
caused the error.

Example of usage :

.. raw:: html

{+coloring}

handle = open(filename)
# remove the trailing '\n' from each line
the_file = [line.rstrip('\n') for line in handle.readlines()]
csv = csvread(the_file)

{-coloring}

>>> a = '''"object 1", 'object 2', object 3
... test 1 , "test 2" ,'test 3'
... 'obj 1',obj 2,"obj 3"'''
>>> csvread(a.splitlines())
[['object 1', 'object 2', 'object 3'], ['test 1', 'test 2', 'test 3'], ['obj 1', 'obj 2', 'obj 3']]
>>> csvread(['object 1,'])
[['object 1']]
>>> try:
...     csvread(['object 1, "hello', 'object 1, # a comment in a csv ?'])
... except CSVError, e:
...     for entry in e.errors:
...         print entry.index, entry
0 Value is badly quoted: ""hello"
1 Comment not allowed :
object 1, # a comment in a csv ?
"""
out_csv = []
errors = []
index = -1
p = LineParser(csv=True)
for line in infile:
index += 1
try:
values = p.feed(line)
except ListQuoteError, e:
values = []
e.line = line
e.index = index
errors.append(e)
#
out_csv.append(values)
#
if errors:
e = CSVError("Parsing CSV failed. See 'errors' attribute.")
e.csv = out_csv
e.errors = errors
raise e
return out_csv

def csvwrite(inlist, stringify=False):
"""
Given a list of lists it turns each entry into a line in a CSV.
(Given a list of lists it returns a list of strings).

The lines will *not* be ``\n`` terminated.

Set stringify to ``True`` (default is ``False``) to convert entries to
strings before creating the line.

If stringify is ``False`` then any non string value will raise a
``TypeError``.

Every member will be quoted using ``elem_quote``, but no escaping is done.

Example of usage :

.. raw:: html

{+coloring}

# escape each entry in each line (optional)
for index in range(len(the_list)):
the_list[index] = [quote_escape(val) for val in the_list[index]]
#
the_file = csvwrite(the_list)
# add a '\n' to each line - ready to write to file
the_file = [line + '\n' for line in the_file]

{-coloring}

>>> csvwrite([['object 1', 'object 2', 'object 3'], ['test 1', 'test 2', 'test 3'], ['obj 1', 'obj 2', 'obj 3']])
['"object 1", "object 2", "object 3"', '"test 1", "test 2", "test 3"', '"obj 1", "obj 2", "obj 3"']
>>> csvwrite([[3, 3, 3]])
Traceback (most recent call last):
TypeError: Can only quote strings. "3"
>>> csvwrite([[3, 3, 3]], True)
['3, 3, 3']
"""
out_list = []
for entry in inlist:
if stringify:
new_entry = []
for val in entry:
if not isinstance(val, basestring):
val = str(val)
new_entry.append(val)
entry = new_entry
this_line = ', '.join([elem_quote(val) for val in entry])
out_list.append(this_line)
return out_list

############################################################################

def _test():
import doctest
doctest.testmod()

if __name__ == "__main__":
_test()


"""
ISSUES/TODO
===========

Fix bug in simplelist

Triple quote multiline values ?

Doesn't allow Python style string escaping (but has '&mjf-quot;' and '&mjf-lf;').

Uses both \' and \" as quotes and sometimes doesn't quote at all - see
elem_quote - may not *always* be compatible with other programs.

Allow space separated lists ? e.g. 10 5 100 20

Lineparser could create tuples.

Allow ',' as an empty list ?

CHANGELOG
=========

2005/08/28 - Version 1.4.0
--------------------------

* Greater use of regular expressions for added speed
* Re-implemented ``lineparse`` as the ``LineParser`` object
* Added doctests
* Custom exceptions
* Changed the behaviour of ``csvread`` and ``csvwrite``
* Removed the CSV ``compare`` function and the ``uncomment`` function
* Only ``'#'`` allowed for comments
* ``elem_quote`` raises exceptions
* Changed behaviour of ``unquote``
* Added ``quote_escape`` and ``quote_unescape``
* Removed the ``uni_conv`` option in the CSV functions

.. note::

These changes are quite extensive. If any of them cause you problems then
let me know. I can provide a workaround in the next release.

2005/06/01 Version 1.3.0
Fixed bug in lineparse handling of empty list members.
Thnks to bug report and fix by Par Pandit <ppandit@yahoo.com>
The 'unquote' function is now regex based.
(bugfix it now doesn't return a tuple if fullquote is 0)
Added the simplelist regex/function.
elem_quote and uncomment use a regex for clarity and speed.
Added a bunch of asserts to the tests.

2005/03/07 Version 1.2.1
makelist improved - better handling of empty or single member lists

2005/02/23 Version 1.2.0
Added uncomment for ConfigObj 3.3.0
Optimised unquote - not a character by character search any more.
lineparse does full '&mjf..;' escape conversions - even when unquote isn't used
makelist and elem_quote takes an 'encoding' keyword for string members to be used to decode strigns to unicode
optimised makelist (including a minor bugfix)
Change to lineparse - it wouldn't allow '[' or '(' inside elements unless they were quoted.

2004/12/04 Version 1.1.2
Changed the license (*again* - now OSI compatible).
Empty values are now quoted by elem_quote.

30-08-04 Version 1.1.1
Removed the unicode hammer in csvread.
Improved docs.

16-08-04 Version 1.1.0
Added handling for non-string elements in elem_quote (optional).
Replaced some old += with lists and ''.join() for speed improvements...
Using basestring and hasattr('__getitem__') tests instead of isinstance(list) and str in a couple of places.
Changed license text.
Made the tests useful.

19-06-04 Version 1.0.0
Seems to work ok. A worthy successor to listparse and csv_s - although not as elegant as it could be.

"""

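The csvread/csvwrite docstrings above form a symmetric pair; a short round-trip sketch for orientation (illustrative only, and it assumes the removed file is still importable as listquote):

from listquote import csvread, csvwrite

rows = [['object 1', "I can't"], ['a', 'b']]
lines = csvwrite(rows)     # elem_quote only adds quotes where they are needed
parsed = csvread(lines)    # each line is parsed by LineParser(csv=True)
assert parsed == rows
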
@@ -24,7 +24,7 @@ import sys
import select

from sabnzbd.newswrapper import NewsWrapper
from sabnzbd.downloader import Server, clues_login, clues_too_many
from sabnzbd.downloader import Server, clues_login, clues_too_many, nntp_to_msg
from sabnzbd.config import get_servers
from sabnzbd.misc import int_conv

@@ -82,13 +82,13 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
nw = NewsWrapper(s, -1, block=True)
nw.init_connect(None)
while not nw.connected:
nw.lines = []
nw.clear_data()
nw.recv_chunk(block=True)
#more ssl related: handle 1/n-1 splitting to prevent Rizzo/Duong-Beast
read_sockets, _, _ = select.select([nw.nntp.sock], [], [], 0.1)
if read_sockets:
nw.recv_chunk(block=True)
nw.finish_connect(nw.lines[0][:3])
nw.finish_connect(nw.status_code)

except socket.timeout, e:
if port != 119 and not ssl:
@@ -116,32 +116,25 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl=
if not username or not password:
nw.nntp.sock.sendall('ARTICLE <test@home>\r\n')
try:
nw.lines = []
nw.clear_data()
nw.recv_chunk(block=True)
except:
return False, unicode(sys.exc_info()[1])

# Could do with making a function for return codes to be used by downloader
try:
code = nw.lines[0][:3]
except IndexError:
code = ''
nw.lines.append('')

if code == '480':
if nw.status_code == '480':
return False, T('Server requires username and password.')

elif code == '100' or code.startswith('2') or code.startswith('4'):
elif nw.status_code == '100' or nw.status_code.startswith('2') or nw.status_code.startswith('4'):
return True, T('Connection Successful!')

elif code == '502' or clues_login(nw.lines[0]):
elif nw.status_code == '502' or clues_login(nntp_to_msg(nw.data)):
return False, T('Authentication failed, check username/password.')

elif clues_too_many(nw.lines[0]):
return False, T('Too many connections, please pause downloading or try again later')

else:
return False, T('Could not determine connection result (%s)') % nw.lines[0]
return False, T('Could not determine connection result (%s)') % nntp_to_msg(nw.data)

# Close the connection
nw.terminate(quit=True)

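The status handling in the hunk above reduces to a small mapping from the first NNTP status code to a verdict. An illustrative condensation (not SABnzbd code; plain strings stand in for the T() translations, and the clues_login/clues_too_many checks are omitted):

def classify_nntp_status(status_code):
    # Mirrors the branches above
    if status_code == '480':
        return False, 'Server requires username and password.'
    if status_code == '100' or status_code.startswith(('2', '4')):
        return True, 'Connection Successful!'
    if status_code == '502':
        return False, 'Authentication failed, check username/password.'
    return False, 'Could not determine connection result (%s)' % status_code
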
@@ -4,5 +4,5 @@

# You MUST use double quotes (so " and not ')

__version__ = "develop"
__version__ = "2.0.0-develop"
__baseline__ = "unknown"

@@ -36,11 +36,10 @@ import sabnzbd.cfg as cfg

class Wizard(object):

def __init__(self, web_dir, root, prim):
def __init__(self, root):
self.__root = root
# Get the path for the folder named wizard
self.__web_dir = sabnzbd.WIZARD_DIR
self.__prim = prim
self.info = {'webdir': sabnzbd.WIZARD_DIR,
'steps': 2,
'version': sabnzbd.__version__,
@@ -96,7 +95,7 @@ class Wizard(object):
info['language'] = cfg.language()
info['active_lang'] = info['language']
info['T'] = Ttemplate
info['have_ssl'] = bool(sabnzbd.HAVE_SSL)
info['have_ssl_context'] = sabnzbd.HAVE_SSL_CONTEXT

servers = config.get_servers()
if not servers:
@@ -106,6 +105,7 @@ class Wizard(object):
info['password'] = ''
info['connections'] = ''
info['ssl'] = 0
info['ssl_verify'] = 2
else:
for server in servers:
# If there are multiple servers, just use the first enabled one
@@ -115,8 +115,8 @@ class Wizard(object):
info['username'] = s.username()
info['password'] = s.password.get_stars()
info['connections'] = s.connections()

info['ssl'] = s.ssl()
info['ssl_verify'] = s.ssl_verify()
if s.enable():
break
template = Template(file=os.path.join(self.__web_dir, 'one.html'),

@@ -130,15 +130,16 @@ if not os.path.exists(PO_DIR):
path, exe = os.path.split(sys.executable)
if os.name == 'nt':
TOOL = os.path.join(path, r'Tools\i18n\pygettext.py')
TOOL = 'python ' + TOOL
else:
TOOL = os.path.join(path, 'pygettext.py')
if not os.path.exists(TOOL):
TOOL = 'pygettext'
if not os.path.exists(TOOL):
TOOL = 'pygettext'


cmd = '%s %s %s' % (TOOL, PARMS, FILES)
print 'Create POT file'
# print cmd
#print cmd
os.system(cmd)

print 'Post-process the POT file'