Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2026-01-06 14:39:41 -05:00)
Compare commits
105 Commits
Commit SHA1s:
c237ddfef4  b543dcb5ac  ccfbb07333  256ccbd6a1  d8d507f110  7b3309649f  9a7a6652e8
db4891748f  3dce2e8908  c91291c315  a2a5a1f8e4  7651f709ad  a565077348  6cf99e7d3a
f730a82005  5449607c1d  c62415abfd  dcbea3057c  91642d16c8  2f2773149d  adaba03f50
58a5e09540  20dc906095  e2f41d3761  ab1372c7fc  e305678cf4  a82df9bf2e  986604f27c
59324c7453  91613a5b37  5ca05fd2c0  4d4045cff4  1f209a42d8  bffbb362db  435eed8818
f86656543a  9c510c6dd1  f81ab3d1c0  d1585c28a9  9c314532c0  853bda5d86  d05e31f7f0
383354871d  2086a217e0  34f3574746  1dfe0b957e  17d14bc3b4  885032e436  ceee95aaf7
bc6b3091eb  4be1a13316  a77327ee7f  aa706012af  f5b6203194  1ced9a54e4  06c7089a77
ee1d864eea  d703338935  e87b24c460  3404ef6516  181897e92b  26a504e3e2  b72ed09011
bb99c0d58e  4516027fdb  e35f2ea3cd  6b79fad626  ac311be430  4fb32bff5f  5fda342a55
e23aab4710  3837d5dace  f61e7cb1ed  3de0c0e4ac  63796d3feb  6b07529300  e10676710c
77f67c6666  bdbcdd61e1  4ab7ec754d  20f98f48bc  84e0502e50  2aa1b00dbb  972078a514
be8382d25b  8d46e88cd8  6b6b1b79ad  e1fd40b34d  bc1f8f97a8  b51705f458  aaed5f4797
a8eedef1d2  9407e21e1e  ba6dcfd467  e2c1de5008  10b7403748  1ba924cc12  11eb034bd3
c3250e15cb  8ff8a59b4c  0c646d88b2  05670ea599  e25eb32885  250f75f084  cdd39e6777
@@ -1,5 +1,5 @@
*******************************************
*** This is SABnzbd 0.7.4 ***
*** This is SABnzbd 0.7.9 ***
*******************************************
SABnzbd is an open-source cross-platform binary newsreader.
It simplifies the process of downloading from Usenet dramatically,
@@ -1,3 +1,85 @@
-------------------------------------------------------------------------------
0.7.9Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix fatal error in decoder when encountering a malformed article
- Fix compatibility with free.xsusenet.com
- Small fix in smpl-black CSS
-------------------------------------------------------------------------------
0.7.8Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Fix problem with %fn substitution in Sorting
- Add special "wait_for_dfolder", enables waiting for an external temp download folder
- Work-around for servers that do not support the STAT command
- Removed articles are now listed separately in the download report
- Add "abort" option to encryption detection
- Fix missing Retry link for "Out of retention" jobs.
- Option to abort a download when it is clear that not enough data is available
- Support "nzbname" parameter in addfile/addlocalfile api calls for
  ZIP files with a single NZB (see the example call after this list)
- Support NZB-1.1 meta data "password" and "category"
- Don't retry an empty but correct NZB from an indexer
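As a quick illustration of the new parameter, here is a minimal sketch of an API call that hands SABnzbd a local ZIP containing a single NZB and names the resulting job. The host, port, URL prefix, and API key are placeholder assumptions; adjust them to your own setup.

```python
# Minimal sketch (assumed host/port/apikey). mode=addlocalfile queues a file
# that is already on the server's disk; nzbname (new in 0.7.8) sets the job name.
import urllib
import urllib2

SABNZBD = 'http://localhost:8080/sabnzbd/api'   # assumed URL prefix
APIKEY = 'your-api-key-here'                    # Config -> General -> API Key

params = urllib.urlencode({
    'mode': 'addlocalfile',
    'name': '/path/to/job-with-single-nzb.zip',  # ZIP (or RAR) holding one NZB
    'nzbname': 'My.Renamed.Job',                 # job name to use in the queue
    'cat': 'tv',                                 # optional category
    'apikey': APIKEY,
})
print urllib2.urlopen(SABNZBD + '?' + params).read()
```

The same "nzbname" parameter applies to mode=addfile, where the NZB or archive is uploaded in the request body instead of read from disk.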
-------------------------------------------------------------------------------
0.7.7Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Windows/OSX: Update unrar to 4.20
- Fix some issues with orphaned items
- Generic Sort didn't always rename media files in multi-part jobs properly
- Optional web-ui watchdog
- Always show RSS items in the same order as the original RSS feed
- Remove unusable folders from the folder selector (Plush skin)
- Remove newzbin support
-------------------------------------------------------------------------------
0.7.6Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Recursive scanning when re-queuing downloaded NZB files
- Log "User-Agent" header of API calls
-------------------------------------------------------------------------------
0.7.6Beta2 by The SABnzbd-Team
-------------------------------------------------------------------------------
- A damaged smallest par2 file could block fetching of additional par2 files
- Fix evaluation of schedules at startup
- Make the check for a running SABnzbd instance more robust
-------------------------------------------------------------------------------
0.7.6Beta1 by The SABnzbd-Team
-------------------------------------------------------------------------------
- Handle par2 sets that were renamed after creation
- Prevent blocking assembly of completed files (this resulted in
  excessive CPU and memory usage)
- Fix speed issues with some Usenet servers due to unreachable IPv6 addresses
- Fix issues with SFV-based checks
- Prevent crash on Unix Pythons that don't have the os.getloadavg() function
- Successfully pre-checked job lost its attributes when those were changed during the check
- Remove version check when looking for a running instance of SABnzbd
-------------------------------------------------------------------------------
0.7.5Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Add missing %dn formula to Generic Sort
- Improve RSS logging
-------------------------------------------------------------------------------
0.7.5RC1 by The SABnzbd-Team
-------------------------------------------------------------------------------
- Prevent stuck jobs at end of pre-check.
- Fix issues with accented and special characters in names of downloaded files.
- Adjust nzbmatrix category table.
- Add 'prio_sort_list' special
- Add special option 'empty_postproc'.
- Prevent CherryPy crash when reading a cookie from another app which has a non-standard name.
- Prevent crash when trying to open a non-existing "complete" folder from the Windows system-tray icon.
- Fix problem with "Read" button when an RSS feed name contains "&".
- Prevent unusual SFV files from crashing post-processing.
- OSX: Retina-compatible menu-bar icons.
- Don't show speed and ETA when the download is paused during post-processing
- Prevent soft-crash when api-function "addfile" is called without parameters.
- Add news channel frame
-------------------------------------------------------------------------------
0.7.4Final by The SABnzbd-Team
-------------------------------------------------------------------------------
- Pre-queue script no longer got the show/season/episode information.
- Prevent crash on startup when a fully downloaded job is still in the download queue.
- A new RSS feed should no longer be considered new after a first but empty readout.
- Make "auth" call backward-compatible with 0.6.x releases.
- Config->Notifications: email and Growl server addresses should not be marked as "url" type.
- OSX: fix top menu queue info so that it shows total queue size
-------------------------------------------------------------------------------
0.7.4RC2 by The SABnzbd-Team
-------------------------------------------------------------------------------
@@ -1,5 +1,5 @@

(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>

The SABnzbd-team is:

@@ -1,10 +1,10 @@
SABnzbd 0.7.4
SABnzbd 0.7.9

-------------------------------------------------------------------------------
0) LICENSE
-------------------------------------------------------------------------------

(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
@@ -60,7 +60,8 @@ Unix/Linux/OSX
OSX Leopard/SnowLeopard
    Python 2.6             http://www.activestate.com

OSX Lion                   Apple Python 2.7 (included in OSX)
OSX Lion/MountainLion
    Apple Python 2.7       Included in OSX (default)

Windows
    Python-2.7.latest      http://www.activestate.com
@@ -1,4 +1,4 @@
(c) Copyright 2007-2012 by "The SABnzbd-team" <team@sabnzbd.org>
(c) Copyright 2007-2013 by "The SABnzbd-team" <team@sabnzbd.org>

This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
@@ -320,7 +320,7 @@ WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninst
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "URLUpdateInfo" 'http://sabnzbd.org/'
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "Comments" 'The automated Usenet download tool'
WriteRegStr HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "DisplayIcon" '$INSTDIR\interfaces\Classic\templates\static\images\favicon.ico'
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "EstimatedSize" 29622
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "EstimatedSize" 25674
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "NoRepair" -1
WriteRegDWORD HKEY_LOCAL_MACHINE "Software\Microsoft\Windows\CurrentVersion\Uninstall\SABnzbd" "NoModify" -1
; write out uninstaller
4  PKG-INFO
@@ -1,7 +1,7 @@
Metadata-Version: 1.0
Name: SABnzbd
Version: 0.7.4RC2
Summary: SABnzbd-0.7.4RC2
Version: 0.7.9
Summary: SABnzbd-0.7.9
Home-page: http://sabnzbd.org
Author: The SABnzbd Team
Author-email: team@sabnzbd.org
44  README.mkd
@@ -1,29 +1,25 @@
Release Notes - SABnzbd 0.7.4RC2
==================================
Release Notes - SABnzbd 0.7.9
===============================

## Fixes in 0.7.4RC2
- Pre-check failed to consider extra par2 files
- Fixed unjustified warning that can occur with OSX Growl 2.0
- Show memory usage on Linux systems
- Fix incorrect end-of-month quota reset
- Fix UI refresh issue when using Safari on iOS6
## Bug fix 0.7.9
- Fix fatal error when encountering a malformed article

## Fixes in 0.7.4RC1
## Features (0.7.8)
- Use "category" and "password" meta-data in NZB files
  (Provided by some indexers)
- Option to abort a download when it is clear that not enough data is available
  (For removed posts this will be faster than the pre-download check)
- Add "Abort" option for encryption detection
- Removed articles are now listed separately in the download report
- Special option "wait_for_dfolder", enables waiting for an external temp download folder at startup
  (see the sample config sketch below)

- OSX Mountain Lion: Notification Center support
- OSX Mountain Lion: improved "keep awake" support
- Restore SABnzbd icon for Growl
- Scheduler: action can now run on multiple weekdays
- Scheduler: add "remove failed jobs" action
- After a successful pre-check, preserve a job's position in the queue
- Make the Windows version less eager to use par2-classic
- Support for HTTPS chain files (needed when you buy your own certificate)
- Prevent jobs from showing up in queue and history simultaneously
- Fix failure to fetch more par2-files for posts with badly formatted subject lines
- Special option: rss_odd_titles (see Wiki)
- Special option: 'overwrite_files' (See Wiki)
- A number of small issues (see changelog)
- Fix for third-party tools requesting too much history
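The special options named in these notes are plain key = value entries edited by hand in sabnzbd.ini. A minimal sketch follows, assuming these keys belong in the [misc] section (where SABnzbd keeps its general options) and using purely illustrative values; check the wiki for the authoritative names and formats before copying any of it.

```ini
[misc]
# Illustrative values only; restart SABnzbd after editing (assumed placement in [misc]).
wait_for_dfolder = 1     # 0.7.8 special: wait at startup until the temp download folder is reachable
web_watchdog = 1         # 0.7.7 option: auto-restart when the web UI stops answering
empty_postproc = 1       # 0.7.5 special, see wiki
overwrite_files = 0      # special, see wiki
pause_on_pwrar = 2       # encryption detection: 0 = off, 1 = pause, 2 = abort (per the 0.7.8 UI)
```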
## Bug fixes (0.7.8)
- Fix problem with %fn substitution in Sorting
- Pre-download check did not work with all servers
- Fix missing Retry link for "Out of retention" jobs.
- API function "addfile" now accepts "nzbname" parameter for ZIP/RAR files with one NZB.
- Prevent retries when an NZB with just samples is retrieved from an indexer
  (and you had the "do not download samples" option enabled).

## What's new in 0.7.0

@@ -53,7 +49,7 @@ Release Notes - SABnzbd 0.7.4RC2
built-in post-processing options that automatically verify, repair,
extract and clean up posts downloaded from Usenet.

(c) Copyright 2007-2012 by "The SABnzbd-team" \<team@sabnzbd.org\>
(c) Copyright 2007-2013 by "The SABnzbd-team" \<team@sabnzbd.org\>


### IMPORTANT INFORMATION about release 0.7.x
27  SABnzbd.py
@@ -1,5 +1,5 @@
#!/usr/bin/python -OO
# Copyright 2008-2012 The SABnzbd-Team <team@sabnzbd.org>
# Copyright 2008-2013 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
@@ -28,6 +28,7 @@ import signal
import socket
import platform
import time
import re

try:
    import Cheetah
@@ -259,7 +260,7 @@ def print_version():
    print """
%s-%s

Copyright (C) 2008-2012, The SABnzbd-Team <team@sabnzbd.org>
Copyright (C) 2008-2013, The SABnzbd-Team <team@sabnzbd.org>
SABnzbd comes with ABSOLUTELY NO WARRANTY.
This is free software, and you are welcome to redistribute it
under certain conditions. It is licensed under the
@@ -685,16 +686,13 @@ def attach_server(host, port, cert=None, key=None, chain=None):
    adapter.subscribe()


def is_sabnzbd_running(url):
def is_sabnzbd_running(url, timeout=None):
    """ Return True when there's already a SABnzbd instance running.
    """
    try:
        url = '%s&mode=version' % (url)
        ver = sabnzbd.newsunpack.get_from_url(url)
        if ver and ver.strip(' \n\r\t') == sabnzbd.__version__:
            return True
        else:
            return False
        ver = sabnzbd.newsunpack.get_from_url(url, timeout=timeout)
        return bool(ver and re.search(r'\d+\.\d+\.', ver))
    except:
        return False

@@ -714,7 +712,7 @@ def find_free_port(host, currentport):


def check_for_sabnzbd(url, upload_nzbs, allow_browser=True):
    """ Check for a running instance of sabnzbd(same version) on this port
    """ Check for a running instance of sabnzbd on this port
        allow_browser==True|None will launch the browser, False will not.
    """
    if allow_browser is None:
@@ -1565,7 +1563,7 @@ def main():
            add_local(f)

    # Have to keep this running, otherwise logging will terminate
    timer = 0
    timer = timer5 = 0
    while not sabnzbd.SABSTOP:
        if sabnzbd.WIN_SERVICE:
            rc = win32event.WaitForMultipleObjects((sabnzbd.WIN_SERVICE.hWaitStop,
@@ -1605,6 +1603,15 @@ def main():
            if sabnzbd.WIN_SERVICE and mail:
                mail.send('active')

            if timer5 > 9:
                ### 5 minute polling tasks
                timer5 = 0
                if sabnzbd.cfg.web_watchdog() and not is_sabnzbd_running('%s/api?tickleme=1' % sabnzbd.BROWSER_URL, 120):
                    autorestarted = True
                    cherrypy.engine.execv = True
            else:
                timer5 += 1

        else:
            timer += 1
@@ -658,7 +658,10 @@ class Request(object):
            # Handle cookies differently because on Konqueror, multiple
            # cookies come on different lines with the same key
            if name == 'Cookie':
                self.cookie.load(value)
                try:
                    self.cookie.load(value)
                except:
                    pass

        if not dict.__contains__(headers, 'Host'):
            # All Internet-based HTTP/1.1 servers MUST respond with a 400
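For context, a minimal Python 2 sketch of the failure mode this hunk guards against: the stdlib cookie parser raises CookieError for names it considers illegal, so swallowing the exception keeps a cookie set by another app from killing the whole request. The ':'-style name below is an illustrative guess at one such trigger, not a value taken from the source.

```python
# Python 2 stdlib: SimpleCookie.load() raises CookieError on keys it rejects.
from Cookie import SimpleCookie, CookieError

jar = SimpleCookie()
try:
    # A cookie set by another app with a non-standard name (assumed example).
    jar.load('other:app=1; sabnzbd_session=abc123')
except CookieError:
    pass  # ignore the malformed cookie instead of failing the request
```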
@@ -6,59 +6,6 @@
|
||||
<!--#set global $submenu="newzbin"#-->
|
||||
<!--#include $webdir + "/inc_cmenu.tmpl"#-->
|
||||
|
||||
<h2>Newzbin</h2>
|
||||
$T('explain-newzbin')<br/><br/>
|
||||
<form action="saveNewzbin" method="post" autocomplete="off">
|
||||
<div class="EntryBlock">
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('accountInfo')</legend>
|
||||
<strong>$T('opt-username_newzbin'):</strong><br>
|
||||
$T('explain-username_newzbin')<br>
|
||||
<input type="text" name="username_newzbin" value="$username_newzbin">
|
||||
<br>
|
||||
<br>
|
||||
<strong>$T('opt-password_newzbin'):</strong><br>
|
||||
$T('explain-password_newzbin')<br>
|
||||
<input type="password" name="password_newzbin" value="$password_newzbin">
|
||||
</fieldset>
|
||||
</div>
|
||||
|
||||
<div class="EntryBlock">
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('newzbinBookmarks')</legend>
|
||||
<label><input type="checkbox" name="newzbin_bookmarks" value="1" <!--#if $newzbin_bookmarks > 0 then "checked=1" else ""#--> <strong>$T('opt-newzbin_bookmarks'):</strong></label><br>
|
||||
$T('explain-newzbin_bookmarks')<br>
|
||||
<a href="getBookmarks?session=$session">$T('link-getBookmarks')</a>
|
||||
<br>
|
||||
<!--#if $bookmarks_list#-->
|
||||
<a href="hideBookmarks?session=$session">$T('link-HideBM')</a>
|
||||
<!--#else#-->
|
||||
<a href="showBookmarks?session=$session">$T('link-ShowBM')</a>
|
||||
<!--#end if#-->
|
||||
<br/>
|
||||
<br/>
|
||||
<label><input type="checkbox" name="newzbin_unbookmark" value="1" <!--#if $newzbin_unbookmark > 0 then "checked=1" else ""#--> /> <strong>$T('opt-newzbin_unbookmark'):</strong></label><br>
|
||||
$T('explain-newzbin_unbookmark')<br>
|
||||
<br/>
|
||||
<strong>$T('opt-bookmark_rate'):</strong><br>
|
||||
$T('explain-bookmark_rate')<br>
|
||||
<input type="text" name="bookmark_rate" value="$bookmark_rate">
|
||||
</fieldset>
|
||||
</div>
|
||||
<!--#if $bookmarks_list#-->
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('processedBM')</legend>
|
||||
<!--#for $msgid in $bookmarks_list#-->
|
||||
<a href="https://$newzbin_url/browse/post/$msgid/" target="_blank">$msgid</a>
|
||||
<!--#end for#-->
|
||||
</fieldset>
|
||||
<!--#end if#-->
|
||||
<input type="hidden" name="session" value="$session">
|
||||
<p><input type="submit" value="$T('button-saveChanges')"></p>
|
||||
</form>
|
||||
|
||||
<hr/>
|
||||
|
||||
<h2>NzbMatrix</h2>
|
||||
$T('explain-nzbmatrix')<br/><br/>
|
||||
|
||||
|
||||
@@ -47,11 +47,13 @@
|
||||
<a href="$cpath/notify/">$T('cmenu-notif')</a> |
|
||||
<!--#end if#-->
|
||||
|
||||
<!--#if 0#-->
|
||||
<!--#if $submenu=="indexers"#-->
|
||||
<a class="current" href="./">$T('cmenu-newzbin')</a> |
|
||||
<!--#else#-->
|
||||
<a href="$cpath/indexers/">$T('cmenu-newzbin')</a> |
|
||||
<!--#end if#-->
|
||||
<!--#end if#-->
|
||||
|
||||
<!--#if $submenu=="categories"#-->
|
||||
<a class="current" href="./">$T('cmenu-cat')</a> |
|
||||
|
||||
@@ -17,8 +17,7 @@
|
||||
<div class="EntryBlock">
|
||||
<form action="addID" method="get">
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('add')
|
||||
<!--#if $varExists('newzbinDetails')#--> $T('reportId') / <!--#end if#-->URL</legend>
|
||||
<legend>$T('add') URL</legend>
|
||||
<input type="text" name="id">
|
||||
<!--#if $cat_list#-->
|
||||
<select name="cat" >
|
||||
|
||||
@@ -128,9 +128,11 @@
|
||||
<a href="${root}config/notify/">
|
||||
<div #if $pane == "Email" then 'class="active"' else ""#>$T('cmenu-notif')</div>
|
||||
</a>
|
||||
<!--#if 0#-->
|
||||
<a href="${root}config/indexers/">
|
||||
<div #if $pane == "Index Sites" then 'class="active"' else ""#>$T('cmenu-newzbin')</div>
|
||||
</a>
|
||||
<!--#end if#-->
|
||||
<a href="${root}config/categories/">
|
||||
<div #if $pane == "Categories" then 'class="active"' else ""#>$T('cmenu-cat')</div>
|
||||
</a>
|
||||
|
||||
@@ -21,9 +21,14 @@
|
||||
</table>
|
||||
</div>
|
||||
<div class="padding alt">
|
||||
<h5 class="copyright">Copyright © 2008-2012 The SABnzbd Team <<span style="color: #0000ff;">team@sabnzbd.org</span>></h5>
|
||||
<h5 class="copyright">Copyright © 2008-2013 The SABnzbd Team <<span style="color: #0000ff;">team@sabnzbd.org</span>></h5>
|
||||
<p class="copyright"><small>$T('yourRights')</small></p>
|
||||
</div>
|
||||
<!--#if $news_items#-->
|
||||
<div class="padding">
|
||||
<iframe frameborder=0 width=100% src="http://sabnzbdplus.sourceforge.net/version/news.html"></iframe>
|
||||
</div>
|
||||
<!--#end if#-->
|
||||
</div>
|
||||
|
||||
<!--#include $webdir + "/_inc_footer_uc.tmpl"#-->
|
||||
|
||||
@@ -157,7 +157,7 @@
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="cleanup_list">$T('opt-cleanup_list')</label>
|
||||
<input type="text" name="cleanup_list" id="cleanup_list" value="$cleanup_list" size="50" placeholder=".nfo, .sfv" />
|
||||
<input type="text" name="cleanup_list" id="cleanup_list" value="$cleanup_list" size="50"/>
|
||||
<span class="desc">$T('explain-cleanup_list')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
|
||||
@@ -33,84 +33,6 @@
|
||||
</fieldset>
|
||||
</div><!-- /col1 -->
|
||||
</div><!-- /section -->
|
||||
<div class="section">
|
||||
<div class="col2">
|
||||
<h3>Newzbin $T('accountInfo')</h3>
|
||||
<p>$T('explain-newzbin')</p>
|
||||
</div><!-- /col2 -->
|
||||
<div class="col1">
|
||||
<fieldset>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="username_newzbin">$T('opt-username_newzbin')</label>
|
||||
<input type="text" name="username_newzbin" id="username_newzbin" value="$username_newzbin" size="30" />
|
||||
<span class="desc">$T('explain-username_newzbin')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="password_newzbin">$T('opt-password_newzbin')</label>
|
||||
<input type="password" name="password_newzbin" id="password_newzbin" value="$password_newzbin" size="30" />
|
||||
<span class="desc">$T('explain-password_newzbin')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<input type="submit" value="$T('button-saveChanges')" class="saveButton" />
|
||||
</div>
|
||||
</fieldset>
|
||||
</div><!-- /col1 -->
|
||||
</div><!-- /section -->
|
||||
<div class="section">
|
||||
<div class="col2">
|
||||
<h3>Newzbin $T('newzbinBookmarks')</h3>
|
||||
<p>
|
||||
<input type="button" id="getBookmarks" value="$T('link-getBookmarks')" />
|
||||
<span id="getBookmarks-result" class="icon"> </span>
|
||||
<br/><br/>
|
||||
<!--#if $bookmarks_list#-->
|
||||
<input type="button" id="hideBookmarks" value="$T('link-HideBM')" />
|
||||
<!--#else#-->
|
||||
<input type="button" id="showBookmarks" value="$T('link-ShowBM')" />
|
||||
<!--#end if#-->
|
||||
</p>
|
||||
</div><!-- /col2 -->
|
||||
<div class="col1">
|
||||
<fieldset>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="newzbin_bookmarks">$T('opt-newzbin_bookmarks')</label>
|
||||
<input type="checkbox" name="newzbin_bookmarks" id="newzbin_bookmarks" value="1" <!--#if int($newzbin_bookmarks) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-newzbin_bookmarks')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="newzbin_unbookmark">$T('opt-newzbin_unbookmark')</label>
|
||||
<input type="checkbox" name="newzbin_unbookmark" id="newzbin_unbookmark" value="1" <!--#if int($newzbin_unbookmark) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-newzbin_unbookmark')</span>
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="bookmark_rate">$T('opt-bookmark_rate')</label>
|
||||
<input type="number" name="bookmark_rate" id="bookmark_rate" value="$bookmark_rate" size="8" min="15" max="1440" />
|
||||
<span class="desc">$T('explain-bookmark_rate')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<input type="submit" value="$T('button-saveChanges')" class="saveButton" />
|
||||
</div>
|
||||
</fieldset>
|
||||
</div><!-- /col1 -->
|
||||
</div><!-- /section -->
|
||||
<!--#if $bookmarks_list#-->
|
||||
<div class="section">
|
||||
<div class="col2">
|
||||
<h3>Newzbin Bookmarks</h3>
|
||||
</div><!-- /col2 -->
|
||||
<div class="col1">
|
||||
<fieldset>
|
||||
<!--#set $odd = False#-->
|
||||
<!--#for $msgid in $bookmarks_list#-->
|
||||
<!--#set $odd = not $odd#-->
|
||||
<div class="field-pair <!--#if $odd then "alt" else ""#-->">
|
||||
<a href="https://$newzbin_url/browse/post/$msgid/" target="_blank">$msgid</a><br/>
|
||||
</div>
|
||||
<!--#end for#-->
|
||||
</fieldset>
|
||||
</div><!-- /col1 -->
|
||||
</div><!-- /section -->
|
||||
<!--#end if#-->
|
||||
<div class="padding alt">
|
||||
<input type="submit" value="$T('button-saveChanges')" class="saveButton" />
|
||||
<input type="button" value="$T('button-restart') SABnzbd" class="sabnzbd_restart" />
|
||||
|
||||
@@ -48,7 +48,7 @@
|
||||
<fieldset>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="email_server">$T('opt-email_server')</label>
|
||||
<input type="url" name="email_server" id="email_server" value="$email_server" size="40" />
|
||||
<input type="text" name="email_server" id="email_server" value="$email_server" size="40" />
|
||||
<span class="desc">$T('explain-email_server')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
@@ -111,7 +111,7 @@
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="growl_server">$T('opt-growl_server')</label>
|
||||
<input type="url" name="growl_server" id="growl_server" value="$growl_server" size="40" />
|
||||
<input type="text" name="growl_server" id="growl_server" value="$growl_server" size="40" />
|
||||
<span class="desc">$T('explain-growl_server')</span>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
|
||||
@@ -498,6 +498,10 @@
|
||||
</div><!-- /colmask -->
|
||||
|
||||
<script>
|
||||
function urlencode(str) {
|
||||
return encodeURIComponent(str).replace(/!/g, '%21').replace(/'/g, '%27').replace(/\(/g, '%28').replace(/\)/g, '%29').replace(/\*/g, '%2A').replace(/%20/g, '+');
|
||||
}
|
||||
|
||||
\$(document).ready(function(){
|
||||
\$('.editFeed').click(function(){
|
||||
var oldURI = \$(this).prev().val();
|
||||
@@ -537,7 +541,7 @@
|
||||
url: "test_rss_feed",
|
||||
data: {feed: whichFeed, session: "$session" }
|
||||
}).done(function( msg ) {
|
||||
location = '?feed=' + whichFeed;
|
||||
location = '?feed=' + urlencode(whichFeed);
|
||||
// location.reload();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="password">$T('srv-password')</label>
|
||||
<input type="text" name="password" id="password" size="30" />
|
||||
<input type="password" name="password" id="password" size="30" />
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="connections">$T('srv-connections')</label>
|
||||
@@ -155,10 +155,10 @@
|
||||
<div class="col2" style="display:block;">
|
||||
<!--#if 'amounts' in $servers[$server]#-->
|
||||
<b>$T('srv-bandwidth'):</b><br/>
|
||||
$T('total'): $servers[$server]['amounts'][0]<br/>
|
||||
$T('today'): $servers[$server]['amounts'][3]<br/>
|
||||
$T('thisWeek'): $servers[$server]['amounts'][2]<br/>
|
||||
$T('thisMonth'): $servers[$server]['amounts'][1]
|
||||
$T('total'): $(servers[$server]['amounts'][0])B<br/>
|
||||
$T('today'): $(servers[$server]['amounts'][3])B<br/>
|
||||
$T('thisWeek'): $(servers[$server]['amounts'][2])B<br/>
|
||||
$T('thisMonth'): $(servers[$server]['amounts'][1])B
|
||||
<!--#end if#-->
|
||||
</div>
|
||||
</div><!-- /section -->
|
||||
|
||||
@@ -265,6 +265,11 @@
|
||||
<td>$T('sort-File')</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td class="align-right"><b>$T('orgDirname'):</b></td>
|
||||
<td>%dn</td>
|
||||
<td>$T("sort-Folder")</td>
|
||||
</tr>
|
||||
<tr class="even">
|
||||
<td class="align-right"><b>$T('lowercase'):</b></td>
|
||||
<td>{$T('TEXT')}</td>
|
||||
<td>$T('text')</td>
|
||||
@@ -432,7 +437,7 @@
|
||||
return function(callback, ms){
|
||||
clearTimeout (timer);
|
||||
timer = setTimeout(callback, ms);
|
||||
}
|
||||
}
|
||||
})();
|
||||
|
||||
function tvSet(val) {
|
||||
|
||||
@@ -83,6 +83,11 @@
|
||||
</div><!-- /col2 -->
|
||||
<div class="col1">
|
||||
<fieldset>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="fail_hopeless">$T('opt-fail_hopeless')</label>
|
||||
<input type="checkbox" name="fail_hopeless" id="fail_hopeless" value="1" <!--#if int($fail_hopeless) > 0 then 'checked="checked"' else ""#--> />
|
||||
<span class="desc">$T('explain-fail_hopeless')</span>
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="config" for="pre_check">$T('opt-pre_check')</label>
|
||||
<input type="checkbox" name="pre_check" id="pre_check" value="1" <!--#if int($pre_check) > 0 then 'checked="checked"' else ""#--> />
|
||||
@@ -104,7 +109,11 @@
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="config" for="pause_on_pwrar">$T('opt-pause_on_pwrar')</label>
|
||||
<input type="checkbox" name="pause_on_pwrar" id="pause_on_pwrar" value="1" <!--#if int($pause_on_pwrar) > 0 then 'checked="checked"' else ""#--> />
|
||||
<select name="pause_on_pwrar" id="pause_on_pwrar">
|
||||
<option value="0" <!--#if int($pause_on_pwrar) == 0 then 'selected="selected" class="selected"' else ""#--> >$T('nodupes-off')</option>
|
||||
<option value="1" <!--#if int($pause_on_pwrar) == 1 then 'selected="selected" class="selected"' else ""#--> >$T('nodupes-pause')</option>
|
||||
<option value="2" <!--#if int($pause_on_pwrar) == 2 then 'selected="selected" class="selected"' else ""#--> >$T('abort')</option>
|
||||
</select>
|
||||
<span class="desc">$T('explain-pause_on_pwrar')</span>
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
|
||||
@@ -126,8 +126,10 @@
|
||||
<div class="config_sprite_container sprite_config_nav_scheduling">$T('Plush-cmenu-scheduling')</div></a></li>
|
||||
<li><a class="#if $pane=="Email"#nav_active#end if#" id="config_nav_email" href="${path}config/notify/">
|
||||
<div class="config_sprite_container sprite_config_nav_email">$T('cmenu-notif')</div></a></li>
|
||||
<!--#if 0#-->
|
||||
<li><a class="#if $pane=="Index Sites"#nav_active#end if#" id="config_nav_index_sites" href="${path}config/indexers/">
|
||||
<div class="config_sprite_container sprite_config_nav_indexsites">$T('cmenu-newzbin')</div></a></li>
|
||||
<!--#end if#-->
|
||||
<li><a class="#if $pane=="Categories"#nav_active#end if#" id="config_nav_categories" href="${path}config/categories/">
|
||||
<div class="config_sprite_container sprite_config_nav_categories">$T('cmenu-cat')</div></a></li>
|
||||
<li><a class="#if $pane=="Sorting"#nav_active#end if#" id="config_nav_sorting" href="${path}config/sorting/">
|
||||
|
||||
@@ -43,90 +43,6 @@
|
||||
</fieldset>
|
||||
</div><!-- /component-group1 -->
|
||||
|
||||
<div id="core-component-group2" class="component-group clearfix">
|
||||
<div class="component-group-desc">
|
||||
<h3>Newzbin $T('accountInfo')</h3>
|
||||
<p>$T('explain-newzbin')</p>
|
||||
</div>
|
||||
<fieldset class="component-group-list">
|
||||
<div class="field-pair">
|
||||
<label class="nocheck clearfix" for="username_newzbin">
|
||||
<span class="component-title">$T('opt-username_newzbin')</span>
|
||||
<input type="text" name="username_newzbin" id="username_newzbin" value="$username_newzbin"/>
|
||||
</label>
|
||||
<label class="nocheck clearfix">
|
||||
<span class="component-title"> </span>
|
||||
<span class="component-desc">$T('explain-username_newzbin')</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<label class="nocheck clearfix" for="password_newzbin">
|
||||
<span class="component-title">$T('opt-password_newzbin')</span>
|
||||
<input type="password" name="password_newzbin" id="password_newzbin" value="$password_newzbin"/>
|
||||
</label>
|
||||
<label class="nocheck clearfix">
|
||||
<span class="component-title"> </span>
|
||||
<span class="component-desc">$T('explain-password_newzbin')</span>
|
||||
</label>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div><!-- /component-group2 -->
|
||||
|
||||
<div id="core-component-group3" class="component-group clearfix">
|
||||
<div class="component-group-desc">
|
||||
<h3>Newzbin $T('newzbinBookmarks')</h3>
|
||||
<p>
|
||||
<input type="button" class="juiButton" id="getBookmarks" value="$T('link-getBookmarks')" />
|
||||
<br/><br/>
|
||||
<!--#if $bookmarks_list#-->
|
||||
<input type="button" class="juiButton" id="hideBookmarks" value="$T('link-HideBM')" />
|
||||
<!--#else#-->
|
||||
<input type="button" class="juiButton" id="showBookmarks" value="$T('link-ShowBM')" />
|
||||
<!--#end if#-->
|
||||
</p>
|
||||
</div>
|
||||
<fieldset class="component-group-list">
|
||||
<div class="field-pair">
|
||||
<input type="checkbox" name="newzbin_bookmarks" id="newzbin_bookmarks" value="1" <!--#if $newzbin_bookmarks > 0 then "checked=1" else ""#--> />
|
||||
<label class="clearfix" for="newzbin_bookmarks">
|
||||
<span class="component-title">$T('opt-newzbin_bookmarks')</span>
|
||||
<span class="component-desc">$T('explain-newzbin_bookmarks')</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair alt">
|
||||
<input type="checkbox" name="newzbin_unbookmark" id="newzbin_unbookmark" value="1" <!--#if $newzbin_unbookmark > 0 then "checked=1" else ""#--> />
|
||||
<label class="clearfix" for="newzbin_unbookmark">
|
||||
<span class="component-title">$T('opt-newzbin_unbookmark')</span>
|
||||
<span class="component-desc">$T('explain-newzbin_unbookmark')</span>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
<label class="nocheck clearfix" for="bookmark_rate">
|
||||
<span class="component-title">$T('opt-bookmark_rate')</span>
|
||||
<input type="text" name="bookmark_rate" id="bookmark_rate" size="6" value="$bookmark_rate"/>
|
||||
</label>
|
||||
<label class="nocheck clearfix">
|
||||
<span class="component-title"> </span>
|
||||
<span class="component-desc">$T('explain-bookmark_rate')</span>
|
||||
</label>
|
||||
</div>
|
||||
</fieldset>
|
||||
</div><!-- /component-group3 -->
|
||||
|
||||
<!--#if $bookmarks_list#-->
|
||||
<div id="core-component-group4" class="component-group clearfix">
|
||||
<div class="component-group-desc">
|
||||
<h3>Newzbin $T('accountInfo')</h3>
|
||||
<p>$T('explain-newzbin')</p>
|
||||
</div>
|
||||
<fieldset class="component-group-list">
|
||||
<!--#for $msgid in $bookmarks_list#-->
|
||||
<a href="https://$newzbin_url/browse/post/$msgid/" target="_blank">$msgid</a><br/>
|
||||
<!--#end for#-->
|
||||
</fieldset>
|
||||
</div><!-- /component-group4 -->
|
||||
<!--#end if#-->
|
||||
|
||||
<div class="component-group-last clearfix">
|
||||
<div class="component-group-desc">
|
||||
<h3> </h3>
|
||||
|
||||
@@ -46,7 +46,7 @@
|
||||
<div class="field-pair alt">
|
||||
<label class="nocheck clearfix" for="password">
|
||||
<span class="component-title">$T('srv-password')</span>
|
||||
<input type="text" size="25" name="password"/>
|
||||
<input type="password" size="25" name="password"/>
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
@@ -156,7 +156,7 @@
|
||||
<div class="field-pair alt">
|
||||
<label class="nocheck clearfix" for="password">
|
||||
<span class="component-title">$T('srv-password')</span>
|
||||
<input type="text" size="25" name="password" value="$servers[$server]['password']" />
|
||||
<input type="password" size="25" name="password" value="$servers[$server]['password']" />
|
||||
</label>
|
||||
</div>
|
||||
<div class="field-pair">
|
||||
|
||||
@@ -26,7 +26,6 @@
|
||||
<a class="sf-with-ul">$T('menu-queue')</a>
|
||||
<ul>
|
||||
<!--#if $have_quota#--><li><a id="reset_quota_now" class="pointer">$T('link-resetQuota')</a></li><!--#end if#-->
|
||||
<!--#if $varExists('newzbinDetails')#--><li><a id="get_bookmarks_now" class="pointer">$T('link-getBookmarks')</a></li><!--#end if#-->
|
||||
<!--#if $have_rss_defined#--><li><a id="get_rss_now" class="pointer">$T('button-rssNow')</a></li><!--#end if#-->
|
||||
<!--#if $have_watched_dir#--><li><a id="get_watched_now" class="pointer">$T('sch-scan_folder')</a></li><!--#end if#-->
|
||||
<li><a id="topmenu_toggle" class="pointer">$T('Plush-topMenu')</a></li>
|
||||
|
||||
@@ -895,7 +895,7 @@ $("a","#multiops_inputs").click(function(e){
|
||||
headers: {"Cache-Control": "no-cache"},
|
||||
type: "POST",
|
||||
url: "tapi",
|
||||
data: {mode:'history', name:'delete', value:value, del_files:del_files, apikey: $.plush.apikey},
|
||||
data: {mode:'history', name:'delete', value:value, del_files:del_files, search: $('#historySearchBox').val(), apikey: $.plush.apikey},
|
||||
success: function(){
|
||||
$.colorbox.close();
|
||||
$.plush.modalOpen=false;
|
||||
@@ -1242,12 +1242,16 @@ $.plush.histprevslots = $.plush.histnoofslots; // for the next refresh
|
||||
SetQueueETAStats : function(speed,kbpersec,timeleft,eta) {
|
||||
|
||||
// ETA/speed stats at top of queue
|
||||
if (kbpersec < 1 && $.plush.paused)
|
||||
if (kbpersec < 1 || $.plush.paused) {
|
||||
$('#stats_eta').html('—');
|
||||
else
|
||||
$('#stats_speed').html('—');
|
||||
$('#time-left').attr('title','—'); // Tooltip on "time left"
|
||||
}
|
||||
else {
|
||||
$('#stats_eta').html(timeleft);
|
||||
$('#stats_speed').html(speed+"B/s");
|
||||
$('#time-left').attr('title',eta); // Tooltip on "time left"
|
||||
$('#stats_speed').html(speed+"B/s");
|
||||
$('#time-left').attr('title',eta); // Tooltip on "time left"
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
|
||||
@@ -1,69 +1,4 @@
|
||||
<a href="${helpuri}Configure+Indexers-0-7" id="help" target="_blank">$T('menu-help')</a><h3>Newzbin</h3>
|
||||
<form id="configNewzbin" class="cmxform" autocomplete="off">
|
||||
|
||||
$T('explain-newzbin')<br/>
|
||||
<br/>
|
||||
<div class="EntryBlock">
|
||||
|
||||
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('accountInfo')</legend>
|
||||
<hr />
|
||||
<label class="label">$T('opt-username_newzbin'):</label>
|
||||
<input type="text" name="username_newzbin" value="$username_newzbin">
|
||||
<span class="tips">$T('explain-username_newzbin')</span>
|
||||
<br class="clear" />
|
||||
|
||||
|
||||
<label class="label">$T('opt-password_newzbin'):</label>
|
||||
<input type="password" name="password_newzbin" value="$password_newzbin">
|
||||
<span class="tips">$T('explain-password_newzbin')</span>
|
||||
<br class="clear" />
|
||||
</fieldset>
|
||||
|
||||
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('newzbinBookmarks')</legend>
|
||||
<hr />
|
||||
|
||||
<label><span class="label">$T('newzbinBookmarks'):</span>
|
||||
<input class="radio" type="checkbox" name="newzbin_bookmarks" value="1" <!--#if $newzbin_bookmarks > 0 then "checked=1" else ""#--> />
|
||||
<span class="tips">$T('explain-newzbin_bookmarks')</span></label>
|
||||
<br class="clear" />
|
||||
|
||||
<label><span class="label">$T('opt-newzbin_unbookmark'):</span>
|
||||
<input class="radio" type="checkbox" name="newzbin_unbookmark" value="1" <!--#if $newzbin_unbookmark > 0 then "checked=1" else ""#--> />
|
||||
<span class="tips">$T('explain-newzbin_unbookmark')</span></label>
|
||||
<br class="clear" />
|
||||
|
||||
<label class="label">$T('opt-bookmark_rate'):</label>
|
||||
<input type="text" name="bookmark_rate" value="$bookmark_rate">
|
||||
<span class="tips">$T('explain-bookmark_rate')</span>
|
||||
<br class="clear" />
|
||||
</fieldset>
|
||||
|
||||
<a class="config" onClick="getBookmarks();">$T('link-getBookmarks')</a>
|
||||
<!--#if $bookmarks_list#-->
|
||||
<a class="config" onClick="lr('config/indexers/hideBookmarks');">$T('link-HideBM')</a>
|
||||
<!--#else#-->
|
||||
<a class="config" onClick="lr('config/indexers/showBookmarks');">$T('link-ShowBM')</a>
|
||||
<!--#end if#-->
|
||||
|
||||
<!--#if $bookmarks_list#-->
|
||||
<fieldset class="EntryFieldSet">
|
||||
<legend>$T('processedBM')</legend>
|
||||
<hr />
|
||||
<!--#for $msgid in $bookmarks_list#-->
|
||||
<a href="https://$newzbin_url/browse/post/$msgid/" target="_blank">$msgid</a>
|
||||
<!--#end for#-->
|
||||
<br class="clear" />
|
||||
</fieldset>
|
||||
<!--#end if#-->
|
||||
|
||||
|
||||
</div>
|
||||
|
||||
<br/><hr/>
|
||||
<a href="${helpuri}Configure+Indexers-0-7" id="help" target="_blank">$T('menu-help')</a>
|
||||
<h3>NzbMatrix</h3><br/>
|
||||
|
||||
$T('explain-nzbmatrix')<br/>
|
||||
|
||||
@@ -1133,7 +1133,9 @@ function loadingJSON(){
|
||||
<li><a class="config" href="$prefix/config/scheduling/" onclick="lr('config/scheduling/','', 0, 0);">$T('cmenu-scheduling')</a> </li>
|
||||
<li><a class="config" href="$prefix/config/rss/" onclick="lr('config/rss/','', 0, 0);">$T('cmenu-rss')</a> </li>
|
||||
<li><a class="config" href="$prefix/config/notify/" onclick="lr('config/notify/','', 0, 0);">$T('cmenu-notif')</a></li>
|
||||
<!--#if 0#-->
|
||||
<li><a class="config" href="$prefix/config/indexers/" onclick="lr('config/indexers/', '', 0, 0);">$T('cmenu-newzbin')</a></li>
|
||||
<!--#end if#-->
|
||||
<li><a class="config" href="$prefix/config/categories/" onclick="lr('config/categories/', '', 0, 0);">$T('cmenu-cat')</a></li>
|
||||
<li><a class="config" href="$prefix/config/sorting/" onclick="lr('config/sorting/', '', 0, 0);">$T('cmenu-sorting')</a></li>
|
||||
</ul>
|
||||
@@ -1152,9 +1154,6 @@ function loadingJSON(){
|
||||
<li><a class="config" onclick="javascript:timedPause()">$T("smpl-custom")</a></li>
|
||||
|
||||
</ul>
|
||||
<!--#if $varExists('newzbinDetails')#-->
|
||||
<li><a onclick="getBookmarks()">$T('smpl-getbookmarks')</a></li>
|
||||
<!--#end if#-->
|
||||
<!--#if $have_quota#-->
|
||||
<li><a onclick="resetQuota()">$T('link-resetQuota')</a></li>
|
||||
<!--#end if#-->
|
||||
@@ -1183,7 +1182,7 @@ function loadingJSON(){
|
||||
<div id="RightContainer" class="left-border">
|
||||
<div id="addNew" class="centerLinks" style="overflow: hidden; display: none;">
|
||||
<form action="addID" method="get">
|
||||
<input type="text" style="width:218px;" name="id" value="$T('enterURL')<!--#if $varExists('newzbinDetails') then $T('enterID') else '' #-->" onfocus="clearForm(this, 'Enter URL<!--#if $varExists('newzbinDetails') then " or Report ID" else "" #-->')" onblur="setForm(this, 'Enter URL<!--#if $varExists('newzbinDetails') then " or Report ID" else "" #-->')">
|
||||
<input type="text" style="width:218px;" name="id" value="$T('enterURL')" onfocus="clearForm(this, 'Enter URL<!--#if $varExists('newzbinDetails') then " or Report ID" else "" #-->')" onblur="setForm(this, 'Enter URL<!--#if $varExists('newzbinDetails') then " or Report ID" else "" #-->')">
|
||||
<!--#if $cat_list#-->
|
||||
<select name="cat" >
|
||||
<optgroup label="$T('category')">
|
||||
|
||||
@@ -24,15 +24,15 @@ border-top: 1px dotted #222;
|
||||
}
|
||||
|
||||
#progressBar {
|
||||
background-color: #fff;
|
||||
border: 1px solid #000;
|
||||
background-color: #fff;
|
||||
border: 1px solid #000;
|
||||
}
|
||||
#progressBartop {
|
||||
background-color: #fff;
|
||||
border: 1px solid #ccc;
|
||||
background-color: #fff;
|
||||
border: 1px solid #ccc;
|
||||
}
|
||||
#percentageBar {
|
||||
background-color: #4B4545;
|
||||
background-color: #4B4545;
|
||||
}
|
||||
|
||||
|
||||
@@ -83,7 +83,7 @@ table{border-spacing:0;}
|
||||
|
||||
|
||||
|
||||
input, select {
|
||||
input, select, option {
|
||||
background-color:#232323;
|
||||
border-color:#3a3a3a;
|
||||
color:white;
|
||||
@@ -110,4 +110,4 @@ span.unselected {
|
||||
color: white;
|
||||
background-color:#333;
|
||||
border: 1px solid #555;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
<!--#include $webdir + "/inc_top.tmpl"#-->
|
||||
<script type="text/javascript" src="static/javascript/jquery.js"></script>
|
||||
<script type="text/javascript" src="static/javascript/restart.js"></script>
|
||||
<br/><br/>
|
||||
<h4 id="restarting" class="align-center">$T('wizard-restarting')</h4>
|
||||
<h4 id="complete" class="align-center success hidden">$T('wizard-complete')</h4>
|
||||
<br />
|
||||
<br/>
|
||||
<div id="tips" class="hidden">
|
||||
$T('wizard-tip1') <span class="bold">$T('wizard-tip2')</span><br/>
|
||||
<!--#set $tip3 = $T('wizard-tip3') % ''#-->
|
||||
$tip3<br/><br/>
|
||||
<div class="quoteBlock">
|
||||
<!--#set $i = 0#-->
|
||||
<!--#for $url in $urls#-->
|
||||
<!--#set $i = $i+1#-->
|
||||
<a href="$url">$url</a><!--#if $i != len($urls)#--><br /><!--#end if#-->
|
||||
<!--#end for#-->
|
||||
</div><br/>
|
||||
$T('wizard-tip4')
|
||||
<br/><br/>
|
||||
$T('wizard-tip-wiki') <a href="$helpuri">wiki</a>
|
||||
</div>
|
||||
</div>
|
||||
<hr /><br/>
|
||||
<div class="full-width">
|
||||
<table class="full-width">
|
||||
<tr class="align-center">
|
||||
<td><input type="hidden" name="session" id="apikey" value="$session"><input class="bigbutton disabled" type="button" onclick="document.location ='$access_url'" value="$T('wizard-goto')" disabled="disabled"/></td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!--#include $webdir + "/inc_bottom.tmpl"#-->
|
||||
@@ -1,39 +1,34 @@
|
||||
<!--#include $webdir + "/inc_top.tmpl"#-->
|
||||
|
||||
<form action="./four" method="post" autocomplete="off">
|
||||
<p>$T('wizard-index-explain')</p>
|
||||
<div id="serverDetails">
|
||||
<h3><a href="http://$newzbin_url" target="_blank">Newzbin2.es</a> ($T('wizard-optional'))</h3>
|
||||
<label class="label">$T('srv-username'):</label><input type="text" size="20" value="$newzbin_user" name="newzbin_user">
|
||||
<br class="clear" />
|
||||
<label class="label">$T('srv-password'):</label><input type="password" size="20" value="$newzbin_pass" name="newzbin_pass">
|
||||
<br class="clear" />
|
||||
<input type="checkbox" name="newzbin_bookmarks" id="newzbin_bookmarks" value="1" <!--#if $newzbin_bookmarks == 1 then 'checked="checked"' else ''#-->> <label for="newzbin_bookmarks">$T('wizard-index-bookmark')</label><br />
|
||||
|
||||
|
||||
<h3><a href="http://nzbmatrix.com" target="_blank">NZBMatrix.com</a> ($T('wizard-optional'))</h3>
|
||||
<label class="label">$T('srv-username'):</label><input type="text" size="20" value="$matrix_user" name="matrix_user">
|
||||
<br class="clear" />
|
||||
<label class="label">$T('opt-apikey'):</label><input type="text" size="20" value="$matrix_apikey" name="matrix_apikey">
|
||||
</div></div>
|
||||
|
||||
<script type="text/javascript" src="static/javascript/jquery.js"></script>
|
||||
<script type="text/javascript" src="static/javascript/restart.js"></script>
|
||||
<br/><br/>
|
||||
<h4 id="restarting" class="align-center">$T('wizard-restarting')</h4>
|
||||
<h4 id="complete" class="align-center success hidden">$T('wizard-complete')</h4>
|
||||
<br />
|
||||
<br/>
|
||||
<div id="tips" class="hidden">
|
||||
$T('wizard-tip1') <span class="bold">$T('wizard-tip2')</span><br/>
|
||||
<!--#set $tip3 = $T('wizard-tip3') % ''#-->
|
||||
$tip3<br/><br/>
|
||||
<div class="quoteBlock">
|
||||
<!--#set $i = 0#-->
|
||||
<!--#for $url in $urls#-->
|
||||
<!--#set $i = $i+1#-->
|
||||
<a href="$url">$url</a><!--#if $i != len($urls)#--><br /><!--#end if#-->
|
||||
<!--#end for#-->
|
||||
</div><br/>
|
||||
$T('wizard-tip4')
|
||||
<br/><br/>
|
||||
$T('wizard-tip-wiki') <a href="$helpuri">wiki</a>
|
||||
</div>
|
||||
</div>
|
||||
<hr /><br/>
|
||||
<div class="full-width">
|
||||
<table class="full-width">
|
||||
<tr>
|
||||
<td><input class="bigbutton" type="button" onclick="document.location ='./two'" value="‹ $T('wizard-previous')" /></td>
|
||||
<td>
|
||||
<div class="align-center">
|
||||
<!--#for $step in xrange($steps)#-->
|
||||
<!--#set $step = $step + 1#-->
|
||||
<span class="<!--#if $step == $number then 'selected' else 'unselected'#-->">$step</span>
|
||||
<!--#end for#-->
|
||||
</div>
|
||||
</td>
|
||||
<td class="align-right"><input class="bigbutton" type="submit" value="$T('wizard-next') »" /></td>
|
||||
<tr class="align-center">
|
||||
<td><input type="hidden" name="session" id="apikey" value="$session"><input class="bigbutton disabled" type="button" onclick="document.location ='$access_url'" value="$T('wizard-goto')" disabled="disabled"/></td>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<!--#include $webdir + "/inc_bottom.tmpl"#-->
|
||||
<!--#include $webdir + "/inc_bottom.tmpl"#-->
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 388 B |
BIN
osx/resources/sab_clicked.tiff
Normal file
BIN
osx/resources/sab_clicked.tiff
Normal file
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 902 B |
BIN
osx/resources/sab_idle.tiff
Normal file
BIN
osx/resources/sab_idle.tiff
Normal file
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 1.1 KiB |
BIN
osx/resources/sab_pause.tiff
Normal file
BIN
osx/resources/sab_pause.tiff
Normal file
Binary file not shown.
BIN
osx/unrar/unrar
BIN
osx/unrar/unrar
Binary file not shown.
@@ -382,7 +382,7 @@ if target == 'app':
|
||||
DATA_FILES = ['interfaces', 'locale', 'email', ('', glob.glob("osx/resources/*"))]
|
||||
|
||||
NZBFILE = dict(
|
||||
CFBundleTypeExtensions = [ "nzb","zip","rar" ],
|
||||
CFBundleTypeExtensions = [ "nzb" ],
|
||||
CFBundleTypeIconFile = 'nzbfile.icns',
|
||||
CFBundleTypeMIMETypes = [ "text/nzb" ],
|
||||
CFBundleTypeName = 'NZB File',
|
||||
|
||||
@@ -8,14 +8,14 @@ msgstr ""
|
||||
"Project-Id-Version: sabnzbd\n"
|
||||
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"POT-Creation-Date: 2012-04-28 12:01+0000\n"
|
||||
"PO-Revision-Date: 2011-06-26 10:50+0000\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"PO-Revision-Date: 2012-12-28 10:58+0000\n"
|
||||
"Last-Translator: Thomas Lucke (Lucky) <Unknown>\n"
|
||||
"Language-Team: German <de@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2012-04-29 05:17+0000\n"
|
||||
"X-Generator: Launchpad (build 15149)\n"
|
||||
"X-Launchpad-Export-Date: 2012-12-29 05:11+0000\n"
|
||||
"X-Generator: Launchpad (build 16378)\n"
|
||||
|
||||
#: email/email.tmpl:1
|
||||
msgid ""
|
||||
@@ -189,3 +189,24 @@ msgid ""
|
||||
"\n"
|
||||
"Bye\n"
|
||||
msgstr ""
|
||||
"## Translation by Thomas Lucke (Lucky)\n"
|
||||
"##\n"
|
||||
"## Bad URL Fetch Email template for SABnzbd\n"
|
||||
"## This a Cheetah template\n"
|
||||
"## Documentation: http://sabnzbd.wikidot.com/email-templates\n"
|
||||
"##\n"
|
||||
"## Newlines and whitespace are significant!\n"
|
||||
"##\n"
|
||||
"## These are the email headers\n"
|
||||
"To: $to\n"
|
||||
"From: $from\n"
|
||||
"Date: $date\n"
|
||||
"Subject: SABnzbd konnte eine NZB-Datei nicht herunterladen\n"
|
||||
"X-priority: 5\n"
|
||||
"X-MS-priority: 5\n"
|
||||
"## After this comes the body, the empty line is required!\n"
|
||||
"\n"
|
||||
"Hallo,\n"
|
||||
"\n"
|
||||
"SABnzbd konnte die NZB-Datei von $url nicht herrunterladen.\n"
|
||||
"Die Fehlermeldung war: $msg\n"
|
||||
|
||||
1418  po/main/SABnzbd.pot  (file diff suppressed because it is too large)
1437  po/main/da.po        (file diff suppressed because it is too large)
1445  po/main/de.po        (file diff suppressed because it is too large)
1446  po/main/es.po        (file diff suppressed because it is too large)
1454  po/main/fr.po        (file diff suppressed because it is too large)
1427  po/main/nb.po        (file diff suppressed because it is too large)
1447  po/main/nl.po        (file diff suppressed because it is too large)
1447  po/main/pl.px        (file diff suppressed because it is too large)
1468  po/main/pt_BR.po     (file diff suppressed because it is too large)
1474  po/main/ro.px        (file diff suppressed because it is too large)
1449  po/main/sv.po        (file diff suppressed because it is too large)
@@ -8,14 +8,14 @@ msgstr ""
|
||||
"Project-Id-Version: sabnzbd\n"
|
||||
"Report-Msgid-Bugs-To: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"POT-Creation-Date: 2012-08-14 18:42+0000\n"
|
||||
"PO-Revision-Date: 2012-05-03 03:22+0000\n"
|
||||
"Last-Translator: Rene <Unknown>\n"
|
||||
"PO-Revision-Date: 2012-12-28 11:02+0000\n"
|
||||
"Last-Translator: Steffen Thomsen <urskov@gmail.com>\n"
|
||||
"Language-Team: Danish <da@li.org>\n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=UTF-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
"X-Launchpad-Export-Date: 2012-08-15 05:11+0000\n"
|
||||
"X-Generator: Launchpad (build 15801)\n"
|
||||
"X-Launchpad-Export-Date: 2012-12-29 05:12+0000\n"
|
||||
"X-Generator: Launchpad (build 16378)\n"
|
||||
|
||||
#: NSIS_Installer.nsi:425
|
||||
msgid "Go to the SABnzbd Wiki"
|
||||
@@ -27,11 +27,11 @@ msgstr "Vis udgivelsesbemærkninger"
|
||||
|
||||
#: NSIS_Installer.nsi:429
|
||||
msgid "Support the project, Donate!"
|
||||
msgstr "Støtte projektet, donere!"
|
||||
msgstr "Støt projektet, donér!"
|
||||
|
||||
#: NSIS_Installer.nsi:431
|
||||
msgid "Please close \"SABnzbd.exe\" first"
|
||||
msgstr "Luk 'SABnzbd.exe' først"
|
||||
msgstr "Luk venligst \"SABnzbd.exe\" først"
|
||||
|
||||
#: NSIS_Installer.nsi:433
|
||||
msgid ""
|
||||
@@ -52,7 +52,7 @@ msgstr "Kør ved opstart"
|
||||
|
||||
#: NSIS_Installer.nsi:439
|
||||
msgid "Desktop Icon"
|
||||
msgstr "Skrivebords ikon"
|
||||
msgstr "Skrivebordsikon"
|
||||
|
||||
#: NSIS_Installer.nsi:441
|
||||
msgid "NZB File association"
|
||||
@@ -64,19 +64,19 @@ msgstr "Slet program"
|
||||
|
||||
#: NSIS_Installer.nsi:445
|
||||
msgid "Delete Settings"
|
||||
msgstr "Slet instillinger"
|
||||
msgstr "Slet indstillinger"
|
||||
|
||||
#: NSIS_Installer.nsi:447
|
||||
msgid ""
|
||||
"This system requires the Microsoft runtime library VC90 to be installed "
|
||||
"first. Do you want to do that now?"
|
||||
msgstr ""
|
||||
"Dette system kræver, at Microsoft runtime biblioteket VC90, der skal "
|
||||
"installeres først. Ønsker du at gøre det nu?"
|
||||
"Dette system kræver, at Microsoft runtime biblioteket VC90 skal installeres "
|
||||
"først. Ønsker du at gøre det nu?"
|
||||
|
||||
#: NSIS_Installer.nsi:449
|
||||
msgid "Downloading Microsoft runtime installer..."
|
||||
msgstr "Downloading Microsoft runtime installer..."
|
||||
msgstr "Downloader Microsoft runtime installationsfil..."
|
||||
|
||||
#: NSIS_Installer.nsi:451
|
||||
msgid "Download error, retry?"
|
||||
@@ -91,12 +91,13 @@ msgid ""
|
||||
"You cannot overwrite an existing installation. \\n\\nClick `OK` to remove "
|
||||
"the previous version or `Cancel` to cancel this upgrade."
|
||||
msgstr ""
|
||||
"Du kan ikke overskrive en eksisterende installation. Klik `OK` for at fjerne "
|
||||
"den tidligere version eller `Annuller` for at annullere denne opgradering."
|
||||
"Du kan ikke overskrive en eksisterende installation. \\n\\nKlik `OK` for at "
|
||||
"fjerne den tidligere version eller `Annuller` for at annullere denne "
|
||||
"opgradering."
|
||||
|
||||
#: NSIS_Installer.nsi:457
|
||||
msgid "Your settings and data will be preserved."
|
||||
msgstr "Dine indstillinger og data vil blive opbevaret."
|
||||
msgstr "Dine indstillinger og data vil blive bevaret."
|
||||
|
||||
#~ msgid "Start SABnzbd (hidden)"
|
||||
#~ msgstr "Start SABnzbd"
|
||||
|
||||
@@ -220,6 +220,13 @@ def initialize(pause_downloader = False, clean_up = False, evalSched=False, repa
|
||||
# New admin folder
|
||||
misc.remove_all(cfg.admin_dir.get_path(), '*.sab')
|
||||
|
||||
### Optionally wait for "incomplete" to become online
|
||||
if cfg.wait_for_dfolder():
|
||||
wait_for_download_folder()
|
||||
else:
|
||||
cfg.download_dir.set(cfg.download_dir(), create=True)
|
||||
cfg.download_dir.set_create(True)
|
||||
|
||||
### Set access rights for "incomplete" base folder
|
||||
misc.set_permissions(cfg.download_dir.get_path(), recursive=False)
|
||||
|
||||
@@ -302,10 +309,11 @@ def initialize(pause_downloader = False, clean_up = False, evalSched=False, repa
|
||||
PostProcessor()
|
||||
|
||||
NzbQueue()
|
||||
NzbQueue.do.read_queue(repair)
|
||||
|
||||
Assembler()
|
||||
|
||||
NzbQueue.do.read_queue(repair)
|
||||
|
||||
Downloader(pause_downloader or paused)
|
||||
|
||||
DirScanner()
|
||||
@@ -616,7 +624,7 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT
|
||||
logging.info("Traceback: ", exc_info = True)
|
||||
|
||||
if ext.lower() in ('.zip', '.rar'):
|
||||
return ProcessArchiveFile(filename, path, pp, script, cat, priority=priority)
|
||||
return ProcessArchiveFile(filename, path, pp, script, cat, priority=priority, nzbname=nzbname)
|
||||
else:
|
||||
return ProcessSingleFile(filename, path, pp, script, cat, priority=priority, nzbname=nzbname, keep=keep, reuse=reuse)
|
||||
|
||||
@@ -1037,6 +1045,9 @@ def check_all_tasks():
|
||||
# Check one-shot pause
|
||||
sabnzbd.scheduler.pause_check()
|
||||
|
||||
# Check (and terminate) idle jobs
|
||||
sabnzbd.nzbqueue.NzbQueue.do.stop_idle_jobs()
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -1071,6 +1082,13 @@ def check_incomplete_vs_complete():
|
||||
cfg.download_dir.set('incomplete')
|
||||
|
||||
|
||||
def wait_for_download_folder():
|
||||
""" Wait for download folder to become available """
|
||||
while not cfg.download_dir.test_path():
|
||||
logging.debug('Waiting for "incomplete" folder')
|
||||
time.sleep(2.0)
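
The wait_for_dfolder path defers folder creation: instead of creating the "incomplete" folder, initialize() blocks in wait_for_download_folder() until the folder shows up, which suits external or network-mounted drives that are slow to appear. A minimal standalone sketch of the same polling idea; the function name, timeout handling and example path are illustrative, not SABnzbd's API:

import os
import time
import logging

def wait_for_folder(path, poll_interval=2.0, timeout=None):
    """Block until `path` exists, checking every `poll_interval` seconds.
    Returns True once the folder appears, False if `timeout` seconds pass first."""
    start = time.time()
    while not os.path.isdir(path):
        logging.debug('Waiting for folder %s', path)
        if timeout is not None and time.time() - start > timeout:
            return False
        time.sleep(poll_interval)
    return True

# Example: wait up to a minute for an external drive to be mounted
# if not wait_for_folder('/mnt/usenet/incomplete', timeout=60):
#     raise SystemExit('Download folder never became available')
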
|
||||
|
||||
|
||||
# Required wrapper because nzbstuff.py cannot import downloader.py
|
||||
def active_primaries():
|
||||
return sabnzbd.downloader.Downloader.do.active_primaries()
|
||||
|
||||
@@ -52,7 +52,7 @@ from sabnzbd.utils.json import JsonWriter
|
||||
from sabnzbd.utils.pathbrowser import folders_at_path
|
||||
from sabnzbd.misc import loadavg, to_units, diskfree, disktotal, get_ext, \
|
||||
get_filename, int_conv, globber, time_format, remove_all, \
|
||||
starts_with_path
|
||||
starts_with_path, cat_convert
|
||||
from sabnzbd.encoding import xml_name, unicoder, special_fixer, platform_encode, html_escape
|
||||
from sabnzbd.postproc import PostProcessor
|
||||
from sabnzbd.articlecache import ArticleCache
|
||||
@@ -293,9 +293,11 @@ def _api_addfile(name, output, kwargs):
|
||||
#Side effect of next line is that attribute .value is created
|
||||
#which is needed to make add_nzbfile() work
|
||||
size = name.length
|
||||
else:
|
||||
elif hasattr(name, 'value'):
|
||||
size = len(name.value)
|
||||
if name is not None and name.filename and size:
|
||||
else:
|
||||
size = 0
|
||||
if name is not None and size and name.filename:
|
||||
cat = kwargs.get('cat')
|
||||
xcat = kwargs.get('xcat')
|
||||
if not cat and xcat:
|
||||
@@ -344,7 +346,7 @@ def _api_addlocalfile(name, output, kwargs):
|
||||
|
||||
if get_ext(name) in ('.zip', '.rar'):
|
||||
res = sabnzbd.dirscanner.ProcessArchiveFile(\
|
||||
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True)
|
||||
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname)
|
||||
elif get_ext(name) in ('.nzb', '.gz'):
|
||||
res = sabnzbd.dirscanner.ProcessSingleFile(\
|
||||
fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname)
|
||||
@@ -432,10 +434,10 @@ def _api_history(name, output, kwargs):
|
||||
history_db = cherrypy.thread_data.history_db
|
||||
if special in ('all', 'failed'):
|
||||
if del_files:
|
||||
del_job_files(history_db.get_failed_paths())
|
||||
history_db.remove_failed()
|
||||
del_job_files(history_db.get_failed_paths(search))
|
||||
history_db.remove_failed(search)
|
||||
if special in ('all', 'completed'):
|
||||
history_db.remove_completed()
|
||||
history_db.remove_completed(search)
|
||||
return report(output)
|
||||
elif value:
|
||||
jobs = value.split(',')
|
||||
@@ -556,10 +558,13 @@ def _api_auth(name, output, kwargs):
|
||||
if not cfg.disable_key():
|
||||
auth = 'badkey'
|
||||
key = kwargs.get('key', '')
|
||||
if key == cfg.nzb_key():
|
||||
auth = 'nzbkey'
|
||||
if key == cfg.api_key():
|
||||
if not key:
|
||||
auth = 'apikey'
|
||||
else:
|
||||
if key == cfg.nzb_key():
|
||||
auth = 'nzbkey'
|
||||
if key == cfg.api_key():
|
||||
auth = 'apikey'
|
||||
elif cfg.username() and cfg.password():
|
||||
auth = 'login'
|
||||
return report(output, keyword='auth', data=auth)
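
The change above makes mode=auth more precise: with the API key enabled, an empty key is answered with 'apikey' (a key is required), while a supplied key is graded as 'badkey', 'nzbkey' or 'apikey'; only when keys are disabled does the answer fall back to 'login' for username/password setups. A standalone sketch of the same ladder, with all inputs passed in explicitly (illustrative helper, not SABnzbd's API):

def resolve_auth_level(key, nzb_key, api_key, key_required, have_login):
    """Return which credential an API client needs, mirroring the ladder above."""
    if key_required:
        if not key:
            return 'apikey'          # a key is mandatory but none was given
        level = 'badkey'
        if key == nzb_key:
            level = 'nzbkey'         # enough for adding NZBs
        if key == api_key:
            level = 'apikey'         # full access
        return level
    if have_login:
        return 'login'               # fall back to username/password
    return 'none'

# resolve_auth_level('', 'n1', 'a1', True, True)   -> 'apikey'
# resolve_auth_level('n1', 'n1', 'a1', True, True) -> 'nzbkey'
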
|
||||
@@ -1102,7 +1107,7 @@ def build_queue(web_dir=None, root=None, verbose=False, prim=True, webdir='', ve
|
||||
slot['mbdone_fmt'] = locale.format('%d', int(mb-mbleft), True)
|
||||
slot['size'] = format_bytes(bytes)
|
||||
slot['sizeleft'] = format_bytes(bytesleft)
|
||||
if not Downloader.do.paused and status != 'Paused' and status != 'Fetching' and not found_active:
|
||||
if not Downloader.do.paused and status not in (Status.PAUSED, Status.FETCHING) and not found_active:
|
||||
if status == Status.CHECKING:
|
||||
slot['status'] = Status.CHECKING
|
||||
else:
|
||||
@@ -1126,7 +1131,7 @@ def build_queue(web_dir=None, root=None, verbose=False, prim=True, webdir='', ve
|
||||
slot['percentage'] = "%s" % (int(((mb-mbleft) / mb) * 100))
|
||||
slot['missing'] = missing
|
||||
|
||||
if status in (Status.PAUSED, Status.CHECKING):
|
||||
if Downloader.do.paused or Downloader.do.postproc or status not in (Status.DOWNLOADING, Status.QUEUED):
|
||||
slot['timeleft'] = '0:00:00'
|
||||
slot['eta'] = 'unknown'
|
||||
else:
|
||||
@@ -1539,7 +1544,8 @@ def build_header(prim, webdir=''):
|
||||
if not color:
|
||||
color = ''
|
||||
|
||||
header = { 'T': Ttemplate, 'Tspec': Tspec, 'Tx' : Ttemplate, 'version':sabnzbd.__version__, 'paused': Downloader.do.paused,
|
||||
header = { 'T': Ttemplate, 'Tspec': Tspec, 'Tx' : Ttemplate, 'version':sabnzbd.__version__,
|
||||
'paused': Downloader.do.paused or Downloader.do.postproc,
|
||||
'pause_int': scheduler.pause_int(), 'paused_all': sabnzbd.PAUSED_ALL,
|
||||
'uptime':uptime, 'color_scheme':color }
|
||||
speed_limit = Downloader.do.get_limit()
|
||||
@@ -1590,13 +1596,13 @@ def build_header(prim, webdir=''):
|
||||
header['left_quota'] = to_units(BPSMeter.do.left)
|
||||
|
||||
status = ''
|
||||
if Downloader.do.paused:
|
||||
if Downloader.do.paused or Downloader.do.postproc:
|
||||
status = Status.PAUSED
|
||||
elif bytespersec > 0:
|
||||
status = Status.DOWNLOADING
|
||||
else:
|
||||
status = 'Idle'
|
||||
header['status'] = "%s" % status
|
||||
header['status'] = status
|
||||
|
||||
anfo = ArticleCache.do.cache_info()
|
||||
|
||||
|
||||
@@ -112,8 +112,14 @@ class Assembler(Thread):
|
||||
logging.debug('Got md5pack for set %s', setname)
|
||||
|
||||
if check_encrypted_rar(nzo, filepath):
|
||||
logging.warning(Ta('WARNING: Paused job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
|
||||
nzo.pause()
|
||||
if cfg.pause_on_pwrar() == 1:
|
||||
logging.warning(Ta('WARNING: Paused job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
|
||||
nzo.pause()
|
||||
else:
|
||||
logging.warning(Ta('WARNING: Aborted job "%s" because of encrypted RAR file'), latin1(nzo.final_name))
|
||||
nzo.fail_msg = T('Aborted, encryption detected')
|
||||
import sabnzbd.nzbqueue
|
||||
sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo)
|
||||
nzf.completed = True
|
||||
else:
|
||||
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False)
|
||||
@@ -139,7 +145,7 @@ def _assemble(nzf, path, dupe):
|
||||
decodetable = nzf.decodetable
|
||||
|
||||
for articlenum in decodetable:
|
||||
sleep(0.01)
|
||||
sleep(0.001)
|
||||
article = decodetable[articlenum]
|
||||
|
||||
data = ArticleCache.do.load_article(article)
|
||||
@@ -294,11 +300,11 @@ def is_cloaked(path, names):
|
||||
def check_encrypted_rar(nzo, filepath):
|
||||
""" Check if file is rar and is encrypted """
|
||||
encrypted = False
|
||||
if not nzo.password and cfg.pause_on_pwrar() and is_rarfile(filepath):
|
||||
if not nzo.password and not nzo.meta.get('password') and cfg.pause_on_pwrar() and is_rarfile(filepath):
|
||||
try:
|
||||
zf = RarFile(filepath, all_names=True)
|
||||
encrypted = zf.encrypted or is_cloaked(filepath, zf.namelist())
|
||||
if encrypted and int(nzo.encrypted) < 2:
|
||||
if encrypted and int(nzo.encrypted) < 2 and not nzo.reuse:
|
||||
nzo.encrypted = 1
|
||||
else:
|
||||
encrypted = False
|
||||
|
||||
@@ -80,6 +80,7 @@ email_dir = OptionDir('misc', 'email_dir', create=True)
|
||||
email_rss = OptionBool('misc', 'email_rss', False)
|
||||
|
||||
version_check = OptionNumber('misc', 'check_new_rel', 1)
|
||||
news_items = OptionBool('misc', 'news_items', True)
|
||||
autobrowser = OptionBool('misc', 'auto_browser', True)
|
||||
replace_illegal = OptionBool('misc', 'replace_illegal', True)
|
||||
pre_script = OptionStr('misc', 'pre_script', 'None')
|
||||
@@ -102,6 +103,7 @@ par2_multicore = OptionBool('misc', 'par2_multicore', True)
|
||||
allow_64bit_tools = OptionBool('misc', 'allow_64bit_tools', True)
|
||||
allow_streaming = OptionBool('misc', 'allow_streaming', False)
|
||||
pre_check = OptionBool('misc', 'pre_check', False)
|
||||
fail_hopeless = OptionBool('misc', 'fail_hopeless', False)
|
||||
req_completion_rate = OptionNumber('misc', 'req_completion_rate', 100.2, 100, 200)
|
||||
|
||||
newzbin_username = OptionStr('newzbin', 'username')
|
||||
@@ -125,8 +127,10 @@ auto_sort = OptionBool('misc', 'auto_sort', False)
|
||||
folder_rename = OptionBool('misc', 'folder_rename', True)
|
||||
folder_max_length = OptionNumber('misc', 'folder_max_length', DEF_FOLDER_MAX, 20, 65000)
|
||||
pause_on_pwrar = OptionBool('misc', 'pause_on_pwrar', True)
|
||||
prio_sort_list = OptionList('misc', 'prio_sort_list')
|
||||
|
||||
safe_postproc = OptionBool('misc', 'safe_postproc', True)
|
||||
empty_postproc = OptionBool('misc', 'empty_postproc', False)
|
||||
pause_on_post_processing = OptionBool('misc', 'pause_on_post_processing', False)
|
||||
ampm = OptionBool('misc', 'ampm', False)
|
||||
rss_filenames = OptionBool('misc', 'rss_filenames', False)
|
||||
@@ -159,7 +163,7 @@ xxx_apikey = OptionStr('nzbxxx', 'apikey')
|
||||
configlock = OptionBool('misc', 'config_lock', 0)
|
||||
|
||||
umask = OptionStr('misc', 'permissions', '', validation=validate_octal)
|
||||
download_dir = OptionDir('misc', 'download_dir', DEF_DOWNLOAD_DIR, validation=validate_safedir)
|
||||
download_dir = OptionDir('misc', 'download_dir', DEF_DOWNLOAD_DIR, create=False, validation=validate_safedir)
|
||||
download_free = OptionStr('misc', 'download_free')
|
||||
complete_dir = OptionDir('misc', 'complete_dir', DEF_COMPLETE_DIR, create=False, \
|
||||
apply_umask=True, validation=validate_notempty)
|
||||
@@ -173,6 +177,8 @@ dirscan_speed = OptionNumber('misc', 'dirscan_speed', DEF_SCANRATE, 0, 3600)
|
||||
size_limit = OptionStr('misc', 'size_limit', '0')
|
||||
password_file = OptionDir('misc', 'password_file', '', create=False)
|
||||
fsys_type = OptionNumber('misc', 'fsys_type', 0, 0, 2)
|
||||
wait_for_dfolder = OptionBool('misc', 'wait_for_dfolder', False)
|
||||
warn_empty_nzb = OptionBool('misc', 'warn_empty_nzb', True)
|
||||
|
||||
cherryhost = OptionStr('misc', 'host', DEF_HOST)
|
||||
if sabnzbd.WIN32:
|
||||
@@ -215,6 +221,7 @@ ssl_type = OptionStr('misc', 'ssl_type', 'v23')
|
||||
unpack_check = OptionBool('misc', 'unpack_check', True)
|
||||
no_penalties = OptionBool('misc', 'no_penalties', False)
|
||||
randomize_server_ip = OptionBool('misc', 'randomize_server_ip', False)
|
||||
ipv6_servers = OptionNumber('misc', 'ipv6_servers', 1, 0, 2)
|
||||
|
||||
# Internal options, not saved in INI file
|
||||
debug_delay = OptionNumber('misc', 'debug_delay', 0, add=False)
|
||||
@@ -223,6 +230,7 @@ api_key = OptionStr('misc', 'api_key', create_api_key())
|
||||
nzb_key = OptionStr('misc', 'nzb_key', create_api_key())
|
||||
disable_key = OptionBool('misc', 'disable_api_key', False)
|
||||
api_warnings = OptionBool('misc', 'api_warnings', True)
|
||||
local_range = OptionStr('misc', 'local_range')
|
||||
max_art_tries = OptionNumber('misc', 'max_art_tries', 3, 2)
|
||||
max_art_opt = OptionBool('misc', 'max_art_opt', False)
|
||||
use_pickle = OptionBool('misc', 'use_pickle', False)
|
||||
@@ -250,6 +258,7 @@ marker_file = OptionStr('misc', 'nomedia_marker', '')
|
||||
wait_ext_drive = OptionNumber('misc', 'wait_ext_drive', 5, 1, 60)
|
||||
history_limit = OptionNumber('misc', 'history_limit', 50, 0)
|
||||
show_sysload = OptionNumber('misc', 'show_sysload', 2, 0, 2)
|
||||
web_watchdog = OptionBool('misc', 'web_watchdog', False)
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Set root folders for Folder config-items
|
||||
|
||||
@@ -186,6 +186,14 @@ class OptionDir(Option):
|
||||
res, path = sabnzbd.misc.create_real_path(self.ident()[1], self.__root, value, self.__apply_umask)
|
||||
return path
|
||||
|
||||
def test_path(self):
|
||||
""" Return True if path exists """
|
||||
value = self.get()
|
||||
if value:
|
||||
return os.path.exists(sabnzbd.misc.real_path(self.__root, value))
|
||||
else:
|
||||
return False
|
||||
|
||||
def set_root(self, root):
|
||||
""" Set new root, is assumed to be valid """
|
||||
self.__root = root
|
||||
@@ -210,6 +218,10 @@ class OptionDir(Option):
|
||||
self._Option__set(value)
|
||||
return error
|
||||
|
||||
def set_create(self, value):
|
||||
""" Set auto-creation value """
|
||||
self.__create = value
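
OptionDir grows two helpers here: test_path() reports whether the configured folder currently exists without creating it, and set_create() lets callers toggle auto-creation at runtime, which is what the wait_for_dfolder start-up path relies on. A stripped-down illustration of the same pattern, independent of SABnzbd's config machinery (class and attribute names are made up):

import os

class DirOption(object):
    """Minimal directory option: a value stored relative to a root folder.
    Illustrative only; the real OptionDir also handles validation and umask."""

    def __init__(self, root, value='', create=True):
        self._root = root
        self._value = value
        self._create = create

    def get_path(self):
        path = os.path.join(self._root, self._value)
        if self._create and not os.path.isdir(path):
            os.makedirs(path)
        return path

    def test_path(self):
        """True if the folder exists right now, without creating it."""
        return bool(self._value) and os.path.isdir(os.path.join(self._root, self._value))

    def set_create(self, value):
        """Enable/disable auto-creation, e.g. while waiting for an external drive."""
        self._create = bool(value)
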
|
||||
|
||||
|
||||
class OptionList(Option):
|
||||
""" List option class """
|
||||
@@ -224,7 +236,10 @@ class OptionList(Option):
|
||||
error = None
|
||||
if value is not None:
|
||||
if not isinstance(value, list):
|
||||
value = listquote.simplelist(value)
|
||||
if '"' not in value and ',' not in value:
|
||||
value = value.split()
|
||||
else:
|
||||
value = listquote.simplelist(value)
|
||||
if self.__validation:
|
||||
error, value = self.__validation(value)
|
||||
if not error:
|
||||
|
||||
@@ -43,6 +43,7 @@ PNFO_MISSING_FIELD = 18
|
||||
QNFO_BYTES_FIELD = 0
|
||||
QNFO_BYTES_LEFT_FIELD = 1
|
||||
QNFO_PNFO_LIST_FIELD = 2
|
||||
QNFO_Q_SIZE_LIST_FIELD = 3
|
||||
|
||||
ANFO_ARTICLE_SUM_FIELD = 0
|
||||
ANFO_CACHE_SIZE_FIELD = 1
|
||||
@@ -64,6 +65,7 @@ FUTURE_Q_FOLDER = 'future'
|
||||
JOB_ADMIN = '__ADMIN__'
|
||||
VERIFIED_FILE = '__verified__'
|
||||
QCHECK_FILE = '__skip_qcheck__'
|
||||
RENAMES_FILE = '__renames__'
|
||||
ATTRIB_FILE = 'SABnzbd_attrib'
|
||||
REPAIR_REQUEST = 'repair-all.sab'
|
||||
|
||||
|
||||
@@ -51,6 +51,27 @@ def get_history_handle():
|
||||
return HistoryDB(_HISTORY_DB)
|
||||
|
||||
|
||||
def convert_search(search):
|
||||
""" Convert classic wildcard to SQL wildcard """
|
||||
if not search:
|
||||
# Default value
|
||||
search = ''
|
||||
else:
|
||||
# Allow * for wildcard matching and space
|
||||
search = search.replace('*','%').replace(' ', '%')
|
||||
|
||||
# Allow ^ for start of string and $ for end of string
|
||||
if search and search.startswith('^'):
|
||||
search = search.replace('^','')
|
||||
search += '%'
|
||||
elif search and search.endswith('$'):
|
||||
search = search.replace('$','')
|
||||
search = '%' + search
|
||||
else:
|
||||
search = '%' + search + '%'
|
||||
return search
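
convert_search() centralizes the translation of the history search box into an SQL LIKE pattern: '*' and spaces become '%', a leading '^' anchors the pattern to the start of the job name, a trailing '$' anchors it to the end, and anything else is wrapped in '%...%' for substring matching. A quick runnable check of those rules (the conversion is re-implemented here only so the examples stand alone; the real function is the one above):

def _convert(search):
    search = (search or '').replace('*', '%').replace(' ', '%')
    if search.startswith('^'):
        return search.lstrip('^') + '%'
    if search.endswith('$'):
        return '%' + search.rstrip('$')
    return '%' + search + '%'

assert _convert('') == '%%'                    # no filter: matches every name
assert _convert('ubuntu iso') == '%ubuntu%iso%'
assert _convert('^ubuntu') == 'ubuntu%'        # anchored to the start
assert _convert('x264$') == '%x264'            # anchored to the end
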
|
||||
|
||||
|
||||
# Note: Add support for execute return values
|
||||
|
||||
class HistoryDB(object):
|
||||
@@ -141,19 +162,22 @@ class HistoryDB(object):
|
||||
logging.error(Ta('Failed to close database, see log'))
|
||||
logging.info("Traceback: ", exc_info = True)
|
||||
|
||||
def remove_completed(self):
|
||||
return self.execute("""DELETE FROM history WHERE status = 'Completed'""", save=True)
|
||||
def remove_completed(self, search=None):
|
||||
search = convert_search(search)
|
||||
return self.execute("""DELETE FROM history WHERE name LIKE ? AND status = 'Completed'""", (search,), save=True)
|
||||
|
||||
def get_failed_paths(self):
|
||||
def get_failed_paths(self, search=None):
|
||||
""" Return list of all storage paths of failed jobs (may contain non-existing or empty paths) """
|
||||
fetch_ok = self.execute("""SELECT path FROM history WHERE status = 'Failed'""")
|
||||
search = convert_search(search)
|
||||
fetch_ok = self.execute("""SELECT path FROM history WHERE name LIKE ? AND status = 'Failed'""", (search,))
|
||||
if fetch_ok:
|
||||
return [item.get('path') for item in self.c.fetchall()]
|
||||
else:
|
||||
return []
|
||||
|
||||
def remove_failed(self):
|
||||
return self.execute("""DELETE FROM history WHERE status = 'Failed'""", save=True)
|
||||
def remove_failed(self, search=None):
|
||||
search = convert_search(search)
|
||||
return self.execute("""DELETE FROM history WHERE name LIKE ? AND status = 'Failed'""", (search,), save=True)
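
With the search argument added to remove_completed(), get_failed_paths() and remove_failed(), a "delete all" issued from a filtered history view only touches the matching rows, and the LIKE pattern travels as a bound parameter instead of being pasted into the SQL string. A tiny self-contained sqlite3 illustration of that parameterized delete (table contents are made up):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE history (name TEXT, status TEXT)")
con.executemany("INSERT INTO history VALUES (?, ?)",
                [('ubuntu-12.10-x64', 'Completed'),
                 ('fedora-18-x64', 'Completed'),
                 ('ubuntu-12.10-src', 'Failed')])

# Delete only the completed jobs whose name matches the (converted) search pattern
pattern = '%ubuntu%'
con.execute("DELETE FROM history WHERE name LIKE ? AND status = 'Completed'", (pattern,))

print([row[0] for row in con.execute("SELECT name FROM history")])
# -> ['fedora-18-x64', 'ubuntu-12.10-src']
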
|
||||
|
||||
def remove_history(self, jobs=None):
|
||||
if jobs is None:
|
||||
@@ -180,22 +204,7 @@ class HistoryDB(object):
|
||||
|
||||
def fetch_history(self, start=None, limit=None, search=None, failed_only=0):
|
||||
|
||||
if not search:
|
||||
# Default value
|
||||
search = ''
|
||||
else:
|
||||
# Allow * for wildcard matching and space
|
||||
search = search.replace('*','%').replace(' ', '%')
|
||||
|
||||
# Allow ^ for start of string and $ for end of string
|
||||
if search and search.startswith('^'):
|
||||
search = search.replace('^','')
|
||||
search += '%'
|
||||
elif search and search.endswith('$'):
|
||||
search = search.replace('$','')
|
||||
search = '%' + search
|
||||
else:
|
||||
search = '%' + search + '%'
|
||||
search = convert_search(search)
|
||||
|
||||
# Get the number of results
|
||||
if failed_only:
|
||||
|
||||
@@ -23,6 +23,7 @@ import Queue
|
||||
import binascii
|
||||
import logging
|
||||
import re
|
||||
from time import sleep
|
||||
from threading import Thread
|
||||
try:
|
||||
import _yenc
|
||||
@@ -72,6 +73,7 @@ class Decoder(Thread):
|
||||
def run(self):
|
||||
from sabnzbd.nzbqueue import NzbQueue
|
||||
while 1:
|
||||
sleep(0.001)
|
||||
art_tup = self.queue.get()
|
||||
if not art_tup:
|
||||
break
|
||||
@@ -82,6 +84,8 @@ class Decoder(Thread):
|
||||
article, lines = art_tup
|
||||
nzf = article.nzf
|
||||
nzo = nzf.nzo
|
||||
art_id = article.article
|
||||
killed = False
|
||||
|
||||
data = None
|
||||
|
||||
@@ -94,13 +98,13 @@ class Decoder(Thread):
|
||||
if nzo.precheck:
|
||||
raise BadYenc
|
||||
register = True
|
||||
logging.debug("Decoding %s", article)
|
||||
logging.debug("Decoding %s", art_id)
|
||||
|
||||
data = decode(article, lines)
|
||||
nzf.article_count += 1
|
||||
found = True
|
||||
except IOError, e:
|
||||
logme = Ta('Decoding %s failed') % article
|
||||
logme = Ta('Decoding %s failed') % art_id
|
||||
logging.info(logme)
|
||||
sabnzbd.downloader.Downloader.do.pause()
|
||||
|
||||
@@ -111,7 +115,7 @@ class Decoder(Thread):
|
||||
register = False
|
||||
|
||||
except CrcError, e:
|
||||
logme = Ta('CRC Error in %s (%s -> %s)') % (article, e.needcrc, e.gotcrc)
|
||||
logme = Ta('CRC Error in %s (%s -> %s)') % (art_id, e.needcrc, e.gotcrc)
|
||||
logging.info(logme)
|
||||
|
||||
data = e.data
|
||||
@@ -133,27 +137,32 @@ class Decoder(Thread):
|
||||
# Examine headers (for precheck) or body (for download)
|
||||
# And look for DMCA clues (while skipping "X-" headers)
|
||||
for line in lines:
|
||||
if not line.startswith('X-') and match_str(line.lower(), ('dmca', 'removed', 'cancel', 'blocked')):
|
||||
logging.info('Article removed from server (%s)', article)
|
||||
lline = line.lower()
|
||||
if 'message-id:' in lline:
|
||||
found = True
|
||||
if not line.startswith('X-') and match_str(lline, ('dmca', 'removed', 'cancel', 'blocked')):
|
||||
killed = True
|
||||
break
|
||||
if killed:
|
||||
logme = 'Article removed from server (%s)'
|
||||
logging.info(logme, art_id)
|
||||
if nzo.precheck:
|
||||
if found or not killed:
|
||||
if found and not killed:
|
||||
# Pre-check, proper article found, just register
|
||||
logging.debug('Server has article %s', article)
|
||||
logging.debug('Server has article %s', art_id)
|
||||
register = True
|
||||
elif not killed and not found:
|
||||
logme = Ta('Badly formed yEnc article in %s') % article
|
||||
logme = Ta('Badly formed yEnc article in %s') % art_id
|
||||
logging.info(logme)
|
||||
|
||||
if not found:
|
||||
if not found or killed:
|
||||
new_server_found = self.__search_new_server(article)
|
||||
if new_server_found:
|
||||
register = False
|
||||
logme = None
|
||||
|
||||
except:
|
||||
logme = Ta('Unknown Error while decoding %s') % article
|
||||
logme = Ta('Unknown Error while decoding %s') % art_id
|
||||
logging.info(logme)
|
||||
logging.info("Traceback: ", exc_info = True)
|
||||
|
||||
@@ -163,7 +172,10 @@ class Decoder(Thread):
|
||||
logme = None
|
||||
|
||||
if logme:
|
||||
article.nzf.nzo.inc_log('bad_art_log', logme)
|
||||
if killed:
|
||||
article.nzf.nzo.inc_log('killed_art_log', art_id)
|
||||
else:
|
||||
article.nzf.nzo.inc_log('bad_art_log', art_id)
|
||||
|
||||
else:
|
||||
new_server_found = self.__search_new_server(article)
|
||||
@@ -234,7 +246,7 @@ def decode(article, data):
|
||||
if not ybegin:
|
||||
found = False
|
||||
try:
|
||||
for i in xrange(10):
|
||||
for i in xrange(min(40, len(data))):
|
||||
if data[i].startswith('begin '):
|
||||
nzf.filename = name_fixer(data[i].split(None, 2)[2])
|
||||
nzf.type = 'uu'
|
||||
@@ -298,7 +310,7 @@ def yCheck(data):
|
||||
yend = None
|
||||
|
||||
## Check head
|
||||
for i in xrange(40):
|
||||
for i in xrange(min(40, len(data))):
|
||||
try:
|
||||
if data[i].startswith('=ybegin '):
|
||||
splits = 3
|
||||
|
||||
@@ -59,7 +59,8 @@ def CompareStat(tup1, tup2):
|
||||
return True
|
||||
|
||||
|
||||
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False, priority=None, url=''):
|
||||
def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=None, keep=False,
|
||||
priority=None, url='', nzbname=None):
|
||||
""" Analyse ZIP file and create job(s).
|
||||
Accepts ZIP files with ONLY nzb/nfo/folder files in it.
|
||||
returns (status, nzo_ids)
|
||||
@@ -88,6 +89,7 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
|
||||
status = 1
|
||||
names = zf.namelist()
|
||||
names.sort()
|
||||
nzbcount = 0
|
||||
for name in names:
|
||||
name = name.lower()
|
||||
if not (name.endswith('.nzb') or name.endswith('.nfo') or name.endswith('/')):
|
||||
@@ -95,7 +97,10 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
|
||||
break
|
||||
elif name.endswith('.nzb'):
|
||||
status = 0
|
||||
nzbcount += 1
|
||||
if status == 0:
|
||||
if nzbcount != 1:
|
||||
nzbname = None
|
||||
for name in names:
|
||||
if name.lower().endswith('.nzb'):
|
||||
try:
|
||||
@@ -108,7 +113,8 @@ def ProcessArchiveFile(filename, path, pp=None, script=None, cat=None, catdir=No
|
||||
name = misc.sanitize_foldername(name)
|
||||
if data:
|
||||
try:
|
||||
nzo = nzbstuff.NzbObject(name, 0, pp, script, data, cat=cat, url=url, priority=priority)
|
||||
nzo = nzbstuff.NzbObject(name, 0, pp, script, data, cat=cat, url=url,
|
||||
priority=priority, nzbname=nzbname)
|
||||
except:
|
||||
nzo = None
|
||||
if nzo:
|
||||
@@ -171,6 +177,9 @@ def ProcessSingleFile(filename, path, pp=None, script=None, cat=None, catdir=Non
|
||||
except TypeError:
|
||||
# Duplicate, ignore
|
||||
nzo = None
|
||||
except ValueError:
|
||||
# Empty, but correct file
|
||||
return -1, nzo_ids
|
||||
except:
|
||||
if data.find("<nzb") >= 0 and data.find("</nzb") < 0:
|
||||
# Looks like an incomplete file, retry
|
||||
|
||||
@@ -77,7 +77,8 @@ class Server(object):
|
||||
self.warning = ''
|
||||
self.info = None # Will hold getaddrinfo() list
|
||||
self.request = False # True if a getaddrinfo() request is pending
|
||||
self.oddball = 'free.xsusenet.com' in host
|
||||
self.have_body = 'free.xsusenet.com' not in host
|
||||
self.have_stat = True # Assume server has "STAT", until proven otherwise
|
||||
|
||||
for i in range(threads):
|
||||
self.idle_threads.append(NewsWrapper(self, i+1))
|
||||
@@ -599,6 +600,19 @@ class Downloader(Thread):
|
||||
msg = T('Server %s requires user/password') % ('%s:%s' % (nw.server.host, nw.server.port))
|
||||
self.__reset_nw(nw, msg, quit=True)
|
||||
|
||||
elif code == '500':
|
||||
if nzo.precheck:
|
||||
# Assume "STAT" command is not supported
|
||||
server.have_stat = False
|
||||
logging.debug('Server %s does not support STAT', server.host)
|
||||
else:
|
||||
# Assume "BODY" command is not supported
|
||||
server.have_body = False
|
||||
logging.debug('Server %s does not support BODY', server.host)
|
||||
nw.lines = []
|
||||
nw.data = ''
|
||||
self.__request_article(nw)
|
||||
|
||||
if done:
|
||||
server.bad_cons = 0 # Successful data, clear "bad" counter
|
||||
if sabnzbd.LOG_ALL:
|
||||
|
||||
@@ -84,6 +84,12 @@ def check_server(host, port):
|
||||
return badParameterResponse(T('Server address "%s:%s" is not valid.') % (host, port))
|
||||
|
||||
|
||||
def check_access():
|
||||
""" Check if external address is allowed """
|
||||
referrer = cherrypy.request.remote.ip
|
||||
return referrer in ('127.0.0.1', '::1') or referrer.startswith(cfg.local_range())
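
check_access() is the new gatekeeper used by the API and the web pages: a request counts as local when it comes from loopback or when its address starts with the configured local_range prefix. Note this is a plain string-prefix test (e.g. '192.168.1.'), not a CIDR mask, and an empty local_range leaves access unrestricted because every address starts with the empty string. A minimal mirror of the test outside CherryPy (illustrative helper name):

def is_local_request(remote_ip, local_range=''):
    """Prefix test as above; empty local_range means no restriction at all."""
    return remote_ip in ('127.0.0.1', '::1') or remote_ip.startswith(local_range)

# is_local_request('192.168.1.42', '192.168.1.')  -> True
# is_local_request('10.0.0.5', '192.168.1.')      -> False
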
|
||||
|
||||
|
||||
def ConvertSpecials(p):
|
||||
""" Convert None to 'None' and 'Default' to ''
|
||||
"""
|
||||
@@ -158,6 +164,8 @@ def set_auth(conf):
|
||||
|
||||
def check_session(kwargs):
|
||||
""" Check session key """
|
||||
if not check_access():
|
||||
return u'No access'
|
||||
key = kwargs.get('session')
|
||||
if not key:
|
||||
key = kwargs.get('apikey')
|
||||
@@ -176,6 +184,10 @@ def check_apikey(kwargs, nokey=False):
|
||||
""" Check api key or nzbkey
|
||||
Return None when OK, otherwise an error message
|
||||
"""
|
||||
def log_warning(txt):
|
||||
txt = '%s %s' % (txt, cherrypy.request.headers.get('User-Agent', '??'))
|
||||
logging.warning('%s', txt)
|
||||
|
||||
output = kwargs.get('output')
|
||||
mode = kwargs.get('mode', '')
|
||||
callback = kwargs.get('callback')
|
||||
@@ -188,19 +200,22 @@ def check_apikey(kwargs, nokey=False):
|
||||
# For NZB upload calls, a separate key can be used
|
||||
nzbkey = kwargs.get('mode', '') in ('addid', 'addurl', 'addfile', 'addlocalfile')
|
||||
|
||||
if not nzbkey and not check_access():
|
||||
return report(output, 'No access')
|
||||
|
||||
# First check APIKEY, if OK that's sufficient
|
||||
if not (cfg.disable_key() or nokey):
|
||||
key = kwargs.get('apikey')
|
||||
if not key:
|
||||
if not special:
|
||||
logging.warning(Ta('API Key missing, please enter the api key from Config->General into your 3rd party program:'))
|
||||
log_warning(Ta('API Key missing, please enter the api key from Config->General into your 3rd party program:'))
|
||||
return report(output, 'API Key Required', callback=callback)
|
||||
elif nzbkey and key == cfg.nzb_key():
|
||||
return None
|
||||
elif key == cfg.api_key():
|
||||
return None
|
||||
else:
|
||||
logging.warning(Ta('API Key incorrect, Use the api key from Config->General in your 3rd party program:'))
|
||||
log_warning(Ta('API Key incorrect, Use the api key from Config->General in your 3rd party program:'))
|
||||
return report(output, 'API Key Incorrect', callback=callback)
|
||||
|
||||
# No active APIKEY, check web credentials instead
|
||||
@@ -209,7 +224,7 @@ def check_apikey(kwargs, nokey=False):
|
||||
pass
|
||||
else:
|
||||
if not special:
|
||||
logging.warning(Ta('Authentication missing, please enter username/password from Config->General into your 3rd party program:'))
|
||||
log_warning(Ta('Authentication missing, please enter username/password from Config->General into your 3rd party program:'))
|
||||
return report(output, 'Missing authentication', callback=callback)
|
||||
return None
|
||||
|
||||
@@ -249,6 +264,8 @@ class MainPage(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if not check_access(): return Protected()
|
||||
|
||||
if sabnzbd.OLD_QUEUE and not cfg.warned_old_queue():
|
||||
cfg.warned_old_queue.set(True)
|
||||
config.save_config()
|
||||
@@ -293,6 +310,7 @@ class MainPage(object):
|
||||
|
||||
|
||||
def add_handler(self, kwargs):
|
||||
if not check_access(): return Protected()
|
||||
id = kwargs.get('id', '')
|
||||
if not id:
|
||||
id = kwargs.get('url', '')
|
||||
@@ -397,7 +415,9 @@ class MainPage(object):
|
||||
def api(self, **kwargs):
|
||||
"""Handler for API over http, with explicit authentication parameters
|
||||
"""
|
||||
logging.debug('API-call from %s %s', cherrypy.request.remote.ip, kwargs)
|
||||
if not kwargs.get('tickleme') or not cfg.web_watchdog():
|
||||
logging.debug('API-call from %s [%s] %s', cherrypy.request.remote.ip, \
|
||||
cherrypy.request.headers.get('User-Agent', '??'), kwargs)
|
||||
if kwargs.get('mode', '') not in ('version', 'auth'):
|
||||
msg = check_apikey(kwargs)
|
||||
if msg: return msg
|
||||
@@ -407,6 +427,7 @@ class MainPage(object):
|
||||
def scriptlog(self, **kwargs):
|
||||
""" Duplicate of scriptlog of History, needed for some skins """
|
||||
# No session key check, due to fixed URLs
|
||||
if not check_access(): return Protected()
|
||||
|
||||
name = kwargs.get('name')
|
||||
if name:
|
||||
@@ -458,7 +479,7 @@ class NzoPage(object):
|
||||
# /nzb/SABnzbd_nzo_xxxxx/files
|
||||
# /nzb/SABnzbd_nzo_xxxxx/bulk_operation
|
||||
# /nzb/SABnzbd_nzo_xxxxx/save
|
||||
|
||||
if not check_access(): return Protected()
|
||||
nzo_id = None
|
||||
for a in args:
|
||||
if a.startswith('SABnzbd_nzo'):
|
||||
@@ -629,6 +650,7 @@ class QueuePage(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if not check_access(): return Protected()
|
||||
start = kwargs.get('start')
|
||||
limit = kwargs.get('limit')
|
||||
dummy2 = kwargs.get('dummy2')
|
||||
@@ -845,6 +867,7 @@ class HistoryPage(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if not check_access(): return Protected()
|
||||
start = kwargs.get('start')
|
||||
limit = kwargs.get('limit')
|
||||
search = kwargs.get('search')
|
||||
@@ -863,9 +886,11 @@ class HistoryPage(object):
|
||||
#history_items, total_bytes, bytes_beginning = sabnzbd.history_info()
|
||||
#history['bytes_beginning'] = "%.2f" % (bytes_beginning / GIGI)
|
||||
|
||||
postfix = T('B') #: Abbreviation for bytes, as in GB
|
||||
grand, month, week, day = BPSMeter.do.get_sums()
|
||||
history['total_size'], history['month_size'], history['week_size'], history['day_size'] = \
|
||||
to_units(grand), to_units(month), to_units(week), to_units(day)
|
||||
to_units(grand, postfix=postfix), to_units(month, postfix=postfix), \
|
||||
to_units(week, postfix=postfix), to_units(day, postfix=postfix)
|
||||
|
||||
history['lines'], history['fetched'], history['noofslots'] = build_history(limit=limit, start=start, verbose=self.__verbose, verbose_list=self.__verbose_list, search=search, failed_only=failed_only)
|
||||
|
||||
@@ -963,7 +988,7 @@ class HistoryPage(object):
|
||||
def scriptlog(self, **kwargs):
|
||||
""" Duplicate of scriptlog of History, needed for some skins """
|
||||
# No session key check, due to fixed URLs
|
||||
|
||||
if not check_access(): return Protected()
|
||||
name = kwargs.get('name')
|
||||
if name:
|
||||
history_db = cherrypy.thread_data.history_db
|
||||
@@ -1009,6 +1034,7 @@ class ConfigPage(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if not check_access(): return Protected()
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
|
||||
conf['configfn'] = config.get_filename()
|
||||
@@ -1018,6 +1044,7 @@ class ConfigPage(object):
|
||||
for svr in config.get_servers():
|
||||
new[svr] = {}
|
||||
conf['servers'] = new
|
||||
conf['news_items'] = cfg.news_items()
|
||||
|
||||
conf['folders'] = sabnzbd.nzbqueue.scan_jobs(all=False, action=False)
|
||||
|
||||
@@ -1065,12 +1092,14 @@ class ConfigPage(object):
|
||||
def orphan_delete(kwargs):
|
||||
path = kwargs.get('name')
|
||||
if path:
|
||||
path = platform_encode(path)
|
||||
path = os.path.join(cfg.download_dir.get_path(), path)
|
||||
remove_all(path, recursive=True)
|
||||
|
||||
def orphan_add(kwargs):
|
||||
path = kwargs.get('name')
|
||||
if path:
|
||||
path = platform_encode(path)
|
||||
path = os.path.join(cfg.download_dir.get_path(), path)
|
||||
sabnzbd.nzbqueue.repair_job(path, None)
|
||||
|
||||
@@ -1090,7 +1119,7 @@ class ConfigFolders(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1134,7 +1163,7 @@ SWITCH_LIST = \
|
||||
'ignore_samples', 'pause_on_post_processing', 'quick_check', 'nice', 'ionice',
|
||||
'ssl_type', 'pre_script', 'pause_on_pwrar', 'ampm', 'sfv_check', 'folder_rename',
|
||||
'unpack_check', 'quota_size', 'quota_day', 'quota_resume', 'quota_period',
|
||||
'pre_check', 'max_art_tries', 'max_art_opt'
|
||||
'pre_check', 'max_art_tries', 'max_art_opt', 'fail_hopeless'
|
||||
)
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
@@ -1146,7 +1175,7 @@ class ConfigSwitches(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1186,16 +1215,17 @@ class ConfigSwitches(object):
|
||||
SPECIAL_BOOL_LIST = \
|
||||
( 'start_paused', 'no_penalties', 'ignore_wrong_unrar', 'create_group_folders',
|
||||
'queue_complete_pers', 'api_warnings', 'allow_64bit_tools', 'par2_multicore',
|
||||
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames',
|
||||
'never_repair', 'allow_streaming', 'ignore_unrar_dates', 'rss_filenames', 'news_items',
|
||||
'osx_menu', 'osx_speed', 'win_menu', 'uniconfig', 'use_pickle', 'allow_incomplete_nzb',
|
||||
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files'
|
||||
'randomize_server_ip', 'no_ipv6', 'keep_awake', 'overwrite_files', 'empty_postproc',
|
||||
'web_watchdog', 'wait_for_dfolder', 'warn_empty_nzb'
|
||||
)
|
||||
SPECIAL_VALUE_LIST = \
|
||||
( 'size_limit', 'folder_max_length', 'fsys_type', 'movie_rename_limit', 'nomedia_marker',
|
||||
'req_completion_rate', 'wait_ext_drive', 'history_limit', 'show_sysload'
|
||||
'req_completion_rate', 'wait_ext_drive', 'history_limit', 'show_sysload', 'ipv6_servers'
|
||||
)
|
||||
SPECIAL_LIST_LIST = \
|
||||
( 'rss_odd_titles',
|
||||
( 'rss_odd_titles', 'prio_sort_list'
|
||||
)
|
||||
|
||||
class ConfigSpecial(object):
|
||||
@@ -1206,7 +1236,7 @@ class ConfigSpecial(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1240,7 +1270,7 @@ class ConfigSpecial(object):
|
||||
#------------------------------------------------------------------------------
|
||||
GENERAL_LIST = (
|
||||
'host', 'port', 'username', 'password', 'disable_api_key',
|
||||
'refresh_rate', 'cache_limit',
|
||||
'refresh_rate', 'cache_limit', 'local_range',
|
||||
'enable_https', 'https_port', 'https_cert', 'https_key', 'https_chain'
|
||||
)
|
||||
|
||||
@@ -1275,7 +1305,7 @@ class ConfigGeneral(object):
|
||||
else:
|
||||
return ''
|
||||
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1348,6 +1378,7 @@ class ConfigGeneral(object):
|
||||
conf['cache_limit'] = cfg.cache_limit()
|
||||
conf['cleanup_list'] = cfg.cleanup_list.get_string()
|
||||
conf['nzb_key'] = cfg.nzb_key()
|
||||
conf['local_range'] = cfg.local_range()
|
||||
conf['my_lcldata'] = cfg.admin_dir.get_path()
|
||||
|
||||
template = Template(file=os.path.join(self.__web_dir, 'config_general.tmpl'),
|
||||
@@ -1456,7 +1487,7 @@ class ConfigServer(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1615,7 +1646,7 @@ class ConfigRss(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1913,7 +1944,7 @@ class ConfigScheduling(object):
|
||||
days["7"] = T('Sunday')
|
||||
return days
|
||||
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -1924,7 +1955,7 @@ class ConfigScheduling(object):
|
||||
conf['schedlines'] = []
|
||||
snum = 1
|
||||
conf['taskinfo'] = []
|
||||
for ev in scheduler.sort_schedules(forward=True):
|
||||
for ev in scheduler.sort_schedules(all_events=False):
|
||||
line = ev[3]
|
||||
conf['schedlines'].append(line)
|
||||
try:
|
||||
@@ -1950,13 +1981,13 @@ class ConfigScheduling(object):
|
||||
action = Ttemplate("sch-" + act) + ' ' + server
|
||||
|
||||
if day_numbers == "1234567":
|
||||
days_of_week = "Daily"
|
||||
days_of_week = "Daily"
|
||||
elif day_numbers == "12345":
|
||||
days_of_week = "Weekdays"
|
||||
days_of_week = "Weekdays"
|
||||
elif day_numbers == "67":
|
||||
days_of_week = "Weekends"
|
||||
days_of_week = "Weekends"
|
||||
else:
|
||||
days_of_week = ", ".join([day_names.get(i, "**") for i in day_numbers])
|
||||
days_of_week = ", ".join([day_names.get(i, "**") for i in day_numbers])
|
||||
item = (snum, '%02d' % int(h), '%02d' % int(m), days_of_week, '%s %s' % (action, value))
|
||||
|
||||
conf['taskinfo'].append(item)
|
||||
@@ -1984,6 +2015,8 @@ class ConfigScheduling(object):
|
||||
minute = kwargs.get('minute')
|
||||
hour = kwargs.get('hour')
|
||||
days_of_week = ''.join([str(x) for x in kwargs.get('daysofweek', '')])
|
||||
if not days_of_week:
|
||||
days_of_week = '1234567'
|
||||
action = kwargs.get('action')
|
||||
arguments = kwargs.get('arguments')
|
||||
|
||||
@@ -2043,7 +2076,7 @@ class ConfigIndexers(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -2126,7 +2159,7 @@ class ConfigCats(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -2200,7 +2233,7 @@ class ConfigSorting(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
@@ -2253,6 +2286,7 @@ class Status(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if not check_access(): return Protected()
|
||||
header, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
|
||||
header['logfile'] = sabnzbd.LOGFILE
|
||||
@@ -2607,7 +2641,7 @@ class ConfigNotify(object):
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, **kwargs):
|
||||
if cfg.configlock():
|
||||
if cfg.configlock() or not check_access():
|
||||
return Protected()
|
||||
|
||||
conf, pnfo_list, bytespersec = build_header(self.__prim, self.__web_dir)
|
||||
|
||||
@@ -39,7 +39,7 @@ except:
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.decorators import synchronized
|
||||
from sabnzbd.constants import DEFAULT_PRIORITY, FUTURE_Q_FOLDER, JOB_ADMIN, GIGI, VERIFIED_FILE, Status, MEBI
|
||||
from sabnzbd.constants import DEFAULT_PRIORITY, FUTURE_Q_FOLDER, JOB_ADMIN, GIGI, Status, MEBI
|
||||
import sabnzbd.config as config
|
||||
import sabnzbd.cfg as cfg
|
||||
from sabnzbd.encoding import unicoder, latin1
|
||||
@@ -631,7 +631,7 @@ def from_units(val):
|
||||
else:
|
||||
return 0.0
|
||||
|
||||
def to_units(val, spaces=0, dec_limit=2):
|
||||
def to_units(val, spaces=0, dec_limit=2, postfix=''):
|
||||
""" Convert number to K/M/G/T/P notation
|
||||
Add "spaces" if not ending in letter
|
||||
dec_limit==1 shows a single decimal for M and higher
|
||||
@@ -660,8 +660,8 @@ def to_units(val, spaces=0, dec_limit=2):
|
||||
else:
|
||||
decimals = 0
|
||||
|
||||
format = '%%s%%.%sf %%s' % decimals
|
||||
return format % (sign, val, unit)
|
||||
format = '%%s%%.%sf %%s%%s' % decimals
|
||||
return format % (sign, val, unit, postfix)
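
The postfix argument is what lets the History page show "GB"/"MB" instead of bare "G"/"M": the caller passes the translated abbreviation for bytes and it is appended after the unit letter. A compact standalone version of the same formatting idea (thresholds and decimal handling simplified compared to the real function):

def to_units(val, postfix=''):
    """Format a byte count as K/M/G/T/P, optionally followed by a postfix such as 'B'."""
    units = ['', 'K', 'M', 'G', 'T', 'P']
    val = float(val)
    i = 0
    while val >= 1024.0 and i < len(units) - 1:
        val /= 1024.0
        i += 1
    decimals = 1 if i >= 2 else 0      # single decimal for M and higher
    return '%.*f %s%s' % (decimals, val, units[i], postfix)

# to_units(123456789, postfix='B') -> '117.7 MB'
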
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def same_file(a, b):
|
||||
@@ -1010,7 +1010,7 @@ def memory_usage():
|
||||
res = int(_PAGE_SIZE * int(v[1]) / MEBI)
|
||||
return "V=%sM R=%sM" % (virt, res)
|
||||
except:
|
||||
return None
|
||||
return ''
|
||||
|
||||
try:
|
||||
_PAGE_SIZE = os.sysconf("SC_PAGE_SIZE")
|
||||
@@ -1026,7 +1026,10 @@ def loadavg():
|
||||
if not sabnzbd.WIN32 and not sabnzbd.DARWIN:
|
||||
opt = cfg.show_sysload()
|
||||
if opt:
|
||||
p = '%.2f | %.2f | %.2f' % os.getloadavg()
|
||||
try:
|
||||
p = '%.2f | %.2f | %.2f' % os.getloadavg()
|
||||
except:
|
||||
pass
|
||||
if opt > 1 and _HAVE_STATM:
|
||||
p = '%s | %s' % (p, memory_usage())
|
||||
return p
|
||||
@@ -1078,7 +1081,11 @@ def int_conv(value):
|
||||
# Diskfree
|
||||
if sabnzbd.WIN32:
|
||||
# windows diskfree
|
||||
import win32api
|
||||
try:
|
||||
# Careful here, because win32api test hasn't been done yet!
|
||||
import win32api
|
||||
except:
|
||||
pass
|
||||
def diskfree(_dir):
|
||||
""" Return amount of free diskspace in GBytes
|
||||
"""
|
||||
|
||||
@@ -32,10 +32,11 @@ from sabnzbd.encoding import TRANS, UNTRANS, unicode2local, name_fixer, \
|
||||
reliable_unpack_names, unicoder, latin1, platform_encode
|
||||
from sabnzbd.utils.rarfile import RarFile, is_rarfile
|
||||
from sabnzbd.misc import format_time_string, find_on_path, make_script_path, int_conv, \
|
||||
flag_file
|
||||
flag_file, real_path
|
||||
from sabnzbd.tvsort import SeriesSorter
|
||||
import sabnzbd.cfg as cfg
|
||||
from constants import Status, QCHECK_FILE
|
||||
from sabnzbd.constants import Status, QCHECK_FILE, RENAMES_FILE
|
||||
load_data = save_data = None
|
||||
|
||||
if sabnzbd.WIN32:
|
||||
try:
|
||||
@@ -78,6 +79,7 @@ CURL_COMMAND = None
|
||||
def find_programs(curdir):
|
||||
"""Find external programs
|
||||
"""
|
||||
global load_data, save_data
|
||||
def check(path, program):
|
||||
p = os.path.abspath(os.path.join(path, program))
|
||||
if os.access(p, os.X_OK):
|
||||
@@ -85,6 +87,10 @@ def find_programs(curdir):
|
||||
else:
|
||||
return None
|
||||
|
||||
# Another crazy Python import bug work-around
|
||||
load_data = sabnzbd.load_data
|
||||
save_data = sabnzbd.save_data
|
||||
|
||||
if sabnzbd.DARWIN:
|
||||
try:
|
||||
os_version = subprocess.Popen("sw_vers -productVersion", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).stdout.read()
|
||||
@@ -465,13 +471,15 @@ def rar_extract(rarfile, numrars, one_folder, nzo, setname, extraction_path):
|
||||
passwords = [nzo.password]
|
||||
else:
|
||||
passwords = []
|
||||
# Append meta passwords, to prevent changing the original list
|
||||
passwords.extend(nzo.meta.get('password', []))
|
||||
pw_file = cfg.password_file.get_path()
|
||||
if pw_file:
|
||||
try:
|
||||
pwf = open(pw_file, 'r')
|
||||
passwords = pwf.read().split('\n')
|
||||
lines = pwf.read().split('\n')
|
||||
# Remove empty lines and space-only passwords and remove surrounding spaces
|
||||
passwords = [pw.strip('\r\n ') for pw in passwords if pw.strip('\r\n ')]
|
||||
passwords.extend([pw.strip('\r\n ') for pw in lines if pw.strip('\r\n ')])
|
||||
pwf.close()
|
||||
logging.info('Read the passwords file %s', pw_file)
|
||||
except IOError:
|
||||
@@ -631,7 +639,7 @@ def rar_extract_core(rarfile, numrars, one_folder, nzo, setname, extraction_path
|
||||
else:
|
||||
m = re.search(r'^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$', line)
|
||||
if m:
|
||||
extracted.append(TRANS(m.group(2)))
|
||||
extracted.append(real_path(extraction_path, TRANS(m.group(2))))
|
||||
|
||||
if fail:
|
||||
if proc:
|
||||
@@ -888,6 +896,8 @@ def par2_repair(parfile_nzf, nzo, workdir, setname):
|
||||
|
||||
|
||||
_RE_BLOCK_FOUND = re.compile('File: "([^"]+)" - found \d+ of \d+ data blocks from "([^"]+)"')
|
||||
_RE_IS_MATCH_FOR = re.compile('File: "([^"]+)" - is a match for "([^"]+)"')
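
The two compiled patterns capture par2's own log lines so files that par2 matches under a different name can be recorded in the renames dictionary and saved to the __renames__ admin file after a successful repair. A short illustration of parsing one such line (the sample line is invented but follows par2's output format):

import re

# Same pattern as above: File: "oldname.rar" - is a match for "newname.rar".
_RE_IS_MATCH_FOR = re.compile(r'File: "([^"]+)" - is a match for "([^"]+)"')

renames = {}
line = 'File: "abc.part01.rar" - is a match for "my.show.part01.rar".'
m = _RE_IS_MATCH_FOR.search(line)
if m:
    old_name, new_name = m.group(1), m.group(2)
    renames[new_name] = old_name      # remember what the file used to be called

print(renames)   # {'my.show.part01.rar': 'abc.part01.rar'}
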
|
||||
|
||||
def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
""" Run par2 on par-set """
|
||||
if cfg.never_repair():
|
||||
@@ -918,6 +928,9 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
if setname in joinable:
|
||||
command.append(joinable)
|
||||
|
||||
# Append the wildcard for this set
|
||||
command.append('%s*' % os.path.join(os.path.split(parfile)[0], setname))
|
||||
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
logging.debug('Starting par2: %s', command)
|
||||
|
||||
@@ -935,6 +948,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
# Set up our variables
|
||||
pars = []
|
||||
datafiles = []
|
||||
renames = {}
|
||||
|
||||
linebuf = ''
|
||||
finished = 0
|
||||
@@ -965,12 +979,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
if 'Repairing:' not in line:
|
||||
lines.append(line)
|
||||
|
||||
if 'The recovery file does not exist' in line:
|
||||
logging.info('%s', line)
|
||||
nzo.set_unpack_info('Repair', unicoder(line), set=setname)
|
||||
nzo.status = Status.FAILED
|
||||
|
||||
elif line.startswith('Invalid option specified'):
|
||||
if line.startswith('Invalid option specified'):
|
||||
msg = T('[%s] PAR2 received incorrect options, check your Config->Switches settings') % unicoder(setname)
|
||||
nzo.set_unpack_info('Repair', msg, set=setname)
|
||||
nzo.status = Status.FAILED
|
||||
@@ -990,7 +999,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
start = time()
|
||||
verified = 1
|
||||
|
||||
elif line.startswith('Main packet not found'):
|
||||
elif line.startswith('Main packet not found') or 'The recovery file does not exist' in line:
|
||||
## Initialparfile probably didn't decode properly,
|
||||
logging.info(Ta('Main packet not found...'))
|
||||
|
||||
@@ -1009,8 +1018,13 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
|
||||
logging.info("Found new par2file %s", nzf.filename)
|
||||
|
||||
## Move from extrapar list to files to be downloaded
|
||||
nzo.add_parfile(nzf)
|
||||
extrapars.remove(nzf)
|
||||
## Now set new par2 file as primary par2
|
||||
nzo.partable[setname] = nzf
|
||||
nzf.extrapars = extrapars
|
||||
parfile_nzf = []
|
||||
## mark for readd
|
||||
readd = True
|
||||
else:
|
||||
@@ -1129,6 +1143,33 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
# Hit a bug in par2-tbb, retry with par2-classic
|
||||
retry_classic = True
|
||||
|
||||
elif ' cannot be renamed to ' in line:
|
||||
if not classic and sabnzbd.WIN32:
|
||||
# Hit a bug in par2-tbb, retry with par2-classic
|
||||
retry_classic = True
|
||||
else:
|
||||
msg = unicoder(line.strip())
|
||||
nzo.fail_msg = msg
|
||||
msg = u'[%s] %s' % (unicoder(setname), msg)
|
||||
nzo.set_unpack_info('Repair', msg, set=setname)
|
||||
nzo.status = Status.FAILED
|
||||
|
||||
# File: "oldname.rar" - is a match for "newname.rar".
|
||||
elif 'is a match for' in line:
|
||||
m = _RE_IS_MATCH_FOR.search(line)
|
||||
if m:
|
||||
old_name = m.group(1)
|
||||
new_name = m.group(2)
|
||||
logging.debug('PAR2 will rename "%s" to "%s"', old_name, new_name)
|
||||
renames[new_name] = old_name
|
||||
|
||||
elif 'No details available for recoverable file' in line:
|
||||
msg = unicoder(line.strip())
|
||||
nzo.fail_msg = msg
|
||||
msg = u'[%s] %s' % (unicoder(setname), msg)
|
||||
nzo.set_unpack_info('Repair', msg, set=setname)
|
||||
nzo.status = Status.FAILED
|
||||
|
||||
elif not verified:
|
||||
if line.startswith('Verifying source files'):
|
||||
nzo.set_action_line(T('Verifying'), '01/%02d' % verifytotal)
|
||||
@@ -1169,6 +1210,13 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
|
||||
|
||||
logging.debug('PAR2 output was\n%s', '\n'.join(lines))
|
||||
|
||||
# If successful, add renamed files to the collection
|
||||
if finished and renames:
|
||||
previous = load_data(RENAMES_FILE, nzo.workpath, remove=False)
|
||||
for name in previous or {}:
|
||||
renames[name] = previous[name]
|
||||
save_data(renames, RENAMES_FILE, nzo.workpath)
|
||||
|
||||
if retry_classic:
|
||||
logging.debug('Retry PAR2-joining with par2-classic')
|
||||
return PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=True)
|
||||
@@ -1306,13 +1354,12 @@ def QuickCheck(set, nzo):
|
||||
nzf_list = nzo.finished_files
|
||||
|
||||
for file in md5pack:
|
||||
file = name_fixer(file)
|
||||
if sabnzbd.misc.on_cleanup_list(file, False):
|
||||
result = True
|
||||
continue
|
||||
found = False
|
||||
for nzf in nzf_list:
|
||||
if file == name_fixer(nzf.filename):
|
||||
if file == nzf.filename:
|
||||
found = True
|
||||
if (nzf.md5sum is not None) and nzf.md5sum == md5pack[file]:
|
||||
logging.debug('Quick-check of file %s OK', file)
|
||||
@@ -1375,20 +1422,21 @@ def sfv_check(sfv_path):
|
||||
root = os.path.split(sfv_path)[0]
|
||||
for line in fp:
|
||||
line = line.strip('\n\r ')
|
||||
if line[0] != ';':
|
||||
if line and line[0] != ';':
|
||||
x = line.rfind(' ')
|
||||
filename = platform_encode(line[:x].strip())
|
||||
checksum = line[x:].strip()
|
||||
path = os.path.join(root, filename)
|
||||
if os.path.exists(path):
|
||||
if crc_check(path, checksum):
|
||||
logging.debug('File %s passed SFV check', path)
|
||||
if x > 0:
|
||||
filename = platform_encode(line[:x].strip())
|
||||
checksum = line[x:].strip()
|
||||
path = os.path.join(root, filename)
|
||||
if os.path.exists(path):
|
||||
if crc_check(path, checksum):
|
||||
logging.debug('File %s passed SFV check', path)
|
||||
else:
|
||||
logging.info('File %s did not pass SFV check', latin1(path))
|
||||
failed.append(unicoder(filename))
|
||||
else:
|
||||
logging.info('File %s did not pass SFV check', latin1(path))
|
||||
logging.info('File %s missing in SFV check', latin1(path))
|
||||
failed.append(unicoder(filename))
|
||||
else:
|
||||
logging.info('File %s missing in SFV check', latin1(path))
|
||||
failed.append(unicoder(filename))
|
||||
fp.close()
|
||||
return failed
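
The reworked loop makes sfv_check() tolerant of blank lines and of lines without a space separator (the x > 0 guard), and it now logs missing files separately from CRC failures. A standalone sketch of just the line parsing with the same guards (helper name and sample data are illustrative):

def parse_sfv_lines(lines):
    """Yield (filename, crc) tuples from .sfv content, skipping comments,
    blank lines and malformed lines."""
    for line in lines:
        line = line.strip('\n\r ')
        if not line or line[0] == ';':
            continue                      # comment or empty line
        x = line.rfind(' ')
        if x <= 0:
            continue                      # no separator: malformed line
        yield line[:x].strip(), line[x:].strip()

sample = ['; generated by cksfv', '', 'episode.part1.rar 1a2b3c4d']
print(list(parse_sfv_lines(sample)))   # [('episode.part1.rar', '1a2b3c4d')]
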
|
||||
|
||||
@@ -1415,6 +1463,7 @@ def crc_check(path, target_crc):
|
||||
def analyse_show(name):
|
||||
""" Do a quick SeasonSort check and return basic facts """
|
||||
job = SeriesSorter(name, None, None)
|
||||
job.match(force=True)
|
||||
if job.is_match():
|
||||
job.get_values()
|
||||
info = job.show_info
|
||||
@@ -1486,7 +1535,10 @@ def list2cmdline(lst):
|
||||
#------------------------------------------------------------------------------
|
||||
# Work-around for the failure of Python2.5 on Windows to support IPV6 with HTTPS
|
||||
|
||||
def get_from_url(url):
|
||||
def get_from_url(url, timeout=None):
|
||||
""" Retrieve URL and return content
|
||||
`timeout` sets non-standard timeout and skips when on Windows
|
||||
"""
|
||||
if 'https:' in url and sabnzbd.WIN32 and sys.version_info < (2,6) and sabnzbd.newsunpack.CURL_COMMAND:
|
||||
command = [sabnzbd.newsunpack.CURL_COMMAND, "-k", url]
|
||||
stup, need_shell, command, creationflags = build_command(command)
|
||||
@@ -1498,6 +1550,14 @@ def get_from_url(url):
|
||||
p.wait()
|
||||
else:
|
||||
import urllib2
|
||||
s = urllib2.urlopen(url)
|
||||
output = s.read()
|
||||
if sys.version_info < (2, 6):
|
||||
timeout = 0
|
||||
try:
|
||||
if timeout:
|
||||
s = urllib2.urlopen(url, timeout=timeout)
|
||||
else:
|
||||
s = urllib2.urlopen(url)
|
||||
output = s.read()
|
||||
except:
|
||||
output = None
|
||||
return output
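
get_from_url() gains an optional timeout and a try/except around the fetch, so a dead indexer or RSS host returns None instead of hanging or raising; the timeout is only honoured on Python 2.6+, where urllib2.urlopen() accepts it. A guarded fetch in the same spirit (Python 2 like the rest of the codebase; helper name is illustrative):

import logging
import urllib2

def fetch_url(url, timeout=None):
    """Return the body of `url`, or None on any error."""
    try:
        if timeout:
            s = urllib2.urlopen(url, timeout=timeout)   # timeout supported from Python 2.6
        else:
            s = urllib2.urlopen(url)
        return s.read()
    except Exception:
        logging.info('Fetching %s failed', url, exc_info=True)
        return None

# fetch_url('http://example.com/rss', timeout=10)
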
|
||||
|
||||
@@ -28,6 +28,7 @@ import logging
|
||||
|
||||
import sabnzbd
|
||||
from sabnzbd.constants import *
|
||||
import sabnzbd.cfg
|
||||
|
||||
try:
|
||||
from OpenSSL import SSL
|
||||
@@ -84,23 +85,32 @@ def request_server_info(server):
|
||||
|
||||
|
||||
def GetServerParms(host, port):
|
||||
# Make sure port is numeric (unicode input not supported)
|
||||
""" Return processed getaddrinfo() for server
|
||||
"""
|
||||
try:
|
||||
int(port)
|
||||
except:
|
||||
# Could do with a warning here
|
||||
port = 119
|
||||
opt = sabnzbd.cfg.ipv6_servers()
|
||||
try:
|
||||
# Standard IPV4
|
||||
return socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
|
||||
# Standard IPV4 or IPV6
|
||||
ips = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
|
||||
if opt == 2 or (_EXTERNAL_IPV6 and opt == 1):
|
||||
# IPv6 reachable and allowed, or forced by user
|
||||
return ips
|
||||
else:
|
||||
# IPv6 unreachable or not allowed by user
|
||||
return [ip for ip in ips if ':' not in ip[4][0]]
|
||||
except:
|
||||
try:
|
||||
# Try IPV6 explicitly
|
||||
return socket.getaddrinfo(host, port, socket.AF_INET6,
|
||||
socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
|
||||
except:
|
||||
# Nothing found!
|
||||
return None
|
||||
if opt == 2 or (_EXTERNAL_IPV6 and opt == 1):
|
||||
try:
|
||||
# Try IPV6 explicitly
|
||||
return socket.getaddrinfo(host, port, socket.AF_INET6,
|
||||
socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME)
|
||||
except:
|
||||
# Nothing found!
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def con(sock, host, port, sslenabled, write_fds, nntp):
|
||||
@@ -310,11 +320,14 @@ class NewsWrapper(object):
|
||||
def body(self, precheck):
|
||||
self.timeout = time.time() + self.server.timeout
|
||||
if precheck:
|
||||
command = 'STAT <%s>\r\n' % (self.article.article)
|
||||
elif self.server.oddball:
|
||||
command = 'ARTICLE <%s>\r\n' % (self.article.article)
|
||||
else:
|
||||
if self.server.have_stat:
|
||||
command = 'STAT <%s>\r\n' % (self.article.article)
|
||||
else:
|
||||
command = 'HEAD <%s>\r\n' % (self.article.article)
|
||||
elif self.server.have_body:
|
||||
command = 'BODY <%s>\r\n' % (self.article.article)
|
||||
else:
|
||||
command = 'ARTICLE <%s>\r\n' % (self.article.article)
|
||||
self.nntp.sock.sendall(command)
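
body() now works from two capability flags instead of the single "oddball" case: prechecks prefer STAT and drop to HEAD when STAT is rejected, normal downloads prefer BODY and drop to the heavier ARTICLE, and the 500 handler in downloader.py clears have_stat/have_body when a server reports the command as unsupported. A compact sketch of just the command selection (illustrative helper, not the real NewsWrapper method):

def choose_nntp_command(message_id, precheck, have_stat=True, have_body=True):
    """Pick the NNTP command for one article, mirroring the fallback order above."""
    if precheck:
        verb = 'STAT' if have_stat else 'HEAD'      # existence check only
    else:
        verb = 'BODY' if have_body else 'ARTICLE'   # fetch the article data
    return '%s <%s>\r\n' % (verb, message_id)

# choose_nntp_command('part1of10.xyz@example', precheck=False, have_body=False)
# -> 'ARTICLE <part1of10.xyz@example>\r\n'
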
|
||||
|
||||
def send_group(self, group):
|
||||
@@ -415,3 +428,25 @@ class SSLConnection(object):
|
||||
return apply(self._ssl_conn.%s, args)
|
||||
finally:
|
||||
self._lock.release()\n""" % (f, f)
|
||||
|
||||
|
||||
def test_ipv6():
|
||||
""" Check if external IPv6 addresses are reachable """
|
||||
# Use google.com to test IPv6 access
|
||||
try:
|
||||
info = socket.getaddrinfo('www.google.com', 80, socket.AF_INET6, socket.SOCK_STREAM,
|
||||
socket.IPPROTO_IP, socket.AI_CANONNAME)
|
||||
except:
|
||||
return False
|
||||
|
||||
try:
|
||||
af, socktype, proto, canonname, sa = info[0]
|
||||
sock = socket.socket(af, socktype, proto)
|
||||
sock.settimeout(6)
|
||||
sock.connect(sa[0:2])
|
||||
sock.close()
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
_EXTERNAL_IPV6 = test_ipv6()
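
test_ipv6() decides once, at import time, whether outgoing IPv6 actually works by opening a short-lived TCP connection to a well-known dual-stack host; GetServerParms() then only keeps AAAA results when that probe succeeded or the user forces ipv6_servers to 2. Roughly the same probe as a reusable function, with the target host as a parameter rather than hard-coded:

import socket

def can_reach_ipv6(host='www.google.com', port=80, timeout=6):
    """True if an outgoing IPv6 TCP connection to host:port succeeds."""
    try:
        info = socket.getaddrinfo(host, port, socket.AF_INET6, socket.SOCK_STREAM)
        af, socktype, proto, _canonname, sa = info[0]
        sock = socket.socket(af, socktype, proto)
        sock.settimeout(timeout)
        sock.connect(sa[0:2])
        sock.close()
        return True
    except (socket.error, socket.gaierror, IndexError):
        return False

# print(can_reach_ipv6())   # False on IPv4-only networks
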
|
||||
|
||||
@@ -27,7 +27,7 @@ import datetime
import sabnzbd
from sabnzbd.trylist import TryList
from sabnzbd.nzbstuff import NzbObject
from sabnzbd.misc import exit_sab, cat_to_opts, flag_file, \
from sabnzbd.misc import exit_sab, cat_to_opts, \
get_admin_path, remove_all, globber
from sabnzbd.panic import panic_queue
import sabnzbd.database as database
@@ -147,7 +147,13 @@ class NzbQueue(TryList):


def repair_job(self, folder, new_nzb=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """
""" Reconstruct admin for a single job folder, optionally with new NZB
"""
def all_verified(path):
""" Return True when all sets have been successfully verified """
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x':False}
return not bool([True for x in verified if not verified[x]])

name = os.path.basename(folder)
path = os.path.join(folder, JOB_ADMIN)
if hasattr(new_nzb, 'filename'):
@@ -155,7 +161,7 @@ class NzbQueue(TryList):
else:
filename = ''
if not filename:
if not flag_file(folder, VERIFIED_FILE):
if not all_verified(path):
filename = globber(path, '*.gz')
if len(filename) > 0:
logging.debug('Repair job %s by reparsing stored NZB', latin1(name))
@@ -178,8 +184,9 @@ class NzbQueue(TryList):
logging.debug('Failed to find NZB file after pre-check (%s)', nzo.nzo_id)
return
from sabnzbd.dirscanner import ProcessSingleFile
nzo_id = ProcessSingleFile(os.path.split(nzb_path)[1], nzb_path, reuse=True)[1][0]
self.replace_in_q(nzo, nzo_id)
res, nzo_ids = ProcessSingleFile(nzo.work_name + '.nzb', nzb_path, reuse=True)
if res == 0 and nzo_ids:
self.replace_in_q(nzo, nzo_ids[0])


@synchronized(NZBQUEUE_LOCK)
@@ -189,8 +196,8 @@ class NzbQueue(TryList):
new_nzo = self.get_nzo(nzo_id)
pos = self.__nzo_list.index(new_nzo)
targetpos = self.__nzo_list.index(nzo)
self.__nzo_list.pop(pos)
self.__nzo_list[targetpos] = new_nzo
self.__nzo_list.pop(pos)
del self.__nzo_table[nzo.nzo_id]
del nzo
except:
@@ -741,12 +748,14 @@ class NzbQueue(TryList):
"""
if self.actives(grabs=False) < 2 and cfg.autodisconnect():
# This was the last job, close server connections
sabnzbd.downloader.Downloader.do.disconnect()
if sabnzbd.downloader.Downloader.do:
sabnzbd.downloader.Downloader.do.disconnect()

# Notify assembler to call postprocessor
if not nzo.deleted:
nzo.deleted = True
if nzo.precheck:
nzo.save_attribs()
# Check result
enough, ratio = nzo.check_quality()
if enough:
@@ -757,7 +766,7 @@ class NzbQueue(TryList):
return
else:
# Not enough data, let postprocessor show it as failed
nzo.save_attribs()
pass
Assembler.do.process((nzo, None))


@@ -779,18 +788,24 @@ class NzbQueue(TryList):
def queue_info(self, for_cli=False, max_jobs=0):
bytes_left = 0
bytes = 0
q_size = 0
pnfo_list = []
n = 0
for nzo in self.__nzo_list:
pnfo = nzo.gather_info(for_cli = for_cli)
if nzo.status != 'Paused':
bytes += pnfo[PNFO_BYTES_FIELD]
bytes_left += pnfo[PNFO_BYTES_LEFT_FIELD]
pnfo_list.append(pnfo)
if not max_jobs or n < max_jobs:
pnfo = nzo.gather_info(for_cli = for_cli)
pnfo_list.append(pnfo)
if nzo.status != 'Paused':
bytes += pnfo[PNFO_BYTES_FIELD]
bytes_left += pnfo[PNFO_BYTES_LEFT_FIELD]
q_size += 1
elif nzo.status != 'Paused':
b, b_left = nzo.total_and_remaining()
bytes += b
bytes_left += b_left
q_size += 1
n += 1
if max_jobs and n >= max_jobs:
break
return (bytes, bytes_left, pnfo_list)
return (bytes, bytes_left, pnfo_list, q_size)


@synchronized(NZBQUEUE_LOCK)
@@ -819,6 +834,17 @@ class NzbQueue(TryList):

ArticleCache.do.purge_articles(nzo.saved_articles)

@synchronized(NZBQUEUE_LOCK)
def stop_idle_jobs(self):
""" Detect jobs that have zero files left and send them to post processing
"""
empty = []
for nzo in self.__nzo_list:
if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING):
empty.append(nzo)
for nzo in empty:
self.end_job(nzo)

def get_urls(self):
""" Return list of future-types needing URL """
lst = []

@@ -37,7 +37,7 @@ import sabnzbd
from sabnzbd.constants import sample_match, GIGI, ATTRIB_FILE, JOB_ADMIN, \
DEFAULT_PRIORITY, LOW_PRIORITY, NORMAL_PRIORITY, \
HIGH_PRIORITY, PAUSED_PRIORITY, TOP_PRIORITY, DUP_PRIORITY, \
Status
RENAMES_FILE, Status
from sabnzbd.misc import to_units, cat_to_opts, cat_convert, sanitize_foldername, \
get_unique_path, get_admin_path, remove_all, format_source_url, \
sanitize_filename, globber, sanitize_foldername, int_conv, \
@@ -322,6 +322,11 @@ class NzbParser(xml.sax.handler.ContentHandler):
self.in_group = False
self.in_segments = False
self.in_segment = False
self.in_head = False
self.in_meta = False
self.meta_type = ''
self.meta_types = {}
self.meta_content = []
self.filename = ''
self.avg_age = 0
self.valids = 0
@@ -374,6 +379,16 @@ class NzbParser(xml.sax.handler.ContentHandler):
elif name == 'groups' and self.in_nzb and self.in_file:
self.in_groups = True

elif name == 'head' and self.in_nzb:
self.in_head = True

elif name == 'meta' and self.in_nzb and self.in_head:
self.in_meta = True
meta_type = attrs.get('type')
if meta_type:
self.meta_type = meta_type.lower()
self.meta_content = []

elif name == 'nzb':
self.in_nzb = True

@@ -382,6 +397,8 @@ class NzbParser(xml.sax.handler.ContentHandler):
self.group_name.append(content)
elif self.in_segment:
self.article_id.append(content)
elif self.in_meta:
self.meta_content.append(content)

def endElement(self, name):
if name == 'group' and self.in_group:
@@ -436,12 +453,24 @@ class NzbParser(xml.sax.handler.ContentHandler):
sabnzbd.remove_data(nzf.nzf_id, self.nzo.workpath)
self.skipped_files += 1

elif name == 'head':
self.in_head = False

elif name == 'meta':
self.in_meta = False
if self.meta_type:
if self.meta_type not in self.meta_types:
self.meta_types[self.meta_type] = []
self.meta_types[self.meta_type].append(''.join(self.meta_content))

elif name == 'nzb':
self.in_nzb = False

def endDocument(self):
""" End of the file """
self.nzo.groups = self.groups
self.nzo.meta = self.meta_types
logging.debug('META-DATA = %s', self.nzo.meta)
files = max(1, self.valids)
self.nzo.avg_stamp = self.avg_age / files
self.nzo.avg_date = datetime.datetime.fromtimestamp(self.avg_age / files)
@@ -501,7 +530,9 @@ NzbObjectMapper = (
('oversized', 'oversized'), # Was detected as oversized
('create_group_folder', 'create_group_folder'),
('precheck', 'precheck'),
('incomplete', 'incomplete') # Was detected as incomplete
('incomplete', 'incomplete'), # Was detected as incomplete
('reuse', 'reuse'),
('meta', 'meta') # Meta-date from 1.1 type NZB
)

class NzbObject(TryList):
@@ -537,6 +568,7 @@ class NzbObject(TryList):
self.work_name = work_name
self.final_name = work_name

self.meta = {}
self.created = False # dirprefixes + work_name created
self.bytes = 0 # Original bytesize
self.bytes_downloaded = 0 # Downloaded byte
@@ -588,6 +620,7 @@ class NzbObject(TryList):
self.oversized = False
self.precheck = False
self.incomplete = False
self.reuse = reuse
if self.status == Status.QUEUED and not reuse:
self.precheck = cfg.pre_check()
if self.precheck:
@@ -695,12 +728,23 @@ class NzbObject(TryList):

if not self.files and not reuse:
self.purge_data(keep_basic=False)
if self.url:
logging.warning(Ta('Empty NZB file %s') + ' [%s]', filename, self.url)
if cfg.warn_empty_nzb():
mylog = logging.warning
else:
logging.warning(Ta('Empty NZB file %s'), filename)
mylog = logging.info
if self.url:
mylog(Ta('Empty NZB file %s') + ' [%s]', filename, self.url)
else:
mylog(Ta('Empty NZB file %s'), filename)
raise ValueError

if cat is None:
for metacat in self.meta.get('category', ()):
metacat = cat_convert(metacat)
if metacat:
cat = metacat
break

if cat is None:
for grp in self.groups:
cat = cat_convert(grp)
@@ -827,8 +871,7 @@ class NzbObject(TryList):
# Move only when not current NZF and filename was extractable from subject
if name and nzf is not xnzf:
head, vol, block = analyse_par2(name)
# When only subject is known, it's enough that that 'parset' is in subject
if head and lparset in head.lower():
if head and matcher(lparset, head.lower()):
xnzf.set_par2(parset, vol, block)
self.extrapars[parset].append(xnzf)
if not self.precheck:
@@ -846,6 +889,9 @@ class NzbObject(TryList):
head, vol, block = analyse_par2(fn)
## Is a par2file and repair mode activated
if head and (self.repair or cfg.allow_streaming()):
## Skip if mini-par2 is not complete
if not block and nzf.bytes_left:
return
nzf.set_par2(head, vol, block)
## Already got a parfile for this set?
if head in self.partable:
@@ -888,6 +934,14 @@ class NzbObject(TryList):

if file_done:
self.remove_nzf(nzf)
if not self.reuse and not self.precheck and cfg.fail_hopeless() and not self.check_quality(99)[0]:
#set the nzo status to return "Queued"
self.status = Status.QUEUED
self.set_download_report()
self.fail_msg = T('Aborted, cannot be completed')
self.set_unpack_info('Download', self.fail_msg, unique=False)
logging.debug('Abort job "%s", due to impossibility to complete it', self.final_name_pw_clean)
return True, True, True

if reset:
self.reset_try_list()
@@ -910,6 +964,15 @@ class NzbObject(TryList):
"""
# Get a list of already present files
files = [os.path.basename(f) for f in globber(wdir) if os.path.isfile(f)]

# Substitute renamed files
renames = sabnzbd.load_data(RENAMES_FILE, self.workpath, remove=True)
if renames:
for name in renames:
if name in files:
files.remove(name)
files.append(renames[name])

# Looking for the longest name first, minimizes the chance on a mismatch
files.sort(lambda x, y: len(y) - len(x))

@@ -924,6 +987,7 @@ class NzbObject(TryList):
if (nzf.filename == filename) or (subject == filename) or (filename in subject):
nzf.filename = filename
nzf.completed = True
nzf.bytes_left = 0
self.handle_par2(nzf, file_done=True)
self.remove_nzf(nzf)
nzfs.remove(nzf)
@@ -941,6 +1005,7 @@ class NzbObject(TryList):
self.bytes += nzf.bytes
nzf.filename = filename
nzf.completed = True
nzf.bytes_left = 0
self.handle_par2(nzf, file_done=True)
self.remove_nzf(nzf)
logging.info('File %s added to job', filename)
@@ -1017,7 +1082,7 @@ class NzbObject(TryList):
self.partable.pop(setname)

__re_quick_par2_check = re.compile('\.par2\W*', re.I)
def check_quality(self):
def check_quality(self, req_ratio=0):
""" Determine amount of articles present on servers
and return (gross available, nett) bytes
"""
@@ -1028,7 +1093,8 @@ class NzbObject(TryList):
for nzf_id in self.files_table:
nzf = self.files_table[nzf_id]
assert isinstance(nzf, NzbFile)
short += nzf.bytes_left
if nzf.deleted:
short += nzf.bytes_left
if self.__re_quick_par2_check.search(nzf.subject):
pars += nzf.bytes
anypars = True
@@ -1037,7 +1103,7 @@ class NzbObject(TryList):
have = need + pars - short
ratio = float(have) / float(max(1, need))
if anypars:
enough = ratio * 100.0 >= float(cfg.req_completion_rate())
enough = ratio * 100.0 >= (req_ratio or float(cfg.req_completion_rate()))
else:
enough = have >= need
logging.debug('Download Quality: enough=%s, have=%s, need=%s, ratio=%s', enough, have, need, ratio)
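Editor's note: the completion check above reduces to simple byte arithmetic. A worked example with made-up numbers (req_completion_rate stands in for the cfg.req_completion_rate() setting):

need = 700 * 1024 * 1024      # bytes promised by the NZB
pars = 70 * 1024 * 1024       # bytes sitting in par2 files
short = 90 * 1024 * 1024      # bytes the servers report as missing
have = need + pars - short
ratio = float(have) / float(max(1, need))    # roughly 0.971
req_completion_rate = 100.2                  # example configuration value
enough = ratio * 100.0 >= req_completion_rate
print(enough)                                # False: 97.1% < 100.2%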
@@ -1061,15 +1127,18 @@ class NzbObject(TryList):
msg1 = T('Downloaded in %s at an average of %sB/s') % (complete_time, to_units(avg_bps*1024, dec_limit=1))
bad = self.nzo_info.get('bad_art_log', [])
miss = self.nzo_info.get('missing_art_log', [])
killed = self.nzo_info.get('killed_art_log', [])
dups = self.nzo_info.get('dup_art_log', [])
msg2 = msg3 = msg4 = ''
msg2 = msg3 = msg4 = msg5 = ''
if bad:
msg2 = ('<br/>' + T('%s articles were malformed')) % len(bad)
if miss:
msg3 = ('<br/>' + T('%s articles were missing')) % len(miss)
if dups:
msg4 = ('<br/>' + T('%s articles had non-matching duplicates')) % len(dups)
msg = ''.join((msg1, msg2, msg3, msg4,))
if killed:
msg5 = ('<br/>' + T('%s articles were removed')) % len(killed)
msg = ''.join((msg1, msg2, msg3, msg4, msg5, ))
self.set_unpack_info('Download', msg, unique=True)
if self.url:
self.set_unpack_info('Source', format_source_url(self.url), unique=True)
@@ -1241,6 +1310,15 @@ class NzbObject(TryList):
bytes_left += nzf.bytes_left
return bytes_left

def total_and_remaining(self):
""" Return total and remaining bytes """
bytes = 0
bytes_left = 0
for nzf in self.files:
bytes += nzf.bytes
bytes_left += nzf.bytes_left
return bytes, bytes_left

def gather_info(self, for_cli = False):
bytes_left_all = 0

@@ -1373,6 +1451,8 @@ class NzbObject(TryList):
self.pp_active = False
self.avg_stamp = time.mktime(self.avg_date.timetuple())
self.wait = None
if self.meta is None:
self.meta = {}
TryList.__init__(self)


@@ -1383,8 +1463,9 @@ class NzbObject(TryList):
#-------------------------------------------------------------------------------

def nzf_get_filename(nzf):
# Return filename, if the filename not set, try the
# the full subject line instead. Can produce non-ideal results
""" Return filename, if the filename not set, try the
the full subject line instead. Can produce non-ideal results
"""
name = nzf.filename
if not name:
name = nzf.subject
@@ -1393,8 +1474,31 @@ def nzf_get_filename(nzf):
return name.lower()


def get_ext_list():
""" Return priority extenstion list, with extensions starting with a period
"""
exts = []
for ext in cfg.prio_sort_list():
ext = ext.strip()
if not ext.startswith('.'):
ext = '.' + ext
exts.append(ext)
return exts


def ext_on_list(name, lst):
""" Return True if `name` contains any extension in `lst`
"""
for ext in lst:
if name.rfind(ext) >= 0:
return True
return False


def nzf_cmp_date(nzf1, nzf2):
# Compare files based on date, but give vol-par files preference
""" Compare files based on date, but give vol-par files preference.
Wrapper needed, because `cmp` function doesn't handle extra parms.
"""
return nzf_cmp_name(nzf1, nzf2, name=False)


@@ -1422,6 +1526,16 @@ def nzf_cmp_name(nzf1, nzf2, name=True):
if is_par2 and not is_par1:
return -1

# Anything with a priority extention goes first
ext_list = get_ext_list()
if ext_list:
onlist1 = ext_on_list(name1, ext_list)
onlist2 = ext_on_list(name2, ext_list)
if onlist1 and not onlist2:
return -1
if onlist2 and not onlist1:
return 1

if name:
# Prioritise .rar files above any other type of file (other than vol-par)
# Useful for nzb streaming
@@ -1520,8 +1634,10 @@ def get_attrib_file(path, size):
return [None for n in xrange(size)]

for n in xrange(size):
line = f.readline().strip('\n ')
line = f.readline().strip('\r\n ')
if line:
if line.lower() == 'none':
line = None
try:
line = int(line)
except:
@@ -1560,7 +1676,7 @@ def analyse_par2(name):
vol = m.group(2)
block = m.group(3)
elif name.lower().find('.par2') > 0:
head = os.path.splitext(name)[0]
head = os.path.splitext(name)[0].strip()
else:
head = None
return head, vol, block
@@ -1574,4 +1690,14 @@ def name_extractor(subject):
name = name.strip(' "')
if name and RE_NORMAL_NAME.search(name):
result = name
return result
return platform_encode(result)


def matcher(pattern, txt):
""" Return True if `pattern` is sufficiently equal to `txt`
"""
if txt.endswith(pattern):
txt = txt[:txt.rfind(pattern)].strip()
return (not txt) or txt.endswith('"')
else:
return False

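Editor's note: the new matcher() helper above accepts a subject that ends with the par2 set name, optionally preceded by an opening quote. A standalone copy with illustrative inputs:

def matcher(pattern, txt):
    # Return True if `pattern` is sufficiently equal to `txt`
    if txt.endswith(pattern):
        txt = txt[:txt.rfind(pattern)].strip()
        return (not txt) or txt.endswith('"')
    return False

print(matcher('my.set.s01e02', 'my.set.s01e02'))            # True
print(matcher('my.set.s01e02', '"my.set.s01e02'))           # True, quoted subject
print(matcher('my.set.s01e02', 'other.job my.set.s01e02'))  # False, extra text before the set name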
@@ -54,7 +54,7 @@ from sabnzbd.newzbin import Bookmarks
from sabnzbd.database import get_history_handle
from sabnzbd.encoding import unicoder

status_icons = {'idle':'../Resources/sab_idle.png','pause':'../Resources/sab_pause.png','clicked':'../Resources/sab_clicked.png'}
status_icons = {'idle':'../Resources/sab_idle.tiff','pause':'../Resources/sab_pause.tiff','clicked':'../Resources/sab_clicked.tiff'}
start_time = NSDate.date()
debug = 0

@@ -347,10 +347,7 @@ class SABnzbdDelegate(NSObject):
self.menu_queue.addItem_(menu_queue_item)
self.menu_queue.addItem_(NSMenuItem.separatorItem())

job_nb = 1
for pnfo in pnfo_list:
if job_nb > 10:
break
filename = unicoder(pnfo[PNFO_FILENAME_FIELD])
msgid = pnfo[PNFO_MSGID_FIELD]
bytesleft = pnfo[PNFO_BYTES_LEFT_FIELD] / MEBI
@@ -360,11 +357,10 @@ class SABnzbdDelegate(NSObject):
timeleft = self.calc_timeleft(bytesleftprogess, bpsnow)

job = "%s\t(%d/%d MB) %s" % (filename, bytesleft, bytes, timeleft)
job_nb += 1
menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, '', '')
self.menu_queue.addItem_(menu_queue_item)

self.info = "%d nzb(s)\t( %d / %d MB )" % (len(pnfo_list),(qnfo[QNFO_BYTES_LEFT_FIELD] / MEBI), (qnfo[QNFO_BYTES_FIELD] / MEBI))
self.info = "%d nzb(s)\t( %d / %d MB )" % (qnfo[QNFO_Q_SIZE_LIST_FIELD],(qnfo[QNFO_BYTES_LEFT_FIELD] / MEBI), (qnfo[QNFO_BYTES_FIELD] / MEBI))

else:
menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '')

@@ -31,9 +31,9 @@ import re
from sabnzbd.newsunpack import unpack_magic, par2_repair, external_processing, sfv_check
from threading import Thread
from sabnzbd.misc import real_path, get_unique_path, create_dirs, move_to_path, \
get_unique_filename, make_script_path, flag_file, \
make_script_path, \
on_cleanup_list, renamer, remove_dir, remove_all, globber, \
set_permissions
set_permissions, cleanup_empty_directories
from sabnzbd.tvsort import Sorter
from sabnzbd.constants import REPAIR_PRIORITY, TOP_PRIORITY, POSTPROC_QUEUE_FILE_NAME, \
POSTPROC_QUEUE_VERSION, sample_match, JOB_ADMIN, Status, VERIFIED_FILE
@@ -206,6 +206,8 @@ def process_job(nzo):
par_error = False
# keep track of any unpacking errors
unpack_error = False
# Signal empty download, for when 'empty_postproc' is enabled
empty = False
nzb_list = []
# These need to be initialised incase of a crash
workdir_complete = ''
@@ -232,6 +234,12 @@ def process_job(nzo):
nzo.save_attribs()
all_ok = False

if nzo.fail_msg: # Special case: aborted due to too many missing data
nzo.status = Status.FAILED
nzo.save_attribs()
all_ok = False
par_error = unpack_error = True

try:

# Get the folder containing the download result
@@ -239,7 +247,7 @@ def process_job(nzo):
tmp_workdir_complete = None

# if no files are present (except __admin__), fail the job
if len(globber(workdir)) < 2:
if all_ok and len(globber(workdir)) < 2:
if nzo.precheck:
enough, ratio = nzo.check_quality()
req_ratio = float(cfg.req_completion_rate()) / 100.0
@@ -252,13 +260,15 @@ def process_job(nzo):
emsg = T('Download might fail, only %s of required %s available') % (emsg, emsg2)
else:
emsg = T('Download failed - Out of your server\'s retention?')
empty = True
nzo.fail_msg = emsg
nzo.set_unpack_info('Fail', emsg)
nzo.status = Status.FAILED
# do not run unpacking or parity verification
flag_repair = flag_unpack = False
par_error = unpack_error = True
all_ok = False
all_ok = cfg.empty_postproc() and empty
if not all_ok:
par_error = unpack_error = True

script = nzo.script
cat = nzo.cat
@@ -268,7 +278,7 @@ def process_job(nzo):
filename, flag_repair, flag_unpack, flag_delete, script, cat)

## Par processing, if enabled
if flag_repair:
if all_ok and flag_repair:
par_error, re_add = parring(nzo, workdir)
if re_add:
# Try to get more par files
@@ -371,10 +381,7 @@ def process_job(nzo):
nzb_list = None
if nzb_list:
nzo.set_unpack_info('Download', T('Sent %s to queue') % unicoder(nzb_list))
try:
remove_dir(tmp_workdir_complete)
except:
pass
cleanup_empty_directories(tmp_workdir_complete)
else:
cleanup_list(tmp_workdir_complete, False)

@@ -392,7 +399,10 @@ def process_job(nzo):
logging.error(Ta('Error renaming "%s" to "%s"'), tmp_workdir_complete, workdir_complete)
logging.info("Traceback: ", exc_info = True)

job_result = int(par_error) + int(unpack_error)*2
if empty:
job_result = -1
else:
job_result = int(par_error) + int(unpack_error)*2

if cfg.ignore_samples() > 0:
remove_samples(workdir_complete)
@@ -410,7 +420,7 @@ def process_job(nzo):

## Run the user script
script_path = make_script_path(script)
if all_ok and (not nzb_list) and script_path:
if (all_ok or not cfg.safe_postproc()) and (not nzb_list) and script_path:
#set the current nzo status to "Ext Script...". Used in History
nzo.status = Status.RUNNING
nzo.set_action_line(T('Running script'), unicoder(script))
@@ -460,6 +470,9 @@ def process_job(nzo):
elif all_ok and isinstance(nzo.url, str):
sabnzbd.proxy_rm_bookmark(nzo.url)

## Force error for empty result
all_ok = all_ok and not empty

## Show final status in history
if all_ok:
growler.send_notification(T('Download Completed'), filename, 'complete')
@@ -533,6 +546,9 @@ def parring(nzo, workdir):
growler.send_notification(T('Post-processing'), nzo.final_name, 'pp')
logging.info('Par2 check starting on %s', filename)

## Get verification status of sets
verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {}

## Collect the par files
if nzo.partable:
par_table = nzo.partable.copy()
@@ -544,52 +560,66 @@ def parring(nzo, workdir):
par_error = False

if repair_sets:
for setname in repair_sets:
if cfg.ignore_samples() > 0 and 'sample' in setname.lower():
continue
if not verified.get(setname, False):
logging.info("Running repair on set %s", setname)
parfile_nzf = par_table[setname]
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, setname)
re_add = re_add or need_re_add
if not res and not need_re_add and cfg.sfv_check():
res = try_sfv_check(nzo, workdir, setname)
verified[setname] = res
par_error = par_error or not res
else:
logging.info("No par2 sets for %s", filename)
nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))
if cfg.sfv_check():
par_error = not try_sfv_check(nzo, workdir, '')
verified[''] = not par_error

for set_ in repair_sets:
logging.info("Running repair on set %s", set_)
parfile_nzf = par_table[set_]
need_re_add, res = par2_repair(parfile_nzf, nzo, workdir, set_)
if need_re_add:
re_add = True
par_error = par_error or not res
if re_add:
logging.info('Readded %s to queue', filename)
if nzo.priority != TOP_PRIORITY:
nzo.priority = REPAIR_PRIORITY
sabnzbd.nzbqueue.add_nzo(nzo)
sabnzbd.downloader.Downloader.do.resume_from_postproc()

if re_add:
logging.info('Readded %s to queue', filename)
if nzo.priority != TOP_PRIORITY:
nzo.priority = REPAIR_PRIORITY
sabnzbd.nzbqueue.add_nzo(nzo)
sabnzbd.downloader.Downloader.do.resume_from_postproc()
sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)

logging.info('Par2 check finished on %s', filename)

if (par_error and not re_add) or not repair_sets:
# See if alternative SFV check is possible
if cfg.sfv_check() and not (flag_file(workdir, VERIFIED_FILE) and not repair_sets):
sfvs = globber(workdir, '*.sfv')
else:
sfvs = None
if sfvs:
par_error = False
nzo.set_unpack_info('Repair', T('Trying SFV verification'))
for sfv in sfvs:
failed = sfv_check(sfv)
if failed:
msg = T('Some files failed to verify against "%s"') % unicoder(os.path.basename(sfv))
msg += '; '
msg += '; '.join(failed)
nzo.set_unpack_info('Repair', msg)
par_error = True
if not par_error:
nzo.set_unpack_info('Repair', T('Verified successfully using SFV files'))
elif not repair_sets:
logging.info("No par2 sets for %s", filename)
nzo.set_unpack_info('Repair', T('[%s] No par2 sets') % unicoder(filename))

if not par_error:
flag_file(workdir, VERIFIED_FILE, create=True)
logging.info('Par2 check finished on %s', filename)
return par_error, re_add


def try_sfv_check(nzo, workdir, setname):
""" Attempt to verify set using SFV file
Return True if verified, False when failed
When setname is '', all SFV files will be used, otherwise only the matching one
When setname is '' and no SFV files are found, True is returned
"""
# Get list of SFV names; shortest name first, minimizes the chance on a mismatch
sfvs = globber(workdir, '*.sfv')
sfvs.sort(lambda x, y: len(x) - len(y))
par_error = False
found = False
for sfv in sfvs:
if setname in os.path.basename(sfv):
found = True
nzo.set_unpack_info('Repair', T('Trying SFV verification'))
failed = sfv_check(sfv)
if failed:
msg = T('Some files failed to verify against "%s"') % unicoder(os.path.basename(sfv))
msg += '; '
msg += '; '.join(failed)
nzo.set_unpack_info('Repair', msg)
par_error = True
else:
nzo.set_unpack_info('Repair', T('Verified successfully using SFV files'))
if setname:
break
return (found or not setname) and not par_error


#------------------------------------------------------------------------------

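Editor's note: try_sfv_check() above delegates the actual verification to sabnzbd.newsunpack.sfv_check(); conceptually an SFV file is just "filename CRC32" pairs. A minimal, hypothetical illustration of that idea (not SABnzbd's implementation):

import binascii, os

def crc_matches(folder, sfv_line):
    # One .sfv line has the form "<filename> <crc32 in hex>"
    name, crc = sfv_line.rsplit(None, 1)
    path = os.path.join(folder, name)
    if not os.path.exists(path):
        return False
    data = open(path, 'rb').read()
    # Mask to keep the CRC unsigned and compare case-insensitively
    return '%08x' % (binascii.crc32(data) & 0xffffffff) == crc.lower()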
@@ -642,6 +672,11 @@ def cleanup_list(wdir, skip_nzb):
except:
logging.error(Ta('Removing %s failed'), path)
logging.info("Traceback: ", exc_info = True)
if files:
try:
remove_dir(wdir)
except:
pass


def prefix(path, pre):
@@ -657,29 +692,24 @@ def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
if so send to queue and remove if on CleanList
Returns list of processed NZB's
"""
lst = []

try:
files = os.listdir(wdir)
except:
files = []
files = []
for root, dirs, names in os.walk(wdir):
for name in names:
files.append(os.path.join(root, name))

for file_ in files:
if os.path.splitext(file_)[1].lower() != '.nzb':
return lst
return None

# For a single NZB, use the current job name
# For multiple NZBs, cannot use the current job name
if len(files) != 1:
nzbname = None

# Process all NZB files
for file_ in files:
if file_.lower().endswith('.nzb'):
dirscanner.ProcessSingleFile(file_, os.path.join(wdir, file_), pp, script, cat,
priority=priority, keep=False, dup_check=False, nzbname=nzbname)
lst.append(file_)

return lst
dirscanner.ProcessSingleFile(os.path.split(file_)[1], file_, pp, script, cat,
priority=priority, keep=False, dup_check=False, nzbname=nzbname)
return files


def one_file_or_folder(folder):

@@ -303,13 +303,7 @@ class RSSQueue(object):
regcount = len(regexes)

# Set first if this is the very first scan of this URI
if feed not in self.jobs:
self.jobs[feed] = {}
first = not bool(self.jobs[feed])

jobs = self.jobs[feed]

first = first and ignoreFirst
first = (feed not in self.jobs) and ignoreFirst

# Add sabnzbd's custom User Agent
feedparser.USER_AGENT = 'SABnzbd+/%s' % sabnzbd.version.__version__
@@ -319,6 +313,8 @@ class RSSQueue(object):
uri += '&dl=1'

# Read the RSS feed
msg = None
entries = None
if readout:
uri = uri.replace(' ', '%20')
logging.debug("Running feedparser on %s", uri)
@@ -343,9 +339,18 @@ class RSSQueue(object):
if not entries:
msg = Ta('RSS Feed %s was empty') % uri
logging.info(msg)

if feed not in self.jobs:
self.jobs[feed] = {}
jobs = self.jobs[feed]
if readout:
if not entries:
return unicoder(msg)
else:
entries = jobs.keys()
# Sort in the order the jobs came from the feed
entries.sort(lambda x, y: jobs[x].get('order', 0) - jobs[y].get('order', 0))


order = 0
# Filter out valid new links
@@ -487,8 +492,7 @@ class RSSQueue(object):
for feed in feeds.keys():
try:
if feeds[feed].enable.get():
if not active:
logging.info('Starting scheduled RSS read-out')
logging.info('Starting scheduled RSS read-out for "%s"', feed)
active = True
self.run_feed(feed, download=True, ignoreFirst=True)
# Wait 15 seconds, else sites may get irritated
@@ -502,7 +506,7 @@ class RSSQueue(object):
pass
if active:
self.save()
logging.info('Finished scheduled RSS read-out')
logging.info('Finished scheduled RSS read-outs')


@synchronized(LOCK)
@@ -556,7 +560,7 @@ class RSSQueue(object):
self.jobs[feed][item]['status'] = 'D-'


RE_NEWZBIN = re.compile(r'(newz)(bin|xxx|bin2)\.[\w]+/browse/post/(\d+)', re.I)
RE_NEWZBIN = re.compile(r'(newz)(bin|xxx|bin2|xxx2)\.[\w]+/browse/post/(\d+)', re.I)

def _HandleLink(jobs, link, title, flag, orgcat, cat, pp, script, download, star, order,
priority=NORMAL_PRIORITY, rule=0):
@@ -617,7 +621,7 @@ def _get_link(uri, entry):
link = None
category = ''
uri = uri.lower()
if 'newzbin.' in uri or 'newzxxx.'in uri or 'newzbin2.' in uri:
if 'newzbin.' in uri or 'newzxxx.' in uri or 'newzbin2.' in uri or 'newzxxx2.' in uri:
link = entry.link
if not (link and '/post/' in link.lower()):
# Use alternative link

@@ -94,7 +94,10 @@ class SABTrayThread(SysTrayIconThread):

# menu handler
def opencomplete(self, icon):
os.startfile(cfg.complete_dir.get_path())
try:
os.startfile(cfg.complete_dir.get_path())
except WindowsError:
pass

# menu handler
def browse(self, icon):

@@ -223,43 +223,47 @@ def abort():
__SCHED.running = False


def sort_schedules(forward):
def sort_schedules(all_events, now=None):
""" Sort the schedules, based on order of happening from now
forward: assume expired daily event to occur tomorrow
`all_events=True`: Return an event for each active day
`all_events=False`: Return only first occurring event of the week
`now` : for testing: simulated localtime()
"""

day_min = 24 * 60
week_min = 7 * day_min
events = []
now = time.localtime()
now_hm = int(now[3])*60 + int(now[4])
now = int(now[6])*24*60 + now_hm

now = now or time.localtime()
now_hm = now[3] * 60 + now[4]
now = now[6] * day_min + now_hm

for schedule in cfg.schedules():
parms = None
try:
m, h, d, action, parms = schedule.split(None, 4)
m, h, dd, action, parms = schedule.split(None, 4)
except:
try:
m, h, d, action = schedule.split(None, 3)
m, h, dd, action = schedule.split(None, 3)
except:
continue # Bad schedule, ignore
action = action.strip()
try:
then = int(h)*60 + int(m)
if d == '*':
d = int(now/(24*60))
if forward and (then < now_hm): d = (d + 1) % 7
else:
d = int(d)-1
then = d*24*60 + then
except:
if dd == '*':
dd = '1234567'
if not dd.isdigit():
continue # Bad schedule, ignore
for d in dd:
then = (int(d) - 1) * day_min + int(h) * 60 + int(m)
dif = then - now
if all_events and dif < 0:
# Expired event will occur again after a week
dif = dif + week_min

dif = then - now
if dif < 0: dif = dif + 7*24*60
events.append((dif, action, parms, schedule))
if not all_events:
break

events.append((dif, action, parms, schedule))

events.sort(lambda x, y: x[0]-y[0])
events.sort(lambda x, y: x[0] - y[0])
return events

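Editor's note: the rewritten sort_schedules() above works in minutes since Monday 00:00. A small worked example with made-up times (Monday is weekday 0, so a day digit d maps to (int(d) - 1) * day_min):

day_min = 24 * 60
week_min = 7 * day_min
now = 2 * day_min + 10 * 60             # Wednesday 10:00
then = 3 * day_min + 8 * 60             # a Thursday 08:00 event
print(then - now)                       # 1320 minutes ahead
expired = (0 * day_min + 9 * 60) - now  # Monday 09:00 has already passed
if expired < 0:
    expired += week_min                 # wraps to next week's occurrence
print(expired)                          # 7140 minutes, just under five days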
@@ -272,7 +276,7 @@ def analyse(was_paused=False):
speedlimit = None
servers = {}

for ev in sort_schedules(forward=False):
for ev in sort_schedules(all_events=True):
logging.debug('Schedule check result = %s', ev)
action = ev[1]
try:

@@ -380,13 +380,14 @@ SKIN_TEXT = {
'explain-top_only' : TT('Enable for less memory usage. Disable to prevent slow jobs from blocking the queue.'),
'opt-safe_postproc' : TT('Post-Process Only Verified Jobs'),
'explain-safe_postproc' : TT('Only perform post-processing on jobs that passed all PAR2 checks.'),
'opt-pause_on_pwrar' : TT('Pause job when encrypted RAR is downloaded'),
'explain-pause_on_pwrar' : TT('You\'ll need to set a password and resume the job.'),
'opt-pause_on_pwrar' : TT('Action when encrypted RAR is downloaded'),
'explain-pause_on_pwrar' : TT('In case of "Pause", you\'ll need to set a password and resume the job.'),
'opt-no_dupes' : TT('Detect Duplicate Downloads'),
'explain-no_dupes' : TT('Detect identically named NZB files (requires NZB backup option) and duplicate titles across RSS feeds.'),
'nodupes-off' : TT('Off'), #: Three way switch for duplicates
'nodupes-ignore' : TT('Discard'), #: Three way switch for duplicates
'nodupes-pause' : TT('Pause'), #: Three way switch for duplicates
'abort' : TT('Abort'), #: Three way switch for encrypted posts
'opt-sfv_check' : TT('Enable SFV-based checks'),
'explain-sfv_check' : TT('Do an extra verification based on SFV files.'),
'opt-unpack_check' : TT('Check result of unpacking'),
@@ -458,6 +459,8 @@ SKIN_TEXT = {
'explain-max_art_tries' : TT('Maximum number of retries per server'),
'opt-max_art_opt' : TT('Only for optional servers'),
'explain-max_art_opt' : TT('Apply maximum retries only to optional servers'),
'opt-fail_hopeless' : TT('Abort jobs that cannot be completed'),
'explain-fail_hopeless' : TT('When during download it becomes clear that too much data is missing, abort the job'),


# Config->Server

@@ -425,11 +425,10 @@ class SeriesSorter(object):
file, filepath, size = largest
# >20MB
if filepath and size > 20971520:
tmp, self.ext = os.path.splitext(file)
self.fname = tmp
self.fname, self.ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, self.ext)
# Replace %fn with the original filename
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
# Replace %ext with extension
newpath = newpath.replace('%ext', self.ext)
@@ -440,7 +439,7 @@ class SeriesSorter(object):
except:
logging.error("Failed to rename: %s to %s", current_path, newpath)
logging.info("Traceback: ", exc_info = True)
rename_similar(current_path, self.ext, self.filename_set)
rename_similar(current_path, self.ext, self.filename_set, ())
else:
logging.debug('Current path already exists, skipping rename, %s', newpath)
else:
@@ -625,6 +624,9 @@ class GenericSorter(object):
mapping.append(('%decade', self.movie_info['decade']))
mapping.append(('%0decade', self.movie_info['decade_two']))

# Original dir name
mapping.append(('%dn', self.original_dirname))

path = path_subst(sorter, mapping)

for key, name in REPLACE_AFTER.iteritems():
@@ -676,10 +678,9 @@ class GenericSorter(object):
else:
filepath = os.path.join(current_path, file)
if os.path.exists(filepath):
tmp, ext = os.path.splitext(file)
self.fname = tmp
self.fname, ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, ext)
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
try:
logging.debug("Rename: %s to %s", filepath, newpath)
@@ -687,7 +688,7 @@ class GenericSorter(object):
except:
logging.error(Ta('Failed to rename: %s to %s'), filepath, newpath)
logging.info("Traceback: ", exc_info = True)
rename_similar(current_path, ext, self.filename_set)
rename_similar(current_path, ext, self.filename_set, ())

## Sequence File Handling
# if there is more than one extracted file check for CD1/1/A in the title
@@ -696,12 +697,13 @@ class GenericSorter(object):
# rename files marked as in a set
if matched_files:
logging.debug("Renaming a series of generic files (%s)", matched_files)
renamed = matched_files.values()
for index, file in matched_files.iteritems():
filepath = os.path.join(current_path, file)
tmp, ext = os.path.splitext(file)
self.fname = tmp
renamed.append(filepath)
self.fname, ext = os.path.splitext(os.path.split(file)[1])
name = '%s%s' % (self.filename_set, self.extra)
name = name.replace('%1', str(index)).replace('%fn', tmp)
name = name.replace('%1', str(index)).replace('%fn', self.fname)
name = name + ext
newpath = os.path.join(current_path, name)
try:
@@ -710,7 +712,7 @@ class GenericSorter(object):
except:
logging.error(Ta('Failed to rename: %s to %s'), filepath, newpath)
logging.info("Traceback: ", exc_info = True)
rename_similar(current_path, ext, self.filename_set)
rename_similar(current_path, ext, self.filename_set, renamed)
else:
logging.debug("Movie files not in sequence %s", _files)

@@ -886,10 +888,9 @@ class DateSorter(object):
size = os.stat(filepath).st_size
if size > cfg.movie_rename_limit.get_int():
if 'sample' not in file:
tmp, ext = os.path.splitext(file)
self.fname = tmp
self.fname, ext = os.path.splitext(os.path.split(file)[1])
newname = "%s%s" % (self.filename_set, ext)
newname = newname.replace('%fn', tmp)
newname = newname.replace('%fn', self.fname)
newpath = os.path.join(current_path, newname)
if not os.path.exists(newpath):
try:
@@ -898,7 +899,7 @@ class DateSorter(object):
except:
logging.error(Ta('Failed to rename: %s to %s'), current_path, newpath)
logging.info("Traceback: ", exc_info = True)
rename_similar(current_path, ext, self.filename_set)
rename_similar(current_path, ext, self.filename_set, ())
break


@@ -1091,10 +1092,11 @@ def strip_folders(path):
return os.path.normpath('/'.join([strip_all(x) for x in f]))


def rename_similar(folder, skip_ext, name):
def rename_similar(folder, skip_ext, name, skipped_files):
""" Rename all other files in the 'folder' hierarchy after 'name'
and move them to the root of 'folder'.
Files having extension 'skip_ext' will be moved, but not renamed.
Don't touch files in list `skipped_files`
"""
logging.debug('Give files in set "%s" matching names.', name)
folder = os.path.normpath(folder)
@@ -1103,6 +1105,8 @@ def rename_similar(folder, skip_ext, name):
for root, dirs, files in os.walk(folder):
for f in files:
path = os.path.join(root, f)
if path in skipped_files:
continue
org, ext = os.path.splitext(f)
if ext.lower() == skip_ext:
# Move file, but do not rename

@@ -166,7 +166,7 @@ class URLGrabber(Thread):
logging.error(msg)
misc.bad_fetch(future_nzo, clean_matrix_url(url), msg, retry=True)
continue
category = _MATRIX_MAP.get(category, category)
category = get_matrix_category(url, category)

if del_bookmark:
# No retries of nzbmatrix bookmark removals
@@ -207,6 +211,10 @@ class URLGrabber(Thread):
if res == -2:
logging.info('Incomplete NZB, retry after 5 min %s', url)
when = 300
elif res == -1:
# Error, but no reason to retry. Warning is already given
NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
continue
else:
logging.info('Unknown error fetching NZB, retry after 2 min %s', url)
when = 120
@@ -398,13 +402,13 @@ _MATRIX_MAP = {
'13' : 'games.xbox',
'14' : 'games.xbox360',
'56' : 'games.xbox360 (other)',
'54' : 'movies.brrip',
'2' : 'movies.divx/xvid',
'1' : 'movies.dvd',
'50' : 'movies.hd (image)',
'1' : 'movies.sd (image)',
'2' : 'movies.sd',
'54' : 'movies.hd (remux)',
'42' : 'movies.hd (x264)',
'50' : 'movies.hd (image)',
'4' : 'movies.other',
'24' : 'music.dvd',
'24' : 'music.sd (image)',
'23' : 'music.lossless',
'22' : 'music.mp3, albums',
'47' : 'music.mp3, singles',
@@ -418,7 +422,7 @@ _MATRIX_MAP = {
'38' : 'other.iOS/iPhone',
'40' : 'other.other',
'26' : 'other.radio',
'5' : 'tv.dvd (image)',
'5' : 'tv.sd (image)',
'57' : 'tv.hd (image)',
'41' : 'tv.hd (x264)',
'8' : 'tv.other',
@@ -426,3 +430,9 @@ _MATRIX_MAP = {
'7' : 'tv.sport/ent'
}

def get_matrix_category(url, category):
category = _MATRIX_MAP.get(category, category)
if category and 'nzbxxx.com' in url:
return 'XXX: ' + category
else:
return category

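Editor's note: a short, hypothetical use of the category mapping above; the dictionary below is only an excerpt of _MATRIX_MAP and the URLs are placeholders.

_MATRIX_MAP = {'42': 'movies.hd (x264)'}   # excerpt only

def get_matrix_category(url, category):
    # Translate a numeric nzbmatrix category id, prefixing adult-site results
    category = _MATRIX_MAP.get(category, category)
    if category and 'nzbxxx.com' in url:
        return 'XXX: ' + category
    return category

print(get_matrix_category('https://api.nzbmatrix.example/', '42'))  # movies.hd (x264)
print(get_matrix_category('https://api.nzbxxx.com/', '42'))         # XXX: movies.hd (x264)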
File diff suppressed because it is too large
@@ -30,6 +30,12 @@ else:

import sabnzbd

_JUNKFOLDERS = (
'boot', 'bootmgr', 'cache', 'msocache', 'recovery', '$recycle.bin', 'recycler',
'system volume information', 'temporary internet files', # windows specific
'.fseventd', '.spotlight', '.trashes', '.vol', 'cachedmessages', 'caches', 'trash' # osx specific
)

# this is for the drive letter code, it only works on windows
if os.name == 'nt':
from ctypes import windll
@@ -91,6 +97,7 @@ def folders_at_path(path, include_parent = False):
if doit:
file_list.append({ 'name': unicoder(filename), 'path': unicoder(fpath) })
file_list = filter(lambda entry: os.path.isdir(entry['path']), file_list)
file_list = filter(lambda entry: entry['name'].lower() not in _JUNKFOLDERS, file_list)
file_list = sorted(file_list, lambda x, y: cmp(os.path.basename(x['name']).lower(), os.path.basename(y['path']).lower()))
except:
# No access, ignore

@@ -41,7 +41,7 @@ class Wizard(object):
self.__web_dir = sabnzbd.WIZARD_DIR
self.__prim = prim
self.info = {'webdir': sabnzbd.WIZARD_DIR,
'steps':4, 'version':sabnzbd.__version__,
'steps':3, 'version':sabnzbd.__version__,
'T': T}

@cherrypy.expose
@@ -162,46 +162,19 @@ class Wizard(object):
if not cfg.username() or not cfg.password():
sabnzbd.interface.set_auth(cherrypy.config)

# Create Indexers page
info = self.info.copy()
info['num'] = '» %s' % T('Step Three')
info['number'] = 3
info['newzbin_user'] = cfg.newzbin_username()
info['newzbin_pass'] = cfg.newzbin_password.get_stars()
info['newzbin_bookmarks'] = cfg.newzbin_bookmarks()
info['newzbin_url'] = cfg.newzbin_url()
info['matrix_user'] = cfg.matrix_username()
info['matrix_apikey'] = cfg.matrix_apikey()
info['T'] = Ttemplate
template = Template(file=os.path.join(self.__web_dir, 'three.html'),
searchList=[info], compilerSettings=sabnzbd.interface.DIRECTIVES)
return template.respond()

@cherrypy.expose
def four(self, **kwargs):
""" Accept Indexers and show Restart screen """
if kwargs:
if 'newzbin_user' in kwargs and 'newzbin_pass' in kwargs:
cfg.newzbin_username.set(kwargs.get('newzbin_user',''))
cfg.newzbin_password.set(kwargs.get('newzbin_pass',''))
cfg.newzbin_bookmarks.set(kwargs.get('newzbin_bookmarks', '0'))
if 'matrix_user' in kwargs and 'matrix_apikey' in kwargs:
cfg.matrix_username.set(kwargs.get('matrix_user',''))
cfg.matrix_apikey.set(kwargs.get('matrix_apikey',''))

config.save_config()

# Show Restart screen
info = self.info.copy()
info['num'] = '» %s' % T('Step Four')
info['number'] = 4
info['num'] = '» %s' % T('Step Three')
info['number'] = 3
info['helpuri'] = 'http://wiki.sabnzbd.org/'
info['session'] = cfg.api_key()

info['access_url'], info['urls'] = self.get_access_info()
info['T'] = Ttemplate

template = Template(file=os.path.join(self.__web_dir, 'four.html'),
template = Template(file=os.path.join(self.__web_dir, 'three.html'),
searchList=[info], compilerSettings=sabnzbd.interface.DIRECTIVES)
return template.respond()


Binary file not shown.
Binary file not shown.