Mirror of https://github.com/sabnzbd/sabnzbd.git (synced 2025-12-24 08:08:37 -05:00)
Compare commits
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e06c1d61fb | |
| | 600c5209c6 | |
| | bee90366ee | |
| | e9bc4e9417 | |
| | f01ff15761 | |
| | 356ada159d | |
PKG-INFO (4 changed lines)

@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 3.5.0RC1
-Summary: SABnzbd-3.5.0RC1
+Version: 3.5.0RC4
+Summary: SABnzbd-3.5.0RC4
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org
README.mkd (14 changed lines)

@@ -1,6 +1,18 @@
-Release Notes - SABnzbd 3.5.0 Release Candidate 1
+Release Notes - SABnzbd 3.5.0 Release Candidate 3
 =========================================================
 
+## Changes and bugfixes since 3.5.0 Release Candidate 3
+- RSS-feed URLs were corrupted.
+
+## Changes and bugfixes since 3.5.0 Release Candidate 2
+- Failed 7zip unpack was not reported.
+- Multivolume 7zip's were not always unpacked.
+
+## Changes and bugfixes since 3.5.0 Release Candidate 1
+- `Deobfuscate final filenames` is skipped for DVD's and Blu-ray's.
+- HTML characters in configuration fields were shown incorrectly.
+- Global interface settings would not always be applied correctly.
+
 ## Changes since 3.4.2
 - Removed Python 3.6 support.
 - SOCKS5 proxy support for all outgoing connections.
@@ -30,6 +30,7 @@ import hashlib
 import socket
 import ssl
 import functools
+import copy
 from random import randint
 from xml.sax.saxutils import escape
 from Cheetah.Template import Template
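The `copy` import added here is used by the template wrapper in the next hunk to deep-copy the Cheetah search list before escaping it. A minimal standalone sketch of why `copy.deepcopy` is needed rather than a shallow copy (the dict contents are invented for illustration): a shallow copy still shares the nested dicts, so in-place escaping would leak back into the original.

```python
import copy

settings = {"misc": {"note": "Movies & Series"}}   # hypothetical nested config data

shallow = copy.copy(settings)                      # outer dict copied, inner dict still shared
shallow["misc"]["note"] = "Movies &amp; Series"
print(settings["misc"]["note"])                    # "Movies &amp; Series": original was mutated

settings = {"misc": {"note": "Movies & Series"}}
deep = copy.deepcopy(settings)                     # nested dicts are copied as well
deep["misc"]["note"] = "Movies &amp; Series"
print(settings["misc"]["note"])                    # "Movies & Series": original untouched
```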
@@ -367,8 +368,11 @@ def check_apikey(kwargs):
 
 def template_filtered_response(file: str, search_list: Dict[str, Any]):
     """Wrapper for Cheetah response"""
-    recursive_html_escape(search_list, exclude_items=("webdir",))
-    return Template(file=file, searchList=[search_list], compilerSettings=CHEETAH_DIRECTIVES).respond()
+    # We need a copy, because otherwise source-dicts might be modified
+    search_list_copy = copy.deepcopy(search_list)
+    # 'filters' is excluded because the RSS-filters are listed twice
+    recursive_html_escape(search_list_copy, exclude_items=("webdir", "filters"))
+    return Template(file=file, searchList=[search_list_copy], compilerSettings=CHEETAH_DIRECTIVES).respond()
 
 
 def log_warning_and_ip(txt):
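Before this change the search list was HTML-escaped in place, so any dict that was also referenced elsewhere (for example a feed configuration handed to the template) kept the escaped strings afterwards. That pattern of corruption is consistent with the "RSS-feed URLs were corrupted" note in the release notes, although the diff itself does not say so. A simplified, self-contained sketch of the difference (the helper and the dict contents are stand-ins, not SABnzbd code):

```python
import copy
import html
from typing import Any, Dict, List, Union


def recursive_html_escape(data: Union[Dict[str, Any], List], exclude_items=()):
    # Simplified stand-in for the helper shown in a later hunk
    iterator = data.items() if isinstance(data, dict) else enumerate(data)
    for key, value in iterator:
        if key not in exclude_items:
            if isinstance(value, str):
                data[key] = html.escape(value, quote=True)
            elif isinstance(value, (dict, list)):
                recursive_html_escape(value, exclude_items=exclude_items)


# A long-lived dict that is both rendered in a template and used for real requests
feed = {"uri": "https://indexer.example.org/rss?t=search&apikey=abc"}

# Old behaviour: escape in place, so the stored URL now contains "&amp;"
recursive_html_escape(feed)
print(feed["uri"])            # ...rss?t=search&amp;apikey=abc

# New behaviour: escape a deep copy and hand only the copy to the template
feed = {"uri": "https://indexer.example.org/rss?t=search&apikey=abc"}
template_view = copy.deepcopy(feed)
recursive_html_escape(template_view)
print(feed["uri"])            # unchanged
```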
@@ -1445,7 +1449,7 @@ class ConfigRss:
 
         if filt:
             feed_cfg.filters.update(
-                int(kwargs.get("index", 0)), (cat, pp, script, kwargs.get("filter_type"), filt, prio, enabled)
+                int(kwargs.get("index", 0)), [cat, pp, script, kwargs.get("filter_type"), filt, prio, enabled]
             )
 
         # Move filter if requested
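The only change in this hunk is that the filter is stored as a list instead of a tuple. The diff does not state the motivation; one plausible reason (an assumption) is that values read back from the config file arrive as lists, and in Python a tuple never compares equal to a list even when the elements match, so mixing the two types makes identical filters look different. A tiny illustration with invented filter values:

```python
# Hypothetical RSS filter entry: category, pp, script, filter type, text, priority, enabled
stored_as_tuple = ("tv", "", "Default", "A", "1080p", "-100", "1")
read_back_as_list = ["tv", "", "Default", "A", "1080p", "-100", "1"]

print(stored_as_tuple == read_back_as_list)        # False: a tuple is never equal to a list
print(list(stored_as_tuple) == read_back_as_list)  # True once both sides use the same type
```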
@@ -1080,11 +1080,13 @@ def recursive_html_escape(input_dict_or_list: Union[Dict[str, Any], List], exclu
             iterator = enumerate(input_dict_or_list)
 
         for key, value in iterator:
-            # We ignore any other than str and those on the exclude_items-list
-            if isinstance(value, str) and key not in exclude_items:
-                input_dict_or_list[key] = html.escape(value, quote=True)
-            if isinstance(value, (dict, list)):
-                recursive_html_escape(value)
+            # Ignore any keys that are not safe to convert
+            if key not in exclude_items:
+                # We ignore any other than str
+                if isinstance(value, str):
+                    input_dict_or_list[key] = html.escape(value, quote=True)
+                if isinstance(value, (dict, list)):
+                    recursive_html_escape(value, exclude_items=exclude_items)
     else:
         raise ValueError("Expected dict or str, got %s" % type(input_dict_or_list))
 
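Two behavioural differences are worth spelling out: the old code still recursed into excluded keys whose values were dicts or lists, and it dropped `exclude_items` on recursion, so exclusions only applied at the top level; the new code skips excluded keys entirely and propagates the exclusion list. A self-contained sketch of the new logic on invented data (not the actual SABnzbd call sites):

```python
import html
from typing import Any, Dict, List, Union


def recursive_html_escape_new(data: Union[Dict[str, Any], List], exclude_items=()):
    # Mirrors the new logic from the hunk above, without the outer type check
    iterator = data.items() if isinstance(data, dict) else enumerate(data)
    for key, value in iterator:
        # Ignore any keys that are not safe to convert
        if key not in exclude_items:
            if isinstance(value, str):
                data[key] = html.escape(value, quote=True)
            if isinstance(value, (dict, list)):
                recursive_html_escape_new(value, exclude_items=exclude_items)


page_data = {
    "title": "Movies & Series",                     # escaped: & becomes &amp;
    "filters": [["1080p", "R2D2 & C3PO"]],          # excluded key: left completely untouched
    "servers": [{"note": "<primary> & <backup>"}],  # recursed into, strings escaped
}
recursive_html_escape_new(page_data, exclude_items=("filters",))
print(page_data["filters"])   # [['1080p', 'R2D2 & C3PO']]
print(page_data["servers"])   # [{'note': '&lt;primary&gt; &amp; &lt;backup&gt;'}]
```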
@@ -970,6 +970,8 @@ def unseven(nzo: NzbObject, workdir_complete: str, one_folder: bool, sevens: Lis
         logging.info("Starting extract on 7zip set/file: %s ", seven_set)
         nzo.set_action_line(T("Unpacking"), setname_from_path(seven_set))
 
+        # Sort, so that x.001 is the first one
+        seven_sets[seven_set].sort()
         seven_path = seven_sets[seven_set][0]
 
         if workdir_complete and seven_path.startswith(nzo.download_path):
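This addresses the "Multivolume 7zip's were not always unpacked" note: extraction starts from the first element of the volume list, and that list is not guaranteed to arrive in order, so without the sort the set could start on the wrong part. A small illustration with invented file names:

```python
# Volumes as they might be collected from the job, in arbitrary order
seven_set_files = ["movie.7z.003", "movie.7z.001", "movie.7z.002"]

print(seven_set_files[0])   # movie.7z.003: extraction would start on the wrong volume

seven_set_files.sort()      # lexicographic sort puts x.001 first
print(seven_set_files[0])   # movie.7z.001
```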
@@ -978,7 +980,9 @@
             extraction_path = os.path.split(seven_path)[0]
 
         res, new_files_set = seven_extract(nzo, seven_path, seven_set, extraction_path, one_folder)
-        if not res and nzo.delete:
+        if res:
+            unseven_failed = True
+        elif nzo.delete:
             for seven in seven_sets[seven_set]:
                 try:
                     remove_file(seven)
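This corresponds to the "Failed 7zip unpack was not reported" note: the old condition only covered the success-plus-delete case, so a failing `seven_extract` (a truthy `res`, judging from the new branch) left no trace. The new code records the failure first and only deletes the archives when extraction succeeded. A minimal sketch of the two control flows (helper names invented):

```python
def handle_result_old(res: int, delete_enabled: bool) -> str:
    if not res and delete_enabled:
        return "delete archives"
    return "nothing recorded"            # a failure falls through silently


def handle_result_new(res: int, delete_enabled: bool) -> str:
    if res:                              # truthy result code means the extraction failed
        return "mark unseven_failed"
    elif delete_enabled:
        return "delete archives"
    return "keep archives"


print(handle_result_old(2, True))        # nothing recorded: the failure stays invisible
print(handle_result_new(2, True))        # mark unseven_failed: reported back to the job
```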