Compare commits

...

9 Commits

Author SHA1 Message Date
Safihre
9cbae891d0 WIP on release313 2024-10-16 10:37:53 +02:00
Safihre
fb0ef21768 The tests folder was ignored by default by Renovate 2024-10-15 11:55:26 +02:00
Safihre
277679ef53 Add Python 3.13 to CI tests 2024-10-15 10:10:24 +02:00
Safihre
e7e47bbcb0 Do not compare articles just based on article-ID
It turns out that some NZBs contain duplicate article-IDs within a single file. This caused all "article in nzf.article" comparisons to return the wrong result.
2024-10-15 09:44:04 +02:00
Safihre
65ffb5ca81 All resets of try lists should be locked fully 2024-10-15 09:43:55 +02:00
SABnzbd Automation
6cf308e441 Update translatable texts
[skip ci]
2024-10-14 01:47:23 +00:00
renovate[bot]
870fa40c91 Update all dependencies 2024-10-14 01:46:42 +00:00
Safihre
39d9eaec2a Create new history database in case of no such table error 2024-10-08 21:06:59 +02:00
SABnzbd Automation
6fd4d0882c Update translatable texts
[skip ci]
2024-10-08 07:39:00 +00:00
32 changed files with 98 additions and 82 deletions

View File

@@ -15,6 +15,7 @@
"builder/release-requirements.txt"
]
},
"ignorePaths": [],
"ignoreDeps": [
"jaraco.text",
"jaraco.context",

View File

@@ -31,18 +31,18 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
python-architecture: ["x64"]
name: ["Linux"]
os: [ubuntu-latest]
include:
- name: macOS
os: macos-latest
python-version: "3.12"
python-version: "3.13"
python-architecture: "x64"
- name: Windows
os: windows-latest
python-version: "3.12"
python-version: "3.13"
python-architecture: "x64"
- name: Windows (32bit)
os: windows-latest

View File

@@ -4,7 +4,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: team@sabnzbd.org\n"
"Language-Team: SABnzbd <team@sabnzbd.org>\n"

View File

@@ -4,7 +4,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: team@sabnzbd.org\n"
"Language-Team: SABnzbd <team@sabnzbd.org>\n"

View File

@@ -7,7 +7,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Czech (https://app.transifex.com/sabnzbd/teams/111101/cs/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Danish (https://app.transifex.com/sabnzbd/teams/111101/da/)\n"

View File

@@ -8,7 +8,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Spanish (https://app.transifex.com/sabnzbd/teams/111101/es/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Finnish (https://app.transifex.com/sabnzbd/teams/111101/fi/)\n"

View File

@@ -7,7 +7,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: ION, 2024\n"
"Language-Team: Hebrew (https://app.transifex.com/sabnzbd/teams/111101/he/)\n"

View File

@@ -3,7 +3,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Language-Team: Italian (https://app.transifex.com/sabnzbd/teams/111101/it/)\n"
"MIME-Version: 1.0\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Norwegian Bokmål (https://app.transifex.com/sabnzbd/teams/111101/nb/)\n"

View File

@@ -8,7 +8,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2024\n"
"Language-Team: Dutch (https://app.transifex.com/sabnzbd/teams/111101/nl/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Polish (https://app.transifex.com/sabnzbd/teams/111101/pl/)\n"

View File

@@ -7,7 +7,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Portuguese (Brazil) (https://app.transifex.com/sabnzbd/teams/111101/pt_BR/)\n"

View File

@@ -7,7 +7,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Romanian (https://app.transifex.com/sabnzbd/teams/111101/ro/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Russian (https://app.transifex.com/sabnzbd/teams/111101/ru/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Serbian (https://app.transifex.com/sabnzbd/teams/111101/sr/)\n"

View File

@@ -6,7 +6,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Safihre <safihre@sabnzbd.org>, 2023\n"
"Language-Team: Swedish (https://app.transifex.com/sabnzbd/teams/111101/sv/)\n"

View File

@@ -7,7 +7,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: 2020-06-27 15:49+0000\n"
"Last-Translator: Kangwei Li <lkw20010211@gmail.com>, 2023\n"
"Language-Team: Chinese (China) (https://app.transifex.com/sabnzbd/teams/111101/zh_CN/)\n"

View File

@@ -4,7 +4,7 @@
#
msgid ""
msgstr ""
"Project-Id-Version: SABnzbd-4.4.0Alpha1\n"
"Project-Id-Version: SABnzbd-4.4.0Alpha2\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: team@sabnzbd.org\n"
"Language-Team: SABnzbd <team@sabnzbd.org>\n"

View File

@@ -23,6 +23,7 @@ pytz==2024.2
sgmllib3k==1.0.0
portend==3.2.0
chardet==5.2.0
pyunormalize==16.0.0
PySocks==1.7.1
puremagic==1.28
guessit==3.8.0
@@ -38,7 +39,7 @@ cryptography==43.0.1
ujson==5.10.0
# Windows system integration
pywin32==307; sys_platform == 'win32'
pywin32==308; sys_platform == 'win32'
windows-toasts==1.3.0; sys_platform == 'win32' and python_version > '3.8'
winrt-runtime==2.2.0; sys_platform == 'win32' and python_version > '3.8'
winrt-Windows.Data.Xml.Dom==2.2.0; sys_platform == 'win32' and python_version > '3.8'
@@ -61,7 +62,7 @@ markdown==3.7
paho-mqtt==1.6.1 # Pinned, newer versions don't work with AppRise yet
# Requests Requirements
charset_normalizer==3.3.2
charset_normalizer==3.4.0
idna==3.10
urllib3==2.2.3
certifi==2024.8.30

View File

@@ -35,7 +35,7 @@ from sabnzbd.constants import DB_HISTORY_NAME, STAGES, Status, PP_LOOKUP
from sabnzbd.bpsmeter import this_week, this_month
from sabnzbd.decorators import synchronized
from sabnzbd.encoding import ubtou, utob
from sabnzbd.misc import caller_name, opts_to_pp, to_units, bool_conv
from sabnzbd.misc import caller_name, opts_to_pp, to_units, bool_conv, match_str
from sabnzbd.filesystem import remove_file, clip_path
DB_LOCK = threading.Lock()
@@ -129,7 +129,7 @@ class HistoryDB:
logging.error(T("Cannot write to History database, check access rights!"))
# Report back success, because there's no recovery possible
return True
elif "not a database" in error or "malformed" in error or "duplicate column name" in error:
elif match_str(error, ("not a database", "malformed", "no such table", "duplicate column name")):
logging.error(T("Damaged History database, created empty replacement"))
logging.info("Traceback: ", exc_info=True)
self.close()
@@ -141,7 +141,7 @@ class HistoryDB:
self.connect()
# Return False in case of "duplicate column" error
# because the column addition in connect() must be terminated
return "duplicate column name" not in error
return True
else:
logging.error(T("SQL Command Failed, see log"))
logging.info("SQL: %s", command)

View File

@@ -92,7 +92,7 @@ def decode(article: Article, data_view: memoryview):
sabnzbd.Downloader.pause()
# This article should be fetched again
sabnzbd.NzbQueue.reset_try_lists(article)
article.allow_new_fetcher()
return
except BadData as error:

View File

@@ -33,7 +33,7 @@ import os
import re
import sabnzbd
from sabnzbd.filesystem import get_unique_filename, renamer, get_ext, get_basename
from sabnzbd.filesystem import get_unique_filename, renamer, get_ext, get_basename, listdir_normalized
from sabnzbd.par2file import is_parfile, parse_par2_file
import sabnzbd.utils.file_extension as file_extension
from sabnzbd.misc import match_str
@@ -59,7 +59,7 @@ def decode_par2(parfile: str) -> List[str]:
# Parse all files in the folder
dirname = os.path.dirname(parfile)
new_files = [] # list of new files generated
for fn in os.listdir(dirname):
for fn in listdir_normalized(dirname):
filepath = os.path.join(dirname, fn)
# Only check files
if os.path.isfile(filepath):

View File

@@ -202,7 +202,7 @@ class Server:
articles getting stuck in the Server when enabled/disabled"""
logging.debug("Resetting article queue for %s (%s)", self, self.article_queue)
for article in self.article_queue:
sabnzbd.NzbQueue.reset_try_lists(article)
article.allow_new_fetcher()
self.article_queue = []
def request_addrinfo(self):
@@ -968,9 +968,9 @@ class Downloader(Thread):
self.decode(nw.article)
nw.article.tries = 0
else:
# Allow all servers again on this server
# Allow all servers again for this article
# Do not use the article_queue, as the server could already have been disabled when we get here!
sabnzbd.NzbQueue.reset_try_lists(nw.article)
nw.article.allow_new_fetcher()
# Reset connection object
nw.hard_reset(wait)

View File

@@ -20,6 +20,7 @@ sabnzbd.encoding - Unicode/byte translation functions
"""
import locale
import pyunormalize
import chardet
from xml.sax.saxutils import escape
from typing import AnyStr
@@ -27,6 +28,11 @@ from typing import AnyStr
CODEPAGE = locale.getpreferredencoding()
def normalize_utf8(inputstring: str) -> str:
"""Make sure we return an utf8 normalized version"""
return pyunormalize.NFC(inputstring)
def utob(str_in: AnyStr) -> bytes:
"""Shorthand for converting UTF-8 string to bytes"""
if isinstance(str_in, bytes):
@@ -37,22 +43,19 @@ def utob(str_in: AnyStr) -> bytes:
def ubtou(str_in: AnyStr) -> str:
"""Shorthand for converting unicode bytes to UTF-8 string"""
if isinstance(str_in, str):
return str_in
return str_in.decode("utf-8")
return normalize_utf8(str_in)
return normalize_utf8(str_in.decode("utf-8"))
def platform_btou(str_in: AnyStr) -> str:
"""Return Unicode string, if not already Unicode, decode with locale encoding.
NOTE: Used for POpen because universal_newlines/text parameter doesn't
always work! We cannot use encoding-parameter because it's Python 3.7+
"""
"""Return Unicode string, if not already Unicode, decode with locale encoding"""
if isinstance(str_in, bytes):
try:
return ubtou(str_in)
except UnicodeDecodeError:
return str_in.decode(CODEPAGE, errors="replace").replace("?", "!")
return normalize_utf8(str_in.decode(CODEPAGE, errors="replace").replace("?", "!"))
else:
return str_in
return normalize_utf8(str_in)
def correct_unknown_encoding(str_or_bytes_in: AnyStr) -> str:
@@ -71,10 +74,10 @@ def correct_unknown_encoding(str_or_bytes_in: AnyStr) -> str:
except UnicodeDecodeError:
try:
# Try using 8-bit ASCII, if came from Windows
return str_or_bytes_in.decode("ISO-8859-1")
return normalize_utf8(str_or_bytes_in.decode("ISO-8859-1"))
except ValueError:
# Last resort we use the slow chardet package
return str_or_bytes_in.decode(chardet.detect(str_or_bytes_in)["encoding"])
return normalize_utf8(str_or_bytes_in.decode(chardet.detect(str_or_bytes_in)["encoding"]))
def correct_cherrypy_encoding(inputstring: str) -> str:
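
The encoding changes route every decoded string through the new normalize_utf8(), which uses pyunormalize to return the NFC form. The motivation (see the filesystem changes below) is that macOS hands back filenames in decomposed (NFD) form, so visually identical names can compare unequal. A short stdlib-only illustration of the problem, using unicodedata rather than the pyunormalize package the diff relies on:

import unicodedata

nfc_name = "caf\u00e9.rar"   # 'café.rar' with a precomposed é (NFC)
nfd_name = "cafe\u0301.rar"  # 'café.rar' as 'e' plus a combining acute accent (NFD)

print(nfc_name == nfd_name)                                 # False
print(unicodedata.normalize("NFC", nfd_name) == nfc_name)   # True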

View File

@@ -46,7 +46,7 @@ except ImportError:
import sabnzbd
from sabnzbd.decorators import synchronized, cache_maintainer
from sabnzbd.constants import FUTURE_Q_FOLDER, JOB_ADMIN, GIGI, DEF_FILE_MAX, IGNORED_FILES_AND_FOLDERS, DEF_LOG_FILE
from sabnzbd.encoding import correct_unknown_encoding, utob, ubtou
from sabnzbd.encoding import correct_unknown_encoding, utob, ubtou, normalize_utf8
from sabnzbd.utils import rarfile
@@ -561,7 +561,7 @@ def globber(path: str, pattern: str = "*") -> List[str]:
"""Return matching base file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
return [f for f in os.listdir(path) if safe_fnmatch(f, pattern)]
return [f for f in listdir_normalized(path) if safe_fnmatch(f, pattern)]
return []
@@ -569,7 +569,8 @@ def globber_full(path: str, pattern: str = "*") -> List[str]:
"""Return matching full file/folder names in folder `path`"""
# Cannot use glob.glob() because it doesn't support Windows long name notation
if os.path.exists(path):
return [os.path.join(path, f) for f in os.listdir(path) if safe_fnmatch(f, pattern)]
path = normalize_utf8(path)
return [os.path.join(path, f) for f in listdir_normalized(path) if safe_fnmatch(f, pattern)]
return []
@@ -581,7 +582,7 @@ def fix_unix_encoding(folder: str):
if not sabnzbd.WIN32 and not sabnzbd.MACOS:
for root, dirs, files in os.walk(folder):
for name in files:
new_name = correct_unknown_encoding(name)
new_name = normalize_utf8(correct_unknown_encoding(name))
if name != new_name:
try:
renamer(os.path.join(root, name), os.path.join(root, new_name))
@@ -804,6 +805,12 @@ def get_unique_filename(path: str) -> str:
return path
def listdir_normalized(input_dir: str) -> List[str]:
"""On macOS, the OS returns un-normalized results.
Always use the same normalization on all platforms"""
return [normalize_utf8(path) for path in os.listdir(input_dir)]
@synchronized(DIR_LOCK)
def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
"""List all files in dirs and sub-dirs"""
@@ -812,7 +819,7 @@ def listdir_full(input_dir: str, recursive: bool = True) -> List[str]:
for file in files:
# Ignore special folders and resources files created by macOS
if not sabnzbd.misc.match_str(root, IGNORED_FILES_AND_FOLDERS) and not file.startswith("._"):
filelist.append(os.path.join(root, file))
filelist.append(normalize_utf8(os.path.join(root, file)))
if not recursive:
break
return filelist
@@ -1386,7 +1393,7 @@ def pathbrowser(path: str, show_hidden: bool = False, show_files: bool = False)
# List all files and folders
file_list = []
for filename in os.listdir(path):
for filename in listdir_normalized(path):
fpath = os.path.join(path, filename)
isdir = os.path.isdir(fpath)
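
filesystem.py gains the listdir_normalized() helper shown above, and the rest of this changeset swaps direct os.listdir() calls for it so names read from disk compare cleanly against names SABnzbd already holds. A hedged usage sketch, with a stdlib stand-in for normalize_utf8 and a made-up directory path:

import os
import unicodedata
from typing import List


def listdir_normalized(input_dir: str) -> List[str]:
    """os.listdir() with every name forced into NFC form."""
    return [unicodedata.normalize("NFC", name) for name in os.listdir(input_dir)]


# For example, detecting files created by a par2 repair run, independent of how
# the OS normalizes the names it returns ("/tmp/sabnzbd-job" is a made-up path):
before = set(listdir_normalized("/tmp/sabnzbd-job"))
# ... repair runs ...
created = set(listdir_normalized("/tmp/sabnzbd-job")) - before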

View File

@@ -63,6 +63,7 @@ from sabnzbd.filesystem import (
SEVENMULTI_RE,
is_size,
get_basename,
listdir_normalized,
)
from sabnzbd.nzbstuff import NzbObject
import sabnzbd.cfg as cfg
@@ -1020,7 +1021,7 @@ def par2_repair(nzo: NzbObject, setname: str) -> Tuple[bool, bool]:
return False, True
parfile = os.path.join(nzo.download_path, parfile_nzf.filename)
old_dir_content = os.listdir(nzo.download_path)
old_dir_content = listdir_normalized(nzo.download_path)
used_joinables = ()
joinables = ()
used_for_repair = ()
@@ -1080,7 +1081,7 @@ def par2_repair(nzo: NzbObject, setname: str) -> Tuple[bool, bool]:
try:
if cfg.enable_par_cleanup():
deletables = []
new_dir_content = os.listdir(nzo.download_path)
new_dir_content = listdir_normalized(nzo.download_path)
# If Multipar or par2cmdline repairs a broken part of a joinable, it doesn't list it as such.
# So we need to manually add all joinables of the set to the list of used joinables.

View File

@@ -669,16 +669,6 @@ class NzbQueue:
except:
return -1
@staticmethod
def reset_try_lists(article: Article, remove_fetcher_from_trylist: bool = True):
"""Let article get new fetcher and reset trylists"""
if remove_fetcher_from_trylist:
article.remove_from_try_list(article.fetcher)
article.fetcher = None
article.tries = 0
article.nzf.reset_try_list()
article.nzf.nzo.reset_try_list()
def has_forced_jobs(self) -> bool:
"""Check if the queue contains any Forced
Priority jobs to download while paused
@@ -885,7 +875,7 @@ class NzbQueue:
logging.info("Found idle job %s", nzo.final_name)
empty.append(nzo)
# Stall prevention by checking if all servers are in the trylist
# Stall prevention by checking if all servers are in the try list
# This is a CPU-cheaper alternative to prevent stalling
if nzo.all_servers_in_try_list(active_servers):
# Maybe the NZF's need a reset too?
@@ -904,7 +894,7 @@ class NzbQueue:
logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
nzf.reset_try_list()
# Reset main trylist, minimal performance impact
# Reset main try list, minimal performance impact
logging.info("Resetting bad trylist for job %s", nzo.final_name)
nzo.reset_try_list()

View File

@@ -111,7 +111,7 @@ RE_RAR = re.compile(r"(\.rar|\.r\d\d|\.s\d\d|\.t\d\d|\.u\d\d|\.v\d\d)$", re.I)
# Trylist
##############################################################################
TRYLIST_LOCK = threading.Lock()
TRYLIST_LOCK = threading.RLock()
class TryList:
@@ -203,12 +203,25 @@ class Article(TryList):
self.crc32: Optional[int] = None
self.nzf: NzbFile = nzf
@synchronized(TRYLIST_LOCK)
def reset_try_list(self):
"""In addition to resetting the try list, also reset fetcher so all servers are tried again"""
"""In addition to resetting the try list, also reset fetcher so all servers
are tried again. Locked so fetcher setting changes are also protected."""
self.fetcher = None
self.fetcher_priority = 0
super().reset_try_list()
@synchronized(TRYLIST_LOCK)
def allow_new_fetcher(self, remove_fetcher_from_try_list: bool = True):
"""Let article get new fetcher and reset try lists of file and job.
Locked so all resets are performed at once"""
if remove_fetcher_from_try_list:
self.remove_from_try_list(self.fetcher)
self.fetcher = None
self.tries = 0
self.nzf.reset_try_list()
self.nzf.nzo.reset_try_list()
def get_article(self, server: Server, servers: List[Server]):
"""Return article when appropriate for specified server"""
if self.fetcher or self.server_in_try_list(server):
@@ -249,7 +262,7 @@ class Article(TryList):
if server.priority >= self.fetcher.priority:
self.tries = 0
# Allow all servers for this nzo and nzf again (but not this fetcher for this article)
sabnzbd.NzbQueue.reset_try_lists(self, remove_fetcher_from_trylist=False)
self.allow_new_fetcher(remove_fetcher_from_try_list=False)
return True
logging.info("Article %s unavailable on all servers, discarding", self.article)
@@ -276,17 +289,6 @@ class Article(TryList):
self.fetcher_priority = 0
self.tries = 0
def __eq__(self, other):
"""Articles with the same usenet address are the same"""
return self.article == other.article
def __hash__(self):
"""Required because we implement eq. Articles with the same
usenet address can appear in different NZF's. So we make every
article object unique, even though it is bad practice.
"""
return id(self)
def __repr__(self):
return "<Article: article=%s, bytes=%s, art_id=%s>" % (self.article, self.bytes, self.art_id)
@@ -431,8 +433,11 @@ class NzbFile(TryList):
self.add_to_try_list(server)
return articles
@synchronized(TRYLIST_LOCK)
def reset_all_try_lists(self):
"""Clear all lists of visited servers"""
"""Reset all try lists. Locked so reset is performed
for all items at the same time without chance of another
thread changing any of the items while we are resetting"""
for art in self.articles:
art.reset_try_list()
self.reset_try_list()
@@ -481,10 +486,12 @@ class NzbFile(TryList):
"""Assume it's the same file if the number bytes and first article
are the same or if there are no articles left, use the filenames.
Some NZB's are just a mess and report different sizes for the same article.
We used to compare (__eq__) articles based on article-ID, however, this failed
because some NZB's had the same article-ID twice within one NZF.
"""
if other and (self.bytes == other.bytes or len(self.decodetable) == len(other.decodetable)):
if self.decodetable and other.decodetable:
return self.decodetable[0] == other.decodetable[0]
return self.decodetable[0].article == other.decodetable[0].article
# Fallback to filename comparison
return self.filename == other.filename
return False
@@ -1001,7 +1008,11 @@ class NzbObject(TryList):
except:
logging.debug("The lastrar swap did not go well")
@synchronized(TRYLIST_LOCK)
def reset_all_try_lists(self):
"""Reset all try lists. Locked so reset is performed
for all items at the same time without chance of another
thread changing any of the items while we are resetting"""
for nzf in self.files:
nzf.reset_all_try_lists()
self.reset_try_list()
@@ -1442,7 +1453,7 @@ class NzbObject(TryList):
@synchronized(NZO_LOCK)
def add_parfile(self, parfile: NzbFile) -> bool:
"""Add parfile to the files to be downloaded
Resets trylist just to be sure
Resets try list just to be sure
Adjust download-size accordingly
Returns False when the file couldn't be added
"""

View File

@@ -73,6 +73,7 @@ from sabnzbd.filesystem import (
get_filename,
directory_is_writable,
check_filesystem_capabilities,
listdir_normalized,
)
from sabnzbd.nzbstuff import NzbObject
from sabnzbd.sorting import Sorter
@@ -961,7 +962,7 @@ def rar_renamer(nzo: NzbObject) -> int:
volnrext = {}
# Scan rar files in workdir, but not subdirs
workdir_files = os.listdir(nzo.download_path)
workdir_files = listdir_normalized(nzo.download_path)
for file_to_check in workdir_files:
file_to_check = os.path.join(nzo.download_path, file_to_check)
@@ -1185,7 +1186,7 @@ def one_file_or_folder(folder: str) -> str:
"""If the dir only contains one file or folder, join that file/folder onto the path"""
if os.path.exists(folder) and os.path.isdir(folder):
try:
cont = os.listdir(folder)
cont = listdir_normalized(folder)
if len(cont) == 1:
folder = os.path.join(folder, cont[0])
folder = one_file_or_folder(folder)

View File

@@ -37,6 +37,7 @@ from sabnzbd.filesystem import (
renamer,
sanitize_foldername,
clip_path,
listdir_normalized,
)
import sabnzbd.config as config
import sabnzbd.cfg as cfg
@@ -616,7 +617,7 @@ def move_to_parent_directory(workdir: str) -> Tuple[str, bool]:
logging.debug("Moving all files from %s to %s", workdir, dest)
# Check for DVD folders and bail out if found
for item in os.listdir(workdir):
for item in listdir_normalized(workdir):
if item.lower() in IGNORED_MOVIE_FOLDERS:
return workdir, True