Compare commits


9 Commits
3.5.2 ... 3.5.3

Author   SHA1        Message (Date)
Safihre  bca9f3b753  Set version to 3.5.3 (2022-03-17 14:29:50 +01:00)
Safihre  cad8a9a5d3  Merge branch '3.5.x' (2022-03-17 14:12:55 +01:00)
Safihre  f5f36d21e8  Update text files 3.5.3 (2022-03-17 14:12:40 +01:00)
Safihre  c51435c114  Revert "Notify users of Prowl/Pushover/Pushbullet to switch to nzb-notify" (2022-03-15 21:52:16 +01:00)
                     This reverts commit 56fe140ebf.
Safihre  2a7f1780b4  Update text files for 3.5.3RC1 (2022-03-13 13:26:18 +01:00)
Safihre  98a44e40fb  Jobs waiting to fetch get stuck indefinitely upon restart (2022-03-11 16:29:28 +01:00)
                     Closes #2114
Safihre  65cf6fa9a1  Prevent Direct Unpack proceeding faster than it should, locking files (2022-03-11 16:29:19 +01:00)
                     Relates to #2113
Safihre  b2e32d1720  Log also the OSError.winerror just to be sure (2022-03-11 16:29:11 +01:00)
Safihre  f0bfedbe8e  Revert "Revert "Disable buffering when writing files in assembler"" (2022-03-11 16:29:04 +01:00)
                     This reverts commit 03b380f90b.
8 changed files with 27 additions and 25 deletions


@@ -1,7 +1,7 @@
 Metadata-Version: 1.0
 Name: SABnzbd
-Version: 3.5.2
-Summary: SABnzbd-3.5.2
+Version: 3.5.3
+Summary: SABnzbd-3.5.3
 Home-page: https://sabnzbd.org
 Author: The SABnzbd Team
 Author-email: team@sabnzbd.org


@@ -1,8 +1,9 @@
-Release Notes - SABnzbd 3.5.2
+Release Notes - SABnzbd 3.5.3
 =========================================================
-## Bugfix since 3.5.1
-- Reverted change to file assembly that resulted in disk errors.
+## Bugfix since 3.5.0
+- Prevent disk errors due to Direct Unpack being too aggressive.
+- URL's waiting to fetch get stuck indefinitely upon restart.
 ## Changes and bugfixes since 3.5.0
 - Prevent permissions errors on systems that do not support them.


@@ -318,12 +318,6 @@ def initialize(pause_downloader=False, clean_up=False, repair=0):
     cfg.cache_limit.set(misc.get_cache_limit())
     sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())
-    # Notify about the removed support of Prowl/Pushover/Pushbullet
-    if cfg.prowl_enable() or cfg.pushbullet_enable() or cfg.pushover_enable():
-        misc.helpful_warning(
-            "Support for Prowl/Pushover/Pushbullet will be removed in SABnzbd 3.6.0. Please switch to the Notification Script 'nzb-notify' to continue using these services."
-        )
     logging.info("All processes started")
     sabnzbd.RESTART_REQ = False
     sabnzbd.__INITIALIZED__ = True


@@ -191,9 +191,13 @@ class Assembler(Thread):
 else:
     logging.error(T("Disk error on creating file %s"), clip_path(filepath))
     # Log traceback
-    logging.info("Traceback: ", exc_info=True)
     if sabnzbd.WIN32:
-        logging.info("Winerror: %s", hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2**32))
+        logging.info(
+            "Winerror: %s - %s",
+            err.winerror,
+            hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2**32),
+        )
+    logging.info("Traceback: ", exc_info=True)
     # Pause without saving
     sabnzbd.Downloader.pause()
 else:
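
Commit b2e32d1720 logs the OSError.winerror code next to the NT status when a file cannot be created on Windows. A minimal, self-contained sketch of the same logging idea follows; log_disk_error and the sample path are illustrative rather than SABnzbd code, and the ntdll call exists only on Windows, hence the platform guard.

```python
import ctypes
import logging
import sys

logging.basicConfig(level=logging.INFO)

def log_disk_error(err: OSError) -> None:
    """Log the Windows error code and the last NT status for a failed file operation."""
    logging.error("Disk error: %s", err)
    if sys.platform == "win32":
        # err.winerror is the Win32 error code; RtlGetLastNtStatus() returns the
        # underlying NTSTATUS, shown here as an unsigned 32-bit hex value.
        logging.info(
            "Winerror: %s - %s",
            err.winerror,
            hex(ctypes.windll.ntdll.RtlGetLastNtStatus() + 2**32),
        )
    logging.info("Traceback: ", exc_info=True)

try:
    with open("nonexistent-dir/file.bin", "wb") as fout:
        fout.write(b"data")
except OSError as err:
    log_disk_error(err)
```
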
@@ -215,7 +219,8 @@
 if not nzf.md5:
     nzf.md5 = hashlib.md5()
-with open(nzf.filepath, "ab") as fout:
+# We write large article-sized chunks, so we can safely skip the buffering of Python
+with open(nzf.filepath, "ab", buffering=0) as fout:
     for article in nzf.decodetable:
         # Break if deleted during writing
         if nzf.nzo.status is Status.DELETED:
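
This hunk re-applies "Disable buffering when writing files in assembler" (commit f0bfedbe8e): the assembler already writes whole article-sized chunks, so Python's own buffer layer only adds an extra copy. A rough standalone sketch of the idea, with made-up chunk sizes and file names rather than SABnzbd's:

```python
import os

ARTICLE_SIZE = 512 * 1024  # chunks are already large, pre-assembled by the downloader

def append_chunks(path: str, chunks) -> None:
    # buffering=0 is only allowed in binary mode; every write() goes straight to the OS
    # instead of being copied through Python's internal buffer first.
    with open(path, "ab", buffering=0) as fout:
        for chunk in chunks:
            fout.write(chunk)

if __name__ == "__main__":
    append_chunks("example.part", [os.urandom(ARTICLE_SIZE) for _ in range(4)])
    os.remove("example.part")
```
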


@@ -352,10 +352,10 @@ class DirectUnpacker(threading.Thread):
         return False

     def wait_for_next_volume(self):
-        """Wait for the correct volume to appear
-        But stop if it was killed or the NZB is done
+        """Wait for the correct volume to appear but stop if it was killed
+        or the NZB is in post-processing and no new files will be downloaded.
         """
-        while not self.have_next_volume() and not self.killed and self.nzo.files:
+        while not self.have_next_volume() and not self.killed and not self.nzo.pp_active:
             with self.next_file_lock:
                 self.next_file_lock.wait()
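
The reworked wait loop stops not only when the unpacker is killed but also once the job enters post-processing (nzo.pp_active), since no further volumes can arrive at that point. Below is a generic sketch of this condition-variable wait pattern; the VolumeWaiter class and its attributes are illustrative and not the DirectUnpacker API:

```python
import threading

class VolumeWaiter:
    def __init__(self):
        self.next_file_lock = threading.Condition()
        self.killed = False
        self.pp_active = False  # set once post-processing starts; no new files will arrive
        self.volumes = []

    def have_next_volume(self) -> bool:
        return bool(self.volumes)

    def wait_for_next_volume(self) -> bool:
        """Wait until the next volume exists, but give up when killed or post-processing starts."""
        while not self.have_next_volume() and not self.killed and not self.pp_active:
            with self.next_file_lock:
                # wait() releases the lock; producers call notify_all() after appending
                # a volume or flipping killed/pp_active. The timeout keeps this sketch
                # from blocking forever if a notification is missed.
                self.next_file_lock.wait(timeout=1.0)
        return self.have_next_volume()
```
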


@@ -681,7 +681,7 @@ def _get_link(entry):
     # GUID usually has URL to result on page
     infourl = None
-    if entry.get("id") and entry.id != link and entry.id.startswith("http"):
+    if entry.get("id") and entry.id != link and entry.id.lower().startswith("http"):
         infourl = entry.id

     if size == 0:
@@ -716,7 +716,7 @@ def _get_link(entry):
     except (KeyError, IndexError):
         season = episode = 0

-    if link and "http" in link.lower():
+    if link and link.lower().startswith("http"):
         try:
             category = entry.cattext
         except AttributeError:
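
Both rss.py hunks tighten the URL test: the old substring check ("http" in link.lower()) also matched values that merely contain "http" somewhere, and the old prefix check on entry.id was case-sensitive, so uppercase schemes slipped through. A small illustration of the difference, using made-up sample values rather than real feed entries:

```python
candidates = [
    "https://indexer.example/getnzb/abc123",               # plain HTTP(S) link
    "HTTPS://INDEXER.EXAMPLE/GETNZB/ABC123",               # uppercase scheme, still a link
    "magnet:?xt=urn:btih:abc&tr=http://tracker.example",   # contains "http" but is not one
]

for link in candidates:
    loose = "http" in link.lower()             # old style: substring anywhere
    strict = link.lower().startswith("http")   # new style: scheme prefix only
    print(f"loose={loose!s:5} strict={strict!s:5} {link}")
```
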


@@ -63,8 +63,6 @@ class URLGrabber(Thread):
     def __init__(self):
         super().__init__()
         self.queue: queue.Queue[Tuple[Optional[str], Optional[NzbObject]]] = queue.Queue()
-        for url_nzo_tup in sabnzbd.NzbQueue.get_urls():
-            self.queue.put(url_nzo_tup)
         self.shutdown = False

     def add(self, url: str, future_nzo: NzbObject, when: Optional[int] = None):
@@ -87,7 +85,11 @@ class URLGrabber(Thread):
         self.queue.put((None, None))

     def run(self):
         self.shutdown = False
+        # Read all URL's to grab from the queue
+        for url_nzo_tup in sabnzbd.NzbQueue.get_urls():
+            self.queue.put(url_nzo_tup)
+        # Start fetching
         while not self.shutdown:
             # Set NzbObject object to None so reference from this thread
             # does not keep the object alive in the future (see #1628)
@@ -403,7 +405,7 @@ def add_url(
     password: Optional[str] = None,
 ):
     """Add NZB based on a URL, attributes optional"""
-    if "http" not in url:
+    if not url.lower().startswith("http"):
         return
     if not pp or pp == "-1":
         pp = None
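
Moving the re-queuing of saved URLs from __init__() into run() means every (re)start of the URLGrabber thread picks up whatever was still waiting to be fetched, which is how jobs stuck after a restart (#2114) get going again. A stripped-down sketch of the pattern; UrlWorker and pending_urls are placeholders, not SABnzbd APIs:

```python
import queue
import threading
from typing import Optional, Tuple

def pending_urls():
    """Placeholder for sabnzbd.NzbQueue.get_urls(): URLs left over from the previous session."""
    return [("https://example.org/some.nzb", None)]

class UrlWorker(threading.Thread):
    def __init__(self):
        super().__init__()
        self.queue: "queue.Queue[Tuple[Optional[str], Optional[object]]]" = queue.Queue()
        self.shutdown = False

    def stop(self):
        self.shutdown = True
        self.queue.put((None, None))  # sentinel wakes the worker so it can exit

    def run(self):
        self.shutdown = False
        # Prime the queue here, not in __init__, so every (re)start of the thread
        # re-reads whatever was still waiting to be fetched.
        for url_nzo_tup in pending_urls():
            self.queue.put(url_nzo_tup)

        # Start fetching
        while not self.shutdown:
            url, nzo = self.queue.get()
            if not url:
                break
            print("fetching", url)

worker = UrlWorker()
worker.start()
worker.stop()
worker.join()
```
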


@@ -5,5 +5,5 @@
 # You MUST use double quotes (so " and not ')
-__version__ = "3.5.2"
-__baseline__ = "4a73484603f7dc84d2d32f18536a58052c76bc04"
+__version__ = "3.5.3"
+__baseline__ = "cad8a9a5d3b2bc1eadfcda4cff84ecad1f352d7e"