BE+FE: refactor timezone UTC additional work #1506

Signed-off-by: jokob-sk <jokob.sk@gmail.com>
jokob-sk
2026-02-15 16:13:53 +11:00
parent fd71527b09
commit 3587169791
7 changed files with 256 additions and 221 deletions

View File

@@ -27,8 +27,11 @@ function initOnlineHistoryGraph() {
var archivedCounts = [];
res.data.forEach(function(entry) {
var dateObj = new Date(entry.Scan_Date);
var formattedTime = dateObj.toLocaleTimeString([], {hour: '2-digit', minute: '2-digit', hour12: false});
console.log(entry.Scan_Date);
// var dateObj = new Date(entry.Scan_Date);
var formattedTime = localizeTimestamp(entry.Scan_Date).slice(11, 17);
// dateObj.toLocaleTimeString([], {hour: '2-digit', minute: '2-digit', hour12: false});
timeStamps.push(formattedTime);
onlineCounts.push(entry.Online_Devices);
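For reference, the localize-then-slice pattern the new frontend line relies on, sketched here in Python (the exact output format of the localizeTimestamp helper is not shown in this diff, so the timezone name and slice bounds below are illustrative only):

import datetime as dt
from zoneinfo import ZoneInfo

def localize_timestamp(utc_string, tz_name="Australia/Sydney"):
    # Parse a UTC "YYYY-MM-DD HH:MM:SS" string and render it in a local zone.
    # tz_name is an illustrative placeholder, not a value taken from this commit.
    parsed = dt.datetime.strptime(utc_string, "%Y-%m-%d %H:%M:%S").replace(tzinfo=dt.timezone.utc)
    return parsed.astimezone(ZoneInfo(tz_name)).strftime("%Y-%m-%d %H:%M:%S")

local = localize_timestamp("2026-02-15 05:13:53")
print(local[11:16])  # "16:13" — characters 11-15 hold HH:MM in this layout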

View File

@@ -99,7 +99,7 @@
"description": [
{
"language_code": "en_us",
"string": "Selects the ICMP engine to use. <code>ping</code> checks devices individually and works even when the ARP / neighbor cache is empty, but is slower on larger networks. <code>fping</code> scans IP ranges in parallel and is significantly faster, but relies on the system neighbor cache to resolve IP addresses to MAC addresses. For most networks, <code>fping</code> is recommended. The default command arguments <code>ICMP_ARGS</code> are compatible with both modes."
"string": "Selects the ICMP engine to use. <code>ping</code> checks devices individually, works even with an empty ARP/neighbor cache, but is slower on large networks. <code>fping</code> scans IP ranges in parallel and is much faster, but depends on the system neighbor cache, which can delay MAC resolution. For most networks, <code>fping</code> is recommended, unless precise and timely offline/online detection is needed. Default <code>ICMP_ARGS</code> work with both engines."
}
]
},
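As a rough sketch of the trade-off the updated description draws — per-host ping versus a parallel fping sweep — the following is illustrative only; the flags are generic examples, not the project's actual ICMP_ARGS defaults:

import subprocess

def probe_with_ping(ip):
    # One ICMP echo per host; no dependency on the ARP/neighbor cache.
    result = subprocess.run(["ping", "-c", "1", "-W", "1", ip], capture_output=True)
    return result.returncode == 0

def probe_with_fping(ips):
    # fping probes all targets in parallel; -a prints only alive hosts.
    result = subprocess.run(["fping", "-a", *ips], capture_output=True, text=True)
    return set(result.stdout.split())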

View File

@@ -534,7 +534,6 @@ def is_timestamps_in_utc(sql) -> bool:
try:
# Get timezone offset in seconds
import conf
from zoneinfo import ZoneInfo
import datetime as dt
now = dt.datetime.now(dt.UTC).replace(microsecond=0)
@@ -754,4 +753,3 @@ def migrate_timestamps_to_utc(sql) -> bool:
except Exception as e:
mylog("none", f"[db_upgrade] ERROR during timestamp migration: {e}")
return False
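The detection step boils down to comparing the configured zone's current offset against UTC; a minimal sketch of that arithmetic, with a hard-coded zone standing in for the value the real code resolves from conf:

import datetime as dt
from zoneinfo import ZoneInfo

tz = ZoneInfo("Europe/Berlin")  # illustrative; the real code resolves this from conf
now_utc = dt.datetime.now(dt.timezone.utc).replace(microsecond=0)
offset_seconds = now_utc.astimezone(tz).utcoffset().total_seconds()
# A recent DB timestamp that tracks local wall-clock time rather than UTC will sit
# roughly offset_seconds away from now_utc — one signal that the rows predate the
# UTC refactor and may need migrating.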

View File

@@ -9,7 +9,7 @@ import logging
# NetAlertX imports
import conf
from const import logPath
from utils.datetime_utils import timeNowUTC
from utils.datetime_utils import timeNowTZ
DEFAULT_LEVEL = "none"
@@ -124,12 +124,12 @@ def start_log_writer_thread():
# -------------------------------------------------------------------------------
def file_print(*args):
result = timeNowUTC(as_string=False).strftime("%H:%M:%S") + " "
result = timeNowTZ(as_string=False).strftime("%H:%M:%S") + " "
for arg in args:
if isinstance(arg, list):
arg = " ".join(
str(a) for a in arg
) # so taht new lines are handled correctly also when passing a list
) # so that new lines are handled correctly also when passing a list
result += str(arg)
logging.log(custom_to_logging_levels.get(currentLevel, logging.NOTSET), result)

View File

@@ -10,9 +10,10 @@
# cvc90 2023 https://github.com/cvc90 GNU GPLv3 #
# ---------------------------------------------------------------------------------#
import json
import os
import json
import sys
from zoneinfo import ZoneInfo
# Register NetAlertX directories
INSTALL_PATH = os.getenv("NETALERTX_APP", "/app")
@@ -23,231 +24,237 @@ from helper import ( # noqa: E402 [flake8 lint suppression]
)
from logger import mylog # noqa: E402 [flake8 lint suppression]
from db.sql_safe_builder import create_safe_condition_builder # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import get_timezone_offset # noqa: E402 [flake8 lint suppression]
from utils.datetime_utils import format_date_iso # noqa: E402 [flake8 lint suppression]
import conf # noqa: E402 [flake8 lint suppression]
# ===============================================================================
# Timezone conversion
# ===============================================================================
DATETIME_FIELDS = {
"new_devices": ["Datetime"],
"down_devices": ["eve_DateTime"],
"down_reconnected": ["eve_DateTime"],
"events": ["Datetime"],
"plugins": ["DateTimeChanged"],
}
def get_datetime_fields_from_columns(column_names):
return [
col for col in column_names
if "date" in col.lower() or "time" in col.lower()
]
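The name-based detection is deliberately broad; for example:

print(get_datetime_fields_from_columns(["devName", "eve_DateTime", "Scan_Date", "Status"]))
# ['eve_DateTime', 'Scan_Date'] — any column whose name contains "date" or "time"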
def apply_timezone_to_json(json_obj, section=None):
data = json_obj.json["data"]
columns = json_obj.columnNames
fields = DATETIME_FIELDS.get(section) or get_datetime_fields_from_columns(columns)
return apply_timezone(data, fields)
def apply_timezone(data, fields):
"""
Convert UTC datetime fields in a list of dicts to the configured timezone.
Args:
data (list[dict]): Rows returned from DB
fields (list[str]): Field names to convert
Returns:
list[dict]: Modified data with timezone-aware ISO strings
"""
if not data or not fields:
return data
# Determine local timezone
tz = conf.tz
if isinstance(tz, str):
tz = ZoneInfo(tz)
for row in data:
if not isinstance(row, dict):
continue
for field in fields:
value = row.get(field)
if not value:
continue
try:
# Convert DB UTC string → local timezone ISO
# format_date_iso already assumes UTC if naive
row[field] = format_date_iso(value)
except Exception:
# Never crash, leave original value if conversion fails
continue
return data
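A usage sketch (the exact output depends on format_date_iso and the configured conf.tz; the values here are illustrative):

rows = [{"Datetime": "2026-02-15 05:13:53", "MAC": "aa:bb:cc:dd:ee:ff"}]
rows = apply_timezone(rows, ["Datetime"])
# With conf.tz = "Australia/Sydney", the naive UTC string would come back as a
# timezone-aware ISO string, e.g. "2026-02-15T16:13:53+11:00".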
# ===============================================================================
# REPORTING
# ===============================================================================
# -------------------------------------------------------------------------------
def get_notifications(db):
sql = db.sql # TO-DO
# Reporting section
mylog("verbose", ["[Notification] Check if something to report"])
# prepare variables for JSON construction
json_new_devices = []
json_new_devices_meta = {}
json_down_devices = []
json_down_devices_meta = {}
json_down_reconnected = []
json_down_reconnected_meta = {}
json_events = []
json_events_meta = {}
json_plugins = []
json_plugins_meta = {}
# Disable reporting on events for devices where reporting is disabled based on the MAC address
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType not in ('Device Down', 'Down Reconnected', 'New Device' ) AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertEvents = 0
)""")
# Disable down/down reconnected notifications on devices where devAlertDown is disabled
sql.execute("""UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1 AND eve_EventType in ('Device Down', 'Down Reconnected') AND eve_MAC IN
(
SELECT devMac FROM Devices WHERE devAlertDown = 0
)""")
"""
Fetch notifications for all configured sections, applying timezone conversions.
Args:
db: Database object with `.sql` for executing queries.
Returns:
dict: JSON-ready dict with data and metadata for each section.
"""
sql = db.sql
mylog("verbose", "[Notification] Check if something to report")
# Disable notifications (except down/down reconnected) on devices where devAlertEvents is disabled
# Disable events where reporting is disabled
sql.execute("""
UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1
AND eve_EventType NOT IN ('Device Down', 'Down Reconnected', 'New Device')
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertEvents = 0)
""")
sql.execute("""
UPDATE Events SET eve_PendingAlertEmail = 0
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Device Down', 'Down Reconnected')
AND eve_MAC IN (SELECT devMac FROM Devices WHERE devAlertDown = 0)
""")
sections = get_setting_value("NTFPRCS_INCLUDED_SECTIONS")
mylog("verbose", ["[Notification] Included sections: ", sections])
if "new_devices" in sections:
# Compose New Devices Section (no empty lines in SQL queries!)
# Use SafeConditionBuilder to prevent SQL injection vulnerabilities
condition_builder = create_safe_condition_builder()
new_dev_condition_setting = get_setting_value("NTFPRCS_new_dev_condition")
try:
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
new_dev_condition_setting
)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device' {}
ORDER BY eve_DateTime""".format(safe_condition)
except (ValueError, KeyError, TypeError) as e:
mylog("verbose", ["[Notification] Error building safe condition for new devices: ", e])
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType = 'New Device'
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] new_devices SQL query: ", sqlQuery])
mylog("debug", ["[Notification] new_devices parameters: ", parameters])
# Get the events as JSON using parameterized query
json_obj = db.get_table_as_json(sqlQuery, parameters)
json_new_devices_meta = {
"title": "🆕 New devices",
"columnNames": json_obj.columnNames,
}
json_new_devices = json_obj.json["data"]
if "down_devices" in sections:
# Compose Devices Down Section
# - select only Down Alerts with pending email of devices that didn't reconnect within the specified time window
minutes = int(get_setting_value("NTFPRCS_alert_down_time") or 0)
tz_offset = get_timezone_offset()
sqlQuery = f"""
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{minutes} minutes', '{tz_offset}')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime;
"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_down_devices_meta = {
"title": "🔴 Down devices",
"columnNames": json_obj.columnNames,
}
json_down_devices = json_obj.json["data"]
mylog("debug", f"[Notification] json_down_devices: {json.dumps(json_down_devices)}")
if "down_reconnected" in sections:
# Compose Reconnected Down Section
# - select only Devices, that were previously down and now are Connected
sqlQuery = """
SELECT devName, eve_MAC, devVendor, eve_IP, eve_DateTime, eve_EventType
FROM Events_Devices AS reconnected_devices
WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
AND reconnected_devices.eve_PendingAlertEmail = 1
ORDER BY reconnected_devices.eve_DateTime;
"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_down_reconnected_meta = {
"title": "🔁 Reconnected down devices",
"columnNames": json_obj.columnNames,
}
json_down_reconnected = json_obj.json["data"]
mylog("debug", f"[Notification] json_down_reconnected: {json.dumps(json_down_reconnected)}")
if "events" in sections:
# Compose Events Section (no empty lines in SQL queries!)
# Use SafeConditionBuilder to prevent SQL injection vulnerabilities
condition_builder = create_safe_condition_builder()
event_condition_setting = get_setting_value("NTFPRCS_event_condition")
try:
safe_condition, parameters = condition_builder.get_safe_condition_legacy(
event_condition_setting
)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {}
ORDER BY eve_DateTime""".format(safe_condition)
except Exception as e:
mylog("verbose", f"[Notification] Error building safe condition for events: {e}")
# Fall back to safe default (no additional conditions)
sqlQuery = """SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed')
ORDER BY eve_DateTime"""
parameters = {}
mylog("debug", ["[Notification] events SQL query: ", sqlQuery])
mylog("debug", ["[Notification] events parameters: ", parameters])
# Get the events as JSON using parameterized query
json_obj = db.get_table_as_json(sqlQuery, parameters)
json_events_meta = {"title": "⚡ Events", "columnNames": json_obj.columnNames}
json_events = json_obj.json["data"]
if "plugins" in sections:
# Compose Plugins Section
sqlQuery = """SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
from Plugins_Events"""
# Get the events as JSON
json_obj = db.get_table_as_json(sqlQuery)
json_plugins_meta = {"title": "🔌 Plugins", "columnNames": json_obj.columnNames}
json_plugins = json_obj.json["data"]
final_json = {
"new_devices": json_new_devices,
"new_devices_meta": json_new_devices_meta,
"down_devices": json_down_devices,
"down_devices_meta": json_down_devices_meta,
"down_reconnected": json_down_reconnected,
"down_reconnected_meta": json_down_reconnected_meta,
"events": json_events,
"events_meta": json_events_meta,
"plugins": json_plugins,
"plugins_meta": json_plugins_meta,
# Define SQL templates per section
sql_templates = {
"new_devices": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1 AND eve_EventType = 'New Device' {condition}
ORDER BY eve_DateTime
""",
"down_devices": f"""
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS down_events
WHERE eve_PendingAlertEmail = 1
AND down_events.eve_EventType = 'Device Down'
AND eve_DateTime < datetime('now', '-{int(get_setting_value("NTFPRCS_alert_down_time") or 0)} minutes')
AND NOT EXISTS (
SELECT 1
FROM Events AS connected_events
WHERE connected_events.eve_MAC = down_events.eve_MAC
AND connected_events.eve_EventType = 'Connected'
AND connected_events.eve_DateTime > down_events.eve_DateTime
)
ORDER BY down_events.eve_DateTime
""",
"down_reconnected": """
SELECT
devName,
eve_MAC,
devVendor,
eve_IP,
eve_DateTime,
eve_EventType
FROM Events_Devices AS reconnected_devices
WHERE reconnected_devices.eve_EventType = 'Down Reconnected'
AND reconnected_devices.eve_PendingAlertEmail = 1
ORDER BY reconnected_devices.eve_DateTime
""",
"events": """
SELECT
eve_MAC as MAC,
eve_DateTime as Datetime,
devLastIP as IP,
eve_EventType as "Event Type",
devName as "Device name",
devComments as Comments
FROM Events_Devices
WHERE eve_PendingAlertEmail = 1
AND eve_EventType IN ('Connected', 'Down Reconnected', 'Disconnected','IP Changed') {condition}
ORDER BY eve_DateTime
""",
"plugins": """
SELECT
Plugin,
Object_PrimaryId,
Object_SecondaryId,
DateTimeChanged,
Watched_Value1,
Watched_Value2,
Watched_Value3,
Watched_Value4,
Status
FROM Plugins_Events
"""
}
# Titles for metadata
section_titles = {
"new_devices": "🆕 New devices",
"down_devices": "🔴 Down devices",
"down_reconnected": "🔁 Reconnected down devices",
"events": "⚡ Events",
"plugins": "🔌 Plugins"
}
# Pre-initialize final_json with all expected keys
final_json = {}
for section in ["new_devices", "down_devices", "down_reconnected", "events", "plugins"]:
final_json[section] = []
final_json[f"{section}_meta"] = {"title": section_titles.get(section, section), "columnNames": []}
# Loop through each included section
for section in sections:
try:
# Build safe condition for sections that support it
condition_builder = create_safe_condition_builder()
condition_setting = get_setting_value(f"NTFPRCS_{section}_condition")
safe_condition, parameters = condition_builder.get_safe_condition_legacy(condition_setting)
sqlQuery = sql_templates.get(section, "").format(condition=safe_condition)
except Exception:
# Fallback if safe condition fails
sqlQuery = sql_templates.get(section, "").format(condition="")
parameters = {}
mylog("debug", [f"[Notification] {section} SQL query: ", sqlQuery])
mylog("debug", [f"[Notification] {section} parameters: ", parameters])
# Fetch data as JSON
json_obj = db.get_table_as_json(sqlQuery, parameters)
mylog("debug", [f"[Notification] json_obj.json: {json.dumps(json_obj.json)}"])
# Apply timezone conversion
json_obj.json["data"] = apply_timezone_to_json(json_obj, section=section)
# Save data and metadata
final_json[section] = json_obj.json["data"]
final_json[f"{section}_meta"] = {
"title": section_titles.get(section, section),
"columnNames": json_obj.columnNames
}
mylog("debug", [f"[Notification] final_json: {json.dumps(final_json)}"])
return final_json
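One detail worth noting: str.format with a keyword argument simply ignores templates that contain no matching placeholder, which is why the loop can format every template with condition even though only the new_devices and events templates define a {condition} slot:

print("SELECT 1 FROM t".format(condition="AND x = 1"))
# SELECT 1 FROM t
print("SELECT 1 FROM t WHERE y = 1 {condition}".format(condition="AND x = 1"))
# SELECT 1 FROM t WHERE y = 1 AND x = 1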

View File

@@ -16,7 +16,7 @@ from helper import (
getBuildTimeStampAndVersion,
)
from messaging.in_app import write_notification
from utils.datetime_utils import timeNowUTC, get_timezone_offset
from utils.datetime_utils import timeNowUTC, timeNowTZ, get_timezone_offset
# -----------------------------------------------------------------------------
@@ -107,7 +107,7 @@ class NotificationInstance:
mail_html = mail_html.replace("NEW_VERSION", newVersionText)
# Report "REPORT_DATE" in Header & footer
timeFormated = timeNowUTC()
timeFormated = timeNowTZ()
mail_text = mail_text.replace("REPORT_DATE", timeFormated)
mail_html = mail_html.replace("REPORT_DATE", timeFormated)

View File

@@ -47,6 +47,33 @@ def timeNowUTC(as_string=True):
return utc_now.strftime(DATETIME_PATTERN) if as_string else utc_now
def timeNowTZ(as_string=True):
"""
Return the current time in the configured local timezone.
Falls back to UTC if conf.tz is invalid or missing.
"""
# Get canonical UTC time
utc_now = timeNowUTC(as_string=False)
# Resolve timezone safely
tz = None
try:
if isinstance(conf.tz, datetime.tzinfo):
tz = conf.tz
elif isinstance(conf.tz, str) and conf.tz:
tz = ZoneInfo(conf.tz)
except Exception:
tz = None
if tz is None:
tz = datetime.UTC # fallback to UTC
# Convert to local timezone (or UTC fallback)
local_now = utc_now.astimezone(tz)
return local_now.strftime(DATETIME_PATTERN) if as_string else local_now
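A behavior sketch for the new helper, assuming DATETIME_PATTERN is a "%Y-%m-%d %H:%M:%S"-style format (values illustrative):

conf.tz = "Australia/Sydney"                # accepted as a string...
print(timeNowTZ())                          # e.g. "2026-02-15 16:13:53"
conf.tz = ZoneInfo("Australia/Sydney")      # ...or as a tzinfo instance
print(timeNowTZ(as_string=False).tzinfo)    # zoneinfo.ZoneInfo(key='Australia/Sydney')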
def get_timezone_offset():
if conf.tz:
now = timeNowUTC(as_string=False).astimezone(conf.tz)