From 46cbf8558463bb6c43a67022fe73cdba802ac5d1 Mon Sep 17 00:00:00 2001
From: jokob-sk
Date: Sun, 4 May 2025 08:52:13 +1000
Subject: [PATCH] LOG_LEVEL fix, WEBMON timeout multiplier, docs, fix for
 watched-changed #1053

---
 docs/DEVICE_DISPLAY_SETTINGS.md                 |  11 +-
 .../device_management_display_settings.png      | Bin 0 -> 7891 bytes
 front/php/templates/language/ca_ca.json         |   0
 front/php/templates/language/cs_cz.json         |   0
 front/php/templates/language/es_es.json         |   0
 front/php/templates/language/it_it.json         |   0
 front/php/templates/language/tr_tr.json         |   0
 front/php/templates/language/zh_cn.json         |   0
 front/plugins/_publisher_email/email_smtp.py    |   2 +-
 front/plugins/website_monitor/config.json       |   3 +-
 front/plugins/website_monitor/script.py         |   8 +-
 front/report_templates/report_template.html     |   2 +-
 install/install_dependencies.debian.sh          |   2 +-
 server/__main__.py                              |  19 +-
 server/initialise.py                            |   5 +-
 server/logger.py                                |  17 +-
 server/models/device_instance.py                |   2 +-
 server/models/plugin_object_instance.py         |   2 +-
 server/notification.py                          |   2 +-
 server/plugin.py                                | 319 +++++++++---------
 server/reporting.py                             |   3 +-
 server/scan/device_handling.py                  |   2 +-
 server/scheduler.py                             |   2 +-
 server/workflows/app_events.py                  |   2 +-
 24 files changed, 213 insertions(+), 190 deletions(-)
 create mode 100755 docs/img/DEVICE_MANAGEMENT/device_management_display_settings.png
 mode change 100644 => 100755 front/php/templates/language/ca_ca.json
 mode change 100644 => 100755 front/php/templates/language/cs_cz.json
 mode change 100644 => 100755 front/php/templates/language/es_es.json
 mode change 100644 => 100755 front/php/templates/language/it_it.json
 mode change 100644 => 100755 front/php/templates/language/tr_tr.json
 mode change 100644 => 100755 front/php/templates/language/zh_cn.json

diff --git a/docs/DEVICE_DISPLAY_SETTINGS.md b/docs/DEVICE_DISPLAY_SETTINGS.md
index 81260151..260e1210 100755
--- a/docs/DEVICE_DISPLAY_SETTINGS.md
+++ b/docs/DEVICE_DISPLAY_SETTINGS.md
@@ -3,4 +3,13 @@
 This set of settings allows you to group Devices under different views. The Archived toggle allows you to exclude a Device from most listings and notifications. 
 
-![Display settings](./img/DEVICE_MANAGEMENT/DeviceDetails_DisplaySettings.png)
\ No newline at end of file
+![Display settings](./img/DEVICE_MANAGEMENT/DeviceDetails_DisplaySettings.png)
+
+
+## Status Colors
+
+![Status colors](./img/DEVICE_MANAGEMENT/device_management_display_settings.png)
+
+1. Online (Green) = A device that is online and is no longer marked as a "New Device"
+2. New (Green) = A newly discovered device that is online and is still "ticked" as a "New Device"
+3. New (Grey) = Same as No. 2, but the device is now offline.
diff --git a/docs/img/DEVICE_MANAGEMENT/device_management_display_settings.png b/docs/img/DEVICE_MANAGEMENT/device_management_display_settings.png
new file mode 100755
index 0000000000000000000000000000000000000000..52b8385778a5b4e9ba603ab643551da91a22c99b
GIT binary patch
literal 7891
[7891 bytes of base85-encoded PNG data omitted]
diff --git a/front/report_templates/report_template.html b/front/report_templates/report_template.html
       NetAlertX Logo
-      NetAlertx
+      NetAlertx
diff --git a/install/install_dependencies.debian.sh b/install/install_dependencies.debian.sh
index 8cac386f..add491f9 100755
--- a/install/install_dependencies.debian.sh
+++ b/install/install_dependencies.debian.sh
@@ -30,5 +30,5 @@ source myenv/bin/activate
 update-alternatives --install /usr/bin/python python /usr/bin/python3 10
 
 # install packages thru pip3
-pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros git+https://github.com/foreign-sub/aiofreepybox.git
+pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag git+https://github.com/foreign-sub/aiofreepybox.git
diff --git a/server/__main__.py b/server/__main__.py
index 36a2f8eb..b199e7ad 100755
--- a/server/__main__.py
+++ b/server/__main__.py
@@ -33,7 +33,7 @@ from initialise import importConfigs
 from database import DB
 from reporting import get_notifications
 from notification import Notification_obj
-from plugin import run_plugin_scripts, check_and_run_user_event
+from plugin import plugin_manager
 from scan.device_handling import update_devices_names
 from workflows.manager import WorkflowManager
 
@@ -96,6 +96,9 @@ def main ():
         # re-load user configuration and plugins
         all_plugins, imported = importConfigs(db, all_plugins)
 
+        # instantiate plugin manager
+        pm = plugin_manager(db, all_plugins)
+
         # update time started
         conf.loop_start_time = timeNowTZ()
 
@@ -103,11 +106,11 @@ def main ():
 
         # Handle plugins executed ONCE
         if conf.plugins_once_run == False:
-            run_plugin_scripts(db, all_plugins, 'once')
+            pm.run_plugin_scripts('once')
             conf.plugins_once_run = True
 
         # check if user is waiting for api_update
-        check_and_run_user_event(db, all_plugins)
+        pm.check_and_run_user_event()
 
         # Update API endpoints
         update_api(db, all_plugins, False)
@@ -126,13 +129,13 @@ def main ():
         startTime = startTime.replace (microsecond=0)
 
         # Check if any plugins need to run on schedule
-        run_plugin_scripts(db, all_plugins, 'schedule')
+        pm.run_plugin_scripts('schedule')
 
         # determine run/scan type based on passed time
         # --------------------------------------------
 
         # Runs plugin scripts which are set to run every time after a scans finished
-        run_plugin_scripts(db, all_plugins, 'always_after_scan')
+        pm.run_plugin_scripts('always_after_scan')
 
         # process all the scanned data into new devices
         processScan = updateState("Check scan").processScan
@@ -146,7 +149,7 @@ def main ():
         # --------
         # Reporting
         # run plugins before notification processing (e.g. Plugins to discover device names)
-        run_plugin_scripts(db, all_plugins, 'before_name_updates')
+        pm.run_plugin_scripts('before_name_updates')
 
         # Resolve devices names
         mylog('debug','[Main] Resolve devices names')
@@ -160,7 +163,7 @@ def main ():
         # new devices were found
         if len(newDevices) > 0:
             # run all plugins registered to be run when new devices are found
-            run_plugin_scripts(db, all_plugins, 'on_new_device')
+            pm.run_plugin_scripts('on_new_device')
 
         # Notification handling
         # ----------------------------------------
@@ -175,7 +178,7 @@ def main ():
 
             # run all enabled publisher gateways
             if notificationObj.HasNotifications:
-                run_plugin_scripts(db, all_plugins, 'on_notification')
+                pm.run_plugin_scripts('on_notification')
 
                 notification.setAllProcessed()
                 notification.clearPendingEmailFlag()
diff --git a/server/initialise.py b/server/initialise.py
index c8492be5..86991ef9 100755
--- a/server/initialise.py
+++ b/server/initialise.py
@@ -17,7 +17,7 @@ from app_state import updateState
 from logger import mylog
 from api import update_api
 from scheduler import schedule_class
-from plugin import print_plugin_info, run_plugin_scripts
+from plugin import plugin_manager, print_plugin_info
 from plugin_utils import get_plugins_configs, get_set_value_for_init
 from notification import write_notification
 from crypto_utils import get_random_bytes
@@ -402,7 +402,8 @@ def importConfigs (db, all_plugins):
     update_api(db, all_plugins, True, ["settings"])
 
     # run plugins that are modifying the config
-    run_plugin_scripts(db, all_plugins, 'before_config_save' )
+    pm = plugin_manager(db, all_plugins)
+    pm.run_plugin_scripts('before_config_save')
 
     # Used to determine the next import
     conf.lastImportedConfFile = os.path.getmtime(config_file)
diff --git a/server/logger.py b/server/logger.py
index cf578cd5..958fdd4a 100755
--- a/server/logger.py
+++ b/server/logger.py
@@ -45,10 +45,11 @@ reqLvl = 0
 #-------------------------------------------------------------------------------
 class Logger:
-    def __init__(self, LOG_LEVEL='verbose'):
+    def __init__(self, LOG_LEVEL):
 
         global currentLevel
 
         currentLevel = LOG_LEVEL
+        conf.LOG_LEVEL = currentLevel
 
         # Automatically set up custom logging handler
         self.setup_logging()
@@ -89,7 +90,7 @@ def mylog(requestedDebugLevel, n):
             reqLvl = lvl[1]
 
     if reqLvl <= setLvl:
-        file_print (*n)
+        file_print (*n)
 
 #-------------------------------------------------------------------------------
 # Queue for log messages
@@ -138,18 +139,6 @@ def file_print(*args):
     # Ensure the log writer thread is running
     start_log_writer_thread()
 
-#-------------------------------------------------------------------------------
-def print_log(pText):
-    # Check if logging is active
-    if not conf.LOG_LEVEL == 'debug':
-        return
-
-    # Current Time
-    log_timestamp2 = datetime.datetime.now(conf.tz).replace(microsecond=0)
-
-    # Print line + time + text
-    file_print('[LOG_LEVEL=debug]', log_timestamp2.strftime('%H:%M:%S'), pText)
-
-    return pText
#------------------------------------------------------------------------------- def append_file_binary(file_path, input_data): diff --git a/server/models/device_instance.py b/server/models/device_instance.py index 63176af4..430a3c6f 100755 --- a/server/models/device_instance.py +++ b/server/models/device_instance.py @@ -4,7 +4,7 @@ import sys INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/server"]) -from logger import mylog, print_log +from logger import mylog #------------------------------------------------------------------------------- # Device object handling (WIP) diff --git a/server/models/plugin_object_instance.py b/server/models/plugin_object_instance.py index 83f58512..347ad849 100755 --- a/server/models/plugin_object_instance.py +++ b/server/models/plugin_object_instance.py @@ -4,7 +4,7 @@ import sys INSTALL_PATH="/app" sys.path.extend([f"{INSTALL_PATH}/server"]) -from logger import mylog, print_log +from logger import mylog #------------------------------------------------------------------------------- # Plugin object handling (WIP) diff --git a/server/notification.py b/server/notification.py index 3e6e478a..c641f708 100755 --- a/server/notification.py +++ b/server/notification.py @@ -12,7 +12,7 @@ from json2table import convert # Register NetAlertX modules import conf from const import applicationPath, logPath, apiPath, confFileName, reportTemplatesPath -from logger import logResult, mylog, print_log +from logger import logResult, mylog from helper import generate_mac_links, removeDuplicateNewLines, timeNowTZ, get_file_content, write_file, get_setting_value, get_timezone_offset #------------------------------------------------------------------------------- diff --git a/server/plugin.py b/server/plugin.py index cbf07100..586f4a4b 100755 --- a/server/plugin.py +++ b/server/plugin.py @@ -20,8 +20,150 @@ from notification import Notification_obj, write_notification from user_events_queue import UserEventsQueue from crypto_utils import generate_deterministic_guid -# Make sure log level is initialized correctly -Logger(get_setting_value('LOG_LEVEL')) + +#------------------------------------------------------------------------------- +class plugin_manager: + def __init__(self, db, all_plugins): + self.db = db + self.all_plugins = all_plugins + + # Make sure log level is initialized correctly + Logger(get_setting_value('LOG_LEVEL')) + + #------------------------------------------------------------------------------- + def run_plugin_scripts(self, runType): + + # Header + updateState("Run: Plugins") + + mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType]) + + for plugin in self.all_plugins: + + shouldRun = False + prefix = plugin["unique_prefix"] + + set = get_plugin_setting_obj(plugin, "RUN") + + # mylog('debug', [f'[run_plugin_scripts] plugin: {plugin}']) + # mylog('debug', [f'[run_plugin_scripts] set: {set}']) + if set != None and set['value'] == runType: + if runType != "schedule": + shouldRun = True + elif runType == "schedule": + # run if overdue scheduled time + # check schedules if any contains a unique plugin prefix matching the current plugin + for schd in conf.mySchedules: + if schd.service == prefix: + # Check if schedule overdue + shouldRun = schd.runScheduleCheck() + + if shouldRun: + # Header + updateState(f"Plugin: {prefix}") + + print_plugin_info(plugin, ['display_name']) + mylog('debug', ['[Plugins] CMD: ', get_plugin_setting_obj(plugin, "CMD")["value"]]) + execute_plugin(self.db, self.all_plugins, plugin) + # update 
last run time
+                    if runType == "schedule":
+                        for schd in conf.mySchedules:
+                            if schd.service == prefix:
+                                # note the last time the scheduled plugin run was executed
+                                schd.last_run = timeNowTZ()
+
+    #===============================================================================
+    # Handling of user initialized front-end events
+    #===============================================================================
+    def check_and_run_user_event(self):
+        """
+        Process user events from the execution queue log file and notify the user about executed events.
+        """
+        execution_log = UserEventsQueue()
+
+        # Track whether to show notification for executed events
+        executed_events = []
+
+        # Read the log file to get the lines
+        lines = execution_log.read_log()
+        if not lines:
+            mylog('debug', ['[check_and_run_user_event] User Execution Queue is empty'])
+            return  # Exit early if the log file is empty
+        else:
+            mylog('debug', ['[check_and_run_user_event] Process User Execution Queue: ' + ', '.join(map(str, lines))])
+
+        for line in lines:
+            # Extract event name and parameters from the log line
+            columns = line.strip().split('|')[2:4]
+
+            event, param = "", ""
+            if len(columns) == 2:
+                event, param = columns
+
+            # Process each event type
+            if event == 'test':
+                self.handle_test(param)
+                executed_events.append(f"test with param {param}")
+                execution_log.finalize_event("test")
+            elif event == 'run':
+                self.handle_run(param)
+                executed_events.append(f"run with param {param}")
+                execution_log.finalize_event("run")
+            elif event == 'update_api':
+                # async handling
+                update_api(self.db, self.all_plugins, False, param.split(','), True)
+
+            else:
+                mylog('minimal', ['[check_and_run_user_event] WARNING: Unhandled event in execution queue: ', event, ' | ', param])
+                execution_log.finalize_event(event)  # Finalize unknown events to remove them
+
+        # Notify user about executed events (if applicable)
+        if executed_events:
+            executed_events_message = ', '.join(executed_events)
+            mylog('minimal', ['[check_and_run_user_event] INFO: Executed events: ', executed_events_message])
+            write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowTZ())
+
+        return
+
+
+
+    #-------------------------------------------------------------------------------
+    def handle_run(self, runType):
+
+        mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType])
+
+        # run the plugin to run
+        for plugin in self.all_plugins:
+            if plugin["unique_prefix"] == runType:
+                execute_plugin(self.db, self.all_plugins, plugin)
+
+        mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType])
+        return
+
+
+
+    #-------------------------------------------------------------------------------
+    def handle_test(self, runType):
+
+        mylog('minimal', ['[', timeNowTZ(), '] [Test] START Test: ', runType])
+
+        # Prepare test samples
+        sample_json = json.loads(get_file_content(reportTemplatesPath + 'webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"]
+
+        # Create fake notification
+        notification = Notification_obj(self.db)
+        notificationObj = notification.create(sample_json, "")
+
+        # Run test
+        self.handle_run(runType)
+
+        # Remove sample notification
+        notificationObj.remove(notificationObj.GUID)
+
+        mylog('minimal', ['[Test] END Test: ', runType])
+
+        return
+
 
 #-------------------------------------------------------------------------------
 class plugin_param:
@@ -103,47 +245,7 @@ class plugin_param:
 
         self.paramValuesCount = paramValuesCount
         self.multiplyTimeout  = multiplyTimeout
-#------------------------------------------------------------------------------- -def run_plugin_scripts(db, all_plugins, runType): - # Header - updateState("Run: Plugins") - - mylog('debug', ['[Plugins] Check if any plugins need to be executed on run type: ', runType]) - - for plugin in all_plugins: - - shouldRun = False - prefix = plugin["unique_prefix"] - - set = get_plugin_setting_obj(plugin, "RUN") - - # mylog('debug', [f'[run_plugin_scripts] plugin: {plugin}']) - # mylog('debug', [f'[run_plugin_scripts] set: {set}']) - if set != None and set['value'] == runType: - if runType != "schedule": - shouldRun = True - elif runType == "schedule": - # run if overdue scheduled time - # check schedules if any contains a unique plugin prefix matching the current plugin - for schd in conf.mySchedules: - if schd.service == prefix: - # Check if schedule overdue - shouldRun = schd.runScheduleCheck() - - if shouldRun: - # Header - updateState(f"Plugin: {prefix}") - - print_plugin_info(plugin, ['display_name']) - mylog('debug', ['[Plugins] CMD: ', get_plugin_setting_obj(plugin, "CMD")["value"]]) - execute_plugin(db, all_plugins, plugin) - # update last run time - if runType == "schedule": - for schd in conf.mySchedules: - if schd.service == prefix: - # note the last time the scheduled plugin run was executed - schd.last_run = timeNowTZ() # Function to run a plugin command @@ -448,13 +550,10 @@ def execute_plugin(db, all_plugins, plugin ): # check if the subprocess / SQL query failed / there was no valid output if len(sqlParams) == 0: mylog('none', [f'[Plugins] No output received from the plugin "{plugin["unique_prefix"]}"']) - return + else: - mylog('verbose', ['[Plugins] SUCCESS, received ', len(sqlParams), ' entries']) - mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams]) - - # process results if any - if len(sqlParams) > 0: + mylog('verbose', [f'[Plugins] SUCCESS for {plugin["unique_prefix"]} received {len(sqlParams)} entries']) + # mylog('debug', ['[Plugins] sqlParam entries: ', sqlParams]) # create objects process_plugin_events(db, plugin, sqlParams) @@ -483,7 +582,8 @@ def process_plugin_events(db, plugin, plugEventsArr): pluginPref = plugin["unique_prefix"] - mylog('debug', ['[Plugins] Processing : ', pluginPref]) + mylog('verbose', ['[Plugins] Processing : ', pluginPref]) + try: # Begin a transaction @@ -497,8 +597,7 @@ def process_plugin_events(db, plugin, plugEventsArr): for obj in plugObjectsArr: pluginObjects.append(plugin_object_class(plugin, obj)) - - + # create plugin objects from events - will be processed to find existing objects for eve in plugEventsArr: pluginEvents.append(plugin_object_class(plugin, eve)) @@ -506,15 +605,13 @@ def process_plugin_events(db, plugin, plugEventsArr): mylog('debug', ['[Plugins] Existing objects from Plugins_Objects: ', len(pluginObjects)]) mylog('debug', ['[Plugins] Logged events from the plugin run : ', len(pluginEvents)]) - # Loop thru all current events and update the status to "exists" if the event matches an existing object index = 0 for tmpObjFromEvent in pluginEvents: # compare hash of the IDs for uniqueness - if any(x.idsHash == tmpObjFromEvent.idsHash for x in pluginObjects): - + if any(x.idsHash == tmpObjFromEvent.idsHash for x in pluginObjects): pluginEvents[index].status = "exists" index += 1 @@ -526,9 +623,13 @@ def process_plugin_events(db, plugin, plugEventsArr): if tmpObjFromEvent.status == "exists": - # compare hash of the changed watched columns for uniqueness - if any(x.watchedHash != tmpObjFromEvent.watchedHash for x in 
pluginObjects): - pluginEvents[index].status = "watched-changed" + # compare hash of the changed watched columns for uniqueness - make sure you compare the values with the same idsHash before checking watchedHash + if any( + x.idsHash == tmpObjFromEvent.idsHash and x.watchedHash != tmpObjFromEvent.watchedHash + for x in pluginObjects + ): + pluginEvents[index].status = "watched-changed" + else: pluginEvents[index].status = "watched-not-changed" index += 1 @@ -612,9 +713,9 @@ def process_plugin_events(db, plugin, plugEventsArr): mylog('debug', ['[Plugins] objects_to_insert count: ', len(objects_to_insert)]) mylog('debug', ['[Plugins] objects_to_update count: ', len(objects_to_update)]) - mylog('trace', ['[Plugins] objects_to_update: ', objects_to_update]) - mylog('trace', ['[Plugins] events_to_insert: ', events_to_insert]) - mylog('trace', ['[Plugins] history_to_insert: ', history_to_insert]) + # mylog('debug', ['[Plugins] objects_to_update: ', objects_to_update]) + # mylog('debug', ['[Plugins] events_to_insert: ', events_to_insert]) + # mylog('debug', ['[Plugins] history_to_insert: ', history_to_insert]) logEventStatusCounts('pluginEvents', pluginEvents) logEventStatusCounts('pluginObjects', pluginObjects) @@ -838,106 +939,20 @@ class plugin_object_class: for clmName in self.watchedClmns: for mapping in indexNameColumnMapping: - if clmName == indexNameColumnMapping[1]: - self.watchedIndxs.append(indexNameColumnMapping[0]) + if clmName == mapping[1]: + self.watchedIndxs.append(mapping[0]) tmp = '' for indx in self.watchedIndxs: + tmp += str(objDbRow[indx]) self.watchedHash = str(hash(tmp)) - -#=============================================================================== -# Handling of user initialized front-end events -#=============================================================================== -def check_and_run_user_event(db, all_plugins): - """ - Process user events from the execution queue log file and notify the user about executed events. 
- """ - execution_log = UserEventsQueue() - - # Track whether to show notification for executed events - executed_events = [] - - # Read the log file to get the lines - lines = execution_log.read_log() - if not lines: - mylog('debug', ['[check_and_run_user_event] User Execution Queue is empty']) - return # Exit early if the log file is empty - else: - mylog('debug', ['[check_and_run_user_event] Process User Execution Queue:' + ', '.join(map(str, lines))]) - - for line in lines: - # Extract event name and parameters from the log line - columns = line.strip().split('|')[2:4] - - event, param = "", "" - if len(columns) == 2: - event, param = columns - - # Process each event type - if event == 'test': - handle_test(param, db, all_plugins) - executed_events.append(f"test with param {param}") - execution_log.finalize_event("test") - elif event == 'run': - handle_run(param, db, all_plugins) - executed_events.append(f"run with param {param}") - execution_log.finalize_event("run") - elif event == 'update_api': - # async handling - update_api(db, all_plugins, False, param.split(','), True) - - else: - mylog('minimal', ['[check_and_run_user_event] WARNING: Unhandled event in execution queue: ', event, ' | ', param]) - execution_log.finalize_event(event) # Finalize unknown events to remove them - - # Notify user about executed events (if applicable) - if len(executed_events) > 0 and executed_events: - executed_events_message = ', '.join(executed_events) - mylog('minimal', ['[check_and_run_user_event] INFO: Executed events: ', executed_events_message]) - write_notification(f"[Ad-hoc events] Events executed: {executed_events_message}", "interrupt", timeNowTZ()) - - return + def __repr__(self): + attrs = vars(self) + return f"" -#------------------------------------------------------------------------------- -def handle_run(runType, db, all_plugins): - - mylog('minimal', ['[', timeNowTZ(), '] START Run: ', runType]) - - # run the plugin to run - for plugin in all_plugins: - if plugin["unique_prefix"] == runType: - execute_plugin(db, all_plugins, plugin) - - mylog('minimal', ['[', timeNowTZ(), '] END Run: ', runType]) - return - - - -#------------------------------------------------------------------------------- -def handle_test(runType, db, all_plugins): - - mylog('minimal', ['[', timeNowTZ(), '] [Test] START Test: ', runType]) - - # Prepare test samples - sample_json = json.loads(get_file_content(reportTemplatesPath + 'webhook_json_sample.json'))[0]["body"]["attachments"][0]["text"] - - # Create fake notification - notification = Notification_obj(db) - notificationObj = notification.create(sample_json, "") - - # Run test - handle_run(runType, db, all_plugins) - - # Remove sample notification - notificationObj.remove(notificationObj.GUID) - - mylog('minimal', ['[Test] END Test: ', runType]) - - return - diff --git a/server/reporting.py b/server/reporting.py index b6a761da..6ab1e7dd 100755 --- a/server/reporting.py +++ b/server/reporting.py @@ -16,8 +16,7 @@ import json import conf from const import applicationPath, logPath, apiPath, confFileName from helper import timeNowTZ, get_file_content, write_file, get_timezone_offset, get_setting_value -from logger import logResult, mylog, print_log - +from logger import logResult, mylog #=============================================================================== # REPORTING diff --git a/server/scan/device_handling.py b/server/scan/device_handling.py index ddfb45bc..948ab52d 100755 --- a/server/scan/device_handling.py +++ b/server/scan/device_handling.py @@ 
 import os
 import re
 from helper import timeNowTZ, get_setting, get_setting_value, list_to_where, resolve_device_name_dig, get_device_name_nbtlookup, get_device_name_nslookup, get_device_name_mdns, check_IP_format, sanitize_SQL_input
-from logger import mylog, print_log
+from logger import mylog
 from const import vendorsPath, vendorsPathNewest, sql_generateGuid
 from models.device_instance import DeviceInstance
diff --git a/server/scheduler.py b/server/scheduler.py
index 47b4981f..0ca4c0e5 100755
--- a/server/scheduler.py
+++ b/server/scheduler.py
@@ -1,7 +1,7 @@
 """ class to manage schedules """
 
 import datetime
-from logger import mylog, print_log
+from logger import mylog
 import conf
 
 #-------------------------------------------------------------------------------
diff --git a/server/workflows/app_events.py b/server/workflows/app_events.py
index 5a623973..364fb791 100755
--- a/server/workflows/app_events.py
+++ b/server/workflows/app_events.py
@@ -14,7 +14,7 @@ from helper import get_setting_value, timeNowTZ
 # Make sure the TIMEZONE for logging is correct
 # conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
 
-from logger import mylog, Logger, print_log, logResult
+from logger import mylog, Logger, logResult
 
 # Make sure log level is initialized correctly
 Logger(get_setting_value('LOG_LEVEL'))
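-- 
For context, the watched-changed fix in server/plugin.py above (#1053) narrows the
comparison so that watchedHash values are only compared between entries sharing the
same idsHash. A minimal standalone sketch of that logic follows; Obj is a simplified,
hypothetical stand-in for plugin_object_class, for illustration only:

from dataclasses import dataclass

@dataclass
class Obj:
    idsHash: str      # identifies the object (hash of its ID columns)
    watchedHash: str  # hash of the watched column values

existing = [Obj("id-A", "w-1"), Obj("id-B", "w-2")]
event    = Obj("id-A", "w-1")  # same object as "id-A", watched values unchanged

# Old check: any existing object with a different watchedHash matched, so the
# unrelated "id-B" object wrongly flagged this event as "watched-changed".
old = any(x.watchedHash != event.watchedHash for x in existing)

# Fixed check: compare watchedHash only between objects with the same idsHash.
new = any(x.idsHash == event.idsHash and x.watchedHash != event.watchedHash
          for x in existing)

print(old, new)  # True False -> the false positive is gone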