-
-
-
- Events -
-
-
+
+
@@ -18,75 +18,110 @@ showSpinner()
$(document).ready(function() {
- // Load JSON data from the provided URL
- $.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
- // Process the JSON data and generate UI dynamically
- processData(data)
+ // Load JSON data from the provided URL
+ $.getJSON('/php/server/query_json.php?file=table_appevents.json', function(data) {
+ // Process the JSON data and generate UI dynamically
+ processData(data)
- // hide loading dialog
- hideSpinner()
- });
+ // hide loading dialog
+ hideSpinner()
+ });
});
function processData(data) {
- // Create an object to store unique ObjectType values as app event identifiers
- var appEventIdentifiers = {};
+ // Create an object to store unique ObjectType values as app event identifiers
+ var appEventIdentifiers = {};
- // Array to accumulate data for DataTable
- var allData = [];
+ // Array to accumulate data for DataTable
+ var allData = [];
- // Iterate through the data and generate tabs and content dynamically
- $.each(data.data, function(index, item) {
-
- // Accumulate data for DataTable
- allData.push(item);
-
- });
-
- // Initialize DataTable for all app events
+ // Iterate through the data and generate tabs and content dynamically
+ $.each(data.data, function(index, item) {
- $('#appevents-table').DataTable({
- data: allData,
- paging: true,
- lengthChange: true,
- lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
- searching: true,
- ordering: true,
- info: true,
- autoWidth: false,
- pageLength: 25, // Set the default paging to 25
- columns: [
- { data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
- { data: 'AppEventType', title: getString('AppEvents_Type') },
- { data: 'ObjectType', title: getString('AppEvents_ObjectType') },
- { data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
- { data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
- { data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
- { data: 'Extra', title: getString('AppEvents_Extra') },
- { data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
- // Add other columns as needed
- ],
- // Add column-specific configurations if needed
- columnDefs: [
- { className: 'text-center', targets: [3] },
- { width: '80px', targets: [6] },
- // ... Add other columnDefs as needed
- // Full MAC
- {targets: [3, 4],
- 'createdCell': function (td, cellData, rowData, row, col) {
- if (!emptyArr.includes(cellData)){
- $(td).html (createDeviceLink(cellData));
- } else {
- $(td).html ('');
- }
- } },
- ]
- });
+ // Accumulate data for DataTable
+ allData.push(item);
+
+ });
+
+ // console.log(allData); // debug only — keep commented out in committed code
+
+
+ // Initialize DataTable for all app events
+
+ $('#appevents-table').DataTable({
+ data: allData,
+ paging: true,
+ lengthChange: true,
+ lengthMenu: [[10, 25, 50, 100, 500, -1], [10, 25, 50, 100, 500, 'All']],
+ searching: true,
+ ordering: true,
+ info: true,
+ autoWidth: false,
+ pageLength: 25, // Set the default paging to 25
+ columns: [
+ { data: 'DateTimeCreated', title: getString('AppEvents_DateTimeCreated') },
+ { data: 'AppEventProcessed', title: getString('AppEvents_AppEventProcessed') },
+ { data: 'AppEventType', title: getString('AppEvents_Type') },
+ { data: 'ObjectType', title: getString('AppEvents_ObjectType') },
+ { data: 'ObjectPrimaryID', title: getString('AppEvents_ObjectPrimaryID') },
+ { data: 'ObjectSecondaryID', title: getString('AppEvents_ObjectSecondaryID') },
+ { data: 'ObjectStatus', title: getString('AppEvents_ObjectStatus') },
+ { data: 'ObjectPlugin', title: getString('AppEvents_Plugin') },
+ { data: 'ObjectGUID', title: "GUID" },
+ // Add other columns as needed
+ ],
+ // Add column-specific configurations if needed
+ columnDefs: [
+ { className: 'text-center', targets: [4] },
+ { width: '80px', targets: [7] },
+ // ... Add other columnDefs as needed
+ // Full MAC
+ {targets: [4, 5],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ if (!emptyArr.includes(cellData)){
+ $(td).html (createDeviceLink(cellData));
+ } else {
+ $(td).html ('');
+ }
+ } },
+ // Processed
+ {targets: [1],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ // console.log(cellData);
+ $(td).html (cellData);
+ }
+ },
+ // Datetime
+ {targets: [0],
+ 'createdCell': function (td, cellData, rowData, row, col) {
+ let timezone = $("#NAX_TZ").html(); // e.g., 'Europe/Berlin'
+ let utcDate = new Date(cellData.replace(' ', 'T') + 'Z'); // ISO-8601 with 'Z' is reliably parsed as UTC; non-ISO suffixes like ' UTC' are implementation-dependent
+
+ // Format the date in the desired timezone
+ let options = {
+ year: 'numeric',
+ month: 'short',
+ day: '2-digit',
+ hour: '2-digit',
+ minute: '2-digit',
+ second: '2-digit',
+ hour12: false, // Use 24-hour format
+ timeZone: timezone // Use the specified timezone
+ };
+
+ let localDate = new Intl.DateTimeFormat('en-GB', options).format(utcDate);
+
+ // Update the table cell
+ $(td).html(localDate);
+ }
+ },
+ ]
+ });
- // Activate the first tab
- $('#tabs-location li:first-child').addClass('active');
- $('#tabs-content-location .tab-pane:first-child').addClass('active');
+ // Activate the first tab
+ $('#tabs-location li:first-child').addClass('active');
+ $('#tabs-content-location .tab-pane:first-child').addClass('active');
}
diff --git a/front/css/app.css b/front/css/app.css
index 92482f51..afb72dae 100755
--- a/front/css/app.css
+++ b/front/css/app.css
@@ -1840,6 +1840,47 @@ input[readonly] {
height:50px;
}
+/* -----------------------------------------------------------------------------
+ Workflows
+----------------------------------------------------------------------------- */
+
+#workflowContainerWrap .panel-collapse
+{
+ padding: 5px;
+}
+
+.workflows .btn-secondary{
+ color: #000;
+}
+
+.workflows .condition-list button
+{
+ margin: 2px;
+}
+
+.workflows button
+{
+ /* width:100%; */
+}
+
+#workflowContainerWrap
+{
+ display: contents;
+}
+
+.workflow-card, .condition-list, .actions-list
+{
+ display: grid;
+ padding: 5px;
+ padding-left: 10px;
+}
+
+.condition
+{
+ padding: 5px;
+ padding-left: 10px;
+}
+
/* -----------------------------------------------------------------------------
Floating edit button
----------------------------------------------------------------------------- */
diff --git a/front/devices.php b/front/devices.php
index 2f8c7123..5a260eae 100755
--- a/front/devices.php
+++ b/front/devices.php
@@ -123,7 +123,7 @@
\ No newline at end of file
diff --git a/server/__main__.py b/server/__main__.py
index 7ecb18fa..a2f4cee5 100755
--- a/server/__main__.py
+++ b/server/__main__.py
@@ -28,13 +28,14 @@ from logger import mylog
from helper import filePermissions, timeNowTZ, get_setting_value
from app_state import updateState
from api import update_api
-from networkscan import process_scan
+from scan.session_events import process_scan
from initialise import importConfigs
from database import DB
from reporting import get_notifications
from notification import Notification_obj
from plugin import run_plugin_scripts, check_and_run_user_event
-from device import update_devices_names
+from scan.device_handling import update_devices_names
+from workflows.manager import WorkflowManager
#===============================================================================
#===============================================================================
@@ -79,6 +80,9 @@ def main ():
# Upgrade DB if needed
db.upgradeDB()
+ # Initialize the WorkflowManager
+ workflow_manager = WorkflowManager(db)
+
#===============================================================================
# This is the main loop of NetAlertX
#===============================================================================
@@ -180,15 +184,37 @@ def main ():
# Commit SQL
db.commitDB()
-
- # Footer
-
+
mylog('verbose', ['[MAIN] Process: Idle'])
else:
# do something
# mylog('verbose', ['[MAIN] Waiting to start next loop'])
- updateState("Process: Idle")
-
+ updateState("Process: Idle")
+
+ # WORKFLOWS handling
+ # ----------------------------------------
+ # Fetch new unprocessed events
+ new_events = workflow_manager.get_new_app_events()
+
+ # Process each new event and check triggers
+ if new_events:
+ updateState("Workflows: Start")
+ update_api_flag = False
+ for event in new_events:
+ mylog('debug', [f'[MAIN] Processing WORKFLOW app event with GUID {event["GUID"]}'])
+
+ # proceed to process events
+ workflow_manager.process_event(event)
+
+ if workflow_manager.update_api:
+ # Update API endpoints if needed
+ update_api_flag = True
+
+ if update_api_flag:
+ update_api(db, all_plugins, True)
+
+ updateState("Workflows: End")
+
#loop
time.sleep(5) # wait for N seconds
diff --git a/server/const.py b/server/const.py
index 74ae2c48..9663c7ec 100755
--- a/server/const.py
+++ b/server/const.py
@@ -71,7 +71,7 @@ sql_devices_all = """
FROM Devices
"""
-sql_appevents = """select * from AppEvents"""
+sql_appevents = """select * from AppEvents order by DateTimeCreated desc"""
# The below query calculates counts of devices in various categories:
# (connected/online, offline, down, new, archived),
# as well as a combined count for devices that match any status listed in the UI_MY_DEVICES setting
diff --git a/server/crypto_utils.py b/server/crypto_utils.py
index 3576f9dd..b139d488 100755
--- a/server/crypto_utils.py
+++ b/server/crypto_utils.py
@@ -3,6 +3,7 @@ from Crypto.Util.Padding import pad, unpad
import base64
import os
import hashlib
+import uuid
# SIMPLE CRYPT - requeres C compiler -------------------------------------------------------------------------
@@ -56,4 +57,10 @@ def get_random_bytes(length):
# Format hexadecimal string with hyphens
formatted_hex = '-'.join(hex_string[i:i+2] for i in range(0, len(hex_string), 2))
- return formatted_hex
\ No newline at end of file
+ return formatted_hex
+
+#-------------------------------------------------------------------------------
+def generate_deterministic_guid(plugin, primary_id, secondary_id):
+ """Generates a deterministic GUID based on plugin, primary ID, and secondary ID."""
+ data = f"{plugin}-{primary_id}-{secondary_id}".encode("utf-8")
+ return str(uuid.UUID(hashlib.md5(data).hexdigest()))
\ No newline at end of file
diff --git a/server/database.py b/server/database.py
index 5aad5b67..6a460c7c 100755
--- a/server/database.py
+++ b/server/database.py
@@ -9,7 +9,7 @@ from const import fullDbPath, sql_devices_stats, sql_devices_all, sql_generateGu
from logger import mylog
from helper import json_obj, initOrSetParam, row_to_json, timeNowTZ
-from appevent import AppEvent_obj
+from workflows.app_events import AppEvent_obj
class DB():
"""
@@ -543,6 +543,7 @@ class DB():
sql_Plugins_Objects = """ CREATE TABLE IF NOT EXISTS Plugins_Objects(
"Index" INTEGER,
Plugin TEXT NOT NULL,
+ ObjectGUID TEXT,
Object_PrimaryID TEXT NOT NULL,
Object_SecondaryID TEXT NOT NULL,
DateTimeCreated TEXT NOT NULL,
@@ -589,6 +590,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Objects" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Objects') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Objects table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_Objects" ADD "ObjectGUID" TEXT
+ """)
+
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -645,6 +658,17 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal2" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_Events" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_Events') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_Events table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_Events" ADD "ObjectGUID" TEXT
+ """)
# -----------------------------------------
# REMOVE after 6/6/2025 - END
@@ -703,6 +727,18 @@ class DB():
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal3" TEXT')
self.sql.execute('ALTER TABLE "Plugins_History" ADD COLUMN "HelpVal4" TEXT')
+
+ # plug_ObjectGUID_missing column
+ plug_ObjectGUID_missing = self.sql.execute ("""
+ SELECT COUNT(*) AS CNTREC FROM pragma_table_info('Plugins_History') WHERE name='ObjectGUID'
+ """).fetchone()[0] == 0
+
+ if plug_ObjectGUID_missing :
+ mylog('verbose', ["[upgradeDB] Adding ObjectGUID to the Plugins_History table"])
+ self.sql.execute("""
+ ALTER TABLE "Plugins_History" ADD "ObjectGUID" TEXT
+ """)
+
# -----------------------------------------
# REMOVE after 6/6/2025 - END
# -----------------------------------------
diff --git a/server/flows.py b/server/flows.py
deleted file mode 100755
index 87b59ba4..00000000
--- a/server/flows.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import json
-
-def update_value(json_data, object_path, key, value, target_property, desired_value):
- # Helper function to traverse the JSON structure and get the target object
- def traverse(obj, path):
- keys = path.split(".")
- for key in keys:
- if isinstance(obj, list):
- key = int(key)
- obj = obj[key]
- return obj
-
- # Helper function to update the target property with the desired value
- def update(obj, path, key, value, target_property, desired_value):
- keys = path.split(".")
- for i, key in enumerate(keys):
- if isinstance(obj, list):
- key = int(key)
- # Check if we have reached the desired object
- if i == len(keys) - 1 and obj[key][key] == value:
- # Update the target property with the desired value
- obj[key][target_property] = desired_value
- else:
- obj = obj[key]
- return obj
-
- # Get the target object based on the object path
- target_obj = traverse(json_data, object_path)
- # Update the value in the target object
- updated_obj = update(json_data, object_path, key, value, target_property, desired_value)
- return updated_obj
\ No newline at end of file
diff --git a/server/models/device_instance.py b/server/models/device_instance.py
new file mode 100755
index 00000000..294c32bc
--- /dev/null
+++ b/server/models/device_instance.py
@@ -0,0 +1,84 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+from logger import mylog, print_log
+
+#-------------------------------------------------------------------------------
+# Device object handling (WIP)
+#-------------------------------------------------------------------------------
+class DeviceInstance:
+ def __init__(self, db):
+ self.db = db
+
+ # Get all
+ def getAll(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices
+ """)
+ return self.db.sql.fetchall()
+
+ # Get all with unknown names
+ def getUnknown(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
+ """)
+ return self.db.sql.fetchall()
+
+ # Get specific column value based on devMac
+ def getValueWithMac(self, column_name, devMac):
+
+ query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
+ self.db.sql.execute(query, (devMac,))
+ result = self.db.sql.fetchone()
+ return result[column_name] if result else None
+
+ # Get all down
+ def getDown(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
+ """)
+ return self.db.sql.fetchall()
+
+ # Get all offline
+ def getOffline(self):
+ self.db.sql.execute("""
+ SELECT * FROM Devices WHERE devPresentLastScan = 0
+ """)
+ return self.db.sql.fetchall()
+
+ # Get a device by devGUID
+ def getByGUID(self, devGUID):
+ self.db.sql.execute("SELECT * FROM Devices WHERE devGUID = ?", (devGUID,))
+ result = self.db.sql.fetchone()
+ return dict(result) if result else None
+
+ # Check if a device exists by devGUID
+ def exists(self, devGUID):
+ self.db.sql.execute("SELECT COUNT(*) AS count FROM Devices WHERE devGUID = ?", (devGUID,))
+ result = self.db.sql.fetchone()
+ return result["count"] > 0
+
+ # Update a specific field for a device
+ def updateField(self, devGUID, field, value):
+ if not self.exists(devGUID):
+ m = f"[Device] In 'updateField': GUID {devGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute(f"""
+ UPDATE Devices SET {field} = ? WHERE devGUID = ?
+ """, (value, devGUID))
+ self.db.sql.commit()
+
+ # Delete a device by devGUID
+ def delete(self, devGUID):
+ if not self.exists(devGUID):
+ m = f"[Device] In 'delete': GUID {devGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute("DELETE FROM Devices WHERE devGUID = ?", (devGUID,))
+ self.db.sql.commit()
\ No newline at end of file
diff --git a/server/models/plugin_object_instance.py b/server/models/plugin_object_instance.py
new file mode 100755
index 00000000..21fe8cd9
--- /dev/null
+++ b/server/models/plugin_object_instance.py
@@ -0,0 +1,65 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+from logger import mylog, print_log
+
+#-------------------------------------------------------------------------------
+# Plugin object handling (WIP)
+#-------------------------------------------------------------------------------
+class PluginObjectInstance:
+ def __init__(self, db):
+ self.db = db
+
+ # Get all plugin objects
+ def getAll(self):
+ self.db.sql.execute("""
+ SELECT * FROM Plugins_Objects
+ """)
+ return self.db.sql.fetchall()
+
+ # Get plugin object by ObjectGUID
+ def getByGUID(self, ObjectGUID):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ result = self.db.sql.fetchone()
+ return dict(result) if result else None
+
+ # Check if a plugin object exists by ObjectGUID
+ def exists(self, ObjectGUID):
+ self.db.sql.execute("SELECT COUNT(*) AS count FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ result = self.db.sql.fetchone()
+ return result["count"] > 0
+
+ # Get objects by plugin name
+ def getByPlugin(self, plugin):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Plugin = ?", (plugin,))
+ return self.db.sql.fetchall()
+
+ # Get objects by status
+ def getByStatus(self, status):
+ self.db.sql.execute("SELECT * FROM Plugins_Objects WHERE Status = ?", (status,))
+ return self.db.sql.fetchall()
+
+ # Update a specific field for a plugin object
+ def updateField(self, ObjectGUID, field, value):
+ if not self.exists(ObjectGUID):
+ m = f"[PluginObject] In 'updateField': GUID {ObjectGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute(f"""
+ UPDATE Plugins_Objects SET {field} = ? WHERE ObjectGUID = ?
+ """, (value, ObjectGUID))
+ self.db.sql.commit()
+
+ # Delete a plugin object by ObjectGUID
+ def delete(self, ObjectGUID):
+ if not self.exists(ObjectGUID):
+ m = f"[PluginObject] In 'delete': GUID {ObjectGUID} not found."
+ mylog('none', m)
+ raise ValueError(m)
+
+ self.db.sql.execute("DELETE FROM Plugins_Objects WHERE ObjectGUID = ?", (ObjectGUID,))
+ self.db.sql.commit()
diff --git a/server/plugin.py b/server/plugin.py
index 5cfd6251..13275130 100755
--- a/server/plugin.py
+++ b/server/plugin.py
@@ -18,6 +18,7 @@ from api import update_api
from plugin_utils import logEventStatusCounts, get_plugin_string, get_plugin_setting_obj, print_plugin_info, list_to_csv, combine_plugin_objects, resolve_wildcards_arr, handle_empty, custom_plugin_decoder, decode_and_rename_files
from notification import Notification_obj, write_notification
from user_events_queue import UserEventsQueue
+from crypto_utils import generate_deterministic_guid
# Make sure log level is initialized correctly
Logger(get_setting_value('LOG_LEVEL'))
@@ -582,13 +583,14 @@ def process_plugin_events(db, plugin, plugEventsArr):
for plugObj in pluginObjects:
# keep old createdTime time if the plugObj already was created before
createdTime = plugObj.changed if plugObj.status == 'new' else plugObj.created
- # 18 values without Index
+ # 19 values without Index
values = (
plugObj.pluginPref, plugObj.primaryId, plugObj.secondaryId, createdTime,
plugObj.changed, plugObj.watched1, plugObj.watched2, plugObj.watched3,
plugObj.watched4, plugObj.status, plugObj.extra, plugObj.userData,
plugObj.foreignKey, plugObj.syncHubNodeName,
- plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4
+ plugObj.helpVal1, plugObj.helpVal2, plugObj.helpVal3, plugObj.helpVal4,
+ plugObj.objectGUID
)
if plugObj.status == 'new':
@@ -625,8 +627,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4", "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", objects_to_insert
)
@@ -637,7 +639,8 @@ def process_plugin_events(db, plugin, plugEventsArr):
UPDATE Plugins_Objects
SET "Plugin" = ?, "Object_PrimaryID" = ?, "Object_SecondaryID" = ?, "DateTimeCreated" = ?,
"DateTimeChanged" = ?, "Watched_Value1" = ?, "Watched_Value2" = ?, "Watched_Value3" = ?,
- "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?
+ "Watched_Value4" = ?, "Status" = ?, "Extra" = ?, "UserData" = ?, "ForeignKey" = ?, "SyncHubNodeName" = ?, "HelpVal1" = ?, "HelpVal2" = ?, "HelpVal3" = ?, "HelpVal4" = ?,
+ "ObjectGUID" = ?
WHERE "Index" = ?
""", objects_to_update
)
@@ -651,8 +654,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
+ "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", events_to_insert
)
@@ -665,8 +669,9 @@ def process_plugin_events(db, plugin, plugEventsArr):
("Plugin", "Object_PrimaryID", "Object_SecondaryID", "DateTimeCreated",
"DateTimeChanged", "Watched_Value1", "Watched_Value2", "Watched_Value3",
"Watched_Value4", "Status", "Extra", "UserData", "ForeignKey", "SyncHubNodeName",
- "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4")
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ "HelpVal1", "HelpVal2", "HelpVal3", "HelpVal4",
+ "ObjectGUID")
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", history_to_insert
)
@@ -807,6 +812,7 @@ class plugin_object_class:
self.helpVal2 = objDbRow[16]
self.helpVal3 = objDbRow[17]
self.helpVal4 = objDbRow[18]
+ self.objectGUID = generate_deterministic_guid(self.pluginPref, self.primaryId, self.secondaryId)
# Check if self.status is valid
diff --git a/server/plugin_utils.py b/server/plugin_utils.py
index 5cd55ad3..e60e7f6b 100755
--- a/server/plugin_utils.py
+++ b/server/plugin_utils.py
@@ -6,7 +6,7 @@ from logger import mylog
from const import pluginsPath, logPath, apiPath
from helper import timeNowTZ, get_file_content, write_file, get_setting, get_setting_value, setting_value_to_python_type
from app_state import updateState
-from crypto_utils import decrypt_data
+from crypto_utils import decrypt_data, generate_deterministic_guid
module_name = 'Plugin utils'
diff --git a/server/device.py b/server/scan/device_handling.py
similarity index 95%
rename from server/device.py
rename to server/scan/device_handling.py
index 0a2dce28..1a0d9623 100755
--- a/server/device.py
+++ b/server/scan/device_handling.py
@@ -1,62 +1,17 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
import subprocess
-
import conf
import os
import re
from helper import timeNowTZ, get_setting, get_setting_value, list_to_where, resolve_device_name_dig, get_device_name_nbtlookup, get_device_name_nslookup, get_device_name_mdns, check_IP_format, sanitize_SQL_input
from logger import mylog, print_log
from const import vendorsPath, vendorsPathNewest, sql_generateGuid
-
-#-------------------------------------------------------------------------------
-# Device object handling (WIP)
-#-------------------------------------------------------------------------------
-class Device_obj:
- def __init__(self, db):
- self.db = db
-
- # Get all
- def getAll(self):
- self.db.sql.execute("""
- SELECT * FROM Devices
- """)
- return self.db.sql.fetchall()
-
- # Get all with unknown names
- def getUnknown(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devName in ("(unknown)", "(name not found)", "" )
- """)
- return self.db.sql.fetchall()
-
- # Get specific column value based on devMac
- def getValueWithMac(self, column_name, devMac):
-
- query = f"SELECT {column_name} FROM Devices WHERE devMac = ?"
-
- self.db.sql.execute(query, (devMac,))
-
- result = self.db.sql.fetchone()
-
- return result[column_name] if result else None
-
- # Get all down
- def getDown(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devAlertDown = 1 and devPresentLastScan = 0
- """)
- return self.db.sql.fetchall()
-
- # Get all down
- def getOffline(self):
- self.db.sql.execute("""
- SELECT * FROM Devices WHERE devPresentLastScan = 0
- """)
- return self.db.sql.fetchall()
-
-
-
-
+from models.device_instance import DeviceInstance
#-------------------------------------------------------------------------------
# Removing devices from the CurrentScan DB table which the user chose to ignore by MAC or IP
@@ -535,7 +490,7 @@ def update_devices_names (db):
foundNbtLookup = 0
# Gen unknown devices
- device_handler = Device_obj(db)
+ device_handler = DeviceInstance(db)
# Retrieve devices
unknownDevices = device_handler.getUnknown()
diff --git a/server/networkscan.py b/server/scan/session_events.py
similarity index 97%
rename from server/networkscan.py
rename to server/scan/session_events.py
index 6ed3aa8d..6c357e4c 100755
--- a/server/networkscan.py
+++ b/server/scan/session_events.py
@@ -1,15 +1,16 @@
+import sys
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
import conf
-
-
-from device import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
+from scan.device_handling import create_new_devices, print_scan_stats, save_scanned_devices, update_devices_data_from_scan, exclude_ignored_devices
from helper import timeNowTZ
from logger import mylog
from reporting import skip_repeated_notifications
-
#===============================================================================
# SCAN NETWORK
#===============================================================================
diff --git a/server/workflows/actions.py b/server/workflows/actions.py
new file mode 100755
index 00000000..7ef7dad4
--- /dev/null
+++ b/server/workflows/actions.py
@@ -0,0 +1,90 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+from workflows.triggers import Trigger
+
+class Action:
+ """Base class for all actions."""
+
+ def __init__(self, trigger):
+ self.trigger = trigger
+
+ def execute(self, obj):
+ """Executes the action on the given object."""
+ raise NotImplementedError("Subclasses must implement execute()")
+
+
+class UpdateFieldAction(Action):
+ """Action to update a specific field of an object."""
+
+ def __init__(self, field, value, trigger):
+ super().__init__(trigger) # Call the base class constructor
+ self.field = field
+ self.value = value
+
+ def execute(self):
+ mylog('verbose', [f"Updating field '{self.field}' to '{self.value}' for event object {self.trigger.object_type}"])
+
+ obj = self.trigger.object
+
+ if isinstance(obj, dict) and "ObjectGUID" in obj:
+ plugin_instance = PluginObjectInstance(self.trigger.db)
+ plugin_instance.updateField(obj["ObjectGUID"], self.field, self.value)
+ elif isinstance(obj, dict) and "devGUID" in obj:
+ device_instance = DeviceInstance(self.trigger.db)
+ device_instance.updateField(obj["devGUID"], self.field, self.value)
+ return obj
+
+
+class RunPluginAction(Action):
+ """Action to run a specific plugin."""
+
+ def __init__(self, plugin_name, params, trigger): # Add trigger
+ super().__init__(trigger) # Call parent constructor
+ self.plugin_name = plugin_name
+ self.params = params
+
+ def execute(self):
+
+ obj = self.trigger.object
+
+ mylog('verbose', [f"Executing plugin '{self.plugin_name}' with parameters {self.params} for object {obj}"])
+ # PluginManager.run(self.plugin_name, self.parameters)
+ return obj
+
+
+class SendNotificationAction(Action):
+ """Action to send a notification."""
+
+ def __init__(self, method, message, trigger):
+ super().__init__(trigger) # Call parent constructor
+ self.method = method # Fix attribute name
+ self.message = message
+
+ def execute(self):
+ obj = self.trigger.object
+ mylog('verbose', [f"Sending notification via '{self.method}': {self.message} for object {obj}"])
+ # NotificationManager.send(self.method, self.message)
+ return obj
+
+
+class ActionGroup:
+ """Handles multiple actions applied to an object."""
+
+ def __init__(self, actions):
+ self.actions = actions
+
+ def execute(self, obj):
+ for action in self.actions:
+ action.execute(obj)
+ return obj
\ No newline at end of file
diff --git a/server/appevent.py b/server/workflows/app_events.py
similarity index 92%
rename from server/appevent.py
rename to server/workflows/app_events.py
index c855150b..05222be9 100755
--- a/server/appevent.py
+++ b/server/workflows/app_events.py
@@ -1,13 +1,28 @@
import datetime
import json
import uuid
+import sys
+import pytz
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
# Register NetAlertX modules
import conf
+from helper import get_setting_value, timeNowTZ
+# Make sure the TIMEZONE for logging is correct
+# conf.tz = pytz.timezone(get_setting_value('TIMEZONE'))
+
+from logger import mylog, Logger, print_log, logResult
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
from const import applicationPath, logPath, apiPath, confFileName, sql_generateGuid
-from logger import logResult, mylog, print_log
from helper import timeNowTZ
+
#-------------------------------------------------------------------------------
# Execution object handling
#-------------------------------------------------------------------------------
@@ -32,6 +47,7 @@ class AppEvent_obj:
self.db.sql.execute("""CREATE TABLE IF NOT EXISTS "AppEvents" (
"Index" INTEGER,
"GUID" TEXT UNIQUE,
+ "AppEventProcessed" BOOLEAN,
"DateTimeCreated" TEXT,
"ObjectType" TEXT, -- ObjectType (Plugins, Notifications, Events)
"ObjectGUID" TEXT,
@@ -59,7 +75,9 @@ class AppEvent_obj:
sql_devices_mappedColumns = '''
"GUID",
"DateTimeCreated",
+ "AppEventProcessed",
"ObjectType",
+ "ObjectGUID",
"ObjectPrimaryID",
"ObjectSecondaryID",
"ObjectStatus",
@@ -81,7 +99,9 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
+ FALSE,
'Devices',
+ NEW.devGUID,
NEW.devMac,
NEW.devLastIP,
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
@@ -111,7 +131,9 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
+ FALSE,
'Devices',
+ NEW.devGUID,
NEW.devMac,
NEW.devLastIP,
CASE WHEN NEW.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
@@ -135,7 +157,9 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
+ FALSE,
'Devices',
+ OLD.devGUID,
OLD.devMac,
OLD.devLastIP,
CASE WHEN OLD.devPresentLastScan = 1 THEN 'online' ELSE 'offline' END,
@@ -155,7 +179,9 @@ class AppEvent_obj:
sql_plugins_objects_mappedColumns = '''
"GUID",
"DateTimeCreated",
+ "AppEventProcessed",
"ObjectType",
+ "ObjectGUID",
"ObjectPlugin",
"ObjectPrimaryID",
"ObjectSecondaryID",
@@ -176,8 +202,10 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
- 'Plugins_Objects',
- NEW.Plugin,
+ FALSE,
+ 'Plugins_Objects',
+ NEW.ObjectGUID,
+ NEW.Plugin,
NEW.Object_PrimaryID,
NEW.Object_SecondaryID,
NEW.ForeignKey,
@@ -199,7 +227,9 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
+ FALSE,
'Plugins_Objects',
+ NEW.ObjectGUID,
NEW.Plugin,
NEW.Object_PrimaryID,
NEW.Object_SecondaryID,
@@ -222,7 +252,9 @@ class AppEvent_obj:
VALUES (
{sql_generateGuid},
DATETIME('now'),
+ FALSE,
'Plugins_Objects',
+ OLD.ObjectGUID,
OLD.Plugin,
OLD.Object_PrimaryID,
OLD.Object_SecondaryID,
diff --git a/server/workflows/conditions.py b/server/workflows/conditions.py
new file mode 100755
index 00000000..801974df
--- /dev/null
+++ b/server/workflows/conditions.py
@@ -0,0 +1,83 @@
+import re
+import sys
+import json
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+class Condition:
+ """Evaluates a single condition."""
+
+ def __init__(self, condition_json):
+ self.field = condition_json["field"]
+ self.operator = condition_json["operator"]
+ self.value = condition_json["value"]
+ self.negate = condition_json.get("negate", False)
+
+ def evaluate(self, trigger):
+
+ # try finding the value of the field on the event triggering this workflow or the object triggering the app event
+ appEvent_value = trigger.event[self.field] if self.field in trigger.event.keys() else None
+ eveObj_value = trigger.object[self.field] if self.field in trigger.object.keys() else None
+
+
+ # proceed only if value found
+ if appEvent_value is None and eveObj_value is None:
+ return False
+ elif appEvent_value is not None:
+ obj_value = appEvent_value
+ elif eveObj_value is not None:
+ obj_value = eveObj_value
+
+ # process based on operators
+ if self.operator == "equals":
+ result = str(obj_value) == str(self.value)
+ elif self.operator == "contains":
+ result = str(self.value) in str(obj_value)
+ elif self.operator == "regex":
+ result = bool(re.match(self.value, str(obj_value)))
+ else:
+ m = f"[WF] Unsupported operator: {self.operator}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ return not result if self.negate else result
+
+
+class ConditionGroup:
+ """Handles condition groups with AND, OR logic, supporting nested groups."""
+
+ def __init__(self, group_json):
+
+ mylog('none', ["[WF] json.dumps(group_json)"])
+ mylog('none', [json.dumps(group_json)])
+ mylog('none', [group_json])
+
+ self.logic = group_json.get("logic", "AND").upper()
+ self.conditions = []
+
+ for condition in group_json["conditions"]:
+ if "field" in condition: # Simple condition
+ self.conditions.append(Condition(condition))
+ else: # Nested condition group
+ self.conditions.append(ConditionGroup(condition))
+
+ def evaluate(self, event):
+ results = [condition.evaluate(event) for condition in self.conditions]
+
+ if self.logic == "AND":
+ return all(results)
+ elif self.logic == "OR":
+ return any(results)
+ else:
+ m = f"[WF] Unsupported logic: {self.logic}"
+ mylog('none', [m])
+ raise ValueError(m)
diff --git a/server/workflows/manager.py b/server/workflows/manager.py
new file mode 100755
index 00000000..8740ec33
--- /dev/null
+++ b/server/workflows/manager.py
@@ -0,0 +1,154 @@
+import sys
+import json
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from const import fullConfFolder
+import workflows.actions
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+from workflows.triggers import Trigger
+from workflows.conditions import ConditionGroup
+from workflows.actions import *
+
+class WorkflowManager:
+ def __init__(self, db):
+ self.db = db
+ self.workflows = self.load_workflows()
+ self.update_api = False
+
+ def load_workflows(self):
+ """Load workflows from workflows.json."""
+ try:
+ workflows_json_path = fullConfFolder + '/workflows.json'
+ with open(workflows_json_path, 'r') as f:
+ workflows = json.load(f)
+ return workflows
+ except (FileNotFoundError, json.JSONDecodeError):
+ mylog('none', ['[WF] Failed to load workflows.json'])
+ return []
+
+ def get_new_app_events(self):
+ """Get new unprocessed events from the AppEvents table."""
+ result = self.db.sql.execute("""
+ SELECT * FROM AppEvents
+ WHERE AppEventProcessed = 0
+ ORDER BY DateTimeCreated ASC
+ """).fetchall()
+ return result
+
+ def process_event(self, event):
+ """Process the events. Check if events match a workflow trigger"""
+
+ mylog('verbose', [f"[WF] Processing event with GUID {event['GUID']}"])
+
+ # Check if the trigger conditions match
+ for workflow in self.workflows:
+
+ # construct trigger object which also evaluates if the current event triggers it
+ trigger = Trigger(workflow["trigger"], event, self.db)
+
+ if trigger.triggered:
+
+ mylog('verbose', [f"[WF] Event with GUID '{event['GUID']}' triggered the workflow '{workflow['name']}'"])
+
+ self.execute_workflow(workflow, trigger)
+
+ # After processing the event, mark the event as processed (set AppEventProcessed to 1)
+ self.db.sql.execute("""
+ UPDATE AppEvents
+ SET AppEventProcessed = 1
+ WHERE "Index" = ?
+ """, (event['Index'],)) # Pass the event's unique identifier
+ self.db.commitDB()
+
+
+
+ def execute_workflow(self, workflow, trigger):
+ """Execute the actions in the given workflow if conditions are met."""
+
+ # Ensure conditions exist
+ if not isinstance(workflow.get("conditions"), list):
+ m = f"[WF] workflow['conditions'] must be a list"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ # Evaluate each condition group separately
+ for condition_group in workflow["conditions"]:
+
+ evaluator = ConditionGroup(condition_group)
+
+ if evaluator.evaluate(trigger): # If any group evaluates to True
+
+ mylog('none', [f"[WF] Workflow {workflow['name']} will be executed - conditions were evaluated as TRUE"])
+ mylog('debug', [f"[WF] Workflow condition_group: {condition_group}"])
+
+ self.execute_actions(workflow["actions"], trigger)
+ return # Stop if a condition group succeeds
+
+ mylog('none', ["[WF] No condition group matched. Actions not executed."])
+
+
+ def execute_actions(self, actions, trigger):
+ """Execute the actions defined in a workflow."""
+
+ for action in actions:
+ if action["type"] == "update_field":
+ field = action["field"]
+ value = action["value"]
+ action_instance = UpdateFieldAction(field, value, trigger)
+ # indicate if the api has to be updated
+ self.update_api = True
+
+ elif action["type"] == "run_plugin":
+ plugin_name = action["plugin"]
+ params = action["params"]
+ action_instance = RunPluginAction(plugin_name, params, trigger)
+
+ # elif action["type"] == "send_notification":
+ # method = action["method"]
+ # message = action["message"]
+ # action_instance = SendNotificationAction(method, message, trigger)
+
+ else:
+ m = f"[WF] Unsupported action type: {action['type']}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ action_instance.execute() # Execute the action
+
+ # if result:
+ # # Iterate through actions and execute them
+ # for action in workflow["actions"]:
+ # if action["type"] == "update_field":
+ # # Action type is "update_field", so map to UpdateFieldAction
+ # field = action["field"]
+ # value = action["value"]
+ # action_instance = UpdateFieldAction(field, value)
+ # action_instance.execute(trigger.event)
+
+ # elif action["type"] == "run_plugin":
+ # # Action type is "run_plugin", so map to RunPluginAction
+ # plugin_name = action["plugin"]
+ # params = action["params"]
+ # action_instance = RunPluginAction(plugin_name, params)
+ # action_instance.execute(trigger.event)
+ # elif action["type"] == "send_notification":
+ # # Action type is "send_notification", so map to SendNotificationAction
+ # method = action["method"]
+ # message = action["message"]
+ # action_instance = SendNotificationAction(method, message)
+ # action_instance.execute(trigger.event)
+ # else:
+ # # Handle unsupported action types
+ # raise ValueError(f"Unsupported action type: {action['type']}")
+
+
+
diff --git a/server/workflows/triggers.py b/server/workflows/triggers.py
new file mode 100755
index 00000000..f5f4be60
--- /dev/null
+++ b/server/workflows/triggers.py
@@ -0,0 +1,62 @@
+import sys
+
+# Register NetAlertX directories
+INSTALL_PATH="/app"
+sys.path.extend([f"{INSTALL_PATH}/server"])
+
+import conf
+from logger import mylog, Logger
+from helper import get_setting_value, timeNowTZ
+
+# Make sure log level is initialized correctly
+Logger(get_setting_value('LOG_LEVEL'))
+
+
+class Trigger:
+ """Represents a trigger definition"""
+
+ def __init__(self, triggerJson, event, db):
+ """
+ :param name: Friendly name of the trigger
+ :param triggerJson: JSON trigger object {"object_type":"Devices",event_type":"update"}
+ :param event: The actual event that the trigger is evaluated against
+ :param db: DB connection in case trigger matches and object needs to be retrieved
+ """
+ self.object_type = triggerJson["object_type"]
+ self.event_type = triggerJson["event_type"]
+ self.event = event # Store the triggered event context, if provided
+ self.triggered = self.object_type == event["ObjectType"] and self.event_type == event["AppEventType"]
+
+ mylog('verbose', [f"[WF] self.triggered '{self.triggered}'"])
+
+ if self.triggered:
+ # object type corresponds with the DB table name
+ db_table = self.object_type
+
+ if db_table == "Devices":
+ refField = "devGUID"
+ elif db_table == "Plugins_Objects":
+ refField = "ObjectGUID"
+ else:
+ m = f"[WF] Unsupported object_type: {self.object_type}"
+ mylog('none', [m])
+ raise ValueError(m)
+
+ query = f"""
+ SELECT * FROM
+ {db_table}
+ WHERE {refField} = ?
+ """
+
+ mylog('debug', [query])
+
+ result = db.sql.execute(query, (event["ObjectGUID"],)).fetchall()
+ self.object = result[0] if result else None
+ else:
+ self.object = None
+
+
+ def set_event(self, event):
+ """Set or update the event context for this trigger"""
+ self.event = event
+
diff --git a/test/workflows.json b/test/workflows.json
new file mode 100755
index 00000000..754ef622
--- /dev/null
+++ b/test/workflows.json
@@ -0,0 +1,74 @@
+[
+ {
+ "name": "Sample Device Update Workflow",
+ "trigger": {
+ "object_type": "Devices",
+ "event_type": "update"
+ },
+ "conditions": [
+ {
+ "logic": "AND",
+ "conditions": [
+ {
+ "field": "devVendor",
+ "operator": "contains",
+ "value": "Google"
+ },
+ {
+ "field": "devIsNew",
+ "operator": "equals",
+ "value": "1"
+ },
+ {
+ "logic": "OR",
+ "conditions": [
+ {
+ "field": "devIsNew",
+ "operator": "equals",
+ "value": "1"
+ },
+ {
+ "field": "devName",
+ "operator": "contains",
+ "value": "Google"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "actions": [
+ {
+ "type": "update_field",
+ "field": "devIsNew",
+ "value": "0"
+ }
+ ]
+ },
+ {
+ "name": "Sample Plugin Object Workflow",
+ "trigger": {
+ "object_type": "Plugins_Objects",
+ "event_type": "create"
+ },
+ "conditions": [
+ {
+ "logic": "AND",
+ "conditions": [
+ {
+ "field": "Plugin",
+ "operator": "equals",
+ "value": "ARPSCAN"
+ },
+ {
+ "field": "Status",
+ "operator": "equals",
+ "value": "missing-in-last-scan"
+ }
+ ]
+ }
+ ],
+ "actions": [
+ ]
+ }
+]
\ No newline at end of file
-
+
- Events +
+
+