From 09e360c746faf861c2b99d4ded3d102547784be8 Mon Sep 17 00:00:00 2001 From: jokob-sk Date: Mon, 4 Aug 2025 15:12:51 +1000 Subject: [PATCH] prometheus metrics endpoint --- Dockerfile | 2 +- Dockerfile.debian | 2 +- install/install_dependencies.debian.sh | 2 +- server/api.py | 2 +- .../api_server_start.py} | 41 ++++++++-- .../graphql_schema.py | 0 server/api_server/prometheus_metrics.py | 76 +++++++++++++++++++ 7 files changed, 114 insertions(+), 11 deletions(-) rename server/{graphql_server/graphql_server_start.py => api_server/api_server_start.py} (63%) rename server/{graphql_server => api_server}/graphql_schema.py (100%) create mode 100755 server/api_server/prometheus_metrics.py diff --git a/Dockerfile b/Dockerfile index 68ed9b72..a19ef51e 100755 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,7 @@ ENV PATH="/opt/venv/bin:$PATH" COPY . ${INSTALL_DIR}/ -RUN pip install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag git+https://github.com/foreign-sub/aiofreepybox.git \ +RUN pip install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask flask-cors tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag git+https://github.com/foreign-sub/aiofreepybox.git \ && bash -c "find ${INSTALL_DIR} -type d -exec chmod 750 {} \;" \ && bash -c "find ${INSTALL_DIR} -type f -exec chmod 640 {} \;" \ && bash -c "find ${INSTALL_DIR} -type f \( -name '*.sh' -o -name '*.py' -o -name 'speedtest-cli' \) -exec chmod 750 {} \;" diff --git a/Dockerfile.debian b/Dockerfile.debian index 0941216c..b3cf222f 100755 --- a/Dockerfile.debian +++ b/Dockerfile.debian @@ -43,7 +43,7 @@ RUN phpenmod -v 8.2 sqlite3 RUN apt-get install -y python3-venv RUN python3 -m venv myenv -RUN 
/bin/bash -c "source myenv/bin/activate && update-alternatives --install /usr/bin/python python /usr/bin/python3 10 && pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag " +RUN /bin/bash -c "source myenv/bin/activate && update-alternatives --install /usr/bin/python python /usr/bin/python3 10 && pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask flask-cors tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag " # Create a buildtimestamp.txt to later check if a new version was released RUN date +%s > ${INSTALL_DIR}/front/buildtimestamp.txt diff --git a/install/install_dependencies.debian.sh b/install/install_dependencies.debian.sh index add491f9..81acf967 100755 --- a/install/install_dependencies.debian.sh +++ b/install/install_dependencies.debian.sh @@ -30,5 +30,5 @@ source myenv/bin/activate update-alternatives --install /usr/bin/python python /usr/bin/python3 10 # install packages thru pip3 -pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag git+https://github.com/foreign-sub/aiofreepybox.git +pip3 install openwrt-luci-rpc asusrouter asyncio aiohttp graphene flask flask-cors tplink-omada-client wakeonlan pycryptodome requests paho-mqtt scapy cron-converter pytz json2table dhcp-leases pyunifi speedtest-cli chardet python-nmap dnspython librouteros yattag git+https://github.com/foreign-sub/aiofreepybox.git diff --git a/server/api.py b/server/api.py index 58d667c7..17d0ee43 100755 --- a/server/api.py +++ b/server/api.py @@ 
-13,7 +13,7 @@ from models.user_events_queue_instance import UserEventsQueueInstance from messaging.in_app import write_notification # Import the start_server function -from graphql_server.graphql_server_start import start_server +from api_server.api_server_start import start_server apiEndpoints = [] diff --git a/server/graphql_server/graphql_server_start.py b/server/api_server/api_server_start.py similarity index 63% rename from server/graphql_server/graphql_server_start.py rename to server/api_server/api_server_start.py index 9d4be614..7502e399 100755 --- a/server/graphql_server/graphql_server_start.py +++ b/server/api_server/api_server_start.py @@ -1,6 +1,8 @@ import threading -from flask import Flask, request, jsonify +from flask import Flask, request, jsonify, Response +from flask_cors import CORS from .graphql_schema import devicesSchema +from .prometheus_metrics import getMetricStats from graphene import Schema import sys @@ -15,9 +17,11 @@ from messaging.in_app import write_notification # Flask application app = Flask(__name__) +CORS(app, resources={r"/metrics": {"origins": "*"}}, supports_credentials=True, allow_headers=["Authorization"]) -# Retrieve API token and port -graphql_port_value = get_setting_value("GRAPHQL_PORT") +# -------------------------- +# GraphQL Endpoints +# -------------------------- # Endpoint used when accessed via browser @app.route("/graphql", methods=["GET"]) @@ -29,10 +33,7 @@ def graphql_debug(): @app.route("/graphql", methods=["POST"]) def graphql_endpoint(): # Check for API token in headers - incoming_header_token = request.headers.get("Authorization") - api_token_value = get_setting_value("API_TOKEN") - - if incoming_header_token != f"Bearer {api_token_value}": + if not is_authorized(): msg = '[graphql_server] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.' 
mylog('verbose', [msg]) return jsonify({"error": msg}), 401 @@ -47,6 +48,32 @@ def graphql_endpoint(): # Return the result as JSON return jsonify(result.data) +# -------------------------- +# Prometheus /metrics Endpoint +# -------------------------- + +@app.route("/metrics") +def metrics(): + + # Check for API token in headers + if not is_authorized(): + msg = '[metrics] Unauthorized access attempt - make sure your GRAPHQL_PORT and API_TOKEN settings are correct.' + mylog('verbose', [msg]) + return jsonify({"error": msg}), 401 + + + # Return Prometheus metrics as plain text + return Response(getMetricStats(), mimetype="text/plain") + + +# -------------------------- +# Auth Helper & Background Server Start +# -------------------------- +def is_authorized(): + token = request.headers.get("Authorization") + return token == f"Bearer {get_setting_value('API_TOKEN')}" + + def start_server(graphql_port, app_state): """Start the GraphQL server in a background thread.""" diff --git a/server/graphql_server/graphql_schema.py b/server/api_server/graphql_schema.py similarity index 100% rename from server/graphql_server/graphql_schema.py rename to server/api_server/graphql_schema.py diff --git a/server/api_server/prometheus_metrics.py b/server/api_server/prometheus_metrics.py new file mode 100755 index 00000000..de9d8fcf --- /dev/null +++ b/server/api_server/prometheus_metrics.py @@ -0,0 +1,76 @@ +import json +import sys + +# Register NetAlertX directories +INSTALL_PATH = "/app" +sys.path.extend([f"{INSTALL_PATH}/server"]) + +from logger import mylog +from const import apiPath +from helper import is_random_mac, get_number_of_children, format_ip_long, get_setting_value + +def escape_label_value(val): + """ + Escape special characters for Prometheus labels. + """ + return str(val).replace('\\', '\\\\').replace('\n', '\\n').replace('"', '\\"') + +# Folder where NetAlertX writes its JSON API result files +folder = apiPath + +def getMetricStats(): + output = [] + + # 1.
Dashboard totals + try: + with open(folder + 'table_devices_tiles.json', 'r') as f: + tiles_data = json.load(f)["data"] + + if isinstance(tiles_data, list) and tiles_data: + totals = tiles_data[0] + output.append(f'netalertx_connected_devices {totals.get("connected", 0)}') + output.append(f'netalertx_offline_devices {totals.get("offline", 0)}') + output.append(f'netalertx_down_devices {totals.get("down", 0)}') + output.append(f'netalertx_new_devices {totals.get("new", 0)}') + output.append(f'netalertx_archived_devices {totals.get("archived", 0)}') + output.append(f'netalertx_favorite_devices {totals.get("favorites", 0)}') + output.append(f'netalertx_my_devices {totals.get("my_devices", 0)}') + else: + output.append("# Unexpected format in table_devices_tiles.json") + except (FileNotFoundError, json.JSONDecodeError) as e: + mylog('none', f'[metrics] Error loading tiles data: {e}') + output.append(f"# Error loading tiles data: {e}") + except Exception as e: + output.append(f"# General error loading dashboard totals: {e}") + + # 2. 
Device-level metrics + try: + with open(folder + 'table_devices.json', 'r') as f: + data = json.load(f) + + devices = data.get("data", []) + + for row in devices: + name = escape_label_value(row.get("devName", "unknown")) + mac = escape_label_value(row.get("devMac", "unknown")) + ip = escape_label_value(row.get("devLastIP", "unknown")) + vendor = escape_label_value(row.get("devVendor", "unknown")) + first_conn = escape_label_value(row.get("devFirstConnection", "unknown")) + last_conn = escape_label_value(row.get("devLastConnection", "unknown")) + dev_type = escape_label_value(row.get("devType", "unknown")) + raw_status = row.get("devStatus", "Unknown") + dev_status = raw_status.replace("-", "").capitalize() + + output.append( + f'netalertx_device_status{{device="{name}", mac="{mac}", ip="{ip}", vendor="{vendor}", ' + f'first_connection="{first_conn}", last_connection="{last_conn}", dev_type="{dev_type}", ' + f'device_status="{dev_status}"}} 1' + ) + + except (FileNotFoundError, json.JSONDecodeError) as e: + mylog('none', f'[metrics] Error loading devices data: {e}') + output.append(f"# Error loading devices data: {e}") + except Exception as e: + output.append(f"# General error processing device metrics: {e}") + + return "\n".join(output) + "\n"