mirror of
https://github.com/wizarrrr/wizarr.git
synced 2025-12-23 23:59:23 -05:00
feat(activity): Implement activity ingestion, identity resolution, and maintenance services
- Added `identity_resolution.py` for resolving Wizarr users and identities. - Created `ingestion.py` to handle recording and updating activity events. - Introduced `maintenance.py` for cleanup and session management tasks. - Developed `queries.py` for read-oriented operations on activity sessions. - Implemented background tasks in `activity.py` for scheduled maintenance. - Added tests for activity services and blueprint to ensure functionality.
This commit is contained in:
@@ -45,6 +45,26 @@ def create_app(config_object=DevelopmentConfig):
|
||||
for bp in all_blueprints:
|
||||
app.register_blueprint(bp)
|
||||
|
||||
# Initialise activity monitoring (blueprint already registered above)
|
||||
from app.activity import init_app as init_activity
|
||||
|
||||
init_activity(app)
|
||||
|
||||
# Register activity scheduler tasks if the scheduler is available
|
||||
try:
|
||||
from .extensions import scheduler as activity_scheduler
|
||||
|
||||
if (
|
||||
activity_scheduler
|
||||
and hasattr(activity_scheduler, "scheduler")
|
||||
and activity_scheduler.scheduler
|
||||
):
|
||||
from app.tasks.activity import register_activity_tasks
|
||||
|
||||
register_activity_tasks(app, activity_scheduler)
|
||||
except Exception as exc:
|
||||
app.logger.warning(f"Failed to register activity tasks: {exc}")
|
||||
|
||||
# Step 5: Setup context processors and filters
|
||||
if show_startup:
|
||||
logger.step("Configuring request processing", "⚙️")
|
||||
|
||||
67
app/activity/__init__.py
Normal file
67
app/activity/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
"""
|
||||
Activity monitoring module for Wizarr.
|
||||
|
||||
Provides real-time activity monitoring and historical tracking of media playback
|
||||
sessions across all configured media servers.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import threading
|
||||
|
||||
import structlog
|
||||
from flask import Flask
|
||||
|
||||
from app.models import ActivitySession, ActivitySnapshot
|
||||
from app.services.activity import ActivityService
|
||||
|
||||
from .monitoring.monitor import WebSocketMonitor
|
||||
|
||||
|
||||
def init_app(app: Flask) -> None:
    """Initialise activity monitoring features with the Flask application.

    Creates a WebSocketMonitor, stores it under ``app.extensions["activity_monitor"]``,
    and starts it on a background daemon thread after a short delay and a
    session-recovery pass. No-ops under TESTING, in the Werkzeug reloader
    parent process, or when already initialised.
    """
    logger = structlog.get_logger(__name__)

    # Skip activity monitoring during tests
    if app.config.get("TESTING"):
        logger.debug("Skipping activity monitoring in test mode")
        return

    # Under the Werkzeug reloader the app factory runs twice; only the child
    # process (WERKZEUG_RUN_MAIN == "true") should own the monitor threads.
    if getattr(app, "debug", False) and os.environ.get("WERKZEUG_RUN_MAIN") != "true":
        logger.debug("Skipping activity monitoring in reloader parent process")
        return

    # Guard against double initialisation if init_app is called twice.
    app.extensions = getattr(app, "extensions", {})
    if "activity_monitor" in app.extensions:
        logger.debug("Activity monitoring already initialized, skipping")
        return

    logger.info("Initializing activity monitoring")
    monitor = WebSocketMonitor(app)
    app.extensions["activity_monitor"] = monitor

    def delayed_start():
        # Runs on a daemon thread: give the app a moment to finish booting,
        # clean up orphaned sessions from a previous run, then start the
        # live monitor. The recovery failure is logged but does not prevent
        # monitoring from starting.
        import time

        time.sleep(2)

        try:
            from app.tasks.activity import recover_sessions_on_startup_task

            recovered_count = recover_sessions_on_startup_task(app)
            logger.info(
                "Session recovery completed on startup: %s orphaned sessions cleaned up",
                recovered_count,
            )
        except Exception as exc:
            logger.error("Session recovery failed on startup: %s", exc, exc_info=True)

        monitor.start_monitoring()

    threading.Thread(target=delayed_start, daemon=True).start()

    logger.info("Activity monitoring initialized")
|
||||
|
||||
|
||||
__all__ = ["ActivitySession", "ActivitySnapshot", "ActivityService", "init_app"]
|
||||
380
app/activity/api/blueprint.py
Normal file
380
app/activity/api/blueprint.py
Normal file
@@ -0,0 +1,380 @@
|
||||
"""
|
||||
Activity monitoring blueprint for Wizarr.
|
||||
|
||||
Provides routes for activity dashboard, analytics, and API endpoints
|
||||
for managing and viewing media playback activity data.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import structlog
|
||||
from flask import Blueprint, current_app, jsonify, render_template, request
|
||||
from flask_login import login_required
|
||||
|
||||
# Optional Flask-context imports: fall back to inert stand-ins so this
# module can still be imported in environments without the full app stack.
try:
    from flask_babel import gettext as _

    from app.extensions import db
    from app.models import MediaServer
except ImportError:
    # For testing without Flask app context
    MediaServer = None  # type: ignore[assignment]
    db = None  # type: ignore[assignment]

    def _(x):  # type: ignore[no-redef]
        # Identity stand-in for gettext when flask_babel is unavailable.
        return x


from app.activity.domain.models import ActivityQuery
from app.models import ActivitySession
from app.services.activity import ActivityService

# Create blueprint; template_folder is relative to this package, pointing
# one directory level up at the activity templates.
activity_bp = Blueprint(
    "activity",
    __name__,
    url_prefix="/activity",
    template_folder="../templates",
)
|
||||
|
||||
|
||||
# Template filters
|
||||
@activity_bp.app_template_filter("format_duration")
def format_duration_filter(value):
    """Render a duration given in fractional hours as a compact string.

    Examples: 1.5 -> "1h 30m", 2.0 -> "2h", 0.25 -> "15m", 0/None -> "0m".
    """
    if not value:
        return "0m"

    whole_hours = int(value)
    remainder_minutes = int((value - whole_hours) * 60)

    if whole_hours <= 0:
        return f"{remainder_minutes}m"
    if remainder_minutes <= 0:
        return f"{whole_hours}h"
    return f"{whole_hours}h {remainder_minutes}m"
|
||||
|
||||
|
||||
@activity_bp.route("/", methods=["GET"], strict_slashes=False)
@login_required
def activity_dashboard():
    """Render the main activity page: a tabbed shell whose tabs are
    loaded by the per-tab routes below."""
    return render_template("activity/index.html")
|
||||
|
||||
|
||||
@activity_bp.route("/dashboard")
@login_required
def dashboard_tab():
    """Render the dashboard tab populated with aggregate activity stats.

    Reads an optional ``days`` query parameter (default 7) and falls back
    to an error banner with empty stats on any failure.
    """
    try:
        window_days = int(request.args.get("days", 7))
        dashboard_stats = ActivityService().get_dashboard_stats(days=window_days)
        return render_template(
            "activity/dashboard_tab.html",
            stats=dashboard_stats,
            days=window_days,
        )
    except Exception as exc:
        structlog.get_logger(__name__).error(
            "Failed to load dashboard: %s", exc, exc_info=True
        )
        return render_template(
            "activity/dashboard_tab.html",
            error=_("Failed to load dashboard data"),
            stats={},
            days=7,
        )
|
||||
|
||||
|
||||
@activity_bp.route("/history")
@login_required
def history_tab():
    """Render the history tab, offering verified servers as filter options."""
    try:
        # Only verified servers are offered for filtering; without a db
        # (test fallback) the list is simply empty.
        verified_servers = (
            db.session.query(MediaServer).filter_by(verified=True).all()
            if db is not None
            else []
        )
        return render_template("activity/history_tab.html", servers=verified_servers)
    except Exception as exc:
        structlog.get_logger(__name__).error(
            "Failed to load history tab: %s", exc, exc_info=True
        )
        return render_template(
            "activity/history_tab.html",
            error=_("Failed to load history data"),
            servers=[],
        )
|
||||
|
||||
|
||||
@activity_bp.route("/grid")
@login_required
def activity_grid():
    """Render the paginated activity table fragment.

    Query parameters: ``page`` (default 1), ``limit`` (default 20),
    ``days`` (absent or 0 means "all time"), plus optional ``server_id``,
    ``user_name`` and ``media_type`` filters.
    """
    try:
        svc = ActivityService()

        # Pagination / filter parameters.
        page = int(request.args.get("page", 1))
        per_page = int(request.args.get("limit", 20))  # table view: small pages
        window_days = request.args.get("days", type=int)  # None -> all data
        server_id = request.args.get("server_id", type=int)
        user_name = request.args.get("user_name")
        media_type = request.args.get("media_type")

        # A missing or zero day window means no lower date bound
        # (the history tab default); otherwise filter from N days ago.
        since = (
            None
            if not window_days
            else datetime.now(UTC) - timedelta(days=window_days)
        )

        query = ActivityQuery(
            server_ids=[server_id] if server_id else None,
            user_names=[user_name] if user_name else None,
            media_types=[media_type] if media_type else None,
            start_date=since,
            limit=per_page,
            offset=(page - 1) * per_page,
            order_by="started_at",
            order_direction="desc",
        )

        sessions, total_count = svc.get_activity_sessions(query)

        # Ceiling division for the page count.
        page_count = (total_count + per_page - 1) // per_page

        return render_template(
            "activity/_activity_table.html",
            sessions=sessions,
            page=page,
            has_next=page < page_count,
            has_prev=page > 1,
            total_count=total_count,
            total_pages=page_count,
        )

    except Exception as exc:
        structlog.get_logger(__name__).error(
            "Failed to load activity grid: %s", exc, exc_info=True
        )
        return render_template(
            "activity/_activity_table.html",
            sessions=[],
            error=_("Failed to load activity data"),
        )
|
||||
|
||||
|
||||
@activity_bp.route("/stats")
@login_required
def activity_stats():
    """Return aggregate activity statistics for the last ``days`` days as JSON."""
    try:
        window_days = int(request.args.get("days", 7))
        return jsonify(ActivityService().get_activity_stats(days=window_days))
    except Exception as exc:
        structlog.get_logger(__name__).error(
            "Failed to get activity stats: %s", exc, exc_info=True
        )
        return jsonify({"error": _("Failed to get activity statistics")}), 500
|
||||
|
||||
|
||||
@activity_bp.route("/session/<int:session_id>")
@login_required
def activity_session(session_id):
    """Return one activity session's details as JSON.

    Responds 404 (via ``get_or_404``) for an unknown id, 500 when the
    database is unavailable or the lookup fails unexpectedly.
    """
    from werkzeug.exceptions import HTTPException

    try:
        if db is None:
            return jsonify({"error": _("Database not available")}), 500

        session = db.session.query(ActivitySession).get_or_404(session_id)

        return jsonify(session.to_dict())

    except HTTPException:
        # get_or_404 aborts with an HTTPException (404); previously this was
        # swallowed by the generic handler below and surfaced as a 500.
        raise
    except Exception as e:
        logger = structlog.get_logger(__name__)
        logger.error(
            "Failed to get session %s: %s",
            session_id,
            e,
            exc_info=True,
        )
        return jsonify({"error": _("Failed to get session details")}), 500
|
||||
|
||||
|
||||
@activity_bp.route("/export")
@login_required
def activity_export():
    """Export recent activity data as CSV (default) or JSON.

    Query parameters:
        format: "csv" (default) or "json".
        days: look-back window in days (default 30).
        server_id, user_name: optional filters.
    """
    try:
        activity_service = ActivityService()

        # Get query parameters
        format_type = request.args.get("format", "csv").lower()
        days = int(request.args.get("days", 30))
        server_id = request.args.get("server_id", type=int)
        user_name = request.args.get("user_name")

        # Build query
        query = ActivityQuery(
            server_ids=[server_id] if server_id else None,
            user_names=[user_name] if user_name else None,
            start_date=datetime.now(UTC) - timedelta(days=days),
            order_by="started_at",
            order_direction="desc",
        )

        # BUGFIX: the discarded count must NOT be bound to `_` — doing so
        # made `_` a local variable shadowing the gettext alias, so the
        # translated error message in the except handler below raised
        # UnboundLocalError / "int is not callable" instead of rendering.
        sessions, _total_count = activity_service.get_activity_sessions(query)

        if format_type == "json":
            return jsonify([session.to_dict() for session in sessions])

        # CSV export
        import csv
        import io

        from flask import Response

        output = io.StringIO()
        writer = csv.writer(output)

        # Header row — keep the column order stable; consumers may rely on it.
        writer.writerow(
            [
                "Session ID",
                "User Name",
                "Media Title",
                "Media Type",
                "Started At",
                "Duration (minutes)",
                "Device Name",
                "Client Name",
                "Server ID",
            ]
        )

        # Write data rows in the same column order as the header.
        for session in sessions:
            writer.writerow(
                [
                    session.session_id,
                    session.user_name,
                    session.media_title,
                    session.media_type,
                    session.started_at.isoformat() if session.started_at else "",
                    session.duration_minutes,
                    session.device_name,
                    session.client_name,
                    session.server_id,
                ]
            )

        output.seek(0)
        return Response(
            output.getvalue(),
            mimetype="text/csv",
            headers={
                "Content-Disposition": f"attachment; filename=activity_export_{days}days.csv"
            },
        )

    except Exception as e:
        logger = structlog.get_logger(__name__)
        logger.error("Failed to export activity data: %s", e, exc_info=True)
        return (
            jsonify({"error": _("Failed to export activity data")}),
            500,
        )
|
||||
|
||||
|
||||
@activity_bp.route("/settings", methods=["GET", "POST"])
@login_required
def activity_settings():
    """Activity monitoring settings.

    GET renders the settings page (or the HTMX tab fragment) with the
    current monitor status; POST dispatches one maintenance action
    ("restart_monitoring", "cleanup_old_data", "end_stale_sessions")
    and returns a JSON result.
    """
    if request.method == "GET":
        try:
            # BUGFIX: app.extensions is a plain dict (init_app stores the
            # monitor under the "activity_monitor" key), so it must be read
            # with dict access — getattr() on a dict always returned the
            # default and reported monitoring as disabled.
            monitor = current_app.extensions.get("activity_monitor")
            status = {
                "monitoring_enabled": monitor is not None,
                "connection_status": monitor.get_connection_status() if monitor else {},
            }

            template = (
                "activity/settings_tab.html"
                if request.headers.get("HX-Request")
                else "activity/settings.html"
            )
            return render_template(template, status=status)

        except Exception as e:
            logger = structlog.get_logger(__name__)
            logger.error("Failed to load activity settings: %s", e, exc_info=True)
            template = (
                "activity/settings_tab.html"
                if request.headers.get("HX-Request")
                else "activity/settings.html"
            )
            return render_template(template, error=_("Failed to load settings"))

    else:  # POST
        try:
            action = request.form.get("action")

            if action == "restart_monitoring":
                # Same dict-access fix as above: getattr() never found the
                # monitor, so restarts silently reported "not available".
                monitor = current_app.extensions.get("activity_monitor")
                if monitor:
                    monitor.stop_monitoring()
                    monitor.start_monitoring()
                    return jsonify(
                        {"success": True, "message": _("Monitoring restarted")}
                    )
                return jsonify(
                    {"success": False, "message": _("Monitor not available")}
                )

            if action == "cleanup_old_data":
                activity_service = ActivityService()
                retention_days = int(request.form.get("retention_days", 90))
                deleted_count = activity_service.cleanup_old_activity(retention_days)
                return jsonify(
                    {
                        "success": True,
                        "message": _("Cleaned up {} old activity records").format(
                            deleted_count
                        ),
                    }
                )

            if action == "end_stale_sessions":
                activity_service = ActivityService()
                timeout_hours = int(request.form.get("timeout_hours", 24))
                ended_count = activity_service.end_stale_sessions(timeout_hours)
                return jsonify(
                    {
                        "success": True,
                        "message": _("Ended {} stale sessions").format(ended_count),
                    }
                )

            return jsonify({"success": False, "message": _("Unknown action")})

        except Exception as e:
            logger = structlog.get_logger(__name__)
            logger.error("Failed to update activity settings: %s", e, exc_info=True)
            return jsonify(
                {"success": False, "message": _("Failed to update settings")}
            ), 500
|
||||
65
app/activity/domain/models.py
Normal file
65
app/activity/domain/models.py
Normal file
@@ -0,0 +1,65 @@
|
||||
"""
|
||||
Activity monitoring models for Wizarr.
|
||||
|
||||
Tracks media playback sessions and real-time snapshots for comprehensive
|
||||
activity monitoring and historical analysis.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
|
||||
@dataclass
|
||||
class ActivityEvent:
|
||||
"""Data transfer object for activity events before persistence."""
|
||||
|
||||
event_type: str # session_start, session_end, session_progress, session_pause, session_resume
|
||||
server_id: int
|
||||
session_id: str
|
||||
user_name: str
|
||||
media_title: str
|
||||
timestamp: datetime | None = None
|
||||
user_id: str | None = None
|
||||
media_type: str | None = None
|
||||
media_id: str | None = None
|
||||
series_name: str | None = None
|
||||
season_number: int | None = None
|
||||
episode_number: int | None = None
|
||||
duration_ms: int | None = None
|
||||
position_ms: int | None = None
|
||||
device_name: str | None = None
|
||||
client_name: str | None = None
|
||||
ip_address: str | None = None
|
||||
platform: str | None = None
|
||||
player_version: str | None = None
|
||||
state: str | None = None # playing, paused, stopped
|
||||
transcoding_info: dict[str, Any] | None = None
|
||||
metadata: dict[str, Any] | None = None
|
||||
artwork_url: str | None = None
|
||||
thumbnail_url: str | None = None
|
||||
bandwidth_kbps: int | None = None
|
||||
quality: str | None = None
|
||||
subtitle_stream: str | None = None
|
||||
audio_stream: str | None = None
|
||||
|
||||
def __post_init__(self):
|
||||
if self.timestamp is None:
|
||||
self.timestamp = datetime.now(UTC)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ActivityQuery:
|
||||
"""Data transfer object for activity queries."""
|
||||
|
||||
server_ids: list[int] | None = None
|
||||
user_names: list[str] | None = None
|
||||
media_types: list[str] | None = None
|
||||
start_date: datetime | None = None
|
||||
end_date: datetime | None = None
|
||||
active_only: bool = False
|
||||
include_snapshots: bool = False
|
||||
limit: int | None = None
|
||||
offset: int | None = None
|
||||
order_by: str = "started_at"
|
||||
order_direction: str = "desc"
|
||||
24
app/activity/monitoring/collectors/__init__.py
Normal file
24
app/activity/monitoring/collectors/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
||||
"""
|
||||
Activity collectors for different media server types.
|
||||
|
||||
Provides specialized collectors for real-time activity monitoring:
|
||||
- PlexCollector: Uses PlexAPI AlertListener for real-time events
|
||||
- JellyfinCollector: Uses WebSocket API for real-time events
|
||||
- EmbyCollector: Uses WebSocket API for real-time events
|
||||
- AudiobookshelfCollector: Uses Socket.IO for real-time events
|
||||
- PollingCollector: Fallback polling for other server types
|
||||
"""
|
||||
|
||||
from .audiobookshelf import AudiobookshelfCollector
|
||||
from .emby import EmbyCollector
|
||||
from .jellyfin import JellyfinCollector
|
||||
from .plex import PlexCollector
|
||||
from .polling import PollingCollector
|
||||
|
||||
__all__ = [
|
||||
"PlexCollector",
|
||||
"JellyfinCollector",
|
||||
"EmbyCollector",
|
||||
"AudiobookshelfCollector",
|
||||
"PollingCollector",
|
||||
]
|
||||
203
app/activity/monitoring/collectors/audiobookshelf.py
Normal file
203
app/activity/monitoring/collectors/audiobookshelf.py
Normal file
@@ -0,0 +1,203 @@
|
||||
"""
|
||||
Audiobookshelf activity collector using REST API polling for session monitoring.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from ...domain.models import ActivityEvent
|
||||
from ..monitor import BaseCollector
|
||||
|
||||
|
||||
class AudiobookshelfCollector(BaseCollector):
    """Audiobookshelf activity collector using REST API polling for session monitoring.

    Sessions are diffed between polls: ids seen for the first time emit
    ``session_start``, ids still present emit ``session_progress``, and ids
    that disappear emit ``session_end`` using the last cached payload.
    """

    def __init__(self, server, event_callback):
        super().__init__(server, event_callback)
        # Last known raw payload per session id; used to emit session_end
        # after a session vanishes from the poll results.
        self.active_sessions: dict[str, dict[str, Any]] = {}
        self.last_seen_sessions: set[str] = set()  # Track session IDs we've seen
        self.poll_interval = 30  # Poll every 30 seconds

    def _collect_loop(self):
        """Main collection loop using Audiobookshelf REST API polling."""
        self.logger.info(
            f"Starting Audiobookshelf REST API polling for {self.server.name}"
        )

        while self.running and not self._stop_event.is_set():
            try:
                # Get current sessions from API
                client = self._get_media_client()
                if client:
                    sessions = client.now_playing()
                    self._process_sessions(sessions)
                else:
                    self.logger.warning("No media client available")

                # Wait for next poll interval
                self._stop_event.wait(self.poll_interval)

            except Exception as e:
                self.logger.error(f"Audiobookshelf polling error: {e}")
                self.error_count += 1

                # Wait longer on error
                self._stop_event.wait(min(60, self.poll_interval * 2))

    def _process_sessions(self, sessions):
        """Process current sessions from REST API.

        Diffs the polled session ids against the previously seen set and
        emits start / progress / end events accordingly.
        """
        self.logger.debug(f"Processing {len(sessions)} sessions from API")

        if not sessions:
            # No active sessions - check for session ends
            self._handle_no_active_sessions()
            return

        # Log first session for debugging
        if sessions:
            self.logger.info(
                f"Sample session data: {sessions[0]}"
            )  # Changed to info level for debugging

        current_session_ids = set()

        for session_data in sessions:
            try:
                session_id = session_data.get("session_id")
                if not session_id:
                    self.logger.warning(f"Session missing session_id: {session_data}")
                    continue

                current_session_ids.add(session_id)

                # Check if this is a new session
                is_new_session = session_id not in self.last_seen_sessions

                if is_new_session:
                    self.logger.info(
                        f"🎬 New session started: {session_id} for user {session_data.get('user_name', 'Unknown')}"
                    )
                    self._emit_session_event(session_data, "session_start")
                    self.last_seen_sessions.add(session_id)
                else:
                    # Existing session - emit progress event
                    self._emit_session_event(session_data, "session_progress")

                # Update our tracking
                self.active_sessions[session_id] = session_data

            except Exception as e:
                self.logger.error(f"Error processing session: {e}")

        # Check for ended sessions
        ended_sessions = self.last_seen_sessions - current_session_ids
        for session_id in ended_sessions:
            self.logger.info(f"🛑 Session ended: {session_id}")

            # Get the last known session data if available
            if session_id in self.active_sessions:
                session_data = self.active_sessions.pop(session_id)
                self._emit_session_event(session_data, "session_end")

            self.last_seen_sessions.discard(session_id)

    def _handle_no_active_sessions(self):
        """Handle the case where there are no active sessions."""
        # If we had active sessions before and now we don't, they ended
        for session_id in list(self.last_seen_sessions):
            self.logger.info(f"🛑 Session ended (no active sessions): {session_id}")

            if session_id in self.active_sessions:
                session_data = self.active_sessions.pop(session_id)
                self._emit_session_event(session_data, "session_end")

        self.last_seen_sessions.clear()
        self.active_sessions.clear()

    def _extract_session_data_from_polling(
        self, session_data: dict[str, Any]
    ) -> dict[str, Any]:
        """Extract and standardize session data from REST API polling response.

        Returns an empty dict on any extraction error so callers can bail out.
        """
        try:
            metadata = session_data.get("metadata", {}) or {}
            # Prefer human-friendly usernames when available
            user_name = (
                session_data.get("user_name")
                or metadata.get("username")
                or metadata.get("user_name")
                or metadata.get("display_name")
                or session_data.get("user_id")
                or "Unknown"
            )

            # The now_playing() method returns session data with these field names
            return {
                "session_id": session_data.get("session_id", ""),
                "user_name": user_name,
                "user_id": session_data.get(
                    "user_id"
                ),  # This might not be in the response
                "media_title": session_data.get("media_title", "Unknown"),
                "media_type": session_data.get("media_type", "audiobook"),
                "media_id": session_data.get("media_id"),
                "series_name": session_data.get("series_name"),
                "duration_ms": session_data.get("duration_ms", 0),
                "position_ms": session_data.get("position_ms", 0),
                "device_name": session_data.get("device_name", "Unknown Device"),
                "client_name": session_data.get("client", "Audiobookshelf"),
                "state": session_data.get("state", "playing"),
                "artwork_url": session_data.get("artwork_url"),
                "thumbnail_url": session_data.get("thumbnail_url"),
                "metadata": {
                    "audiobookshelf_session_id": session_data.get("session_id", ""),
                    "transcoding": session_data.get("transcoding", {}),
                    **metadata,
                },
            }
        except Exception as e:
            self.logger.error(f"Error extracting session data: {e}")
            return {}

    def _emit_session_event(self, session_data: dict[str, Any], event_type: str):
        """Convert session data to ActivityEvent and emit."""
        try:
            # Extract data from polling response or use standardized format
            extracted_data = self._extract_session_data_from_polling(session_data)

            if not extracted_data:
                self.logger.warning("No extracted data available for event emission")
                return

            event = ActivityEvent(
                event_type=event_type,
                server_id=self.server.id,
                session_id=extracted_data["session_id"],
                user_name=extracted_data["user_name"],
                media_title=extracted_data["media_title"],
                timestamp=datetime.now(UTC),
                user_id=extracted_data.get("user_id"),
                media_type=extracted_data.get("media_type"),
                media_id=extracted_data.get("media_id"),
                series_name=extracted_data.get("series_name"),
                duration_ms=extracted_data.get("duration_ms"),
                position_ms=extracted_data.get("position_ms"),
                device_name=extracted_data.get("device_name"),
                client_name=extracted_data.get("client_name"),
                state=extracted_data.get("state"),
                artwork_url=extracted_data.get("artwork_url"),
                thumbnail_url=extracted_data.get("thumbnail_url"),
                metadata=extracted_data.get("metadata"),
            )

            self.logger.info(
                f"📤 About to emit event: server_id={self.server.id}, server_name={self.server.name}, user={extracted_data['user_name']}, media={extracted_data['media_title']}"
            )
            self._emit_event(event)
            self.logger.info(
                f"✅ Emitted {event_type} event for session {extracted_data['session_id']}"
            )

        except Exception as e:
            self.logger.error(
                f"Failed to emit Audiobookshelf session event: {e}", exc_info=True
            )
|
||||
458
app/activity/monitoring/collectors/emby.py
Normal file
458
app/activity/monitoring/collectors/emby.py
Normal file
@@ -0,0 +1,458 @@
|
||||
"""
|
||||
Emby activity collector using WebSocket API for real-time monitoring.
|
||||
|
||||
Connects to Emby's WebSocket endpoint to receive real-time notifications
|
||||
about playback events. Uses similar approach to Jellyfin since Emby
|
||||
and Jellyfin share similar APIs.
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
import websocket # websocket-client package
|
||||
|
||||
WEBSOCKET_AVAILABLE = True
|
||||
except ImportError:
|
||||
WEBSOCKET_AVAILABLE = False
|
||||
|
||||
from ...domain.models import ActivityEvent
|
||||
from ..monitor import BaseCollector
|
||||
|
||||
|
||||
class EmbyCollector(BaseCollector):
|
||||
"""Emby activity collector using WebSocket for real-time events."""
|
||||
|
||||
def __init__(self, server, event_callback):
|
||||
super().__init__(server, event_callback)
|
||||
self.ws: websocket.WebSocketApp | None = None
|
||||
self.authenticated = False
|
||||
self.active_sessions: dict[str, dict[str, Any]] = {}
|
||||
self.last_ping = datetime.now(UTC)
|
||||
|
||||
def _collect_loop(self):
|
||||
"""Main collection loop using Emby WebSocket."""
|
||||
if not WEBSOCKET_AVAILABLE:
|
||||
self.logger.error("websocket-client not available, falling back to polling")
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
retry_count = 0
|
||||
max_retries = 5
|
||||
|
||||
while (
|
||||
self.running and not self._stop_event.is_set() and retry_count < max_retries
|
||||
):
|
||||
try:
|
||||
self._connect_websocket()
|
||||
retry_count = 0 # Reset on successful connection
|
||||
|
||||
# Keep connection alive
|
||||
while self.running and not self._stop_event.is_set():
|
||||
if self.ws and not self.ws.sock:
|
||||
self.logger.warning("WebSocket connection lost, reconnecting")
|
||||
break
|
||||
|
||||
# Send ping periodically
|
||||
if (datetime.now(UTC) - self.last_ping).total_seconds() > 30:
|
||||
self._send_ping()
|
||||
self.last_ping = datetime.now(UTC)
|
||||
|
||||
self._stop_event.wait(5)
|
||||
|
||||
except Exception as e:
|
||||
retry_count += 1
|
||||
self.logger.error(
|
||||
f"Emby WebSocket error: {e}, retry {retry_count}/{max_retries}"
|
||||
)
|
||||
self.error_count += 1
|
||||
|
||||
if retry_count < max_retries:
|
||||
wait_time = min(60, 5 * retry_count) # Exponential backoff, max 60s
|
||||
self._stop_event.wait(wait_time)
|
||||
|
||||
if retry_count >= max_retries:
|
||||
self.logger.error("Max retries reached, falling back to polling")
|
||||
self._fallback_to_polling()
|
||||
|
||||
def _connect_websocket(self):
|
||||
"""Connect to Emby WebSocket endpoint."""
|
||||
try:
|
||||
# Build WebSocket URL - Emby uses similar format to Jellyfin
|
||||
base_url = self.server.server_url.rstrip("/")
|
||||
ws_url = base_url.replace("http://", "ws://").replace("https://", "wss://")
|
||||
ws_url += f"/embywebsocket?api_key={self.server.server_api_key}"
|
||||
|
||||
self.logger.info(f"Connecting to Emby WebSocket: {ws_url}")
|
||||
|
||||
self.ws = websocket.WebSocketApp(
|
||||
ws_url,
|
||||
on_open=self._on_open,
|
||||
on_message=self._on_message,
|
||||
on_error=self._on_error,
|
||||
on_close=self._on_close,
|
||||
)
|
||||
|
||||
# Run WebSocket in this thread
|
||||
self.ws.run_forever(ping_interval=30, ping_timeout=10)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to connect to Emby WebSocket: {e}")
|
||||
raise
|
||||
|
||||
def _on_open(self, ws):
|
||||
"""Handle WebSocket connection opened."""
|
||||
self.logger.info(f"Connected to Emby WebSocket for {self.server.server_name}")
|
||||
|
||||
# Subscribe to session events
|
||||
self._subscribe_to_sessions()
|
||||
|
||||
def _on_message(self, ws, message):
|
||||
"""Handle WebSocket message."""
|
||||
try:
|
||||
data = json.loads(message)
|
||||
message_type = data.get("MessageType")
|
||||
|
||||
if message_type == "Sessions":
|
||||
self._handle_sessions_message(data)
|
||||
elif message_type == "PlaybackStart":
|
||||
self._handle_playback_start(data)
|
||||
elif message_type == "PlaybackProgress":
|
||||
self._handle_playback_progress(data)
|
||||
elif message_type == "PlaybackStopped":
|
||||
self._handle_playback_stopped(data)
|
||||
else:
|
||||
self.logger.debug(f"Ignoring message type: {message_type}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error handling Emby WebSocket message: {e}", exc_info=True
|
||||
)
|
||||
self.error_count += 1
|
||||
|
||||
def _on_error(self, ws, error):
    """Handle WebSocket error: log it and bump the collector's error count."""
    self.logger.error(f"Emby WebSocket error: {error}")
    # error_count lets the surrounding monitor judge collector health.
    self.error_count += 1
|
||||
|
||||
def _on_close(self, ws, close_status_code, close_msg):
    """Handle WebSocket connection closed; reconnection is handled elsewhere."""
    self.logger.info(f"Emby WebSocket closed: {close_status_code} - {close_msg}")
|
||||
|
||||
def _subscribe_to_sessions(self):
    """Ask Emby to push session updates over the open WebSocket.

    Sends a SessionsStart subscription message; a no-op when the socket
    is not currently connected.
    """
    try:
        connection = self.ws
        if not (connection and connection.sock):
            return

        payload = {
            "MessageType": "SessionsStart",
            "Data": "100,100",  # Request session updates every 100ms
        }
        connection.send(json.dumps(payload))
        self.logger.debug("Subscribed to Emby session events")

    except Exception as e:
        self.logger.error(f"Failed to subscribe to sessions: {e}")
|
||||
|
||||
def _send_ping(self):
    """Send a KeepAlive message when the WebSocket is connected."""
    try:
        connection = self.ws
        if connection and connection.sock:
            connection.send(json.dumps({"MessageType": "KeepAlive"}))

    except Exception as e:
        self.logger.error(f"Failed to send ping: {e}")
|
||||
|
||||
def _handle_sessions_message(self, data):
    """Handle a Sessions message carrying the full current session list.

    Diffs the incoming list against ``self.active_sessions`` and emits:
    - session_start for new sessions that have a NowPlayingItem,
    - session_pause / session_resume when IsPaused toggles,
    - session_progress otherwise,
    - session_end for locally-known sessions absent from this update.
    """
    try:
        sessions_data = data.get("Data", [])
        if not isinstance(sessions_data, list):
            return

        # Ids seen in this update; used below to detect ended sessions.
        current_session_ids = set()

        for session_data in sessions_data:
            session_id = session_data.get("Id")
            if not session_id:
                continue

            current_session_ids.add(session_id)

            # Check if this is a new session or has significant changes
            if session_id not in self.active_sessions:
                # New session. Idle sessions (no NowPlayingItem) are
                # tracked but do not emit a start event yet.
                self.active_sessions[session_id] = session_data
                if session_data.get("NowPlayingItem"):
                    self._emit_session_event(session_data, "session_start")
            else:
                # Check for significant changes; always keep latest snapshot.
                old_session = self.active_sessions[session_id]
                self.active_sessions[session_id] = session_data

                # Check for state changes (pause flag toggled)
                old_state = old_session.get("PlayState", {}).get("IsPaused", False)
                new_state = session_data.get("PlayState", {}).get("IsPaused", False)

                if old_state != new_state:
                    if new_state:
                        self._emit_session_event(session_data, "session_pause")
                    else:
                        self._emit_session_event(session_data, "session_resume")
                else:
                    # Regular progress update
                    self._emit_session_event(session_data, "session_progress")

        # Remove ended sessions (known locally, missing from this update)
        ended_sessions = set(self.active_sessions.keys()) - current_session_ids
        for session_id in ended_sessions:
            old_session = self.active_sessions.pop(session_id)
            self._emit_session_event(old_session, "session_end")

    except Exception as e:
        self.logger.error(f"Error handling sessions message: {e}", exc_info=True)
|
||||
|
||||
def _handle_playback_start(self, data):
    """Track a session from a PlaybackStart message and emit session_start."""
    try:
        payload = data.get("Data", {})
        sid = payload.get("SessionId")
        if not sid:
            return

        self.active_sessions[sid] = payload
        self._emit_session_event(payload, "session_start")

    except Exception as e:
        self.logger.error(f"Error handling playback start: {e}")
|
||||
|
||||
def _handle_playback_progress(self, data):
    """Refresh a session from a PlaybackProgress message and emit progress."""
    try:
        payload = data.get("Data", {})
        sid = payload.get("SessionId")
        if not sid:
            return

        self.active_sessions[sid] = payload
        self._emit_session_event(payload, "session_progress")

    except Exception as e:
        self.logger.error(f"Error handling playback progress: {e}")
|
||||
|
||||
def _handle_playback_stopped(self, data):
    """Close out a session from a PlaybackStopped message and emit session_end.

    The event carries the last locally-tracked snapshot of the session,
    not the (sparser) stop-message payload.
    """
    try:
        sid = data.get("Data", {}).get("SessionId")
        if sid and sid in self.active_sessions:
            # Distinct name avoids shadowing the incoming payload.
            ended = self.active_sessions.pop(sid)
            self._emit_session_event(ended, "session_end")

    except Exception as e:
        self.logger.error(f"Error handling playback stopped: {e}")
|
||||
|
||||
def _emit_session_event(self, session_data: dict[str, Any], event_type: str):
    """Convert raw Emby session data to an ActivityEvent and emit it.

    Sessions that cannot be normalized (no id / nothing playing) are
    silently skipped; any other failure is logged with traceback.
    """
    try:
        fields = self._extract_session_data(session_data)
        if not fields:
            return

        # _extract_session_data returns exactly the per-session keyword
        # fields of ActivityEvent; only the event envelope is added here.
        event = ActivityEvent(
            event_type=event_type,
            server_id=self.server.id,
            timestamp=datetime.now(UTC),
            **fields,
        )
        self._emit_event(event)

    except Exception as e:
        self.logger.error(f"Failed to emit Emby session event: {e}", exc_info=True)
|
||||
|
||||
def _extract_session_data(
|
||||
self, session_data: dict[str, Any]
|
||||
) -> dict[str, Any] | None:
|
||||
"""Extract relevant data from Emby session."""
|
||||
try:
|
||||
session_id = session_data.get("Id")
|
||||
if not session_id:
|
||||
return None
|
||||
|
||||
# User info
|
||||
user_info = session_data.get("UserName", "Unknown")
|
||||
user_id = session_data.get("UserId")
|
||||
|
||||
# Now playing item
|
||||
now_playing = session_data.get("NowPlayingItem", {})
|
||||
if not now_playing:
|
||||
return None
|
||||
|
||||
media_title = now_playing.get("Name", "Unknown")
|
||||
media_type = now_playing.get("Type", "unknown").lower()
|
||||
media_id = now_playing.get("Id")
|
||||
|
||||
# Series info for episodes
|
||||
series_name = None
|
||||
season_number = None
|
||||
episode_number = None
|
||||
if media_type == "episode":
|
||||
series_name = now_playing.get("SeriesName")
|
||||
season_number = now_playing.get("ParentIndexNumber")
|
||||
episode_number = now_playing.get("IndexNumber")
|
||||
|
||||
# Timing info
|
||||
play_state = session_data.get("PlayState", {})
|
||||
duration_ms = (
|
||||
now_playing.get("RunTimeTicks", 0) // 10000
|
||||
) # Convert from ticks
|
||||
position_ms = play_state.get("PositionTicks", 0) // 10000
|
||||
|
||||
# Device info
|
||||
device_name = session_data.get("DeviceName", "Unknown")
|
||||
client_name = session_data.get("Client", "Unknown")
|
||||
app_version = session_data.get("ApplicationVersion")
|
||||
|
||||
# Network info
|
||||
ip_address = session_data.get("RemoteEndPoint")
|
||||
|
||||
# State
|
||||
is_paused = play_state.get("IsPaused", False)
|
||||
state = "paused" if is_paused else "playing"
|
||||
|
||||
# Transcoding info - Emby structure might be slightly different
|
||||
transcoding_info = {}
|
||||
transcode_info = session_data.get("TranscodingInfo")
|
||||
if transcode_info:
|
||||
transcoding_info = {
|
||||
"is_transcoding": True,
|
||||
"video_codec": transcode_info.get("VideoCodec"),
|
||||
"audio_codec": transcode_info.get("AudioCodec"),
|
||||
"container": transcode_info.get("Container"),
|
||||
"video_resolution": f"{transcode_info.get('Width', 0)}x{transcode_info.get('Height', 0)}",
|
||||
"transcoding_speed": transcode_info.get("TranscodingSpeed"),
|
||||
}
|
||||
else:
|
||||
# Check if direct playing
|
||||
play_method = play_state.get("PlayMethod")
|
||||
transcoding_info = {
|
||||
"is_transcoding": False,
|
||||
"direct_play": play_method == "DirectPlay",
|
||||
"direct_stream": play_method == "DirectStream",
|
||||
}
|
||||
|
||||
# Artwork - Emby uses similar image system to Jellyfin
|
||||
artwork_url = None
|
||||
thumbnail_url = None
|
||||
if now_playing.get("ImageTags", {}).get("Primary"):
|
||||
base_url = self.server.server_url.rstrip("/")
|
||||
item_id = now_playing.get("Id")
|
||||
artwork_url = f"{base_url}/emby/Items/{item_id}/Images/Primary"
|
||||
thumbnail_url = f"{base_url}/emby/Items/{item_id}/Images/Primary?maxHeight=300&maxWidth=300"
|
||||
|
||||
return {
|
||||
"session_id": session_id,
|
||||
"user_name": user_info,
|
||||
"user_id": user_id,
|
||||
"media_title": media_title,
|
||||
"media_type": media_type,
|
||||
"media_id": media_id,
|
||||
"series_name": series_name,
|
||||
"season_number": season_number,
|
||||
"episode_number": episode_number,
|
||||
"duration_ms": duration_ms,
|
||||
"position_ms": position_ms,
|
||||
"device_name": device_name,
|
||||
"client_name": client_name,
|
||||
"ip_address": ip_address,
|
||||
"platform": session_data.get("DeviceType"),
|
||||
"player_version": app_version,
|
||||
"state": state,
|
||||
"transcoding_info": transcoding_info,
|
||||
"artwork_url": artwork_url,
|
||||
"thumbnail_url": thumbnail_url,
|
||||
"metadata": {
|
||||
"emby_session_id": session_id,
|
||||
"emby_item_id": media_id,
|
||||
"play_method": play_state.get("PlayMethod"),
|
||||
},
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Failed to extract Emby session data: {e}", exc_info=True
|
||||
)
|
||||
return None
|
||||
|
||||
def _fallback_to_polling(self):
    """Poll the server for now-playing sessions when WebSocket is unavailable.

    Loops until the collector is stopped; polls every 30 seconds and backs
    off to 60 seconds after an error.
    """
    self.logger.info("Using polling fallback for Emby server")

    while self.running and not self._stop_event.is_set():
        try:
            media_client = self._get_media_client()
            if media_client:
                self._process_polling_sessions(media_client.now_playing())

            # Poll every 30 seconds
            self._stop_event.wait(30)

        except Exception as e:
            self.logger.error(f"Polling error: {e}")
            self._stop_event.wait(60)  # Wait longer on error
|
||||
|
||||
def _process_polling_sessions(self, sessions):
    """Emit a session_progress ActivityEvent for each polled session dict."""
    for raw in sessions:
        try:
            event = ActivityEvent(
                event_type="session_progress",
                server_id=self.server.id,
                session_id=raw.get("session_id", ""),
                user_name=raw.get("user_name", "Unknown"),
                media_title=raw.get("media_title", "Unknown"),
                timestamp=datetime.now(UTC),
                media_type=raw.get("media_type"),
                duration_ms=raw.get("duration_ms"),
                position_ms=raw.get("position_ms"),
                device_name=raw.get("device_name"),
                client_name=raw.get("client"),
                state=raw.get("state", "playing"),
            )
            self._emit_event(event)

        except Exception as e:
            self.logger.error(f"Failed to process polling session: {e}")
|
||||
|
||||
def stop(self):
    """Stop the Emby collector and tear down any open WebSocket connection."""
    super().stop()

    if not self.ws:
        return
    try:
        self.ws.close()
    except Exception as e:
        self.logger.error(f"Error closing WebSocket: {e}")
|
||||
143
app/activity/monitoring/collectors/jellyfin.py
Normal file
143
app/activity/monitoring/collectors/jellyfin.py
Normal file
@@ -0,0 +1,143 @@
|
||||
"""
|
||||
Jellyfin activity collector using Sessions API polling.
|
||||
|
||||
Polls Jellyfin's Sessions API to monitor active playback sessions.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from ...domain.models import ActivityEvent
|
||||
from ..monitor import BaseCollector
|
||||
|
||||
|
||||
class JellyfinCollector(BaseCollector):
    """Jellyfin activity collector using Sessions API polling.

    Keeps a local snapshot of active sessions and emits start / pause /
    resume / progress / end events by diffing each poll against the
    previous one.
    """

    def __init__(self, server, event_callback):
        super().__init__(server, event_callback)
        # session_id -> last-seen normalized session dict from now_playing()
        self.active_sessions: dict[str, dict[str, Any]] = {}

    def _collect_loop(self):
        """Main collection loop using Jellyfin Sessions API polling.

        Polls every 10 seconds; on error, logs, bumps the error counter and
        backs off to 30 seconds before retrying.
        """
        self.logger.info("Starting Jellyfin Sessions API polling")

        while self.running and not self._stop_event.is_set():
            try:
                client = self._get_media_client()
                if client:
                    self.logger.debug("Polling Jellyfin Sessions API...")
                    sessions = client.now_playing()

                    if sessions:
                        self.logger.info(f"Found {len(sessions)} active sessions")
                        for i, session in enumerate(sessions):
                            self.logger.debug(
                                f"Session {i + 1}: {session.get('user_name', 'Unknown')} - {session.get('media_title', 'Unknown')}"
                            )
                    else:
                        self.logger.debug("No active sessions found")

                    self._process_sessions(sessions)
                else:
                    self.logger.warning("Failed to get media client for polling")

                # Poll every 10 seconds for responsive monitoring
                self._stop_event.wait(10)

            except Exception as e:
                self.logger.error(f"Jellyfin API polling error: {e}", exc_info=True)
                self.error_count += 1
                self._stop_event.wait(30)  # Wait longer on error

    def _process_sessions(self, sessions):
        """Process sessions from Jellyfin API and emit events.

        Diffs the polled list against ``self.active_sessions``: new ids emit
        session_start, a changed 'state' field emits pause/resume, unchanged
        sessions emit progress, and ids no longer present emit session_end.
        """
        if not sessions:
            # All sessions ended
            for session_id in list(self.active_sessions.keys()):
                old_session = self.active_sessions.pop(session_id)
                self._emit_session_event(old_session, "session_end")
            return

        # Ids seen in this poll; used below to detect ended sessions.
        current_session_ids = set()

        for session_data in sessions:
            try:
                session_id = session_data.get("session_id", "")
                if not session_id:
                    continue

                current_session_ids.add(session_id)

                # Check if this is a new session or has changes
                if session_id not in self.active_sessions:
                    # New session
                    self.active_sessions[session_id] = session_data
                    self._emit_session_event(session_data, "session_start")
                else:
                    # Check for state changes; always keep latest snapshot.
                    old_session = self.active_sessions[session_id]
                    self.active_sessions[session_id] = session_data

                    old_state = old_session.get("state", "playing")
                    new_state = session_data.get("state", "playing")

                    if old_state != new_state:
                        if new_state == "paused":
                            self._emit_session_event(session_data, "session_pause")
                        else:
                            self._emit_session_event(session_data, "session_resume")
                    else:
                        # Regular progress update
                        self._emit_session_event(session_data, "session_progress")

            except Exception as e:
                self.logger.error(f"Failed to process session: {e}", exc_info=True)

        # Remove ended sessions (known locally, absent from this poll)
        ended_sessions = set(self.active_sessions.keys()) - current_session_ids
        for session_id in ended_sessions:
            old_session = self.active_sessions.pop(session_id)
            self._emit_session_event(old_session, "session_end")

    def _emit_session_event(self, session_data: dict[str, Any], event_type: str):
        """Convert a normalized session dict to an ActivityEvent and emit it.

        Note the client application name comes from the 'client' key of the
        normalized dict (populated by now_playing()), not 'client_name'.
        """
        try:
            event = ActivityEvent(
                event_type=event_type,
                server_id=self.server.id,
                session_id=session_data.get("session_id", ""),
                user_name=session_data.get("user_name", "Unknown"),
                media_title=session_data.get("media_title", "Unknown"),
                timestamp=datetime.now(UTC),
                user_id=session_data.get("user_id"),
                media_type=session_data.get("media_type"),
                media_id=session_data.get("media_id"),
                series_name=session_data.get("series_name"),
                season_number=session_data.get("season_number"),
                episode_number=session_data.get("episode_number"),
                duration_ms=session_data.get("duration_ms"),
                position_ms=session_data.get("position_ms"),
                device_name=session_data.get("device_name"),
                client_name=session_data.get("client"),
                ip_address=session_data.get("ip_address"),
                platform=session_data.get("platform"),
                player_version=session_data.get("player_version"),
                state=session_data.get("state", "playing"),
                transcoding_info=session_data.get("transcoding_info"),
                metadata=session_data.get("metadata"),
                artwork_url=session_data.get("artwork_url"),
                thumbnail_url=session_data.get("thumbnail_url"),
            )

            self._emit_event(event)

        except Exception as e:
            self.logger.error(
                f"Failed to emit Jellyfin session event: {e}", exc_info=True
            )

    def stop(self):
        """Stop the Jellyfin collector."""
        super().stop()
        self.logger.info("Jellyfin collector stopped")
|
||||
469
app/activity/monitoring/collectors/plex.py
Normal file
469
app/activity/monitoring/collectors/plex.py
Normal file
@@ -0,0 +1,469 @@
|
||||
"""
|
||||
Plex activity collector using AlertListener for real-time monitoring.
|
||||
|
||||
Uses PlexAPI's AlertListener to receive real-time notifications about
|
||||
playback events for efficient activity tracking.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
try:
|
||||
from plexapi.alert import AlertListener
|
||||
|
||||
PLEXAPI_AVAILABLE = True
|
||||
except ImportError:
|
||||
PLEXAPI_AVAILABLE = False
|
||||
|
||||
from ...domain.models import ActivityEvent
|
||||
from ..monitor import BaseCollector
|
||||
from ..session_manager import SessionManager
|
||||
|
||||
# Plex alert types that PlexCollector._on_alert drops before processing.
# NOTE(review): "timeline" is excluded here, which makes
# PlexCollector._handle_timeline_alert unreachable via _on_alert —
# confirm whether that handler is still needed.
EXCLUDED_ALERT_TYPES = {
    "timeline",
    "activity",
    "status",
    "progress",
    "transcodeSession.update",
    "update.statechange",
    "provider.content.change",
    "backgroundProcessingQueue",
}
|
||||
|
||||
|
||||
class PlexCollector(BaseCollector):
|
||||
"""Plex activity collector using AlertListener for real-time events."""
|
||||
|
||||
def __init__(self, server, event_callback):
|
||||
super().__init__(server, event_callback)
|
||||
self.alert_listener: AlertListener | None = None
|
||||
self.plex_server = None
|
||||
self.session_manager = SessionManager(event_callback=self._emit_event)
|
||||
|
||||
def _collect_loop(self):
|
||||
"""Main collection loop using Plex AlertListener."""
|
||||
if not PLEXAPI_AVAILABLE:
|
||||
self.logger.error("PlexAPI not available, falling back to polling")
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
try:
|
||||
self.logger.info(f"Starting Plex collector for {self.server.name}")
|
||||
|
||||
# Get Plex server instance
|
||||
self.logger.debug("Getting Plex media client...")
|
||||
client = self._get_media_client()
|
||||
if not client:
|
||||
self.logger.error("Failed to get Plex client - falling back to polling")
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
self.logger.debug(f"Got Plex client: {client}")
|
||||
|
||||
# Access the PlexServer instance from the client
|
||||
if hasattr(client, "server"):
|
||||
self.plex_server = client.server
|
||||
self.logger.debug(f"Got Plex server instance: {self.plex_server}")
|
||||
elif hasattr(client, "plex"):
|
||||
self.plex_server = client.plex
|
||||
self.logger.debug(
|
||||
f"Got Plex server from client.plex: {self.plex_server}"
|
||||
)
|
||||
else:
|
||||
self.logger.error(
|
||||
f"Cannot find Plex server instance in client: {type(client)}"
|
||||
)
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
# Test server connection
|
||||
try:
|
||||
self.logger.debug("Testing Plex server connection...")
|
||||
account = self.plex_server.account()
|
||||
self.logger.info(
|
||||
f"Connected to Plex server: {self.plex_server.friendlyName} (account: {account.title if account else 'Unknown'})"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Could not verify Plex server connection: {e}")
|
||||
|
||||
# Start AlertListener
|
||||
self.logger.info(f"Creating AlertListener for {self.server.name}")
|
||||
try:
|
||||
self.alert_listener = AlertListener(
|
||||
server=self.plex_server,
|
||||
callback=self._on_alert,
|
||||
callbackError=self._on_alert_error,
|
||||
)
|
||||
self.logger.info("AlertListener created successfully")
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create AlertListener: {e}", exc_info=True)
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
self.logger.info(f"Starting Plex AlertListener for {self.server.name}")
|
||||
try:
|
||||
self.alert_listener.start()
|
||||
self.logger.info(
|
||||
"AlertListener started successfully - WebSocket connection should be active"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to start AlertListener: {e}", exc_info=True)
|
||||
self._fallback_to_polling()
|
||||
return
|
||||
|
||||
# Keep the listener running
|
||||
self.logger.info("Entering monitoring loop...")
|
||||
loop_count = 0
|
||||
while self.running and not self._stop_event.is_set():
|
||||
# Log periodic status
|
||||
loop_count += 1
|
||||
if loop_count % 6 == 1: # Every minute (6 * 10 seconds)
|
||||
self.logger.debug(
|
||||
f"AlertListener monitoring active (events: {self.event_count}, errors: {self.error_count})"
|
||||
)
|
||||
|
||||
# Simple check - just wait and let the AlertListener handle reconnections
|
||||
self._stop_event.wait(10)
|
||||
|
||||
self.logger.info("Exiting monitoring loop")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Plex collector failed: {e}", exc_info=True)
|
||||
self.error_count += 1
|
||||
# Fallback to polling on any error
|
||||
self.logger.info("Falling back to polling due to error")
|
||||
self._fallback_to_polling()
|
||||
finally:
|
||||
if self.alert_listener:
|
||||
try:
|
||||
self.logger.info("Stopping AlertListener...")
|
||||
self.alert_listener.stop()
|
||||
self.logger.info("AlertListener stopped")
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error stopping AlertListener: {e}")
|
||||
|
||||
def stop(self):
|
||||
"""Stop the Plex collector and AlertListener."""
|
||||
super().stop()
|
||||
if self.alert_listener:
|
||||
try:
|
||||
self.alert_listener.stop()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error stopping AlertListener: {e}")
|
||||
|
||||
# Clean up session manager
|
||||
try:
|
||||
self.session_manager.cleanup_all_sessions()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error cleaning up session manager: {e}")
|
||||
|
||||
def _on_alert(self, data: dict[str, Any]):
|
||||
"""Handle Plex alert events."""
|
||||
try:
|
||||
alert_type = data.get("type")
|
||||
if alert_type in EXCLUDED_ALERT_TYPES:
|
||||
self.logger.debug(f"Skipping Plex alert type: {alert_type}")
|
||||
return
|
||||
|
||||
self.logger.info(f"📡 Received Plex alert: {data}")
|
||||
|
||||
# Use the enhanced session manager to process alerts
|
||||
success = self.session_manager.process_alert(data, self.server.id)
|
||||
if success:
|
||||
self.event_count += 1
|
||||
else:
|
||||
self.error_count += 1
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error handling Plex alert: {e}", exc_info=True)
|
||||
self.error_count += 1
|
||||
|
||||
def _on_alert_error(self, error):
|
||||
"""Handle AlertListener errors."""
|
||||
self.logger.error(f"❌ Plex AlertListener error: {error}")
|
||||
self.error_count += 1
|
||||
|
||||
def _handle_playing_alert(self, data: dict[str, Any]):
|
||||
"""Handle playing state alerts from Plex."""
|
||||
playing_queue = data.get("PlayQueue", {})
|
||||
if not playing_queue:
|
||||
return
|
||||
|
||||
session_key = playing_queue.get("playQueueSessionKey")
|
||||
if not session_key:
|
||||
return
|
||||
|
||||
# Get session details from server
|
||||
try:
|
||||
if self.plex_server:
|
||||
sessions = self.plex_server.sessions()
|
||||
target_session = None
|
||||
|
||||
for session in sessions:
|
||||
if hasattr(session, "sessionKey") and str(
|
||||
session.sessionKey
|
||||
) == str(session_key):
|
||||
target_session = session
|
||||
break
|
||||
|
||||
if target_session:
|
||||
self._process_session(target_session, "session_progress")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get session details: {e}")
|
||||
|
||||
def _handle_timeline_alert(self, data: dict[str, Any]):
|
||||
"""Handle timeline alerts from Plex."""
|
||||
timeline_entries = data.get("TimelineEntry", [])
|
||||
if not isinstance(timeline_entries, list):
|
||||
timeline_entries = [timeline_entries]
|
||||
|
||||
for entry in timeline_entries:
|
||||
if entry.get("type") == "video" or entry.get("type") == "music":
|
||||
state = entry.get("state")
|
||||
session_key = entry.get("sessionKey")
|
||||
|
||||
if session_key and state:
|
||||
self._handle_timeline_entry(entry, session_key, state)
|
||||
|
||||
def _handle_timeline_entry(
|
||||
self, entry: dict[str, Any], session_key: str, state: str
|
||||
):
|
||||
"""Process a timeline entry."""
|
||||
try:
|
||||
# Get full session details
|
||||
if self.plex_server:
|
||||
sessions = self.plex_server.sessions()
|
||||
target_session = None
|
||||
|
||||
for session in sessions:
|
||||
if hasattr(session, "sessionKey") and str(
|
||||
session.sessionKey
|
||||
) == str(session_key):
|
||||
target_session = session
|
||||
break
|
||||
|
||||
if target_session:
|
||||
event_type = self._map_plex_state_to_event(state)
|
||||
if event_type:
|
||||
self._process_session(target_session, event_type)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to handle timeline entry: {e}")
|
||||
|
||||
def _process_session(self, session, event_type: str):
|
||||
"""Process a Plex session into an ActivityEvent."""
|
||||
try:
|
||||
# Extract session data
|
||||
session_data = self._extract_session_data(session)
|
||||
if not session_data:
|
||||
return
|
||||
|
||||
# Create activity event
|
||||
event = ActivityEvent(
|
||||
event_type=event_type,
|
||||
server_id=self.server.id,
|
||||
session_id=session_data["session_id"],
|
||||
user_name=session_data["user_name"],
|
||||
media_title=session_data["media_title"],
|
||||
timestamp=datetime.now(UTC),
|
||||
user_id=session_data.get("user_id"),
|
||||
media_type=session_data.get("media_type"),
|
||||
media_id=session_data.get("media_id"),
|
||||
series_name=session_data.get("series_name"),
|
||||
season_number=session_data.get("season_number"),
|
||||
episode_number=session_data.get("episode_number"),
|
||||
duration_ms=session_data.get("duration_ms"),
|
||||
position_ms=session_data.get("position_ms"),
|
||||
device_name=session_data.get("device_name"),
|
||||
client_name=session_data.get("client_name"),
|
||||
ip_address=session_data.get("ip_address"),
|
||||
platform=session_data.get("platform"),
|
||||
player_version=session_data.get("player_version"),
|
||||
state=session_data.get("state"),
|
||||
transcoding_info=session_data.get("transcoding_info"),
|
||||
metadata=session_data.get("metadata"),
|
||||
artwork_url=session_data.get("artwork_url"),
|
||||
thumbnail_url=session_data.get("thumbnail_url"),
|
||||
)
|
||||
|
||||
self._emit_event(event)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to process Plex session: {e}", exc_info=True)
|
||||
|
||||
def _extract_session_data(self, session) -> dict[str, Any] | None:
|
||||
"""Extract relevant data from Plex session object."""
|
||||
try:
|
||||
# Basic session info
|
||||
session_id = str(getattr(session, "sessionKey", ""))
|
||||
if not session_id:
|
||||
return None
|
||||
|
||||
# User info
|
||||
user_name = (
|
||||
getattr(session.user, "title", "Unknown")
|
||||
if hasattr(session, "user")
|
||||
else "Unknown"
|
||||
)
|
||||
user_id = (
|
||||
str(getattr(session.user, "id", ""))
|
||||
if hasattr(session, "user")
|
||||
else None
|
||||
)
|
||||
|
||||
# Media info
|
||||
media = session
|
||||
media_title = getattr(media, "title", "Unknown")
|
||||
media_type = getattr(media, "type", "unknown")
|
||||
media_id = str(getattr(media, "ratingKey", ""))
|
||||
|
||||
# Series info for TV shows
|
||||
series_name = None
|
||||
season_number = None
|
||||
episode_number = None
|
||||
if media_type == "episode":
|
||||
series_name = getattr(media, "grandparentTitle", None)
|
||||
season_number = getattr(media, "parentIndex", None)
|
||||
episode_number = getattr(media, "index", None)
|
||||
|
||||
# Timing info
|
||||
duration_ms = (
|
||||
getattr(media, "duration", 0) if hasattr(media, "duration") else 0
|
||||
)
|
||||
view_offset = (
|
||||
getattr(session, "viewOffset", 0)
|
||||
if hasattr(session, "viewOffset")
|
||||
else 0
|
||||
)
|
||||
position_ms = view_offset
|
||||
|
||||
# Player info
|
||||
player = getattr(session, "player", None)
|
||||
device_name = getattr(player, "title", "Unknown") if player else "Unknown"
|
||||
client_name = getattr(player, "product", "Unknown") if player else "Unknown"
|
||||
platform = getattr(player, "platform", "Unknown") if player else "Unknown"
|
||||
player_version = (
|
||||
getattr(player, "version", "Unknown") if player else "Unknown"
|
||||
)
|
||||
|
||||
# Network info
|
||||
ip_address = getattr(player, "address", None) if player else None
|
||||
|
||||
# State
|
||||
state = getattr(session, "state", "unknown")
|
||||
|
||||
# Transcoding info
|
||||
transcoding_info = {}
|
||||
if hasattr(session, "transcodeSessions") and session.transcodeSessions:
|
||||
transcode = session.transcodeSessions[0]
|
||||
transcoding_info = {
|
||||
"is_transcoding": True,
|
||||
"video_codec": getattr(transcode, "videoCodec", None),
|
||||
"audio_codec": getattr(transcode, "audioCodec", None),
|
||||
"container": getattr(transcode, "container", None),
|
||||
"video_resolution": getattr(transcode, "videoResolution", None),
|
||||
"transcoding_speed": getattr(transcode, "speed", None),
|
||||
}
|
||||
else:
|
||||
# Check media streams for direct play info
|
||||
if hasattr(media, "media") and media.media:
|
||||
media_item = media.media[0]
|
||||
if hasattr(media_item, "parts") and media_item.parts:
|
||||
part = media_item.parts[0]
|
||||
transcoding_info = {
|
||||
"is_transcoding": False,
|
||||
"direct_play": True,
|
||||
"container": getattr(part, "container", None),
|
||||
}
|
||||
|
||||
# Artwork
|
||||
artwork_url = None
|
||||
thumbnail_url = None
|
||||
if hasattr(media, "thumb") and self.plex_server:
|
||||
thumbnail_url = self.plex_server.url(media.thumb, includeToken=True)
|
||||
if hasattr(media, "art") and self.plex_server:
|
||||
artwork_url = self.plex_server.url(media.art, includeToken=True)
|
||||
|
||||
return {
|
||||
"session_id": session_id,
|
||||
"user_name": user_name,
|
||||
"user_id": user_id,
|
||||
"media_title": media_title,
|
||||
"media_type": media_type,
|
||||
"media_id": media_id,
|
||||
"series_name": series_name,
|
||||
"season_number": season_number,
|
||||
"episode_number": episode_number,
|
||||
"duration_ms": duration_ms,
|
||||
"position_ms": position_ms,
|
||||
"device_name": device_name,
|
||||
"client_name": client_name,
|
||||
"ip_address": ip_address,
|
||||
"platform": platform,
|
||||
"player_version": player_version,
|
||||
"state": state,
|
||||
"transcoding_info": transcoding_info,
|
||||
"artwork_url": artwork_url,
|
||||
"thumbnail_url": thumbnail_url,
|
||||
"metadata": {
|
||||
"plex_session_key": session_id,
|
||||
"plex_rating_key": media_id,
|
||||
},
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to extract session data: {e}", exc_info=True)
|
||||
return None
|
||||
|
||||
def _map_plex_state_to_event(self, plex_state: str) -> str | None:
|
||||
"""Map Plex state to activity event type."""
|
||||
state_mapping = {
|
||||
"playing": "session_progress",
|
||||
"paused": "session_pause",
|
||||
"stopped": "session_end",
|
||||
"buffering": "session_progress",
|
||||
}
|
||||
return state_mapping.get(plex_state.lower())
|
||||
|
||||
def _fallback_to_polling(self):
    """Poll the Plex server for sessions when AlertListener is unavailable.

    Loops until the collector is stopped, fetching the current sessions
    every 30 seconds; after an error the wait is extended to 60 seconds
    so a broken server is not hammered.
    """
    self.logger.info("Using polling fallback for Plex server")

    while self.running and not self._stop_event.is_set():
        try:
            media_client = self._get_media_client()
            if media_client:
                self._process_polling_sessions(media_client.now_playing())

            # Poll every 30 seconds
            self._stop_event.wait(30)

        except Exception as e:
            self.logger.error(f"Polling error: {e}")
            self._stop_event.wait(60)  # Wait longer on error
|
||||
|
||||
def _process_polling_sessions(self, sessions):
    """Convert polled session payloads into progress events and emit them.

    Each element of *sessions* is a mapping produced by the media
    client's ``now_playing()``. A failure on one session is logged and
    does not stop processing of the remaining sessions.
    """
    for payload in sessions:
        try:
            progress_event = ActivityEvent(
                event_type="session_progress",
                server_id=self.server.id,
                session_id=payload.get("session_id", ""),
                user_name=payload.get("user_name", "Unknown"),
                media_title=payload.get("media_title", "Unknown"),
                timestamp=datetime.now(UTC),
                media_type=payload.get("media_type"),
                duration_ms=payload.get("duration_ms"),
                position_ms=payload.get("position_ms"),
                device_name=payload.get("device_name"),
                client_name=payload.get("client"),
                state=payload.get("state", "playing"),
            )
            self._emit_event(progress_event)
        except Exception as e:
            self.logger.error(f"Failed to process polling session: {e}")
|
||||
132
app/activity/monitoring/collectors/polling.py
Normal file
132
app/activity/monitoring/collectors/polling.py
Normal file
@@ -0,0 +1,132 @@
|
||||
"""
|
||||
Polling activity collector for media servers without WebSocket support.
|
||||
|
||||
Uses the existing now_playing() API methods to poll for active sessions
|
||||
on servers that don't support real-time WebSocket notifications.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from ...domain.models import ActivityEvent
|
||||
from ..monitor import BaseCollector
|
||||
|
||||
|
||||
class PollingCollector(BaseCollector):
    """Generic polling collector for servers without WebSocket support.

    Periodically queries the server's ``now_playing()`` endpoint, diffs
    the result against the previously seen sessions, and emits
    start / pause / resume / progress / end events accordingly.
    """

    def __init__(self, server, event_callback):
        super().__init__(server, event_callback)
        # Last observed payload per session_id; used to detect state
        # changes and to report vanished sessions as ended.
        self.active_sessions: dict[str, dict[str, Any]] = {}
        self.poll_interval = 30  # seconds between polls

    def _collect_loop(self):
        """Main collection loop using polling."""
        self.logger.info(f"Starting polling collector for {self.server.name}")

        while self.running and not self._stop_event.is_set():
            try:
                self._poll_sessions()
                self._stop_event.wait(self.poll_interval)
            except Exception as e:
                self.logger.error(f"Polling error: {e}", exc_info=True)
                self.error_count += 1
                # Wait longer on error
                self._stop_event.wait(60)

    def _poll_sessions(self):
        """Poll server for current sessions."""
        try:
            client = self._get_media_client()
            if not client:
                self.logger.warning("No media client available for polling")
                return

            seen_ids: set[str] = set()

            for payload in client.now_playing():
                sid = payload.get("session_id", "")
                if not sid:
                    continue
                seen_ids.add(sid)

                previous = self.active_sessions.get(sid)
                self.active_sessions[sid] = payload

                if previous is None:
                    # First time this session has been observed.
                    self._emit_polling_event(payload, "session_start")
                    continue

                old_state = previous.get("state", "unknown")
                new_state = payload.get("state", "unknown")

                if old_state == new_state:
                    # No state change -> plain progress update.
                    self._emit_polling_event(payload, "session_progress")
                elif new_state == "paused":
                    self._emit_polling_event(payload, "session_pause")
                elif new_state == "playing" and old_state == "paused":
                    self._emit_polling_event(payload, "session_resume")
                else:
                    # Any other state change is treated as progress.
                    self._emit_polling_event(payload, "session_progress")

            # Anything tracked that is no longer reported has ended.
            for sid in set(self.active_sessions) - seen_ids:
                self._emit_polling_event(
                    self.active_sessions.pop(sid), "session_end"
                )

        except Exception as e:
            self.logger.error(f"Failed to poll sessions: {e}", exc_info=True)
            self.error_count += 1

    def _emit_polling_event(self, session_data: dict[str, Any], event_type: str):
        """Convert polling session data to ActivityEvent and emit."""
        try:
            event = ActivityEvent(
                event_type=event_type,
                server_id=self.server.id,
                session_id=session_data.get("session_id", ""),
                user_name=session_data.get("user_name", "Unknown"),
                media_title=session_data.get("media_title", "Unknown"),
                timestamp=datetime.now(UTC),
                user_id=session_data.get("user_id"),
                media_type=session_data.get("media_type"),
                media_id=session_data.get("media_id"),
                series_name=session_data.get("series_name"),
                season_number=session_data.get("season_number"),
                episode_number=session_data.get("episode_number"),
                duration_ms=session_data.get("duration_ms"),
                position_ms=session_data.get("position_ms"),
                device_name=session_data.get("device_name"),
                client_name=session_data.get("client"),
                ip_address=session_data.get("ip_address"),
                platform=session_data.get("platform"),
                player_version=session_data.get("player_version"),
                state=session_data.get("state", "playing"),
                transcoding_info=session_data.get("transcoding", {}),
                metadata=session_data.get("metadata", {}),
                artwork_url=session_data.get("artwork_url"),
                thumbnail_url=session_data.get("thumbnail_url"),
            )

            self._emit_event(event)

        except Exception as e:
            self.logger.error(f"Failed to emit polling event: {e}", exc_info=True)
|
||||
330
app/activity/monitoring/monitor.py
Normal file
330
app/activity/monitoring/monitor.py
Normal file
@@ -0,0 +1,330 @@
|
||||
"""
|
||||
WebSocket monitoring infrastructure for Wizarr.
|
||||
|
||||
Manages real-time connections to media servers for activity monitoring
|
||||
using WebSocket APIs where available, with fallback to polling.
|
||||
"""
|
||||
|
||||
import threading
|
||||
import time
|
||||
from collections.abc import Callable
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
import structlog
|
||||
|
||||
try:
|
||||
from flask import Flask
|
||||
|
||||
from app.extensions import db
|
||||
from app.models import MediaServer
|
||||
except ImportError: # pragma: no cover
|
||||
Flask = None # type: ignore
|
||||
MediaServer = None # type: ignore
|
||||
db = None # type: ignore
|
||||
|
||||
from app.activity.domain.models import ActivityEvent
|
||||
from app.services.activity import ActivityService
|
||||
|
||||
# Global app instance for background thread access
|
||||
_app_instance = None
|
||||
|
||||
|
||||
class WebSocketMonitor:
    """Manages WebSocket connections to media servers for real-time activity monitoring."""

    def __init__(self, app: Flask):
        global _app_instance
        self.app = app
        _app_instance = app  # Store globally for background thread access
        self.logger = structlog.get_logger(__name__)
        self.activity_service = ActivityService()
        # server_id -> running collector instance
        self.connections: dict[int, BaseCollector] = {}
        self.executor = None  # Created lazily when monitoring starts
        self.monitoring = False
        self._stop_event = threading.Event()

    def start_monitoring(self):
        """Start monitoring all configured servers."""
        if self.monitoring:
            self.logger.debug("Activity monitoring already started, skipping")
            return

        # Clear any previous stop signal before starting fresh
        self._stop_event.clear()

        self.monitoring = True
        self.logger.info("Starting activity monitoring")

        if self.executor is None:
            self.executor = ThreadPoolExecutor(
                max_workers=10, thread_name_prefix="activity-monitor"
            )
            self.logger.info("Initialized ThreadPoolExecutor for activity monitoring")

        # Run the management loop off the calling thread.
        self.executor.submit(self._monitor_loop)

    def stop_monitoring(self):
        """Stop all monitoring connections."""
        if not self.monitoring:
            return

        self.logger.info("Stopping activity monitoring")
        self.monitoring = False
        self._stop_event.set()

        # Stop every collector; one failure must not block the others.
        for collector in self.connections.values():
            try:
                collector.stop()
            except Exception as e:
                self.logger.error(f"Error stopping collector: {e}")

        self.connections.clear()
        if self.executor:
            self.executor.shutdown(wait=True)
            self.executor = None

    def _monitor_loop(self):
        """Main monitoring loop that manages collectors."""
        with self.app.app_context():
            while self.monitoring and not self._stop_event.is_set():
                try:
                    self._update_collectors()
                    time.sleep(30)  # Check for new/removed servers every 30 seconds
                except Exception as e:
                    self.logger.error(f"Error in monitoring loop: {e}", exc_info=True)
                    time.sleep(5)

    def _update_collectors(self):
        """Update collectors based on current server configuration."""
        if db is None:
            return

        try:
            from sqlalchemy import inspect

            # Skip entirely until the schema exists (fresh install or tests).
            if "media_server" not in inspect(db.engine).get_table_names():
                return

            # All verified media servers should have a collector.
            servers = db.session.query(MediaServer).filter_by(verified=True).all()
            wanted_ids = {server.id for server in servers}

            # Drop collectors whose server was removed or un-verified.
            for server_id in set(self.connections) - wanted_ids:
                stale = self.connections.pop(server_id)
                stale.stop()
                self.logger.info(f"Stopped monitoring server {server_id}")

            for server in servers:
                if server.id in self.connections:
                    # Already monitored - just surface apparent disconnects.
                    if not self.connections[server.id].is_connected():
                        self.logger.warning(
                            f"Collector for server {server.id} appears disconnected, checking status"
                        )
                        # Could add reconnection logic here if needed
                    continue

                self.logger.info(
                    f"Creating collector for server {server.id} ({server.server_type})"
                )
                collector = self._create_collector(server)
                if not collector:
                    self.logger.error(
                        f"Failed to create collector for server {server.id}"
                    )
                    continue

                self.connections[server.id] = collector
                self.logger.info(
                    f"Submitting collector task for server {server.id}"
                )
                if not self.executor:
                    self.logger.error(
                        "Executor not initialized - cannot start collector"
                    )
                    continue

                future = self.executor.submit(collector.start)
                self.logger.info(
                    f"Started monitoring server {server.id} ({server.server_type})"
                )

                # Surface exceptions raised inside the collector thread.
                def collector_done_callback(fut, server_id=server.id):
                    try:
                        fut.result()  # Re-raises anything the collector raised
                    except Exception as e:
                        self.logger.error(
                            f"Collector for server {server_id} failed: {e}",
                            exc_info=True,
                        )

                future.add_done_callback(collector_done_callback)

        except Exception as e:
            # Silently ignore database errors during testing or when tables
            # don't exist yet.
            from sqlalchemy.exc import OperationalError

            if isinstance(e, OperationalError) and "no such table" in str(e):
                self.logger.debug(f"Database not fully initialized yet: {e}")
            else:
                self.logger.error(f"Failed to update collectors: {e}", exc_info=True)

    def _create_collector(self, server: MediaServer) -> Optional["BaseCollector"]:
        """Create appropriate collector for server type."""
        try:
            self.logger.info(
                f"Creating collector for server type: {server.server_type}"
            )

            server_type = server.server_type
            if server_type == "plex":
                self.logger.info("Creating PlexCollector...")
                from .collectors.plex import PlexCollector

                collector = PlexCollector(server, self._on_activity_event)
                self.logger.info(f"PlexCollector created: {collector}")
                return collector
            if server_type == "jellyfin":
                self.logger.info("Creating JellyfinCollector...")
                from .collectors.jellyfin import JellyfinCollector

                return JellyfinCollector(server, self._on_activity_event)
            if server_type == "emby":
                self.logger.info("Creating EmbyCollector...")
                from .collectors.emby import EmbyCollector

                return EmbyCollector(server, self._on_activity_event)
            if server_type == "audiobookshelf":
                self.logger.info("Creating AudiobookshelfCollector...")
                from .collectors.audiobookshelf import AudiobookshelfCollector

                return AudiobookshelfCollector(server, self._on_activity_event)

            # For other server types, use polling collector
            self.logger.info("Creating PollingCollector...")
            from .collectors.polling import PollingCollector

            return PollingCollector(server, self._on_activity_event)

        except Exception as e:
            self.logger.error(
                f"Failed to create collector for server {server.id}: {e}", exc_info=True
            )
            return None

    def _on_activity_event(self, event: ActivityEvent):
        """Handle activity events from collectors."""
        try:
            # Collectors run outside the request cycle, so push an app
            # context before touching the database-backed service.
            with self.app.app_context():
                if self.activity_service.record_activity_event(event):
                    self.logger.debug(
                        f"Recorded activity event: {event.event_type} for {event.user_name}"
                    )
        except Exception as e:
            self.logger.error(f"Failed to handle activity event: {e}", exc_info=True)

    def get_connection_status(self) -> dict[int, dict[str, Any]]:
        """Get status of all monitoring connections."""
        return {
            server_id: {
                "connected": collector.is_connected(),
                "last_event": collector.get_last_event_time(),
                "event_count": collector.get_event_count(),
                "errors": collector.get_error_count(),
            }
            for server_id, collector in self.connections.items()
        }
|
||||
|
||||
|
||||
class BaseCollector:
    """Base class for activity collectors.

    Subclasses implement :meth:`_collect_loop`; this class handles the
    start/stop lifecycle, simple event/error counters, and delivery of
    events to the registered callback.
    """

    def __init__(
        self, server: MediaServer, event_callback: Callable[[ActivityEvent], None]
    ):
        self.server = server
        self.event_callback = event_callback
        self.logger = structlog.get_logger(
            f"activity.collector.{getattr(server, 'server_type', 'unknown')}"
        )
        self.running = False
        self.last_event_time: datetime | None = None
        self.event_count = 0
        self.error_count = 0
        self._stop_event = threading.Event()

    def start(self):
        """Start collecting activity data."""
        if self.running:
            return

        self.running = True
        self.logger.info(f"Starting collector for {self.server.name}")
        try:
            self._collect_loop()
        except Exception as e:
            self.logger.error(
                f"Collector failed for {self.server.name}: {e}", exc_info=True
            )
            self.error_count += 1
        finally:
            # Whatever happens in the loop, the collector is no longer live.
            self.running = False

    def stop(self):
        """Stop collecting activity data."""
        if not self.running:
            return

        self.logger.info(f"Stopping collector for {self.server.name}")
        self.running = False
        self._stop_event.set()

    def _collect_loop(self):
        """Main collection loop - to be implemented by subclasses."""
        raise NotImplementedError

    def is_connected(self) -> bool:
        """Check if collector is connected and working."""
        return self.running and not self._stop_event.is_set()

    def get_last_event_time(self) -> datetime | None:
        """Get timestamp of last event."""
        return self.last_event_time

    def get_event_count(self) -> int:
        """Get total event count."""
        return self.event_count

    def get_error_count(self) -> int:
        """Get total error count."""
        return self.error_count

    def _emit_event(self, event: ActivityEvent):
        """Emit an activity event."""
        try:
            self.event_callback(event)
        except Exception as e:
            self.logger.error(f"Failed to emit event: {e}")
            self.error_count += 1
        else:
            self.last_event_time = datetime.now(UTC)
            self.event_count += 1

    def _get_media_client(self):
        """Get media client for this server."""
        try:
            # Imported lazily to avoid a circular import at module load.
            from app.services.media.service import get_client_for_media_server

            return get_client_for_media_server(self.server)
        except Exception as e:
            self.logger.error(f"Failed to get media client: {e}")
            return None
|
||||
771
app/activity/monitoring/session_manager.py
Normal file
771
app/activity/monitoring/session_manager.py
Normal file
@@ -0,0 +1,771 @@
|
||||
"""
|
||||
Enhanced session management inspired by Tautulli's robust approach.
|
||||
|
||||
This module provides sophisticated session lifecycle management with
|
||||
state tracking, automatic cleanup, and intelligent session grouping.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
import structlog
|
||||
|
||||
from ..domain.models import ActivityEvent
|
||||
|
||||
|
||||
class SessionState(Enum):
    """Possible session states.

    Values mirror the lowercase state strings reported by the media
    server, so ``SessionState(raw_state)`` works on normalized input.
    """

    PLAYING = "playing"
    PAUSED = "paused"
    STOPPED = "stopped"
    BUFFERING = "buffering"
    ERROR = "error"
    UNKNOWN = "unknown"
|
||||
|
||||
|
||||
@dataclass
class SessionTransition:
    """Represents a state transition for a session.

    Attributes:
        session_key: Opaque identifier of the session (the Plex sessionKey).
        from_state: State before the transition; ``None`` for a brand-new session.
        to_state: State after the transition.
        timestamp: When the transition was observed.
        view_offset: Playback position at the transition, in milliseconds, if known.
        metadata: Extra context such as server_id, rating_key, and the raw alert.
    """

    session_key: str
    from_state: SessionState | None
    to_state: SessionState
    timestamp: datetime
    view_offset: int | None = None
    metadata: dict[str, Any] | None = None
|
||||
|
||||
|
||||
class SessionManager:
|
||||
"""
|
||||
Enhanced session manager with state tracking and lifecycle management.
|
||||
|
||||
Inspired by Tautulli's robust session handling with automatic cleanup,
|
||||
state transitions, and intelligent session grouping.
|
||||
"""
|
||||
|
||||
def __init__(self, event_callback=None):
|
||||
self.logger = structlog.get_logger(__name__)
|
||||
self.event_callback = event_callback # Callback to emit events properly
|
||||
self.active_sessions: dict[str, dict[str, Any]] = {}
|
||||
self.session_timers: dict[str, Any] = {}
|
||||
self.cleanup_interval = 300 # 5 minutes stale session cleanup
|
||||
|
||||
def process_alert(self, alert_data: dict[str, Any], server_id: int) -> bool:
|
||||
"""
|
||||
Process a Plex alert with sophisticated state management.
|
||||
|
||||
Args:
|
||||
alert_data: Raw alert data from Plex WebSocket
|
||||
server_id: Media server ID
|
||||
|
||||
Returns:
|
||||
True if alert was processed successfully
|
||||
"""
|
||||
try:
|
||||
alert_type = alert_data.get("type")
|
||||
|
||||
if alert_type == "playing":
|
||||
return self._process_playing_alert(alert_data, server_id)
|
||||
if alert_type == "transcodeSession.start":
|
||||
return self._process_transcode_start(alert_data, server_id)
|
||||
if alert_type == "transcodeSession.end":
|
||||
return self._process_transcode_end(alert_data, server_id)
|
||||
if alert_type in [
|
||||
"activity",
|
||||
"status",
|
||||
"timeline",
|
||||
"transcodeSession.update",
|
||||
"update.statechange",
|
||||
]:
|
||||
# Filter out noisy alerts completely
|
||||
return True
|
||||
self.logger.debug(f"Ignoring alert type: {alert_type}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error processing alert: {e}", exc_info=True)
|
||||
return False
|
||||
|
||||
def _process_playing_alert(
|
||||
self, alert_data: dict[str, Any], server_id: int
|
||||
) -> bool:
|
||||
"""Process playing state notifications with state machine logic."""
|
||||
notifications = alert_data.get("PlaySessionStateNotification", [])
|
||||
if not isinstance(notifications, list):
|
||||
notifications = [notifications]
|
||||
|
||||
for notification in notifications:
|
||||
session_key = notification.get("sessionKey")
|
||||
if not session_key:
|
||||
continue
|
||||
|
||||
try:
|
||||
session_key = str(session_key)
|
||||
new_state = self._map_plex_state(notification.get("state", "unknown"))
|
||||
view_offset = notification.get("viewOffset", 0)
|
||||
rating_key = notification.get("ratingKey")
|
||||
|
||||
# Get current session state
|
||||
current_session = self.active_sessions.get(session_key)
|
||||
last_state = None
|
||||
|
||||
if current_session:
|
||||
last_state = SessionState(current_session.get("state", "unknown"))
|
||||
|
||||
# Create state transition
|
||||
transition = SessionTransition(
|
||||
session_key=session_key,
|
||||
from_state=last_state,
|
||||
to_state=new_state,
|
||||
timestamp=datetime.now(UTC),
|
||||
view_offset=view_offset,
|
||||
metadata={
|
||||
"rating_key": rating_key,
|
||||
"server_id": server_id,
|
||||
"raw_alert": notification,
|
||||
},
|
||||
)
|
||||
|
||||
# Process the state transition
|
||||
self._handle_state_transition(transition)
|
||||
|
||||
# Reset cleanup timer
|
||||
self._schedule_cleanup(session_key)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error processing playing alert for session {session_key}: {e}"
|
||||
)
|
||||
continue
|
||||
|
||||
return True
|
||||
|
||||
def _handle_state_transition(self, transition: SessionTransition):
|
||||
"""
|
||||
Handle state transitions with sophisticated logic.
|
||||
|
||||
This is inspired by Tautulli's state machine approach.
|
||||
"""
|
||||
session_key = transition.session_key
|
||||
from_state = transition.from_state
|
||||
to_state = transition.to_state
|
||||
|
||||
self.logger.debug(
|
||||
f"Session {session_key} transition: {from_state} -> {to_state}"
|
||||
)
|
||||
|
||||
# Update session tracking
|
||||
if session_key not in self.active_sessions:
|
||||
self.active_sessions[session_key] = {
|
||||
"session_key": session_key,
|
||||
"started_at": transition.timestamp,
|
||||
"state": to_state.value,
|
||||
"view_offset": transition.view_offset,
|
||||
"server_id": transition.metadata.get("server_id")
|
||||
if transition.metadata
|
||||
else None,
|
||||
"rating_key": transition.metadata.get("rating_key")
|
||||
if transition.metadata
|
||||
else None,
|
||||
"last_updated": transition.timestamp,
|
||||
}
|
||||
else:
|
||||
self.active_sessions[session_key].update(
|
||||
{
|
||||
"state": to_state.value,
|
||||
"view_offset": transition.view_offset,
|
||||
"last_updated": transition.timestamp,
|
||||
}
|
||||
)
|
||||
|
||||
# Handle specific state transitions
|
||||
if from_state is None and to_state == SessionState.PLAYING:
|
||||
self._on_session_start(transition)
|
||||
elif from_state == SessionState.PLAYING and to_state == SessionState.PAUSED:
|
||||
self._on_session_pause(transition)
|
||||
elif from_state == SessionState.PAUSED and to_state == SessionState.PLAYING:
|
||||
self._on_session_resume(transition)
|
||||
elif to_state == SessionState.STOPPED:
|
||||
self._on_session_stop(transition)
|
||||
elif to_state == SessionState.BUFFERING:
|
||||
self._on_session_buffer(transition)
|
||||
elif to_state == SessionState.ERROR:
|
||||
self._on_session_error(transition)
|
||||
|
||||
# Always record progress for playing/paused states
|
||||
if to_state in (SessionState.PLAYING, SessionState.PAUSED):
|
||||
self._record_progress(transition)
|
||||
|
||||
def _on_session_start(self, transition: SessionTransition):
|
||||
"""Handle session start with rich logging and validation."""
|
||||
session_key = transition.session_key
|
||||
server_id = (
|
||||
transition.metadata.get("server_id") if transition.metadata else None
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"🎬 Session start handler called for {session_key}, server_id={server_id}"
|
||||
)
|
||||
|
||||
# Retry logic for session data lookup
|
||||
session_data = {}
|
||||
max_retries = 3
|
||||
retry_delay = 0.5 # 0.5 seconds between retries
|
||||
|
||||
for attempt in range(max_retries):
|
||||
session_data = self._get_session_from_current_activity(
|
||||
session_key, server_id
|
||||
)
|
||||
|
||||
# Check if we got valid session data
|
||||
if (
|
||||
session_data.get("username") != "Unknown"
|
||||
and session_data.get("full_title") != "Unknown"
|
||||
):
|
||||
break
|
||||
|
||||
if attempt < max_retries - 1: # Don't sleep after last attempt
|
||||
self.logger.debug(
|
||||
f"Session data not ready for {session_key}, retrying in {retry_delay}s (attempt {attempt + 1}/{max_retries})"
|
||||
)
|
||||
import time
|
||||
|
||||
time.sleep(retry_delay)
|
||||
retry_delay *= 2 # Exponential backoff
|
||||
|
||||
user_name = session_data.get("username", "Unknown")
|
||||
media_title = session_data.get("full_title", "Unknown")
|
||||
rating_key = (
|
||||
transition.metadata.get("rating_key") if transition.metadata else None
|
||||
)
|
||||
|
||||
# If we still have Unknown data after retries, log a warning but continue
|
||||
if user_name == "Unknown" or media_title == "Unknown":
|
||||
self.logger.warning(
|
||||
f"⚠️ Session {session_key} still has unknown data after {max_retries} retries - creating session anyway"
|
||||
)
|
||||
|
||||
self.logger.info(
|
||||
f"🎬 Session {session_key} started for user {user_name}, title: {media_title} (rating_key: {rating_key})"
|
||||
)
|
||||
|
||||
# Cache the session data including timestamp for duration calculation
|
||||
self.active_sessions[session_key] = {
|
||||
**session_data,
|
||||
"started_at": transition.timestamp,
|
||||
"last_update": transition.timestamp,
|
||||
}
|
||||
|
||||
# Create activity event with rich data from session
|
||||
event = ActivityEvent(
|
||||
event_type="session_start",
|
||||
server_id=int(transition.metadata.get("server_id", 0))
|
||||
if transition.metadata
|
||||
else 0,
|
||||
session_id=session_key,
|
||||
user_name=user_name,
|
||||
user_id=session_data.get("user_id"),
|
||||
media_title=media_title,
|
||||
media_type=session_data.get("media_type", "unknown"),
|
||||
media_id=session_data.get("rating_key", rating_key),
|
||||
device_name=session_data.get("device", "Unknown"),
|
||||
client_name=session_data.get("player", "Unknown"),
|
||||
platform=session_data.get("platform", "Unknown"),
|
||||
timestamp=transition.timestamp,
|
||||
position_ms=transition.view_offset,
|
||||
metadata=transition.metadata,
|
||||
)
|
||||
|
||||
# Record the event
|
||||
if self.event_callback:
|
||||
self.event_callback(event)
|
||||
|
||||
def _on_session_pause(self, transition: SessionTransition):
|
||||
"""Handle session pause with timestamp tracking."""
|
||||
session_key = transition.session_key
|
||||
self.logger.info(
|
||||
f"⏸️ Session {session_key} paused at {transition.view_offset}ms"
|
||||
)
|
||||
|
||||
# Track pause timestamp for duration calculations
|
||||
if session_key in self.active_sessions:
|
||||
self.active_sessions[session_key]["paused_at"] = transition.timestamp
|
||||
|
||||
# Get complete session data for pause event
|
||||
server_id = (
|
||||
int(transition.metadata.get("server_id", 0)) if transition.metadata else 0
|
||||
)
|
||||
session_data = self._get_session_from_current_activity(session_key, server_id)
|
||||
|
||||
event = ActivityEvent(
|
||||
event_type="session_pause",
|
||||
server_id=server_id,
|
||||
session_id=session_key,
|
||||
user_name=session_data.get("username", "Unknown"),
|
||||
media_title=session_data.get("full_title", "Unknown"),
|
||||
timestamp=transition.timestamp,
|
||||
position_ms=transition.view_offset,
|
||||
metadata=transition.metadata,
|
||||
)
|
||||
|
||||
if self.event_callback:
|
||||
self.event_callback(event)
|
||||
|
||||
def _on_session_resume(self, transition: SessionTransition):
|
||||
"""Handle session resume with pause duration tracking."""
|
||||
session_key = transition.session_key
|
||||
self.logger.info(
|
||||
f"▶️ Session {session_key} resumed from {transition.view_offset}ms"
|
||||
)
|
||||
|
||||
# Calculate pause duration
|
||||
pause_duration = None
|
||||
if session_key in self.active_sessions:
|
||||
paused_at = self.active_sessions[session_key].get("paused_at")
|
||||
if paused_at:
|
||||
pause_duration = (transition.timestamp - paused_at).total_seconds()
|
||||
self.active_sessions[session_key].pop("paused_at", None)
|
||||
|
||||
# Get complete session data for resume event
|
||||
server_id = (
|
||||
int(transition.metadata.get("server_id", 0)) if transition.metadata else 0
|
||||
)
|
||||
session_data = self._get_session_from_current_activity(session_key, server_id)
|
||||
|
||||
event = ActivityEvent(
|
||||
event_type="session_resume",
|
||||
server_id=server_id,
|
||||
session_id=session_key,
|
||||
user_name=session_data.get("username", "Unknown"),
|
||||
media_title=session_data.get("full_title", "Unknown"),
|
||||
timestamp=transition.timestamp,
|
||||
position_ms=transition.view_offset,
|
||||
metadata={**transition.metadata, "pause_duration_seconds": pause_duration},
|
||||
)
|
||||
|
||||
if self.event_callback:
|
||||
self.event_callback(event)
|
||||
|
||||
def _on_session_stop(self, transition: SessionTransition):
    """Handle session stop with cleanup and final recording.

    Cancels the stale-session watchdog, resolves the best available
    session metadata (cached start data first, live lookup as fallback —
    stopped sessions are often already gone from Plex's active sessions),
    computes the total watch duration, and emits a ``session_end`` event.
    """
    session_key = transition.session_key
    server_id = (
        int(transition.metadata.get("server_id", 0)) if transition.metadata else 0
    )
    self.logger.info(
        f"⏹️ Session {session_key} stopped at {transition.view_offset}ms"
    )

    # Clean up session tracking: drop cached state and cancel the watchdog.
    session_data = self.active_sessions.pop(session_key, {})
    self._cancel_cleanup_timer(session_key)

    # Debug: Log what's in the cached session data
    self.logger.debug(
        f"🔍 Cached session data for {session_key}: username={session_data.get('username', 'missing')}, title={session_data.get('full_title', 'missing')}"
    )

    # Use cached data first (from session start), then try current activity
    # as fallback, because stopped sessions are often already removed from
    # Plex's active sessions.
    current_session_data = session_data

    # If cached data is incomplete, try to get current session data.
    if (
        not current_session_data
        or current_session_data.get("username") == "Unknown"
    ):
        self.logger.debug(
            f"🔍 Cached data incomplete for {session_key}, trying current activity lookup"
        )
        lookup_data = self._get_session_from_current_activity(
            session_key, server_id
        )
        if lookup_data and lookup_data.get("username") != "Unknown":
            self.logger.debug(
                f"🔍 Current activity lookup successful for {session_key}: username={lookup_data.get('username')}"
            )
            current_session_data = lookup_data
        else:
            self.logger.debug(
                f"🔍 Current activity lookup also failed for {session_key}"
            )

    user_name = current_session_data.get("username", "Unknown")
    media_title = current_session_data.get("full_title", "Unknown")

    # Calculate total duration from the cached start timestamp, if any.
    started_at = session_data.get("started_at")
    total_duration = None
    if started_at:
        total_duration = (transition.timestamp - started_at).total_seconds()

    event = ActivityEvent(
        event_type="session_end",
        server_id=server_id,
        session_id=session_key,
        user_name=user_name,
        media_title=media_title,
        media_type=current_session_data.get("media_type", "unknown"),
        media_id=current_session_data.get("rating_key"),
        device_name=current_session_data.get("device", "Unknown"),
        client_name=current_session_data.get("player", "Unknown"),
        platform=current_session_data.get("platform", "Unknown"),
        timestamp=transition.timestamp,
        position_ms=transition.view_offset,
        metadata={
            # Fix: transition.metadata may be None (the server_id lookup
            # above guards for that); the original unpacked it
            # unconditionally, which raised TypeError.
            **(transition.metadata or {}),
            "total_duration_seconds": total_duration,
        },
    )

    if self.event_callback:
        self.event_callback(event)
def _on_session_buffer(self, transition: SessionTransition):
    """Handle buffering events with frequency tracking.

    Counts buffer events per session, warns only once buffering becomes
    excessive (three or more events), and emits a ``session_buffer``
    ActivityEvent enriched with live session details.
    """
    session_key = transition.session_key

    # Count this buffer event against the tracked session, if any.
    if session_key in self.active_sessions:
        tracked = self.active_sessions[session_key]
        buffer_count = tracked.get("buffer_count", 0) + 1
        tracked["buffer_count"] = buffer_count

        # Stay quiet for occasional buffering; warn when it is excessive.
        if buffer_count >= 3:
            self.logger.warning(
                f"🔄 Session {session_key} buffering (count: {buffer_count})"
            )

    # Resolve the owning server and fetch complete session details.
    server_id = 0
    if transition.metadata:
        server_id = int(transition.metadata.get("server_id", 0))
    session_data = self._get_session_from_current_activity(session_key, server_id)

    event = ActivityEvent(
        event_type="session_buffer",
        server_id=server_id,
        session_id=session_key,
        user_name=session_data.get("username", "Unknown"),
        media_title=session_data.get("full_title", "Unknown"),
        timestamp=transition.timestamp,
        position_ms=transition.view_offset,
        metadata=transition.metadata,
    )

    if self.event_callback:
        self.event_callback(event)
def _on_session_error(self, transition: SessionTransition):
    """Handle session errors by logging and emitting a session_error event."""
    session_key = transition.session_key
    self.logger.warning(f"❌ Session {session_key} encountered error")

    # Resolve the owning server from the transition metadata, if present.
    server_id = 0
    if transition.metadata:
        server_id = int(transition.metadata.get("server_id", 0))

    event = ActivityEvent(
        event_type="session_error",
        server_id=server_id,
        session_id=session_key,
        user_name="Unknown",
        media_title="Unknown",
        timestamp=transition.timestamp,
        position_ms=transition.view_offset,
        metadata=transition.metadata,
    )

    if self.event_callback:
        self.event_callback(event)
def _record_progress(self, transition: SessionTransition):
    """Record periodic progress snapshots.

    Throttled to at most one ``session_progress`` event per session every
    30 seconds to avoid flooding downstream consumers.
    """
    session_key = transition.session_key
    tracked = self.active_sessions.get(session_key, {})
    last_progress = tracked.get(
        "last_progress_recorded", datetime.min.replace(tzinfo=UTC)
    )

    if (transition.timestamp - last_progress).total_seconds() >= 30:
        # Get complete session data for the progress event.
        server_id = (
            int(transition.metadata.get("server_id", 0))
            if transition.metadata
            else 0
        )
        session_data = self._get_session_from_current_activity(
            session_key, server_id
        )

        event = ActivityEvent(
            event_type="session_progress",
            server_id=server_id,
            session_id=session_key,
            user_name=session_data.get("username", "Unknown"),
            media_title=session_data.get("full_title", "Unknown"),
            timestamp=transition.timestamp,
            position_ms=transition.view_offset,
            state=transition.to_state.value,
            metadata=transition.metadata,
        )

        if self.event_callback:
            self.event_callback(event)

        # Fix: only update the throttle marker when the session is still
        # tracked — the original indexed active_sessions unconditionally,
        # which raised KeyError for sessions not (or no longer) tracked.
        if session_key in self.active_sessions:
            self.active_sessions[session_key]["last_progress_recorded"] = (
                transition.timestamp
            )
def _schedule_cleanup(self, session_key: str, timeout_minutes: int = 5):
|
||||
"""Schedule automatic cleanup for stale sessions (Tautulli-inspired)."""
|
||||
# Cancel existing timer
|
||||
self._cancel_cleanup_timer(session_key)
|
||||
|
||||
# Schedule new cleanup
|
||||
from threading import Timer
|
||||
|
||||
timer = Timer(
|
||||
timeout_minutes * 60, self._force_stop_session, args=[session_key]
|
||||
)
|
||||
timer.start()
|
||||
self.session_timers[session_key] = timer
|
||||
|
||||
def _cancel_cleanup_timer(self, session_key: str):
|
||||
"""Cancel cleanup timer for a session."""
|
||||
timer = self.session_timers.pop(session_key, None)
|
||||
if timer:
|
||||
timer.cancel()
|
||||
|
||||
def _force_stop_session(self, session_key: str):
    """Force stop a stale session (Tautulli-inspired).

    Builds a synthetic STOPPED transition from the cached session state
    and routes it through the normal state-transition handler.
    """
    if session_key not in self.active_sessions:
        return

    self.logger.warning(f"🧹 Force stopping stale session {session_key}")

    stale = self.active_sessions[session_key]
    synthetic = SessionTransition(
        session_key=session_key,
        from_state=SessionState(stale.get("state", "unknown")),
        to_state=SessionState.STOPPED,
        timestamp=datetime.now(UTC),
        view_offset=stale.get("view_offset", 0),
        metadata={
            "server_id": stale.get("server_id"),
            "rating_key": stale.get("rating_key"),
            # Flag so consumers can tell this stop was synthesized.
            "force_stopped": True,
        },
    )

    self._handle_state_transition(synthetic)
def _map_plex_state(self, plex_state: str) -> SessionState:
    """Translate a raw Plex state string into a SessionState member.

    Comparison is case-insensitive; unrecognized values map to UNKNOWN.
    """
    known_states = {
        "playing": SessionState.PLAYING,
        "paused": SessionState.PAUSED,
        "stopped": SessionState.STOPPED,
        "buffering": SessionState.BUFFERING,
        "error": SessionState.ERROR,
        "unknown": SessionState.UNKNOWN,
    }
    normalized = plex_state.lower()
    return known_states.get(normalized, SessionState.UNKNOWN)
def _process_timeline_alert(
|
||||
self, alert_data: dict[str, Any], server_id: int
|
||||
) -> bool:
|
||||
"""Process timeline alerts (library changes, etc.)."""
|
||||
# For now, just log these - could be enhanced for library monitoring
|
||||
self.logger.debug(
|
||||
f"Timeline alert received: {alert_data.get('type', 'unknown')}"
|
||||
)
|
||||
return True
|
||||
|
||||
def _process_transcode_start(
|
||||
self, alert_data: dict[str, Any], server_id: int
|
||||
) -> bool:
|
||||
"""Process transcoding session start."""
|
||||
transcode_sessions = alert_data.get("TranscodeSession", [])
|
||||
if not isinstance(transcode_sessions, list):
|
||||
transcode_sessions = [transcode_sessions]
|
||||
|
||||
for session in transcode_sessions:
|
||||
self.logger.info(
|
||||
f"🔄 Transcode session started: {session.get('key', 'unknown')}"
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def _process_transcode_end(
|
||||
self, alert_data: dict[str, Any], server_id: int
|
||||
) -> bool:
|
||||
"""Process transcoding session end."""
|
||||
transcode_sessions = alert_data.get("TranscodeSession", [])
|
||||
if not isinstance(transcode_sessions, list):
|
||||
transcode_sessions = [transcode_sessions]
|
||||
|
||||
for session in transcode_sessions:
|
||||
self.logger.info(
|
||||
f"✅ Transcode session ended: {session.get('key', 'unknown')}"
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
def get_active_sessions(self) -> dict[str, dict[str, Any]]:
    """Return a shallow copy of the currently tracked sessions.

    Callers get a snapshot they can mutate without affecting internal
    tracking state (session dicts themselves are still shared).
    """
    return dict(self.active_sessions)
def cleanup_all_sessions(self):
    """Cancel every watchdog and drop all tracked session state (shutdown)."""
    self.logger.info("Cleaning up all active sessions")

    # Snapshot the keys: cancelling timers must not race dict iteration.
    for key in list(self.active_sessions):
        self._cancel_cleanup_timer(key)

    self.active_sessions.clear()
    self.session_timers.clear()
def _get_session_from_current_activity(
    self, session_key: str, server_id: int
) -> dict[str, Any]:
    """
    Get complete session data using Tautulli's approach.

    Makes one API call to /status/sessions and finds our session.

    Args:
        session_key: Plex session key identifying the playback session.
        server_id: Primary key of the MediaServer row owning the session.

    Returns:
        A dict with username/user_id, full_title, media_type, rating_key,
        session_key, player, device and platform keys, or an empty dict
        when the server, client, or session cannot be resolved. Never
        raises: all failures are logged and swallowed.
    """
    try:
        # Import Flask dependencies
        from flask import has_app_context

        def _do_session_lookup():
            """Helper function to perform the actual session lookup."""
            # Imported lazily to avoid circular imports and so this
            # module stays importable without an app context.
            from app.extensions import db
            from app.models import MediaServer, User
            from app.services.media.service import get_client_for_media_server

            # Get the server
            server = MediaServer.query.get(server_id)
            if not server:
                self.logger.warning(f"Server {server_id} not found")
                return {}

            # Get the media client; it must expose the underlying
            # plexapi server object via `.server`.
            client = get_client_for_media_server(server)
            if not client or not hasattr(client, "server"):
                self.logger.warning(f"No valid client for server {server_id}")
                return {}

            # Get ALL current sessions from Plex (Tautulli approach)
            sessions = client.server.sessions()
            self.logger.info(f"📡 Found {len(sessions)} active Plex sessions")

            # Find our specific session by comparing string forms of the
            # session key (Plex may report it as int or str).
            target_session = None
            for session in sessions:
                session_key_attr = str(getattr(session, "sessionKey", ""))
                if session_key_attr == str(session_key):
                    target_session = session
                    break

            if not target_session:
                self.logger.warning(
                    f"Session {session_key} not found in current activity"
                )
                return {}

            # Extract session data (following Tautulli's pattern)
            session_data = {}

            # User information — plexapi exposes either `usernames`
            # (list of str) or `users` (list of objects with .title).
            plex_username = None
            usernames = getattr(target_session, "usernames", None)
            users = getattr(target_session, "users", None)

            if usernames:
                plex_username = usernames[0]
            elif users:
                plex_username = users[0].title

            # Map to local user: match the Plex identity against either
            # the stored username or email for this server.
            if plex_username:
                local_user = (
                    User.query.filter_by(server_id=server_id)
                    .filter(
                        db.or_(
                            User.username == plex_username,
                            User.email == plex_username,
                        )
                    )
                    .first()
                )

                if local_user:
                    session_data["username"] = local_user.username
                    session_data["user_id"] = local_user.id
                else:
                    # No local match: keep the Plex name without an id.
                    session_data["username"] = plex_username
                    session_data["user_id"] = None
            else:
                session_data["username"] = "Unknown"
                session_data["user_id"] = None

            # Media information (rich title like Tautulli)
            title = getattr(target_session, "title", "Unknown")
            grandparent_title = getattr(target_session, "grandparentTitle", None)
            parent_title = getattr(target_session, "parentTitle", None)

            # Build full title
            if grandparent_title and parent_title:
                # TV Show: "Game of Thrones - Season 1 - Winter Is Coming"
                session_data["full_title"] = (
                    f"{grandparent_title} - {parent_title} - {title}"
                )
            elif grandparent_title:
                # TV Show without season: "Game of Thrones - Winter Is Coming"
                session_data["full_title"] = f"{grandparent_title} - {title}"
            else:
                # Movie or other: "The Matrix"
                session_data["full_title"] = title

            # Additional metadata
            session_data["media_type"] = getattr(target_session, "type", "unknown")
            session_data["rating_key"] = getattr(target_session, "ratingKey", "")
            # NOTE: session_key_attr is bound inside the loop above; it is
            # only reached here when a match was found, so it equals the
            # requested key's string form.
            session_data["session_key"] = session_key_attr

            # Extract player/device info properly
            player_obj = getattr(target_session, "player", None)
            if player_obj:
                session_data["player"] = getattr(player_obj, "title", "Unknown")
                session_data["device"] = getattr(player_obj, "device", "Unknown")
                session_data["platform"] = getattr(
                    player_obj, "platform", "Unknown"
                )
            else:
                session_data["player"] = "Unknown"
                session_data["device"] = "Unknown"
                session_data["platform"] = "Unknown"

            self.logger.info(
                f"📡 Retrieved complete session data for {session_key}: user={session_data['username']}, title={session_data['full_title']}"
            )
            return session_data

        # Check if we need app context or if we already have one
        if has_app_context():
            return _do_session_lookup()
        # Look for app in the websocket monitor that created this collector
        # (module-level handle set by monitor — runs outside request scope).
        from .monitor import _app_instance

        app = _app_instance
        if app:
            with app.app_context():
                return _do_session_lookup()
        else:
            self.logger.warning("📡 No app context available for session lookup")
            return {}

    except Exception as e:
        # Deliberate best-effort: callers fall back to "Unknown" fields,
        # so any failure here is logged (with traceback) and swallowed.
        self.logger.warning(
            f"Failed to get session from current activity: {e}", exc_info=True
        )
        return {}
49
app/activity/templates/_partials/macros.html
Normal file
49
app/activity/templates/_partials/macros.html
Normal file
@@ -0,0 +1,49 @@
|
||||
{# Generic modal shell: a titled dialog container with the given DOM id.
   `body` supplies pre-rendered content; a {% call %} block (caller()) may
   append more. `attrs` is an optional mapping of extra HTML attributes
   rendered on the wrapper element. Hidden by default via class_name. #}
{% macro modal(id, title='', body=None, class_name='modal hidden', attrs=None) -%}
{% set _attrs = attrs or {} %}
<div id="{{ id }}" class="{{ class_name }}" {% for key, value in _attrs.items() %} {{ key }}="{{ value }}"{% endfor %}>
  <div class="bg-white dark:bg-gray-800 rounded-lg shadow-lg border border-gray-200 dark:border-gray-700 max-w-lg mx-auto">
    <div class="px-4 py-3 border-b border-gray-200 dark:border-gray-700">
      <h3 class="text-lg font-medium text-gray-900 dark:text-white">{{ title }}</h3>
    </div>
    <div class="px-4 py-4 space-y-4">
      {% if body %}
        {{ body }}
      {% endif %}
      {% if caller %}
        {{ caller() }}
      {% endif %}
    </div>
  </div>
</div>
{%- endmacro %}
{# Animated loading indicator with an optional label; falls back to the
   translated "Loading..." string when no label is given. #}
{% macro loading_spinner(label=None) -%}
<div class="flex items-center justify-center gap-2 text-gray-500 dark:text-gray-300">
  <svg class="animate-spin h-5 w-5 text-current" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24">
    <circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
    <path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"></path>
  </svg>
  <span>{{ label or _("Loading...") }}</span>
</div>
{%- endmacro %}
{# Content card with optional title, description, caller() body and footer.
   All sections are independently optional; only supplied ones render. #}
{% macro card(title=None, description=None, footer=None) -%}
<div class="bg-white dark:bg-gray-800 rounded-lg shadow-sm border border-gray-200 dark:border-gray-700 p-6">
  {% if title %}
    <h3 class="text-sm font-medium text-gray-900 dark:text-white">{{ title }}</h3>
  {% endif %}
  {% if description %}
    <p class="mt-1 text-sm text-gray-500 dark:text-gray-400">{{ description }}</p>
  {% endif %}
  {% if caller %}
    <div class="mt-4 space-y-3">
      {{ caller() }}
    </div>
  {% endif %}
  {% if footer %}
    <div class="mt-4">
      {{ footer }}
    </div>
  {% endif %}
</div>
{%- endmacro %}
332
app/activity/templates/activity/_activity_table.html
Normal file
332
app/activity/templates/activity/_activity_table.html
Normal file
@@ -0,0 +1,332 @@
|
||||
{% if error %}
|
||||
<div class="text-center py-12">
|
||||
<div class="text-red-500 dark:text-red-400">
|
||||
<svg class="w-16 h-16 mx-auto mb-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"/>
|
||||
</svg>
|
||||
<h3 class="text-lg font-medium mb-2">{{ _("Unable to load activity data") }}</h3>
|
||||
<p>{{ error }}</p>
|
||||
</div>
|
||||
</div>
|
||||
{% elif not sessions %}
|
||||
<div class="text-center py-12">
|
||||
<div class="text-gray-500 dark:text-gray-400">
|
||||
<svg class="w-16 h-16 mx-auto mb-4 opacity-50" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z"/>
|
||||
</svg>
|
||||
<h3 class="text-lg font-medium mb-2">{{ _("No activity found") }}</h3>
|
||||
<p>{{ _("No media sessions found for the selected period") }}</p>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<!-- Activity Table -->
|
||||
<div class="relative overflow-x-auto">
|
||||
<table class="w-full text-sm text-left rtl:text-right text-gray-500 dark:text-gray-400">
|
||||
<thead class="text-xs text-gray-700 uppercase bg-gray-50 dark:bg-gray-700 dark:text-gray-400">
|
||||
<tr>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("User") }}
|
||||
</th>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("Server") }}
|
||||
</th>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("Media") }}
|
||||
</th>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("Device") }}
|
||||
</th>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("Duration") }}
|
||||
</th>
|
||||
<th scope="col" class="px-6 py-3">
|
||||
{{ _("Started") }}
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for session in sessions %}
|
||||
{% set duration_seconds = session.display_duration_seconds %}
|
||||
{% set duration_str = "" %}
|
||||
{% if duration_seconds %}
|
||||
{% set total_minutes = (duration_seconds // 60) | int %}
|
||||
{% set hours = (total_minutes // 60) | int %}
|
||||
{% set minutes = (total_minutes % 60) | int %}
|
||||
{% if hours > 0 %}
|
||||
{% set duration_str = "{}h {}m".format(hours, minutes) %}
|
||||
{% else %}
|
||||
{% set duration_str = "{}m".format(minutes) %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
<tr class="bg-white border-b dark:bg-gray-800 dark:border-gray-700 hover:bg-gray-50 dark:hover:bg-gray-600">
|
||||
<!-- User -->
|
||||
<td class="px-6 py-4">
|
||||
<div class="flex items-center">
|
||||
<svg class="w-4 h-4 mr-2 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z"/>
|
||||
</svg>
|
||||
<span class="text-sm text-gray-900 dark:text-white">{{ session.display_user_name }}</span>
|
||||
</div>
|
||||
</td>
|
||||
|
||||
<!-- Server -->
|
||||
<td class="px-6 py-4">
|
||||
{% if session.server_type and session.server_name %}
|
||||
{{ session.server_type|server_name_tag(session.server_name) }}
|
||||
{% else %}
|
||||
<span class="text-sm text-gray-400">{{ _("Unknown") }}</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
|
||||
<!-- Media -->
|
||||
<td class="px-6 py-4">
|
||||
<div class="flex items-center">
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="text-sm font-medium text-gray-900 dark:text-white truncate max-w-xs" title="{{ session.media_title }}">
|
||||
{{ session.media_title }}
|
||||
</div>
|
||||
{% if session.series_name %}
|
||||
<div class="text-xs text-gray-600 dark:text-gray-300 truncate">
|
||||
{{ session.series_name }}
|
||||
{% if session.season_number and session.episode_number %}
|
||||
• S{{ session.season_number }}E{{ session.episode_number }}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if session.media_type %}
|
||||
<span class="inline-flex items-center px-2 py-1 mt-1 rounded-full text-xs font-medium bg-blue-100 text-blue-800 dark:bg-blue-800 dark:text-blue-100 capitalize">
|
||||
{{ session.media_type }}
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
|
||||
<!-- Device -->
|
||||
<td class="px-6 py-4">
|
||||
<div class="flex items-center">
|
||||
<svg class="w-4 h-4 mr-2 text-gray-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9.75 17L9 20l-1 1h8l-1-1-.75-3M3 13h18M5 17h14a2 2 0 002-2V5a2 2 0 00-2-2H5a2 2 0 00-2 2v10a2 2 0 002 2z"/>
|
||||
</svg>
|
||||
<span class="text-sm text-gray-600 dark:text-gray-300">{{ session.device_name or session.client_name or _("Unknown Device") }}</span>
|
||||
</div>
|
||||
</td>
|
||||
|
||||
<!-- Status -->
|
||||
<!-- Duration -->
|
||||
<td class="px-6 py-4">
|
||||
{% if session.is_active and session.started_at %}
|
||||
<div class="live-timer font-medium text-green-600 dark:text-green-400"
|
||||
data-start-time="{{ session.started_at.isoformat() }}"
|
||||
data-session-id="{{ session.id }}">
|
||||
<span class="timer-display">00:00</span>
|
||||
<div class="text-xs text-gray-500 dark:text-gray-400">{{ _("Live") }}</div>
|
||||
</div>
|
||||
{% elif duration_str %}
|
||||
<div class="text-sm text-gray-900 dark:text-white">
|
||||
{{ duration_str }}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="text-sm text-gray-400">{{ _("N/A") }}</div>
|
||||
{% endif %}
|
||||
</td>
|
||||
|
||||
<!-- Started -->
|
||||
<td class="px-6 py-4">
|
||||
<div class="text-sm text-gray-900 dark:text-white">
|
||||
{{ session.started_at|local_date if session.started_at else '-' }}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<!-- Pagination using Flowbite design -->
|
||||
{% if total_pages > 1 %}
|
||||
<div class="flex flex-col items-center mt-6">
|
||||
<!-- Flowbite Table Pagination -->
|
||||
<div class="flex flex-col items-center">
|
||||
<!-- Help text -->
|
||||
<span class="text-sm text-gray-700 dark:text-gray-400">
|
||||
{{ _("Showing") }} <span class="font-semibold text-gray-900 dark:text-white">{{ ((page - 1) * 20) + 1 }}</span>
|
||||
{{ _("to") }} <span class="font-semibold text-gray-900 dark:text-white">{{ ((page - 1) * 20) + sessions|length }}</span>
|
||||
{{ _("of") }} <span class="font-semibold text-gray-900 dark:text-white">{{ total_count }}</span> {{ _("entries") }}
|
||||
</span>
|
||||
|
||||
<!-- Buttons -->
|
||||
<div class="inline-flex mt-2 xs:mt-0">
|
||||
{% if has_prev %}
|
||||
<button onclick="paginateTo({{ page - 1 }})"
|
||||
class="flex items-center justify-center px-3 h-8 text-sm font-medium text-gray-500 bg-white border border-gray-300 rounded-s hover:bg-gray-100 hover:text-gray-700 dark:bg-gray-800 dark:border-gray-700 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white">
|
||||
{{ _("Prev") }}
|
||||
</button>
|
||||
{% else %}
|
||||
<span class="flex items-center justify-center px-3 h-8 text-sm font-medium text-gray-300 bg-gray-100 border border-gray-300 rounded-s cursor-not-allowed dark:bg-gray-700 dark:border-gray-600 dark:text-gray-500">
|
||||
{{ _("Prev") }}
|
||||
</span>
|
||||
{% endif %}
|
||||
|
||||
{% if has_next %}
|
||||
<button onclick="paginateTo({{ page + 1 }})"
|
||||
class="flex items-center justify-center px-3 h-8 text-sm font-medium text-gray-500 bg-white border border-gray-300 border-l-0 rounded-e hover:bg-gray-100 hover:text-gray-700 dark:bg-gray-800 dark:border-gray-700 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white">
|
||||
{{ _("Next") }}
|
||||
</button>
|
||||
{% else %}
|
||||
<span class="flex items-center justify-center px-3 h-8 text-sm font-medium text-gray-300 bg-gray-100 border border-gray-300 border-l-0 rounded-e cursor-not-allowed dark:bg-gray-700 dark:border-gray-600 dark:text-gray-500">
|
||||
{{ _("Next") }}
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
|
||||
<script>
|
||||
// Navigate the activity table to the given page via HTMX.
// Rebuilds the query string from the current URL so active filters
// survive pagination, then swaps the table container in place.
function paginateTo(page) {
    // Build query parameters from current URL but replace page number
    const params = new URLSearchParams(window.location.search);
    params.set('page', page);

    // Remove days parameter since history shows all data
    params.delete('days');

    // Make the HTMX request (partial swap into the table container only)
    htmx.ajax('GET', '{{ url_for("activity.activity_grid") }}?' + params.toString(), {
        target: '#activity-table-container'
    });
}
|
||||
</script>
|
||||
|
||||
<script>
|
||||
// Live timer functionality
|
||||
// Refresh every .live-timer element with the elapsed time since its
// data-start-time attribute. Called once on load and then every second.
function updateLiveTimers() {
    const timers = document.querySelectorAll('.live-timer');
    const now = new Date();

    timers.forEach(timer => {
        // Ensure start time is treated as UTC by appending 'Z' when absent.
        // NOTE(review): this assumes the server emits naive ISO timestamps;
        // a string that already carries a +HH:MM offset would be broken by
        // the 'Z' suffix — confirm the backend always sends UTC-naive.
        const startTimeStr = timer.dataset.startTime;
        const startTime = new Date(startTimeStr.endsWith('Z') ? startTimeStr : startTimeStr + 'Z');
        const elapsed = Math.floor((now - startTime) / 1000); // seconds

        // Calculate hours, minutes, seconds
        const hours = Math.floor(elapsed / 3600);
        const minutes = Math.floor((elapsed % 3600) / 60);
        const seconds = elapsed % 60;

        // Format display: H:MM:SS when an hour has passed, else M:SS
        let display = '';
        if (hours > 0) {
            display = `${hours}:${minutes.toString().padStart(2, '0')}:${seconds.toString().padStart(2, '0')}`;
        } else {
            display = `${minutes}:${seconds.toString().padStart(2, '0')}`;
        }

        const displayElement = timer.querySelector('.timer-display');
        if (displayElement) {
            displayElement.textContent = display;
        }
    });
}
|
||||
|
||||
// Start timer immediately since this script runs after HTMX loads the content
|
||||
(function() {
|
||||
// Update timers immediately
|
||||
updateLiveTimers();
|
||||
|
||||
// Then update every second
|
||||
setInterval(updateLiveTimers, 1000);
|
||||
})();
|
||||
|
||||
// Also update timers when new content is loaded via HTMX
|
||||
document.addEventListener('htmx:afterSwap', function() {
|
||||
updateLiveTimers();
|
||||
});
|
||||
|
||||
function viewSessionDetails(sessionId) {
|
||||
// Fetch and display session details in a modal
|
||||
fetch(`{{ url_for('activity.activity_session', session_id=0) }}`.replace('0', sessionId))
|
||||
.then(response => response.json())
|
||||
.then(data => {
|
||||
if (data.error) {
|
||||
alert(data.error);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create modal content
|
||||
const modalContent = `
|
||||
<div class="space-y-4">
|
||||
<div>
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">${data.media_title}</h3>
|
||||
${data.series_name ? `<p class="text-sm text-gray-500 dark:text-gray-400">${data.series_name} - S${data.season_number}E${data.episode_number}</p>` : ''}
|
||||
</div>
|
||||
|
||||
<div class="grid grid-cols-2 gap-4 text-sm">
|
||||
<div>
|
||||
<strong class="text-gray-900 dark:text-white">{{ _("User") }}:</strong>
|
||||
<span class="text-gray-600 dark:text-gray-300">${data.display_user_name}</span>
|
||||
</div>
|
||||
<div>
|
||||
<strong class="text-gray-900 dark:text-white">{{ _("Device") }}:</strong>
|
||||
<span class="text-gray-600 dark:text-gray-300">${data.device_name || data.client_name || '{{ _("Unknown") }}'}</span>
|
||||
</div>
|
||||
<div>
|
||||
<strong class="text-gray-900 dark:text-white">{{ _("Started") }}:</strong>
|
||||
<span class="text-gray-600 dark:text-gray-300">${new Date(data.started_at).toLocaleString()}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
${data.transcoding_info && Object.keys(data.transcoding_info).length > 0 ? `
|
||||
<div>
|
||||
<strong class="text-gray-900 dark:text-white block mb-2">{{ _("Transcoding Info") }}:</strong>
|
||||
<div class="bg-gray-50 dark:bg-gray-700 rounded p-3 text-sm">
|
||||
<pre class="whitespace-pre-wrap">${JSON.stringify(data.transcoding_info, null, 2)}</pre>
|
||||
</div>
|
||||
</div>
|
||||
` : ''}
|
||||
</div>
|
||||
`;
|
||||
|
||||
// Show modal (you'll need to implement this based on your modal system)
|
||||
showSessionModal(modalContent);
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error fetching session details:', error);
|
||||
alert('{{ _("Failed to load session details") }}');
|
||||
});
|
||||
}
|
||||
|
||||
function showSessionModal(content) {
|
||||
// Simple modal implementation - you can enhance this
|
||||
const modal = document.createElement('div');
|
||||
modal.className = 'fixed inset-0 z-50 flex items-center justify-center bg-black bg-opacity-50';
|
||||
modal.innerHTML = `
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow-xl max-w-2xl w-full mx-4 max-h-screen overflow-auto">
|
||||
<div class="p-6">
|
||||
<div class="flex justify-between items-center mb-4">
|
||||
<h2 class="text-xl font-semibold text-gray-900 dark:text-white">{{ _("Session Details") }}</h2>
|
||||
<button onclick="this.closest('.fixed').remove()" class="text-gray-400 hover:text-gray-600 dark:hover:text-gray-200">
|
||||
<svg class="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
${content}
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
|
||||
document.body.appendChild(modal);
|
||||
|
||||
// Close on background click
|
||||
modal.addEventListener('click', (e) => {
|
||||
if (e.target === modal) {
|
||||
modal.remove();
|
||||
}
|
||||
});
|
||||
}
|
||||
</script>
|
||||
110
app/activity/templates/activity/dashboard.html
Normal file
110
app/activity/templates/activity/dashboard.html
Normal file
@@ -0,0 +1,110 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{ _("Activity Monitoring") }} - Wizarr{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<div class="container mx-auto px-4 py-6 max-w-7xl">
|
||||
<!-- Header -->
|
||||
<div class="mb-6">
|
||||
<h1 class="text-3xl font-bold text-gray-900 dark:text-white">{{ _("Activity Monitoring") }}</h1>
|
||||
<p class="mt-1 text-sm text-gray-500 dark:text-gray-400">{{ _("Real-time and historical media playback activity") }}</p>
|
||||
</div>
|
||||
|
||||
{% if error %}
|
||||
<div class="bg-red-50 dark:bg-red-900/50 border border-red-200 dark:border-red-800 rounded-lg p-4 mb-6">
|
||||
<div class="flex">
|
||||
<svg class="flex-shrink-0 w-5 h-5 text-red-400" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"></path>
|
||||
</svg>
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">{{ error }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Activity Table -->
|
||||
<section class="bg-gray-50 dark:bg-gray-900 p-3 sm:p-5">
|
||||
<div class="mx-auto max-w-screen-xl px-4 lg:px-12">
|
||||
<div class="bg-white dark:bg-gray-800 relative shadow-md sm:rounded-lg overflow-hidden">
|
||||
<div class="flex flex-col md:flex-row items-center justify-between space-y-3 md:space-y-0 p-4 gap-4">
|
||||
<div class="w-full md:w-2/5">
|
||||
<form class="flex items-center" method="GET" hx-get="{{ url_for('activity.activity_dashboard') }}" hx-target="body" hx-push-url="true">
|
||||
<label for="simple-search" class="sr-only">{{ _("Search") }}</label>
|
||||
<div class="relative w-full">
|
||||
<div class="absolute inset-y-0 left-0 flex items-center pl-3 pointer-events-none">
|
||||
<svg aria-hidden="true" class="w-5 h-5 text-gray-500 dark:text-gray-400" fill="currentColor" viewbox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" d="M8 4a4 4 0 100 8 4 4 0 000-8zM2 8a6 6 0 1110.89 3.476l4.817 4.817a1 1 0 01-1.414 1.414l-4.816-4.816A6 6 0 012 8z" clip-rule="evenodd" />
|
||||
</svg>
|
||||
</div>
|
||||
<input type="text" id="simple-search" name="user_name" value="{{ selected_user_name or '' }}" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full pl-10 p-2.5 h-10 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" placeholder="{{ _("Search by username...") }}">
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="w-full md:w-3/5 flex flex-col md:flex-row space-y-2 md:space-y-0 items-stretch md:items-center justify-end gap-4">
|
||||
<select name="server_id" id="server-filter" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full md:w-48 p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" onchange="filterActivities()">
|
||||
<option value="">{{ _("All Servers") }}</option>
|
||||
{% for server in servers %}
|
||||
<option value="{{ server.id }}" {% if server.id == selected_server_id %}selected{% endif %}>
|
||||
{{ server.server_name }}
|
||||
</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<select name="media_type" id="media-type-filter" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full md:w-48 p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" onchange="filterActivities()">
|
||||
<option value="">{{ _("All Media Types") }}</option>
|
||||
<option value="movie" {% if selected_media_type == 'movie' %}selected{% endif %}>{{ _("Movies") }}</option>
|
||||
<option value="episode" {% if selected_media_type == 'episode' %}selected{% endif %}>{{ _("TV Episodes") }}</option>
|
||||
<option value="track" {% if selected_media_type == 'track' %}selected{% endif %}>{{ _("Music") }}</option>
|
||||
<option value="audiobook" {% if selected_media_type == 'audiobook' %}selected{% endif %}>{{ _("Audiobooks") }}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="overflow-x-auto">
|
||||
<div id="activity-table-container"
|
||||
hx-get="{{ url_for('activity.activity_grid') }}?{{ request.query_string.decode() }}"
|
||||
hx-trigger="load"
|
||||
hx-swap="innerHTML">
|
||||
<!-- Loading placeholder -->
|
||||
<div class="text-center py-12">
|
||||
<div class="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto mb-4"></div>
|
||||
<p class="text-gray-500 dark:text-gray-400">{{ _("Loading activity data...") }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function refreshData() {
|
||||
// Refresh the activity table
|
||||
const params = new URLSearchParams();
|
||||
|
||||
// Get current filter values
|
||||
const userSearch = document.getElementById('simple-search')?.value || '';
|
||||
const serverFilter = document.getElementById('server-filter')?.value || '';
|
||||
const mediaTypeFilter = document.getElementById('media-type-filter')?.value || '';
|
||||
|
||||
// Add parameters if they have values
|
||||
if (userSearch) params.append('user_name', userSearch);
|
||||
if (serverFilter) params.append('server_id', serverFilter);
|
||||
if (mediaTypeFilter) params.append('media_type', mediaTypeFilter);
|
||||
|
||||
// Refresh the table
|
||||
htmx.ajax('GET', '{{ url_for("activity.activity_grid") }}?' + params.toString(), {
|
||||
target: '#activity-table-container'
|
||||
});
|
||||
}
|
||||
|
||||
function filterActivities() {
|
||||
// Apply filters when dropdowns change
|
||||
refreshData();
|
||||
}
|
||||
|
||||
// Auto-refresh every 30 seconds for active sessions
|
||||
setInterval(() => {
|
||||
refreshData();
|
||||
}, 30000);
|
||||
</script>
|
||||
{% endblock %}
|
||||
227
app/activity/templates/activity/dashboard_tab.html
Normal file
227
app/activity/templates/activity/dashboard_tab.html
Normal file
@@ -0,0 +1,227 @@
|
||||
<!-- Dashboard Tab Content -->
|
||||
<div class="space-y-6">
|
||||
{% if error %}
|
||||
<div class="bg-red-50 dark:bg-red-900/50 border border-red-200 dark:border-red-800 rounded-lg p-4">
|
||||
<div class="flex">
|
||||
<svg class="flex-shrink-0 w-5 h-5 text-red-400" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"></path>
|
||||
</svg>
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">{{ error }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
|
||||
<!-- Summary Stats -->
|
||||
<div class="grid grid-cols-1 md:grid-cols-2 xl:grid-cols-4 gap-4">
|
||||
<!-- Total Sessions -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-gray-600 dark:text-gray-400">{{ _("Total Sessions") }}</p>
|
||||
<p class="text-2xl font-bold text-gray-900 dark:text-white">{{ stats.total_sessions|default(0) }}</p>
|
||||
</div>
|
||||
<div class="p-3 bg-blue-100 dark:bg-blue-900 rounded-full">
|
||||
<svg class="w-6 h-6 text-blue-600 dark:text-blue-300" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M14.752 11.168l-3.197-2.132A1 1 0 0010 9.87v4.263a1 1 0 001.555.832l3.197-2.132a1 1 0 000-1.664z"></path>
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M21 12a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Active Users -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-gray-600 dark:text-gray-400">{{ _("Active Users") }}</p>
|
||||
<p class="text-2xl font-bold text-gray-900 dark:text-white">{{ stats.unique_users|default(0) }}</p>
|
||||
</div>
|
||||
<div class="p-3 bg-green-100 dark:bg-green-900 rounded-full">
|
||||
<svg class="w-6 h-6 text-green-600 dark:text-green-300" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 4.354a4 4 0 110 5.292M15 21H3v-1a6 6 0 0112 0v1zm0 0h6v-1a6 6 0 00-9-5.197M13 7a4 4 0 11-8 0 4 4 0 018 0z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Total Watch Time -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-gray-600 dark:text-gray-400">{{ _("Total Watch Time") }}</p>
|
||||
<p class="text-2xl font-bold text-gray-900 dark:text-white">{{ stats.total_watch_time|format_duration }}</p>
|
||||
</div>
|
||||
<div class="p-3 bg-purple-100 dark:bg-purple-900 rounded-full">
|
||||
<svg class="w-6 h-6 text-purple-600 dark:text-purple-300" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Average Session -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-gray-600 dark:text-gray-400">{{ _("Avg Session Length") }}</p>
|
||||
<p class="text-2xl font-bold text-gray-900 dark:text-white">{{ stats.avg_session_length|format_duration }}</p>
|
||||
</div>
|
||||
<div class="p-3 bg-yellow-100 dark:bg-yellow-900 rounded-full">
|
||||
<svg class="w-6 h-6 text-yellow-600 dark:text-yellow-300" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z"></path>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Charts Section -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<!-- Activity Over Time Chart -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-4">{{ _("Activity Over Time") }}</h3>
|
||||
<div id="activity-chart" class="h-64">
|
||||
<!-- Chart will be rendered here -->
|
||||
<canvas id="activity-time-chart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Media Type Distribution -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-4">{{ _("Media Type Distribution") }}</h3>
|
||||
<div id="media-type-chart" class="h-64">
|
||||
<!-- Chart will be rendered here -->
|
||||
<canvas id="media-distribution-chart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Viewing Time Distribution -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-4">{{ _("Viewing Time Distribution (24h)") }}</h3>
|
||||
<div id="viewing-time-chart" class="h-64">
|
||||
<!-- Chart will be rendered here -->
|
||||
<canvas id="viewing-distribution-chart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Day of Week Activity -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white mb-4">{{ _("Activity by Day of Week") }}</h3>
|
||||
<div id="day-of-week-chart" class="h-64">
|
||||
<!-- Chart will be rendered here -->
|
||||
<canvas id="weekday-distribution-chart"></canvas>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Top Content & Users -->
|
||||
<div class="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
<!-- Top Content -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow">
|
||||
<div class="px-6 py-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">{{ _("Most Watched Content") }}</h3>
|
||||
</div>
|
||||
<div class="p-6">
|
||||
<div class="space-y-3">
|
||||
{% for item in stats.top_content[:5] %}
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex-1 min-w-0">
|
||||
<p class="text-sm font-medium text-gray-900 dark:text-white truncate">
|
||||
{{ item.title }}
|
||||
</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||
{{ item.media_type|title }}
|
||||
</p>
|
||||
</div>
|
||||
<div class="ml-4 flex items-center space-x-2">
|
||||
<span class="text-sm text-gray-600 dark:text-gray-300">{{ item.play_count }}</span>
|
||||
<span class="text-xs text-gray-400">{{ _("plays") }}</span>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">{{ _("No content data available") }}</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Top Users -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow">
|
||||
<div class="px-6 py-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">{{ _("Most Active Users") }}</h3>
|
||||
</div>
|
||||
<div class="p-6">
|
||||
<div class="space-y-3">
|
||||
{% for user in stats.top_users[:5] %}
|
||||
<div class="flex items-center justify-between">
|
||||
<div class="flex items-center">
|
||||
<div class="w-8 h-8 bg-gray-200 dark:bg-gray-700 rounded-full flex items-center justify-center">
|
||||
<span class="text-xs font-medium text-gray-600 dark:text-gray-300">
|
||||
{{ user.username[:2]|upper }}
|
||||
</span>
|
||||
</div>
|
||||
<p class="ml-3 text-sm font-medium text-gray-900 dark:text-white">
|
||||
{{ user.username }}
|
||||
</p>
|
||||
</div>
|
||||
<div class="flex items-center space-x-4">
|
||||
<div class="text-right">
|
||||
<p class="text-sm text-gray-900 dark:text-white">{{ user.session_count }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ _("sessions") }}</p>
|
||||
</div>
|
||||
<div class="text-right">
|
||||
<p class="text-sm text-gray-900 dark:text-white">{{ user.total_time|format_duration }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ _("watch time") }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">{{ _("No user data available") }}</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Server Activity -->
|
||||
<div class="bg-white dark:bg-gray-800 rounded-lg shadow">
|
||||
<div class="px-6 py-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">{{ _("Server Activity") }}</h3>
|
||||
</div>
|
||||
<div class="p-6">
|
||||
<div class="space-y-4">
|
||||
{% for server in stats.server_stats %}
|
||||
<div class="flex items-center justify-between p-4 bg-gray-50 dark:bg-gray-700 rounded-lg">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-gray-900 dark:text-white">{{ server.name }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ server.type|title }}</p>
|
||||
</div>
|
||||
<div class="flex items-center space-x-6">
|
||||
<div class="text-center">
|
||||
<p class="text-lg font-semibold text-gray-900 dark:text-white">{{ server.session_count }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ _("Sessions") }}</p>
|
||||
</div>
|
||||
<div class="text-center">
|
||||
<p class="text-lg font-semibold text-gray-900 dark:text-white">{{ server.unique_users }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ _("Users") }}</p>
|
||||
</div>
|
||||
<div class="text-center">
|
||||
<p class="text-lg font-semibold text-gray-900 dark:text-white">{{ server.total_time|format_duration }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">{{ _("Watch Time") }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">{{ _("No server data available") }}</p>
|
||||
{% endfor %}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script id="dashboard-chart-data" type="application/json">
|
||||
{{ stats|tojson }}
|
||||
</script>
|
||||
98
app/activity/templates/activity/history_tab.html
Normal file
98
app/activity/templates/activity/history_tab.html
Normal file
@@ -0,0 +1,98 @@
|
||||
<!-- History Tab Content -->
|
||||
<div>
|
||||
{% if error %}
|
||||
<div class="bg-red-50 dark:bg-red-900/50 border border-red-200 dark:border-red-800 rounded-lg p-4 mb-6">
|
||||
<div class="flex">
|
||||
<svg class="flex-shrink-0 w-5 h-5 text-red-400" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"></path>
|
||||
</svg>
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">{{ error }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<!-- Activity Table -->
|
||||
<section class="bg-gray-50 dark:bg-gray-900 p-3 sm:p-5 rounded-lg">
|
||||
<div class="mx-auto max-w-screen-xl">
|
||||
<div class="bg-white dark:bg-gray-800 relative shadow-md sm:rounded-lg overflow-hidden">
|
||||
<div class="flex flex-col md:flex-row items-center justify-between space-y-3 md:space-y-0 p-4 gap-4">
|
||||
<div class="w-full md:w-2/5">
|
||||
<form class="flex items-center">
|
||||
<label for="simple-search" class="sr-only">{{ _("Search") }}</label>
|
||||
<div class="relative w-full">
|
||||
<div class="absolute inset-y-0 left-0 flex items-center pl-3 pointer-events-none">
|
||||
<svg aria-hidden="true" class="w-5 h-5 text-gray-500 dark:text-gray-400" fill="currentColor" viewbox="0 0 20 20" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" d="M8 4a4 4 0 100 8 4 4 0 000-8zM2 8a6 6 0 1110.89 3.476l4.817 4.817a1 1 0 01-1.414 1.414l-4.816-4.816A6 6 0 012 8z" clip-rule="evenodd" />
|
||||
</svg>
|
||||
</div>
|
||||
<input type="text" id="user-search" name="user_name" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full pl-10 p-2.5 h-10 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" placeholder="{{ _("Search by username...") }}" onkeyup="filterActivities()">
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="w-full md:w-3/5 flex flex-col md:flex-row space-y-2 md:space-y-0 items-stretch md:items-center justify-end gap-4">
|
||||
|
||||
<!-- Server Filter -->
|
||||
<select name="server_id" id="server-filter" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full md:w-48 p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" onchange="filterActivities()">
|
||||
<option value="">{{ _("All Servers") }}</option>
|
||||
{% for server in servers %}
|
||||
<option value="{{ server.id }}">{{ server.server_name }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
|
||||
<!-- Media Type Filter -->
|
||||
<select name="media_type" id="media-type-filter" class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-full md:w-48 p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500" onchange="filterActivities()">
|
||||
<option value="">{{ _("All Media Types") }}</option>
|
||||
<option value="movie">{{ _("Movies") }}</option>
|
||||
<option value="episode">{{ _("TV Episodes") }}</option>
|
||||
<option value="track">{{ _("Music") }}</option>
|
||||
<option value="audiobook">{{ _("Audiobooks") }}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Table Container -->
|
||||
<div class="overflow-x-auto">
|
||||
<div id="activity-table-container"
|
||||
hx-get="{{ url_for('activity.activity_grid') }}"
|
||||
hx-trigger="load"
|
||||
hx-swap="innerHTML">
|
||||
<!-- Loading placeholder -->
|
||||
<div class="text-center py-12">
|
||||
<div class="animate-spin rounded-full h-12 w-12 border-b-2 border-primary mx-auto mb-4"></div>
|
||||
<p class="text-gray-500 dark:text-gray-400">{{ _("Loading activity data...") }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
function filterActivities() {
|
||||
// Build query parameters
|
||||
const params = new URLSearchParams();
|
||||
|
||||
// Get filter values
|
||||
const userSearch = document.getElementById('user-search')?.value;
|
||||
const serverFilter = document.getElementById('server-filter')?.value;
|
||||
const mediaTypeFilter = document.getElementById('media-type-filter')?.value;
|
||||
|
||||
// Add parameters (no days constraint - show all history)
|
||||
if (userSearch) params.append('user_name', userSearch);
|
||||
if (serverFilter) params.append('server_id', serverFilter);
|
||||
if (mediaTypeFilter) params.append('media_type', mediaTypeFilter);
|
||||
|
||||
// Refresh the table with filters
|
||||
htmx.ajax('GET', '{{ url_for("activity.activity_grid") }}?' + params.toString(), {
|
||||
target: '#activity-table-container'
|
||||
});
|
||||
}
|
||||
|
||||
// Auto-refresh every 30 seconds for active sessions
|
||||
setInterval(() => {
|
||||
filterActivities();
|
||||
}, 30000);
|
||||
</script>
|
||||
382
app/activity/templates/activity/index.html
Normal file
382
app/activity/templates/activity/index.html
Normal file
@@ -0,0 +1,382 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{ _("Activity") }}{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<section class="px-4 py-6 sm:px-6 lg:px-8">
|
||||
<div class="max-w-7xl mx-auto">
|
||||
<!-- Activity Header with Time Range Selector -->
|
||||
<div class="mb-8 flex flex-col sm:flex-row sm:items-start sm:justify-between gap-4">
|
||||
<div>
|
||||
<h1 class="text-3xl font-bold text-gray-900 dark:text-white">{{ _("Activity") }}</h1>
|
||||
<p class="mt-2 text-gray-600 dark:text-gray-400">{{ _("Monitor media playback activity and statistics") }}</p>
|
||||
</div>
|
||||
<!-- Time Range Selector -->
|
||||
<div class="flex items-center gap-2">
|
||||
<label class="text-sm text-gray-600 dark:text-gray-400">{{ _("Time Range:") }}</label>
|
||||
<select id="global-time-range"
|
||||
class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-primary-500 focus:border-primary-500 block w-40 p-2 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-primary-500 dark:focus:border-primary-500"
|
||||
onchange="updateTimeRange(this.value)">
|
||||
<option value="1">{{ _("Last 24 hours") }}</option>
|
||||
<option value="7" selected>{{ _("Last 7 days") }}</option>
|
||||
<option value="30">{{ _("Last 30 days") }}</option>
|
||||
<option value="365">{{ _("Last year") }}</option>
|
||||
<option value="0">{{ _("All time") }}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Tabs with Icons -->
|
||||
<div class="border-b border-gray-200 dark:border-gray-700 mb-6">
|
||||
<ul class="flex flex-wrap -mb-px text-sm font-medium text-center text-gray-500 dark:text-gray-400" hx-ext="preload">
|
||||
<li class="me-2">
|
||||
<button
|
||||
data-tab="dashboard"
|
||||
class="activity-tab-btn inline-flex items-center justify-center p-4 border-b-2 border-transparent rounded-t-lg hover:text-gray-600 hover:border-gray-300 dark:hover:text-gray-300 group"
|
||||
hx-get="{{ url_for('activity.dashboard_tab') }}"
|
||||
hx-target="#activity-tab-body"
|
||||
hx-swap="innerHTML"
|
||||
hx-trigger="load,click"
|
||||
preload="mouseover"
|
||||
>
|
||||
<svg class="w-4 h-4 me-2 text-gray-400 group-hover:text-gray-500 dark:text-gray-500 dark:group-hover:text-gray-300" fill="currentColor" viewBox="0 0 24 24">
|
||||
<path d="M3 13h8V3H3v10zm0 8h8v-6H3v6zm10 0h8V11h-8v10zm0-18v6h8V3h-8z"/>
|
||||
</svg>
|
||||
{{ _("Dashboard") }}
|
||||
</button>
|
||||
</li>
|
||||
<li class="me-2">
|
||||
<button
|
||||
data-tab="history"
|
||||
class="activity-tab-btn inline-flex items-center justify-center p-4 border-b-2 border-transparent rounded-t-lg hover:text-gray-600 hover:border-gray-300 dark:hover:text-gray-300 group"
|
||||
hx-get="{{ url_for('activity.history_tab') }}"
|
||||
hx-target="#activity-tab-body"
|
||||
hx-swap="innerHTML"
|
||||
hx-trigger="click"
|
||||
preload="mouseover"
|
||||
>
|
||||
<svg class="w-4 h-4 me-2 text-gray-400 group-hover:text-gray-500 dark:text-gray-500 dark:group-hover:text-gray-300" fill="currentColor" viewBox="0 0 24 24">
|
||||
<path d="M13 3a9 9 0 0 0-9 9H1l3.89 3.89.07.14L9 12H6c0-3.87 3.13-7 7-7s7 3.13 7 7-3.13 7-7 7c-1.93 0-3.68-.79-4.94-2.06l-1.42 1.42A8.954 8.954 0 0 0 13 21a9 9 0 0 0 0-18zm-1 5v5l4.28 2.54.72-1.21-3.5-2.08V8H12z"/>
|
||||
</svg>
|
||||
{{ _("History") }}
|
||||
</button>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<!-- Tab Content -->
|
||||
<div id="activity-tab-body" class="animate__animated animate__fadeIn animated__fastest"></div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{# ───────────────── Tab highlighting for horizontal tabs ───────────────── #}
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js@4.4.0/dist/chart.umd.min.js"></script>
|
||||
<script>
|
||||
// Store current time range globally
|
||||
let currentTimeRange = 7;
|
||||
let dashboardTabLoaded = false;
|
||||
|
||||
// Update time range and refresh active tab (only dashboard tab is affected)
|
||||
function updateTimeRange(days) {
|
||||
currentTimeRange = parseInt(days, 10);
|
||||
|
||||
const activeTab =
|
||||
document.querySelector('.activity-tab-btn.text-primary, .activity-tab-btn.border-primary') ||
|
||||
(window.currentActivityTabButton instanceof HTMLElement ? window.currentActivityTabButton : null);
|
||||
|
||||
if (
|
||||
dashboardTabLoaded &&
|
||||
activeTab &&
|
||||
activeTab.dataset.tab === 'dashboard' &&
|
||||
window.htmx
|
||||
) {
|
||||
window.htmx.trigger(activeTab, 'click');
|
||||
}
|
||||
// History tab is not refreshed - it always shows all data
|
||||
}
|
||||
|
||||
// Active tab highlighting for horizontal tabs with icons
|
||||
document.body.addEventListener("htmx:afterRequest", (evt) => {
|
||||
const btn = evt.detail.elt;
|
||||
if (btn.classList.contains("activity-tab-btn")) {
|
||||
// Reset all tabs to inactive state
|
||||
document.querySelectorAll(".activity-tab-btn").forEach((b) => {
|
||||
b.classList.remove("text-primary", "border-primary");
|
||||
b.classList.add("text-gray-500", "dark:text-gray-400", "border-transparent");
|
||||
|
||||
// Reset icon colors
|
||||
const icon = b.querySelector("svg");
|
||||
if (icon) {
|
||||
icon.classList.remove("text-primary");
|
||||
icon.classList.add("text-gray-400", "group-hover:text-gray-500", "dark:text-gray-500", "dark:group-hover:text-gray-300");
|
||||
}
|
||||
});
|
||||
|
||||
// Set active tab
|
||||
btn.classList.remove("text-gray-500", "dark:text-gray-400", "border-transparent");
|
||||
btn.classList.add("text-primary", "border-primary");
|
||||
|
||||
window.currentActivityTabButton = btn;
|
||||
|
||||
// Set active icon
|
||||
const activeIcon = btn.querySelector("svg");
|
||||
if (activeIcon) {
|
||||
activeIcon.classList.remove("text-gray-400", "group-hover:text-gray-500", "dark:text-gray-500", "dark:group-hover:text-gray-300");
|
||||
activeIcon.classList.add("text-primary");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
document.body.addEventListener('htmx:configRequest', (evt) => {
|
||||
const btn = evt.detail.elt;
|
||||
if (btn.classList && btn.classList.contains('activity-tab-btn')) {
|
||||
if (btn.dataset.tab === 'dashboard') {
|
||||
evt.detail.parameters = evt.detail.parameters || {};
|
||||
evt.detail.parameters.days = currentTimeRange;
|
||||
} else if (evt.detail.parameters && evt.detail.parameters.days) {
|
||||
delete evt.detail.parameters.days;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Modify tab clicks to include current time range (only for dashboard tab)
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
window.currentActivityTabButton = document.querySelector('.activity-tab-btn');
|
||||
});
|
||||
|
||||
// Chart rendering helpers ---------------------------------------------------
|
||||
const sessionsLabel = '{{ _("Sessions") }}';
|
||||
|
||||
function destroyActivityCharts() {
|
||||
if (Array.isArray(window.activityDashboardCharts)) {
|
||||
window.activityDashboardCharts.forEach((chart) => {
|
||||
if (chart && typeof chart.destroy === 'function') {
|
||||
chart.destroy();
|
||||
}
|
||||
});
|
||||
}
|
||||
window.activityDashboardCharts = [];
|
||||
}
|
||||
|
||||
function scheduleChartResize() {
|
||||
const charts = window.activityDashboardCharts;
|
||||
if (!Array.isArray(charts) || charts.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const queueFrame =
|
||||
typeof window.requestAnimationFrame === 'function'
|
||||
? window.requestAnimationFrame.bind(window)
|
||||
: (cb) => setTimeout(cb, 16);
|
||||
|
||||
const resizeCharts = () => {
|
||||
charts.forEach((chart) => {
|
||||
if (chart && typeof chart.resize === 'function') {
|
||||
chart.resize();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
queueFrame(() => {
|
||||
resizeCharts();
|
||||
setTimeout(resizeCharts, 200);
|
||||
});
|
||||
}
|
||||
|
||||
function extractStatsData() {
|
||||
const container = document.getElementById('activity-tab-body');
|
||||
if (!container) {
|
||||
return null;
|
||||
}
|
||||
const dataScript = container.querySelector('#dashboard-chart-data');
|
||||
if (!dataScript) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const raw = dataScript.textContent || dataScript.innerText || '{}';
|
||||
return JSON.parse(raw);
|
||||
} catch (error) {
|
||||
console.error('Failed to parse dashboard stats payload:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Build (or rebuild) all four dashboard charts from the embedded stats
// payload. Safe to call repeatedly: existing charts are destroyed first and
// the new instances are registered on window.activityDashboardCharts.
// No-ops when Chart.js has not loaded or no stats payload is present.
function renderActivityDashboard() {
  if (typeof window.Chart === 'undefined') {
    return;
  }

  const stats = extractStatsData();
  if (!stats) {
    return;
  }

  // Tear down charts from any previous render before creating new ones.
  destroyActivityCharts();

  const charts = [];
  // extractStatsData() returning non-null guarantees this container exists.
  const container = document.getElementById('activity-tab-body');

  // Sessions-over-time line chart.
  const activityCtx = container.querySelector('#activity-time-chart');
  if (activityCtx) {
    charts.push(new Chart(activityCtx, {
      type: 'line',
      data: {
        labels: stats.time_series_labels || [],
        datasets: [{
          label: sessionsLabel,
          data: stats.time_series_data || [],
          borderColor: 'rgb(99, 102, 241)',
          backgroundColor: 'rgba(99, 102, 241, 0.1)',
          tension: 0.3
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        plugins: {
          legend: { display: false }
        },
        scales: {
          y: {
            beginAtZero: true,
            // Session counts are integers; avoid fractional axis ticks.
            ticks: { stepSize: 1 }
          }
        }
      }
    }));
  }

  // Media-type distribution doughnut chart.
  const mediaCtx = container.querySelector('#media-distribution-chart');
  if (mediaCtx) {
    charts.push(new Chart(mediaCtx, {
      type: 'doughnut',
      data: {
        labels: stats.media_type_labels || [],
        datasets: [{
          data: stats.media_type_data || [],
          // Fixed palette; colours repeat if there are more than 5 types.
          backgroundColor: [
            'rgba(99, 102, 241, 0.8)',
            'rgba(34, 197, 94, 0.8)',
            'rgba(251, 146, 60, 0.8)',
            'rgba(168, 85, 247, 0.8)',
            'rgba(250, 204, 21, 0.8)'
          ]
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        plugins: {
          legend: { position: 'bottom' }
        }
      }
    }));
  }

  // Sessions-by-hour-of-day bar chart.
  const viewingCtx = container.querySelector('#viewing-distribution-chart');
  if (viewingCtx) {
    const hourlyLabels = stats.hourly_labels || [];
    const hourlyData = stats.hourly_data || [];
    charts.push(new Chart(viewingCtx, {
      type: 'bar',
      data: {
        labels: hourlyLabels,
        datasets: [{
          label: sessionsLabel,
          data: hourlyData,
          backgroundColor: 'rgba(99, 102, 241, 0.8)',
          borderColor: 'rgb(99, 102, 241)',
          borderWidth: 1,
          categoryPercentage: 0.5,
          barPercentage: 0.7,
          borderRadius: 3,
          maxBarThickness: 18
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        plugins: {
          legend: { display: false }
        },
        scales: {
          y: {
            beginAtZero: true,
            ticks: { stepSize: 1 }
          },
          x: {
            grid: { display: false },
            ticks: {
              autoSkip: false,
              // Show every 4th hour label to keep the axis readable.
              callback: (value, index) => (index % 4 === 0 ? hourlyLabels[index] : ''),
              maxRotation: 0,
              minRotation: 0
            }
          }
        }
      }
    }));
  }

  // Sessions-by-weekday bar chart.
  const weekdayCtx = container.querySelector('#weekday-distribution-chart');
  if (weekdayCtx) {
    charts.push(new Chart(weekdayCtx, {
      type: 'bar',
      data: {
        labels: stats.weekday_labels || [],
        datasets: [{
          label: sessionsLabel,
          data: stats.weekday_data || [],
          backgroundColor: 'rgba(34, 197, 94, 0.8)',
          borderColor: 'rgb(34, 197, 94)',
          borderWidth: 1
        }]
      },
      options: {
        responsive: true,
        maintainAspectRatio: false,
        plugins: {
          legend: { display: false }
        },
        scales: {
          y: {
            beginAtZero: true,
            ticks: { stepSize: 1 }
          },
          x: {
            grid: { display: false }
          }
        }
      }
    }));
  }

  // Register the new instances and nudge them to size themselves correctly.
  window.activityDashboardCharts = charts;
  scheduleChartResize();
}
|
||||
|
||||
window.renderActivityDashboard = renderActivityDashboard;
|
||||
|
||||
// htmx:afterSwap hook — re-render the charts whenever the activity tab body
// is the element that was just swapped into the page.
function handleDashboardSwap(event) {
  const swappedId = event?.detail?.target?.id;
  if (swappedId !== 'activity-tab-body') {
    return;
  }
  dashboardTabLoaded = true;
  renderActivityDashboard();
}
|
||||
|
||||
// Wire chart rendering into htmx when it is available, falling back to plain
// DOM events otherwise. Charts are (re)built whenever the activity tab body
// is loaded or swapped in.
if (window.htmx && typeof window.htmx.on === 'function') {
  window.htmx.on('htmx:afterSwap', handleDashboardSwap);
  window.htmx.onLoad((content) => {
    const isTabBody = content?.id === 'activity-tab-body';
    const target = isTabBody
      ? content
      : content?.querySelector?.('#activity-tab-body');
    if (target) {
      renderActivityDashboard();
    }
  });
} else {
  document.addEventListener('DOMContentLoaded', renderActivityDashboard);
  document.body.addEventListener('htmx:afterSwap', handleDashboardSwap);
}
|
||||
</script>
|
||||
{% endblock %}
|
||||
6
app/activity/templates/activity/settings.html
Normal file
6
app/activity/templates/activity/settings.html
Normal file
@@ -0,0 +1,6 @@
|
||||
{% extends "base.html" %}
|
||||
{% block title %}{{ _("Activity") }} - Wizarr{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% include "activity/settings_tab.html" %}
|
||||
{% endblock %}
|
||||
153
app/activity/templates/activity/settings_tab.html
Normal file
153
app/activity/templates/activity/settings_tab.html
Normal file
@@ -0,0 +1,153 @@
|
||||
{% from "_partials/macros.html" import loading_spinner %}
|
||||
|
||||
<div class="container mx-auto px-4 py-6 max-w-4xl">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div>
|
||||
<h1 class="text-3xl font-bold text-gray-900 dark:text-white">{{ _("Activity") }}</h1>
|
||||
<p class="mt-1 text-sm text-gray-500 dark:text-gray-400">{{ _("Manage activity monitoring and historical data") }}</p>
|
||||
</div>
|
||||
<a href="{{ url_for('activity.activity_dashboard') }}" class="btn btn-secondary">
|
||||
<svg class="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2-2V5a2 2 0 012-2h2a2 2 0 012 2v6a2 2 0 002 2h2a2 2 0 012-2V5a2 2 0 012-2h2a2 2 0 012 2v4a2 2 0 01-2 2h-2a2 2 0 01-2 2z"/>
|
||||
</svg>
|
||||
{{ _("View Dashboard") }}
|
||||
</a>
|
||||
</div>
|
||||
|
||||
{% if error %}
|
||||
<div class="bg-red-50 dark:bg-red-900/50 border border-red-200 dark:border-red-800 rounded-lg p-4 mb-6">
|
||||
<div class="flex">
|
||||
<svg class="flex-shrink-0 w-5 h-5 text-red-400" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"></path>
|
||||
</svg>
|
||||
<div class="ml-3">
|
||||
<h3 class="text-sm font-medium text-red-800 dark:text-red-200">{{ error }}</h3>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<section class="bg-white dark:bg-gray-800 rounded-lg shadow-sm border border-gray-200 dark:border-gray-700 mb-6">
|
||||
<div class="px-6 py-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h2 class="text-lg font-medium text-gray-900 dark:text-white">{{ _("Monitoring Status") }}</h2>
|
||||
</div>
|
||||
<div class="p-6 space-y-4">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<h3 class="text-sm font-medium text-gray-900 dark:text-white">{{ _("Activity Monitoring") }}</h3>
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||
{% if status.monitoring_enabled %}
|
||||
{{ _("Real-time activity monitoring is enabled") }}
|
||||
{% else %}
|
||||
{{ _("Activity monitoring is not available") }}
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
{% if status.monitoring_enabled %}
|
||||
<span class="inline-flex items-center px-3 py-1 rounded-full text-sm font-medium bg-green-100 text-green-800 dark:bg-green-800 dark:text-green-100">
|
||||
<svg class="w-4 h-4 mr-1" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zm3.707-9.293a1 1 0 00-1.414-1.414L9 10.586 7.707 9.293a1 1 0 00-1.414 1.414l2 2a1 1 0 001.414 0l4-4z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
{{ _("Active") }}
|
||||
</span>
|
||||
{% else %}
|
||||
<span class="inline-flex items-center px-3 py-1 rounded-full text-sm font-medium bg-red-100 text-red-800 dark:bg-red-800 dark:text-red-100">
|
||||
<svg class="w-4 h-4 mr-1" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
{{ _("Inactive") }}
|
||||
</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% if status.monitoring_enabled %}
|
||||
<div>
|
||||
<h4 class="text-sm font-medium text-gray-900 dark:text-white mb-3">{{ _("Server Connections") }}</h4>
|
||||
{% if status.connection_status %}
|
||||
<div class="space-y-3">
|
||||
{% for server_id, conn_status in status.connection_status.items() %}
|
||||
<div class="flex items-center justify-between p-3 bg-gray-50 dark:bg-gray-700 rounded-lg">
|
||||
<div class="flex items-center">
|
||||
<span class="w-3 h-3 rounded-full {% if conn_status.connected %}bg-green-400{% else %}bg-red-400{% endif %}"></span>
|
||||
<div class="ml-3">
|
||||
<p class="text-sm font-medium text-gray-900 dark:text-white">{{ _("Server") }} #{{ server_id }}</p>
|
||||
<p class="text-xs text-gray-500 dark:text-gray-400">
|
||||
{{ _("Events: {}").format(conn_status.event_count) }}
|
||||
{% if conn_status.errors > 0 %}
|
||||
• {{ _("Errors: {}").format(conn_status.errors) }}
|
||||
{% endif %}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-xs text-gray-500 dark:text-gray-400">
|
||||
{% if conn_status.last_event %}
|
||||
{{ _("Last event: {}").format(conn_status.last_event) }}
|
||||
{% else %}
|
||||
{{ _("No events yet") }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">{{ _("No server connections active") }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
<div class="flex justify-end">
|
||||
<button onclick="restartMonitoring()" class="btn btn-secondary">
|
||||
<svg class="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15"/>
|
||||
</svg>
|
||||
{{ _("Restart Monitoring") }}
|
||||
</button>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section class="bg-white dark:bg-gray-800 rounded-lg shadow-sm border border-gray-200 dark:border-gray-700">
|
||||
<div class="px-6 py-4 border-b border-gray-200 dark:border-gray-700">
|
||||
<h3 class="text-lg font-medium text-gray-900 dark:text-white">{{ _("Historical Data Import") }}</h3>
|
||||
<p class="mt-1 text-sm text-gray-500 dark:text-gray-400">{{ _("Import and manage historical viewing data from media servers") }}</p>
|
||||
</div>
|
||||
<div class="p-6 space-y-4">
|
||||
<p class="text-sm text-gray-500 dark:text-gray-400">
|
||||
{{ _("Historical import and cleanup tasks are available in Plus settings. Links below will open the Plus configuration panel.") }}
|
||||
</p>
|
||||
<div class="flex flex-wrap gap-3">
|
||||
<a href="{{ url_for('plus_settings.settings_tab') }}" class="btn btn-primary">
|
||||
{{ _("Open Plus Settings") }}
|
||||
</a>
|
||||
<a href="{{ url_for('plus_settings.settings_tab') }}#historical" class="btn btn-secondary">
|
||||
{{ _("Manage Historical Data") }}
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// POST a restart request to the activity settings endpoint and report the
// outcome through the global toast helper. Network and JSON-parse failures
// both fall through to the generic failure toast.
function restartMonitoring() {
  const payload = new URLSearchParams({action: 'restart_monitoring'});
  fetch('{{ url_for("activity.activity_settings") }}', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      'HX-Request': 'true'
    },
    body: payload
  })
    .then((response) => response.json())
    .then((data) => {
      if (data.success) {
        window.toast?.success(data.message || '{{ _("Monitoring restarted") }}');
      } else {
        window.toast?.error(data.message || '{{ _("Failed to restart monitoring") }}');
      }
    })
    .catch(() => {
      window.toast?.error('{{ _("Failed to restart monitoring") }}');
    });
}
|
||||
</script>
|
||||
@@ -1,3 +1,5 @@
|
||||
from app.activity.api.blueprint import activity_bp
|
||||
|
||||
from .admin.routes import admin_bp
|
||||
from .admin_accounts.routes import admin_accounts_bp
|
||||
from .api.api_routes import api_bp
|
||||
@@ -28,6 +30,7 @@ all_blueprints = (
|
||||
public_bp,
|
||||
wizard_bp,
|
||||
admin_bp,
|
||||
activity_bp,
|
||||
auth_bp,
|
||||
settings_bp,
|
||||
connections_bp,
|
||||
|
||||
@@ -1098,14 +1098,5 @@ def plus_audit():
|
||||
@admin_bp.route("/activity")
|
||||
@login_required
|
||||
def activity():
|
||||
"""Redirect to activity monitoring dashboard - Plus feature."""
|
||||
try:
|
||||
import plus
|
||||
|
||||
if not plus.is_plus_enabled():
|
||||
return redirect(url_for("admin.dashboard"))
|
||||
except ImportError:
|
||||
return redirect(url_for("admin.dashboard"))
|
||||
|
||||
# Redirect to the activity blueprint
|
||||
"""Redirect to activity monitoring dashboard."""
|
||||
return redirect(url_for("activity.activity_dashboard"))
|
||||
|
||||
@@ -10,7 +10,7 @@ try:
|
||||
except (
|
||||
ImportError
|
||||
): # pragma: no cover - Python <3.9 not officially supported but handle gracefully
|
||||
ZoneInfo = None
|
||||
ZoneInfo = None # type: ignore[assignment]
|
||||
|
||||
# Mapping of server types to their desired pastel background colours
|
||||
_SERVER_TAG_COLOURS = {
|
||||
|
||||
@@ -5,7 +5,7 @@ Periodically checks license validity and can disable plus features if license be
|
||||
|
||||
import os
|
||||
import threading
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import structlog
|
||||
from flask import Flask, g, jsonify, request
|
||||
@@ -110,7 +110,7 @@ class LicenseValidationMiddleware:
|
||||
try:
|
||||
is_valid, message = license_service.verify_plus_license()
|
||||
|
||||
self.last_validation = datetime.utcnow()
|
||||
self.last_validation = datetime.now(UTC)
|
||||
self.license_valid = is_valid
|
||||
self.validation_message = message
|
||||
|
||||
@@ -122,7 +122,7 @@ class LicenseValidationMiddleware:
|
||||
|
||||
except Exception as e:
|
||||
logger.error("License validation error", error=str(e))
|
||||
self.last_validation = datetime.utcnow()
|
||||
self.last_validation = datetime.now(UTC)
|
||||
self.license_valid = False
|
||||
self.validation_message = f"Validation error: {str(e)}"
|
||||
|
||||
@@ -132,7 +132,7 @@ class LicenseValidationMiddleware:
|
||||
return False
|
||||
|
||||
grace_end = self.last_validation_success + timedelta(seconds=self.grace_period)
|
||||
return datetime.utcnow() < grace_end
|
||||
return datetime.now(UTC) < grace_end
|
||||
|
||||
def _should_block_request(self) -> bool:
|
||||
"""Determine if the current request should be blocked due to license issues."""
|
||||
@@ -154,7 +154,6 @@ class LicenseValidationMiddleware:
|
||||
"/plus/",
|
||||
"/api/plus/",
|
||||
"/hx/plus/",
|
||||
"/activity/",
|
||||
"/audit/",
|
||||
]
|
||||
|
||||
|
||||
226
app/models.py
226
app/models.py
@@ -1,4 +1,6 @@
|
||||
import json
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from flask_login import UserMixin
|
||||
|
||||
@@ -674,3 +676,227 @@ class ExpiredUser(db.Model):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
class ActivitySession(db.Model):
    """One playback session observed on a media server.

    A row is created when a server reports a new stream and updated as the
    stream progresses; point-in-time progress samples live in the related
    :class:`ActivitySnapshot` rows (``snapshots`` relationship below).
    """

    __tablename__ = "activity_session"

    id = db.Column(db.Integer, primary_key=True)
    # Origin server plus the server-side session identifier. session_id is
    # indexed but not unique — presumably only unique per server; TODO confirm.
    server_id = db.Column(db.Integer, db.ForeignKey("media_server.id"), nullable=False)
    session_id = db.Column(db.String, nullable=False, index=True)
    # NOTE(review): purpose of reference_id is not visible here — appears to be
    # an optional link to an external/history record; confirm against ingestion.
    reference_id = db.Column(db.Integer, nullable=True, index=True)
    # User as reported by the media server (name always present, id optional).
    user_name = db.Column(db.String, nullable=False, index=True)
    user_id = db.Column(db.String, nullable=True)
    # What was played; series/season/episode fields are only populated for
    # episodic content.
    media_title = db.Column(db.String, nullable=False)
    media_type = db.Column(db.String, nullable=True, index=True)
    media_id = db.Column(db.String, nullable=True)
    series_name = db.Column(db.String, nullable=True)
    season_number = db.Column(db.Integer, nullable=True)
    episode_number = db.Column(db.Integer, nullable=True)
    # Session timing/state. Defaults are UTC-aware "now" values; the columns
    # themselves are plain DateTime (no tz stored by SQLite).
    started_at = db.Column(
        db.DateTime, nullable=False, index=True, default=lambda: datetime.now(UTC)
    )
    active = db.Column(db.Boolean, nullable=False, default=True, index=True)
    # Milliseconds; exposed in friendlier units via duration_minutes /
    # display_duration_seconds below.
    duration_ms = db.Column(db.BigInteger, nullable=True)
    # Client/device details reported alongside the stream.
    device_name = db.Column(db.String, nullable=True)
    client_name = db.Column(db.String, nullable=True)
    ip_address = db.Column(db.String, nullable=True)
    platform = db.Column(db.String, nullable=True)
    player_version = db.Column(db.String, nullable=True)
    # JSON blobs stored as text — always go through the get_/set_ helpers.
    transcoding_info = db.Column(db.Text, nullable=True)
    session_metadata = db.Column(db.Text, nullable=True)
    artwork_url = db.Column(db.String, nullable=True)
    thumbnail_url = db.Column(db.String, nullable=True)
    # Optional links back to Wizarr's own user / identity records, filled in by
    # the identity-resolution service.
    wizarr_user_id = db.Column(
        db.Integer, db.ForeignKey("user.id"), nullable=True, index=True
    )
    wizarr_identity_id = db.Column(
        db.Integer, db.ForeignKey("identity.id"), nullable=True, index=True
    )
    created_at = db.Column(
        db.DateTime, default=lambda: datetime.now(UTC), nullable=False
    )
    updated_at = db.Column(
        db.DateTime,
        default=lambda: datetime.now(UTC),
        onupdate=lambda: datetime.now(UTC),
        nullable=False,
    )

    server = db.relationship(
        "MediaServer", backref=db.backref("activity_sessions", lazy=True)
    )
    # Snapshots are owned by the session and removed with it (ORM cascade).
    snapshots = db.relationship(
        "ActivitySnapshot", backref="session", lazy=True, cascade="all, delete-orphan"
    )
    # Eager (joined) loads so display_user_name does not trigger extra queries.
    wizarr_user = db.relationship("User", foreign_keys=[wizarr_user_id], lazy="joined")
    wizarr_identity = db.relationship(
        "Identity", foreign_keys=[wizarr_identity_id], lazy="joined"
    )

    # Composite indexes backing the common "per server / per user, by start
    # time" listing queries.
    __table_args__ = (
        db.Index("ix_activity_session_server_started", "server_id", "started_at"),
        db.Index("ix_activity_session_user_started", "user_name", "started_at"),
    )

    def get_transcoding_info(self) -> dict[str, Any]:
        """Deserialize ``transcoding_info``; empty dict on missing/invalid JSON."""
        if not self.transcoding_info:
            return {}
        try:
            return json.loads(self.transcoding_info)
        except (json.JSONDecodeError, TypeError):
            return {}

    def set_transcoding_info(self, info: dict[str, Any]) -> None:
        """Serialize *info* into the text column (``None`` clears it)."""
        if info is None:
            self.transcoding_info = None
        else:
            # default=str keeps non-JSON-native values (e.g. datetimes) serializable.
            self.transcoding_info = json.dumps(info, default=str)

    def get_metadata(self) -> dict[str, Any]:
        """Deserialize ``session_metadata``; empty dict on missing/invalid JSON."""
        if not self.session_metadata:
            return {}
        try:
            return json.loads(self.session_metadata)
        except (json.JSONDecodeError, TypeError):
            return {}

    def set_metadata(self, metadata: dict[str, Any]) -> None:
        """Serialize *metadata* into the text column (``None`` clears it)."""
        if metadata is None:
            self.session_metadata = None
        else:
            self.session_metadata = json.dumps(metadata, default=str)

    @property
    def duration_minutes(self) -> float | None:
        """Duration in minutes, or ``None`` when no duration was recorded."""
        if self.duration_ms is None:
            return None
        return self.duration_ms / (1000 * 60)

    @property
    def is_active(self) -> bool:
        """Whether this session is still marked active."""
        return bool(self.active)

    @property
    def display_duration_seconds(self) -> int | None:
        """Whole seconds for UI display; ``None`` when duration is unset or zero."""
        if self.duration_ms:
            return max(int(self.duration_ms // 1000), 0)
        return None

    def to_dict(self) -> dict[str, Any]:
        """Serialize the session (raw columns plus derived fields) for the API layer."""
        return {
            "id": self.id,
            "server_id": self.server_id,
            "session_id": self.session_id,
            "user_name": self.user_name,
            "user_id": self.user_id,
            "media_title": self.media_title,
            "media_type": self.media_type,
            "media_id": self.media_id,
            "series_name": self.series_name,
            "season_number": self.season_number,
            "episode_number": self.episode_number,
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "duration_ms": self.duration_ms,
            "device_name": self.device_name,
            "client_name": self.client_name,
            "ip_address": self.ip_address,
            "platform": self.platform,
            "player_version": self.player_version,
            "transcoding_info": self.get_transcoding_info(),
            "metadata": self.get_metadata(),
            "artwork_url": self.artwork_url,
            "thumbnail_url": self.thumbnail_url,
            "duration_minutes": self.duration_minutes,
            "display_duration_seconds": self.display_duration_seconds,
            "is_active": self.is_active,
            "active": bool(self.active),
            "wizarr_user_id": self.wizarr_user_id,
            "wizarr_identity_id": self.wizarr_identity_id,
            "display_user_name": self.display_user_name,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }

    @property
    def display_user_name(self) -> str:
        """Best human-readable user name for display.

        Preference order: a pre-resolved name stashed on the instance by the
        query layer (``_resolved_identity_name``, avoids per-row relationship
        loads), then the linked identity's nickname/primary username, then the
        raw server-reported ``user_name``.
        """
        if hasattr(self, "_resolved_identity_name") and self._resolved_identity_name:
            return self._resolved_identity_name

        identity = None
        if self.wizarr_identity:
            identity = self.wizarr_identity
        elif getattr(self, "wizarr_user", None) and getattr(
            self.wizarr_user, "identity", None
        ):
            identity = self.wizarr_user.identity

        if identity:
            return identity.nickname or identity.primary_username or self.user_name

        return self.user_name
|
||||
|
||||
|
||||
class ActivitySnapshot(db.Model):
    """A point-in-time progress sample belonging to an :class:`ActivitySession`."""

    __tablename__ = "activity_snapshot"

    id = db.Column(db.Integer, primary_key=True)
    # Owning session; DB-level ON DELETE CASCADE removes snapshots with it.
    session_id = db.Column(
        db.Integer,
        db.ForeignKey("activity_session.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # When this sample was taken (UTC-aware default).
    timestamp = db.Column(
        db.DateTime, nullable=False, index=True, default=lambda: datetime.now(UTC)
    )
    # Playback position in milliseconds (see position_minutes below).
    position_ms = db.Column(db.BigInteger, nullable=True)
    # Player state string — values come from the ingestion layer; the exact
    # vocabulary (e.g. playing/paused) is not visible here — TODO confirm.
    state = db.Column(db.String, nullable=False, index=True)
    # Stream details at sample time; transcoding_details is JSON stored as text.
    transcoding_details = db.Column(db.Text, nullable=True)
    bandwidth_kbps = db.Column(db.Integer, nullable=True)
    quality = db.Column(db.String, nullable=True)
    subtitle_stream = db.Column(db.String, nullable=True)
    audio_stream = db.Column(db.String, nullable=True)
    created_at = db.Column(
        db.DateTime, default=lambda: datetime.now(UTC), nullable=False
    )

    # Backs "samples for a session in time order" queries.
    __table_args__ = (
        db.Index("ix_activity_snapshot_session_timestamp", "session_id", "timestamp"),
    )

    def get_transcoding_details(self) -> dict[str, Any]:
        """Deserialize ``transcoding_details``; empty dict on missing/invalid JSON."""
        if not self.transcoding_details:
            return {}
        try:
            return json.loads(self.transcoding_details)
        except (json.JSONDecodeError, TypeError):
            return {}

    def set_transcoding_details(self, details: dict[str, Any]) -> None:
        """Serialize *details* into the text column (``None`` clears it)."""
        if details is None:
            self.transcoding_details = None
        else:
            # default=str keeps non-JSON-native values serializable.
            self.transcoding_details = json.dumps(details, default=str)

    @property
    def position_minutes(self) -> float | None:
        """Playback position in minutes, or ``None`` when no position was recorded."""
        if self.position_ms is None:
            return None
        return self.position_ms / (1000 * 60)

    def to_dict(self) -> dict[str, Any]:
        """Serialize the snapshot (raw columns plus derived fields) for the API layer."""
        return {
            "id": self.id,
            "session_id": self.session_id,
            "timestamp": self.timestamp.isoformat() if self.timestamp else None,
            "position_ms": self.position_ms,
            "state": self.state,
            "transcoding_details": self.get_transcoding_details(),
            "bandwidth_kbps": self.bandwidth_kbps,
            "quality": self.quality,
            "subtitle_stream": self.subtitle_stream,
            "audio_stream": self.audio_stream,
            "position_minutes": self.position_minutes,
            "created_at": self.created_at.isoformat() if self.created_at else None,
        }
|
||||
|
||||
78
app/services/activity/__init__.py
Normal file
78
app/services/activity/__init__.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""
|
||||
Service facade for Wizarr activity features.
|
||||
|
||||
The facade composes specialised services so callers can continue to use a
|
||||
single entry point while the implementation remains modular.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from app.activity.domain.models import ActivityEvent, ActivityQuery
|
||||
from app.models import ActivitySession
|
||||
from app.services.activity.analytics import ActivityAnalyticsService
|
||||
from app.services.activity.ingestion import ActivityIngestionService
|
||||
from app.services.activity.maintenance import ActivityMaintenanceService
|
||||
from app.services.activity.queries import ActivityQueryService
|
||||
|
||||
|
||||
class ActivityService:
    """Single entry point for Wizarr activity features.

    A thin facade over the specialised ingestion, query, analytics and
    maintenance services: existing callers keep one stable API while the
    implementation remains modular. Every method simply delegates.
    """

    def __init__(self) -> None:
        self.ingestion = ActivityIngestionService()
        self.queries = ActivityQueryService()
        self.analytics = ActivityAnalyticsService()
        self.maintenance = ActivityMaintenanceService()

    # -- event ingestion ----------------------------------------------

    def record_activity_event(self, event: ActivityEvent) -> ActivitySession | None:
        """Persist a raw playback event via the ingestion service."""
        return self.ingestion.record_activity_event(event)

    # -- read helpers -------------------------------------------------

    def get_activity_sessions(
        self,
        query: ActivityQuery,
    ) -> tuple[list[ActivitySession], int]:
        """Return the sessions matching *query* together with the total count."""
        return self.queries.get_activity_sessions(query)

    def get_active_sessions(
        self,
        server_id: int | None = None,
    ) -> list[ActivitySession]:
        """Return currently-active sessions, optionally restricted to one server."""
        return self.queries.get_active_sessions(server_id)

    def get_user_activity(
        self,
        user_name: str,
        days: int = 30,
    ) -> list[ActivitySession]:
        """Return *user_name*'s sessions from the last *days* days."""
        return self.queries.get_user_activity(user_name, days)

    def get_server_activity(
        self,
        server_id: int,
        days: int = 7,
    ) -> list[ActivitySession]:
        """Return a server's sessions from the last *days* days."""
        return self.queries.get_server_activity(server_id, days)

    # -- analytics ----------------------------------------------------

    def get_activity_stats(self, days: int = 30) -> dict[str, Any]:
        """Return aggregate statistics over the last *days* days."""
        return self.analytics.get_activity_stats(days)

    def get_dashboard_stats(self, days: int = 7) -> dict[str, Any]:
        """Return the dataset consumed by the activity dashboard."""
        return self.analytics.get_dashboard_stats(days)

    # -- maintenance --------------------------------------------------

    def cleanup_old_activity(self, retention_days: int = 90) -> int:
        """Delegate retention cleanup; returns the number of records removed."""
        return self.maintenance.cleanup_old_activity(retention_days)

    def end_stale_sessions(self, timeout_hours: int = 24) -> int:
        """Delegate stale-session closing; returns the number of sessions ended."""
        return self.maintenance.end_stale_sessions(timeout_hours)

    def recover_sessions_on_startup(self) -> int:
        """Delegate startup session recovery; returns the number of sessions touched."""
        return self.maintenance.recover_sessions_on_startup()


__all__ = ["ActivityService"]
|
||||
446
app/services/activity/analytics.py
Normal file
446
app/services/activity/analytics.py
Normal file
@@ -0,0 +1,446 @@
|
||||
"""
|
||||
Analytics services for Wizarr activity data.
|
||||
|
||||
Contains aggregate/statistical queries used by dashboards and reports.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
import structlog
|
||||
|
||||
try:
|
||||
from app.extensions import db # type: ignore
|
||||
except ImportError: # pragma: no cover - during unit tests
|
||||
db = None # type: ignore
|
||||
|
||||
from app.models import ActivitySession
|
||||
|
||||
|
||||
class ActivityAnalyticsService:
|
||||
"""Provides statistical views over activity sessions."""
|
||||
|
||||
    def __init__(self):
        # One structlog logger per service instance, named after this module.
        self.logger = structlog.get_logger(__name__)
|
||||
|
||||
    def get_activity_stats(self, days: int = 30) -> dict[str, Any]:
        """Return high-level statistics for the given window.

        Aggregates total sessions, distinct users, currently-active sessions,
        a media-type breakdown and the top users over the last ``days`` days.
        Returns an empty dict when the database is unavailable or any query
        fails (errors are logged, never raised).
        """
        # ``db`` is None when app.extensions could not be imported (unit tests).
        if db is None:
            return {}

        try:
            start_date = datetime.now(UTC) - timedelta(days=days)

            # Sessions that started inside the window.
            total_sessions = (
                db.session.query(ActivitySession)
                .filter(ActivitySession.started_at >= start_date)
                .count()
            )

            # Distinct user names seen inside the window.
            unique_users = (
                db.session.query(ActivitySession.user_name)
                .filter(ActivitySession.started_at >= start_date)
                .distinct()
                .count()
            )

            # NOTE: deliberately not window-filtered — "active now" is
            # independent of when the session started.
            active_sessions = (
                db.session.query(ActivitySession)
                .filter(ActivitySession.active.is_(True))
                .count()
            )

            # Top 10 media types by session count (NULL types excluded).
            media_type_stats = (
                db.session.query(
                    ActivitySession.media_type,
                    db.func.count(ActivitySession.id).label("count"),
                )
                .filter(
                    ActivitySession.started_at >= start_date,
                    ActivitySession.media_type.is_not(None),
                )
                .group_by(ActivitySession.media_type)
                .order_by(db.func.count(ActivitySession.id).desc())
                .limit(10)
                .all()
            )

            # Top 10 users by session count.
            user_stats = (
                db.session.query(
                    ActivitySession.user_name,
                    db.func.count(ActivitySession.id).label("session_count"),
                )
                .filter(ActivitySession.started_at >= start_date)
                .group_by(ActivitySession.user_name)
                .order_by(db.func.count(ActivitySession.id).desc())
                .limit(10)
                .all()
            )

            return {
                "period_days": days,
                "total_sessions": total_sessions,
                "unique_users": unique_users,
                "active_sessions": active_sessions,
                "media_type_breakdown": [
                    {"media_type": stat[0], "count": stat[1]}
                    for stat in media_type_stats
                ],
                "top_users": [
                    {"user_name": stat[0], "session_count": stat[1]}
                    for stat in user_stats
                ],
            }

        except Exception as exc:  # pragma: no cover - log and fallback
            self.logger.error("Failed to get activity stats: %s", exc, exc_info=True)
            return {}
|
||||
|
||||
def get_dashboard_stats(self, days: int = 7) -> dict[str, Any]:
|
||||
"""Return the rich dataset used by the activity dashboard."""
|
||||
if db is None:
|
||||
return self._get_empty_dashboard_stats()
|
||||
|
||||
try:
|
||||
from sqlalchemy import and_, case, extract, func, or_
|
||||
|
||||
from app.models import MediaServer
|
||||
|
||||
filters = []
|
||||
start_date = None
|
||||
if days != 0:
|
||||
start_date = datetime.now(UTC) - timedelta(days=days)
|
||||
filters.append(ActivitySession.started_at >= start_date)
|
||||
|
||||
watch_time_expr = case(
|
||||
(
|
||||
and_(
|
||||
ActivitySession.duration_ms.is_not(None),
|
||||
ActivitySession.duration_ms > 0,
|
||||
),
|
||||
ActivitySession.duration_ms,
|
||||
),
|
||||
else_=0,
|
||||
)
|
||||
|
||||
total_sessions_query = db.session.query(func.count(ActivitySession.id))
|
||||
unique_users_query = db.session.query(
|
||||
func.count(func.distinct(ActivitySession.user_name))
|
||||
)
|
||||
total_watch_query = db.session.query(
|
||||
func.coalesce(func.sum(watch_time_expr), 0)
|
||||
)
|
||||
watched_sessions_query = db.session.query(
|
||||
func.coalesce(
|
||||
func.sum(
|
||||
case(
|
||||
(
|
||||
and_(
|
||||
ActivitySession.duration_ms.is_not(None),
|
||||
ActivitySession.duration_ms > 0,
|
||||
),
|
||||
1,
|
||||
),
|
||||
else_=0,
|
||||
)
|
||||
),
|
||||
0,
|
||||
)
|
||||
)
|
||||
|
||||
if filters:
|
||||
total_sessions_query = total_sessions_query.filter(*filters)
|
||||
unique_users_query = unique_users_query.filter(*filters)
|
||||
total_watch_query = total_watch_query.filter(*filters)
|
||||
watched_sessions_query = watched_sessions_query.filter(*filters)
|
||||
|
||||
total_sessions = total_sessions_query.scalar() or 0
|
||||
unique_users = unique_users_query.scalar() or 0
|
||||
total_watch_ms = total_watch_query.scalar() or 0
|
||||
watched_sessions = watched_sessions_query.scalar() or 0
|
||||
|
||||
avg_session_ms = (
|
||||
total_watch_ms / watched_sessions if watched_sessions else 0
|
||||
)
|
||||
total_watch_hours = total_watch_ms / (1000 * 60 * 60)
|
||||
|
||||
def apply_filters(query):
|
||||
return query.filter(*filters) if filters else query
|
||||
|
||||
media_type_data = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
ActivitySession.media_type,
|
||||
func.count(ActivitySession.id).label("count"),
|
||||
).filter(ActivitySession.media_type.is_not(None))
|
||||
)
|
||||
.group_by(ActivitySession.media_type)
|
||||
.all()
|
||||
)
|
||||
|
||||
# Collapse episodes/seasons under their series so top content reflects movies vs. series.
|
||||
media_type_lower = func.lower(func.coalesce(ActivitySession.media_type, ""))
|
||||
series_condition = or_(
|
||||
ActivitySession.series_name.is_not(None),
|
||||
media_type_lower.in_(("episode", "season", "series")),
|
||||
)
|
||||
|
||||
top_title_expr = case(
|
||||
(
|
||||
series_condition,
|
||||
func.coalesce(
|
||||
ActivitySession.series_name, ActivitySession.media_title
|
||||
),
|
||||
),
|
||||
else_=ActivitySession.media_title,
|
||||
).label("display_title")
|
||||
|
||||
top_type_expr = case(
|
||||
(series_condition, "Series"),
|
||||
(media_type_lower == "movie", "Movie"),
|
||||
else_=func.coalesce(ActivitySession.media_type, "Unknown"),
|
||||
).label("content_type")
|
||||
|
||||
top_content = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
top_title_expr,
|
||||
top_type_expr,
|
||||
func.count(ActivitySession.id).label("play_count"),
|
||||
func.sum(watch_time_expr).label("total_time"),
|
||||
).filter(
|
||||
or_(
|
||||
ActivitySession.media_title.is_not(None),
|
||||
ActivitySession.series_name.is_not(None),
|
||||
)
|
||||
)
|
||||
)
|
||||
.group_by(top_title_expr, top_type_expr)
|
||||
.order_by(func.count(ActivitySession.id).desc())
|
||||
.limit(10)
|
||||
.all()
|
||||
)
|
||||
|
||||
top_users = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
ActivitySession.user_name,
|
||||
func.count(ActivitySession.id).label("session_count"),
|
||||
func.sum(watch_time_expr).label("total_time"),
|
||||
)
|
||||
)
|
||||
.group_by(ActivitySession.user_name)
|
||||
.order_by(func.sum(watch_time_expr).desc())
|
||||
.limit(10)
|
||||
.all()
|
||||
)
|
||||
|
||||
time_series = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
func.date(ActivitySession.started_at).label("date"),
|
||||
func.count(ActivitySession.id).label("count"),
|
||||
)
|
||||
)
|
||||
.group_by(func.date(ActivitySession.started_at))
|
||||
.order_by(func.date(ActivitySession.started_at))
|
||||
.all()
|
||||
)
|
||||
|
||||
hourly_distribution = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
extract("hour", ActivitySession.started_at).label("hour"),
|
||||
func.count(ActivitySession.id).label("count"),
|
||||
)
|
||||
)
|
||||
.group_by(extract("hour", ActivitySession.started_at))
|
||||
.all()
|
||||
)
|
||||
|
||||
weekday_distribution = (
|
||||
apply_filters(
|
||||
db.session.query(
|
||||
func.strftime("%w", ActivitySession.started_at).label(
|
||||
"weekday"
|
||||
),
|
||||
func.count(ActivitySession.id).label("count"),
|
||||
)
|
||||
)
|
||||
.group_by(func.strftime("%w", ActivitySession.started_at))
|
||||
.all()
|
||||
)
|
||||
|
||||
join_condition = MediaServer.id == ActivitySession.server_id
|
||||
for filter_expr in filters:
|
||||
join_condition = and_(join_condition, filter_expr)
|
||||
|
||||
server_stats = (
|
||||
db.session.query(
|
||||
MediaServer.id,
|
||||
MediaServer.name,
|
||||
MediaServer.server_type,
|
||||
func.count(ActivitySession.id).label("session_count"),
|
||||
func.count(func.distinct(ActivitySession.user_name)).label(
|
||||
"unique_users"
|
||||
),
|
||||
func.coalesce(func.sum(watch_time_expr), 0).label("total_time"),
|
||||
)
|
||||
.outerjoin(ActivitySession, join_condition)
|
||||
.group_by(
|
||||
MediaServer.id,
|
||||
MediaServer.name,
|
||||
MediaServer.server_type,
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
dates: list[str] = []
|
||||
counts: list[int] = []
|
||||
if start_date:
|
||||
current_date = start_date.date()
|
||||
end_date = datetime.now(UTC).date()
|
||||
|
||||
date_counts = {}
|
||||
for row in time_series:
|
||||
raw_date = getattr(row, "date", None)
|
||||
if raw_date is None:
|
||||
continue
|
||||
|
||||
if hasattr(raw_date, "isoformat"):
|
||||
normalized_date = (
|
||||
raw_date.date() if hasattr(raw_date, "date") else raw_date
|
||||
)
|
||||
elif isinstance(raw_date, str):
|
||||
try:
|
||||
normalized_date = datetime.strptime(
|
||||
raw_date, "%Y-%m-%d"
|
||||
).date()
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
|
||||
date_counts[normalized_date] = int(getattr(row, "count", 0) or 0)
|
||||
|
||||
while current_date <= end_date:
|
||||
dates.append(current_date.strftime("%m/%d"))
|
||||
counts.append(date_counts.get(current_date, 0))
|
||||
current_date += timedelta(days=1)
|
||||
else:
|
||||
from datetime import datetime as dt
|
||||
|
||||
for row in time_series:
|
||||
try:
|
||||
date_obj = dt.strptime(row.date, "%Y-%m-%d").date()
|
||||
dates.append(date_obj.strftime("%m/%d"))
|
||||
counts.append(row.count)
|
||||
except (ValueError, AttributeError):
|
||||
continue
|
||||
|
||||
media_labels = [row.media_type or "Unknown" for row in media_type_data]
|
||||
media_counts = [row.count for row in media_type_data]
|
||||
|
||||
hourly_labels = []
|
||||
hourly_data = [0] * 24
|
||||
hour_counts = {int(row.hour): row.count for row in hourly_distribution}
|
||||
for hour in range(24):
|
||||
if hour == 0:
|
||||
hourly_labels.append("12 AM")
|
||||
elif hour < 12:
|
||||
hourly_labels.append(f"{hour} AM")
|
||||
elif hour == 12:
|
||||
hourly_labels.append("12 PM")
|
||||
else:
|
||||
hourly_labels.append(f"{hour - 12} PM")
|
||||
hourly_data[hour] = hour_counts.get(hour, 0)
|
||||
|
||||
weekday_names = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]
|
||||
weekday_labels = weekday_names
|
||||
weekday_data = [0] * 7
|
||||
day_counts = {int(row.weekday): row.count for row in weekday_distribution}
|
||||
for day in range(7):
|
||||
weekday_data[day] = day_counts.get(day, 0)
|
||||
|
||||
return {
|
||||
"total_sessions": total_sessions,
|
||||
"unique_users": unique_users,
|
||||
"total_watch_time": total_watch_hours,
|
||||
"avg_session_length": avg_session_ms / (1000 * 60 * 60)
|
||||
if avg_session_ms
|
||||
else 0,
|
||||
"top_content": [
|
||||
{
|
||||
"title": row.display_title,
|
||||
"media_type": row.content_type or "Unknown",
|
||||
"play_count": row.play_count,
|
||||
"total_time": row.total_time / (1000 * 60 * 60)
|
||||
if row.total_time
|
||||
else 0,
|
||||
}
|
||||
for row in top_content
|
||||
],
|
||||
"top_users": [
|
||||
{
|
||||
"username": row.user_name,
|
||||
"session_count": row.session_count,
|
||||
"total_time": row.total_time / (1000 * 60 * 60)
|
||||
if row.total_time
|
||||
else 0,
|
||||
}
|
||||
for row in top_users
|
||||
],
|
||||
"time_series_labels": dates,
|
||||
"time_series_data": counts,
|
||||
"media_type_labels": media_labels,
|
||||
"media_type_data": media_counts,
|
||||
"hourly_labels": hourly_labels,
|
||||
"hourly_data": hourly_data,
|
||||
"weekday_labels": weekday_labels,
|
||||
"weekday_data": weekday_data,
|
||||
"server_stats": [
|
||||
{
|
||||
"id": row.id,
|
||||
"name": row.name,
|
||||
"type": row.server_type,
|
||||
"session_count": row.session_count or 0,
|
||||
"unique_users": row.unique_users or 0,
|
||||
"total_time": row.total_time / (1000 * 60 * 60)
|
||||
if row.total_time
|
||||
else 0,
|
||||
}
|
||||
for row in server_stats
|
||||
],
|
||||
}
|
||||
|
||||
except Exception as exc: # pragma: no cover - log and fallback
|
||||
self.logger.error("Failed to get dashboard stats: %s", exc, exc_info=True)
|
||||
return self._get_empty_dashboard_stats()
|
||||
|
||||
def _get_empty_dashboard_stats(self) -> dict[str, Any]:
|
||||
"""Return an empty dataset used when analytics are unavailable."""
|
||||
return {
|
||||
"total_sessions": 0,
|
||||
"unique_users": 0,
|
||||
"total_watch_time": 0,
|
||||
"avg_session_length": 0,
|
||||
"top_content": [],
|
||||
"top_users": [],
|
||||
"time_series_labels": [],
|
||||
"time_series_data": [],
|
||||
"media_type_labels": [],
|
||||
"media_type_data": [],
|
||||
"hourly_labels": ["12 AM"]
|
||||
+ [f"{i} AM" for i in range(1, 12)]
|
||||
+ ["12 PM"]
|
||||
+ [f"{i} PM" for i in range(1, 12)],
|
||||
"hourly_data": [0] * 24,
|
||||
"weekday_labels": ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"],
|
||||
"weekday_data": [0] * 7,
|
||||
"server_stats": [],
|
||||
}
|
||||
|
||||
|
||||
__all__ = ["ActivityAnalyticsService"]
|
||||
131
app/services/activity/identity_resolution.py
Normal file
131
app/services/activity/identity_resolution.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""
|
||||
Utilities for resolving Wizarr users and identities for activity sessions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
try:
|
||||
from app.extensions import db # type: ignore
|
||||
except ImportError: # pragma: no cover
|
||||
db = None # type: ignore
|
||||
|
||||
try:
|
||||
from app.models import Identity, User
|
||||
except ImportError: # pragma: no cover - during testing without app context
|
||||
Identity = None # type: ignore
|
||||
User = None # type: ignore
|
||||
|
||||
|
||||
def _normalise(value: str | None) -> str | None:
|
||||
if value is None:
|
||||
return None
|
||||
value = value.strip()
|
||||
return value.lower() if value else None
|
||||
|
||||
|
||||
def _identity_display_name(
|
||||
identity: Identity | None, fallback: str | None
|
||||
) -> str | None:
|
||||
if identity:
|
||||
return identity.nickname or identity.primary_username or fallback
|
||||
return fallback
|
||||
|
||||
|
||||
def resolve_user_identity(
    server_id: int,
    external_user_id: str | None,
    external_user_name: str | None,
) -> tuple[int | None, int | None, str | None]:
    """
    Resolve an activity event user to a Wizarr user and identity.

    Matching is attempted against the given server's users in three passes,
    all case-insensitive: by ``User.username``, then ``Identity.nickname``,
    then ``Identity.primary_username``. The first hit (lowest user id) wins.

    Returns a tuple of (wizarr_user_id, wizarr_identity_id, display_name);
    all three may be ``None``/the raw external name when nothing matches.
    """
    # NOTE(review): external_user_id is accepted but never used below —
    # matching is done purely by name. Presumably a lookup by external id
    # was intended as a first pass; confirm against the ingestion callers.
    # Bail out when the ORM layer or models are unavailable (e.g. unit tests).
    if db is None or User is None or Identity is None or server_id is None:
        return None, None, external_user_name

    # Base query: users on this server, with their Identity eagerly loaded
    # so _identity_display_name() does not trigger extra queries.
    query = (
        db.session.query(User)
        .filter(User.server_id == server_id)
        .options(joinedload(User.identity))
    )

    match: User | None = None

    normalised_name = _normalise(external_user_name)

    # Pass 1: direct username match.
    if normalised_name:
        match = (
            query.filter(func.lower(User.username) == normalised_name)
            .order_by(User.id.asc())
            .first()
        )

    # Pass 2: match against the identity's nickname.
    if not match and normalised_name:
        match = (
            query.join(Identity, User.identity, isouter=True)
            .filter(func.lower(Identity.nickname) == normalised_name)
            .order_by(User.id.asc())
            .first()
        )

    # Pass 3: match against the identity's primary username.
    if not match and normalised_name:
        match = (
            query.join(Identity, User.identity, isouter=True)
            .filter(func.lower(Identity.primary_username) == normalised_name)
            .order_by(User.id.asc())
            .first()
        )

    display_name = None
    wizarr_user_id = None
    identity_id = None

    if match:
        wizarr_user_id = match.id
        # Normalise falsy identity ids (e.g. 0/None) to None.
        identity_id = match.identity_id if match.identity_id else None
        display_name = _identity_display_name(match.identity, match.username)

    # Fall back to the raw external name when no better label was found.
    if not display_name:
        display_name = external_user_name

    return wizarr_user_id, identity_id, display_name
|
||||
|
||||
|
||||
def apply_identity_resolution(session) -> bool:
    """
    Resolve and attach Wizarr identity details to an ActivitySession.

    Returns True if the session was modified.
    """
    from app.models import ActivitySession  # local import to avoid circular

    if not isinstance(session, ActivitySession):
        return False

    resolved_user_id, resolved_identity_id, resolved_name = resolve_user_identity(
        session.server_id,
        getattr(session, "user_id", None),
        getattr(session, "user_name", None),
    )

    # Apply each resolved value only when it is truthy and actually differs,
    # so we can report whether anything changed.
    modified = False
    for attr, value in (
        ("wizarr_user_id", resolved_user_id),
        ("wizarr_identity_id", resolved_identity_id),
        ("user_name", resolved_name),
    ):
        if value and getattr(session, attr) != value:
            setattr(session, attr, value)
            modified = True

    # Stash the resolved display name for downstream consumers (not persisted).
    session._resolved_identity_name = resolved_name or session.user_name

    return modified
|
||||
292
app/services/activity/ingestion.py
Normal file
292
app/services/activity/ingestion.py
Normal file
@@ -0,0 +1,292 @@
|
||||
"""
|
||||
Ingestion service for Wizarr activity events.
|
||||
|
||||
Responsible for translating media server events into database records while
|
||||
keeping session grouping and identity resolution concerns encapsulated.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import structlog
|
||||
|
||||
try:
|
||||
from app.extensions import db # type: ignore
|
||||
except ImportError: # pragma: no cover - during unit tests
|
||||
db = None # type: ignore
|
||||
|
||||
from app.activity.domain.models import ActivityEvent
|
||||
from app.models import ActivitySession, ActivitySnapshot
|
||||
from app.services.activity.identity_resolution import apply_identity_resolution
|
||||
|
||||
|
||||
class ActivityIngestionService:
    """Persist and update activity sessions based on incoming events.

    Translates ``ActivityEvent`` objects from the media-server watchers into
    ``ActivitySession``/``ActivitySnapshot`` rows, handling session grouping
    and Wizarr identity resolution along the way.
    """

    def __init__(self):
        # Structured logger scoped to this module.
        self.logger = structlog.get_logger(__name__)

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------
    def record_activity_event(self, event: ActivityEvent) -> ActivitySession | None:
        """Record a new activity event and return the affected session.

        Dispatches on ``event.event_type``; unknown types are logged and
        ignored. Any handler failure rolls back the DB session and returns
        ``None`` rather than propagating.
        """
        if db is None:
            self.logger.warning("Database not available, skipping activity recording")
            return None

        try:
            # All progress-like event types share the generic update handler.
            handlers = {
                "session_start": self._handle_session_start,
                "session_end": self._handle_session_end,
                "session_progress": self._handle_session_update,
                "session_pause": self._handle_session_update,
                "session_resume": self._handle_session_update,
                "session_buffer": self._handle_session_update,
            }

            handler = handlers.get(event.event_type)
            if not handler:
                self.logger.warning("Unknown activity event type: %s", event.event_type)
                return None

            return handler(event)

        except Exception as exc:  # pragma: no cover - defensive rollback
            self.logger.error("Failed to record activity event: %s", exc, exc_info=True)
            db.session.rollback()  # type: ignore[union-attr]
            return None

    # ------------------------------------------------------------------
    # Event handlers
    # ------------------------------------------------------------------
    def _handle_session_start(self, event: ActivityEvent) -> ActivitySession:
        """Create (or, if already active, update) a session for a start event."""
        # An active row with the same (server, session) key means we already
        # track this playback — treat the start as an update instead.
        existing_session = (
            db.session.query(ActivitySession)  # type: ignore[union-attr]
            .filter_by(server_id=event.server_id, session_id=event.session_id)
            .filter(ActivitySession.active.is_(True))
            .first()
        )

        if existing_session:
            self.logger.debug("Session %s already exists, updating", event.session_id)
            return self._update_session_from_event(existing_session, event)

        session = ActivitySession(
            server_id=event.server_id,
            session_id=event.session_id,
            user_name=event.user_name,
            user_id=event.user_id,
            media_title=event.media_title,
            media_type=event.media_type,
            media_id=event.media_id,
            series_name=event.series_name,
            season_number=event.season_number,
            episode_number=event.episode_number,
            started_at=event.timestamp,
            active=True,
            duration_ms=event.duration_ms,
            device_name=event.device_name,
            client_name=event.client_name,
            ip_address=event.ip_address,
            platform=event.platform,
            player_version=event.player_version,
            artwork_url=event.artwork_url,
            thumbnail_url=event.thumbnail_url,
        )

        if event.transcoding_info:
            session.set_transcoding_info(event.transcoding_info)
        if event.metadata:
            session.set_metadata(event.metadata)

        # Mark the session as active in its metadata blob as well.
        metadata = session.get_metadata()
        metadata["status"] = "active"
        session.set_metadata(metadata)

        self._assign_session_identity(session)

        db.session.add(session)  # type: ignore[union-attr]
        # Flush (not commit) so session.id is assigned before grouping below.
        db.session.flush()  # type: ignore[union-attr]

        self._apply_session_grouping(session, event)

        if event.position_ms is not None and event.state:
            self._create_snapshot(session.id, event)

        db.session.commit()  # type: ignore[union-attr]
        self.logger.info(
            "Started tracking session %s for user %s",
            event.session_id,
            event.user_name,
        )
        return session

    def _handle_session_update(self, event: ActivityEvent) -> ActivitySession | None:
        """Apply a progress/pause/resume/buffer event to the active session.

        Falls back to creating the session when no active row exists (e.g.
        the start event was missed).
        """
        session = (
            db.session.query(ActivitySession)  # type: ignore[union-attr]
            .filter_by(server_id=event.server_id, session_id=event.session_id)
            .filter(ActivitySession.active.is_(True))
            .first()
        )

        if not session:
            self.logger.debug(
                "Session %s not found for update, creating new session",
                event.session_id,
            )
            # NOTE: mutates the incoming event's type before re-dispatching.
            event.event_type = "session_start"
            return self._handle_session_start(event)

        # Only trust the incoming user name when it is not a placeholder.
        if event.user_name and event.user_name.lower() not in {
            "unknown",
            "unknown user",
        }:
            session.user_name = event.user_name
        if event.user_id:
            session.user_id = event.user_id

        updated_session = self._update_session_from_event(session, event)

        if event.position_ms is not None and event.state:
            self._create_snapshot(session.id, event)

        self._assign_session_identity(updated_session)
        db.session.commit()  # type: ignore[union-attr]
        return updated_session

    def _handle_session_end(self, event: ActivityEvent) -> ActivitySession | None:
        """Close the matching active session; no-op when none is found."""
        session = (
            db.session.query(ActivitySession)  # type: ignore[union-attr]
            .filter_by(server_id=event.server_id, session_id=event.session_id)
            .filter(ActivitySession.active.is_(True))
            .first()
        )

        if not session:
            self.logger.debug(
                "Session %s not found for end event, skipping", event.session_id
            )
            return None

        # Apply any final field updates carried by the end event.
        self._update_session_from_event(session, event)

        session.active = False
        metadata = session.get_metadata()
        if event.timestamp:
            metadata["last_end_timestamp"] = event.timestamp.isoformat()
        metadata["status"] = "ended"
        session.set_metadata(metadata)

        if event.position_ms is not None:
            self._create_snapshot(session.id, event)

        self._assign_session_identity(session)
        db.session.commit()  # type: ignore[union-attr]

        self.logger.info(
            "Closed session %s for user %s", event.session_id, event.user_name
        )
        return session

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------
    def _assign_session_identity(self, session: ActivitySession) -> bool:
        """Best-effort identity resolution; returns True if the session changed."""
        try:
            return bool(apply_identity_resolution(session))
        except Exception as exc:  # pragma: no cover - defensive
            self.logger.debug("Identity resolution skipped: %s", exc)
            return False

    def _apply_session_grouping(
        self,
        new_session: ActivityEvent,
        event: ActivityEvent,
    ) -> None:
        """Assign ``reference_id`` so resumed playbacks group with earlier rows.

        Sessions for the same (server, user, media) within 30 minutes of the
        previous one share the previous session's reference id; otherwise the
        new session starts its own group. Failures degrade to a self-group.
        """
        try:
            previous_sessions = (
                db.session.query(ActivitySession)  # type: ignore[union-attr]
                .filter_by(
                    server_id=event.server_id,
                    user_name=event.user_name,
                    media_id=event.media_id,
                )
                .filter(ActivitySession.id < new_session.id)
                .order_by(ActivitySession.id.desc())
                .limit(2)
                .all()
            )

            if not previous_sessions:
                # First playback of this media by this user: self-grouped.
                new_session.reference_id = new_session.id
                return

            prev_session = previous_sessions[0]

            prev_timestamp = prev_session.updated_at or prev_session.started_at
            event_timestamp = event.timestamp

            # Normalise naive/aware mismatches (assumed UTC) before subtracting.
            if prev_timestamp.tzinfo is None and event_timestamp.tzinfo is not None:
                prev_timestamp = prev_timestamp.replace(tzinfo=UTC)
            elif prev_timestamp.tzinfo is not None and event_timestamp.tzinfo is None:
                event_timestamp = event_timestamp.replace(tzinfo=UTC)

            time_gap = event_timestamp - prev_timestamp
            # Group when the gap is under 30 minutes (1800 s).
            should_group = time_gap.total_seconds() < 1800

            if should_group:
                if prev_session.reference_id is None:
                    prev_session.reference_id = prev_session.id
                new_session.reference_id = prev_session.reference_id
            else:
                new_session.reference_id = new_session.id

        except Exception as exc:
            # On any failure, keep the session in its own group.
            new_session.reference_id = new_session.id
            self.logger.warning(
                "Session grouping failed for %s: %s", event.session_id, exc
            )

    def _update_session_from_event(
        self,
        session: ActivitySession,
        event: ActivityEvent,
    ) -> ActivitySession:
        """Copy event fields onto the session and bump its updated_at."""
        if event.duration_ms is not None:
            session.duration_ms = event.duration_ms
        if event.transcoding_info:
            session.set_transcoding_info(event.transcoding_info)

        metadata = session.get_metadata()
        if event.position_ms is not None:
            metadata["last_known_position_ms"] = event.position_ms
        if event.metadata:
            metadata.update(event.metadata)

        session.set_metadata(metadata if metadata else {})

        session.updated_at = datetime.now(UTC)
        self._assign_session_identity(session)
        return session

    def _create_snapshot(self, session_id: int, event: ActivityEvent) -> None:
        """Record a point-in-time playback snapshot for the given session.

        Caller is responsible for committing the DB session.
        """
        snapshot = ActivitySnapshot(
            session_id=session_id,
            timestamp=event.timestamp,
            position_ms=event.position_ms,
            state=event.state or "unknown",
            bandwidth_kbps=event.bandwidth_kbps,
            quality=event.quality,
            subtitle_stream=event.subtitle_stream,
            audio_stream=event.audio_stream,
        )

        if event.transcoding_info:
            snapshot.set_transcoding_details(event.transcoding_info)

        db.session.add(snapshot)  # type: ignore[union-attr]
|
||||
|
||||
|
||||
__all__ = ["ActivityIngestionService"]
|
||||
255
app/services/activity/maintenance.py
Normal file
255
app/services/activity/maintenance.py
Normal file
@@ -0,0 +1,255 @@
|
||||
"""
|
||||
Maintenance tasks for Wizarr activity data.
|
||||
|
||||
These helpers support scheduled jobs that clean up or reconcile sessions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import structlog
|
||||
|
||||
try:
|
||||
from app.extensions import db # type: ignore
|
||||
except ImportError: # pragma: no cover - during tests
|
||||
db = None # type: ignore
|
||||
|
||||
from app.models import ActivitySession
|
||||
|
||||
|
||||
class ActivityMaintenanceService:
    """Lifecycle management for session data.

    Backs scheduled jobs: retention cleanup, stale-session closing, and
    startup reconciliation against the live media servers.
    """

    def __init__(self):
        # Structured logger scoped to this module.
        self.logger = structlog.get_logger(__name__)

    def cleanup_old_activity(self, retention_days: int = 90) -> int:
        """Delete activity sessions older than the retention window.

        Returns the number of rows deleted (0 on failure or when the DB is
        unavailable).
        """
        if db is None:
            return 0

        try:
            cutoff_date = datetime.now(UTC) - timedelta(days=retention_days)
            deleted_count = (
                db.session.query(ActivitySession)  # type: ignore[union-attr]
                .filter(ActivitySession.started_at < cutoff_date)
                .delete()
            )

            db.session.commit()  # type: ignore[union-attr]
            self.logger.info("Cleaned up %s old activity sessions", deleted_count)
            return deleted_count

        except Exception as exc:  # pragma: no cover - log and rollback
            self.logger.error("Failed to cleanup old activity: %s", exc, exc_info=True)
            db.session.rollback()  # type: ignore[union-attr]
            return 0

    def end_stale_sessions(self, timeout_hours: int = 24) -> int:
        """Mark sessions as ended when they have not updated within timeout.

        Returns the number of sessions closed.
        """
        if db is None:
            return 0

        try:
            cutoff_time = datetime.now(UTC) - timedelta(hours=timeout_hours)

            stale_sessions = (
                db.session.query(ActivitySession)  # type: ignore[union-attr]
                .filter(
                    ActivitySession.active.is_(True),
                    ActivitySession.updated_at < cutoff_time,
                )
                .all()
            )

            ended_count = 0
            for session in stale_sessions:
                session.active = False
                metadata = session.get_metadata()
                metadata["status"] = "ended"
                # Record when the stale close happened for later auditing.
                metadata["stale_closed_at"] = datetime.now(UTC).isoformat()
                session.set_metadata(metadata)
                self._end_session_gracefully(session)
                ended_count += 1

            if ended_count:
                db.session.commit()  # type: ignore[union-attr]

            self.logger.info("Ended %s stale activity sessions", ended_count)
            return ended_count

        except Exception as exc:  # pragma: no cover - log and rollback
            self.logger.error("Failed to end stale sessions: %s", exc, exc_info=True)
            db.session.rollback()  # type: ignore[union-attr]
            return 0

    def recover_sessions_on_startup(self) -> int:
        """Validate sessions when the application boots and fix stale entries.

        Groups all active sessions per server and asks each server which are
        still playing; sessions the server no longer reports are ended.
        Returns the number of sessions ended.
        """
        if db is None:
            return 0

        try:
            active_sessions = (
                db.session.query(ActivitySession)  # type: ignore[union-attr]
                .filter(ActivitySession.active.is_(True))
                .all()
            )

            if not active_sessions:
                return 0

            self.logger.info(
                "Validating %s active sessions during startup", len(active_sessions)
            )

            # Bucket sessions by server so each server is polled only once.
            sessions_by_server: dict[int, list[ActivitySession]] = {}
            for session in active_sessions:
                sessions_by_server.setdefault(session.server_id, []).append(session)

            ended_count = 0
            recovered_count = 0

            for server_id, sessions in sessions_by_server.items():
                try:
                    result = self._validate_server_sessions(server_id, sessions)
                    ended_count += result["ended"]
                    recovered_count += result["recovered"]
                except Exception as exc:
                    self.logger.warning(
                        "Failed to validate sessions for server %s: %s",
                        server_id,
                        exc,
                    )
                    # Validation itself failed: close everything for safety.
                    for session in sessions:
                        self._end_session_gracefully(session)
                        ended_count += 1

            if ended_count or recovered_count:
                db.session.commit()  # type: ignore[union-attr]
                self.logger.info(
                    "Session recovery completed: %s recovered, %s ended",
                    recovered_count,
                    ended_count,
                )

            return ended_count

        except Exception as exc:  # pragma: no cover
            self.logger.error("Failed to recover sessions: %s", exc, exc_info=True)
            db.session.rollback()  # type: ignore[union-attr]
            return 0

    # ------------------------------------------------------------------
    # Helpers
    # ------------------------------------------------------------------
    def _validate_server_sessions(
        self,
        server_id: int,
        sessions: list[ActivitySession],
    ) -> dict[str, int]:
        """Poll the media server to verify which sessions are still active.

        Returns ``{"ended": n, "recovered": m}``. When the server cannot be
        reached, sessions updated within the last hour are kept alive and
        older ones are closed.
        """
        # Local imports to avoid circular dependencies at module import time.
        from app.models import MediaServer
        from app.services.media.service import get_client_for_media_server

        ended_count = 0
        recovered_count = 0

        try:
            server = db.session.query(MediaServer).filter_by(id=server_id).first()  # type: ignore[union-attr]
            if not server:
                # Server row is gone; its sessions cannot still be playing.
                self.logger.warning(
                    "Server %s not found during validation. Ending sessions.", server_id
                )
                for session in sessions:
                    self._end_session_gracefully(session)
                    ended_count += 1
                return {"ended": ended_count, "recovered": recovered_count}

            client = get_client_for_media_server(server)
            if not client:
                self.logger.warning(
                    "No client for server %s. Ending active sessions.", server_id
                )
                for session in sessions:
                    self._end_session_gracefully(session)
                    ended_count += 1
                return {"ended": ended_count, "recovered": recovered_count}

            try:
                current_sessions = client.now_playing()
            except Exception as exc:
                # Polling failed: fall back to an age-based heuristic
                # (keep sessions updated within the last hour).
                self.logger.warning(
                    "Failed to poll server %s sessions: %s", server_id, exc
                )
                cutoff_time = datetime.now(UTC) - timedelta(hours=1)
                for session in sessions:
                    updated_at = session.updated_at
                    if updated_at.tzinfo is None:
                        # Naive timestamps are assumed to be UTC.
                        updated_at = updated_at.replace(tzinfo=UTC)
                    if updated_at < cutoff_time:
                        self._end_session_gracefully(session)
                        ended_count += 1
                    else:
                        recovered_count += 1
                return {"ended": ended_count, "recovered": recovered_count}

            # Collect the ids the server reports as currently playing.
            active_session_ids = set()
            if current_sessions:
                for current_session in current_sessions:
                    session_id = self._extract_session_id(current_session)
                    if session_id:
                        active_session_ids.add(session_id)

            for session in sessions:
                if session.session_id in active_session_ids:
                    recovered_count += 1
                else:
                    self._end_session_gracefully(session)
                    ended_count += 1

        except Exception as exc:
            # Unexpected failure: same age-based fallback as the poll error.
            self.logger.error(
                "Error validating sessions for server %s: %s", server_id, exc
            )
            cutoff_time = datetime.now(UTC) - timedelta(hours=1)
            for session in sessions:
                updated_at = session.updated_at
                if updated_at.tzinfo is None:
                    updated_at = updated_at.replace(tzinfo=UTC)
                if updated_at < cutoff_time:
                    self._end_session_gracefully(session)
                    ended_count += 1
                else:
                    recovered_count += 1

        return {"ended": ended_count, "recovered": recovered_count}

    def _extract_session_id(self, current_session: dict) -> str | None:
        """Return the session identifier from a now_playing payload.

        Different server types use different key names, so several candidate
        fields are probed in order.
        """
        possible_fields = ["sessionKey", "session_id", "id", "key", "playSessionId"]
        for field in possible_fields:
            value = current_session.get(field)
            if value:
                return str(value)
        return None

    def _end_session_gracefully(self, session: ActivitySession) -> None:
        """Mark a session inactive while estimating its final metadata.

        Does not commit; callers batch the commit.
        """
        # Best estimate of when playback actually stopped.
        estimated_end_time = session.updated_at or session.started_at

        session.active = False
        metadata = session.get_metadata()
        metadata["graceful_closed_at"] = (
            estimated_end_time.isoformat() if estimated_end_time else None
        )
        metadata["status"] = "ended"
        session.set_metadata(metadata)

        self.logger.debug(
            "Gracefully ended session %s",
            session.session_id,
        )
|
||||
|
||||
|
||||
__all__ = ["ActivityMaintenanceService"]
|
||||
324
app/services/activity/queries.py
Normal file
324
app/services/activity/queries.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Query services for Wizarr activity data.
|
||||
|
||||
This module focuses on read-oriented operations (lists, detail views, filters)
|
||||
so API layers can depend on a smaller surface area than the full ingestion
|
||||
service.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
import structlog
|
||||
|
||||
try:
|
||||
from app.extensions import db # type: ignore
|
||||
except ImportError: # pragma: no cover - during unit tests
|
||||
db = None # type: ignore
|
||||
|
||||
from app.activity.domain.models import ActivityQuery
|
||||
from app.models import ActivitySession
|
||||
from app.services.activity.identity_resolution import apply_identity_resolution
|
||||
|
||||
|
||||
class ActivityQueryService:
    """Encapsulates filterable queries over activity sessions.

    Read-only service layer: all public methods return lists of
    ``ActivitySession`` objects (optionally annotated with ``server_name`` /
    ``server_type``) and never raise — failures are logged and an empty
    result is returned instead.
    """

    def __init__(self):
        self.logger = structlog.get_logger(__name__)

    # -------------------------------------------------------------------------
    # Public API
    # -------------------------------------------------------------------------
    def get_activity_sessions(
        self,
        query: ActivityQuery,
    ) -> tuple[list[ActivitySession], int]:
        """Return activity sessions matching the supplied query object.

        Sessions that share a ``reference_id`` are treated as one logical
        group: pagination (limit/offset) and the total count operate on
        groups, and each group is collapsed into a single representative
        session before being returned.

        Returns:
            Tuple of (consolidated sessions, total group count). Returns
            ``([], 0)`` when the database is unavailable or the query fails.
        """
        if db is None:
            return [], 0

        try:
            from sqlalchemy import case, func

            from app.models import MediaServer  # Local import to avoid cycles

            # Grouping key: sessions with a reference_id collapse into one
            # logical session; standalone sessions group by their own id.
            group_key_expr = case(
                (
                    ActivitySession.reference_id.isnot(None),
                    ActivitySession.reference_id,
                ),
                else_=ActivitySession.id,
            )

            filters = []
            if query.server_ids:
                filters.append(ActivitySession.server_id.in_(query.server_ids))
            if query.user_names:
                filters.append(ActivitySession.user_name.in_(query.user_names))
            if query.media_types:
                filters.append(ActivitySession.media_type.in_(query.media_types))
            if query.start_date:
                filters.append(ActivitySession.started_at >= query.start_date)
            if query.end_date:
                filters.append(ActivitySession.started_at <= query.end_date)
            if query.active_only:
                filters.append(ActivitySession.active.is_(True))

            # Total is counted over distinct groups, not raw rows, so that
            # pagination numbers match what the caller will actually see.
            total_count = (
                db.session.query(func.count(func.distinct(group_key_expr)))
                .filter(*filters)
                .scalar()
                or 0
            )

            if total_count == 0:
                return [], 0

            # Fall back to started_at when the requested column is unknown.
            order_col = getattr(
                ActivitySession, query.order_by, ActivitySession.started_at
            )
            direction = (query.order_direction or "desc").lower()
            # A group's sort value is the max (desc) or min (asc) of its
            # members' order column.
            order_agg = (
                func.max(order_col) if direction == "desc" else func.min(order_col)
            )
            order_value = order_agg.label("order_value")

            # limit 0 (or None) means "no limit".
            limit_value = query.limit if query.limit not in (None, 0) else None
            offset_value = query.offset or 0

            # First query: the page of group keys, in the requested order.
            group_query = (
                db.session.query(group_key_expr.label("group_key"), order_value)
                .filter(*filters)
                .group_by(group_key_expr)
            )

            if direction == "desc":
                group_query = group_query.order_by(
                    order_value.desc(), group_key_expr.desc()
                )
            else:
                group_query = group_query.order_by(
                    order_value.asc(), group_key_expr.asc()
                )

            if offset_value:
                group_query = group_query.offset(offset_value)
            if limit_value:
                group_query = group_query.limit(limit_value)

            group_rows = group_query.all()
            if not group_rows:
                return [], total_count

            group_keys = [row.group_key for row in group_rows]
            # Remember the page order so the consolidated results can be
            # re-sorted to match after merging.
            group_order_map = {key: index for index, key in enumerate(group_keys)}

            # Second query: all member rows of the selected groups, joined
            # with server name/type for display.
            session_query = (
                db.session.query(
                    ActivitySession,
                    MediaServer.name.label("server_name"),
                    MediaServer.server_type.label("server_type"),
                )
                .outerjoin(MediaServer, ActivitySession.server_id == MediaServer.id)
                .filter(*filters)
            )

            # Only restrict to the page's groups when pagination is in
            # effect; otherwise the filters alone already select every row.
            if group_keys and (limit_value is not None or offset_value):
                session_query = session_query.filter(group_key_expr.in_(group_keys))

            if direction == "desc":
                session_query = session_query.order_by(
                    order_col.desc(),
                    ActivitySession.id.desc(),
                )
            else:
                session_query = session_query.order_by(
                    order_col.asc(),
                    ActivitySession.id.asc(),
                )

            results = session_query.all()

            raw_sessions: list[ActivitySession] = []
            for session_obj, server_name, server_type in results:
                # Attach display-only attributes; these are not ORM columns.
                session_obj.server_name = server_name
                session_obj.server_type = server_type
                raw_sessions.append(session_obj)

            # Opportunistically resolve Wizarr identities for the rows we
            # are about to return; failures are non-fatal.
            identity_updates = False
            for session in raw_sessions:
                try:
                    identity_updates |= bool(apply_identity_resolution(session))
                except Exception as exc:  # pragma: no cover - defensive
                    self.logger.debug("Identity resolution skipped: %s", exc)

            if identity_updates:
                try:
                    db.session.commit()
                except Exception as exc:  # pragma: no cover - best effort
                    self.logger.debug("Failed to persist identity resolution: %s", exc)
                    db.session.rollback()

            sessions = self._consolidate_grouped_sessions(raw_sessions)

            # Restore the page ordering computed by the group query
            # (consolidation sorts by started_at internally).
            if group_order_map:
                sessions.sort(
                    key=lambda s: group_order_map.get(
                        s.reference_id if s.reference_id is not None else s.id,
                        len(group_order_map),
                    )
                )

            # Safety net: never return more groups than were paged.
            if limit_value is not None and len(sessions) > len(group_keys):
                sessions = sessions[: len(group_keys)]

            # Touch the relationship so snapshots are loaded before the
            # session objects leave the DB session scope.
            if query.include_snapshots:
                for session in sessions:
                    _ = session.snapshots

            return sessions, total_count

        except Exception as exc:  # pragma: no cover - log and fallback
            self.logger.error("Failed to get activity sessions: %s", exc, exc_info=True)
            return [], 0

    def get_active_sessions(
        self, server_id: int | None = None
    ) -> list[ActivitySession]:
        """Return all currently active sessions, optionally filtered by server."""
        query = ActivityQuery(
            server_ids=[server_id] if server_id else None,
            active_only=True,
            order_by="started_at",
            order_direction="desc",
        )
        sessions, _ = self.get_activity_sessions(query)
        return sessions

    def get_user_activity(
        self, user_name: str, days: int = 30
    ) -> list[ActivitySession]:
        """Return sessions for a specific user within the requested window."""
        start_date = datetime.now(UTC) - timedelta(days=days)
        query = ActivityQuery(
            user_names=[user_name],
            start_date=start_date,
            order_by="started_at",
            order_direction="desc",
        )
        sessions, _ = self.get_activity_sessions(query)
        return sessions

    def get_server_activity(
        self, server_id: int, days: int = 7
    ) -> list[ActivitySession]:
        """Return sessions for a specific server."""
        start_date = datetime.now(UTC) - timedelta(days=days)
        query = ActivityQuery(
            server_ids=[server_id],
            start_date=start_date,
            order_by="started_at",
            order_direction="desc",
        )
        sessions, _ = self.get_activity_sessions(query)
        return sessions

    # -------------------------------------------------------------------------
    # Helpers
    # -------------------------------------------------------------------------
    def _consolidate_grouped_sessions(
        self,
        sessions: list[ActivitySession],
    ) -> list[ActivitySession]:
        """Collapse grouped sessions (reference_id) into a single representative.

        Sessions without a ``reference_id`` pass through unchanged. The
        result is sorted by ``started_at`` descending; callers that need a
        different order re-sort afterwards.
        """
        if not sessions:
            return []

        grouped = {}
        standalone_sessions: list[ActivitySession] = []

        for session in sessions:
            if session.reference_id is not None:
                grouped.setdefault(session.reference_id, []).append(session)
            else:
                standalone_sessions.append(session)

        consolidated_sessions: list[ActivitySession] = []

        for group_sessions in grouped.values():
            if len(group_sessions) == 1:
                consolidated_sessions.append(group_sessions[0])
            else:
                consolidated_sessions.append(self._merge_session_group(group_sessions))

        consolidated_sessions.extend(standalone_sessions)
        # NOTE(review): datetime.min fallback is tz-aware; assumes
        # started_at values are also tz-aware — confirm against the model.
        consolidated_sessions.sort(
            key=lambda s: s.started_at or datetime.min.replace(tzinfo=UTC),
            reverse=True,
        )
        return consolidated_sessions

    def _merge_session_group(self, sessions: list[ActivitySession]) -> ActivitySession:
        """Merge a set of grouped sessions into a single consolidated view.

        The earliest session supplies identity and start time; the most
        recent one supplies current media, client, and progress fields.
        The merge mutates and returns the earliest session object.

        Raises:
            ValueError: if ``sessions`` is empty.
        """
        if not sessions:
            raise ValueError("Cannot merge empty session list")

        if len(sessions) == 1:
            return sessions[0]

        sorted_sessions = sorted(
            sessions,
            key=lambda s: s.started_at or datetime.max.replace(tzinfo=UTC),
        )

        base_session = sorted_sessions[0]
        latest_session = sorted_sessions[-1]
        # The group counts as active if any member still is.
        is_active = any(getattr(s, "active", False) for s in sessions)

        consolidated = base_session
        # Latest-wins fields: what is playing right now, and who/where.
        consolidated.user_name = latest_session.user_name
        consolidated.user_id = latest_session.user_id
        consolidated.media_title = latest_session.media_title
        consolidated.media_type = latest_session.media_type
        consolidated.media_id = latest_session.media_id
        consolidated.series_name = latest_session.series_name
        consolidated.season_number = latest_session.season_number
        consolidated.episode_number = latest_session.episode_number

        # Earliest-wins: the group started when its first member started.
        consolidated.started_at = base_session.started_at
        consolidated.active = is_active
        consolidated.duration_ms = latest_session.duration_ms

        consolidated.device_name = latest_session.device_name
        consolidated.client_name = latest_session.client_name
        consolidated.ip_address = latest_session.ip_address
        consolidated.platform = latest_session.platform
        consolidated.player_version = latest_session.player_version

        if latest_session.transcoding_info:
            consolidated.set_transcoding_info(latest_session.get_transcoding_info())
        else:
            consolidated.transcoding_info = None

        if latest_session.session_metadata:
            consolidated.set_metadata(latest_session.get_metadata())
        else:
            consolidated.session_metadata = None

        consolidated.artwork_url = latest_session.artwork_url
        consolidated.thumbnail_url = latest_session.thumbnail_url
        consolidated.updated_at = latest_session.updated_at

        # Preserve display annotations attached by the query layer.
        if hasattr(base_session, "server_name"):
            consolidated.server_name = base_session.server_name
        if hasattr(base_session, "server_type"):
            consolidated.server_type = base_session.server_type

        # Record provenance of the merge in metadata for debugging/UI.
        metadata = consolidated.get_metadata()
        metadata["grouped_sessions"] = [s.session_id for s in sessions]
        metadata["group_count"] = len(sessions)
        consolidated.set_metadata(metadata)

        return consolidated
|
||||
@@ -1,5 +1,5 @@
|
||||
"""
|
||||
Historical data import service for Plus features.
|
||||
Historical data import service for Wizarr.
|
||||
|
||||
This service handles importing historical viewing data from media servers
|
||||
like Plex into the existing ActivitySession model for unified analytics.
|
||||
@@ -10,7 +10,7 @@ from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
from app.models import MediaServer, db
|
||||
from plus.activity.services.identity_resolution import apply_identity_resolution
|
||||
from app.services.activity.identity_resolution import apply_identity_resolution
|
||||
|
||||
|
||||
class HistoricalDataService:
|
||||
@@ -121,7 +121,7 @@ class HistoricalDataService:
|
||||
def _process_plex_history_entry(self, entry, account_lookup, client):
|
||||
"""Process a single Plex history entry into ActivitySession format."""
|
||||
try:
|
||||
from plus.activity.domain.models import ActivitySession
|
||||
from app.models import ActivitySession
|
||||
|
||||
# Extract user data with multiple fallbacks because Plex history differs by server version
|
||||
account = getattr(entry, "account", None)
|
||||
@@ -301,7 +301,7 @@ class HistoricalDataService:
|
||||
def _store_activity_sessions(self, sessions: list) -> int:
|
||||
"""Store activity sessions in the database."""
|
||||
try:
|
||||
from plus.activity.domain.models import ActivitySession
|
||||
from app.models import ActivitySession
|
||||
|
||||
stored_count = 0
|
||||
for session in sessions:
|
||||
@@ -330,7 +330,7 @@ class HistoricalDataService:
|
||||
def get_import_statistics(self) -> dict[str, Any]:
|
||||
"""Get statistics about imported historical data."""
|
||||
try:
|
||||
from plus.activity.domain.models import ActivitySession
|
||||
from app.models import ActivitySession
|
||||
|
||||
# Get basic counts for imported historical data
|
||||
imported_query = ActivitySession.query.filter(
|
||||
@@ -385,7 +385,7 @@ class HistoricalDataService:
|
||||
def clear_historical_data(self) -> dict[str, Any]:
|
||||
"""Clear all imported historical data for this server."""
|
||||
try:
|
||||
from plus.activity.domain.models import ActivitySession
|
||||
from app.models import ActivitySession
|
||||
|
||||
# Delete only imported historical data, not live activity data
|
||||
deleted_count = ActivitySession.query.filter(
|
||||
|
||||
@@ -15,8 +15,6 @@ from typing import Any
|
||||
import requests
|
||||
import structlog
|
||||
|
||||
from app.services.shared.base import BaseService
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
@@ -41,7 +39,7 @@ class LicenseVerificationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class KeygenLicenseService(BaseService):
|
||||
class KeygenLicenseService:
|
||||
"""Service for verifying Keygen licenses at runtime."""
|
||||
|
||||
def __init__(self):
|
||||
@@ -220,9 +218,9 @@ class KeygenLicenseService(BaseService):
|
||||
)
|
||||
|
||||
try:
|
||||
from Crypto.Hash import SHA256
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Signature import PKCS1_v1_5
|
||||
from Crypto.Hash import SHA256 # type: ignore[import-untyped]
|
||||
from Crypto.PublicKey import RSA # type: ignore[import-untyped]
|
||||
from Crypto.Signature import PKCS1_v1_5 # type: ignore[import-untyped]
|
||||
except ImportError as e:
|
||||
raise LicenseVerificationError(
|
||||
"pycryptodome library required for offline verification"
|
||||
|
||||
@@ -172,7 +172,8 @@ class KomgaClient(RestApiMixin):
|
||||
# Always create restricted access with the actual library IDs
|
||||
library_access = (
|
||||
LibraryAccessHelper.create_restricted_access(
|
||||
shared_library_ids, getattr(self, "server_id", None)
|
||||
shared_library_ids,
|
||||
self.server_id, # type: ignore[attr-defined]
|
||||
)
|
||||
if shared_library_ids
|
||||
else []
|
||||
@@ -329,7 +330,8 @@ class KomgaClient(RestApiMixin):
|
||||
# Always create restricted access with the actual library IDs
|
||||
library_access = (
|
||||
LibraryAccessHelper.create_restricted_access(
|
||||
shared_library_ids, getattr(self, "server_id", None)
|
||||
shared_library_ids,
|
||||
self.server_id, # type: ignore[attr-defined]
|
||||
)
|
||||
if shared_library_ids
|
||||
else []
|
||||
|
||||
286
app/tasks/activity.py
Normal file
286
app/tasks/activity.py
Normal file
@@ -0,0 +1,286 @@
|
||||
"""
|
||||
Background tasks for activity monitoring.
|
||||
|
||||
Provides scheduled tasks for activity data maintenance, cleanup,
|
||||
and monitoring health checks.
|
||||
"""
|
||||
|
||||
try:
|
||||
from flask import Flask
|
||||
except ImportError: # pragma: no cover
|
||||
Flask = None # type: ignore
|
||||
|
||||
import structlog
|
||||
from structlog import get_logger as _get_logger
|
||||
|
||||
from app.services.activity import ActivityService
|
||||
|
||||
|
||||
def cleanup_old_activity_task(app: Flask, retention_days: int = 90):
    """
    Cleanup old activity data beyond retention period.

    This task should be scheduled to run daily to manage database size.

    Args:
        app: Flask application instance
        retention_days: Number of days to retain activity data

    Returns:
        Number of sessions deleted, or 0 when cleanup fails.
    """
    log = structlog.get_logger(__name__)

    try:
        with app.app_context():
            service = ActivityService()
            removed = service.cleanup_old_activity(retention_days)
            log.info(f"Activity cleanup completed: {removed} old sessions removed")
            return removed
    except Exception as e:
        # Never propagate into the scheduler thread — log and report zero.
        log.error(f"Failed to cleanup old activity data: {e}", exc_info=True)
        return 0
|
||||
|
||||
|
||||
def end_stale_sessions_task(app: Flask, timeout_hours: int = 24):
    """
    End sessions that have been active too long without updates.

    This task should be scheduled to run every few hours to clean up
    sessions that may have been left open due to client disconnections.

    Args:
        app: Flask application instance
        timeout_hours: Hours after which to consider a session stale

    Returns:
        Number of sessions ended, or 0 when the cleanup fails.
    """
    log = structlog.get_logger(__name__)

    try:
        with app.app_context():
            service = ActivityService()
            ended = service.end_stale_sessions(timeout_hours)
            log.info(f"Stale session cleanup completed: {ended} sessions ended")
            return ended
    except Exception as e:
        # Never propagate into the scheduler thread — log and report zero.
        log.error(f"Failed to end stale sessions: {e}", exc_info=True)
        return 0
|
||||
|
||||
|
||||
def monitor_health_check_task(app: Flask):
    """
    Check the health of activity monitoring connections.

    This task monitors WebSocket connections and logs any issues
    for debugging purposes.

    Args:
        app: Flask application instance

    Returns:
        dict: connection/error counts and a list of issues, or a
        ``{"status": ...}`` marker when the monitor is missing or the
        check fails.
    """
    logger = structlog.get_logger(__name__)

    try:
        with app.app_context():
            # Get activity monitor from app extensions.
            # BUG FIX: ``app.extensions`` is a dict, so the previous
            # ``getattr(app.extensions, "activity_monitor", None)`` always
            # returned None and the check always reported "no_monitor".
            monitor = app.extensions.get("activity_monitor")

            if not monitor:
                logger.warning("Activity monitor not found in app extensions")
                return {"status": "no_monitor"}

            # Get connection status, keyed by server id.
            connection_status = monitor.get_connection_status()

            # Aggregate per-server status into a health summary.
            issues = []
            total_connections = len(connection_status)
            connected_count = 0
            total_errors = 0

            for server_id, status in connection_status.items():
                if status.get("connected", False):
                    connected_count += 1
                else:
                    issues.append(f"Server {server_id} disconnected")

                error_count = status.get("errors", 0)
                total_errors += error_count

                if error_count > 10:  # Threshold for concerning error count
                    issues.append(f"Server {server_id} has {error_count} errors")

            # Log health status
            health_status = {
                "total_connections": total_connections,
                "connected_count": connected_count,
                "total_errors": total_errors,
                "connection_rate": connected_count / total_connections
                if total_connections > 0
                else 0,
                "issues": issues,
            }

            if issues:
                logger.warning(f"Activity monitoring health issues detected: {issues}")
            else:
                logger.debug(
                    f"Activity monitoring health check passed: {connected_count}/{total_connections} connections active"
                )

            return health_status

    except Exception as e:
        logger.error(f"Failed to perform health check: {e}", exc_info=True)
        return {"status": "error", "error": str(e)}
|
||||
|
||||
|
||||
def activity_monitoring_heartbeat_task(app: Flask):
    """
    Simple heartbeat task to ensure activity monitoring is running.

    This task can be used to restart monitoring if it has stopped
    unexpectedly.

    Args:
        app: Flask application instance

    Returns:
        bool: True when monitoring is running (or was restarted), False
        when the monitor is missing or a restart failed.
    """
    logger = structlog.get_logger(__name__)

    try:
        with app.app_context():
            # BUG FIX: ``app.extensions`` is a dict, so the previous
            # ``getattr(app.extensions, "activity_monitor", None)`` always
            # returned None and the heartbeat could never restart anything.
            monitor = app.extensions.get("activity_monitor")

            if not monitor:
                logger.warning("Activity monitor not available for heartbeat check")
                return False

            if not monitor.monitoring:
                logger.warning(
                    "Activity monitoring is not running, attempting to restart"
                )
                try:
                    monitor.start_monitoring()
                    logger.info("Activity monitoring restarted successfully")
                    return True
                except Exception as e:
                    logger.error(f"Failed to restart activity monitoring: {e}")
                    return False

            logger.debug("Activity monitoring heartbeat: OK")
            return True

    except Exception as e:
        logger.error(f"Failed to perform heartbeat check: {e}", exc_info=True)
        return False
|
||||
|
||||
|
||||
def recover_sessions_on_startup_task(app: Flask):
    """
    Recover active sessions on startup by validating them against media servers.

    This task should be run once when Wizarr starts up to handle sessions
    that were active when the application last shut down.

    Args:
        app: Flask application instance

    Returns:
        Number of orphaned sessions cleaned up, or 0 on failure.
    """
    log = structlog.get_logger(__name__)

    try:
        with app.app_context():
            service = ActivityService()
            cleaned = service.recover_sessions_on_startup()
            log.info(
                f"Session recovery completed: {cleaned} orphaned sessions cleaned up"
            )
            return cleaned
    except Exception as e:
        # Never propagate into startup — log and report zero.
        log.error(f"Failed to recover sessions on startup: {e}", exc_info=True)
        return 0
|
||||
|
||||
|
||||
def get_activity_cleanup_interval() -> int:
    """Get the interval for activity cleanup task in hours (runs daily)."""
    return 24
|
||||
|
||||
|
||||
def get_stale_session_cleanup_interval() -> int:
    """Get the interval for stale session cleanup task in hours (every 6h)."""
    return 6
|
||||
|
||||
|
||||
def get_health_check_interval() -> int:
    """Get the interval for health check task in minutes (every 15m)."""
    return 15
|
||||
|
||||
|
||||
def get_heartbeat_interval() -> int:
    """Get the interval for heartbeat task in minutes (every 5m)."""
    return 5
|
||||
|
||||
|
||||
def register_activity_tasks(app: Flask, scheduler):
    """
    Register all activity monitoring tasks with the scheduler.

    Registers four interval jobs: daily data cleanup, stale-session
    cleanup, connection health checks, and a monitoring heartbeat.
    Registration failures are logged, never raised.

    Args:
        app: Flask application instance
        scheduler: APScheduler instance
    """
    logger = _get_logger()

    try:
        # (job id, callable, interval kwargs) — one entry per scheduled task.
        job_specs = [
            (
                "activity_cleanup",
                lambda: cleanup_old_activity_task(app),
                {"hours": get_activity_cleanup_interval()},
            ),
            (
                "activity_stale_cleanup",
                lambda: end_stale_sessions_task(app),
                {"hours": get_stale_session_cleanup_interval()},
            ),
            (
                "activity_health_check",
                lambda: monitor_health_check_task(app),
                {"minutes": get_health_check_interval()},
            ),
            (
                "activity_heartbeat",
                lambda: activity_monitoring_heartbeat_task(app),
                {"minutes": get_heartbeat_interval()},
            ),
        ]

        for job_id, job_callable, interval_kwargs in job_specs:
            scheduler.add_job(
                id=job_id,
                func=job_callable,
                trigger="interval",
                replace_existing=True,
                max_instances=1,
                **interval_kwargs,
            )

        logger.info("Activity monitoring tasks registered successfully")

    except Exception as e:
        logger.error(f"Failed to register activity tasks: {e}", exc_info=True)
|
||||
@@ -25,11 +25,8 @@
|
||||
<div class="flex flex-col md:flex-row md:items-center gap-4 md:gap-8 text-sm font-medium">
|
||||
<!-- Main Navigation -->
|
||||
<ul class="flex flex-col md:flex-row md:items-center gap-2 md:gap-4 md:space-y-0 space-y-2">
|
||||
{% set tabs = [('/home', '#', 'Home'), ('/invites', '#invites', 'Invitations'), ('/users', '#users', 'Users')] %}
|
||||
{% if is_plus_enabled %}
|
||||
{% set tabs = tabs + [('/activity', '#activity', 'Activity')] %}
|
||||
{% endif %}
|
||||
{% set tabs = tabs + [('/settings', '#settings', 'Settings')] %}
|
||||
{% set activity_url = url_for('activity.activity_dashboard') %}
|
||||
{% set tabs = [('/home', '#', 'Home'), ('/invites', '#invites', 'Invitations'), ('/users', '#users', 'Users'), (activity_url, '#activity', 'Activity'), ('/settings', '#settings', 'Settings')] %}
|
||||
{% for href, hash, label in tabs %}
|
||||
<li>
|
||||
<button hx-get="{{ href }}"
|
||||
@@ -58,12 +55,12 @@
|
||||
hx-trigger="hover once"
|
||||
hx-swap="none"
|
||||
style="display: none"></div>
|
||||
{% elif href == "/activity" %}
|
||||
<div hx-get="/activity/grid"
|
||||
{% elif href == activity_url %}
|
||||
<div hx-get="{{ url_for('activity.activity_grid') }}"
|
||||
hx-trigger="hover once"
|
||||
hx-swap="none"
|
||||
style="display: none"></div>
|
||||
<div hx-get="/activity/stats"
|
||||
<div hx-get="{{ url_for('activity.activity_stats') }}"
|
||||
hx-trigger="hover once"
|
||||
hx-swap="none"
|
||||
style="display: none"></div>
|
||||
@@ -224,8 +221,15 @@
|
||||
<div id="content" class="flex-1 z-10 overflow-y-auto pb-16 md:pb-0"></div>
|
||||
<!-- Mobile Bottom Navigation -->
|
||||
<nav class="md:hidden fixed bottom-0 left-0 right-0 bg-white dark:bg-gray-900 border-t border-gray-200 dark:border-gray-700 z-50">
|
||||
<div class="grid grid-cols-4 h-16">
|
||||
{% set mobile_tabs = [('/home', '#', 'Home', 'm4 12 8-8 8 8M6 10.5V19a1 1 0 0 0 1 1h3v-3a1 1 0 0 1 1-1h2a1 1 0 0 1 1 1v3h3a1 1 0 0 0 1-1v-8.5'), ('/invites', '#invites', 'Invitations', 'm3.5 5.5 7.893 6.036a1 1 0 0 0 1.214 0L20.5 5.5M4 19h16a1 1 0 0 0 1-1V6a1 1 0 0 0-1-1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1Z'), ('/users', '#users', 'Users', 'M16 19h4a1 1 0 0 0 1-1v-1a3 3 0 0 0-3-3h-2m-2.236-4a3 3 0 1 0 0-4M3 18v-1a3 3 0 0 1 3-3h4a3 3 0 0 1 3 3v1a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1Zm8-10a3 3 0 1 1-6 0 3 3 0 0 1 6 0Z'), ('/settings', '#settings', 'Settings', 'M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z M15 12a3 3 0 11-6 0 3 3 0 016 0z')] %}
|
||||
<div class="grid grid-cols-5 h-16">
|
||||
{% set activity_url = url_for('activity.activity_dashboard') %}
|
||||
{% set mobile_tabs = [
|
||||
('/home', '#', 'Home', 'm4 12 8-8 8 8M6 10.5V19a1 1 0 0 0 1 1h3v-3a1 1 0 0 1 1-1h2a1 1 0 0 1 1 1v3h3a1 1 0 0 0 1-1v-8.5'),
|
||||
('/invites', '#invites', 'Invitations', 'm3.5 5.5 7.893 6.036a1 1 0 0 0 1.214 0L20.5 5.5M4 19h16a1 1 0 0 0 1-1V6a1 1 0 0 0-1-1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1Z'),
|
||||
('/users', '#users', 'Users', 'M16 19h4a1 1 0 0 0 1-1v-1a3 3 0 0 0-3-3h-2m-2.236-4a3 3 0 1 0 0-4M3 18v-1a3 3 0 0 1 3-3h4a3 3 0 0 1 3 3v1a1 1 0 0 1-1 1H4a1 1 0 0 1-1-1Zm8-10a3 3 0 1 1-6 0 3 3 0 0 1 6 0Z'),
|
||||
(activity_url, '#activity', 'Activity', 'M5 3a1 1 0 0 1 1 1v12a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1Zm7 4a1 1 0 0 1 1 1v9a1 1 0 1 1-2 0V8a1 1 0 0 1 1-1Zm7 4a1 1 0 0 1 1 1v5a1 1 0 1 1-2 0v-5a1 1 0 0 1 1-1Z'),
|
||||
('/settings', '#settings', 'Settings', 'M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z M15 12a3 3 0 11-6 0 3 3 0 0 1 6 0z')
|
||||
] %}
|
||||
{% for href, hash, label, icon_path in mobile_tabs %}
|
||||
<button hx-get="{{ href }}"
|
||||
hx-trigger="click"
|
||||
@@ -245,12 +249,8 @@
|
||||
{% endfor %}
|
||||
</div>
|
||||
</nav>
|
||||
{% set preload_hashes = ['#invites', '#users', '#settings', '#profile'] %}
|
||||
{% set preload_pages = ['invites', 'users', 'settings', 'profile'] %}
|
||||
{% if is_plus_enabled %}
|
||||
{% set preload_hashes = preload_hashes + ['#activity'] %}
|
||||
{% set preload_pages = preload_pages + ['activity'] %}
|
||||
{% endif %}
|
||||
{% set preload_hashes = ['#invites', '#users', '#activity', '#settings', '#profile'] %}
|
||||
{% set preload_pages = ['invites', 'users', 'activity', 'settings', 'profile'] %}
|
||||
<div hx-get="/home"
|
||||
hx-target="#content"
|
||||
hx-swap="innerHTML"
|
||||
|
||||
@@ -59,6 +59,25 @@
|
||||
{{ _("Servers") }}
|
||||
</button>
|
||||
</li>
|
||||
<li class="me-2">
|
||||
<button class="tab-btn inline-flex items-center justify-center p-4 border-b-2 border-transparent rounded-t-lg hover:text-gray-600 hover:border-gray-300 dark:hover:text-gray-300 group"
|
||||
hx-get="{{ url_for('activity.activity_settings') }}"
|
||||
hx-target="#tab-body"
|
||||
hx-swap="innerHTML"
|
||||
hx-trigger="click"
|
||||
preload="mouseover">
|
||||
<svg class="w-4 h-4 me-2 text-gray-400 group-hover:text-gray-500 dark:text-gray-500 dark:group-hover:text-gray-300"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
stroke-width="2"
|
||||
d="M9.75 17L9 20l-1 1h8l-1-1-.75-3M3 13h18M5 17h14a2 2 0 0 0 2-2V5a2 2 0 0 0-2-2H5a2 2 0 0 0-2 2v10a2 2 0 0 0 2 2z" />
|
||||
</svg>
|
||||
{{ _("Activity") }}
|
||||
</button>
|
||||
</li>
|
||||
<li class="me-2">
|
||||
<button class="tab-btn inline-flex items-center justify-center p-4 border-b-2 border-transparent rounded-t-lg hover:text-gray-600 hover:border-gray-300 dark:hover:text-gray-300 group"
|
||||
hx-get="{{ url_for('connections.list_connections') }}"
|
||||
|
||||
@@ -3,6 +3,6 @@ minversion = 6.0
|
||||
addopts = -ra -q --tb=short
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
filterwarnings =
|
||||
filterwarnings =
|
||||
ignore:jsonschema.RefResolver is deprecated:DeprecationWarning
|
||||
# Playwright will be enabled via conftest.py for E2E tests only
|
||||
@@ -1,6 +1,5 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Wrapper for Wizarr Plus Cython compilation.
|
||||
The actual implementation is in the plus submodule.
|
||||
"""
|
||||
|
||||
|
||||
1
tests/activity/__init__.py
Normal file
1
tests/activity/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# Package marker for activity-related tests.
|
||||
60
tests/activity/conftest.py
Normal file
60
tests/activity/conftest.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask_login import LoginManager, UserMixin, login_user
|
||||
|
||||
from app.activity.api.blueprint import activity_bp
|
||||
|
||||
|
||||
class _TestUser(UserMixin):
|
||||
id = "pytest-user"
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activity_app():
|
||||
"""Minimal Flask app with the activity blueprint registered."""
|
||||
app = Flask(__name__)
|
||||
app.config.update(
|
||||
SECRET_KEY="testing-secret",
|
||||
TESTING=True,
|
||||
)
|
||||
|
||||
login_manager = LoginManager()
|
||||
login_manager.init_app(app)
|
||||
login_manager.login_view = "_login_route"
|
||||
|
||||
@login_manager.user_loader
|
||||
def _load_user(user_id): # pragma: no cover - required by flask-login
|
||||
if user_id == _TestUser.id:
|
||||
return _TestUser()
|
||||
return None
|
||||
|
||||
@app.route("/_login")
|
||||
def _login_route():
|
||||
login_user(_TestUser())
|
||||
return "ok"
|
||||
|
||||
template_dir = Path(__file__).resolve().parents[2] / "app" / "templates"
|
||||
if str(template_dir) not in app.jinja_loader.searchpath:
|
||||
app.jinja_loader.searchpath.append(str(template_dir))
|
||||
|
||||
app.jinja_env.globals.setdefault("_", lambda s, **_: s)
|
||||
app.jinja_env.globals.setdefault(
|
||||
"ngettext",
|
||||
lambda singular, plural, number, **_: singular if number == 1 else plural,
|
||||
)
|
||||
|
||||
app.register_blueprint(activity_bp)
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def activity_client(activity_app):
|
||||
return activity_app.test_client()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def logged_activity_client(activity_client):
|
||||
activity_client.get("/_login")
|
||||
return activity_client
|
||||
41
tests/activity/test_activity_blueprint.py
Normal file
41
tests/activity/test_activity_blueprint.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from app.services.activity import ActivityService
|
||||
|
||||
|
||||
def test_activity_requires_login(activity_client):
|
||||
response = activity_client.get("/activity/")
|
||||
assert response.status_code == 302
|
||||
# Redirects to the login route
|
||||
assert "_login" in response.headers.get("Location", "")
|
||||
|
||||
|
||||
def test_activity_index_renders(logged_activity_client):
|
||||
response = logged_activity_client.get("/activity/")
|
||||
body = response.get_data(as_text=True)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "Monitor media playback activity" in body
|
||||
|
||||
|
||||
def test_activity_dashboard_tab_uses_service(logged_activity_client):
|
||||
default_stats = ActivityService().get_dashboard_stats()
|
||||
with patch(
|
||||
"app.services.activity.ActivityService.get_dashboard_stats",
|
||||
return_value=default_stats,
|
||||
) as mocked:
|
||||
response = logged_activity_client.get("/activity/dashboard")
|
||||
|
||||
body = response.get_data(as_text=True)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "dashboard" in body.lower()
|
||||
mocked.assert_called_once_with(days=7)
|
||||
|
||||
|
||||
def test_activity_grid_returns_table(logged_activity_client):
|
||||
response = logged_activity_client.get("/activity/grid")
|
||||
body = response.get_data(as_text=True)
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "Failed to load activity data" not in body
|
||||
19
tests/activity/test_activity_service.py
Normal file
19
tests/activity/test_activity_service.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from app.activity.domain.models import ActivityQuery
|
||||
from app.services.activity import ActivityService
|
||||
|
||||
|
||||
def test_activity_service_dashboard_stats_without_db():
|
||||
service = ActivityService()
|
||||
stats = service.get_dashboard_stats()
|
||||
|
||||
assert isinstance(stats, dict)
|
||||
assert stats["total_sessions"] == 0
|
||||
assert "time_series_labels" in stats
|
||||
|
||||
|
||||
def test_activity_service_query_without_db():
|
||||
service = ActivityService()
|
||||
sessions, total = service.get_activity_sessions(ActivityQuery())
|
||||
|
||||
assert sessions == []
|
||||
assert total == 0
|
||||
@@ -1,16 +1,25 @@
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import pytest
|
||||
from flask_migrate import upgrade
|
||||
|
||||
from app import create_app
|
||||
from app.config import BaseConfig
|
||||
from app.extensions import db
|
||||
|
||||
# Workaround for Python 3.13 macOS proxy detection bug
|
||||
# https://github.com/python/cpython/issues/112509
|
||||
os.environ["NO_PROXY"] = "*"
|
||||
os.environ["no_proxy"] = "*"
|
||||
|
||||
|
||||
class TestConfig(BaseConfig):
|
||||
TESTING = True
|
||||
WTF_CSRF_ENABLED = False
|
||||
SQLALCHEMY_DATABASE_URI = "sqlite:///:memory:"
|
||||
# Use a temporary file database for better migration compatibility
|
||||
_temp_db_path = os.path.join(tempfile.gettempdir(), "wizarr_test.db")
|
||||
SQLALCHEMY_DATABASE_URI = f"sqlite:///{_temp_db_path}"
|
||||
|
||||
|
||||
class E2ETestConfig(BaseConfig):
|
||||
@@ -22,13 +31,23 @@ class E2ETestConfig(BaseConfig):
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def app():
|
||||
# Clean up any existing test database
|
||||
if os.path.exists(TestConfig._temp_db_path):
|
||||
os.unlink(TestConfig._temp_db_path)
|
||||
|
||||
app = create_app(TestConfig) # type: ignore[arg-type]
|
||||
with app.app_context():
|
||||
db.create_all()
|
||||
# Use Alembic migrations instead of db.create_all()
|
||||
# This ensures the test database schema matches production
|
||||
upgrade()
|
||||
yield app
|
||||
with app.app_context():
|
||||
db.drop_all()
|
||||
|
||||
# Clean up test database file after session
|
||||
if os.path.exists(TestConfig._temp_db_path):
|
||||
os.unlink(TestConfig._temp_db_path)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(app):
|
||||
|
||||
@@ -3,7 +3,9 @@ def test_app_creation(app):
|
||||
assert app is not None
|
||||
assert app.config["TESTING"] is True
|
||||
assert app.config["WTF_CSRF_ENABLED"] is False
|
||||
assert app.config["SQLALCHEMY_DATABASE_URI"] == "sqlite:///:memory:"
|
||||
# Check that a SQLite test database is being used
|
||||
assert app.config["SQLALCHEMY_DATABASE_URI"].startswith("sqlite:///")
|
||||
assert "wizarr_test.db" in app.config["SQLALCHEMY_DATABASE_URI"]
|
||||
|
||||
# Check that essential Flask app attributes exist
|
||||
assert app.name == "app"
|
||||
|
||||
@@ -57,8 +57,8 @@ class MockMediaClient(MediaClient):
|
||||
class TestIdentityLinkingFix:
|
||||
"""Test the identity linking fix for unlimited vs limited invitations."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Set up test data for each test method."""
|
||||
def _setup_servers(self):
|
||||
"""Set up test servers - must be called within app context."""
|
||||
# Create test servers
|
||||
self.server1 = MediaServer(
|
||||
name="Jellyfin Server",
|
||||
@@ -73,12 +73,12 @@ class TestIdentityLinkingFix:
|
||||
api_key="test-key-2",
|
||||
)
|
||||
db.session.add_all([self.server1, self.server2])
|
||||
db.session.flush()
|
||||
db.session.commit() # Changed from flush to commit
|
||||
|
||||
def test_unlimited_invite_different_users_remain_separate(self, app):
|
||||
"""Test that DIFFERENT users using the same unlimited invite code remain separate identities."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
# Create unlimited invitation
|
||||
form_data = {
|
||||
@@ -135,7 +135,7 @@ class TestIdentityLinkingFix:
|
||||
def test_limited_invite_users_get_linked_across_servers(self, app):
|
||||
"""Test that users using the same limited invite across servers get linked."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
# Create limited (non-unlimited) multi-server invitation
|
||||
form_data = {
|
||||
@@ -192,7 +192,7 @@ class TestIdentityLinkingFix:
|
||||
def test_unlimited_invite_with_same_email_remains_separate(self, app):
|
||||
"""Test that even with same email, unlimited invite users remain separate."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
# Create unlimited invitation
|
||||
form_data = {
|
||||
@@ -242,7 +242,7 @@ class TestIdentityLinkingFix:
|
||||
def test_mixed_scenario_unlimited_then_limited(self, app):
|
||||
"""Test mixed scenario: unlimited invite first, then limited invite."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
# Create unlimited invitation
|
||||
unlimited_form = {
|
||||
@@ -333,7 +333,7 @@ class TestIdentityLinkingFix:
|
||||
def test_edge_case_nonexistent_invitation_code(self, app):
|
||||
"""Test edge case where invitation code doesn't exist in database."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
mock_client = MockMediaClient(media_server=self.server1)
|
||||
|
||||
@@ -357,7 +357,7 @@ class TestIdentityLinkingFix:
|
||||
def test_same_user_unlimited_invite_multiple_servers(self, app):
|
||||
"""Test that the SAME user using unlimited invite across servers gets linked."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
# Create unlimited invitation for multiple servers
|
||||
form_data = {
|
||||
@@ -414,7 +414,7 @@ class TestIdentityLinkingFix:
|
||||
def test_edge_case_invitation_code_is_none(self, app):
|
||||
"""Test edge case where invitation code is None."""
|
||||
with app.app_context():
|
||||
self.setup_method()
|
||||
self._setup_servers()
|
||||
|
||||
mock_client = MockMediaClient(media_server=self.server1)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user