mirror of
https://github.com/plexguide/Huntarr.io.git
synced 2025-12-23 22:18:23 -05:00
Refactor logging and database management
- Consolidated logging functionality into a unified database system, replacing the previous logs_database.py and manager_database.py with a single database implementation.
- Updated main.py to initialize the new logging system and handle log cleanup scheduling.
- Enhanced the frontend logs section to load and save user settings from localStorage, improving user experience.
- Updated version to 8.0.3 to reflect these changes.
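For orientation, a minimal sketch of the call pattern this consolidation produces, using only names that appear in the diff below (the identity assertions follow from the singleton accessors near the end of database.py; treat the snippet as illustrative, not part of the commit):

    from datetime import datetime
    from src.primary.utils.database import (
        get_database, get_logs_database, get_manager_database
    )

    # All three accessors now resolve to the same HuntarrDatabase singleton,
    # so logs and hunt history share one huntarr.db file.
    db = get_database()
    assert get_logs_database() is db
    assert get_manager_database() is db

    db.insert_log(datetime.now(), "INFO", "system", "unified logging online")
    print(db.get_log_count(app_type="system"))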
@@ -1084,51 +1084,78 @@ if (logsNextPage) logsNextPage.disabled = true;
 let pageSize = 20; // Default page size
 let currentPage = 1;
 
-// Function to reset logs section to default values
-function resetLogsToDefaults() {
-    // Reset app filter to "all"
-    const logAppSelect = document.getElementById('logAppSelect');
-    if (logAppSelect) {
-        logAppSelect.value = 'all';
-        // Trigger change event to update the UI
-        logAppSelect.dispatchEvent(new Event('change'));
-    }
-
-    // Reset log level to "info"
-    const logLevelSelect = document.getElementById('logLevelSelect');
-    if (logLevelSelect) {
-        logLevelSelect.value = 'info';
-        // Trigger change event to update filtering
-        logLevelSelect.dispatchEvent(new Event('change'));
-    }
-
-    // Reset page size to 20
-    const logsPageSize = document.getElementById('logsPageSize');
-    if (logsPageSize) {
-        logsPageSize.value = '20';
-        pageSize = 20;
-    }
-
-    // Reset to first page
-    currentPage = 1;
-
-    // Update pagination display
-    setTimeout(() => {
-        updateLogsPagination();
-    }, 100);
-
-    console.log('[LogsSection] Reset to defaults: All apps, INFO level, 20 entries per page');
-}
-
-// Watch for when logs section becomes active and reset to defaults
+// Load saved logs settings from localStorage
+function loadLogsSettings() {
+    try {
+        const savedSettings = localStorage.getItem('huntarr-logs-settings');
+        if (savedSettings) {
+            const settings = JSON.parse(savedSettings);
+
+            // Restore page size
+            if (settings.pageSize) {
+                pageSize = settings.pageSize;
+                const logsPageSize = document.getElementById('logsPageSize');
+                if (logsPageSize) {
+                    logsPageSize.value = settings.pageSize.toString();
+                }
+            }
+
+            // Restore log level
+            if (settings.logLevel) {
+                const logLevelSelect = document.getElementById('logLevelSelect');
+                if (logLevelSelect) {
+                    logLevelSelect.value = settings.logLevel;
+                }
+            }
+
+            // Restore app filter
+            if (settings.appFilter) {
+                const logAppSelect = document.getElementById('logAppSelect');
+                if (logAppSelect) {
+                    logAppSelect.value = settings.appFilter;
+                }
+            }
+
+            console.log('[LogsSection] Restored settings:', settings);
+        }
+    } catch (e) {
+        console.warn('[LogsSection] Failed to load saved settings:', e);
+    }
+}
+
+// Save logs settings to localStorage
+function saveLogsSettings() {
+    try {
+        const logLevelSelect = document.getElementById('logLevelSelect');
+        const logAppSelect = document.getElementById('logAppSelect');
+        const logsPageSize = document.getElementById('logsPageSize');
+
+        const settings = {
+            pageSize: pageSize,
+            logLevel: logLevelSelect ? logLevelSelect.value : 'info',
+            appFilter: logAppSelect ? logAppSelect.value : 'all'
+        };
+
+        localStorage.setItem('huntarr-logs-settings', JSON.stringify(settings));
+        console.log('[LogsSection] Saved settings:', settings);
+    } catch (e) {
+        console.warn('[LogsSection] Failed to save settings:', e);
+    }
+}
+
+// Watch for when logs section becomes active and restore settings
 const observer = new MutationObserver(function(mutations) {
     mutations.forEach(function(mutation) {
         if (mutation.type === 'attributes' && mutation.attributeName === 'class') {
             const logsSection = document.getElementById('logsSection');
             if (logsSection && logsSection.classList.contains('active')) {
-                // Small delay to ensure all elements are ready
+                // Small delay to ensure all elements are ready, then restore settings
                 setTimeout(() => {
-                    resetLogsToDefaults();
+                    loadLogsSettings();
+                    // Update pagination after restoring settings
+                    setTimeout(() => {
+                        updateLogsPagination();
+                    }, 100);
                 }, 200);
             }
         }
@@ -1201,6 +1228,7 @@ function handlePageSizeChange() {
         pageSize = parseInt(logsPageSize.value);
         currentPage = 1; // Reset to first page
         updateLogsPagination();
+        saveLogsSettings(); // Save settings when page size changes
     }
 }
@@ -1252,6 +1280,7 @@ if (logLevelSelect) {
         setTimeout(() => {
             updateLogsPagination();
         }, 300);
+        saveLogsSettings(); // Save settings when log level changes
     });
 }
@@ -1264,6 +1293,7 @@ if (logAppSelect) {
         setTimeout(() => {
             updateLogsPagination();
         }, 300);
+        saveLogsSettings(); // Save settings when app filter changes
     });
 }
main.py (30 changed lines)
@@ -128,15 +128,6 @@ try:
     setup_clean_logging()
     huntarr_logger.info("Clean logging system initialized for frontend consumption.")
 
-    # Initialize database logging system
-    try:
-        from primary.utils.logs_database import get_logs_database, schedule_log_cleanup
-        logs_db = get_logs_database()
-        schedule_log_cleanup()
-        huntarr_logger.info("Database logging system initialized with scheduled cleanup.")
-    except Exception as e:
-        huntarr_logger.warning(f"Failed to initialize database logging: {e}")
-
     huntarr_logger.info("Successfully imported application components.")
     # Main function startup message removed to reduce log spam
 except ImportError as e:
@@ -337,19 +328,14 @@ def main():
         initialize_database()
         huntarr_logger.info("Main database initialization completed successfully")
 
-        # Initialize manager database and migrate history if needed
-        from src.primary.utils.manager_database import get_manager_database
-        from src.primary.utils.database import get_database
-        manager_db = get_manager_database()
-
-        # Attempt to migrate history from huntarr.db if it exists
-        main_db = get_database()
-        if hasattr(main_db, 'db_path'):
-            try:
-                manager_db.migrate_from_huntarr_db(main_db.db_path)
-                huntarr_logger.info("Hunt Manager database initialized and migration completed")
-            except Exception as migration_error:
-                huntarr_logger.warning(f"History migration completed with warnings: {migration_error}")
+        # Initialize database logging system (now uses main huntarr.db)
+        try:
+            from primary.utils.database import get_logs_database, schedule_log_cleanup
+            logs_db = get_logs_database()
+            schedule_log_cleanup()
+            huntarr_logger.info("Database logging system initialized with scheduled cleanup.")
+        except Exception as e:
+            huntarr_logger.warning(f"Failed to initialize database logging: {e}")
 
         # Refresh sponsors from manifest.json on startup
         try:
@@ -14,7 +14,7 @@ from typing import Dict, Any, Optional
 logger = logging.getLogger(__name__)
 
 # Import manager database
-from src.primary.utils.manager_database import get_manager_database
+from src.primary.utils.database import get_manager_database
 
 # Lock to prevent race conditions during database operations
 history_locks = {
@@ -4,9 +4,9 @@ Database-based log routes for Huntarr web interface
 Replaces file-based log reading with database queries
 """
 
-from flask import Blueprint, jsonify, request
-from src.primary.utils.logs_database import get_logs_database
+from flask import Blueprint, jsonify, request, current_app
+from src.primary.utils.logger import get_logger
+from src.primary.utils.database import get_logs_database
 from datetime import datetime
 import pytz
@@ -113,7 +113,7 @@ class DatabaseLogHandler(logging.Handler):
     def logs_db(self):
         """Lazy load the logs database instance"""
         if self._logs_db is None:
-            from src.primary.utils.logs_database import get_logs_database
+            from src.primary.utils.database import get_logs_database
             self._logs_db = get_logs_database()
         return self._logs_db
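The diff shows only the lazy logs_db property of DatabaseLogHandler; its emit method is not part of this hunk. A minimal sketch of how such a handler plausibly routes records into insert_log — the record-to-app_type mapping here is an assumption, not shown in the commit:

    import logging
    from datetime import datetime

    class DatabaseLogHandlerSketch(logging.Handler):
        """Hypothetical emit path; only the logs_db property appears in the diff."""

        def __init__(self):
            super().__init__()
            self._logs_db = None

        @property
        def logs_db(self):
            if self._logs_db is None:
                from src.primary.utils.database import get_logs_database
                self._logs_db = get_logs_database()
            return self._logs_db

        def emit(self, record: logging.LogRecord) -> None:
            try:
                # Assumed mapping: first logger-name segment -> app_type column
                app_type = record.name.split('.')[0] or 'system'
                self.logs_db.insert_log(
                    timestamp=datetime.fromtimestamp(record.created),
                    level=record.levelname,
                    app_type=app_type,
                    message=record.getMessage(),
                    logger_name=record.name,
                )
            except Exception:
                self.handleError(record)  # never raise from a log handler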
@@ -9,7 +9,7 @@ import json
 import sqlite3
 from pathlib import Path
 from typing import Dict, List, Any, Optional, Set
-from datetime import datetime
+from datetime import datetime, timedelta
 import logging
 import time
@@ -22,6 +22,16 @@ class HuntarrDatabase:
         self.db_path = self._get_database_path()
         self.ensure_database_exists()
 
+    def execute_query(self, query: str, params: tuple = None) -> List[tuple]:
+        """Execute a raw SQL query and return results"""
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.cursor()
+            if params:
+                cursor.execute(query, params)
+            else:
+                cursor.execute(query)
+            return cursor.fetchall()
+
     def _get_database_path(self) -> Path:
         """Get database path - use /config for Docker, local data directory for development"""
         # Check if running in Docker (config directory exists)
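A quick usage sketch for the new execute_query helper, against the logs table defined later in this diff; user-supplied values should always go through params so sqlite3 binds them as placeholders rather than interpolating them into the SQL string:

    from src.primary.utils.database import get_database

    db = get_database()

    # Parameterized query: the value is bound by sqlite3, not string-formatted.
    rows = db.execute_query(
        "SELECT level, COUNT(*) FROM logs WHERE app_type = ? GROUP BY level",
        ("sonarr",),
    )
    for level, count in rows:
        print(level, count)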
@@ -211,6 +221,35 @@ class HuntarrDatabase:
             )
         ''')
 
+        # Create logs table for all application logs
+        conn.execute('''
+            CREATE TABLE IF NOT EXISTS logs (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                timestamp DATETIME NOT NULL,
+                level TEXT NOT NULL,
+                app_type TEXT NOT NULL,
+                message TEXT NOT NULL,
+                logger_name TEXT,
+                created_at DATETIME DEFAULT CURRENT_TIMESTAMP
+            )
+        ''')
+
+        # Create hunt_history table for tracking processed media history
+        conn.execute('''
+            CREATE TABLE IF NOT EXISTS hunt_history (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                app_type TEXT NOT NULL,
+                instance_name TEXT NOT NULL,
+                media_id TEXT NOT NULL,
+                processed_info TEXT NOT NULL,
+                operation_type TEXT DEFAULT 'missing',
+                discovered BOOLEAN DEFAULT FALSE,
+                date_time INTEGER NOT NULL,
+                date_time_readable TEXT NOT NULL,
+                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+            )
+        ''')
+
         # Add temp_2fa_secret column if it doesn't exist (for existing databases)
         try:
             conn.execute('ALTER TABLE users ADD COLUMN temp_2fa_secret TEXT')
@@ -245,6 +284,15 @@ class HuntarrDatabase:
         conn.execute('CREATE INDEX IF NOT EXISTS idx_state_data_app_type ON state_data(app_type, state_type)')
         conn.execute('CREATE INDEX IF NOT EXISTS idx_swaparr_state_app_name ON swaparr_state(app_name, state_type)')
         conn.execute('CREATE INDEX IF NOT EXISTS idx_users_username ON users(username)')
         conn.execute('CREATE INDEX IF NOT EXISTS idx_sponsors_login ON sponsors(login)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_app_type ON logs(app_type)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_level ON logs(level)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_app_level ON logs(app_type, level)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_app_instance ON hunt_history(app_type, instance_name)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_date_time ON hunt_history(date_time)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_media_id ON hunt_history(media_id)')
+        conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_operation_type ON hunt_history(operation_type)')
 
         conn.commit()
         logger.info(f"Database initialized at: {self.db_path}")
@@ -1189,6 +1237,268 @@ class HuntarrDatabase:
             sponsor_data.get('category', 'past')
         ))
 
+    # Logs Database Methods
+    def insert_log(self, timestamp: datetime, level: str, app_type: str, message: str, logger_name: str = None):
+        """Insert a new log entry"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.execute('''
+                    INSERT INTO logs (timestamp, level, app_type, message, logger_name)
+                    VALUES (?, ?, ?, ?, ?)
+                ''', (timestamp.isoformat(), level, app_type, message, logger_name))
+                conn.commit()
+        except Exception as e:
+            logger.error(f"Error inserting log entry: {e}")
+
+    def get_logs(self, app_type: str = None, level: str = None, limit: int = 100, offset: int = 0, search: str = None) -> List[Dict[str, Any]]:
+        """Get logs with optional filtering"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                conn.row_factory = sqlite3.Row
+
+                # Build query with filters
+                query = "SELECT * FROM logs WHERE 1=1"
+                params = []
+
+                if app_type:
+                    query += " AND app_type = ?"
+                    params.append(app_type)
+
+                if level:
+                    query += " AND level = ?"
+                    params.append(level)
+
+                if search:
+                    query += " AND message LIKE ?"
+                    params.append(f"%{search}%")
+
+                query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
+                params.extend([limit, offset])
+
+                cursor = conn.execute(query, params)
+                rows = cursor.fetchall()
+
+                return [dict(row) for row in rows]
+        except Exception as e:
+            logger.error(f"Error getting logs: {e}")
+            return []
+
+    def get_log_count(self, app_type: str = None, level: str = None, search: str = None) -> int:
+        """Get total count of logs matching filters"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                query = "SELECT COUNT(*) FROM logs WHERE 1=1"
+                params = []
+
+                if app_type:
+                    query += " AND app_type = ?"
+                    params.append(app_type)
+
+                if level:
+                    query += " AND level = ?"
+                    params.append(level)
+
+                if search:
+                    query += " AND message LIKE ?"
+                    params.append(f"%{search}%")
+
+                cursor = conn.execute(query, params)
+                return cursor.fetchone()[0]
+        except Exception as e:
+            logger.error(f"Error getting log count: {e}")
+            return 0
+
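A usage sketch pairing get_logs with get_log_count for paginated retrieval (method signatures as added above; the page and page_size values are illustrative):

    from src.primary.utils.database import get_logs_database

    db = get_logs_database()
    page, page_size = 1, 20

    total = db.get_log_count(app_type="sonarr", level="ERROR")
    entries = db.get_logs(
        app_type="sonarr",
        level="ERROR",
        limit=page_size,
        offset=(page - 1) * page_size,
    )
    print(f"Showing {len(entries)} of {total} matching log rows")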
+    def cleanup_old_logs(self, days_to_keep: int = 30, max_entries_per_app: int = 10000):
+        """Clean up old logs based on age and count limits"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                # Time-based cleanup
+                cutoff_date = datetime.now() - timedelta(days=days_to_keep)
+                cursor = conn.execute(
+                    "DELETE FROM logs WHERE timestamp < ?",
+                    (cutoff_date.isoformat(),)
+                )
+                deleted_by_age = cursor.rowcount
+
+                # Count-based cleanup per app type
+                app_types = ['system', 'sonarr', 'radarr', 'lidarr', 'readarr', 'whisparr', 'eros', 'swaparr']
+                total_deleted_by_count = 0
+
+                for app_type in app_types:
+                    cursor = conn.execute('''
+                        DELETE FROM logs
+                        WHERE app_type = ? AND id NOT IN (
+                            SELECT id FROM logs
+                            WHERE app_type = ?
+                            ORDER BY timestamp DESC
+                            LIMIT ?
+                        )
+                    ''', (app_type, app_type, max_entries_per_app))
+                    total_deleted_by_count += cursor.rowcount
+
+                conn.commit()
+
+                if deleted_by_age > 0 or total_deleted_by_count > 0:
+                    logger.info(f"Cleaned up logs: {deleted_by_age} by age, {total_deleted_by_count} by count")
+
+                return deleted_by_age + total_deleted_by_count
+        except Exception as e:
+            logger.error(f"Error cleaning up logs: {e}")
+            return 0
+
+    def get_app_types_from_logs(self) -> List[str]:
+        """Get list of all app types that have logs"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.execute("SELECT DISTINCT app_type FROM logs ORDER BY app_type")
+                return [row[0] for row in cursor.fetchall()]
+        except Exception as e:
+            logger.error(f"Error getting app types from logs: {e}")
+            return []
+
+    def get_log_levels(self) -> List[str]:
+        """Get list of all log levels that exist"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                cursor = conn.execute("SELECT DISTINCT level FROM logs ORDER BY level")
+                return [row[0] for row in cursor.fetchall()]
+        except Exception as e:
+            logger.error(f"Error getting log levels: {e}")
+            return []
+
+    def clear_logs(self, app_type: str = None):
+        """Clear logs for a specific app type or all logs"""
+        try:
+            with sqlite3.connect(self.db_path) as conn:
+                if app_type:
+                    cursor = conn.execute("DELETE FROM logs WHERE app_type = ?", (app_type,))
+                else:
+                    cursor = conn.execute("DELETE FROM logs")
+
+                deleted_count = cursor.rowcount
+                conn.commit()
+
+                logger.info(f"Cleared {deleted_count} logs" + (f" for {app_type}" if app_type else ""))
+                return deleted_count
+        except Exception as e:
+            logger.error(f"Error clearing logs: {e}")
+            return 0
+
+    # Hunt History/Manager Database Methods
+    def add_hunt_history_entry(self, app_type: str, instance_name: str, media_id: str,
+                               processed_info: str, operation_type: str = "missing",
+                               discovered: bool = False, date_time: int = None) -> Dict[str, Any]:
+        """Add a new hunt history entry to the database"""
+        if date_time is None:
+            date_time = int(time.time())
+
+        date_time_readable = datetime.fromtimestamp(date_time).strftime('%Y-%m-%d %H:%M:%S')
+
+        with sqlite3.connect(self.db_path) as conn:
+            cursor = conn.execute('''
+                INSERT INTO hunt_history
+                (app_type, instance_name, media_id, processed_info, operation_type, discovered, date_time, date_time_readable)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+            ''', (app_type, instance_name, media_id, processed_info, operation_type, discovered, date_time, date_time_readable))
+
+            entry_id = cursor.lastrowid
+            conn.commit()
+
+        # Return the created entry
+        entry = {
+            "id": entry_id,
+            "app_type": app_type,
+            "instance_name": instance_name,
+            "media_id": media_id,
+            "processed_info": processed_info,
+            "operation_type": operation_type,
+            "discovered": discovered,
+            "date_time": date_time,
+            "date_time_readable": date_time_readable
+        }
+
+        logger.info(f"Added hunt history entry for {app_type}-{instance_name}: {processed_info}")
+        return entry
+
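A usage sketch for the consolidated hunt-history write path (field values are illustrative; get_manager_database is defined at the bottom of this diff and returns the same singleton):

    from src.primary.utils.database import get_manager_database

    db = get_manager_database()
    entry = db.add_hunt_history_entry(
        app_type="radarr",
        instance_name="default",
        media_id="12345",
        processed_info="Example Movie (2021)",
        operation_type="missing",
    )
    print(entry["id"], entry["date_time_readable"])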
+    def get_hunt_history(self, app_type: str = None, search_query: str = None,
+                         page: int = 1, page_size: int = 20) -> Dict[str, Any]:
+        """Get hunt history entries with pagination and filtering"""
+        with sqlite3.connect(self.db_path) as conn:
+            conn.row_factory = sqlite3.Row
+
+            # Build WHERE clause
+            where_conditions = []
+            params = []
+
+            if app_type and app_type != "all":
+                where_conditions.append("app_type = ?")
+                params.append(app_type)
+
+            if search_query:
+                where_conditions.append("(processed_info LIKE ? OR media_id LIKE ?)")
+                params.extend([f"%{search_query}%", f"%{search_query}%"])
+
+            where_clause = "WHERE " + " AND ".join(where_conditions) if where_conditions else ""
+
+            # Get total count
+            count_query = f"SELECT COUNT(*) FROM hunt_history {where_clause}"
+            cursor = conn.execute(count_query, params)
+            total_entries = cursor.fetchone()[0]
+
+            # Calculate pagination
+            total_pages = max(1, (total_entries + page_size - 1) // page_size)
+            offset = (page - 1) * page_size
+
+            # Get entries
+            entries_query = f"""
+                SELECT * FROM hunt_history {where_clause}
+                ORDER BY date_time DESC
+                LIMIT ? OFFSET ?
+            """
+            cursor = conn.execute(entries_query, params + [page_size, offset])
+
+            entries = []
+            current_time = int(time.time())
+
+            for row in cursor.fetchall():
+                entry = dict(row)
+                # Calculate "how long ago"
+                seconds_ago = current_time - entry["date_time"]
+                entry["how_long_ago"] = self._format_time_ago(seconds_ago)
+                entries.append(entry)
+
+            return {
+                "entries": entries,
+                "total_entries": total_entries,
+                "total_pages": total_pages,
+                "current_page": page
+            }
+
+    def clear_hunt_history(self, app_type: str = None):
+        """Clear hunt history entries"""
+        with sqlite3.connect(self.db_path) as conn:
+            if app_type and app_type != "all":
+                conn.execute("DELETE FROM hunt_history WHERE app_type = ?", (app_type,))
+                logger.info(f"Cleared hunt history for {app_type}")
+            else:
+                conn.execute("DELETE FROM hunt_history")
+                logger.info("Cleared all hunt history")
+            conn.commit()
+
+    def _format_time_ago(self, seconds_ago: int) -> str:
+        """Format seconds into human-readable time ago string"""
+        if seconds_ago < 60:
+            return f"{seconds_ago} seconds ago"
+        elif seconds_ago < 3600:
+            minutes = seconds_ago // 60
+            return f"{minutes} minute{'s' if minutes != 1 else ''} ago"
+        elif seconds_ago < 86400:
+            hours = seconds_ago // 3600
+            return f"{hours} hour{'s' if hours != 1 else ''} ago"
+        else:
+            days = seconds_ago // 86400
+            return f"{days} day{'s' if days != 1 else ''} ago"
+
 # Global database instance
 _database_instance = None
@@ -1197,4 +1507,36 @@ def get_database() -> HuntarrDatabase:
     global _database_instance
     if _database_instance is None:
         _database_instance = HuntarrDatabase()
     return _database_instance
+
+# Logs Database Functions (consolidated from logs_database.py)
+def get_logs_database() -> HuntarrDatabase:
+    """Get the database instance for logs operations"""
+    return get_database()
+
+def schedule_log_cleanup():
+    """Schedule periodic log cleanup - call this from background tasks"""
+    import threading
+    import time
+
+    def cleanup_worker():
+        """Background worker to clean up logs periodically"""
+        while True:
+            try:
+                time.sleep(3600)  # Run every hour
+                db = get_database()
+                deleted_count = db.cleanup_old_logs(days_to_keep=30, max_entries_per_app=10000)
+                if deleted_count > 0:
+                    logger.info(f"Scheduled cleanup removed {deleted_count} old log entries")
+            except Exception as e:
+                logger.error(f"Error in scheduled log cleanup: {e}")
+
+    # Start cleanup thread
+    cleanup_thread = threading.Thread(target=cleanup_worker, daemon=True)
+    cleanup_thread.start()
+    logger.info("Scheduled log cleanup thread started")
+
+# Manager Database Functions (consolidated from manager_database.py)
+def get_manager_database() -> HuntarrDatabase:
+    """Get the database instance for manager operations"""
+    return get_database()
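Note that schedule_log_cleanup() returns immediately and its daemon worker sleeps a full hour before its first pass. A hedged sketch of startup wiring, where the eager one-off pass is an assumption about desired behavior rather than something the commit does:

    from src.primary.utils.database import get_database, schedule_log_cleanup

    # Start the hourly background worker (daemon thread; returns at once).
    schedule_log_cleanup()

    # Optional eager pass so retention applies right away, since the worker
    # will not run for its first hour (not part of the commit itself).
    removed = get_database().cleanup_old_logs(days_to_keep=30, max_entries_per_app=10000)
    print(f"Startup cleanup removed {removed} log rows")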
src/primary/utils/logs_database.py (deleted)
@@ -1,246 +0,0 @@
-#!/usr/bin/env python3
-"""
-Logs Database Manager for Huntarr
-Handles all log storage operations in a separate logs.db database
-"""
-
-import sqlite3
-import json
-import time
-from datetime import datetime, timedelta
-from pathlib import Path
-from typing import Dict, List, Any, Optional
-from contextlib import contextmanager
-import threading
-
-# Don't import logger here to avoid circular dependencies during initialization
-# from src.primary.utils.logger import get_logger
-# logger = get_logger(__name__)
-
-class LogsDatabase:
-    """Database manager for log storage"""
-
-    def __init__(self):
-        self.db_path = self._get_database_path()
-        self.ensure_database_exists()
-
-    def _get_database_path(self) -> Path:
-        """Get the path to the logs database file"""
-        # Use simple fallback approach to avoid import issues
-        import os
-        config_dir = os.environ.get('CONFIG_DIR', '/config')
-        db_path = Path(config_dir) / "logs.db"
-        return db_path
-
-    def ensure_database_exists(self):
-        """Create the logs database and tables if they don't exist"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                # Create logs table
-                conn.execute('''
-                    CREATE TABLE IF NOT EXISTS logs (
-                        id INTEGER PRIMARY KEY AUTOINCREMENT,
-                        timestamp DATETIME NOT NULL,
-                        level TEXT NOT NULL,
-                        app_type TEXT NOT NULL,
-                        message TEXT NOT NULL,
-                        logger_name TEXT,
-                        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
-                    )
-                ''')
-
-                # Create indexes for better performance
-                conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON logs(timestamp)')
-                conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_app_type ON logs(app_type)')
-                conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_level ON logs(level)')
-                conn.execute('CREATE INDEX IF NOT EXISTS idx_logs_app_level ON logs(app_type, level)')
-
-                conn.commit()
-        except Exception as e:
-            print(f"Failed to initialize logs database: {e}")
-            raise
-
-    def insert_log(self, timestamp: datetime, level: str, app_type: str, message: str, logger_name: str = None):
-        """Insert a new log entry"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                conn.execute('''
-                    INSERT INTO logs (timestamp, level, app_type, message, logger_name)
-                    VALUES (?, ?, ?, ?, ?)
-                ''', (timestamp.isoformat(), level, app_type, message, logger_name))
-                conn.commit()
-        except Exception as e:
-            # Don't use logger here to avoid infinite recursion
-            print(f"Error inserting log entry: {e}")
-
-    def get_logs(self, app_type: str = None, level: str = None, limit: int = 100, offset: int = 0, search: str = None) -> List[Dict[str, Any]]:
-        """Get logs with optional filtering"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                conn.row_factory = sqlite3.Row
-
-                # Build query with filters
-                query = "SELECT * FROM logs WHERE 1=1"
-                params = []
-
-                if app_type:
-                    query += " AND app_type = ?"
-                    params.append(app_type)
-
-                if level:
-                    query += " AND level = ?"
-                    params.append(level)
-
-                if search:
-                    query += " AND message LIKE ?"
-                    params.append(f"%{search}%")
-
-                query += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
-                params.extend([limit, offset])
-
-                cursor = conn.execute(query, params)
-                rows = cursor.fetchall()
-
-                return [dict(row) for row in rows]
-        except Exception as e:
-            print(f"Error getting logs: {e}")
-            return []
-
-    def get_log_count(self, app_type: str = None, level: str = None, search: str = None) -> int:
-        """Get total count of logs matching filters"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                query = "SELECT COUNT(*) FROM logs WHERE 1=1"
-                params = []
-
-                if app_type:
-                    query += " AND app_type = ?"
-                    params.append(app_type)
-
-                if level:
-                    query += " AND level = ?"
-                    params.append(level)
-
-                if search:
-                    query += " AND message LIKE ?"
-                    params.append(f"%{search}%")
-
-                cursor = conn.execute(query, params)
-                return cursor.fetchone()[0]
-        except Exception as e:
-            print(f"Error getting log count: {e}")
-            return 0
-
-    def cleanup_old_logs(self, days_to_keep: int = 30, max_entries_per_app: int = 10000):
-        """Clean up old logs based on age and count limits"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                # Time-based cleanup
-                cutoff_date = datetime.now() - timedelta(days=days_to_keep)
-                cursor = conn.execute(
-                    "DELETE FROM logs WHERE timestamp < ?",
-                    (cutoff_date.isoformat(),)
-                )
-                deleted_by_age = cursor.rowcount
-
-                # Count-based cleanup per app type
-                app_types = ['system', 'sonarr', 'radarr', 'lidarr', 'readarr', 'whisparr', 'eros', 'swaparr']
-                total_deleted_by_count = 0
-
-                for app_type in app_types:
-                    cursor = conn.execute('''
-                        DELETE FROM logs
-                        WHERE app_type = ? AND id NOT IN (
-                            SELECT id FROM logs
-                            WHERE app_type = ?
-                            ORDER BY timestamp DESC
-                            LIMIT ?
-                        )
-                    ''', (app_type, app_type, max_entries_per_app))
-                    total_deleted_by_count += cursor.rowcount
-
-                conn.commit()
-
-                if deleted_by_age > 0 or total_deleted_by_count > 0:
-                    print(f"Cleaned up logs: {deleted_by_age} by age, {total_deleted_by_count} by count")
-
-                return deleted_by_age + total_deleted_by_count
-        except Exception as e:
-            print(f"Error cleaning up logs: {e}")
-            return 0
-
-    def get_app_types(self) -> List[str]:
-        """Get list of all app types that have logs"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                cursor = conn.execute("SELECT DISTINCT app_type FROM logs ORDER BY app_type")
-                return [row[0] for row in cursor.fetchall()]
-        except Exception as e:
-            print(f"Error getting app types: {e}")
-            return []
-
-    def get_log_levels(self) -> List[str]:
-        """Get list of all log levels that exist"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                cursor = conn.execute("SELECT DISTINCT level FROM logs ORDER BY level")
-                return [row[0] for row in cursor.fetchall()]
-        except Exception as e:
-            print(f"Error getting log levels: {e}")
-            return []
-
-    def clear_logs(self, app_type: str = None):
-        """Clear logs for a specific app type or all logs"""
-        try:
-            with sqlite3.connect(self.db_path) as conn:
-                if app_type:
-                    cursor = conn.execute("DELETE FROM logs WHERE app_type = ?", (app_type,))
-                else:
-                    cursor = conn.execute("DELETE FROM logs")
-
-                deleted_count = cursor.rowcount
-                conn.commit()
-
-                print(f"Cleared {deleted_count} logs" + (f" for {app_type}" if app_type else ""))
-                return deleted_count
-        except Exception as e:
-            print(f"Error clearing logs: {e}")
-            return 0
-
-
-# Global instance
-_logs_db = None
-_logs_db_lock = threading.Lock()
-
-def get_logs_database() -> LogsDatabase:
-    """Get the global logs database instance (thread-safe singleton)"""
-    global _logs_db
-    if _logs_db is None:
-        with _logs_db_lock:
-            # Double-check locking pattern
-            if _logs_db is None:
-                _logs_db = LogsDatabase()
-    return _logs_db
-
-
-def schedule_log_cleanup():
-    """Schedule periodic log cleanup - call this from background tasks"""
-    import threading
-    import time
-
-    def cleanup_worker():
-        """Background worker to clean up logs periodically"""
-        while True:
-            try:
-                time.sleep(3600)  # Run every hour
-                logs_db = get_logs_database()
-                deleted_count = logs_db.cleanup_old_logs(days_to_keep=30, max_entries_per_app=10000)
-                if deleted_count > 0:
-                    print(f"Scheduled cleanup removed {deleted_count} old log entries")
-            except Exception as e:
-                print(f"Error in scheduled log cleanup: {e}")
-
-    # Start cleanup thread
-    cleanup_thread = threading.Thread(target=cleanup_worker, daemon=True)
-    cleanup_thread.start()
-    print("Scheduled log cleanup thread started")
src/primary/utils/manager_database.py (deleted)
@@ -1,284 +0,0 @@
-"""
-Manager Database for Huntarr
-Handles Hunt Manager (history) operations in a separate manager.db database
-"""
-
-import os
-import json
-import sqlite3
-from pathlib import Path
-from typing import Dict, List, Any, Optional
-from datetime import datetime
-import logging
-import time
-
-logger = logging.getLogger(__name__)
-
-class ManagerDatabase:
-    """Database manager for Hunt Manager functionality"""
-
-    def __init__(self):
-        self.db_path = self._get_database_path()
-        self.ensure_database_exists()
-
-    def _get_database_path(self) -> Path:
-        """Get database path - use /config for Docker, local data directory for development"""
-        # Check if running in Docker (config directory exists)
-        config_dir = Path("/config")
-        if config_dir.exists() and config_dir.is_dir():
-            # Running in Docker - use persistent config directory
-            return config_dir / "manager.db"
-        else:
-            # For local development, use data directory in project root
-            project_root = Path(__file__).parent.parent.parent.parent
-            data_dir = project_root / "data"
-
-            # Ensure directory exists
-            data_dir.mkdir(parents=True, exist_ok=True)
-            return data_dir / "manager.db"
-
-    def ensure_database_exists(self):
-        """Create database and all tables if they don't exist"""
-        with sqlite3.connect(self.db_path) as conn:
-            conn.execute('PRAGMA foreign_keys = ON')
-
-            # Create hunt_history table for tracking processed media history
-            conn.execute('''
-                CREATE TABLE IF NOT EXISTS hunt_history (
-                    id INTEGER PRIMARY KEY AUTOINCREMENT,
-                    app_type TEXT NOT NULL,
-                    instance_name TEXT NOT NULL,
-                    media_id TEXT NOT NULL,
-                    processed_info TEXT NOT NULL,
-                    operation_type TEXT DEFAULT 'missing',
-                    discovered BOOLEAN DEFAULT FALSE,
-                    date_time INTEGER NOT NULL,
-                    date_time_readable TEXT NOT NULL,
-                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-                )
-            ''')
-
-            # Create indexes for better performance
-            conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_app_instance ON hunt_history(app_type, instance_name)')
-            conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_date_time ON hunt_history(date_time)')
-            conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_media_id ON hunt_history(media_id)')
-            conn.execute('CREATE INDEX IF NOT EXISTS idx_hunt_history_operation_type ON hunt_history(operation_type)')
-
-            conn.commit()
-            logger.info(f"Manager database initialized at: {self.db_path}")
-
-    def add_hunt_history_entry(self, app_type: str, instance_name: str, media_id: str,
-                               processed_info: str, operation_type: str = "missing",
-                               discovered: bool = False, date_time: int = None) -> Dict[str, Any]:
-        """Add a new hunt history entry to the database"""
-        if date_time is None:
-            date_time = int(time.time())
-
-        date_time_readable = datetime.fromtimestamp(date_time).strftime('%Y-%m-%d %H:%M:%S')
-
-        with sqlite3.connect(self.db_path) as conn:
-            cursor = conn.execute('''
-                INSERT INTO hunt_history
-                (app_type, instance_name, media_id, processed_info, operation_type, discovered, date_time, date_time_readable)
-                VALUES (?, ?, ?, ?, ?, ?, ?, ?)
-            ''', (app_type, instance_name, media_id, processed_info, operation_type, discovered, date_time, date_time_readable))
-
-            entry_id = cursor.lastrowid
-            conn.commit()
-
-        # Return the created entry
-        entry = {
-            "id": entry_id,
-            "app_type": app_type,
-            "instance_name": instance_name,
-            "media_id": media_id,
-            "processed_info": processed_info,
-            "operation_type": operation_type,
-            "discovered": discovered,
-            "date_time": date_time,
-            "date_time_readable": date_time_readable
-        }
-
-        logger.info(f"Added hunt history entry for {app_type}-{instance_name}: {processed_info}")
-        return entry
-
-    def get_hunt_history(self, app_type: str = None, search_query: str = None,
-                         page: int = 1, page_size: int = 20) -> Dict[str, Any]:
-        """Get hunt history entries with pagination and filtering"""
-        with sqlite3.connect(self.db_path) as conn:
-            conn.row_factory = sqlite3.Row
-
-            # Build the base query
-            where_conditions = []
-            params = []
-
-            if app_type and app_type != "all":
-                where_conditions.append("app_type = ?")
-                params.append(app_type)
-
-            if search_query and search_query.strip():
-                search_query = search_query.lower()
-                where_conditions.append("""
-                    (LOWER(processed_info) LIKE ? OR
-                     LOWER(instance_name) LIKE ? OR
-                     LOWER(media_id) LIKE ?)
-                """)
-                search_param = f"%{search_query}%"
-                params.extend([search_param, search_param, search_param])
-
-            where_clause = ""
-            if where_conditions:
-                where_clause = "WHERE " + " AND ".join(where_conditions)
-
-            # Get total count
-            count_query = f"SELECT COUNT(*) FROM hunt_history {where_clause}"
-            cursor = conn.execute(count_query, params)
-            total_entries = cursor.fetchone()[0]
-
-            # Calculate pagination
-            total_pages = (total_entries + page_size - 1) // page_size if total_entries > 0 else 1
-
-            # Adjust page if out of bounds
-            if page < 1:
-                page = 1
-            elif page > total_pages:
-                page = total_pages
-
-            # Get paginated entries
-            offset = (page - 1) * page_size
-            entries_query = f"""
-                SELECT * FROM hunt_history {where_clause}
-                ORDER BY date_time DESC
-                LIMIT ? OFFSET ?
-            """
-            cursor = conn.execute(entries_query, params + [page_size, offset])
-
-            entries = []
-            current_time = int(time.time())
-
-            for row in cursor.fetchall():
-                entry = dict(row)
-                # Calculate "how long ago"
-                seconds_ago = current_time - entry["date_time"]
-                entry["how_long_ago"] = self._format_time_ago(seconds_ago)
-                entries.append(entry)
-
-            return {
-                "entries": entries,
-                "total_entries": total_entries,
-                "total_pages": total_pages,
-                "current_page": page
-            }
-
-    def clear_hunt_history(self, app_type: str = None):
-        """Clear hunt history entries"""
-        with sqlite3.connect(self.db_path) as conn:
-            if app_type and app_type != "all":
-                conn.execute("DELETE FROM hunt_history WHERE app_type = ?", (app_type,))
-                logger.info(f"Cleared hunt history for {app_type}")
-            else:
-                conn.execute("DELETE FROM hunt_history")
-                logger.info("Cleared all hunt history")
-            conn.commit()
-
-    def handle_instance_rename(self, app_type: str, old_instance_name: str, new_instance_name: str):
-        """Handle renaming of an instance by updating hunt history entries"""
-        if old_instance_name == new_instance_name:
-            return True
-
-        with sqlite3.connect(self.db_path) as conn:
-            cursor = conn.execute('''
-                UPDATE hunt_history
-                SET instance_name = ?
-                WHERE app_type = ? AND instance_name = ?
-            ''', (new_instance_name, app_type, old_instance_name))
-
-            updated_count = cursor.rowcount
-            conn.commit()
-
-            logger.info(f"Updated {updated_count} hunt history entries for {app_type}: {old_instance_name} -> {new_instance_name}")
-            return True
-
-    def migrate_from_huntarr_db(self, huntarr_db_path: Path):
-        """Migrate existing history data from huntarr.db to manager.db"""
-        if not huntarr_db_path.exists():
-            logger.info("No existing huntarr.db found, skipping migration")
-            return
-
-        try:
-            # Connect to source database
-            with sqlite3.connect(huntarr_db_path) as source_conn:
-                source_conn.row_factory = sqlite3.Row
-
-                # Check if history table exists
-                cursor = source_conn.execute("""
-                    SELECT name FROM sqlite_master
-                    WHERE type='table' AND name='history'
-                """)
-
-                if not cursor.fetchone():
-                    logger.info("No history table found in huntarr.db, skipping migration")
-                    return
-
-                # Get all history entries
-                cursor = source_conn.execute("SELECT * FROM history ORDER BY date_time")
-                history_entries = cursor.fetchall()
-
-                if not history_entries:
-                    logger.info("No history entries to migrate")
-                    return
-
-                # Insert into manager database
-                with sqlite3.connect(self.db_path) as dest_conn:
-                    for entry in history_entries:
-                        dest_conn.execute('''
-                            INSERT INTO hunt_history
-                            (app_type, instance_name, media_id, processed_info, operation_type, discovered, date_time, date_time_readable)
-                            VALUES (?, ?, ?, ?, ?, ?, ?, ?)
-                        ''', (
-                            entry['app_type'],
-                            entry['instance_name'],
-                            entry['media_id'],
-                            entry['processed_info'],
-                            entry['operation_type'],
-                            entry['discovered'],
-                            entry['date_time'],
-                            entry['date_time_readable']
-                        ))
-
-                    dest_conn.commit()
-                    logger.info(f"Migrated {len(history_entries)} history entries to manager.db")
-
-                # Drop the history table from huntarr.db
-                source_conn.execute("DROP TABLE IF EXISTS history")
-                source_conn.commit()
-                logger.info("Removed history table from huntarr.db")
-
-        except Exception as e:
-            logger.error(f"Error during history migration: {e}")
-            raise
-
-    def _format_time_ago(self, seconds: int) -> str:
-        """Format seconds into a human-readable 'time ago' string"""
-        if seconds < 60:
-            return f"{seconds} second{'s' if seconds != 1 else ''} ago"
-        elif seconds < 3600:
-            minutes = seconds // 60
-            return f"{minutes} minute{'s' if minutes != 1 else ''} ago"
-        elif seconds < 86400:
-            hours = seconds // 3600
-            return f"{hours} hour{'s' if hours != 1 else ''} ago"
-        else:
-            days = seconds // 86400
-            return f"{days} day{'s' if days != 1 else ''} ago"
-
-
-# Global manager database instance
-_manager_database_instance = None
-
-def get_manager_database() -> ManagerDatabase:
-    """Get the global manager database instance"""
-    global _manager_database_instance
-    if _manager_database_instance is None:
-        _manager_database_instance = ManagerDatabase()
-    return _manager_database_instance
@@ -1 +1 @@
-8.0.2
+8.0.3