From e033caacf6a82bce8db9bb08d94872a7aa8a12fe Mon Sep 17 00:00:00 2001 From: fccview Date: Thu, 20 Nov 2025 18:59:01 +0000 Subject: [PATCH] try a new strategy for log watch --- .gitignore | 3 +- .../FeatureComponents/Modals/LiveLogModal.tsx | 244 ++++++++++++------ app/_translations/en.json | 18 +- app/_translations/it.json | 18 +- app/_utils/job-execution-utils.ts | 16 +- app/_utils/log-watcher.ts | 59 +++++ app/api/logs/stream/route.ts | 64 ++--- scripts/long-logs.sh | 36 +++ test-large-log.sh | 32 +++ 9 files changed, 371 insertions(+), 119 deletions(-) create mode 100755 scripts/long-logs.sh create mode 100755 test-large-log.sh diff --git a/.gitignore b/.gitignore index 375e0d3..beec26b 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,5 @@ node_modules .idea tsconfig.tsbuildinfo docker-compose.test.yml -/data \ No newline at end of file +/data +claude.md \ No newline at end of file diff --git a/app/_components/FeatureComponents/Modals/LiveLogModal.tsx b/app/_components/FeatureComponents/Modals/LiveLogModal.tsx index e123772..e976bda 100644 --- a/app/_components/FeatureComponents/Modals/LiveLogModal.tsx +++ b/app/_components/FeatureComponents/Modals/LiveLogModal.tsx @@ -1,12 +1,13 @@ "use client"; -import { useState, useEffect, useRef } from "react"; +import { useState, useEffect, useRef, useCallback } from "react"; import { Loader2, CheckCircle2, XCircle, AlertTriangle, Minimize2, Maximize2 } from "lucide-react"; import { Modal } from "@/app/_components/GlobalComponents/UIElements/Modal"; import { Button } from "@/app/_components/GlobalComponents/UIElements/Button"; import { useSSEContext } from "@/app/_contexts/SSEContext"; import { SSEEvent } from "@/app/_utils/sse-events"; import { usePageVisibility } from "@/app/_hooks/usePageVisibility"; +import { useTranslations } from "next-intl"; interface LiveLogModalProps { isOpen: boolean; @@ -26,6 +27,7 @@ export const LiveLogModal = ({ jobId, jobComment, }: LiveLogModalProps) => { + const t = useTranslations(); const [logContent, setLogContent] = useState(""); const [status, setStatus] = useState<"running" | "completed" | "failed">( "running" @@ -40,6 +42,11 @@ export const LiveLogModal = ({ const abortControllerRef = useRef(null); const [fileSize, setFileSize] = useState(0); const [lineCount, setLineCount] = useState(0); + const [maxLines, setMaxLines] = useState(500); + const [totalLines, setTotalLines] = useState(0); + const [truncated, setTruncated] = useState(false); + const [showFullLog, setShowFullLog] = useState(false); + const [isJobComplete, setIsJobComplete] = useState(false); useEffect(() => { if (isOpen) { @@ -49,92 +56,95 @@ export const LiveLogModal = ({ setShowSizeWarning(false); setFileSize(0); setLineCount(0); + setShowFullLog(false); + setIsJobComplete(false); } }, [isOpen, runId]); useEffect(() => { - if (!isOpen || !runId || !isPageVisible) return; + if (isOpen && runId && !isJobComplete) { + lastOffsetRef.current = 0; + setLogContent(""); + fetchLogs(); + } + }, [maxLines]); - const fetchLogs = async () => { - if (abortControllerRef.current) { - abortControllerRef.current.abort(); + const fetchLogs = useCallback(async () => { + if (abortControllerRef.current) { + abortControllerRef.current.abort(); + } + + const abortController = new AbortController(); + abortControllerRef.current = abortController; + + try { + const url = `/api/logs/stream?runId=${runId}&offset=${lastOffsetRef.current}&maxLines=${maxLines}`; + const response = await fetch(url, { + signal: abortController.signal, + }); + const data = await 
response.json(); + + if (data.fileSize !== undefined) { + lastOffsetRef.current = data.fileSize; + setFileSize(data.fileSize); + + if (data.fileSize > 10 * 1024 * 1024) { + setShowSizeWarning(true); + } } - const abortController = new AbortController(); - abortControllerRef.current = abortController; + if (data.totalLines !== undefined) { + setTotalLines(data.totalLines); + setLineCount(data.displayedLines || data.totalLines); + } - try { - const url = `/api/logs/stream?runId=${runId}&offset=${lastOffsetRef.current}`; - const response = await fetch(url, { - signal: abortController.signal, + if (data.truncated !== undefined) { + setTruncated(data.truncated); + } + + if (lastOffsetRef.current === 0 && data.content) { + setLogContent(data.content); + + if (data.truncated) { + setTailMode(true); + } + } else if (data.newContent) { + setLogContent((prev) => { + const combined = prev + data.newContent; + const lines = combined.split("\n"); + + if (lines.length > maxLines) { + return lines.slice(-maxLines).join("\n"); + } + + return combined; }); - const data = await response.json(); - - if (data.fileSize !== undefined) { - lastOffsetRef.current = data.fileSize; - setFileSize(data.fileSize); - - if (data.fileSize > 10 * 1024 * 1024 && !showSizeWarning) { - setShowSizeWarning(true); - } - } - - if (lastOffsetRef.current === 0 && data.content) { - const lines = data.content.split("\n"); - setLineCount(lines.length); - - if (lines.length > MAX_LINES_FULL_RENDER) { - setTailMode(true); - setShowSizeWarning(true); - setLogContent(lines.slice(-TAIL_LINES).join("\n")); - } else { - setLogContent(data.content); - } - } else if (data.newContent) { - setLogContent((prev) => { - const newContent = prev + data.newContent; - const lines = newContent.split("\n"); - setLineCount(lines.length); - - if (lines.length > MAX_LINES_FULL_RENDER && !tailMode) { - setTailMode(true); - setShowSizeWarning(true); - return lines.slice(-TAIL_LINES).join("\n"); - } - - if (tailMode && lines.length > TAIL_LINES) { - return lines.slice(-TAIL_LINES).join("\n"); - } - - const maxLength = 50 * 1024 * 1024; - if (newContent.length > maxLength) { - setTailMode(true); - setShowSizeWarning(true); - const truncated = newContent.slice(-maxLength + 200); - const truncatedLines = truncated.split("\n"); - return truncatedLines.slice(-TAIL_LINES).join("\n"); - } - - return newContent; - }); - } - - setStatus(data.status || "running"); - - if (data.exitCode !== undefined) { - setExitCode(data.exitCode); - } - } catch (error: any) { - if (error.name !== "AbortError") { - console.error("Failed to fetch logs:", error); - } } - }; + + const jobStatus = data.status || "running"; + setStatus(jobStatus); + + if (jobStatus === "completed" || jobStatus === "failed") { + setIsJobComplete(true); + } + + if (data.exitCode !== undefined) { + setExitCode(data.exitCode); + } + } catch (error: any) { + if (error.name !== "AbortError") { + console.error("Failed to fetch logs:", error); + } + } + }, [runId, maxLines]); + + useEffect(() => { + if (!isOpen || !runId || !isPageVisible) return; fetchLogs(); let interval: NodeJS.Timeout | null = null; - if (isPageVisible) { + if (isPageVisible && !isJobComplete) { interval = setInterval(fetchLogs, 3000); } @@ -146,7 +156,7 @@ export const LiveLogModal = ({ abortControllerRef.current.abort(); } }; - }, [isOpen, runId, isPageVisible, showSizeWarning, tailMode]); + }, [isOpen, runId, isPageVisible, fetchLogs, isJobComplete]); useEffect(() => { if (!isOpen) return; @@ -194,7 +204,7 @@ export const LiveLogModal = ({ 
useEffect(() => { if (logEndRef.current) { - logEndRef.current.scrollIntoView({ behavior: "smooth" }); + logEndRef.current.scrollIntoView({ behavior: "instant" }); } }, [logContent]); @@ -216,23 +226,23 @@ export const LiveLogModal = ({ const titleWithStatus = (
- Live Job Execution{jobComment && `: ${jobComment}`} + {t("cronjobs.liveJobExecution")}{jobComment && `: ${jobComment}`} {status === "running" && ( - Running... + {t("cronjobs.running")} )} {status === "completed" && ( - Completed (Exit: {exitCode}) + {t("cronjobs.completed", { exitCode: exitCode ?? 0 })} )} {status === "failed" && ( - Failed (Exit: {exitCode}) + {t("cronjobs.jobFailed", { exitCode: exitCode ?? 1 })} )}
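The core of the new strategy sits in the fetch path above: the modal polls /api/logs/stream every 3 seconds, appends data.newContent, and re-trims the buffer to the last maxLines lines so the rendered log view stays bounded no matter how large the underlying file grows. A minimal sketch of that trimming rule — appendWindowed is a hypothetical name; the patch inlines this logic in the setLogContent updater:

    // Append a newly fetched chunk, then keep only the last `maxLines` lines.
    const appendWindowed = (
      prev: string,
      chunk: string,
      maxLines: number
    ): string => {
      const combined = prev + chunk;
      const lines = combined.split("\n");
      // Trim from the front so the buffer never exceeds the window size.
      return lines.length > maxLines
        ? lines.slice(-maxLines).join("\n")
        : combined;
    };

    // e.g. appendWindowed("a\nb", "\nc", 2) === "b\nc"

The hunk below adds the matching view controls: the line-window selector, the full-log toggle, and the truncation notice.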
@@ -247,13 +257,78 @@ export const LiveLogModal = ({ preventCloseOnClickOutside={status === "running"} >
+
+
+ {!showFullLog ? ( + <> + + + {truncated && ( + + )} + + ) : ( +
+ + {t("cronjobs.viewingFullLog", { totalLines: totalLines.toLocaleString() })} + + +
+ )} +
+ {truncated && !showFullLog && ( +
+ + {t("cronjobs.showingLastOf", { + lineCount: lineCount.toLocaleString(), + totalLines: totalLines.toLocaleString() + })} +
+ )} +
+ {showSizeWarning && (

- Large log file detected ({formatFileSize(fileSize)}) - {tailMode && ` - Tail mode enabled, showing last ${TAIL_LINES.toLocaleString()} lines`} + {t("cronjobs.largeLogFileDetected")} ({formatFileSize(fileSize)}) + {tailMode && ` - ${t("cronjobs.tailModeEnabled", { tailLines: TAIL_LINES.toLocaleString() })}`}

@@ -271,15 +346,14 @@ export const LiveLogModal = ({
-            {logContent ||
-              "Waiting for job to start...\n\nLogs will appear here in real-time."}
+            {logContent || t("cronjobs.waitingForJobToStart")}
             
- Run ID: {runId} | Job ID: {jobId} + {t("cronjobs.runIdJobId", { runId, jobId })} {lineCount.toLocaleString()} lines diff --git a/app/_translations/en.json b/app/_translations/en.json index c73c22c..c10b6ac 100644 --- a/app/_translations/en.json +++ b/app/_translations/en.json @@ -87,7 +87,23 @@ "both": "Both", "minimalMode": "Minimal Mode", "minimalModeDescription": "Show compact view with icons instead of full text", - "applyFilters": "Apply Filters" + "applyFilters": "Apply Filters", + "nLines": "{count} lines", + "liveJobExecution": "Live Job Execution", + "running": "Running...", + "completed": "Completed (Exit: {exitCode})", + "jobFailed": "Failed (Exit: {exitCode})", + "showLast": "Show last:", + "viewFullLog": "View Full Log ({totalLines} lines)", + "viewingFullLog": "Viewing full log ({totalLines} lines)", + "backToWindowedView": "Back to Windowed View", + "showingLastOf": "Showing last {lineCount} of {totalLines} lines", + "largeLogFileDetected": "Large log file detected", + "tailModeEnabled": "Tail mode enabled, showing last {tailLines} lines", + "showAllLines": "Show all lines", + "enableTailMode": "Enable tail mode", + "waitingForJobToStart": "Waiting for job to start...\n\nLogs will appear here in real-time.", + "runIdJobId": "Run ID: {runId} | Job ID: {jobId}" }, "scripts": { "scripts": "Scripts", diff --git a/app/_translations/it.json b/app/_translations/it.json index f2bd21c..511b544 100644 --- a/app/_translations/it.json +++ b/app/_translations/it.json @@ -86,7 +86,23 @@ "both": "Entrambi", "minimalMode": "Modalità Minima", "minimalModeDescription": "Mostra vista compatta con icone invece del testo completo", - "applyFilters": "Applica Filtri" + "applyFilters": "Applica Filtri", + "nLines": "{count} linee", + "liveJobExecution": "Esecuzione Lavoro Live", + "running": "In esecuzione...", + "completed": "Completato (Exit: {exitCode})", + "jobFailed": "Fallito (Exit: {exitCode})", + "showLast": "Mostra ultime:", + "viewFullLog": "Visualizza Log Completo ({totalLines} linee)", + "viewingFullLog": "Visualizzazione log completo ({totalLines} linee)", + "backToWindowedView": "Torna alla Vista Finestrata", + "showingLastOf": "Mostrando ultime {lineCount} di {totalLines} linee", + "largeLogFileDetected": "Rilevato file di log di grandi dimensioni", + "tailModeEnabled": "Modalità tail abilitata, mostrando ultime {tailLines} linee", + "showAllLines": "Mostra tutte le linee", + "enableTailMode": "Abilita modalità tail", + "waitingForJobToStart": "In attesa che il lavoro inizi...\n\nI log appariranno qui in tempo reale.", + "runIdJobId": "ID Esecuzione: {runId} | ID Lavoro: {jobId}" }, "scripts": { "scripts": "Script", diff --git a/app/_utils/job-execution-utils.ts b/app/_utils/job-execution-utils.ts index 567a7e3..34d9ede 100644 --- a/app/_utils/job-execution-utils.ts +++ b/app/_utils/job-execution-utils.ts @@ -11,6 +11,7 @@ import { } from "./running-jobs-utils"; import { sseBroadcaster } from "./sse-broadcaster"; import { generateLogFolderName, cleanupOldLogFiles } from "./wrapper-utils"; +import { watchForLogFile } from "./log-watcher"; const execAsync = promisify(exec); @@ -84,18 +85,29 @@ export const runJobInBackground = async ( child.unref(); + const jobStartTime = new Date(); + saveRunningJob({ id: runId, cronJobId: job.id, pid: child.pid!, - startTime: new Date().toISOString(), + startTime: jobStartTime.toISOString(), status: "running", logFolderName, }); + watchForLogFile(runId, logFolderName, jobStartTime, (logFileName) => { + try { + updateRunningJob(runId, { logFileName 
}); + console.log(`[RunningJob] Cached logFileName for ${runId}: ${logFileName}`); + } catch (error) { + console.error(`[RunningJob] Failed to cache logFileName for ${runId}:`, error); + } + }); + sseBroadcaster.broadcast({ type: "job-started", - timestamp: new Date().toISOString(), + timestamp: jobStartTime.toISOString(), data: { runId, cronJobId: job.id, diff --git a/app/_utils/log-watcher.ts b/app/_utils/log-watcher.ts index adb42f0..5a9cc07 100644 --- a/app/_utils/log-watcher.ts +++ b/app/_utils/log-watcher.ts @@ -95,3 +95,62 @@ export const stopLogWatcher = () => { watcher = null; } }; + +export const watchForLogFile = ( + runId: string, + logFolderName: string, + jobStartTime: Date, + callback: (logFileName: string) => void +): NodeJS.Timeout => { + const logDir = path.join(LOGS_DIR, logFolderName); + const startTime = jobStartTime.getTime(); + const maxAttempts = 30; + let attempts = 0; + + const checkInterval = setInterval(() => { + attempts++; + + if (attempts > maxAttempts) { + console.warn(`[LogWatcher] Timeout waiting for log file for ${runId}`); + clearInterval(checkInterval); + return; + } + + try { + if (!existsSync(logDir)) { + return; + } + + const files = readdirSync(logDir); + const logFiles = files + .filter((f) => f.endsWith(".log")) + .map((f) => { + const filePath = path.join(logDir, f); + try { + const stats = statSync(filePath); + return { + name: f, + birthtime: stats.birthtime || stats.mtime, + }; + } catch { + return null; + } + }) + .filter((f): f is { name: string; birthtime: Date } => f !== null); + + const matchingFile = logFiles.find((f) => { + const fileTime = f.birthtime.getTime(); + return fileTime >= startTime - 5000 && fileTime <= startTime + 30000; + }); + + if (matchingFile) { + clearInterval(checkInterval); + callback(matchingFile.name); + } + } catch (error) { + console.error(`[LogWatcher] Error watching for log file ${runId}:`, error); + } + }, 500); + + return checkInterval; +}; diff --git a/app/api/logs/stream/route.ts b/app/api/logs/stream/route.ts index 2cd87f6..717ea8b 100644 --- a/app/api/logs/stream/route.ts +++ b/app/api/logs/stream/route.ts @@ -17,6 +17,11 @@ export const GET = async (request: NextRequest) => { const offsetStr = searchParams.get("offset"); const offset = offsetStr ? parseInt(offsetStr, 10) : 0; + const maxLinesStr = searchParams.get("maxLines"); + const maxLines = maxLinesStr + ? 
Math.min(Math.max(parseInt(maxLinesStr, 10), 100), 5000) + : 500; + if (!runId) { return NextResponse.json( { error: "runId parameter is required" }, @@ -136,42 +141,40 @@ export const GET = async (request: NextRequest) => { const fileSize = latestStats.size; - const MAX_RESPONSE_SIZE = 1024 * 1024; - const MAX_TOTAL_SIZE = 10 * 1024 * 1024; + const fullContent = await readFile(latestLogFile, "utf-8"); + + const allLines = fullContent.split("\n"); + const totalLines = allLines.length; + + let displayedLines: string[]; + let truncated = false; + + if (totalLines > maxLines) { + displayedLines = allLines.slice(-maxLines); + truncated = true; + } else { + displayedLines = allLines; + } let content = ""; let newContent = ""; - if (fileSize > MAX_TOTAL_SIZE) { - const startPos = Math.max(0, fileSize - MAX_TOTAL_SIZE); - const buffer = Buffer.alloc(MAX_TOTAL_SIZE); - const { open } = await import("fs/promises"); - const fileHandle = await open(latestLogFile, "r"); - - try { - await fileHandle.read(buffer, 0, MAX_TOTAL_SIZE, startPos); - content = buffer.toString("utf-8"); - newContent = content.slice(Math.max(0, offset - startPos)); - } finally { - await fileHandle.close(); - } - - if (startPos > 0) { - content = `[LOG TRUNCATED - Showing last ${MAX_TOTAL_SIZE / 1024 / 1024 - }MB of ${fileSize / 1024 / 1024}MB total]\n\n${content}`; + if (offset === 0) { + if (truncated) { + content = `[LOG TRUNCATED - Showing last ${maxLines} of ${totalLines} lines (${(fileSize / 1024 / 1024).toFixed(2)}MB total)]\n\n` + displayedLines.join("\n"); + } else { + content = displayedLines.join("\n"); } + newContent = content; } else { - const fullContent = await readFile(latestLogFile, "utf-8"); + if (offset < fileSize) { + const newBytes = fullContent.slice(offset); + newContent = newBytes; - if (offset > 0 && offset < fileSize) { - newContent = fullContent.slice(offset); - content = newContent; - } else if (offset === 0) { - content = fullContent; - newContent = fullContent; - } else if (offset >= fileSize) { - content = ""; - newContent = ""; + const newLines = newBytes.split("\n").filter(l => l.length > 0); + if (newLines.length > 0) { + content = newBytes; + } } } @@ -185,6 +188,9 @@ export const GET = async (request: NextRequest) => { exitCode: job.exitCode, fileSize, offset, + totalLines, + displayedLines: displayedLines.length, + truncated, }); } catch (error: any) { console.error("Error streaming log:", error); diff --git a/scripts/long-logs.sh b/scripts/long-logs.sh new file mode 100755 index 0000000..5b84b97 --- /dev/null +++ b/scripts/long-logs.sh @@ -0,0 +1,36 @@ +# @id: script_1763663771310_a5dac8gtc +# @title: long-logs +# @description: tests long logs + +#!/bin/bash + +# Test script for large log output +# Generates 15,000 lines with random strings + +echo "Starting large log test - 15,000 lines incoming..." 
+echo "" + +for i in {1..15000}; do + # Generate random string with timestamp and line number + random_string=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) + timestamp=$(date '+%Y-%m-%d %H:%M:%S.%3N') + + echo "[$timestamp] Line $i: Processing task_${random_string} - Status: $(( RANDOM % 100 ))% complete" + + # Add occasional error/warning messages + if [ $((i % 1000)) -eq 0 ]; then + echo "[$timestamp] [WARNING] Checkpoint reached at line $i" + fi + + if [ $((i % 5000)) -eq 0 ]; then + echo "[$timestamp] [INFO] Major milestone: $i lines processed" + fi + + # Small delay every 100 lines to make it more realistic for live view + if [ $((i % 100)) -eq 0 ]; then + sleep 0.01 + fi +done + +echo "" +echo "Test complete! Generated 15,000 lines." \ No newline at end of file diff --git a/test-large-log.sh b/test-large-log.sh new file mode 100755 index 0000000..62a7bea --- /dev/null +++ b/test-large-log.sh @@ -0,0 +1,32 @@ +#!/bin/bash + +# Test script for large log output +# Generates 15,000 lines with random strings + +echo "Starting large log test - 15,000 lines incoming..." +echo "" + +for i in {1..15000}; do + # Generate random string with timestamp and line number + random_string=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) + timestamp=$(date '+%Y-%m-%d %H:%M:%S.%3N') + + echo "[$timestamp] Line $i: Processing task_${random_string} - Status: $(( RANDOM % 100 ))% complete" + + # Add occasional error/warning messages + if [ $((i % 1000)) -eq 0 ]; then + echo "[$timestamp] [WARNING] Checkpoint reached at line $i" + fi + + if [ $((i % 5000)) -eq 0 ]; then + echo "[$timestamp] [INFO] Major milestone: $i lines processed" + fi + + # Small delay every 100 lines to make it more realistic for live view + if [ $((i % 100)) -eq 0 ]; then + sleep 0.01 + fi +done + +echo "" +echo "Test complete! Generated 15,000 lines."
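One caveat in the rewritten app/api/logs/stream/route.ts: the client stores data.fileSize (a byte count from fs.stat) and sends it back as offset, but fullContent.slice(offset) indexes UTF-16 code units of the decoded string, so the two units drift once a log contains multi-byte UTF-8 characters. That is harmless for ASCII output like the test scripts above; a byte-accurate variant would slice the raw buffer before decoding. A sketch under that assumption — readNewBytes is a hypothetical helper, not part of this patch:

    import { readFile } from "fs/promises";

    // Read only the bytes past `offset`, then decode. Slicing the Buffer keeps
    // the server-side cut in the same unit as the client's byte offset.
    const readNewBytes = async (
      logFile: string,
      offset: number
    ): Promise<string> => {
      const buf = await readFile(logFile); // no encoding argument -> Buffer
      return offset >= buf.length ? "" : buf.subarray(offset).toString("utf-8");
    };

The request shape would be unchanged either way, e.g. GET /api/logs/stream?runId=<id>&offset=8192&maxLines=500, with maxLines clamped server-side to the 100–5000 range.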