Compare commits


10 Commits

Author | SHA1 | Message | Date
Nicolas Mowen | 5e442614e7 | Add title | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | 815b06ed6b | Show tool calls separately from message | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | ee9243a2c7 | More time parsing improvements | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | d8f46f3f0c | Reduce fields in response | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | fb93dcd9e1 | Adjust timing format | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | b2900fefd2 | Improvements | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | 706de0d47e | Add markdown | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | 8f2318c001 | processing | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | 42c92570bc | Add chat history | 2026-02-14 15:23:37 -07:00
Nicolas Mowen | ce7fd6ad1f | Add basic chat page with entry | 2026-02-14 15:23:37 -07:00
24 changed files with 1584 additions and 180 deletions

View File

@@ -38,7 +38,6 @@ from frigate.config.camera.updater import (
CameraConfigUpdateTopic,
)
from frigate.ffmpeg_presets import FFMPEG_HWACCEL_VAAPI, _gpu_selector
from frigate.genai import GenAIClientManager
from frigate.jobs.media_sync import (
get_current_media_sync_job,
get_media_sync_job_by_id,
@@ -433,7 +432,6 @@ def config_set(request: Request, body: AppConfigSetBody):
if body.requires_restart == 0 or body.update_topic:
old_config: FrigateConfig = request.app.frigate_config
request.app.frigate_config = config
request.app.genai_manager = GenAIClientManager(config)
if body.update_topic:
if body.update_topic.startswith("config/cameras/"):

View File

@@ -1037,4 +1037,4 @@ async def get_allowed_cameras_for_filter(request: Request):
role = current_user["role"]
all_camera_names = set(request.app.frigate_config.cameras.keys())
roles_dict = request.app.frigate_config.auth.roles
return User.get_allowed_cameras(role, roles_dict, all_camera_names)
return User.get_allowed_cameras(role, roles_dict, all_camera_names)

View File

@@ -3,7 +3,8 @@
import base64
import json
import logging
from datetime import datetime, timezone
import time
from datetime import datetime
from typing import Any, Dict, List, Optional
import cv2
@@ -20,15 +21,42 @@ from frigate.api.defs.request.chat_body import ChatCompletionRequest
from frigate.api.defs.response.chat_response import (
ChatCompletionResponse,
ChatMessageResponse,
ToolCall,
)
from frigate.api.defs.tags import Tags
from frigate.api.event import events
from frigate.genai import get_genai_client
logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.chat])
def _format_events_with_local_time(
events_list: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
"""Add human-readable local start/end times to each event for the LLM."""
result = []
for evt in events_list:
if not isinstance(evt, dict):
result.append(evt)
continue
copy_evt = dict(evt)
try:
start_ts = evt.get("start_time")
end_ts = evt.get("end_time")
if start_ts is not None:
dt_start = datetime.fromtimestamp(start_ts)
copy_evt["start_time_local"] = dt_start.strftime("%Y-%m-%d %I:%M:%S %p")
if end_ts is not None:
dt_end = datetime.fromtimestamp(end_ts)
copy_evt["end_time_local"] = dt_end.strftime("%Y-%m-%d %I:%M:%S %p")
except (TypeError, ValueError, OSError):
pass
result.append(copy_evt)
return result
class ToolExecuteRequest(BaseModel):
"""Request model for tool execution."""
@@ -136,23 +164,26 @@ async def _execute_search_objects(
This searches for detected objects (events) in Frigate using the same
logic as the events API endpoint.
"""
# Parse ISO 8601 timestamps to Unix timestamps if provided
# Parse after/before as server local time; convert to Unix timestamp
after = arguments.get("after")
before = arguments.get("before")
def _parse_as_local_timestamp(s: str):
s = s.replace("Z", "").strip()[:19]
dt = datetime.strptime(s, "%Y-%m-%dT%H:%M:%S")
return time.mktime(dt.timetuple())
if after:
try:
after_dt = datetime.fromisoformat(after.replace("Z", "+00:00"))
after = after_dt.timestamp()
except (ValueError, AttributeError):
after = _parse_as_local_timestamp(after)
except (ValueError, AttributeError, TypeError):
logger.warning(f"Invalid 'after' timestamp format: {after}")
after = None
if before:
try:
before_dt = datetime.fromisoformat(before.replace("Z", "+00:00"))
before = before_dt.timestamp()
except (ValueError, AttributeError):
before = _parse_as_local_timestamp(before)
except (ValueError, AttributeError, TypeError):
logger.warning(f"Invalid 'before' timestamp format: {before}")
before = None
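
A quick sketch of how the new parsing differs from the previous UTC-based parsing, using an example timestamp string:

```python
import time
from datetime import datetime

iso = "2026-02-14T00:00:00Z"  # example value an LLM might pass for "start of today"

# Previous behavior: the trailing Z made this an explicit UTC timestamp.
utc_ts = datetime.fromisoformat(iso.replace("Z", "+00:00")).timestamp()

# New behavior (mirroring _parse_as_local_timestamp): drop the Z and treat the
# wall-clock value as server-local time.
trimmed = iso.replace("Z", "").strip()[:19]
local_ts = time.mktime(datetime.strptime(trimmed, "%Y-%m-%dT%H:%M:%S").timetuple())

# On a server running at UTC-7 the two results differ by 25200 seconds (7 hours).
print(local_ts - utc_ts)
```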
@@ -382,7 +413,7 @@ async def chat_completion(
6. Repeats until final answer
7. Returns response to user
"""
genai_client = request.app.genai_manager.tool_client
genai_client = get_genai_client(request.app.frigate_config)
if not genai_client:
return JSONResponse(
content={
@@ -394,9 +425,9 @@ async def chat_completion(
tools = get_tool_definitions()
conversation = []
current_datetime = datetime.now(timezone.utc)
current_datetime = datetime.now()
current_date_str = current_datetime.strftime("%Y-%m-%d")
current_time_str = current_datetime.strftime("%H:%M:%S %Z")
current_time_str = current_datetime.strftime("%I:%M:%S %p")
cameras_info = []
config = request.app.frigate_config
@@ -429,9 +460,10 @@ async def chat_completion(
system_prompt = f"""You are a helpful assistant for Frigate, a security camera NVR system. You help users answer questions about their cameras, detected objects, and events.
Current date and time: {current_date_str} at {current_time_str} (UTC)
Current server local date and time: {current_date_str} at {current_time_str}
When users ask questions about "today", "yesterday", "this week", etc., use the current date above as reference.
Always present times to the user in the server's local timezone. When tool results include start_time_local and end_time_local, use those exact strings when listing or describing detection times—do not convert or invent timestamps. Do not use UTC or ISO format with Z for the user-facing answer unless the tool result only provides Unix timestamps without local time fields.
When users ask about "today", "yesterday", "this week", etc., use the current date above as reference.
When searching for objects or events, use ISO 8601 format for dates (e.g., {current_date_str}T00:00:00Z for the start of today).
Always be accurate with time calculations based on the current date provided.{cameras_section}{live_image_note}"""
@@ -471,6 +503,7 @@ Always be accurate with time calculations based on the current date provided.{ca
conversation.append(msg_dict)
tool_iterations = 0
tool_calls: List[ToolCall] = []
max_iterations = body.max_tool_iterations
logger.debug(
@@ -517,8 +550,8 @@ Always be accurate with time calculations based on the current date provided.{ca
]
conversation.append(assistant_message)
tool_calls = response.get("tool_calls")
if not tool_calls:
pending_tool_calls = response.get("tool_calls")
if not pending_tool_calls:
logger.debug(
f"Chat completion finished with final answer (iterations: {tool_iterations})"
)
@@ -531,6 +564,7 @@ Always be accurate with time calculations based on the current date provided.{ca
),
finish_reason=response.get("finish_reason", "stop"),
tool_iterations=tool_iterations,
tool_calls=tool_calls,
).model_dump(),
)
@@ -538,11 +572,11 @@ Always be accurate with time calculations based on the current date provided.{ca
tool_iterations += 1
logger.debug(
f"Tool calls detected (iteration {tool_iterations}/{max_iterations}): "
f"{len(tool_calls)} tool(s) to execute"
f"{len(pending_tool_calls)} tool(s) to execute"
)
tool_results = []
for tool_call in tool_calls:
for tool_call in pending_tool_calls:
tool_name = tool_call["name"]
tool_args = tool_call["arguments"]
tool_call_id = tool_call["id"]
@@ -556,6 +590,25 @@ Always be accurate with time calculations based on the current date provided.{ca
tool_name, tool_args, request, allowed_cameras
)
# Add local time fields to search_objects results so the LLM doesn't hallucinate timestamps
if tool_name == "search_objects" and isinstance(tool_result, list):
tool_result = _format_events_with_local_time(tool_result)
_keys = {
"id",
"camera",
"label",
"zones",
"start_time_local",
"end_time_local",
"sub_label",
"event_count",
}
tool_result = [
{k: evt[k] for k in _keys if k in evt}
for evt in tool_result
if isinstance(evt, dict)
]
if isinstance(tool_result, dict):
result_content = json.dumps(tool_result)
result_summary = tool_result
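
To make the effect of the field whitelist concrete, a small sketch with a made-up search_objects result:

```python
_keys = {
    "id", "camera", "label", "zones",
    "start_time_local", "end_time_local", "sub_label", "event_count",
}

# Hypothetical raw event with extra fields (thumbnail, scores, ...) that the
# LLM does not need and that would bloat the tool result.
events = [
    {
        "id": "1760000000.123-abcdef",
        "camera": "front_door",
        "label": "person",
        "zones": ["porch"],
        "start_time_local": "2026-02-14 03:05:00 PM",
        "thumbnail": "<base64 data>",
        "data": {"score": 0.92},
    }
]

slim = [
    {k: evt[k] for k in _keys if k in evt}
    for evt in events
    if isinstance(evt, dict)
]
# Only the whitelisted keys survive; thumbnail and data are dropped.
print(sorted(slim[0]))
```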
@@ -573,6 +626,12 @@ Always be accurate with time calculations based on the current date provided.{ca
f"Tool {tool_name} (id: {tool_call_id}) completed successfully. "
f"Result: {json.dumps(result_summary, indent=2)}"
)
elif isinstance(tool_result, list):
result_content = json.dumps(tool_result)
logger.debug(
f"Tool {tool_name} (id: {tool_call_id}) completed successfully. "
f"Result: {len(tool_result)} item(s)"
)
elif isinstance(tool_result, str):
result_content = tool_result
logger.debug(
@@ -586,6 +645,13 @@ Always be accurate with time calculations based on the current date provided.{ca
f"Result type: {type(tool_result).__name__}"
)
tool_calls.append(
ToolCall(
name=tool_name,
arguments=tool_args or {},
response=result_content,
)
)
tool_results.append(
{
"role": "tool",
@@ -601,6 +667,13 @@ Always be accurate with time calculations based on the current date provided.{ca
error_content = json.dumps(
{"error": f"Tool execution failed: {str(e)}"}
)
tool_calls.append(
ToolCall(
name=tool_name,
arguments=tool_args or {},
response=error_content,
)
)
tool_results.append(
{
"role": "tool",
@@ -630,6 +703,7 @@ Always be accurate with time calculations based on the current date provided.{ca
),
finish_reason="length",
tool_iterations=tool_iterations,
tool_calls=tool_calls,
).model_dump(),
)
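
Putting the endpoint together end to end, a hedged sketch of how a client could call it; the host, port, and /api prefix are assumptions based on the web UI's axios call to "chat/completion":

```python
import requests

resp = requests.post(
    "http://frigate.local:5000/api/chat/completion",  # assumed base URL and path prefix
    json={
        "messages": [
            {"role": "user", "content": "What happened at the front door today?"}
        ]
    },
    timeout=120,
)
data = resp.json()

print(data["message"]["content"])        # final assistant answer (markdown)
print(data["tool_iterations"])           # number of tool-call rounds performed
for call in data.get("tool_calls", []):  # executed tools, newly included in the response
    print(call["name"], call["arguments"])
```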

View File

@@ -5,8 +5,8 @@ from typing import Any, Optional
from pydantic import BaseModel, Field
class ToolCall(BaseModel):
"""A tool call from the LLM."""
class ToolCallInvocation(BaseModel):
"""A tool call requested by the LLM (before execution)."""
id: str = Field(description="Unique identifier for this tool call")
name: str = Field(description="Tool name to call")
@@ -20,11 +20,24 @@ class ChatMessageResponse(BaseModel):
content: Optional[str] = Field(
default=None, description="Message content (None if tool calls present)"
)
tool_calls: Optional[list[ToolCall]] = Field(
tool_calls: Optional[list[ToolCallInvocation]] = Field(
default=None, description="Tool calls if LLM wants to call tools"
)
class ToolCall(BaseModel):
"""A tool that was executed during the completion, with its response."""
name: str = Field(description="Tool name that was called")
arguments: dict[str, Any] = Field(
default_factory=dict, description="Arguments passed to the tool"
)
response: str = Field(
default="",
description="The response or result returned from the tool execution",
)
class ChatCompletionResponse(BaseModel):
"""Response from chat completion."""
@@ -35,3 +48,7 @@ class ChatCompletionResponse(BaseModel):
tool_iterations: int = Field(
default=0, description="Number of tool call iterations performed"
)
tool_calls: list[ToolCall] = Field(
default_factory=list,
description="List of tool calls that were executed during this completion",
)
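
The renamed ToolCallInvocation keeps describing what the LLM asked to run (id, name, arguments), while the new ToolCall records a tool that was actually executed along with its serialized result. A small sketch of the executed-tool model, with illustrative values:

```python
from frigate.api.defs.response.chat_response import ToolCall

# `response` holds the JSON string that was fed back to the LLM as the tool result.
call = ToolCall(
    name="search_objects",
    arguments={"label": "person", "cameras": ["front_door"]},  # illustrative arguments
    response='[{"id": "abc", "label": "person", "start_time_local": "2026-02-14 03:05:00 PM"}]',
)
print(call.model_dump())
# -> {'name': 'search_objects', 'arguments': {...}, 'response': '[...]'}
```

These objects populate ChatCompletionResponse.tool_calls, which the new Chat page renders as collapsible tool bubbles.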

View File

@@ -33,7 +33,6 @@ from frigate.comms.event_metadata_updater import (
from frigate.config import FrigateConfig
from frigate.config.camera.updater import CameraConfigUpdatePublisher
from frigate.embeddings import EmbeddingsContext
from frigate.genai import GenAIClientManager
from frigate.ptz.onvif import OnvifController
from frigate.stats.emitter import StatsEmitter
from frigate.storage import StorageMaintainer
@@ -135,7 +134,6 @@ def create_fastapi_app(
app.include_router(record.router)
# App Properties
app.frigate_config = frigate_config
app.genai_manager = GenAIClientManager(frigate_config)
app.embeddings = embeddings
app.detected_frames_processor = detected_frames_processor
app.storage_maintainer = storage_maintainer

View File

@@ -33,6 +33,7 @@ from frigate.api.defs.response.review_response import (
ReviewSummaryResponse,
)
from frigate.api.defs.tags import Tags
from frigate.config import FrigateConfig
from frigate.embeddings import EmbeddingsContext
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
from frigate.review.types import SeverityEnum
@@ -746,7 +747,9 @@ async def set_not_reviewed(
description="Use GenAI to summarize review items over a period of time.",
)
def generate_review_summary(request: Request, start_ts: float, end_ts: float):
if not request.app.genai_manager.vision_client:
config: FrigateConfig = request.app.frigate_config
if not config.genai.provider:
return JSONResponse(
content=(
{

View File

@@ -6,7 +6,7 @@ from pydantic import Field
from ..base import FrigateBaseModel
from ..env import EnvString
__all__ = ["GenAIConfig", "GenAIProviderEnum", "GenAIRoleEnum"]
__all__ = ["GenAIConfig", "GenAIProviderEnum"]
class GenAIProviderEnum(str, Enum):
@@ -17,12 +17,6 @@ class GenAIProviderEnum(str, Enum):
llamacpp = "llamacpp"
class GenAIRoleEnum(str, Enum):
tools = "tools"
vision = "vision"
embeddings = "embeddings"
class GenAIConfig(FrigateBaseModel):
"""Primary GenAI Config to define GenAI Provider."""
@@ -30,14 +24,6 @@ class GenAIConfig(FrigateBaseModel):
base_url: Optional[str] = Field(default=None, title="Provider base url.")
model: str = Field(default="gpt-4o", title="GenAI model.")
provider: GenAIProviderEnum | None = Field(default=None, title="GenAI provider.")
roles: list[GenAIRoleEnum] = Field(
default_factory=lambda: [
GenAIRoleEnum.embeddings,
GenAIRoleEnum.vision,
GenAIRoleEnum.tools,
],
title="GenAI roles (tools, vision, embeddings); one provider per role.",
)
provider_options: dict[str, Any] = Field(
default={}, title="GenAI Provider extra options."
)
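
With the roles mechanism removed, a single GenAIConfig drives all GenAI features again. A minimal construction sketch; the provider and URL values are illustrative:

```python
from frigate.config import GenAIConfig, GenAIProviderEnum

genai_cfg = GenAIConfig(
    provider=GenAIProviderEnum.llamacpp,  # any one configured provider enables GenAI features
    base_url="http://localhost:8080/v1",  # assumed llama.cpp-compatible endpoint
    # `model` is left at its default ("gpt-4o") and `provider_options` at {}.
)
print(genai_cfg.provider is not None)  # True -> get_genai_client() will return a client
```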

View File

@@ -45,7 +45,7 @@ from .camera.audio import AudioConfig
from .camera.birdseye import BirdseyeConfig
from .camera.detect import DetectConfig
from .camera.ffmpeg import FfmpegConfig
from .camera.genai import GenAIConfig, GenAIRoleEnum
from .camera.genai import GenAIConfig
from .camera.motion import MotionConfig
from .camera.notification import NotificationConfig
from .camera.objects import FilterConfig, ObjectConfig
@@ -347,9 +347,9 @@ class FrigateConfig(FrigateBaseModel):
default_factory=ModelConfig, title="Detection model configuration."
)
# GenAI config (named provider configs: name -> GenAIConfig)
genai: Dict[str, GenAIConfig] = Field(
default_factory=dict, title="Generative AI configuration (named providers)."
# GenAI config
genai: GenAIConfig = Field(
default_factory=GenAIConfig, title="Generative AI configuration."
)
# Camera config
@@ -431,18 +431,6 @@ class FrigateConfig(FrigateBaseModel):
# set notifications state
self.notifications.enabled_in_config = self.notifications.enabled
# validate genai: each role (tools, vision, embeddings) at most once
role_to_name: dict[GenAIRoleEnum, str] = {}
for name, genai_cfg in self.genai.items():
for role in genai_cfg.roles:
if role in role_to_name:
raise ValueError(
f"GenAI role '{role.value}' is assigned to both "
f"'{role_to_name[role]}' and '{name}'; each role must have "
"exactly one provider."
)
role_to_name[role] = name
# set default min_score for object attributes
for attribute in self.model.all_attributes:
if not self.objects.filters.get(attribute):

View File

@@ -603,4 +603,4 @@ def get_optimized_runner(
provider_options=options,
),
model_type=model_type,
)
)

View File

@@ -59,7 +59,7 @@ from frigate.data_processing.real_time.license_plate import (
from frigate.data_processing.types import DataProcessorMetrics, PostProcessDataEnum
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.events.types import EventTypeEnum, RegenerateDescriptionEnum
from frigate.genai import GenAIClientManager
from frigate.genai import get_genai_client
from frigate.models import Event, Recordings, ReviewSegment, Trigger
from frigate.util.builtin import serialize
from frigate.util.file import get_event_thumbnail_bytes
@@ -144,7 +144,7 @@ class EmbeddingMaintainer(threading.Thread):
self.frame_manager = SharedMemoryFrameManager()
self.detected_license_plates: dict[str, dict[str, Any]] = {}
self.genai_manager = GenAIClientManager(config)
self.genai_client = get_genai_client(config)
# model runners to share between realtime and post processors
if self.config.lpr.enabled:
@@ -203,15 +203,12 @@ class EmbeddingMaintainer(threading.Thread):
# post processors
self.post_processors: list[PostProcessorApi] = []
if self.genai_manager.vision_client is not None and any(
if self.genai_client is not None and any(
c.review.genai.enabled_in_config for c in self.config.cameras.values()
):
self.post_processors.append(
ReviewDescriptionProcessor(
self.config,
self.requestor,
self.metrics,
self.genai_manager.vision_client,
self.config, self.requestor, self.metrics, self.genai_client
)
)
@@ -249,7 +246,7 @@ class EmbeddingMaintainer(threading.Thread):
)
self.post_processors.append(semantic_trigger_processor)
if self.genai_manager.vision_client is not None and any(
if self.genai_client is not None and any(
c.objects.genai.enabled_in_config for c in self.config.cameras.values()
):
self.post_processors.append(
@@ -258,7 +255,7 @@ class EmbeddingMaintainer(threading.Thread):
self.embeddings,
self.requestor,
self.metrics,
self.genai_manager.vision_client,
self.genai_client,
semantic_trigger_processor,
)
)

View File

@@ -12,21 +12,10 @@ from playhouse.shortcuts import model_to_dict
from frigate.config import CameraConfig, FrigateConfig, GenAIConfig, GenAIProviderEnum
from frigate.const import CLIPS_DIR
from frigate.data_processing.post.types import ReviewMetadata
from frigate.genai.manager import GenAIClientManager
from frigate.models import Event
logger = logging.getLogger(__name__)
__all__ = [
"GenAIClient",
"GenAIClientManager",
"GenAIConfig",
"GenAIProviderEnum",
"PROVIDERS",
"load_providers",
"register_genai_provider",
]
PROVIDERS = {}
@@ -363,6 +352,19 @@ Guidelines:
}
def get_genai_client(config: FrigateConfig) -> Optional[GenAIClient]:
"""Get the GenAI client."""
if not config.genai.provider:
return None
load_providers()
provider = PROVIDERS.get(config.genai.provider)
if provider:
return provider(config.genai)
return None
def load_providers():
package_dir = os.path.dirname(__file__)
for filename in os.listdir(package_dir):
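
Callers now build a client on demand from the live config instead of going through app.genai_manager. A sketch of the caller pattern; the helper name, error payload, and status code are placeholders, not the endpoint's exact behavior:

```python
from fastapi import Request
from fastapi.responses import JSONResponse

from frigate.genai import GenAIClient, get_genai_client


def client_or_error(request: Request) -> tuple[GenAIClient | None, JSONResponse | None]:
    """Hypothetical helper mirroring how the chat endpoint resolves a client."""
    client = get_genai_client(request.app.frigate_config)
    if client is None:
        # GenAI is not configured: config.genai.provider is unset.
        return None, JSONResponse(
            content={"success": False, "message": "GenAI is not configured"},
            status_code=400,
        )
    return client, None
```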

View File

@@ -1,89 +0,0 @@
"""GenAI client manager for Frigate.
Manages GenAI provider clients from Frigate config. Configuration is read only
in _update_config(); no other code should read config.genai. Exposes clients
by role: tool_client, vision_client, embeddings_client.
"""
import logging
from typing import TYPE_CHECKING, Optional
from frigate.config import FrigateConfig
from frigate.config.camera.genai import GenAIRoleEnum
if TYPE_CHECKING:
from frigate.genai import GenAIClient
logger = logging.getLogger(__name__)
class GenAIClientManager:
"""Manages GenAI provider clients from Frigate config."""
def __init__(self, config: FrigateConfig) -> None:
self._config = config
self._tool_client: Optional[GenAIClient] = None
self._vision_client: Optional[GenAIClient] = None
self._embeddings_client: Optional[GenAIClient] = None
self._update_config()
def _update_config(self) -> None:
"""Build role clients from current Frigate config.genai.
Called from __init__ and can be called again when config is reloaded.
Each role (tools, vision, embeddings) gets the client for the provider
that has that role in its roles list.
"""
from frigate.genai import PROVIDERS, load_providers
self._tool_client = None
self._vision_client = None
self._embeddings_client = None
if not self._config.genai:
return
load_providers()
for _name, genai_cfg in self._config.genai.items():
if not genai_cfg.provider:
continue
provider_cls = PROVIDERS.get(genai_cfg.provider)
if not provider_cls:
logger.warning(
"Unknown GenAI provider %s in config, skipping.",
genai_cfg.provider,
)
continue
try:
client = provider_cls(genai_cfg)
except Exception as e:
logger.exception(
"Failed to create GenAI client for provider %s: %s",
genai_cfg.provider,
e,
)
continue
for role in genai_cfg.roles:
if role == GenAIRoleEnum.tools:
self._tool_client = client
elif role == GenAIRoleEnum.vision:
self._vision_client = client
elif role == GenAIRoleEnum.embeddings:
self._embeddings_client = client
@property
def tool_client(self) -> "Optional[GenAIClient]":
"""Client configured for the tools role (e.g. chat with function calling)."""
return self._tool_client
@property
def vision_client(self) -> "Optional[GenAIClient]":
"""Client configured for the vision role (e.g. review descriptions, object descriptions)."""
return self._vision_client
@property
def embeddings_client(self) -> "Optional[GenAIClient]":
"""Client configured for the embeddings role."""
return self._embeddings_client

View File

@@ -438,13 +438,6 @@ def migrate_018_0(config: dict[str, dict[str, Any]]) -> dict[str, dict[str, Any]
"""Handle migrating frigate config to 0.18-0"""
new_config = config.copy()
# Migrate GenAI to new format
genai = new_config.get("genai")
if genai and genai.get("provider"):
genai["roles"] = ["embeddings", "vision", "tools"]
new_config["genai"] = {"default": genai}
# Remove deprecated sync_recordings from global record config
if new_config.get("record", {}).get("sync_recordings") is not None:
del new_config["record"]["sync_recordings"]

web/package-lock.json (generated, 1164 lines changed)

View File

File diff suppressed because it is too large.

View File

@@ -71,6 +71,7 @@
"react-icons": "^5.5.0",
"react-konva": "^18.2.10",
"react-router-dom": "^6.30.3",
"react-markdown": "^9.0.1",
"react-swipeable": "^7.0.2",
"react-tracked": "^2.0.1",
"react-transition-group": "^4.4.5",

View File

@@ -245,6 +245,7 @@
"uiPlayground": "UI Playground",
"faceLibrary": "Face Library",
"classification": "Classification",
"chat": "Chat",
"user": {
"title": "User",
"account": "Account",

View File

@@ -0,0 +1,10 @@
{
"placeholder": "Ask anything...",
"error": "Something went wrong. Please try again.",
"processing": "Processing...",
"toolsUsed": "Used: {{tools}}",
"showTools": "Show tools ({{count}})",
"hideTools": "Hide tools",
"call": "Call",
"result": "Result"
}

View File

@@ -27,6 +27,7 @@ const Settings = lazy(() => import("@/pages/Settings"));
const UIPlayground = lazy(() => import("@/pages/UIPlayground"));
const FaceLibrary = lazy(() => import("@/pages/FaceLibrary"));
const Classification = lazy(() => import("@/pages/ClassificationModel"));
const Chat = lazy(() => import("@/pages/Chat"));
const Logs = lazy(() => import("@/pages/Logs"));
const AccessDenied = lazy(() => import("@/pages/AccessDenied"));
@@ -106,6 +107,7 @@ function DefaultAppView() {
<Route path="/logs" element={<Logs />} />
<Route path="/faces" element={<FaceLibrary />} />
<Route path="/classification" element={<Classification />} />
<Route path="/chat" element={<Chat />} />
<Route path="/playground" element={<UIPlayground />} />
</Route>
<Route path="/unauthorized" element={<AccessDenied />} />

View File

@@ -0,0 +1,9 @@
import ReactMarkdown from "react-markdown";
type AssistantMessageProps = {
content: string;
};
export function AssistantMessage({ content }: AssistantMessageProps) {
return <ReactMarkdown>{content}</ReactMarkdown>;
}

View File

@@ -0,0 +1,77 @@
import { useState } from "react";
import { useTranslation } from "react-i18next";
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "@/components/ui/collapsible";
import { Button } from "@/components/ui/button";
import { ChevronDown, ChevronRight } from "lucide-react";
type ToolCallBubbleProps = {
name: string;
arguments?: Record<string, unknown>;
response?: string;
side: "left" | "right";
};
export function ToolCallBubble({
name,
arguments: args,
response,
side,
}: ToolCallBubbleProps) {
const { t } = useTranslation(["views/chat"]);
const [open, setOpen] = useState(false);
const isLeft = side === "left";
const normalizedName = name
.replace(/_/g, " ")
.split(" ")
.map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
.join(" ");
return (
<div
className={
isLeft
? "self-start rounded-lg bg-muted px-3 py-2"
: "self-end rounded-lg bg-primary px-3 py-2 text-primary-foreground"
}
>
<Collapsible open={open} onOpenChange={setOpen}>
<CollapsibleTrigger asChild>
<Button
variant="ghost"
size="sm"
className="h-auto w-full justify-start gap-2 p-0 text-xs hover:bg-transparent"
>
{open ? <ChevronDown size={12} /> : <ChevronRight size={12} />}
<span className="font-medium">
{isLeft ? t("call") : t("result")} {normalizedName}
</span>
</Button>
</CollapsibleTrigger>
<CollapsibleContent>
<div className="mt-2 space-y-2">
{isLeft && args && Object.keys(args).length > 0 && (
<div className="text-xs">
<div className="font-medium text-muted-foreground">Arguments:</div>
<pre className="mt-1 max-h-32 overflow-auto rounded bg-muted/50 p-2 text-[10px]">
{JSON.stringify(args, null, 2)}
</pre>
</div>
)}
{!isLeft && response && response !== "" && (
<div className="text-xs">
<div className="font-medium opacity-80">Response:</div>
<pre className="mt-1 max-h-32 overflow-auto rounded bg-primary/20 p-2 text-[10px]">
{response}
</pre>
</div>
)}
</div>
</CollapsibleContent>
</Collapsible>
</div>
);
}

View File

@@ -6,7 +6,7 @@ import { isDesktop } from "react-device-detect";
import { FaCompactDisc, FaVideo } from "react-icons/fa";
import { IoSearch } from "react-icons/io5";
import { LuConstruction } from "react-icons/lu";
import { MdCategory, MdVideoLibrary } from "react-icons/md";
import { MdCategory, MdChat, MdVideoLibrary } from "react-icons/md";
import { TbFaceId } from "react-icons/tb";
import useSWR from "swr";
import { useIsAdmin } from "./use-is-admin";
@@ -18,6 +18,7 @@ export const ID_EXPORT = 4;
export const ID_PLAYGROUND = 5;
export const ID_FACE_LIBRARY = 6;
export const ID_CLASSIFICATION = 7;
export const ID_CHAT = 8;
export default function useNavigation(
variant: "primary" | "secondary" = "primary",
@@ -82,7 +83,15 @@ export default function useNavigation(
url: "/classification",
enabled: isDesktop && isAdmin,
},
{
id: ID_CHAT,
variant,
icon: MdChat,
title: "menu.chat",
url: "/chat",
enabled: isDesktop && isAdmin && config?.genai?.model !== "none",
},
] as NavData[],
[config?.face_recognition?.enabled, variant, isAdmin],
[config?.face_recognition?.enabled, config?.genai?.model, variant, isAdmin],
);
}

web/src/pages/Chat.tsx (new file, 160 lines added)

View File

@@ -0,0 +1,160 @@
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { FaArrowUpLong } from "react-icons/fa6";
import { useTranslation } from "react-i18next";
import { useState, useCallback } from "react";
import axios from "axios";
import { AssistantMessage } from "@/components/chat/AssistantMessage";
import { ToolCallBubble } from "@/components/chat/ToolCallBubble";
import type { ChatMessage, ToolCall } from "@/types/chat";
export default function ChatPage() {
const { t } = useTranslation(["views/chat"]);
const [input, setInput] = useState("");
const [messages, setMessages] = useState<ChatMessage[]>([]);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
const sendMessage = useCallback(async () => {
const text = input.trim();
if (!text || isLoading) return;
const userMessage: ChatMessage = { role: "user", content: text };
setInput("");
setError(null);
setMessages((prev) => [...prev, userMessage]);
setIsLoading(true);
try {
const apiMessages = [...messages, userMessage].map((m) => ({
role: m.role,
content: m.content,
}));
const { data } = await axios.post<{
message: { role: string; content: string | null };
tool_calls?: ToolCall[];
}>("chat/completion", { messages: apiMessages });
const content = data.message?.content ?? "";
setMessages((prev) => [
...prev,
{
role: "assistant",
content: content || " ",
toolCalls: data.tool_calls?.length ? data.tool_calls : undefined,
},
]);
} catch {
setError(t("error"));
} finally {
setIsLoading(false);
}
}, [input, isLoading, messages, t]);
return (
<div className="flex size-full flex-col items-center p-2">
<div className="flex min-h-0 w-full flex-1 flex-col gap-2 overflow-y-auto xl:w-[50%]">
{messages.map((msg, i) => (
<div key={i} className="flex flex-col gap-2">
{msg.role === "assistant" && msg.toolCalls && (
<>
{msg.toolCalls.map((tc, tcIdx) => (
<div key={tcIdx} className="flex flex-col gap-2">
<ToolCallBubble
name={tc.name}
arguments={tc.arguments}
side="left"
/>
{tc.response && (
<ToolCallBubble
name={tc.name}
response={tc.response}
side="right"
/>
)}
</div>
))}
</>
)}
<div
className={
msg.role === "user"
? "self-end rounded-lg bg-primary px-3 py-2 text-primary-foreground"
: "self-start rounded-lg bg-muted px-3 py-2"
}
>
{msg.role === "assistant" ? (
<AssistantMessage content={msg.content} />
) : (
msg.content
)}
</div>
</div>
))}
{isLoading && (
<div className="self-start rounded-lg bg-muted px-3 py-2 text-muted-foreground">
{t("processing")}
</div>
)}
{error && (
<p className="self-start text-sm text-destructive" role="alert">
{error}
</p>
)}
</div>
<ChatEntry
input={input}
setInput={setInput}
sendMessage={sendMessage}
isLoading={isLoading}
placeholder={t("placeholder")}
/>
</div>
);
}
type ChatEntryProps = {
input: string;
setInput: (value: string) => void;
sendMessage: () => void;
isLoading: boolean;
placeholder: string;
};
function ChatEntry({
input,
setInput,
sendMessage,
isLoading,
placeholder,
}: ChatEntryProps) {
const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
if (e.key === "Enter" && !e.shiftKey) {
e.preventDefault();
sendMessage();
}
};
return (
<div className="flex w-full flex-col items-center justify-center rounded-xl bg-secondary p-2 xl:w-[50%]">
<div className="flex w-full flex-row items-center gap-2">
<Input
className="w-full flex-1 border-transparent bg-transparent shadow-none focus-visible:ring-0 dark:bg-transparent"
placeholder={placeholder}
value={input}
onChange={(e) => setInput(e.target.value)}
onKeyDown={handleKeyDown}
disabled={isLoading}
/>
<Button
variant="select"
className="size-10 shrink-0 rounded-full"
disabled={!input.trim() || isLoading}
onClick={sendMessage}
>
<FaArrowUpLong size="16" />
</Button>
</div>
</div>
);
}

web/src/types/chat.ts (new file, 11 lines added)

View File

@@ -0,0 +1,11 @@
export type ToolCall = {
name: string;
arguments?: Record<string, unknown>;
response?: string;
};
export type ChatMessage = {
role: "user" | "assistant";
content: string;
toolCalls?: ToolCall[];
};

View File

@@ -46,6 +46,7 @@ i18n
"components/icons",
"components/player",
"views/events",
"views/chat",
"views/explore",
"views/live",
"views/settings",