Mirror of https://github.com/blakeblackshear/frigate.git (synced 2026-02-13 15:02:01 -05:00)

Compare commits: 10 commits, 0.18-rebas...ai-chat
| Author | SHA1 | Date |
|---|---|---|
| | e57447813e | |
| | 89a713baaa | |
| | ae45e8f873 | |
| | 80b20898f9 | |
| | 058da7ac1a | |
| | f7ce3914ce | |
| | 325b9d6407 | |
| | 7c235bb86e | |
| | 57366db37e | |
| | 2ab6fe6f7b | |
Chat API endpoint module (Python):

@@ -3,7 +3,8 @@
 import base64
 import json
 import logging
-from datetime import datetime, timezone
+import time
+from datetime import datetime
 from typing import Any, Dict, List, Optional

 import cv2
@@ -20,6 +21,7 @@ from frigate.api.defs.request.chat_body import ChatCompletionRequest
 from frigate.api.defs.response.chat_response import (
     ChatCompletionResponse,
     ChatMessageResponse,
+    ToolCall,
 )
 from frigate.api.defs.tags import Tags
 from frigate.api.event import events
@@ -30,6 +32,31 @@ logger = logging.getLogger(__name__)
 router = APIRouter(tags=[Tags.chat])


+def _format_events_with_local_time(
+    events_list: List[Dict[str, Any]],
+) -> List[Dict[str, Any]]:
+    """Add human-readable local start/end times to each event for the LLM."""
+    result = []
+    for evt in events_list:
+        if not isinstance(evt, dict):
+            result.append(evt)
+            continue
+        copy_evt = dict(evt)
+        try:
+            start_ts = evt.get("start_time")
+            end_ts = evt.get("end_time")
+            if start_ts is not None:
+                dt_start = datetime.fromtimestamp(start_ts)
+                copy_evt["start_time_local"] = dt_start.strftime("%Y-%m-%d %I:%M:%S %p")
+            if end_ts is not None:
+                dt_end = datetime.fromtimestamp(end_ts)
+                copy_evt["end_time_local"] = dt_end.strftime("%Y-%m-%d %I:%M:%S %p")
+        except (TypeError, ValueError, OSError):
+            pass
+        result.append(copy_evt)
+    return result
+
+
 class ToolExecuteRequest(BaseModel):
     """Request model for tool execution."""

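The helper above copies each event dict and attaches `start_time_local` / `end_time_local` strings rendered with `datetime.fromtimestamp`, which uses the server's local timezone. A minimal, standalone sketch of the same conversion (the event values below are illustrative, not taken from a real Frigate database):

```python
from datetime import datetime

# Illustrative event; real Frigate events carry many more fields.
event = {"id": "1718000000.0-abc123", "camera": "front_door", "start_time": 1718000000.0}

copy_evt = dict(event)
copy_evt["start_time_local"] = datetime.fromtimestamp(event["start_time"]).strftime(
    "%Y-%m-%d %I:%M:%S %p"
)

# On a UTC-configured host this prints "2024-06-10 06:13:20 AM";
# on any other host the string reflects that host's local timezone.
print(copy_evt["start_time_local"])
```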
@@ -137,23 +164,26 @@ async def _execute_search_objects(
     This searches for detected objects (events) in Frigate using the same
     logic as the events API endpoint.
     """
-    # Parse ISO 8601 timestamps to Unix timestamps if provided
+    # Parse after/before as server local time; convert to Unix timestamp
     after = arguments.get("after")
     before = arguments.get("before")

+    def _parse_as_local_timestamp(s: str):
+        s = s.replace("Z", "").strip()[:19]
+        dt = datetime.strptime(s, "%Y-%m-%dT%H:%M:%S")
+        return time.mktime(dt.timetuple())
+
     if after:
         try:
-            after_dt = datetime.fromisoformat(after.replace("Z", "+00:00"))
-            after = after_dt.timestamp()
-        except (ValueError, AttributeError):
+            after = _parse_as_local_timestamp(after)
+        except (ValueError, AttributeError, TypeError):
             logger.warning(f"Invalid 'after' timestamp format: {after}")
             after = None

     if before:
         try:
-            before_dt = datetime.fromisoformat(before.replace("Z", "+00:00"))
-            before = before_dt.timestamp()
-            except (ValueError, AttributeError):
+            before = _parse_as_local_timestamp(before)
+        except (ValueError, AttributeError, TypeError):
             logger.warning(f"Invalid 'before' timestamp format: {before}")
             before = None
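The behavioral change in this hunk is that `after`/`before` strings are no longer forced to UTC when they carry a `Z` suffix; the wall-clock portion is now interpreted in the server's local timezone via `time.mktime`. A small sketch of the difference, with an illustrative input (the printed offset depends on the host's timezone):

```python
import time
from datetime import datetime

raw = "2024-06-10T00:00:00Z"  # illustrative query bound

# Old behavior: "Z" forces UTC, yielding a fixed instant regardless of server timezone.
utc_ts = datetime.fromisoformat(raw.replace("Z", "+00:00")).timestamp()

# New behavior: strip the "Z" and treat the wall-clock time as server-local.
trimmed = raw.replace("Z", "").strip()[:19]
local_ts = time.mktime(datetime.strptime(trimmed, "%Y-%m-%dT%H:%M:%S").timetuple())

# 0 on a UTC host; otherwise equal to the host's UTC offset in seconds
# (e.g. -18000 on a UTC-5 host).
print(utc_ts - local_ts)
```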
@@ -395,9 +425,9 @@ async def chat_completion(
     tools = get_tool_definitions()
     conversation = []

-    current_datetime = datetime.now(timezone.utc)
+    current_datetime = datetime.now()
     current_date_str = current_datetime.strftime("%Y-%m-%d")
-    current_time_str = current_datetime.strftime("%H:%M:%S %Z")
+    current_time_str = current_datetime.strftime("%I:%M:%S %p")

     cameras_info = []
     config = request.app.frigate_config
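For reference, the format-string change swaps a 24-hour UTC stamp for a 12-hour local one; a quick illustration (output depends on the host's clock and timezone):

```python
from datetime import datetime, timezone

# Old: aware UTC time, 24-hour clock, e.g. "18:05:07 UTC".
print(datetime.now(timezone.utc).strftime("%H:%M:%S %Z"))

# New: naive local time, 12-hour clock with AM/PM, e.g. "06:05:07 PM".
print(datetime.now().strftime("%I:%M:%S %p"))
```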
@@ -430,9 +460,10 @@ async def chat_completion(

     system_prompt = f"""You are a helpful assistant for Frigate, a security camera NVR system. You help users answer questions about their cameras, detected objects, and events.

-Current date and time: {current_date_str} at {current_time_str} (UTC)
+Current server local date and time: {current_date_str} at {current_time_str}

-When users ask questions about "today", "yesterday", "this week", etc., use the current date above as reference.
+Always present times to the user in the server's local timezone. When tool results include start_time_local and end_time_local, use those exact strings when listing or describing detection times—do not convert or invent timestamps. Do not use UTC or ISO format with Z for the user-facing answer unless the tool result only provides Unix timestamps without local time fields.
+When users ask about "today", "yesterday", "this week", etc., use the current date above as reference.
 When searching for objects or events, use ISO 8601 format for dates (e.g., {current_date_str}T00:00:00Z for the start of today).
 Always be accurate with time calculations based on the current date provided.{cameras_section}{live_image_note}"""

@@ -472,6 +503,7 @@ Always be accurate with time calculations based on the current date provided.{ca
         conversation.append(msg_dict)

     tool_iterations = 0
+    tool_calls: List[ToolCall] = []
     max_iterations = body.max_tool_iterations

     logger.debug(
@@ -518,8 +550,8 @@ Always be accurate with time calculations based on the current date provided.{ca
         ]
         conversation.append(assistant_message)

-        tool_calls = response.get("tool_calls")
-        if not tool_calls:
+        pending_tool_calls = response.get("tool_calls")
+        if not pending_tool_calls:
             logger.debug(
                 f"Chat completion finished with final answer (iterations: {tool_iterations})"
             )
@@ -532,6 +564,7 @@ Always be accurate with time calculations based on the current date provided.{ca
                     ),
                     finish_reason=response.get("finish_reason", "stop"),
                     tool_iterations=tool_iterations,
+                    tool_calls=tool_calls,
                 ).model_dump(),
             )

@@ -539,11 +572,11 @@ Always be accurate with time calculations based on the current date provided.{ca
         tool_iterations += 1
         logger.debug(
             f"Tool calls detected (iteration {tool_iterations}/{max_iterations}): "
-            f"{len(tool_calls)} tool(s) to execute"
+            f"{len(pending_tool_calls)} tool(s) to execute"
         )
         tool_results = []

-        for tool_call in tool_calls:
+        for tool_call in pending_tool_calls:
             tool_name = tool_call["name"]
             tool_args = tool_call["arguments"]
             tool_call_id = tool_call["id"]
@@ -557,6 +590,25 @@ Always be accurate with time calculations based on the current date provided.{ca
                     tool_name, tool_args, request, allowed_cameras
                 )

+                # Add local time fields to search_objects results so the LLM doesn't hallucinate timestamps
+                if tool_name == "search_objects" and isinstance(tool_result, list):
+                    tool_result = _format_events_with_local_time(tool_result)
+                    _keys = {
+                        "id",
+                        "camera",
+                        "label",
+                        "zones",
+                        "start_time_local",
+                        "end_time_local",
+                        "sub_label",
+                        "event_count",
+                    }
+                    tool_result = [
+                        {k: evt[k] for k in _keys if k in evt}
+                        for evt in tool_result
+                        if isinstance(evt, dict)
+                    ]
+
                 if isinstance(tool_result, dict):
                     result_content = json.dumps(tool_result)
                     result_summary = tool_result
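The added block both annotates and prunes `search_objects` results before they are serialized for the model: only a whitelist of keys survives, so bulky or irrelevant fields never reach the prompt. A standalone sketch of the pruning step (the sample event and its field values are illustrative):

```python
_keys = {
    "id", "camera", "label", "zones",
    "start_time_local", "end_time_local", "sub_label", "event_count",
}

events = [
    {
        "id": "abc",
        "camera": "front_door",
        "label": "person",
        "score": 0.92,  # not whitelisted, will be dropped
        "start_time_local": "2024-06-10 06:13:20 AM",
    }
]

pruned = [{k: evt[k] for k in _keys if k in evt} for evt in events if isinstance(evt, dict)]
print(pruned)  # only whitelisted keys remain
```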
@@ -574,6 +626,12 @@ Always be accurate with time calculations based on the current date provided.{ca
                         f"Tool {tool_name} (id: {tool_call_id}) completed successfully. "
                         f"Result: {json.dumps(result_summary, indent=2)}"
                     )
+                elif isinstance(tool_result, list):
+                    result_content = json.dumps(tool_result)
+                    logger.debug(
+                        f"Tool {tool_name} (id: {tool_call_id}) completed successfully. "
+                        f"Result: {len(tool_result)} item(s)"
+                    )
                 elif isinstance(tool_result, str):
                     result_content = tool_result
                     logger.debug(
@@ -587,6 +645,13 @@ Always be accurate with time calculations based on the current date provided.{ca
                         f"Result type: {type(tool_result).__name__}"
                     )

+                tool_calls.append(
+                    ToolCall(
+                        name=tool_name,
+                        arguments=tool_args or {},
+                        response=result_content,
+                    )
+                )
                 tool_results.append(
                     {
                         "role": "tool",
@@ -602,6 +667,13 @@ Always be accurate with time calculations based on the current date provided.{ca
                     error_content = json.dumps(
                         {"error": f"Tool execution failed: {str(e)}"}
                     )
+                    tool_calls.append(
+                        ToolCall(
+                            name=tool_name,
+                            arguments=tool_args or {},
+                            response=error_content,
+                        )
+                    )
                     tool_results.append(
                         {
                             "role": "tool",
@@ -631,6 +703,7 @@ Always be accurate with time calculations based on the current date provided.{ca
             ),
             finish_reason="length",
             tool_iterations=tool_iterations,
+            tool_calls=tool_calls,
         ).model_dump(),
     )

Chat response models (Python):

@@ -5,8 +5,8 @@ from typing import Any, Optional
 from pydantic import BaseModel, Field


-class ToolCall(BaseModel):
-    """A tool call from the LLM."""
+class ToolCallInvocation(BaseModel):
+    """A tool call requested by the LLM (before execution)."""

     id: str = Field(description="Unique identifier for this tool call")
     name: str = Field(description="Tool name to call")
@@ -20,11 +20,24 @@ class ChatMessageResponse(BaseModel):
     content: Optional[str] = Field(
         default=None, description="Message content (None if tool calls present)"
     )
-    tool_calls: Optional[list[ToolCall]] = Field(
+    tool_calls: Optional[list[ToolCallInvocation]] = Field(
         default=None, description="Tool calls if LLM wants to call tools"
     )


+class ToolCall(BaseModel):
+    """A tool that was executed during the completion, with its response."""
+
+    name: str = Field(description="Tool name that was called")
+    arguments: dict[str, Any] = Field(
+        default_factory=dict, description="Arguments passed to the tool"
+    )
+    response: str = Field(
+        default="",
+        description="The response or result returned from the tool execution",
+    )
+
+
 class ChatCompletionResponse(BaseModel):
     """Response from chat completion."""

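To make the renamed models concrete, here is a standalone sketch that constructs the new executed-call record exactly as the fields above define it; the class is redeclared locally only so the snippet runs on its own, and the argument/response values are illustrative:

```python
from typing import Any

from pydantic import BaseModel, Field


class ToolCall(BaseModel):
    """A tool that was executed during the completion, with its response."""

    name: str = Field(description="Tool name that was called")
    arguments: dict[str, Any] = Field(
        default_factory=dict, description="Arguments passed to the tool"
    )
    response: str = Field(
        default="",
        description="The response or result returned from the tool execution",
    )


call = ToolCall(
    name="search_objects",
    arguments={"label": "person", "camera": "front_door"},
    response='[{"id": "abc", "label": "person"}]',
)
print(call.model_dump())
```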
@@ -35,3 +48,7 @@ class ChatCompletionResponse(BaseModel):
     tool_iterations: int = Field(
         default=0, description="Number of tool call iterations performed"
     )
+    tool_calls: list[ToolCall] = Field(
+        default_factory=list,
+        description="List of tool calls that were executed during this completion",
+    )
web/package-lock.json (generated, 1164 lines): file diff suppressed because it is too large.
@@ -71,6 +71,7 @@
     "react-icons": "^5.5.0",
     "react-konva": "^18.2.10",
     "react-router-dom": "^6.30.3",
+    "react-markdown": "^9.0.1",
     "react-swipeable": "^7.0.2",
     "react-tracked": "^2.0.1",
     "react-transition-group": "^4.4.5",
@@ -245,6 +245,7 @@
     "uiPlayground": "UI Playground",
     "faceLibrary": "Face Library",
     "classification": "Classification",
+    "chat": "Chat",
     "user": {
       "title": "User",
       "account": "Account",
web/public/locales/en/views/chat.json (new file, 10 lines):

{
  "placeholder": "Ask anything...",
  "error": "Something went wrong. Please try again.",
  "processing": "Processing...",
  "toolsUsed": "Used: {{tools}}",
  "showTools": "Show tools ({{count}})",
  "hideTools": "Hide tools",
  "call": "Call",
  "result": "Result"
}
@@ -27,6 +27,7 @@ const Settings = lazy(() => import("@/pages/Settings"));
 const UIPlayground = lazy(() => import("@/pages/UIPlayground"));
 const FaceLibrary = lazy(() => import("@/pages/FaceLibrary"));
 const Classification = lazy(() => import("@/pages/ClassificationModel"));
+const Chat = lazy(() => import("@/pages/Chat"));
 const Logs = lazy(() => import("@/pages/Logs"));
 const AccessDenied = lazy(() => import("@/pages/AccessDenied"));

@@ -106,6 +107,7 @@ function DefaultAppView() {
           <Route path="/logs" element={<Logs />} />
           <Route path="/faces" element={<FaceLibrary />} />
           <Route path="/classification" element={<Classification />} />
+          <Route path="/chat" element={<Chat />} />
           <Route path="/playground" element={<UIPlayground />} />
         </Route>
         <Route path="/unauthorized" element={<AccessDenied />} />
||||
9
web/src/components/chat/AssistantMessage.tsx
Normal file
9
web/src/components/chat/AssistantMessage.tsx
Normal file
@@ -0,0 +1,9 @@
|
||||
import ReactMarkdown from "react-markdown";
|
||||
|
||||
type AssistantMessageProps = {
|
||||
content: string;
|
||||
};
|
||||
|
||||
export function AssistantMessage({ content }: AssistantMessageProps) {
|
||||
return <ReactMarkdown>{content}</ReactMarkdown>;
|
||||
}
|
||||
web/src/components/chat/ToolCallBubble.tsx (new file, 77 lines):

import { useState } from "react";
import { useTranslation } from "react-i18next";
import {
  Collapsible,
  CollapsibleContent,
  CollapsibleTrigger,
} from "@/components/ui/collapsible";
import { Button } from "@/components/ui/button";
import { ChevronDown, ChevronRight } from "lucide-react";

type ToolCallBubbleProps = {
  name: string;
  arguments?: Record<string, unknown>;
  response?: string;
  side: "left" | "right";
};

export function ToolCallBubble({
  name,
  arguments: args,
  response,
  side,
}: ToolCallBubbleProps) {
  const { t } = useTranslation(["views/chat"]);
  const [open, setOpen] = useState(false);
  const isLeft = side === "left";
  const normalizedName = name
    .replace(/_/g, " ")
    .split(" ")
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
    .join(" ");

  return (
    <div
      className={
        isLeft
          ? "self-start rounded-lg bg-muted px-3 py-2"
          : "self-end rounded-lg bg-primary px-3 py-2 text-primary-foreground"
      }
    >
      <Collapsible open={open} onOpenChange={setOpen}>
        <CollapsibleTrigger asChild>
          <Button
            variant="ghost"
            size="sm"
            className="h-auto w-full justify-start gap-2 p-0 text-xs hover:bg-transparent"
          >
            {open ? <ChevronDown size={12} /> : <ChevronRight size={12} />}
            <span className="font-medium">
              {isLeft ? t("call") : t("result")} {normalizedName}
            </span>
          </Button>
        </CollapsibleTrigger>
        <CollapsibleContent>
          <div className="mt-2 space-y-2">
            {isLeft && args && Object.keys(args).length > 0 && (
              <div className="text-xs">
                <div className="font-medium text-muted-foreground">Arguments:</div>
                <pre className="mt-1 max-h-32 overflow-auto rounded bg-muted/50 p-2 text-[10px]">
                  {JSON.stringify(args, null, 2)}
                </pre>
              </div>
            )}
            {!isLeft && response && response !== "" && (
              <div className="text-xs">
                <div className="font-medium opacity-80">Response:</div>
                <pre className="mt-1 max-h-32 overflow-auto rounded bg-primary/20 p-2 text-[10px]">
                  {response}
                </pre>
              </div>
            )}
          </div>
        </CollapsibleContent>
      </Collapsible>
    </div>
  );
}
@@ -6,7 +6,7 @@ import { isDesktop } from "react-device-detect";
 import { FaCompactDisc, FaVideo } from "react-icons/fa";
 import { IoSearch } from "react-icons/io5";
 import { LuConstruction } from "react-icons/lu";
-import { MdCategory, MdVideoLibrary } from "react-icons/md";
+import { MdCategory, MdChat, MdVideoLibrary } from "react-icons/md";
 import { TbFaceId } from "react-icons/tb";
 import useSWR from "swr";
 import { useIsAdmin } from "./use-is-admin";
@@ -18,6 +18,7 @@ export const ID_EXPORT = 4;
 export const ID_PLAYGROUND = 5;
 export const ID_FACE_LIBRARY = 6;
 export const ID_CLASSIFICATION = 7;
+export const ID_CHAT = 8;

 export default function useNavigation(
   variant: "primary" | "secondary" = "primary",
@@ -82,7 +83,15 @@ export default function useNavigation(
         url: "/classification",
         enabled: isDesktop && isAdmin,
       },
+      {
+        id: ID_CHAT,
+        variant,
+        icon: MdChat,
+        title: "menu.chat",
+        url: "/chat",
+        enabled: isDesktop && isAdmin && config?.genai?.model !== "none",
+      },
     ] as NavData[],
-    [config?.face_recognition?.enabled, variant, isAdmin],
+    [config?.face_recognition?.enabled, config?.genai?.model, variant, isAdmin],
   );
 }

web/src/pages/Chat.tsx (new file, 160 lines):

import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { FaArrowUpLong } from "react-icons/fa6";
import { useTranslation } from "react-i18next";
import { useState, useCallback } from "react";
import axios from "axios";
import { AssistantMessage } from "@/components/chat/AssistantMessage";
import { ToolCallBubble } from "@/components/chat/ToolCallBubble";
import type { ChatMessage, ToolCall } from "@/types/chat";

export default function ChatPage() {
  const { t } = useTranslation(["views/chat"]);
  const [input, setInput] = useState("");
  const [messages, setMessages] = useState<ChatMessage[]>([]);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  const sendMessage = useCallback(async () => {
    const text = input.trim();
    if (!text || isLoading) return;

    const userMessage: ChatMessage = { role: "user", content: text };
    setInput("");
    setError(null);
    setMessages((prev) => [...prev, userMessage]);
    setIsLoading(true);

    try {
      const apiMessages = [...messages, userMessage].map((m) => ({
        role: m.role,
        content: m.content,
      }));
      const { data } = await axios.post<{
        message: { role: string; content: string | null };
        tool_calls?: ToolCall[];
      }>("chat/completion", { messages: apiMessages });

      const content = data.message?.content ?? "";
      setMessages((prev) => [
        ...prev,
        {
          role: "assistant",
          content: content || " ",
          toolCalls: data.tool_calls?.length ? data.tool_calls : undefined,
        },
      ]);
    } catch {
      setError(t("error"));
    } finally {
      setIsLoading(false);
    }
  }, [input, isLoading, messages, t]);

  return (
    <div className="flex size-full flex-col items-center p-2">
      <div className="flex min-h-0 w-full flex-1 flex-col gap-2 overflow-y-auto xl:w-[50%]">
        {messages.map((msg, i) => (
          <div key={i} className="flex flex-col gap-2">
            {msg.role === "assistant" && msg.toolCalls && (
              <>
                {msg.toolCalls.map((tc, tcIdx) => (
                  <div key={tcIdx} className="flex flex-col gap-2">
                    <ToolCallBubble
                      name={tc.name}
                      arguments={tc.arguments}
                      side="left"
                    />
                    {tc.response && (
                      <ToolCallBubble
                        name={tc.name}
                        response={tc.response}
                        side="right"
                      />
                    )}
                  </div>
                ))}
              </>
            )}
            <div
              className={
                msg.role === "user"
                  ? "self-end rounded-lg bg-primary px-3 py-2 text-primary-foreground"
                  : "self-start rounded-lg bg-muted px-3 py-2"
              }
            >
              {msg.role === "assistant" ? (
                <AssistantMessage content={msg.content} />
              ) : (
                msg.content
              )}
            </div>
          </div>
        ))}
        {isLoading && (
          <div className="self-start rounded-lg bg-muted px-3 py-2 text-muted-foreground">
            {t("processing")}
          </div>
        )}
        {error && (
          <p className="self-start text-sm text-destructive" role="alert">
            {error}
          </p>
        )}
      </div>
      <ChatEntry
        input={input}
        setInput={setInput}
        sendMessage={sendMessage}
        isLoading={isLoading}
        placeholder={t("placeholder")}
      />
    </div>
  );
}

type ChatEntryProps = {
  input: string;
  setInput: (value: string) => void;
  sendMessage: () => void;
  isLoading: boolean;
  placeholder: string;
};

function ChatEntry({
  input,
  setInput,
  sendMessage,
  isLoading,
  placeholder,
}: ChatEntryProps) {
  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === "Enter" && !e.shiftKey) {
      e.preventDefault();
      sendMessage();
    }
  };

  return (
    <div className="flex w-full flex-col items-center justify-center rounded-xl bg-secondary p-2 xl:w-[50%]">
      <div className="flex w-full flex-row items-center gap-2">
        <Input
          className="w-full flex-1 border-transparent bg-transparent shadow-none focus-visible:ring-0 dark:bg-transparent"
          placeholder={placeholder}
          value={input}
          onChange={(e) => setInput(e.target.value)}
          onKeyDown={handleKeyDown}
          disabled={isLoading}
        />
        <Button
          variant="select"
          className="size-10 shrink-0 rounded-full"
          disabled={!input.trim() || isLoading}
          onClick={sendMessage}
        >
          <FaArrowUpLong size="16" />
        </Button>
      </div>
    </div>
  );
}
web/src/types/chat.ts (new file, 11 lines):

export type ToolCall = {
  name: string;
  arguments?: Record<string, unknown>;
  response?: string;
};

export type ChatMessage = {
  role: "user" | "assistant";
  content: string;
  toolCalls?: ToolCall[];
};
@@ -46,6 +46,7 @@ i18n
       "components/icons",
       "components/player",
       "views/events",
+      "views/chat",
       "views/explore",
       "views/live",
       "views/settings",