diff --git a/docs/docs/configuration/custom_classification/object_classification.md b/docs/docs/configuration/custom_classification/object_classification.md
index 70fd1fbbd..da8c4d887 100644
--- a/docs/docs/configuration/custom_classification/object_classification.md
+++ b/docs/docs/configuration/custom_classification/object_classification.md
@@ -33,9 +33,9 @@ For object classification:
   - Example: `cat` → `Leo`, `Charlie`, `None`.
 - **Attribute**:
-  - Added as metadata to the object (visible in /events): `: `.
+  - Added as metadata to the object, visible in the Tracked Object Details pane in Explore, `frigate/events` MQTT messages, and the HTTP API response as `: `.
   - Ideal when multiple attributes can coexist independently.
-  - Example: Detecting if a `person` in a construction yard is wearing a helmet or not.
+  - Example: Detecting whether a `person` in a construction yard is wearing a helmet, and whether they are wearing a yellow vest.
 
 :::note
@@ -81,6 +81,8 @@ classification:
       classification_type: sub_label # or: attribute
 ```
 
+An optional config, `save_attempts`, can be set as a key under the model name. This defines the number of classification attempts to save in the Recent Classifications tab. For object classification models, the default is 200.
+
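+For example, to keep the 50 most recent attempts for a model (a minimal sketch — `dog_classification` is an illustrative name, and the nesting under `classification.custom` is assumed to match the configuration example above):
+
+```yaml
+classification:
+  custom:
+    dog_classification:
+      save_attempts: 50
+```
+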
 ## Training the model
 
 Creating and training the model is done within the Frigate UI using the `Classification` page. The process consists of two steps:
@@ -89,6 +91,8 @@ Creating and training the model is done within the Frigate UI using the `Classif
 
 Enter a name for your model, select the object label to classify (e.g., `person`, `dog`, `car`), choose the classification type (sub label or attribute), and define your classes. Include a `none` class for objects that don't fit any specific category.
 
+For example, to classify your two cats, create a model named "Our Cats" with two classes, "Charlie" and "Leo". Create a third class, "none", for neighborhood cats that are not your own.
+
 ### Step 2: Assign Training Examples
 
 The system will automatically generate example images from detected objects matching your selected label. You'll be guided through each class one at a time to select which images represent that class. Any images not assigned to a specific class will automatically be assigned to `none` when you complete the last class. Once all images are processed, training will begin automatically.
diff --git a/docs/docs/configuration/custom_classification/state_classification.md b/docs/docs/configuration/custom_classification/state_classification.md
index 196ec78de..1ffdf9011 100644
--- a/docs/docs/configuration/custom_classification/state_classification.md
+++ b/docs/docs/configuration/custom_classification/state_classification.md
@@ -48,6 +48,8 @@ classification:
       crop: [0, 180, 220, 400]
 ```
 
+An optional config, `save_attempts`, can be set as a key under the model name. This defines the number of classification attempts to save in the Recent Classifications tab. For state classification models, the default is 100.
+
 ## Training the model
 
 Creating and training the model is done within the Frigate UI using the `Classification` page. The process consists of three steps:
diff --git a/docs/docs/configuration/genai.md b/docs/docs/configuration/genai.md
index 018dc2050..f9a3e1de0 100644
--- a/docs/docs/configuration/genai.md
+++ b/docs/docs/configuration/genai.md
@@ -56,7 +56,7 @@ Parallel requests also come with some caveats. You will need to set `OLLAMA_NUM_
 
 ### Supported Models
 
-You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`. Note that Frigate will not automatically download the model you specify in your config, you must download the model to your local instance of Ollama first i.e. by running `ollama pull llava:7b` on your Ollama server/Docker container. Note that the model specified in Frigate's config must match the downloaded model tag.
+You must use a vision-capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). Note that Frigate will not automatically download the model you specify in your config; you must first download it to your local instance of Ollama, e.g. by running `ollama pull llava:7b` on your Ollama server/Docker container. The model specified in Frigate's config must match the downloaded model tag.
 
 :::note
@@ -64,6 +64,10 @@ You should have at least 8 GB of RAM available (or VRAM if running on GPU) to ru
 
 :::
 
+#### Ollama Cloud models
+
+Ollama also supports [cloud models](https://ollama.com/cloud), where your local Ollama instance handles requests from Frigate, but model inference is performed in the cloud. Set up Ollama locally, sign in with your Ollama account, and specify the cloud model name in your Frigate config. For more details, see the Ollama cloud model [docs](https://docs.ollama.com/cloud).
+
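+For instance, a cloud model might be configured like this (a minimal sketch — the `genai` keys mirror the Configuration example below, and `gpt-oss:120b-cloud` is an illustrative cloud model tag):
+
+```yaml
+genai:
+  provider: ollama
+  base_url: http://localhost:11434
+  model: gpt-oss:120b-cloud
+```
+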
+ """ + if event_id in self.tracked_events: + del self.tracked_events[event_id] + if event_id in self.early_request_sent: + del self.early_request_sent[event_id] + def _read_and_crop_snapshot(self, event: Event) -> bytes | None: """Read, decode, and crop the snapshot image.""" @@ -299,9 +312,8 @@ class ObjectDescriptionProcessor(PostProcessorApi): ), ).start() - # Delete tracked events based on the event_id - if event.id in self.tracked_events: - del self.tracked_events[event.id] + # Clean up tracked events and early request state + self.cleanup_event(event.id) def _genai_embed_description(self, event: Event, thumbnails: list[bytes]) -> None: """Embed the description for an event.""" diff --git a/frigate/data_processing/post/review_descriptions.py b/frigate/data_processing/post/review_descriptions.py index 7932d56f4..0b12aa1a0 100644 --- a/frigate/data_processing/post/review_descriptions.py +++ b/frigate/data_processing/post/review_descriptions.py @@ -311,6 +311,7 @@ class ReviewDescriptionProcessor(PostProcessorApi): start_ts, end_ts, events_with_context, + self.config.review.genai.preferred_language, self.config.review.genai.debug_save_thumbnails, ) else: diff --git a/frigate/embeddings/maintainer.py b/frigate/embeddings/maintainer.py index 78a251c42..21d6a5634 100644 --- a/frigate/embeddings/maintainer.py +++ b/frigate/embeddings/maintainer.py @@ -522,6 +522,8 @@ class EmbeddingMaintainer(threading.Thread): ) elif isinstance(processor, ObjectDescriptionProcessor): if not updated_db: + # Still need to cleanup tracked events even if not processing + processor.cleanup_event(event_id) continue processor.process_data( diff --git a/frigate/genai/__init__.py b/frigate/genai/__init__.py index 910fc13b9..5e1a74279 100644 --- a/frigate/genai/__init__.py +++ b/frigate/genai/__init__.py @@ -178,6 +178,7 @@ Each line represents a detection state, not necessarily unique individuals. Pare start_ts: float, end_ts: float, events: list[dict[str, Any]], + preferred_language: str | None, debug_save: bool, ) -> str | None: """Generate a summary of review item descriptions over a period of time.""" @@ -232,6 +233,9 @@ Guidelines: for event in events: timeline_summary_prompt += f"\n{event}\n" + if preferred_language: + timeline_summary_prompt += f"\nProvide your answer in {preferred_language}" + if debug_save: with open( os.path.join( diff --git a/web/src/components/overlay/detail/SearchDetailDialog.tsx b/web/src/components/overlay/detail/SearchDetailDialog.tsx index 392e929eb..e3ae19159 100644 --- a/web/src/components/overlay/detail/SearchDetailDialog.tsx +++ b/web/src/components/overlay/detail/SearchDetailDialog.tsx @@ -599,9 +599,14 @@ export default function SearchDetailDialog({ { if (isPopoverOpen) { diff --git a/web/src/components/overlay/detail/TrackingDetails.tsx b/web/src/components/overlay/detail/TrackingDetails.tsx index 28a462487..42535d5e1 100644 --- a/web/src/components/overlay/detail/TrackingDetails.tsx +++ b/web/src/components/overlay/detail/TrackingDetails.tsx @@ -526,7 +526,7 @@ export function TrackingDetails({
1 && aspectRatio < 1.5 + ? "lg:basis-3/5" + : "lg:basis-2/5", )} > {isDesktop && tabs && ( @@ -632,121 +635,114 @@ export function TrackingDetails({ )}
{config?.cameras[event.camera]?.onvif.autotracking .enabled_in_config && ( -
+
{t("trackingDetails.autoTrackingTips")}
)} -
-
-
+
+
+
{ + e.stopPropagation(); + // event.start_time is detect time, convert to record + handleSeekToTime( + (event.start_time ?? 0) + annotationOffset / 1000, + ); + }} + role="button" + >
{ - e.stopPropagation(); - // event.start_time is detect time, convert to record - handleSeekToTime( - (event.start_time ?? 0) + annotationOffset / 1000, - ); - }} - role="button" + className={cn( + "relative ml-2 rounded-full bg-muted-foreground p-2", + )} > -
- {getIconForLabel( - event.sub_label ? event.label + "-verified" : event.label, - "size-4 text-white", - )} -
-
- {label} -
- {formattedStart ?? ""} - {event.end_time != null ? ( - <> - {formattedEnd} - ) : ( -
- -
- )} -
- {event.data?.recognized_license_plate && ( - <> - · -
- - {event.data.recognized_license_plate} - -
- + {getIconForLabel( + event.sub_label ? event.label + "-verified" : event.label, + "size-4 text-white", + )} +
+
+ {label} +
+ {formattedStart ?? ""} + {event.end_time != null ? ( + <> - {formattedEnd} + ) : ( +
+ +
)}
+ {event.data?.recognized_license_plate && ( + <> + · +
+ + {event.data.recognized_license_plate} + +
+ + )}
+
-
- {!eventSequence ? ( - - ) : eventSequence.length === 0 ? ( -
- {t("detail.noObjectDetailData", { ns: "views/events" })} -
- ) : ( +
+ {!eventSequence ? ( + + ) : eventSequence.length === 0 ? ( +
+ {t("detail.noObjectDetailData", { ns: "views/events" })} +
+ ) : ( +
+ className="absolute -top-2 left-6 z-0 w-0.5 -translate-x-1/2 bg-secondary-foreground" + style={{ bottom: lineBottomOffsetPx }} + /> + {isWithinEventRange && (
- {isWithinEventRange && ( -
- )} -
- {eventSequence.map((item, idx) => { - return ( -
{ - rowRefs.current[idx] = el; - }} - > - handleLifecycleClick(item)} - setSelectedZone={setSelectedZone} - getZoneColor={getZoneColor} - effectiveTime={effectiveTime} - isTimelineActive={isWithinEventRange} - /> -
- ); - })} -
+ )} +
+ {eventSequence.map((item, idx) => { + return ( +
{ + rowRefs.current[idx] = el; + }} + > + handleLifecycleClick(item)} + setSelectedZone={setSelectedZone} + getZoneColor={getZoneColor} + effectiveTime={effectiveTime} + isTimelineActive={isWithinEventRange} + /> +
+ ); + })}
- )} -
+
+ )}
diff --git a/web/src/views/live/LiveCameraView.tsx b/web/src/views/live/LiveCameraView.tsx index 5de52d243..418c74068 100644 --- a/web/src/views/live/LiveCameraView.tsx +++ b/web/src/views/live/LiveCameraView.tsx @@ -1444,7 +1444,7 @@ function FrigateCameraFeatures({ ns: "components/dialog", })}
- +
@@ -1531,7 +1531,7 @@ function FrigateCameraFeatures({ <>
{t("stream.audio.unavailable")}
- +
@@ -1575,7 +1575,7 @@ function FrigateCameraFeatures({ <>
{t("stream.twoWayTalk.unavailable")}
- +