diff --git a/docs/docs/configuration/license_plate_recognition.md b/docs/docs/configuration/license_plate_recognition.md
index 70f24c7a2..c1aa62b22 100644
--- a/docs/docs/configuration/license_plate_recognition.md
+++ b/docs/docs/configuration/license_plate_recognition.md
@@ -3,18 +3,18 @@ id: license_plate_recognition
title: License Plate Recognition (LPR)
---
-Frigate can recognize license plates on vehicles and automatically add the detected characters to the `recognized_license_plate` field or a known name as a `sub_label` to tracked objects of type `car` or `motorcycle`. A common use case may be to read the license plates of cars pulling into a driveway or cars passing by on a street.
+Frigate can recognize license plates on vehicles and automatically add the detected characters to the `recognized_license_plate` field or a [known](#matching) name as a `sub_label` to tracked objects of type `car` or `motorcycle`. A common use case may be to read the license plates of cars pulling into a driveway or cars passing by on a street.
LPR works best when the license plate is clearly visible to the camera. For moving vehicles, Frigate continuously refines the recognition process, keeping the most confident result. When a vehicle becomes stationary, LPR continues to run for a short time afterward to attempt recognition.
When a plate is recognized, the details are:
-- Added as a `sub_label` (if known) or the `recognized_license_plate` field (if unknown) to a tracked object.
-- Viewable in the Review Item Details pane in Review (sub labels).
+- Added as a `sub_label` (if [known](#matching)) or the `recognized_license_plate` field (if unknown) to a tracked object.
+- Viewable in the Details pane in Review/History.
- Viewable in the Tracked Object Details pane in Explore (sub labels and recognized license plates).
- Filterable through the More Filters menu in Explore.
-- Published via the `frigate/events` MQTT topic as a `sub_label` (known) or `recognized_license_plate` (unknown) for the `car` or `motorcycle` tracked object.
-- Published via the `frigate/tracked_object_update` MQTT topic with `name` (if known) and `plate`.
+- Published via the `frigate/events` MQTT topic as a `sub_label` ([known](#matching)) or `recognized_license_plate` (unknown) for the `car` or `motorcycle` tracked object.
+- Published via the `frigate/tracked_object_update` MQTT topic with `name` (if [known](#matching)) and `plate`.
## Model Requirements
@@ -31,6 +31,7 @@ In the default mode, Frigate's LPR needs to first detect a `car` or `motorcycle`
## Minimum System Requirements
License plate recognition works by running AI models locally on your system. The YOLOv9 plate detector model and the OCR models ([PaddleOCR](https://github.com/PaddlePaddle/PaddleOCR)) are relatively lightweight and can run on your CPU or GPU, depending on your configuration. At least 4GB of RAM is required.
+
## Configuration
License plate recognition is disabled by default. Enable it in your config file:
@@ -73,8 +74,8 @@ Fine-tune the LPR feature using these optional parameters at the global level of
- Default: `small`
- This can be `small` or `large`.
- The `small` model is fast and identifies groups of Latin and Chinese characters.
- - The `large` model identifies Latin characters only, but uses an enhanced text detector and is more capable at finding characters on multi-line plates. It is significantly slower than the `small` model. Note that using the `large` model does not improve _text recognition_, but it may improve _text detection_.
- - For most users, the `small` model is recommended.
+ - The `large` model identifies Latin characters only, and uses an enhanced text detector to find characters on multi-line plates. It is significantly slower than the `small` model.
+ - If your country or region does not use multi-line plates, you should use the `small` model as performance is much better for single-line plates.
### Recognition
@@ -177,7 +178,7 @@ lpr:
:::note
-If you want to detect cars on cameras but don't want to use resources to run LPR on those cars, you should disable LPR for those specific cameras.
+If a camera is configured to detect `car` or `motorcycle` but you don't want Frigate to run LPR for that camera, disable LPR at the camera level:
```yaml
cameras:
@@ -305,7 +306,7 @@ With this setup:
- Review items will always be classified as a `detection`.
- Snapshots will always be saved.
- Zones and object masks are **not** used.
-- The `frigate/events` MQTT topic will **not** publish tracked object updates with the license plate bounding box and score, though `frigate/reviews` will publish if recordings are enabled. If a plate is recognized as a known plate, publishing will occur with an updated `sub_label` field. If characters are recognized, publishing will occur with an updated `recognized_license_plate` field.
+- The `frigate/events` MQTT topic will **not** publish tracked object updates with the license plate bounding box and score, though `frigate/reviews` will publish if recordings are enabled. If a plate is recognized as a [known](#matching) plate, publishing will occur with an updated `sub_label` field. If characters are recognized, publishing will occur with an updated `recognized_license_plate` field.
- License plate snapshots are saved at the highest-scoring moment and appear in Explore.
- Debug view will not show `license_plate` bounding boxes.
diff --git a/docs/docs/configuration/semantic_search.md b/docs/docs/configuration/semantic_search.md
index b3ce37177..91f435ff0 100644
--- a/docs/docs/configuration/semantic_search.md
+++ b/docs/docs/configuration/semantic_search.md
@@ -141,7 +141,7 @@ Triggers are best configured through the Frigate UI.
Check the `Add Attribute` box to add the trigger's internal ID (e.g., "red_car_alert") to a data attribute on the tracked object that can be processed via the API or MQTT.
5. Save the trigger to update the configuration and store the embedding in the database.
-When a trigger fires, the UI highlights the trigger with a blue dot for 3 seconds for easy identification.
+When a trigger fires, the UI highlights the trigger with a blue dot for 3 seconds for easy identification. Additionally, the UI will show the last date/time and tracked object ID that activated your trigger. The last triggered timestamp is not saved to the database or persisted through restarts of Frigate.
### Usage and Best Practices
diff --git a/frigate/api/event.py b/frigate/api/event.py
index 61c1d86c4..13886af13 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -1781,9 +1781,8 @@ def create_trigger_embedding(
logger.debug(
f"Writing thumbnail for trigger with data {body.data} in {camera_name}."
)
- except Exception as e:
- logger.error(e.with_traceback())
- logger.error(
+ except Exception:
+ logger.exception(
f"Failed to write thumbnail for trigger with data {body.data} in {camera_name}"
)
@@ -1807,8 +1806,8 @@ def create_trigger_embedding(
status_code=200,
)
- except Exception as e:
- logger.error(e.with_traceback())
+ except Exception:
+ logger.exception("Error creating trigger embedding")
return JSONResponse(
content={
"success": False,
@@ -1917,9 +1916,8 @@ def update_trigger_embedding(
logger.debug(
f"Deleted thumbnail for trigger with data {trigger.data} in {camera_name}."
)
- except Exception as e:
- logger.error(e.with_traceback())
- logger.error(
+ except Exception:
+ logger.exception(
f"Failed to delete thumbnail for trigger with data {trigger.data} in {camera_name}"
)
@@ -1958,9 +1956,8 @@ def update_trigger_embedding(
logger.debug(
f"Writing thumbnail for trigger with data {body.data} in {camera_name}."
)
- except Exception as e:
- logger.error(e.with_traceback())
- logger.error(
+ except Exception:
+ logger.exception(
f"Failed to write thumbnail for trigger with data {body.data} in {camera_name}"
)
@@ -1972,8 +1969,8 @@ def update_trigger_embedding(
status_code=200,
)
- except Exception as e:
- logger.error(e.with_traceback())
+ except Exception:
+ logger.exception("Error updating trigger embedding")
return JSONResponse(
content={
"success": False,
@@ -2033,9 +2030,8 @@ def delete_trigger_embedding(
logger.debug(
f"Deleted thumbnail for trigger with data {trigger.data} in {camera_name}."
)
- except Exception as e:
- logger.error(e.with_traceback())
- logger.error(
+ except Exception:
+ logger.exception(
f"Failed to delete thumbnail for trigger with data {trigger.data} in {camera_name}"
)
@@ -2047,8 +2043,8 @@ def delete_trigger_embedding(
status_code=200,
)
- except Exception as e:
- logger.error(e.with_traceback())
+ except Exception:
+ logger.exception("Error deleting trigger embedding")
return JSONResponse(
content={
"success": False,
diff --git a/frigate/camera/maintainer.py b/frigate/camera/maintainer.py
index 865fe4725..815e650e9 100644
--- a/frigate/camera/maintainer.py
+++ b/frigate/camera/maintainer.py
@@ -136,6 +136,7 @@ class CameraMaintainer(threading.Thread):
self.ptz_metrics[name],
self.region_grids[name],
self.stop_event,
+ self.config.logger,
)
self.camera_processes[config.name] = camera_process
camera_process.start()
@@ -156,7 +157,11 @@ class CameraMaintainer(threading.Thread):
self.frame_manager.create(f"{config.name}_frame{i}", frame_size)
capture_process = CameraCapture(
- config, count, self.camera_metrics[name], self.stop_event
+ config,
+ count,
+ self.camera_metrics[name],
+ self.stop_event,
+ self.config.logger,
)
capture_process.daemon = True
self.capture_processes[name] = capture_process
diff --git a/frigate/data_processing/post/review_descriptions.py b/frigate/data_processing/post/review_descriptions.py
index 9691ac8fd..fadc483c3 100644
--- a/frigate/data_processing/post/review_descriptions.py
+++ b/frigate/data_processing/post/review_descriptions.py
@@ -132,17 +132,15 @@ class ReviewDescriptionProcessor(PostProcessorApi):
if image_source == ImageSourceEnum.recordings:
duration = final_data["end_time"] - final_data["start_time"]
- buffer_extension = min(
- 10, max(2, duration * RECORDING_BUFFER_EXTENSION_PERCENT)
- )
+ buffer_extension = min(5, duration * RECORDING_BUFFER_EXTENSION_PERCENT)
# Ensure minimum total duration for short review items
# This provides better context for brief events
total_duration = duration + (2 * buffer_extension)
if total_duration < MIN_RECORDING_DURATION:
- # Expand buffer to reach minimum duration, still respecting max of 10s per side
+ # Expand buffer to reach minimum duration, still respecting max of 5s per side
additional_buffer_per_side = (MIN_RECORDING_DURATION - duration) / 2
- buffer_extension = min(10, additional_buffer_per_side)
+ buffer_extension = min(5, additional_buffer_per_side)
thumbs = self.get_recording_frames(
camera,
diff --git a/frigate/data_processing/real_time/face.py b/frigate/data_processing/real_time/face.py
index db3cb22ed..1901a81e1 100644
--- a/frigate/data_processing/real_time/face.py
+++ b/frigate/data_processing/real_time/face.py
@@ -424,7 +424,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
if not res:
return {
- "message": "No face was recognized.",
+ "message": "Model is still training, please try again in a few moments.",
"success": False,
}
diff --git a/frigate/video.py b/frigate/video.py
index 739fb5c03..6be4f52a4 100755
--- a/frigate/video.py
+++ b/frigate/video.py
@@ -16,7 +16,7 @@ from frigate.comms.recordings_updater import (
RecordingsDataSubscriber,
RecordingsDataTypeEnum,
)
-from frigate.config import CameraConfig, DetectConfig, ModelConfig
+from frigate.config import CameraConfig, DetectConfig, LoggerConfig, ModelConfig
from frigate.config.camera.camera import CameraTypeEnum
from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
@@ -539,6 +539,7 @@ class CameraCapture(FrigateProcess):
shm_frame_count: int,
camera_metrics: CameraMetrics,
stop_event: MpEvent,
+ log_config: LoggerConfig | None = None,
) -> None:
super().__init__(
stop_event,
@@ -549,9 +550,10 @@ class CameraCapture(FrigateProcess):
self.config = config
self.shm_frame_count = shm_frame_count
self.camera_metrics = camera_metrics
+ self.log_config = log_config
def run(self) -> None:
- self.pre_run_setup()
+ self.pre_run_setup(self.log_config)
camera_watchdog = CameraWatchdog(
self.config,
self.shm_frame_count,
@@ -577,6 +579,7 @@ class CameraTracker(FrigateProcess):
ptz_metrics: PTZMetrics,
region_grid: list[list[dict[str, Any]]],
stop_event: MpEvent,
+ log_config: LoggerConfig | None = None,
) -> None:
super().__init__(
stop_event,
@@ -592,9 +595,10 @@ class CameraTracker(FrigateProcess):
self.camera_metrics = camera_metrics
self.ptz_metrics = ptz_metrics
self.region_grid = region_grid
+ self.log_config = log_config
def run(self) -> None:
- self.pre_run_setup()
+ self.pre_run_setup(self.log_config)
frame_queue = self.camera_metrics.frame_queue
frame_shape = self.config.frame_shape
diff --git a/web/public/notifications-worker.js b/web/public/notifications-worker.js
index ab8a6ae44..ba4e033ea 100644
--- a/web/public/notifications-worker.js
+++ b/web/public/notifications-worker.js
@@ -44,11 +44,16 @@ self.addEventListener("notificationclick", (event) => {
switch (event.action ?? "default") {
case "markReviewed":
if (event.notification.data) {
- fetch("/api/reviews/viewed", {
- method: "POST",
- headers: { "Content-Type": "application/json", "X-CSRF-TOKEN": 1 },
- body: JSON.stringify({ ids: [event.notification.data.id] }),
- });
+ event.waitUntil(
+ fetch("/api/reviews/viewed", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ "X-CSRF-TOKEN": 1,
+ },
+ body: JSON.stringify({ ids: [event.notification.data.id] }),
+ }), // eslint-disable-line comma-dangle
+ );
}
break;
default:
@@ -58,7 +63,7 @@ self.addEventListener("notificationclick", (event) => {
// eslint-disable-next-line no-undef
if (clients.openWindow) {
// eslint-disable-next-line no-undef
- return clients.openWindow(url);
+ event.waitUntil(clients.openWindow(url));
}
}
}
diff --git a/web/src/components/card/ClassificationCard.tsx b/web/src/components/card/ClassificationCard.tsx
index 4511c22d5..0e1138feb 100644
--- a/web/src/components/card/ClassificationCard.tsx
+++ b/web/src/components/card/ClassificationCard.tsx
@@ -398,11 +398,7 @@ export function GroupedClassificationCard({
threshold={threshold}
selected={false}
i18nLibrary={i18nLibrary}
- onClick={(data, meta) => {
- if (meta || selectedItems.length > 0) {
- onClick(data);
- }
- }}
+ onClick={() => {}}
>
{children?.(data)}
diff --git a/web/src/components/menu/SearchResultActions.tsx b/web/src/components/menu/SearchResultActions.tsx
index f5128e268..66de0c496 100644
--- a/web/src/components/menu/SearchResultActions.tsx
+++ b/web/src/components/menu/SearchResultActions.tsx
@@ -4,9 +4,7 @@ import { FrigateConfig } from "@/types/frigateConfig";
import { baseUrl } from "@/api/baseUrl";
import { toast } from "sonner";
import axios from "axios";
-import { LuCamera, LuDownload, LuTrash2 } from "react-icons/lu";
import { FiMoreVertical } from "react-icons/fi";
-import { MdImageSearch } from "react-icons/md";
import { buttonVariants } from "@/components/ui/button";
import {
ContextMenu,
@@ -31,11 +29,8 @@ import {
AlertDialogTitle,
} from "@/components/ui/alert-dialog";
import useSWR from "swr";
-
import { Trans, useTranslation } from "react-i18next";
-import { BsFillLightningFill } from "react-icons/bs";
import BlurredIconButton from "../button/BlurredIconButton";
-import { PiPath } from "react-icons/pi";
type SearchResultActionsProps = {
searchResult: SearchResult;
@@ -98,7 +93,6 @@ export default function SearchResultActions({
href={`${baseUrl}api/events/${searchResult.id}/clip.mp4`}
download={`${searchResult.camera}_${searchResult.label}.mp4`}
>
-