diff --git a/docs/docs/configuration/genai.md b/docs/docs/configuration/genai.md index 991562871..875608766 100644 --- a/docs/docs/configuration/genai.md +++ b/docs/docs/configuration/genai.md @@ -211,7 +211,7 @@ You are also able to define custom prompts in your configuration. genai: provider: ollama base_url: http://localhost:11434 - model: llava + model: qwen3-vl:8b-instruct objects: prompt: "Analyze the {label} in these images from the {camera} security camera. Focus on the actions, behavior, and potential intent of the {label}, rather than just describing its appearance." diff --git a/docs/docs/configuration/genai/objects.md b/docs/docs/configuration/genai/objects.md index e5aa92cc0..e3ae31393 100644 --- a/docs/docs/configuration/genai/objects.md +++ b/docs/docs/configuration/genai/objects.md @@ -39,9 +39,10 @@ You are also able to define custom prompts in your configuration. genai: provider: ollama base_url: http://localhost:11434 - model: llava + model: qwen3-vl:8b-instruct objects: + genai: prompt: "Analyze the {label} in these images from the {camera} security camera. Focus on the actions, behavior, and potential intent of the {label}, rather than just describing its appearance." object_prompts: person: "Examine the main person in these images. What are they doing and what might their actions suggest about their intent (e.g., approaching a door, leaving an area, standing still)? Do not describe the surroundings or static details." 
diff --git a/docs/docs/configuration/hardware_acceleration_video.md b/docs/docs/configuration/hardware_acceleration_video.md index f7368e623..bbbf5a640 100644 --- a/docs/docs/configuration/hardware_acceleration_video.md +++ b/docs/docs/configuration/hardware_acceleration_video.md @@ -3,6 +3,8 @@ id: hardware_acceleration_video title: Video Decoding --- +import CommunityBadge from '@site/src/components/CommunityBadge'; + # Video Decoding It is highly recommended to use an integrated or discrete GPU for hardware acceleration video decoding in Frigate. @@ -31,11 +33,11 @@ Frigate supports presets for optimal hardware accelerated video decoding: - [Raspberry Pi](#raspberry-pi-34): Frigate can utilize the media engine in the Raspberry Pi 3 and 4 to slightly accelerate video decoding. -**Nvidia Jetson** +**Nvidia Jetson** - [Jetson](#nvidia-jetson): Frigate can utilize the media engine in Jetson hardware to accelerate video decoding. -**Rockchip** +**Rockchip** - [RKNN](#rockchip-platform): Frigate can utilize the media engine in RockChip SOCs to accelerate video decoding. @@ -184,11 +186,11 @@ If you are passing in a device path, make sure you've passed the device through Frigate can utilize modern AMD integrated GPUs and AMD GPUs to accelerate video decoding using VAAPI. -:::note +### Configuring Radeon Driver You need to change the driver to `radeonsi` by adding the following environment variable `LIBVA_DRIVER_NAME=radeonsi` to your docker-compose file or [in the `config.yml` for HA Add-on users](advanced.md#environment_vars). -::: +### Via VAAPI VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams. 
diff --git a/docs/docs/frigate/installation.md b/docs/docs/frigate/installation.md index 29ddbd79f..70b4b5bc1 100644 --- a/docs/docs/frigate/installation.md +++ b/docs/docs/frigate/installation.md @@ -465,6 +465,7 @@ There are important limitations in HA OS to be aware of: - Separate local storage for media is not yet supported by Home Assistant - AMD GPUs are not supported because HA OS does not include the mesa driver. +- Intel NPUs are not supported because HA OS does not include the NPU firmware. - Nvidia GPUs are not supported because addons do not support the nvidia runtime. ::: diff --git a/docs/docs/guides/getting_started.md b/docs/docs/guides/getting_started.md index 3b07d8d5b..8c90a6f33 100644 --- a/docs/docs/guides/getting_started.md +++ b/docs/docs/guides/getting_started.md @@ -134,31 +134,13 @@ Now you should be able to start Frigate by running `docker compose up -d` from w This section assumes that you already have an environment setup as described in [Installation](../frigate/installation.md). You should also configure your cameras according to the [camera setup guide](/frigate/camera_setup). Pay particular attention to the section on choosing a detect resolution. -### Step 1: Add a detect stream +### Step 1: Start Frigate -First we will add the detect stream for the camera: +At this point you should be able to start Frigate and a basic config will be created automatically. -```yaml -mqtt: - enabled: False +### Step 2: Add a camera -cameras: - name_of_your_camera: # <------ Name the camera - enabled: True - ffmpeg: - inputs: - - path: rtsp://10.0.10.10:554/rtsp # <----- The stream you want to use for detection - roles: - - detect -``` - -### Step 2: Start Frigate - -At this point you should be able to start Frigate and see the video feed in the UI. - -If you get an error image from the camera, this means ffmpeg was not able to get the video feed from your camera. Check the logs for error messages from ffmpeg. 
The default ffmpeg arguments are designed to work with H264 RTSP cameras that support TCP connections. - -FFmpeg arguments for other types of cameras can be found [here](../configuration/camera_specific.md). +You can click the `Add Camera` button to use the camera setup wizard to get your first camera added into Frigate. ### Step 3: Configure hardware acceleration (recommended) @@ -173,7 +155,7 @@ services: frigate: ... devices: - - /dev/dri/renderD128:/dev/dri/renderD128 # for intel hwaccel, needs to be updated for your hardware + - /dev/dri/renderD128:/dev/dri/renderD128 # for intel & amd hwaccel, needs to be updated for your hardware ... ``` diff --git a/docs/docs/troubleshooting/cpu.md b/docs/docs/troubleshooting/cpu.md new file mode 100644 index 000000000..a9f449ad8 --- /dev/null +++ b/docs/docs/troubleshooting/cpu.md @@ -0,0 +1,73 @@ +--- +id: cpu +title: High CPU Usage +--- + +High CPU usage can impact Frigate's performance and responsiveness. This guide outlines the most effective configuration changes to help reduce CPU consumption and optimize resource usage. + +## 1. Hardware Acceleration for Video Decoding + +**Priority: Critical** + +Video decoding is one of the most CPU-intensive tasks in Frigate. While an AI accelerator handles object detection, it does not assist with decoding video streams. Hardware acceleration (hwaccel) offloads this work to your GPU or specialized video decode hardware, significantly reducing CPU usage and enabling you to support more cameras on the same hardware. + +### Key Concepts + +**Resolution & FPS Impact:** The decoding burden grows exponentially with resolution and frame rate. A 4K stream at 30 FPS requires roughly 4 times the processing power of a 1080p stream at the same frame rate, and doubling the frame rate doubles the decode workload. This is why hardware acceleration becomes critical when working with multiple high-resolution cameras. 
+ +**Hardware Acceleration Benefits:** By using dedicated video decode hardware, you can: + +- Significantly reduce CPU usage per camera stream +- Support 2-3x more cameras on the same hardware +- Free up CPU resources for motion detection and other Frigate processes +- Reduce system heat and power consumption + +### Configuration + +Frigate provides preset configurations for common hardware acceleration scenarios. Set up `hwaccel_args` based on your hardware in your [configuration](../configuration/reference) as described in the [getting started guide](../guides/getting_started). + +### Troubleshooting Hardware Acceleration + +If hardware acceleration isn't working: + +1. Check Frigate logs for FFmpeg errors related to hwaccel +2. Verify the hardware device is accessible inside the container +3. Ensure your camera streams use H.264 or H.265 codecs (most common) +4. Try different presets if the automatic detection fails +5. Check that your GPU drivers are properly installed on the host system + +## 2. Detector Selection and Configuration + +**Priority: Critical** + +Choosing the right detector for your hardware is the single most important factor for detection performance. The detector is responsible for running the AI model that identifies objects in video frames. Different detector types have vastly different performance characteristics and hardware requirements, as detailed in the [hardware documentation](../frigate/hardware). + +### Understanding Detector Performance + +Frigate uses motion detection as a first-line check before running expensive object detection, as explained in the [motion detection documentation](../configuration/motion_detection). When motion is detected, Frigate creates a "region" (the green boxes in the debug viewer) and sends it to the detector. The detector's inference speed determines how many detections per second your system can handle. 
**Calculating Detector Capacity:** Your detector has a finite capacity measured in detections per second. With an inference speed of 10ms, your detector can handle approximately 100 detections per second (1000ms / 10ms = 100). If your cameras collectively require more than this capacity, you'll experience delays, missed detections, or the system will fall behind. + +### Choosing the Right Detector + +Different detectors have vastly different performance characteristics; see the expected performance for object detectors in [the hardware docs](../frigate/hardware). + +### Multiple Detector Instances + +When a single detector cannot keep up with your camera count, some detector types (`openvino`, `onnx`) allow you to define multiple detector instances to share the workload. This is particularly useful with GPU-based detectors that have sufficient VRAM to run multiple inference processes. + +For detailed instructions on configuring multiple detectors, see the [Object Detectors documentation](../configuration/object_detectors). + + +**When to add a second detector:** + +- Skipped FPS is consistently > 0 even during normal activity + +### Model Selection and Optimization + +The model you use significantly impacts detector performance. Frigate provides default models optimized for each detector type, but you can customize them as described in the [detector documentation](../configuration/object_detectors). + +**Model Size Trade-offs:** + +- Smaller models (320x320): Faster inference, Frigate is specifically optimized for a 320x320 size model. +- Larger models (640x640): Slower inference, can sometimes have higher accuracy on very large objects that take up a majority of the frame. 
\ No newline at end of file diff --git a/docs/docs/troubleshooting/dummy-camera.md b/docs/docs/troubleshooting/dummy-camera.md index 7e7c26ae9..c510f2ba8 100644 --- a/docs/docs/troubleshooting/dummy-camera.md +++ b/docs/docs/troubleshooting/dummy-camera.md @@ -1,6 +1,6 @@ --- id: dummy-camera -title: Troubleshooting Detection +title: Analyzing Object Detection --- When investigating object detection or tracking problems, it can be helpful to replay an exported video as a temporary "dummy" camera. This lets you reproduce issues locally, iterate on configuration (detections, zones, enrichment settings), and capture logs and clips for analysis. diff --git a/docs/docs/troubleshooting/edgetpu.md b/docs/docs/troubleshooting/edgetpu.md index af94a3d84..97b2b0040 100644 --- a/docs/docs/troubleshooting/edgetpu.md +++ b/docs/docs/troubleshooting/edgetpu.md @@ -1,6 +1,6 @@ --- id: edgetpu -title: Troubleshooting EdgeTPU +title: EdgeTPU Errors --- ## USB Coral Not Detected diff --git a/docs/docs/troubleshooting/gpu.md b/docs/docs/troubleshooting/gpu.md index a5b48246a..6399f92d8 100644 --- a/docs/docs/troubleshooting/gpu.md +++ b/docs/docs/troubleshooting/gpu.md @@ -1,6 +1,6 @@ --- id: gpu -title: Troubleshooting GPU +title: GPU Errors --- ## OpenVINO diff --git a/docs/docs/troubleshooting/memory.md b/docs/docs/troubleshooting/memory.md index c74729e5f..d062944e5 100644 --- a/docs/docs/troubleshooting/memory.md +++ b/docs/docs/troubleshooting/memory.md @@ -1,6 +1,6 @@ --- id: memory -title: Memory Troubleshooting +title: Memory Usage --- Frigate includes built-in memory profiling using [memray](https://bloomberg.github.io/memray/) to help diagnose memory issues. This feature allows you to profile specific Frigate modules to identify memory leaks, excessive allocations, or other memory-related problems. 
diff --git a/docs/docs/troubleshooting/recordings.md b/docs/docs/troubleshooting/recordings.md index d26a3614e..b1f180a82 100644 --- a/docs/docs/troubleshooting/recordings.md +++ b/docs/docs/troubleshooting/recordings.md @@ -1,6 +1,6 @@ --- id: recordings -title: Troubleshooting Recordings +title: Recordings Errors --- ## I have Frigate configured for motion recording only, but it still seems to be recording even with no motion. Why? diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 4c8effeec..ea0d2f5c8 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -129,10 +129,27 @@ const sidebars: SidebarsConfig = { Troubleshooting: [ "troubleshooting/faqs", "troubleshooting/recordings", - "troubleshooting/gpu", - "troubleshooting/edgetpu", - "troubleshooting/memory", "troubleshooting/dummy-camera", + { + type: "category", + label: "Troubleshooting Hardware", + link: { + type: "generated-index", + title: "Troubleshooting Hardware", + description: "Troubleshooting Problems with Hardware", + }, + items: ["troubleshooting/gpu", "troubleshooting/edgetpu"], + }, + { + type: "category", + label: "Troubleshooting Resource Usage", + link: { + type: "generated-index", + title: "Troubleshooting Resource Usage", + description: "Troubleshooting issues with resource usage", + }, + items: ["troubleshooting/cpu", "troubleshooting/memory"], + }, ], Development: [ "development/contributing", diff --git a/frigate/api/media.py b/frigate/api/media.py index 783b42e97..971bfef83 100644 --- a/frigate/api/media.py +++ b/frigate/api/media.py @@ -1935,7 +1935,7 @@ async def label_clip(request: Request, camera_name: str, label: str): try: event = event_query.get() - return await event_clip(request, event.id) + return await event_clip(request, event.id, 0) except DoesNotExist: return JSONResponse( content={"success": False, "message": "Event not found"}, status_code=404 diff --git a/web/src/components/card/EmptyCard.tsx b/web/src/components/card/EmptyCard.tsx index 8d6b67a68..00b22d197 100644 
--- a/web/src/components/card/EmptyCard.tsx +++ b/web/src/components/card/EmptyCard.tsx @@ -8,6 +8,7 @@ type EmptyCardProps = { className?: string; icon: React.ReactNode; title: string; + titleHeading?: boolean; description?: string; buttonText?: string; link?: string; @@ -16,14 +17,23 @@ export function EmptyCard({ className, icon, title, + titleHeading = true, description, buttonText, link, }: EmptyCardProps) { + let TitleComponent; + + if (titleHeading) { + TitleComponent = {title}; + } else { + TitleComponent =
{title}
; + } + return (
{icon} - {title} + {TitleComponent} {description && (
{description}
)} diff --git a/web/src/components/overlay/chip/GenAISummaryChip.tsx b/web/src/components/overlay/chip/GenAISummaryChip.tsx index ead0104b1..46fefdc67 100644 --- a/web/src/components/overlay/chip/GenAISummaryChip.tsx +++ b/web/src/components/overlay/chip/GenAISummaryChip.tsx @@ -26,7 +26,9 @@ export function GenAISummaryChip({ review }: GenAISummaryChipProps) { className={cn( "absolute left-1/2 top-8 z-30 flex max-w-[90vw] -translate-x-[50%] cursor-pointer select-none items-center gap-2 rounded-full p-2 text-sm transition-all duration-500", isVisible ? "translate-y-0 opacity-100" : "-translate-y-4 opacity-0", - isDesktop ? "bg-card" : "bg-secondary-foreground", + isDesktop + ? "bg-card text-primary" + : "bg-secondary-foreground text-white", )} > diff --git a/web/src/components/overlay/detail/TrackingDetails.tsx b/web/src/components/overlay/detail/TrackingDetails.tsx index 42535d5e1..80471b8bd 100644 --- a/web/src/components/overlay/detail/TrackingDetails.tsx +++ b/web/src/components/overlay/detail/TrackingDetails.tsx @@ -849,7 +849,11 @@ function LifecycleIconRow({ () => Array.isArray(item.data.attribute_box) && item.data.attribute_box.length >= 4 - ? (item.data.attribute_box[2] * item.data.attribute_box[3]).toFixed(4) + ? ( + item.data.attribute_box[2] * + item.data.attribute_box[3] * + 100 + ).toFixed(2) : undefined, [item.data.attribute_box], ); @@ -857,7 +861,7 @@ function LifecycleIconRow({ const areaPct = useMemo( () => Array.isArray(item.data.box) && item.data.box.length >= 4 - ? (item.data.box[2] * item.data.box[3]).toFixed(4) + ? 
(item.data.box[2] * item.data.box[3] * 100).toFixed(2) : undefined, [item.data.box], ); diff --git a/web/src/components/timeline/DetailStream.tsx b/web/src/components/timeline/DetailStream.tsx index 9258ca457..c6413ed97 100644 --- a/web/src/components/timeline/DetailStream.tsx +++ b/web/src/components/timeline/DetailStream.tsx @@ -744,7 +744,7 @@ function LifecycleItem({ const areaPct = useMemo( () => Array.isArray(item?.data.box) && item?.data.box.length >= 4 - ? (item?.data.box[2] * item?.data.box[3]).toFixed(4) + ? (item?.data.box[2] * item?.data.box[3] * 100).toFixed(2) : undefined, [item], ); @@ -766,7 +766,11 @@ function LifecycleItem({ () => Array.isArray(item?.data.attribute_box) && item?.data.attribute_box.length >= 4 - ? (item?.data.attribute_box[2] * item?.data.attribute_box[3]).toFixed(4) + ? ( + item?.data.attribute_box[2] * + item?.data.attribute_box[3] * + 100 + ).toFixed(2) : undefined, [item], ); @@ -845,7 +849,7 @@ function LifecycleItem({ {areaPx !== undefined && areaPct !== undefined ? ( - {areaPx} {t("information.pixels", { ns: "common" })}{" "} + {t("information.pixels", { ns: "common", area: areaPx })}{" "} ยท{" "} {areaPct}% diff --git a/web/src/views/events/EventView.tsx b/web/src/views/events/EventView.tsx index 9e015dfe4..70067ff5c 100644 --- a/web/src/views/events/EventView.tsx +++ b/web/src/views/events/EventView.tsx @@ -762,8 +762,9 @@ function DetectionReview({ {!loading && currentItems?.length === 0 && ( } />