Compare commits

...

85 Commits

Author SHA1 Message Date
dependabot[bot]
413a9c0418
Bump lodash-es from 4.17.23 to 4.18.1 in /web
Bumps [lodash-es](https://github.com/lodash/lodash) from 4.17.23 to 4.18.1.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.23...4.18.1)

---
updated-dependencies:
- dependency-name: lodash-es
  dependency-version: 4.18.1
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-04-22 19:41:18 +00:00
dependabot[bot]
c244e6582a
Bump path-to-regexp from 0.1.12 to 0.1.13 in /docs (#22683)
Bumps [path-to-regexp](https://github.com/pillarjs/path-to-regexp) from 0.1.12 to 0.1.13.
- [Release notes](https://github.com/pillarjs/path-to-regexp/releases)
- [Changelog](https://github.com/pillarjs/path-to-regexp/blob/v0.1.13/History.md)
- [Commits](https://github.com/pillarjs/path-to-regexp/compare/v0.1.12...v0.1.13)

---
updated-dependencies:
- dependency-name: path-to-regexp
  dependency-version: 0.1.13
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-22 14:39:46 -05:00
dependabot[bot]
fff3594553
Bump lodash from 4.17.23 to 4.18.1 in /web (#22787)
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.23 to 4.18.1.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.23...4.18.1)

---
updated-dependencies:
- dependency-name: lodash
  dependency-version: 4.18.1
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-22 14:39:08 -05:00
dependabot[bot]
25bfb2c481
Bump python-multipart from 0.0.20 to 0.0.26 in /docker/main (#22894)
Bumps [python-multipart](https://github.com/Kludex/python-multipart) from 0.0.20 to 0.0.26.
- [Release notes](https://github.com/Kludex/python-multipart/releases)
- [Changelog](https://github.com/Kludex/python-multipart/blob/master/CHANGELOG.md)
- [Commits](https://github.com/Kludex/python-multipart/compare/0.0.20...0.0.26)

---
updated-dependencies:
- dependency-name: python-multipart
  dependency-version: 0.0.26
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-22 14:38:56 -05:00
Nicolas Mowen
b7261c8e70
GenAI Tweaks (#22968)
* Add debug logs

* refresh embeddings maintainer genai clients on config update

---------

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
2026-04-22 09:55:54 -06:00
Josh Hawkins
ad9092d0da
Tweaks (#22965)
* use ffmpeg to probe rtsp urls instead of cv2

cv2 is faster (no subprocess launch) and will continue to be used for recording segments

* tweak faq

* change unsaved color to orange

avoids confusion with validation errors (red)

* don't use any variant of orange as a profile color

avoids confusion with unsaved changes

* more unsaved color tweaks
2026-04-22 09:19:30 -06:00
Nicolas Mowen
20705a3e97
Update oneVPL (#22966) 2026-04-22 08:50:37 -06:00
Josh Hawkins
f4ac063b37
Add camera wizard improvements (#22963)
* warn in camera wizard when detect stream resolution cannot be determined

* add timeout and tcp fallback for rtsp urls only
2026-04-22 08:15:17 -05:00
Abhilash Kishore
2dcaeb6809
fix: bump OpenVINO to 2025.4.x to resolve LXC container detector crash (#22859)
* fix: bump OpenVINO to 2025.4.x to resolve LXC container crash

* fix: replace openvino + onnxruntime with onnxruntime-openvino 1.24.*

onnxruntime-openvino 1.24.* bundles OpenVINO 2025.4.1, which fixes a
crash in constrained CPU environments (e.g. Proxmox LXC) where
lin_system_conf.cpp calls stoi("") on empty strings read from offline
CPU sysfs entries.

Consolidating to onnxruntime-openvino also ensures the OpenVINO runtime
and ONNX Runtime OpenVINO EP are always compatible versions.

* revert: restore onnxruntime, keep openvino bump

Reverting onnxruntime-openvino consolidation - onnxruntime is used with
multiple execution providers (CUDA, TensorRT, MIGraphX, CPU) and cannot
be replaced wholesale with the openvino-specific wheel.
2026-04-22 07:12:14 -06:00
Josh Hawkins
962d36323b
Improve frontend e2e tests (#22958)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* add mock data

* add helpers

* page objects

* updated specs

* remove PENDING_REWARITE

* formatting
2026-04-21 16:32:18 -06:00
Josh Hawkins
3b81416299
Update Radix deps (#22957)
Some checks are pending
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
* Bump radix-ui packages to align react-dismissable-layer version and fix nested overlay pointer-events bug

* remove workarounds for radix pointer events issues on dropdown and context menus

* remove disablePortal from popover

* remove modal on popovers

* remove workarounds in restart dialog

* keep onCloseAutoFocus for face, classification, and ptz

these are necessary to prevent tooltips from re-showing and the arrow keys from reopening the ptz presets menu

* add tests
2026-04-21 08:48:48 -06:00
Eduardo-Jaramillo
7d315c5e6b
remove temporary DB cleanup check (#22950)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-21 05:11:01 -06:00
Nicolas Mowen
6cdf4fe3b8
Update intel runtimes to support Battlemage (#22943)
Some checks are pending
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-20 08:23:15 -06:00
Josh Hawkins
1a5d15ba81
Miscellaneous fixes (#22924)
* apply annotation offset to frigate+ submission frame time

* fix broken docs links with hash fragments that resolve wrong on reload

* undo

* use recording snapshot for frigate+ frame submission from VideoControls

rather than a canvas grab/paint, which may not always align with an ffmpeg snapshot due to keyframes

* add more docs links

- display docs link for main sections on collapsible fields

* dialog button consistency
2026-04-20 07:19:09 -06:00
icidi
043c746a8b
Improve readability by removing trailing digits caused by floating number conversion (#22934) 2026-04-20 06:35:48 -06:00
Otto
423ee2fe72
Feature: Share Timestamped URL for Camera Footage History (#22537)
* Initial copy timestamp url implementation

* revise url format

* Implement share timestamp dialog

* Use translations

* Add comments

* Add validations to shared link

* Switch to searchEffect implementation

* Add missing accessibility related dialog description

* Change URL format to unix timestamps

* Remove unnecessary useEffect

* Remove duplicated dialog title

* Fixes/improvements based off PR review comments

* Add missing cancel button & separators to dialog

* Make share description clearer

* Bugfix: guard against showing toasts twice
Because this effect ends up running multiple times

* Clamp future timestamps to now

* Revert "Bugfix: guard against showing toasts twice"

This reverts commit 99fa5e1dee.

* Use normal separator

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>

* Fixes based off PR review comments

* Bugfix: Share dialog was not receiving the player timestamp after removing key that triggered remounts

* Defer `setRecording` and return true from hook for cleanup

* Remove timeout defer hack in favor of refactored hook

* Attempt to replay video muted on NotAllowedError

* Use separate persistent mute and temporary forced mute states

* Align cancel button with other dialogs

* Prevent wrapping on dialog title

* Remove extra "back" button on mobile drawer

* Fix back navigation when coming from direct shared timestamp links

* Use new timeformat hook

* Simplify dialog radio buttons

* Apply suggestions from code review

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>

---------

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
2026-04-20 06:35:25 -06:00
Arun Rajiah
d7f42735fc
docs: add cctvQL to third-party extensions (#22930) 2026-04-19 07:05:43 -06:00
Josh Hawkins
cfb87f9744
Miscellaneous fixes (#22913)
Some checks failed
CI / AMD64 Build (push) Has been cancelled
CI / ARM Build (push) Has been cancelled
CI / Jetson Jetpack 6 (push) Has been cancelled
CI / ARM Extra Build (push) Has been cancelled
CI / AMD64 Extra Build (push) Has been cancelled
CI / Synaptics Build (push) Has been cancelled
CI / Assemble and push default build (push) Has been cancelled
* add log when probing detect stream on startup

when users don't explicitly set detect.width and detect.height, we probe for them. sometimes the probe hangs (camera doesn't support UDP, like some Reolinks), so this log message will make that clearer

* add faq about probing detect stream

* fix stuck activity ring when tracked object transitions to stationary

* drop cache segments past retain cutoff regardless of retention mode

* add maintainer test
2026-04-18 07:10:50 -06:00
Josh Hawkins
74fcd720d3
Add step + percent progress for exports (#22915)
Some checks are pending
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* backend

* improve frontend Job typing

* progress frontend

* i18n

* tests
2026-04-17 12:18:12 -06:00
Josh Hawkins
a94d1b5d9e
Miscellaneous Fixes (#22890)
Some checks failed
CI / AMD64 Build (push) Has been cancelled
CI / ARM Build (push) Has been cancelled
CI / Jetson Jetpack 6 (push) Has been cancelled
CI / AMD64 Extra Build (push) Has been cancelled
CI / ARM Extra Build (push) Has been cancelled
CI / Synaptics Build (push) Has been cancelled
CI / Assemble and push default build (push) Has been cancelled
* only link to profile settings in status bar for admin users

* use hasFullCameraAccess for group filtering

* add custom export args to record docs

* update recordings docs

* prevent review WS handler from poisoning SWR cache before initial fetch completes
2026-04-16 09:10:03 -06:00
Nicolas Mowen
d830d47c9b
fix mypy (#22889)
Some checks are pending
CI / ARM Extra Build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-15 10:02:41 -05:00
Nicolas Mowen
3e85b18ee3
Use Frigate amd64 cache image for devcontainer build (#22888)
* Cache intel driver build

* Use Frigate's cached build
2026-04-15 08:37:23 -06:00
Nicolas Mowen
2ffe47511a
Fix manual event getting caught by global motion config (#22887) 2026-04-15 08:32:26 -06:00
Josh Hawkins
82e14d71fb
Fix review page spinner not clearing when review item ends (#22886)
* fix review page spinner not clearing when review item ends

* use last ended review item ID instead of counter

* use separate displayItems memo to overlay end_time updates without re-filtering reviewed items
2026-04-15 08:24:36 -06:00
Nicolas Mowen
15ac76f20d
Manually build intel driver (#22881)
* build intel driver

* Update docs
2026-04-15 07:07:27 -05:00
Josh Hawkins
a47be12ac5
Add deferred real-time processor for enrichments (#22880)
Some checks are pending
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
* implement deferred real-time processor with background task handling

* add tests

* fix typing
2026-04-14 21:39:44 -06:00
K3A
4232792248
docs: mention /dev/kfd for ROCm and clarify /dev/accel docker mounts in installation.md (#22879)
Some checks are pending
CI / Assemble and push default build (push) Blocked by required conditions
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
AMD needs /dev/kfd for ROCm.

And /dev/accel is used for NPUs in AMD APUs too (enabled via CONFIG_DRM_ACCEL_AMDXDNA kernel option).
2026-04-14 16:26:19 -06:00
Josh Hawkins
48abac9b45
fix i18n extractor not recognizing shorthand plural count in MultiExportDialog (#22877) 2026-04-14 13:51:39 -05:00
Josh Hawkins
e7e6f87682
Export improvements (#22867)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* backend

* frontend + i18n

* tests + api spec

* tweak backend to use Job infrastructure for exports

* frontend tweaks and Job infrastructure

* tests

* tweaks

- add ability to remove from case
- change location of counts in case card

* add stale export reaper on startup

* fix toaster close button color

* improve add dialog

* formatting

* hide max_concurrent from camera config export settings

* remove border

* refactor batch endpoint for multiple review items

* frontend

* tests and fastapi spec

* fix deletion of in-progress exports in a case

* tweaks

- hide cases when filtering cameras that have no exports from those cameras
- remove description from case card
- use textarea instead of input for case description in add new case dialog

* add auth exceptions for exports

* add e2e test for deleting cases with exports

* refactor delete and case endpoints

allow bulk deleting and reassigning

* frontend

- bulk selection like Review
- gate admin-only actions
- consolidate dialogs
- spacing/padding tweaks

* i18n and tests

* update openapi spec

* tweaks

- add None to case selection list
- allow new case creation from single cam export dialog

* fix codeql

* fix i18n

* remove unused

* fix frontend tests
2026-04-14 08:19:50 -06:00
Josh Hawkins
18c068a3f9
Add network requirements docs (#22874)
* Add network requirements docs

* shorten title

* add note about network requirements in each section
2026-04-14 08:03:34 -06:00
Josh Hawkins
335229d0d4
Miscellaneous fixes (#22828)
Some checks failed
CI / AMD64 Build (push) Has been cancelled
CI / ARM Build (push) Has been cancelled
CI / Jetson Jetpack 6 (push) Has been cancelled
CI / AMD64 Extra Build (push) Has been cancelled
CI / ARM Extra Build (push) Has been cancelled
CI / Synaptics Build (push) Has been cancelled
CI / Assemble and push default build (push) Has been cancelled
* fix video playback stutter when GenAI dialog is open in detail stream

Inline `onOpen` callback in DetailStream.tsx:522 creates a new function identity every render. GenAISummaryChip.tsx:98's useEffect depends on [open, onOpen], so it re-fires on every parent re-render while the dialog is open. Each fire calls onSeek -> setCurrentTime -> seekToTimestamp, creating a continuous re-render + seek loop

* add /profiles to EXEMPT_PATHS for non-admin users

* skip debug_replay/status poll for non-admin users

* use subquery for timeline lookup to avoid SQLite variable limit
2026-04-09 20:53:17 -06:00
Josh Hawkins
d113be5e19
Improve frontend test framework (#22824)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* add error allowlist file for error collector

* add error collector for console + page + request errors

* wire error collector into frigateApp fixture

* add self-tests for error collector fixture

* gate strict error mode on E2E_STRICT_ERRORS=1

* triage pre-existing errors and seed allowlist

* add mockEmpty/mockError/mockDelay helpers for state-driven tests

* add self-tests for mock override helpers

* add mobile affordance helpers to BasePage

* add lint script for banned spec patterns and @mobile rule

* apply prettier fixes to new e2e files

* rewrite export.spec.ts

* clean up

* move export spec rewrite and bugfix to separate branch
2026-04-09 14:42:36 -06:00
Josh Hawkins
98c2fe00c1
Chat improvements (#22823)
* Add score fusion helpers for find_similar_objects chat tool

* Add candidate query builder for find_similar_objects chat tool

* register find_similar_objects chat tool definition

* implement _execute_find_similar_objects chat tool dispatcher

* Dispatch find_similar_objects in chat tool executor

* Teach chat system prompt when to use find_similar_objects

* Add i18n strings for find_similar_objects chat tool

* Add frontend extractor for find_similar_objects tool response

* Render anchor badge and similarity scores in chat results

* formatting

* filter similarity results in python, not sqlite-vec

* extract pure chat helpers to chat_util module

* Teach chat system prompt about attached_event marker

* Add parseAttachedEvent and prependAttachment helpers

* Add i18n strings for chat event attachments

* Add ChatAttachmentChip component

* Make chat thumbnails attach to composer on click

* Render attachment chip in user chat bubbles

* Add ChatQuickReplies pill row component

* Add ChatPaperclipButton with event picker popover

* Wire event attachments into chat composer and messages

* add ability to stop streaming

* tweak cursor to appear at the end of the same line of the streaming response

* use abort signal

* add tooltip

* display label and camera on attachment chip
2026-04-09 14:31:37 -06:00
Josh Hawkins
556d5d8c9d
remove dead code and repair utf-8 preset names via latin-1 round trip (#22818)
Some checks failed
CI / AMD64 Build (push) Has been cancelled
CI / ARM Build (push) Has been cancelled
CI / Jetson Jetpack 6 (push) Has been cancelled
CI / AMD64 Extra Build (push) Has been cancelled
CI / ARM Extra Build (push) Has been cancelled
CI / Synaptics Build (push) Has been cancelled
CI / Assemble and push default build (push) Has been cancelled
2026-04-08 16:20:58 -05:00
Josh Hawkins
8f13932c64
UI fixes (#22814)
* display area as proper percentage in debug view

* match replay objects list with debug view

* motion search fixes

- tweak progress bar to exclude heatmap and inactive segments
- show metrics immediately on search start
- fix preview frame loading race
- fix polygon missing after dialog remount
- don't try to drag the image when dragging vertex of polygon

* add activity indicator to storage metrics

* make sub label query for events API endpoints case insensitive
2026-04-08 08:21:48 -06:00
Josh Hawkins
5d2a725428
Display more scores in Tracking Details (#22799)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* add computed and top score to timeline entries

* frontend

* docs
2026-04-07 10:06:22 -06:00
Josh Hawkins
dfe365cd28
Miscellaneous fixes (#22780)
* fix mobile export crash by removing stale iOS non-modal drawer workaround

* Remove titlecase to avoid Gemma4 handling plain labels as proper nouns

* Improve titling:

* Make directions more clear

* Properly capitalize delivery services

* update dispatcher config reference on save

* subscribe to review topic so ReviewDescriptionProcessor knows genai is enabled

* auto-send ON genai review WS message when enabled_in_config transitions to true

* remove unused object level

* update docs to clarify pre/post capture settings

* add ui docs links

* improve known_plates field in settings UI

* only show save all when multiple sections are changed

or if the section being changed is not currently being viewed

* fix docs

---------

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>
2026-04-07 07:16:19 -06:00
Josh Hawkins
49c3732726
Improve environment var handling (#22796)
* refactor env var handling

- use shared helper
- use left-to-right parser

* add tests

* formatting
2026-04-07 07:16:02 -06:00
dependabot[bot]
c3628a339d
Bump vite from 6.4.1 to 6.4.2 in /web (#22788)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.4.1 to 6.4.2.
- [Release notes](https://github.com/vitejs/vite/releases)
- [Changelog](https://github.com/vitejs/vite/blob/v6.4.2/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite/commits/v6.4.2/packages/vite)

---
updated-dependencies:
- dependency-name: vite
  dependency-version: 6.4.2
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-04-06 19:16:12 -05:00
Josh Hawkins
c750372586
Add frontend tests (#22783)
* basic e2e frontend test framework

* improve mock data generation and add test cases

* more cases

* add e2e tests to PR template

* don't generate mock data in PR CI

* satisfy codeql check

* fix flaky system page tab tests by guarding against crashes from incomplete mock stats

* reduce local test runs to 4 workers to match CI
2026-04-06 16:33:28 -06:00
Josh Hawkins
ed3bebc967
Miscellaneous fixes (#22779)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* block ffmpeg args in custom exports for non-admin users only

* prune expired reconnect timestamps periodically in watchdog loop

reconnect timestamps were only pruned when a new reconnect
occurred. This meant a single reconnect would persist in the count indefinitely instead of expiring after 1 hour

* formatting
2026-04-06 07:53:23 -06:00
Josh Hawkins
e95e9b52f3
GenAI tweak (#22773)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* refresh model dropdown after changing provider or base url

* decouple list_models from provider init

switching providers in the UI left an invalid model in the config, then _init_provider would fail and list_models would return an empty list, making it impossible to select a valid model
2026-04-05 11:00:13 -06:00
Josh Hawkins
67a1531da0
Tweaks (#22770)
* radix pointer events fix

* add debug log for no genai responses

* tweak profiles docs

* add weblate to maintainers list
2026-04-05 10:08:23 -06:00
Josh Hawkins
d8c35d5a0f
Miscellaneous fixes (#22762)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-04 21:32:26 -06:00
Hosted Weblate
9ba81d6dc8 Translated using Weblate (Cantonese (Traditional Han script))
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: beginner2047 <leoywng44@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/yue_Hant/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
2b8fd74ae8 Translated using Weblate (Norwegian Bokmål)
Currently translated at 100.0% (1071 of 1071 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (22 of 22 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (1068 of 1068 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (174 of 174 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (25 of 25 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (172 of 172 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 63.5% (652 of 1026 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 96.5% (56 of 58 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (138 of 138 strings)

Translated using Weblate (Norwegian Bokmål)

Currently translated at 100.0% (10 of 10 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: OverTheHillsAndFarAway <prosjektx@users.noreply.hosted.weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-configeditor/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/nb_NO/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/nb_NO/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-configeditor
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
c0ffb395f8 Translated using Weblate (Chinese (Simplified Han script))
Currently translated at 99.8% (1069 of 1071 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 99.9% (1067 of 1068 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (1065 of 1065 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (174 of 174 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (1049 of 1049 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 94.0% (963 of 1024 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 91.1% (925 of 1015 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (788 of 788 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 99.3% (783 of 788 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 98.9% (780 of 788 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (172 of 172 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 98.8% (779 of 788 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 98.8% (779 of 788 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 99.5% (465 of 467 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 91.2% (923 of 1011 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 91.2% (923 of 1011 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 99.3% (466 of 469 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 96.8% (1082 of 1117 strings)

Translated using Weblate (Chinese (Simplified Han script))

Currently translated at 100.0% (231 of 231 strings)

Co-authored-by: Anonymous <noreply@weblate.org>
Co-authored-by: GuoQing Liu <842607283@qq.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/zh_Hans/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/zh_Hans/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
416b69439c Translated using Weblate (Chinese (Traditional Han script))
Currently translated at 3.6% (17 of 469 strings)

Translated using Weblate (Chinese (Traditional Han script))

Currently translated at 1.0% (8 of 790 strings)

Translated using Weblate (Chinese (Traditional Han script))

Currently translated at 56.5% (69 of 122 strings)

Translated using Weblate (Chinese (Traditional Han script))

Currently translated at 56.5% (69 of 122 strings)

Co-authored-by: Anson <yeungyinho@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: windasd <me@windasd.tw>
Co-authored-by: 薛展 <n16418113@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/zh_Hant/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/zh_Hant/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/zh_Hant/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
8d7134319a Translated using Weblate (Slovenian)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: thehijacker <thehijacker@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/sl/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
23e7881b99 Translated using Weblate (Slovak)
Currently translated at 97.5% (119 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: OskarSidor <oskar.sidor@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/sk/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
b8796838d7 Translated using Weblate (Korean)
Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Korean)

Currently translated at 95.1% (117 of 123 strings)

Translated using Weblate (Korean)

Currently translated at 77.0% (134 of 174 strings)

Translated using Weblate (Korean)

Currently translated at 33.9% (170 of 501 strings)

Translated using Weblate (Korean)

Currently translated at 12.9% (138 of 1065 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: John <john@akfn.net>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/audio/ko/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ko/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/ko/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ko/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ko/
Translation: Frigate NVR/audio
Translation: Frigate NVR/common
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
89c258e18c Translated using Weblate (Serbian)
Currently translated at 40.9% (50 of 122 strings)

Co-authored-by: Aleksandar Jevremovic <aleksandar@jevremovic.org>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/sr/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
680cf146ef Translated using Weblate (Persian)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Anonymous <noreply@weblate.org>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/fa/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
7e2b6de1b3 Translated using Weblate (Swedish)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Kristian Johansson <knmjohansson@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/sv/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
e09a95921e Translated using Weblate (French)
Currently translated at 100.0% (2 of 2 strings)

Translated using Weblate (French)

Currently translated at 93.1% (54 of 58 strings)

Translated using Weblate (French)

Currently translated at 92.0% (23 of 25 strings)

Translated using Weblate (French)

Currently translated at 100.0% (22 of 22 strings)

Translated using Weblate (French)

Currently translated at 4.3% (34 of 790 strings)

Translated using Weblate (French)

Currently translated at 69.1% (728 of 1053 strings)

Translated using Weblate (French)

Currently translated at 98.2% (169 of 172 strings)

Translated using Weblate (French)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (French)

Currently translated at 96.0% (24 of 25 strings)

Translated using Weblate (French)

Currently translated at 97.0% (228 of 235 strings)

Translated using Weblate (French)

Currently translated at 94.5% (122 of 129 strings)

Translated using Weblate (French)

Currently translated at 70.5% (724 of 1026 strings)

Translated using Weblate (French)

Currently translated at 69.9% (718 of 1026 strings)

Translated using Weblate (French)

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (French)

Currently translated at 100.0% (22 of 22 strings)

Translated using Weblate (French)

Currently translated at 97.6% (168 of 172 strings)

Translated using Weblate (French)

Currently translated at 88.0% (22 of 25 strings)

Translated using Weblate (French)

Currently translated at 88.3% (152 of 172 strings)

Translated using Weblate (French)

Currently translated at 48.0% (12 of 25 strings)

Translated using Weblate (French)

Currently translated at 3.8% (30 of 788 strings)

Translated using Weblate (French)

Currently translated at 59.0% (13 of 22 strings)

Translated using Weblate (French)

Currently translated at 87.7% (151 of 172 strings)

Translated using Weblate (French)

Currently translated at 96.5% (227 of 235 strings)

Translated using Weblate (French)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (French)

Currently translated at 69.3% (43 of 62 strings)

Translated using Weblate (French)

Currently translated at 54.5% (12 of 22 strings)

Translated using Weblate (French)

Currently translated at 69.8% (715 of 1024 strings)

Translated using Weblate (French)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (French)

Currently translated at 3.6% (29 of 788 strings)

Translated using Weblate (French)

Currently translated at 40.0% (10 of 25 strings)

Translated using Weblate (French)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (French)

Currently translated at 2.5% (28 of 1111 strings)

Co-authored-by: Anonymous <noreply@weblate.org>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: N D <n.dubreuil@gmail.com>
Co-authored-by: Riton Du Boulon <henripl37@gmail.com>
Co-authored-by: alorente <gitmaster@passific.fr>
Co-authored-by: shdw <weblate@assez.biz>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-icons/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/fr/
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/common
Translation: Frigate NVR/components-icons
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
61d8943793 Translated using Weblate (Spanish)
Currently translated at 63.6% (666 of 1047 strings)

Translated using Weblate (Spanish)

Currently translated at 87.2% (150 of 172 strings)

Translated using Weblate (Spanish)

Currently translated at 95.6% (22 of 23 strings)

Translated using Weblate (Spanish)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Spanish)

Currently translated at 98.5% (140 of 142 strings)

Translated using Weblate (Spanish)

Currently translated at 96.8% (125 of 129 strings)

Translated using Weblate (Spanish)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Spanish)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Spanish)

Currently translated at 69.3% (43 of 62 strings)

Translated using Weblate (Spanish)

Currently translated at 6.0% (48 of 790 strings)

Translated using Weblate (Spanish)

Currently translated at 88.0% (22 of 25 strings)

Translated using Weblate (Spanish)

Currently translated at 9.8% (46 of 469 strings)

Translated using Weblate (Spanish)

Currently translated at 95.4% (21 of 22 strings)

Translated using Weblate (Spanish)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Francesc Domene <fdomenef@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: José María Díaz <jdiaz.bb@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/es/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/es/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
95f08db494 Translated using Weblate (Dutch)
Currently translated at 15.3% (72 of 469 strings)

Translated using Weblate (Dutch)

Currently translated at 91.2% (157 of 172 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (25 of 25 strings)

Translated using Weblate (Dutch)

Currently translated at 10.1% (80 of 788 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (22 of 22 strings)

Translated using Weblate (Dutch)

Currently translated at 99.1% (122 of 123 strings)

Translated using Weblate (Dutch)

Currently translated at 69.4% (713 of 1026 strings)

Translated using Weblate (Dutch)

Currently translated at 15.4% (72 of 467 strings)

Translated using Weblate (Dutch)

Currently translated at 8.6% (68 of 788 strings)

Translated using Weblate (Dutch)

Currently translated at 86.0% (148 of 172 strings)

Translated using Weblate (Dutch)

Currently translated at 52.0% (13 of 25 strings)

Translated using Weblate (Dutch)

Currently translated at 8.5% (67 of 788 strings)

Translated using Weblate (Dutch)

Currently translated at 86.3% (19 of 22 strings)

Translated using Weblate (Dutch)

Currently translated at 69.3% (43 of 62 strings)

Translated using Weblate (Dutch)

Currently translated at 81.8% (18 of 22 strings)

Translated using Weblate (Dutch)

Currently translated at 40.0% (10 of 25 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Dutch)

Currently translated at 8.2% (65 of 788 strings)

Translated using Weblate (Dutch)

Currently translated at 84.8% (146 of 172 strings)

Translated using Weblate (Dutch)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Dutch)

Currently translated at 69.7% (705 of 1011 strings)

Translated using Weblate (Dutch)

Currently translated at 94.3% (218 of 231 strings)

Translated using Weblate (Dutch)

Currently translated at 5.2% (59 of 1117 strings)

Co-authored-by: Anonymous <noreply@weblate.org>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Marijn <168113859+Marijn0@users.noreply.github.com>
Co-authored-by: Mark Holtkamp <markholtkamp85@gmail.com>
Co-authored-by: Paul Bröerken <broerken@me.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/nl/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/common
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
d5972e2ea7 Translated using Weblate (Indonesian)
Currently translated at 53.2% (65 of 122 strings)

Translated using Weblate (Indonesian)

Currently translated at 53.2% (65 of 122 strings)

Co-authored-by: Catto <sisharyadi@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: ariska <ariska@databisnis.id>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/id/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
3e502ee084 Translated using Weblate (Arabic)
Currently translated at 48.8% (84 of 172 strings)

Translated using Weblate (Arabic)

Currently translated at 4.0% (1 of 25 strings)

Translated using Weblate (Arabic)

Currently translated at 0.2% (1 of 469 strings)

Co-authored-by: Ammar Nafie <ymmar83@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: MAATECH <hmmdcool@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ar/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/ar/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ar/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
ecae4e36cd Translated using Weblate (Italian)
Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Italian)

Currently translated at 98.9% (98 of 99 strings)

Translated using Weblate (Italian)

Currently translated at 2.6% (21 of 790 strings)

Translated using Weblate (Italian)

Currently translated at 2.6% (21 of 790 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Italian)

Currently translated at 93.0% (120 of 129 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (25 of 25 strings)

Translated using Weblate (Italian)

Currently translated at 27.2% (6 of 22 strings)

Translated using Weblate (Italian)

Currently translated at 60.7% (651 of 1071 strings)

Translated using Weblate (Italian)

Currently translated at 2.7% (13 of 469 strings)

Translated using Weblate (Italian)

Currently translated at 1.2% (6 of 469 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Italian)

Currently translated at 0.3% (3 of 790 strings)

Translated using Weblate (Italian)

Currently translated at 76.0% (19 of 25 strings)

Translated using Weblate (Italian)

Currently translated at 98.9% (98 of 99 strings)

Translated using Weblate (Italian)

Currently translated at 27.2% (6 of 22 strings)

Translated using Weblate (Italian)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: AlessioCappe <alessio_cappe@hotmail.it>
Co-authored-by: Evion58 <enniocianciolo058@duck.com>
Co-authored-by: Gringo <ita.translations@tiscali.it>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/it/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
2026-04-04 08:08:35 -05:00
Hosted Weblate
985b834a51 Translated using Weblate (Polish)
Currently translated at 2.2% (18 of 790 strings)

Translated using Weblate (Polish)

Currently translated at 22.8% (107 of 469 strings)

Translated using Weblate (Polish)

Currently translated at 2.2% (18 of 790 strings)

Translated using Weblate (Polish)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Polish)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Mateusz Paś <piciuok@gmail.com>
Co-authored-by: Szymon Woźniak <swozniak6@gmail.com>
Co-authored-by: Wojciech Niziński <niziak-weblate@spox.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/pl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/pl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/pl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/pl/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
5098df10cc Translated using Weblate (Hebrew)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Ronen Atsil <atsil55@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/he/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
91af284677 Translated using Weblate (Hungarian)
Currently translated at 33.6% (41 of 122 strings)

Translated using Weblate (Hungarian)

Currently translated at 33.6% (41 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Zrinyi Patrik <patrikzrinyi404@gmail.com>
Co-authored-by: ugfus1630 <katona.ta@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/hu/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
1e2c8af3dc Translated using Weblate (Croatian)
Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Croatian)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Zoran Ivancevic <zolakt@gmail.com>
Co-authored-by: stipe-jurkovic <sjurko00@fesb.hr>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/hr/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
ba7725b9bf Translated using Weblate (Icelandic)
Currently translated at 1.3% (1 of 74 strings)

Translated using Weblate (Icelandic)

Currently translated at 16.6% (1 of 6 strings)

Translated using Weblate (Icelandic)

Currently translated at 1.7% (1 of 58 strings)

Translated using Weblate (Icelandic)

Currently translated at 10.0% (1 of 10 strings)

Translated using Weblate (Icelandic)

Currently translated at 50.0% (1 of 2 strings)

Translated using Weblate (Icelandic)

Currently translated at 50.0% (1 of 2 strings)

Translated using Weblate (Icelandic)

Currently translated at 10.0% (1 of 10 strings)

Translated using Weblate (Icelandic)

Currently translated at 1.6% (1 of 62 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Hosted Weblate user 145663 <marel@snorl.net>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-auth/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-filter/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-icons/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-input/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-configeditor/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/is/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-recording/is/
Translation: Frigate NVR/components-auth
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/components-filter
Translation: Frigate NVR/components-icons
Translation: Frigate NVR/components-input
Translation: Frigate NVR/views-configeditor
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-recording
2026-04-04 08:08:35 -05:00
Hosted Weblate
cf457f67bd Translated using Weblate (Vietnamese)
Currently translated at 31.9% (39 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: John Nguyen <thongnguyen.uit@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/vi/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
0fa6053839 Translated using Weblate (Portuguese)
Currently translated at 27.8% (34 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: fabiovalverde <fabio@rvalverde.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/pt/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
287ea3f4f0 Translated using Weblate (Czech)
Currently translated at 44.2% (54 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: lukascissa <lukas@cissa.cz>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/cs/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
a16c5731da Translated using Weblate (Catalan)
Currently translated at 100.0% (1071 of 1071 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1068 of 1068 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1065 of 1065 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1065 of 1065 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (174 of 174 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1053 of 1053 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1047 of 1047 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (172 of 172 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1026 of 1026 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (788 of 788 strings)

Translated using Weblate (Catalan)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (788 of 788 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1011 of 1011 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1011 of 1011 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1117 of 1117 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1005 of 1005 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (231 of 231 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1003 of 1003 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (1111 of 1111 strings)

Co-authored-by: Eduardo Pastor Fernández <123eduardoneko123@gmail.com>
Co-authored-by: Gerard Ricart Castells <gerard.ricart@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ca/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
aec7e7443a Translated using Weblate (Japanese)
Currently translated at 100.0% (6 of 6 strings)

Translated using Weblate (Japanese)

Currently translated at 52.1% (12 of 23 strings)

Translated using Weblate (Japanese)

Currently translated at 64.0% (16 of 25 strings)

Translated using Weblate (Japanese)

Currently translated at 83.1% (143 of 172 strings)

Translated using Weblate (Japanese)

Currently translated at 1.6% (13 of 788 strings)

Translated using Weblate (Japanese)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Japanese)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Japanese)

Currently translated at 60.0% (15 of 25 strings)

Translated using Weblate (Japanese)

Currently translated at 64.8% (656 of 1011 strings)

Translated using Weblate (Japanese)

Currently translated at 1.7% (8 of 469 strings)

Translated using Weblate (Japanese)

Currently translated at 18.1% (4 of 22 strings)

Translated using Weblate (Japanese)

Currently translated at 83.6% (143 of 171 strings)

Translated using Weblate (Japanese)

Currently translated at 1.0% (12 of 1117 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Yusuke, Hirota <hirota.yusuke@jp.fujitsu.com>
Co-authored-by: gon 360 <gon360@gmail.com>
Co-authored-by: sotohi <c6h4ohcooc6h5@gmail.com>
Co-authored-by: yhi264 <yhiraki@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-recording/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ja/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ja/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-recording
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
32a7adaa23 Translated using Weblate (Ukrainian)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Максим Горпиніч <gorpinicmaksim2005ukr@protonmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/uk/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
b3dce759d9 Translated using Weblate (Romanian)
Currently translated at 100.0% (1071 of 1071 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1068 of 1068 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1065 of 1065 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (174 of 174 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1053 of 1053 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1049 of 1049 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (172 of 172 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (Romanian)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (231 of 231 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (788 of 788 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1011 of 1011 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (1111 of 1111 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: lukasig <lukasig@hotmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ro/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
c65e6d84e9 Translated using Weblate (Russian)
Currently translated at 5.1% (41 of 790 strings)

Translated using Weblate (Russian)

Currently translated at 11.5% (54 of 469 strings)

Translated using Weblate (Russian)

Currently translated at 2.3% (11 of 469 strings)

Translated using Weblate (Russian)

Currently translated at 0.2% (2 of 790 strings)

Translated using Weblate (Russian)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Evgeny N. <isobago@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Артём Владимиров <artyomka71@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ru/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/ru/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/ru/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
41cfd23255 Translated using Weblate (Estonian)
Currently translated at 0.4% (2 of 467 strings)

Translated using Weblate (Estonian)

Currently translated at 86.7% (85 of 98 strings)

Translated using Weblate (Estonian)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (Estonian)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (Estonian)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (Estonian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Estonian)

Currently translated at 100.0% (231 of 231 strings)

Translated using Weblate (Estonian)

Currently translated at 19.0% (191 of 1005 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Priit Jõerüüt <jrthwlate@users.noreply.hosted.weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/et/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/et/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
2026-04-04 08:08:35 -05:00
Hosted Weblate
75e074fb4f Translated using Weblate (Danish)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Bjorn Jorgensen <github@bjornz.dk>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/da/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
56fbc4c185 Translated using Weblate (German)
Currently translated at 100.0% (1065 of 1065 strings)

Translated using Weblate (German)

Currently translated at 100.0% (174 of 174 strings)

Translated using Weblate (German)

Currently translated at 99.4% (173 of 174 strings)

Translated using Weblate (German)

Currently translated at 100.0% (1053 of 1053 strings)

Translated using Weblate (German)

Currently translated at 100.0% (1053 of 1053 strings)

Translated using Weblate (German)

Currently translated at 100.0% (469 of 469 strings)

Translated using Weblate (German)

Currently translated at 100.0% (99 of 99 strings)

Translated using Weblate (German)

Currently translated at 100.0% (123 of 123 strings)

Translated using Weblate (German)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (German)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (German)

Currently translated at 100.0% (790 of 790 strings)

Translated using Weblate (German)

Currently translated at 100.0% (172 of 172 strings)

Translated using Weblate (German)

Currently translated at 100.0% (129 of 129 strings)

Translated using Weblate (German)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (German)

Currently translated at 83.2% (656 of 788 strings)

Translated using Weblate (German)

Currently translated at 73.7% (757 of 1026 strings)

Translated using Weblate (German)

Currently translated at 100.0% (467 of 467 strings)

Translated using Weblate (German)

Currently translated at 99.4% (171 of 172 strings)

Translated using Weblate (German)

Currently translated at 100.0% (47 of 47 strings)

Translated using Weblate (German)

Currently translated at 100.0% (142 of 142 strings)

Translated using Weblate (German)

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (German)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (German)

Currently translated at 100.0% (235 of 235 strings)

Translated using Weblate (German)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (German)

Currently translated at 100.0% (62 of 62 strings)

Translated using Weblate (German)

Currently translated at 98.3% (120 of 122 strings)

Translated using Weblate (German)

Currently translated at 99.1% (122 of 123 strings)

Translated using Weblate (German)

Currently translated at 72.8% (737 of 1011 strings)

Translated using Weblate (German)

Currently translated at 70.2% (328 of 467 strings)

Translated using Weblate (German)

Currently translated at 100.0% (231 of 231 strings)

Translated using Weblate (German)

Currently translated at 58.8% (464 of 788 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Ninja110292 <ninja110292@users.noreply.hosted.weblate.org>
Co-authored-by: PhillyMay <mein.alias@outlook.com>
Co-authored-by: Sebastian Sie <sebastian.neuplanitz@googlemail.com>
Co-authored-by: jmtatsch <julian@tatsch.it>
Co-authored-by: mvdberge <micha.vordemberge@christmann.info>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/de/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/de/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
a6015631d4 Translated using Weblate (Portuguese (Brazil))
Currently translated at 93.1% (54 of 58 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 80.9% (115 of 142 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 67.7% (42 of 62 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 96.0% (24 of 25 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 44.1% (57 of 129 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 44.9% (461 of 1026 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 77.3% (133 of 172 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 4.6% (37 of 788 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 5.1% (24 of 467 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (22 of 22 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 45.9% (56 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Marcelo Popper Costa <marcelo_popper@hotmail.com>
Co-authored-by: Tiago Krüger <tiagokruger404@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-groups/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-events/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/pt_BR/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Groups
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-events
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-04-04 08:08:35 -05:00
Hosted Weblate
99076cfe51 Translated using Weblate (Lithuanian)
Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: MaBeniu <runnerm@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/lt/
Translation: Frigate NVR/views-classificationmodel
2026-04-04 08:08:35 -05:00
Hosted Weblate
75d654b6d5 Translated using Weblate (Turkish)
Currently translated at 18.1% (4 of 22 strings)

Translated using Weblate (Turkish)

Currently translated at 0.2% (2 of 788 strings)

Translated using Weblate (Turkish)

Currently translated at 93.1% (54 of 58 strings)

Translated using Weblate (Turkish)

Currently translated at 0.2% (1 of 467 strings)

Translated using Weblate (Turkish)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Turkish)

Currently translated at 98.3% (120 of 122 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Serhat Karaman <serhatkaramanworkmail@gmail.com>
Co-authored-by: m.a.tecik <matmakif38@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/tr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-global/tr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-validation/tr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/tr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/tr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/tr/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/Config - Global
Translation: Frigate NVR/Config - Validation
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
2026-04-04 08:08:35 -05:00
Josh Hawkins
e24eb676a9
GenAI tweaks (#22756)
* add DictAsYamlField for genai provider and runtime options

* regenerate config translations

* chat tweaks

- add page title
- scroll if near bottom
- add tool call group that dynamically updates as tool calls are made
- add bouncing loading indicator and other UI polish

* tool call grouping
2026-04-04 06:54:51 -06:00
Nicolas Mowen
9cb76d0bd9
Refactor genai (#22752)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* Switch to a feature-based roles so it is easier to choose models for different tasks

* Fallback and try llama-swap format

* List models supported by provider

* Cleanup

* Add frontend

* Improve model loading

* Make it possible to update genai without restarting

* Cleanup

* Cleanup

* Mypy
2026-04-03 17:13:52 -06:00
Josh Hawkins
bb77a01779
Settings tweaks (#22750)
* add ability to order subfields with dot notation

* put review genai enabled at the top of the genai subsection

* fix genai summary title truncation issue in detail stream
2026-04-03 17:46:43 -05:00
Nicolas Mowen
68dfb157ea
Auto llama.cpp context (#22737)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* Add model probing

* Include aliases

* Pull correctly

* Correctly query specific model props

* Debug log

* Update model list
2026-04-02 20:13:34 -05:00
Josh Hawkins
520d9eeb7f
remove direct go2rtc API proxy in favor of frigate API endpoints (#22735)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-02 11:15:27 -06:00
Josh Hawkins
5059311c9d
Mask/zone editor fixes (#22732)
* add guards to reject missing sub commands

* mask/zone bugfixes

- fix websocket crash when creating a new mask or zone before a name is assigned
- fix deleted masks and zones not disappearing from the list until navigating away
- fix deleting profile override not reverting to the base mask in the list
- fix inertia defaulting to nan

* disable save button on invalid form state

* fix validation for speed estimation

* ensure polygon is closed before allowing save

* require all masks and zones to be on the base config

* clarify dialog message and tooltip when removing an override

* clarify docs
2026-04-02 08:15:51 -06:00
438 changed files with 28025 additions and 2648 deletions

View File

@ -16,7 +16,7 @@ jobs:
uses: actions/github-script@v7
with:
script: |
const maintainers = ['blakeblackshear', 'NickM-27', 'hawkeye217', 'dependabot[bot]'];
const maintainers = ['blakeblackshear', 'NickM-27', 'hawkeye217', 'dependabot[bot]', 'weblate'];
const author = context.payload.pull_request.user.login;
if (maintainers.includes(author)) {

View File

@ -50,6 +50,37 @@ jobs:
# run: npm run test
# working-directory: ./web
web_e2e:
name: Web - E2E Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
with:
persist-credentials: false
- uses: actions/setup-node@v6
with:
node-version: 20.x
- run: npm install
working-directory: ./web
- name: Install Playwright Chromium
run: npx playwright install chromium --with-deps
working-directory: ./web
- name: Build web for E2E
run: npm run e2e:build
working-directory: ./web
- name: Run E2E tests
run: npm run e2e
working-directory: ./web
- name: Upload test artifacts
uses: actions/upload-artifact@v4
if: failure()
with:
name: playwright-report
path: |
web/test-results/
web/playwright-report/
retention-days: 7
python_checks:
runs-on: ubuntu-latest
name: Python Checks

View File

@ -14,6 +14,8 @@ services:
dockerfile: docker/main/Dockerfile
# Use target devcontainer-trt for TensorRT dev
target: devcontainer
cache_from:
- ghcr.io/blakeblackshear/frigate:cache-amd64
## Uncomment this block for nvidia gpu support
# deploy:
# resources:

View File

@ -52,6 +52,14 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
--mount=type=cache,target=/root/.ccache \
/deps/build_sqlite_vec.sh
# Build intel-media-driver from source against bookworm's system libva so it
# works with Debian 12's glibc/libstdc++ (pre-built noble/trixie packages
# require glibc 2.38 which is not available on bookworm).
FROM base AS intel-media-driver
ARG DEBIAN_FRONTEND
RUN --mount=type=bind,source=docker/main/build_intel_media_driver.sh,target=/deps/build_intel_media_driver.sh \
/deps/build_intel_media_driver.sh
FROM scratch AS go2rtc
ARG TARGETARCH
WORKDIR /rootfs/usr/local/go2rtc/bin
@ -200,6 +208,7 @@ RUN --mount=type=bind,source=docker/main/install_hailort.sh,target=/deps/install
FROM scratch AS deps-rootfs
COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
COPY --from=sqlite-vec /usr/local/lib/ /usr/local/lib/
COPY --from=intel-media-driver /rootfs/ /
COPY --from=go2rtc /rootfs/ /
COPY --from=libusb-build /usr/local/lib /usr/local/lib
COPY --from=tempio /rootfs/ /

View File

@ -0,0 +1,48 @@
#!/bin/bash
# Build the Intel media (iHD) VA-API driver and its gmmlib dependency from
# source, installing the result under /rootfs so a later Dockerfile stage
# can `COPY --from` it into the final image.
set -euxo pipefail
# Intel media driver is x86_64-only. Create empty rootfs on other arches so
# the downstream COPY --from has a valid source.
if [ "$(uname -m)" != "x86_64" ]; then
mkdir -p /rootfs
exit 0
fi
# Pinned upstream release tags so the build is reproducible.
MEDIA_DRIVER_VERSION="intel-media-25.2.6"
GMMLIB_VERSION="intel-gmmlib-22.7.2"
# Toolchain and download prerequisites.
apt-get -qq update
apt-get -qq install -y wget gnupg ca-certificates cmake g++ make pkg-config
# Use Intel's jammy repo for newer libva-dev (2.22) which provides the
# VVC/VVC-decode headers required by media-driver 25.x
wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" > /etc/apt/sources.list.d/intel-gpu-jammy.list
apt-get -qq update
apt-get -qq install -y libva-dev
# Build gmmlib (required by media-driver)
wget -qO gmmlib.tar.gz "https://github.com/intel/gmmlib/archive/refs/tags/${GMMLIB_VERSION}.tar.gz"
mkdir /tmp/gmmlib
tar -xf gmmlib.tar.gz -C /tmp/gmmlib --strip-components 1
cmake -S /tmp/gmmlib -B /tmp/gmmlib/build -DCMAKE_BUILD_TYPE=Release
make -C /tmp/gmmlib/build -j"$(nproc)"
# Install gmmlib into the build environment itself (no DESTDIR) so the
# media-driver configure step below can locate it.
make -C /tmp/gmmlib/build install
# Build intel-media-driver
wget -qO media-driver.tar.gz "https://github.com/intel/media-driver/archive/refs/tags/${MEDIA_DRIVER_VERSION}.tar.gz"
mkdir /tmp/media-driver
tar -xf media-driver.tar.gz -C /tmp/media-driver --strip-components 1
# -Wno-error: presumably tolerates warnings that upstream's -Werror would
# otherwise turn into build failures on this toolchain — TODO confirm.
cmake -S /tmp/media-driver -B /tmp/media-driver/build \
-DCMAKE_BUILD_TYPE=Release \
-DENABLE_KERNELS=ON \
-DENABLE_NONFREE_KERNELS=ON \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=/usr/lib/x86_64-linux-gnu \
-DCMAKE_C_FLAGS="-Wno-error" \
-DCMAKE_CXX_FLAGS="-Wno-error"
make -C /tmp/media-driver/build -j"$(nproc)"
# Install driver to rootfs for COPY --from
make -C /tmp/media-driver/build install DESTDIR=/rootfs

View File

@ -87,38 +87,47 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
# intel packages use zst compression so we need to update dpkg
apt-get install -y dpkg
# use intel apt intel packages
# use intel apt repo for libmfx1 (legacy QSV, pre-Gen12)
wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" | tee /etc/apt/sources.list.d/intel-gpu-jammy.list
apt-get -qq update
apt-get -qq install --no-install-recommends --no-install-suggests -y \
intel-media-va-driver-non-free libmfx1 libmfxgen1 libvpl2
# intel-media-va-driver-non-free is built from source in the
# intel-media-driver Dockerfile stage for Battlemage (Xe2) support
apt-get -qq install --no-install-recommends --no-install-suggests -y \
libmfx1
rm -f /usr/share/keyrings/intel-graphics.gpg
rm -f /etc/apt/sources.list.d/intel-gpu-jammy.list
# upgrade libva2, oneVPL runtime, and libvpl2 from trixie for Battlemage support
echo "deb http://deb.debian.org/debian trixie main" > /etc/apt/sources.list.d/trixie.list
apt-get -qq update
apt-get -qq install -y -t trixie libva2 libva-drm2 libzstd1
apt-get -qq install -y -t trixie libmfx-gen1.2 libvpl2
rm -f /etc/apt/sources.list.d/trixie.list
apt-get -qq update
apt-get -qq install -y ocl-icd-libopencl1
# install libtbb12 for NPU support
apt-get -qq install -y libtbb12
rm -f /usr/share/keyrings/intel-graphics.gpg
rm -f /etc/apt/sources.list.d/intel-gpu-jammy.list
# install legacy and standard intel icd and level-zero-gpu
# install legacy and standard intel compute packages
# see https://github.com/intel/compute-runtime/blob/master/LEGACY_PLATFORMS.md for more info
# needed core package
wget https://github.com/intel/compute-runtime/releases/download/25.13.33276.19/libigdgmm12_22.7.0_amd64.deb
dpkg -i libigdgmm12_22.7.0_amd64.deb
rm libigdgmm12_22.7.0_amd64.deb
wget https://github.com/intel/compute-runtime/releases/download/26.14.37833.4/libigdgmm12_22.9.0_amd64.deb
dpkg -i libigdgmm12_22.9.0_amd64.deb
rm libigdgmm12_22.9.0_amd64.deb
# legacy packages
# legacy compute-runtime packages
wget https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-opencl-icd-legacy1_24.35.30872.36_amd64.deb
wget https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-level-zero-gpu-legacy1_1.5.30872.36_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.24/intel-igc-opencl_1.0.17537.24_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.24/intel-igc-core_1.0.17537.24_amd64.deb
# standard packages
wget https://github.com/intel/compute-runtime/releases/download/25.13.33276.19/intel-opencl-icd_25.13.33276.19_amd64.deb
wget https://github.com/intel/compute-runtime/releases/download/25.13.33276.19/intel-level-zero-gpu_1.6.33276.19_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/v2.10.10/intel-igc-opencl-2_2.10.10+18926_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/v2.10.10/intel-igc-core-2_2.10.10+18926_amd64.deb
# standard compute-runtime packages
wget https://github.com/intel/compute-runtime/releases/download/26.14.37833.4/intel-opencl-icd_26.14.37833.4-0_amd64.deb
wget https://github.com/intel/compute-runtime/releases/download/26.14.37833.4/libze-intel-gpu1_26.14.37833.4-0_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/v2.32.7/intel-igc-opencl-2_2.32.7+21184_amd64.deb
wget https://github.com/intel/intel-graphics-compiler/releases/download/v2.32.7/intel-igc-core-2_2.32.7+21184_amd64.deb
# npu packages
wget https://github.com/oneapi-src/level-zero/releases/download/v1.28.2/level-zero_1.28.2+u22.04_amd64.deb
wget https://github.com/intel/linux-npu-driver/releases/download/v1.19.0/intel-driver-compiler-npu_1.19.0.20250707-16111289554_ubuntu22.04_amd64.deb
@ -128,6 +137,10 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
dpkg -i *.deb
rm *.deb
apt-get -qq install -f -y
# Battlemage uses the xe kernel driver, but the VA-API driver is still iHD.
# The oneVPL runtime may look for a driver named after the kernel module.
ln -sf /usr/lib/x86_64-linux-gnu/dri/iHD_drv_video.so /usr/lib/x86_64-linux-gnu/dri/xe_drv_video.so
fi
if [[ "${TARGETARCH}" == "arm64" ]]; then

View File

@ -11,7 +11,7 @@ joserfc == 1.2.*
cryptography == 44.0.*
pathvalidate == 3.3.*
markupsafe == 3.0.*
python-multipart == 0.0.20
python-multipart == 0.0.26
# Classification Model Training
tensorflow == 2.19.* ; platform_machine == 'aarch64'
tensorflow-cpu == 2.19.* ; platform_machine == 'x86_64'
@ -42,7 +42,7 @@ opencv-python-headless == 4.11.0.*
opencv-contrib-python == 4.11.0.*
scipy == 1.16.*
# OpenVino & ONNX
openvino == 2025.3.*
openvino == 2025.4.*
onnxruntime == 1.22.*
# Embeddings
transformers == 4.45.*

View File

@ -9,6 +9,7 @@ from typing import Any
from ruamel.yaml import YAML
sys.path.insert(0, "/opt/frigate")
from frigate.config.env import substitute_frigate_vars
from frigate.const import (
BIRDSEYE_PIPE,
DEFAULT_FFMPEG_VERSION,
@ -47,14 +48,6 @@ ALLOW_ARBITRARY_EXEC = allow_arbitrary_exec is not None and str(
allow_arbitrary_exec
).lower() in ("true", "1", "yes")
FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith("FRIGATE_")}
# read docker secret files as env vars too
if os.path.isdir("/run/secrets"):
for secret_file in os.listdir("/run/secrets"):
if secret_file.startswith("FRIGATE_"):
FRIGATE_ENV_VARS[secret_file] = (
Path(os.path.join("/run/secrets", secret_file)).read_text().strip()
)
config_file = find_config_file()
@ -103,13 +96,13 @@ if go2rtc_config["webrtc"].get("candidates") is None:
go2rtc_config["webrtc"]["candidates"] = default_candidates
if go2rtc_config.get("rtsp", {}).get("username") is not None:
go2rtc_config["rtsp"]["username"] = go2rtc_config["rtsp"]["username"].format(
**FRIGATE_ENV_VARS
go2rtc_config["rtsp"]["username"] = substitute_frigate_vars(
go2rtc_config["rtsp"]["username"]
)
if go2rtc_config.get("rtsp", {}).get("password") is not None:
go2rtc_config["rtsp"]["password"] = go2rtc_config["rtsp"]["password"].format(
**FRIGATE_ENV_VARS
go2rtc_config["rtsp"]["password"] = substitute_frigate_vars(
go2rtc_config["rtsp"]["password"]
)
# ensure ffmpeg path is set correctly
@ -145,7 +138,7 @@ for name in list(go2rtc_config.get("streams", {})):
if isinstance(stream, str):
try:
formatted_stream = stream.format(**FRIGATE_ENV_VARS)
formatted_stream = substitute_frigate_vars(stream)
if not ALLOW_ARBITRARY_EXEC and is_restricted_source(formatted_stream):
print(
f"[ERROR] Stream '{name}' uses a restricted source (echo/expr/exec) which is disabled by default for security. "
@ -164,7 +157,7 @@ for name in list(go2rtc_config.get("streams", {})):
filtered_streams = []
for i, stream_item in enumerate(stream):
try:
formatted_stream = stream_item.format(**FRIGATE_ENV_VARS)
formatted_stream = substitute_frigate_vars(stream_item)
if not ALLOW_ARBITRARY_EXEC and is_restricted_source(formatted_stream):
print(
f"[ERROR] Stream '{name}' item {i + 1} uses a restricted source (echo/expr/exec) which is disabled by default for security. "

View File

@ -227,16 +227,6 @@ http {
include proxy.conf;
}
# frontend uses this to fetch the version
location /api/go2rtc/api {
include auth_request.conf;
limit_except GET {
deny all;
}
proxy_pass http://go2rtc/api;
include proxy.conf;
}
# integration uses this to add webrtc candidate
location /api/go2rtc/webrtc {
include auth_request.conf;

View File

@ -119,6 +119,12 @@ audio:
Frigate supports fully local audio transcription using either `sherpa-onnx` or OpenAI's open-source Whisper models via `faster-whisper`. The goal of this feature is to support Semantic Search for `speech` audio events. Frigate is not intended to act as a continuous, fully-automatic speech transcription service — automatically transcribing all speech (or queuing many audio events for transcription) requires substantial CPU (or GPU) resources and is impractical on most systems. For this reason, transcriptions for events are initiated manually from the UI or the API rather than being run continuously in the background.
:::info
Audio transcription requires a one-time internet connection to download the Whisper or Sherpa-ONNX model on first use. Once cached, transcription runs fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
Transcription accuracy also depends heavily on the quality of your camera's microphone and recording conditions. Many cameras use inexpensive microphones, and distance to the speaker, low audio bitrate, or background noise can significantly reduce transcription quality. If you need higher accuracy, more robust long-running queues, or large-scale automatic transcription, consider using the HTTP API in combination with an automation platform and a cloud transcription service.
#### Configuration

View File

@ -9,6 +9,12 @@ import NavPath from "@site/src/components/NavPath";
Bird classification identifies known birds using a quantized Tensorflow model. When a known bird is recognized, its common name will be added as a `sub_label`. This information is included in the UI, filters, as well as in notifications.
:::info
Bird classification requires a one-time internet connection to download the classification model and label map from GitHub. Once cached, models work fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
## Minimum System Requirements
Bird classification runs a lightweight tflite model on the CPU; system requirements are not significantly different from those for running Frigate itself.

View File

@ -9,6 +9,12 @@ import NavPath from "@site/src/components/NavPath";
Object classification allows you to train a custom MobileNetV2 classification model to run on tracked objects (persons, cars, animals, etc.) to identify a finer category or attribute for that object. Classification results are visible in the Tracked Object Details pane in Explore, through the `frigate/tracked_object_details` MQTT topic, in Home Assistant sensors via the official Frigate integration, or through the event endpoints in the HTTP API.
:::info
Training a custom object classification model requires a one-time internet connection to download MobileNetV2 base weights. Once trained, the model runs fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
## Minimum System Requirements
Object classification models are lightweight and run very fast on CPU.
@ -158,7 +164,7 @@ Enable debug logs for classification models by adding `frigate.data_processing.r
Navigate to <NavPath path="Settings > System > Logging" />.
- Set **Logging level** to `debug`
- Set **Per-process log level > Frigate.Data Processing.Real Time.Custom Classification** to `debug` for verbose classification logging
- Set **Per-process log level > `frigate.data_processing.real_time.custom_classification`** to `debug` for verbose classification logging
</TabItem>
<TabItem value="yaml">

View File

@ -9,6 +9,12 @@ import NavPath from "@site/src/components/NavPath";
State classification allows you to train a custom MobileNetV2 classification model on a fixed region of your camera frame(s) to determine a current state. The model can be configured to run on a schedule and/or when motion is detected in that region. Classification results are available through the `frigate/<camera_name>/classification/<model_name>` MQTT topic and in Home Assistant sensors via the official Frigate integration.
:::info
Training a custom state classification model requires a one-time internet connection to download MobileNetV2 base weights. Once trained, the model runs fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
## Minimum System Requirements
State classification models are lightweight and run very fast on CPU.

View File

@ -9,6 +9,12 @@ import NavPath from "@site/src/components/NavPath";
Face recognition identifies known individuals by matching detected faces with previously learned facial data. When a known `person` is recognized, their name will be added as a `sub_label`. This information is included in the UI, filters, as well as in notifications.
:::info
Face recognition requires a one-time internet connection to download detection and embedding models from GitHub. Once cached, models work fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
## Model Requirements
### Face Detection

View File

@ -29,11 +29,11 @@ You must use a vision-capable model with Frigate. The following models are recom
| Model | Notes |
| ------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `qwen3-vl` | Strong visual and situational understanding, strong ability to identify smaller objects and interactions with object. |
| `qwen3-vl` | Strong visual and situational understanding, enhanced ability to identify smaller objects and interactions with objects. |
| `qwen3.5` | Strong situational understanding, but missing DeepStack from qwen3-vl leading to worse performance for identifying objects in people's hand and other small details. |
| `gemma4` | Strong situational understanding, sometimes resorts to more vague terms like 'interacts' instead of assigning a specific action. |
| `Intern3.5VL` | Relatively fast with good vision comprehension |
| `gemma3` | Slower model with good vision and temporal understanding |
| `qwen2.5-vl` | Fast but capable model with good vision comprehension |
:::info
@ -193,6 +193,12 @@ To use a different OpenAI-compatible API endpoint, set the `OPENAI_BASE_URL` env
Cloud providers run on remote infrastructure and require an API key for authentication. These services handle all model inference on their servers.
:::info
Cloud Generative AI providers require an active internet connection to send images and prompts for processing. Local providers like llama.cpp and Ollama (with local models) do not require internet. See [Network Requirements](/frigate/network_requirements#generative-ai) for details.
:::
### Ollama Cloud
Ollama also supports [cloud models](https://ollama.com/cloud), where your local Ollama instance handles requests from Frigate, but model inference is performed in the cloud. Set up Ollama locally, sign in with your Ollama account, and specify the cloud model name in your Frigate config. For more details, see the Ollama cloud model [docs](https://docs.ollama.com/cloud).

View File

@ -60,12 +60,13 @@ Frigate can utilize most Intel integrated GPUs and Arc GPUs to accelerate video
**Recommended hwaccel Preset**
| CPU Generation | Intel Driver | Recommended Preset | Notes |
| -------------- | ------------ | ------------------- | ------------------------------------------- |
| ------------------ | ------------ | ------------------- | ------------------------------------------- |
| gen1 - gen5 | i965 | preset-vaapi | qsv is not supported, may not support H.265 |
| gen6 - gen7 | iHD | preset-vaapi | qsv is not supported |
| gen8 - gen12 | iHD | preset-vaapi | preset-intel-qsv-\* can also be used |
| gen13+ | iHD / Xe | preset-intel-qsv-\* | |
| Intel Arc GPU | iHD / Xe | preset-intel-qsv-\* | |
| Intel Arc A-series | iHD / Xe | preset-intel-qsv-\* | |
| Intel Arc B-series | iHD / Xe | preset-intel-qsv-\* | Requires host kernel 6.12+ |
:::

View File

@ -11,6 +11,12 @@ Frigate can recognize license plates on vehicles and automatically add the detec
LPR works best when the license plate is clearly visible to the camera. For moving vehicles, Frigate continuously refines the recognition process, keeping the most confident result. When a vehicle becomes stationary, LPR continues to run for a short time afterward to attempt recognition.
:::info
License plate recognition requires a one-time internet connection to download OCR and detection models from GitHub. Once cached, models work fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
When a plate is recognized, the details are:
- Added as a `sub_label` (if [known](#matching)) or the `recognized_license_plate` field (if unknown) to a tracked object.

View File

@ -21,6 +21,12 @@ The jsmpeg live view will use more browser and client GPU resources. Using go2rt
| mse | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only. This is Frigate's default when go2rtc is configured. |
| webrtc | native | native | yes (depends on audio codec) | yes | Requires extra configuration. Frigate attempts to use WebRTC when MSE fails or when using a camera's two-way talk feature. |
:::info
WebRTC may use an external STUN server for NAT traversal. MSE and HLS streaming do not require any internet access. See [Network Requirements](/frigate/network_requirements#webrtc-stun) for details.
:::
### Camera Settings Recommendations
If you are using go2rtc, you should adjust the following settings in your camera's firmware for the best experience with Live view:

View File

@ -11,6 +11,12 @@ import NavPath from "@site/src/components/NavPath";
Frigate offers native notifications using the [WebPush Protocol](https://web.dev/articles/push-notifications-web-push-protocol) which uses the [VAPID spec](https://tools.ietf.org/html/draft-thomson-webpush-vapid) to deliver notifications to web apps using encryption.
:::info
Push notifications require internet access from the Frigate server to the browser vendor's push service (e.g., Google FCM, Mozilla autopush). See [Network Requirements](/frigate/network_requirements#push-notifications) for details.
:::
## Setting up Notifications
In order to use notifications the following requirements must be met:

View File

@ -288,6 +288,12 @@ This detector is available for use with both Hailo-8 and Hailo-8L AI Acceleratio
See the [installation docs](../frigate/installation.md#hailo-8) for information on configuring the Hailo hardware.
:::info
If no custom model is provided, the Hailo detector downloads a default model from the Hailo Model Zoo on first startup. Once cached, the model works fully offline. See [Network Requirements](/frigate/network_requirements#hardware-specific-detector-models) for details.
:::
### Configuration
When configuring the Hailo detector, you have two options to specify the model: a local **path** or a **URL**.
@ -1793,6 +1799,12 @@ Hardware accelerated object detection is supported on the following SoCs:
This implementation uses the [Rockchip's RKNN-Toolkit2](https://github.com/airockchip/rknn-toolkit2/), version v2.3.2.
:::info
If no custom model is provided, the RKNN detector downloads a default model from GitHub on first startup. Once cached, the model works fully offline. See [Network Requirements](/frigate/network_requirements#hardware-specific-detector-models) for details.
:::
:::tip
When using many cameras one detector may not be enough to keep up. Multiple detectors can be defined assuming NPU resources are available. An example configuration would be:
@ -2176,6 +2188,12 @@ This implementation uses the [AXera Pulsar2 Toolchain](https://huggingface.co/AX
See the [installation docs](../frigate/installation.md#axera) for information on configuring the AXEngine hardware.
:::info
The AXEngine detector downloads its default model from HuggingFace on first startup. Once cached, the model works fully offline. See [Network Requirements](/frigate/network_requirements#hardware-specific-detector-models) for details.
:::
### Configuration
When configuring the AXEngine detector, you have to specify the model name.

View File

@ -24,6 +24,12 @@ For object filters, any single detection below `min_score` will be ignored as a
In frame 2, the score is below the `min_score` value, so Frigate ignores it and it becomes a 0.0. The computed score is the median of the score history (padding to at least 3 values), and only when that computed score crosses the `threshold` is the object marked as a true positive. That happens in frame 4 in the example.
The **top score** is the highest computed score the tracked object has ever reached during its lifetime. Because the computed score rises and falls as new frames come in, the top score can be thought of as the peak confidence Frigate had in the object. In Frigate's UI (such as the Tracking Details pane in Explore), you may see all three values:
- **Score** — the raw detector score for that single frame.
- **Computed Score** — the median of the most recent score history at that moment. This is the value compared against `threshold`.
- **Top Score** — the highest computed score reached so far for the tracked object.
### Minimum Score
Any detection below `min_score` will be immediately thrown out and never tracked because it is considered a false positive. If `min_score` is too low then false positives may be detected and tracked which can confuse the object tracker and may lead to wasted resources. If `min_score` is too high then lower scoring true positives like objects that are further away or partially occluded may be thrown out which can also confuse the tracker and cause valid tracked objects to be lost or disjointed.

View File

@ -20,7 +20,7 @@ When a profile is activated, Frigate merges each camera's profile overrides on t
:::info
Profile changes are applied in-memory and take effect immediately — no restart is required. The active profile is persisted across Frigate restarts (stored in the `/config/.active_profile` file).
Profile changes are applied in-memory and take effect immediately — no restart is required. The active profile is persisted across Frigate restarts (stored in the `/config/.profiles` file).
:::
@ -120,7 +120,7 @@ The following camera configuration sections can be overridden in a profile:
:::note
Only the fields you explicitly set in a profile override are applied. All other fields retain their base configuration values. For zones, profile zones are merged with the camera's base zones — any zone defined in the profile will override or add to the base zones.
Only the fields you explicitly set in a profile override are applied. All other fields retain their base configuration values. For masks and zones, profile zones **override** the camera's base masks and zones. If configuring profiles via YAML, you should not define masks or zones in profiles that are not defined in the base config.
:::
@ -130,14 +130,14 @@ Profiles can be activated and deactivated from the Frigate UI. Open the Settings
## Example: Home / Away Setup
A common use case is having different detection and notification settings based on whether you are home or away.
A common use case is having different detection and notification settings based on whether you are home or away. This example below is for a system with two cameras, `front_door` and `indoor_cam`.
<ConfigTabs>
<TabItem value="ui">
1. Navigate to <NavPath path="Settings > Camera configuration > Profiles" /> and create two profiles: **Home** and **Away**.
2. For the **front_door** camera, configure the **Away** profile to enable notifications and set alert labels to `person` and `car`. Configure the **Home** profile to disable notifications.
3. For the **indoor_cam** camera, configure the **Away** profile to enable the camera, detection, and recording. Configure the **Home** profile to disable the camera entirely for privacy.
2. From the Camera configuration section in Settings, choose the **front_door** camera, and select the **Away** profile from the profile dropdown. Then, enable notifications from the Notifications pane, and set alert labels to `person` and `car` from the Review pane. Then, from the profile dropdown choose the **Home** profile, and navigate to Notifications to disable notifications.
3. For the **indoor_cam** camera, perform similar steps - configure the **Away** profile to enable the camera, detection, and recording. Configure the **Home** profile to disable the camera entirely for privacy.
4. Activate the desired profile from <NavPath path="Settings > Camera configuration > Profiles" /> or from the **Profiles** option in Frigate's main menu.
</TabItem>

View File

@ -123,6 +123,76 @@ record:
</TabItem>
</ConfigTabs>
## Pre-capture and Post-capture
The `pre_capture` and `post_capture` settings control how many seconds of video are included before and after an alert or detection. These can be configured independently for alerts and detections, and can be set globally or overridden per camera.
<ConfigTabs>
<TabItem value="ui">
Navigate to <NavPath path="Settings > Global configuration > Recording" /> for global defaults, or <NavPath path="Settings > Camera configuration > (select camera) > Recording" /> to override for a specific camera.
| Field | Description |
| ---------------------------------------------- | ---------------------------------------------------- |
| **Alert retention > Pre-capture seconds** | Seconds of video to include before an alert event |
| **Alert retention > Post-capture seconds** | Seconds of video to include after an alert event |
| **Detection retention > Pre-capture seconds** | Seconds of video to include before a detection event |
| **Detection retention > Post-capture seconds** | Seconds of video to include after a detection event |
</TabItem>
<TabItem value="yaml">
```yaml
record:
enabled: True
alerts:
pre_capture: 5 # seconds before the alert to include
post_capture: 5 # seconds after the alert to include
detections:
pre_capture: 5 # seconds before the detection to include
post_capture: 5 # seconds after the detection to include
```
</TabItem>
</ConfigTabs>
- **Default**: 5 seconds for both pre and post capture.
- **Pre-capture maximum**: 60 seconds.
- These settings apply per review category (alerts and detections), not per object type.
### How pre/post capture interacts with retention mode
The `pre_capture` and `post_capture` values define the **time window** around a review item, but only recording segments that also match the configured **retention mode** are actually kept on disk.
- **`mode: all`** — Retains every segment within the capture window, regardless of whether motion was detected.
- **`mode: motion`** (default) — Only retains segments within the capture window that contain motion. This includes segments with active tracked objects, since object motion implies motion. Segments without any motion are discarded even if they fall within the pre/post capture range.
- **`mode: active_objects`** — Only retains segments within the capture window where tracked objects were actively moving. Segments with general motion but no active objects are discarded.
This means that with the default `motion` mode, you may see less footage than the configured pre/post capture duration if parts of the capture window had no motion.
To guarantee the full pre/post capture duration is always retained:
```yaml
record:
enabled: True
alerts:
pre_capture: 10
post_capture: 10
retain:
days: 30
mode: all # retains all segments within the capture window
```
:::note
Because recording segments are written in 10 second chunks, pre-capture timing depends on segment boundaries. The actual pre-capture footage may be slightly shorter or longer than the exact configured value.
:::
### Where to view pre/post capture footage
Pre and post capture footage is included in the **recording timeline**, visible in the History view. Note that pre/post capture settings only affect which recording segments are **retained on disk** — they do not change the start and end points shown in the UI. The History view will still center on the review item's actual time range, but you can scrub backward and forward through the retained pre/post capture footage on the timeline. The Explore view shows object-specific clips that are trimmed to when the tracked object was actually visible, so pre/post capture time will not be reflected there.
## Will Frigate delete old recordings if my storage runs out?
As of Frigate 0.12 if there is less than an hour left of storage, the oldest 2 hours of recordings will be deleted.
@ -211,31 +281,52 @@ Using Frigate UI, Home Assistant, or MQTT, cameras can be automated to only reco
Footage can be exported from Frigate by right-clicking (desktop) or long pressing (mobile) on a review item in the Review pane or by clicking the Export button in the History view. Exported footage is then organized and searchable through the Export view, accessible from the main navigation bar.
### Time-lapse export
### Custom export with FFmpeg arguments
Time lapse exporting is available only via the [HTTP API](../integrations/api/export-recording-export-camera-name-start-start-time-end-end-time-post.api.mdx).
For advanced use cases, the [custom export HTTP API](../integrations/api/export-recording-custom-export-custom-camera-name-start-start-time-end-end-time-post.api.mdx) lets you pass custom FFmpeg arguments when exporting a recording:
When exporting a time-lapse the default speed-up is 25x with 30 FPS. This means that every 25 seconds of (real-time) recording is condensed into 1 second of time-lapse video (always without audio) with a smoothness of 30 FPS.
To configure the speed-up factor, the frame rate and further custom settings, use the `timelapse_args` parameter. The below configuration example would change the time-lapse speed to 60x (for fitting 1 hour of recording into 1 minute of time-lapse) with 25 FPS:
```yaml {3-4}
record:
enabled: True
export:
timelapse_args: "-vf setpts=PTS/60 -r 25"
```
POST /export/custom/{camera_name}/start/{start_time}/end/{end_time}
```
:::tip
The request body accepts `ffmpeg_input_args` and `ffmpeg_output_args` to control encoding, frame rate, filters, and other FFmpeg options. If neither is provided, Frigate defaults to time-lapse output settings (25x speed, 30 FPS).
When using `hwaccel_args`, hardware encoding is used for timelapse generation. This setting can be overridden for a specific camera (e.g., when camera resolution exceeds hardware encoder limits) by setting the camera-level export `hwaccel_args` with the appropriate settings. Using an unrecognized value or empty string will fall back to software encoding (libx264).
The following example exports a time-lapse at 60x speed with 25 FPS:
```json
{
"name": "Front Door Time-lapse",
"ffmpeg_output_args": "-vf setpts=PTS/60 -r 25"
}
```
#### CPU fallback
If hardware acceleration is configured and the export fails (e.g., the GPU is unavailable), set `cpu_fallback: true` in the request body to automatically retry using software encoding.
```json
{
"name": "My Export",
"ffmpeg_output_args": "-c:v libx264 -crf 23",
"cpu_fallback": true
}
```
:::note
Non-admin users are restricted from using FFmpeg arguments that can access the filesystem (e.g., `-filter_complex`, file paths, and protocol references). Admin users have full control over FFmpeg arguments.
:::
:::tip
The encoder determines its own behavior so the resulting file size may be undesirably large.
To reduce the output file size, the ffmpeg parameter `-qp n` can be utilized (where `n` stands for the value of the quantization parameter). The value can be adjusted to get an acceptable tradeoff between quality and file size for the given scenario.
When `hwaccel_args` is configured, hardware encoding is used for exports. This can be overridden per camera (e.g., when camera resolution exceeds hardware encoder limits) by setting a camera-level `hwaccel_args`. Using an unrecognized value or empty string falls back to software encoding (libx264).
:::
:::tip
To reduce output file size, add the FFmpeg parameter `-qp n` to `ffmpeg_output_args` (where `n` is the quantization parameter). Adjust the value to balance quality and file size for your scenario.
:::

View File

@ -13,6 +13,12 @@ Frigate uses models from [Jina AI](https://huggingface.co/jinaai) to create and
Semantic Search is accessed via the _Explore_ view in the Frigate UI.
:::info
Semantic search requires a one-time internet connection to download embedding models from HuggingFace. Once cached, models work fully offline. See [Network Requirements](/frigate/network_requirements#one-time-model-downloads) for details.
:::
## Minimum System Requirements
Semantic Search works by running a large AI model locally on your system. Small or underpowered systems like a Raspberry Pi will not run Semantic Search reliably or at all.

View File

@ -146,17 +146,11 @@ A single Coral can handle many cameras using the default model and will be suffi
The OpenVINO detector type is able to run on:
- 6th Gen Intel Platforms and newer that have an iGPU
- x86 hosts with an Intel Arc GPU
- x86 hosts with an Intel Arc GPU (including Arc A-series and B-series Battlemage)
- Intel NPUs
- Most modern AMD CPUs (though this is officially not supported by Intel)
- x86 & Arm64 hosts via CPU (generally not recommended)
:::note
Intel B-series (Battlemage) GPUs are not officially supported with Frigate 0.17, though a user has [provided steps to rebuild the Frigate container](https://github.com/blakeblackshear/frigate/discussions/21257) with support for them.
:::
More information is available [in the detector docs](/configuration/object_detectors#openvino-detector)
Inference speeds vary greatly depending on the CPU or GPU used, some known examples of GPU inference times are below:

View File

@ -482,7 +482,8 @@ services:
- /dev/apex_0:/dev/apex_0 # Passes a PCIe Coral, follow driver instructions here https://github.com/jnicolson/gasket-builder
- /dev/video11:/dev/video11 # For Raspberry Pi 4B
- /dev/dri/renderD128:/dev/dri/renderD128 # AMD / Intel GPU, needs to be updated for your hardware
- /dev/accel:/dev/accel # Intel NPU
- /dev/kfd:/dev/kfd # AMD Kernel Fusion Driver for ROCm
- /dev/accel:/dev/accel # AMD / Intel NPU
volumes:
- /etc/localtime:/etc/localtime:ro
- /path/to/your/config:/config

View File

@ -0,0 +1,155 @@
---
id: network_requirements
title: Network Requirements
---
# Network Requirements
Frigate is designed to run locally and does not require a persistent internet connection for core functionality. However, certain features need internet access for initial setup or ongoing operation. This page describes what connects to the internet, when, and how to control it.
## How Frigate Uses the Internet
Frigate's internet usage falls into three categories:
1. **One-time model downloads** — ML models are downloaded the first time a feature is enabled, then cached locally. No internet is needed on subsequent startups.
2. **Optional cloud services** — Features like Frigate+ and Generative AI connect to external APIs only when explicitly configured.
3. **Build-time dependencies** — Components bundled into the Docker image during the build process. These require no internet at runtime.
:::tip
After initial setup, Frigate can run fully offline as long as all required models have been downloaded and no cloud-dependent features are enabled.
:::
## One-Time Model Downloads
The following models are downloaded automatically the first time their associated feature is enabled. Once cached in `/config/model_cache/`, they do not require internet again.
| Feature | Models Downloaded | Source |
| --------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------- | -------------------- |
| [Semantic search](/configuration/semantic_search) | Jina CLIP v1 or v2 (ONNX) + tokenizer | HuggingFace |
| [Face recognition](/configuration/face_recognition) | FaceNet, ArcFace, face detection model | GitHub |
| [License plate recognition](/configuration/license_plate_recognition) | PaddleOCR (detection, classification, recognition) + YOLOv9 plate detector | GitHub |
| [Bird classification](/configuration/bird_classification) | MobileNetV2 bird model + label map | GitHub |
| [Custom classification](/configuration/custom_classification/state_classification) (training) | MobileNetV2 ImageNet base weights (via Keras) | Google storage |
| [Audio transcription](/configuration/advanced) | Whisper or Sherpa-ONNX streaming model | HuggingFace / OpenAI |
### Hardware-Specific Detector Models
If you are using one of the following hardware detectors and have not provided your own model file, a default model will be downloaded on first startup:
| Detector | Model Downloaded | Source |
| ------------------------------------------------------------------ | -------------------- | ------------------------ |
| [Rockchip RKNN](/configuration/object_detectors#rockchip-platform) | RKNN detection model | GitHub |
| [Hailo 8 / 8L](/configuration/object_detectors#hailo-8) | YOLOv6n (.hef) | Hailo Model Zoo (AWS S3) |
| [AXERA AXEngine](/configuration/object_detectors) | Detection model | HuggingFace |
:::note
The default CPU, EdgeTPU, and OpenVINO object detection models are bundled into the Docker image and do not require any download at runtime.
:::
### Preventing Model Downloads
If you have already downloaded all required models and want to prevent Frigate from attempting any outbound connections to HuggingFace or the Transformers library, set the following environment variables on your Frigate container:
```yaml
environment:
HF_HUB_OFFLINE: "1"
TRANSFORMERS_OFFLINE: "1"
```
:::warning
Setting these variables without having the correct model files already cached in `/config/model_cache/` will cause failures. Only use these after a successful initial setup with internet access.
:::
### Mirror Support
If your Frigate instance has restricted internet access, you can point model downloads at internal mirrors using environment variables:
| Environment Variable | Default | Used By |
| ----------------------------------- | ----------------------------------- | --------------------------------------------- |
| `HF_ENDPOINT` | `https://huggingface.co` | Semantic search, Sherpa-ONNX, AXEngine models |
| `GITHUB_ENDPOINT` | `https://github.com` | Face recognition, LPR, RKNN models |
| `GITHUB_RAW_ENDPOINT` | `https://raw.githubusercontent.com` | Bird classification |
| `TF_KERAS_MOBILENET_V2_WEIGHTS_URL` | Google storage (Keras default) | Custom classification training |
## Optional Cloud Services
These features connect to external services during normal operation and require internet whenever they are active.
### Frigate+
When a Frigate+ API key is configured, Frigate communicates with `https://api.frigate.video` to download models, upload snapshots for training, submit annotations, and report false positives. Remove the API key to disable all Frigate+ network activity.
See [Frigate+](/integrations/plus) for details.
### Generative AI
When a Generative AI provider is configured, Frigate sends images and prompts to the configured provider for event descriptions, chat, and camera monitoring. Available providers:
| Provider | Internet Required |
| ------------- | ---------------------------------------------------------------- |
| OpenAI | Yes — connects to OpenAI API (or custom base URL) |
| Google Gemini | Yes — connects to Google Generative AI API |
| Azure OpenAI | Yes — connects to your Azure endpoint |
| Ollama | Depends — typically local (`localhost:11434`), but can be remote |
| llama.cpp | No — runs entirely locally |
Disable Generative AI by removing the `genai` configuration from your cameras. See [Generative AI](/configuration/genai/genai_config) for details.
### Version Check
Frigate checks GitHub for the latest release version on startup by querying `https://api.github.com`. This can be disabled:
```yaml
telemetry:
version_check: false
```
### Push Notifications
When [notifications](/configuration/notifications) are enabled and users have registered for push notifications in the web UI, Frigate sends push messages through the browser vendor's push service (e.g., Google FCM, Mozilla autopush). This requires internet access from the Frigate server to these push endpoints.
### MQTT
If an [MQTT broker](/integrations/mqtt) is configured, Frigate maintains a connection to the broker's host and port. This is typically a local network connection, but will require internet if you use a cloud-hosted MQTT broker.
### DeepStack / CodeProject.AI
When using the [DeepStack detector plugin](/configuration/object_detectors), Frigate sends images to the configured API endpoint for inference. This is typically local but depends on where the service is hosted.
## WebRTC (STUN)
For [WebRTC live streaming](/configuration/live), Frigate uses STUN for NAT traversal:
- **go2rtc** defaults to a local STUN listener (`stun:8555`) — no internet required.
- **The web UI's WebRTC player** includes a fallback to Google's public STUN server (`stun:stun.l.google.com:19302`), which requires internet.
## Home Assistant Supervisor
When running as a Home Assistant add-on, the go2rtc startup script queries the local Supervisor API (`http://supervisor/`) to discover the host IP address and WebRTC port. This is a local network call to the Home Assistant host, not an internet connection.
## What Does NOT Require Internet
- **Object detection** — CPU, EdgeTPU, OpenVINO, and other bundled detector models are included in the Docker image.
- **Recording and playback** — All video is stored and served locally.
- **Live streaming** — Camera streams are pulled over your local network. MSE and HLS streaming work without any external connections.
- **The web interface** — Fully self-contained with no external fonts, scripts, analytics, or CDN dependencies. All translations are bundled locally.
- **Custom classification inference** — After training, custom models run entirely locally.
- **Audio detection** — The YAMNet audio classification model is bundled in the Docker image.
## Running Frigate Offline
To run Frigate in an air-gapped or offline environment:
1. **Pre-download models** — Start Frigate with internet access once with all desired features enabled. Models will be cached in `/config/model_cache/`.
2. **Disable version check** — Set `telemetry.version_check: false` in your configuration.
3. **Block outbound model requests** — Set the `HF_HUB_OFFLINE=1` and `TRANSFORMERS_OFFLINE=1` environment variables to prevent HuggingFace and Transformers from attempting any network requests.
4. **Avoid cloud features** — Do not configure Frigate+, Generative AI providers that require internet, or cloud MQTT brokers.
5. **Use local model mirrors** — If limited internet is available, set the `HF_ENDPOINT`, `GITHUB_ENDPOINT`, and `GITHUB_RAW_ENDPOINT` environment variables to point to local mirrors.
After these steps, Frigate will operate with no outbound internet connections.

View File

@ -5,6 +5,12 @@ title: MQTT
These are the MQTT messages generated by Frigate. The default topic_prefix is `frigate`, but can be changed in the config file.
:::info
MQTT requires a network connection to your broker. This is typically local, but will require internet if using a cloud-hosted MQTT broker. See [Network Requirements](/frigate/network_requirements#mqtt) for details.
:::
## General Frigate Topics
### `frigate/available`

View File

@ -5,6 +5,12 @@ title: Frigate+
For more information about how to use Frigate+ to improve your model, see the [Frigate+ docs](/plus/).
:::info
Frigate+ requires an active internet connection to communicate with `https://api.frigate.video` for model downloads, image uploads, and annotations. See [Network Requirements](/frigate/network_requirements#frigate) for details.
:::
## Setup
### Create an account

View File

@ -17,6 +17,10 @@ Please use your own knowledge to assess and vet them before you install anything
The [Advanced Camera Card](https://card.camera/#/README) is a Home Assistant dashboard card with deep Frigate integration.
## [cctvQL](https://github.com/arunrajiah/cctvql)
[cctvQL](https://github.com/arunrajiah/cctvql) is a natural language query layer for Frigate and other CCTV systems. It connects to Frigate's REST API and MQTT broker to let you ask conversational questions about cameras and events (e.g. "Was there motion at the front door last night?"), with support for real-time event streaming, anomaly detection, PTZ control, alert rules, and a Home Assistant custom component.
## [Double Take](https://github.com/skrashevich/double-take)
[Double Take](https://github.com/skrashevich/double-take) provides a unified UI and API for processing and training images for facial recognition.

View File

@ -110,3 +110,17 @@ No. Frigate uses the TCP protocol to connect to your camera's RTSP URL. VLC auto
TCP ensures that all data packets arrive in the correct order. This is crucial for video recording, decoding, and stream processing, which is why Frigate enforces a TCP connection. UDP is faster but less reliable, as it does not guarantee packet delivery or order, and VLC does not have the same requirements as Frigate.
You can still configure Frigate to use UDP by using ffmpeg input args or the preset `preset-rtsp-udp`. See the [ffmpeg presets](/configuration/ffmpeg_presets) documentation.
### Frigate is slow to start up with a "probing detect stream" message in the logs
When `detect.width` and `detect.height` are not set, Frigate probes each camera's detect stream on startup (and when saving the config) to auto-detect its resolution. For RTSP streams Frigate probes with ffprobe and automatically retries over TCP if UDP doesn't respond, with a 5 second timeout per attempt. A camera that cannot be reached over either transport will add up to ~10 seconds to startup before Frigate falls through with default dimensions, which may show up as width `0` and height `0` in Camera Probe Info under System Metrics.
To skip the probe entirely and make startup instant, set `detect.width` and `detect.height` explicitly in your camera config:
```yaml
cameras:
my_camera:
detect:
width: 1280
height: 720
```

View File

@ -80,3 +80,85 @@ Some users found that mounting a drive via `fstab` with the `sync` option caused
#### Copy Times < 1 second
If the storage is working quickly then this error may be caused by CPU load on the machine being too high for Frigate to have the resources to keep up. Try temporarily shutting down other services to see if the issue improves.
## I see the message: WARNING : Too many unprocessed recording segments in cache for camera. This likely indicates an issue with the detect stream...
This warning means that the detect stream for the affected camera has fallen behind or stopped processing frames. Frigate's recording cache holds segments waiting to be analyzed by the detector — when more than 6 segments pile up without being processed, Frigate discards the oldest ones to prevent the cache from filling up.
:::warning
This error is a **symptom**, not the root cause. The actual cause is always logged **before** these messages start appearing. You must review the full logs from Frigate startup through the first occurrence of this warning to identify the real issue.
:::
### Step 1: Get the full logs
Collect complete Frigate logs from startup through the first occurrence of the error. Look for errors or warnings that appear **before** the "Too many unprocessed" messages begin — that is where the root cause will be found.
### Step 2: Check the cache directory
Exec into the Frigate container and inspect the recording cache:
```
docker exec -it frigate ls -la /tmp/cache
```
Each camera should have a small number of `.mp4` segment files. If one camera has significantly more files than others, that camera is the source of the problem. A problem with a single camera can cascade and cause all cameras to show this error.
### Step 3: Verify segment duration
Recording segments should be approximately 10 seconds long. Run `ffprobe` on segments in the cache to check:
```
docker exec -it frigate ffprobe -v error -show_entries format=duration -of default=noprint_wrappers=1 /tmp/cache/<camera>@<segment>.mp4
```
If segments are only ~1 second instead of ~10 seconds, the camera is sending corrupt timestamp data, causing segments to be split too frequently and filling the cache 10x faster than expected.
**Common causes of short segments:**
- **"Smart Codec" or "Smart+" enabled on the camera** — These features dynamically change encoding parameters mid-stream, which corrupts timestamps. Disable them in your camera's settings.
- **Changing codec, bitrate, or resolution mid-stream** — Any encoding changes during an active stream can cause unpredictable segment splitting.
- **Camera firmware bugs** — Check for firmware updates from your camera manufacturer.
### Step 4: Check for a stuck detector
If the detect stream is not processing frames, segments will accumulate. Common causes:
- **Detection resolution too high** — Use a substream for detection, not the full resolution main stream.
- **Detection FPS too high** — 5 fps is the recommended maximum for detection.
- **Model too large** — Use smaller model variants (e.g., YOLO `s` or `t` size, not `e` or `x`). Use 320x320 input size rather than 640x640 unless you have a powerful dedicated detector.
- **Virtualization** — Running Frigate in a VM (especially Proxmox) can cause the detector to hang or stall. This is a known issue with GPU/TPU passthrough in virtualized environments and is not something Frigate can fix. Running Frigate in Docker on bare metal is recommended.
### Step 5: Check for GPU hangs
On the host machine, check `dmesg` for GPU-related errors:
```
dmesg | grep -i -E "gpu|drm|reset|hang"
```
Messages like `trying reset from guc_exec_queue_timedout_job` or similar GPU reset/hang messages indicate a driver or hardware issue. Ensure your kernel and GPU drivers (especially Intel) are up to date.
### Step 6: Verify hardware acceleration configuration
An incorrect `hwaccel_args` preset can cause ffmpeg to fail silently or consume excessive CPU, starving the detector of resources.
- After upgrading Frigate, verify your preset matches your hardware (e.g., `preset-intel-qsv-h264` instead of the deprecated `preset-vaapi`).
- For h265 cameras, use the corresponding h265 preset (e.g., `preset-intel-qsv-h265`).
- Note that `hwaccel_args` are only relevant for the detect stream — Frigate does not decode the record stream.
### Step 7: Verify go2rtc stream configuration
Ensure that the ffmpeg source names in your go2rtc configuration match the correct camera stream. A misconfigured stream name (e.g., copying a config from one camera to another without updating the stream reference) will cause the wrong stream to be used or the stream to fail entirely.
### Step 8: Check system resources
If none of the above apply, the issue may be a general resource constraint. Monitor the following on your host:
- **CPU usage** — An overloaded CPU can prevent the detector from keeping up.
- **RAM and swap** — Excessive swapping dramatically slows all I/O operations.
- **Disk I/O** — Use `iotop` or `iostat` to check for saturation.
- **Storage space** — Verify you have free space on the Frigate storage volume (check the Storage page in the Frigate UI).
Try temporarily disabling resource-intensive features like `genai` and `face_recognition` to see if the issue resolves. This can help isolate whether the detector is being starved of resources.

View File

@ -10897,9 +10897,9 @@
"license": "MIT"
},
"node_modules/express/node_modules/path-to-regexp": {
"version": "0.1.12",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"version": "0.1.13",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.13.tgz",
"integrity": "sha512-A/AGNMFN3c8bOlvV9RreMdrv7jsmF9XIfDeCd87+I8RNg6s78BhJxMu69NEMHBSJFxKidViTEdruRwEk/WIKqA==",
"license": "MIT"
},
"node_modules/express/node_modules/range-parser": {

View File

@ -12,6 +12,7 @@ const sidebars: SidebarsConfig = {
"frigate/updating",
"frigate/camera_setup",
"frigate/video_pipeline",
"frigate/network_requirements",
"frigate/glossary",
],
Guides: [

View File

@ -2724,6 +2724,135 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
/exports/batch:
post:
tags:
- Export
summary: Start recording export batch
description: >-
Starts recording exports for a batch of items, each with its own camera
and time range. Optionally assigns them to a new or existing export case.
When neither export_case_id nor new_case_name is provided, exports are
added as uncategorized. Attaching to an existing case is admin-only.
operationId: export_recordings_batch_exports_batch_post
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/BatchExportBody"
responses:
"202":
description: Successful Response
content:
application/json:
schema:
$ref: "#/components/schemas/BatchExportResponse"
"400":
description: Bad Request
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"403":
description: Forbidden
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"404":
description: Not Found
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"503":
description: Service Unavailable
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"422":
description: Validation Error
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
/exports/delete:
post:
tags:
- Export
summary: Bulk delete exports
description: >-
Deletes one or more exports by ID. All IDs must exist and none can be
in-progress. Admin-only.
operationId: bulk_delete_exports_exports_delete_post
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ExportBulkDeleteBody"
responses:
"200":
description: Successful Response
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"400":
description: Bad Request - one or more exports are in-progress
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"404":
description: Not Found - one or more export IDs do not exist
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"422":
description: Validation Error
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
/exports/reassign:
post:
tags:
- Export
summary: Bulk reassign exports to a case
description: >-
Assigns or unassigns one or more exports to/from a case. All IDs must
exist. Pass export_case_id as null to unassign (move to uncategorized).
Admin-only.
operationId: bulk_reassign_exports_exports_reassign_post
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ExportBulkReassignBody"
responses:
"200":
description: Successful Response
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"404":
description: Not Found - one or more export IDs or the target case do not exist
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"422":
description: Validation Error
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
/cases:
get:
tags:
@ -2853,39 +2982,6 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
"/export/{export_id}/case":
patch:
tags:
- Export
summary: Assign export to case
description: "Assigns an export to a case, or unassigns it if export_case_id is null."
operationId: assign_export_case_export__export_id__case_patch
parameters:
- name: export_id
in: path
required: true
schema:
type: string
title: Export Id
requestBody:
required: true
content:
application/json:
schema:
$ref: "#/components/schemas/ExportCaseAssignBody"
responses:
"200":
description: Successful Response
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"422":
description: Validation Error
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
"/export/{camera_name}/start/{start_time}/end/{end_time}":
post:
tags:
@ -2973,32 +3069,6 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
"/export/{event_id}":
delete:
tags:
- Export
summary: Delete export
operationId: export_delete_export__event_id__delete
parameters:
- name: event_id
in: path
required: true
schema:
type: string
title: Event Id
responses:
"200":
description: Successful Response
content:
application/json:
schema:
$ref: "#/components/schemas/GenericResponse"
"422":
description: Validation Error
content:
application/json:
schema:
$ref: "#/components/schemas/HTTPValidationError"
"/export/custom/{camera_name}/start/{start_time}/end/{end_time}":
post:
tags:
@ -6501,6 +6571,149 @@ components:
required:
- recognizedLicensePlate
title: EventsLPRBody
BatchExportBody:
properties:
items:
items:
$ref: "#/components/schemas/BatchExportItem"
type: array
minItems: 1
maxItems: 50
title: Items
description: List of export items. Each item has its own camera and time range.
export_case_id:
anyOf:
- type: string
maxLength: 30
- type: "null"
title: Export case ID
description: Existing export case ID to assign all exports to. Attaching to an existing case is temporarily admin-only until case-level ACLs exist.
new_case_name:
anyOf:
- type: string
maxLength: 100
- type: "null"
title: New case name
description: Name of a new export case to create when export_case_id is omitted
new_case_description:
anyOf:
- type: string
- type: "null"
title: New case description
description: Optional description for a newly created export case
type: object
required:
- items
title: BatchExportBody
BatchExportItem:
properties:
camera:
type: string
title: Camera name
start_time:
type: number
title: Start time
end_time:
type: number
title: End time
image_path:
anyOf:
- type: string
- type: "null"
title: Existing thumbnail path
description: Optional existing image to use as the export thumbnail
friendly_name:
anyOf:
- type: string
maxLength: 256
- type: "null"
title: Friendly name
description: Optional friendly name for this specific export item
client_item_id:
anyOf:
- type: string
maxLength: 128
- type: "null"
title: Client item ID
description: Optional opaque client identifier echoed back in results
type: object
required:
- camera
- start_time
- end_time
title: BatchExportItem
BatchExportResponse:
properties:
export_case_id:
anyOf:
- type: string
- type: "null"
title: Export Case Id
description: Export case ID associated with the batch
export_ids:
items:
type: string
type: array
title: Export Ids
description: Export IDs successfully queued
results:
items:
$ref: "#/components/schemas/BatchExportResultModel"
type: array
title: Results
description: Per-item batch export results
type: object
required:
- export_ids
- results
title: BatchExportResponse
description: Response model for starting an export batch.
BatchExportResultModel:
properties:
camera:
type: string
title: Camera
description: Camera name for this export attempt
export_id:
anyOf:
- type: string
- type: "null"
title: Export Id
description: The export ID when the export was successfully queued
success:
type: boolean
title: Success
description: Whether the export was successfully queued
status:
anyOf:
- type: string
- type: "null"
title: Status
description: Queue status for this camera export
error:
anyOf:
- type: string
- type: "null"
title: Error
description: Validation or queueing error for this item, if any
item_index:
anyOf:
- type: integer
- type: "null"
title: Item Index
description: Zero-based index of this result within the request items list
client_item_id:
anyOf:
- type: string
- type: "null"
title: Client Item Id
description: Opaque client-supplied item identifier echoed from the request
type: object
required:
- camera
- success
title: BatchExportResultModel
description: Per-item result for a batch export request.
EventsSubLabelBody:
properties:
subLabel:
@ -6523,18 +6736,41 @@ components:
required:
- subLabel
title: EventsSubLabelBody
ExportCaseAssignBody:
ExportBulkDeleteBody:
properties:
ids:
items:
type: string
minLength: 1
type: array
minItems: 1
title: Ids
type: object
required:
- ids
title: ExportBulkDeleteBody
description: Request body for bulk deleting exports.
ExportBulkReassignBody:
properties:
ids:
items:
type: string
minLength: 1
type: array
minItems: 1
title: Ids
export_case_id:
anyOf:
- type: string
maxLength: 30
- type: "null"
title: Export Case Id
description: "Case ID to assign to the export, or null to unassign"
description: "Case ID to assign to, or null to unassign from current case"
type: object
title: ExportCaseAssignBody
description: Request body for assigning or unassigning an export to a case.
required:
- ids
title: ExportBulkReassignBody
description: Request body for bulk reassigning exports to a case.
ExportCaseCreateBody:
properties:
name:

View File

@ -125,6 +125,16 @@ def metrics(request: Request):
return Response(content=content, media_type=content_type)
@router.get(
    "/genai/models",
    dependencies=[Depends(allow_any_authenticated())],
    summary="List available GenAI models",
    description="Returns available models for each configured GenAI provider.",
)
def genai_models(request: Request):
    # Thin passthrough to the app-level GenAI manager; the response body is
    # whatever list_models() returns (presumably provider -> models —
    # confirm against GenAIManager.list_models).
    return JSONResponse(content=request.app.genai_manager.list_models())
@router.get("/config", dependencies=[Depends(allow_any_authenticated())])
def config(request: Request):
config_obj: FrigateConfig = request.app.frigate_config
@ -684,6 +694,9 @@ def config_set(request: Request, body: AppConfigSetBody):
if request.app.stats_emitter is not None:
request.app.stats_emitter.config = config
if request.app.dispatcher is not None:
request.app.dispatcher.config = config
if body.update_topic:
if body.update_topic.startswith("config/cameras/"):
_, _, camera, field = body.update_topic.split("/")

View File

@ -64,6 +64,7 @@ def require_admin_by_default():
"/logout",
# Authenticated user endpoints (allow_any_authenticated)
"/profile",
"/profiles",
# Public info endpoints (allow_public)
"/",
"/version",
@ -87,7 +88,9 @@ def require_admin_by_default():
"/go2rtc/streams",
"/event_ids",
"/events",
"/cases",
"/exports",
"/jobs/export",
}
# Path prefixes that should be exempt (for paths with parameters)
@ -100,7 +103,9 @@ def require_admin_by_default():
"/go2rtc/streams/", # /go2rtc/streams/{camera}
"/users/", # /users/{username}/password (has own auth)
"/preview/", # /preview/{file}/thumbnail.jpg
"/cases/", # /cases/{case_id}
"/exports/", # /exports/{export_id}
"/jobs/export/", # /jobs/export/{export_id}
"/vod/", # /vod/{camera_name}/...
"/notifications/", # /notifications/pubkey, /notifications/register
)

View File

@ -30,7 +30,7 @@ from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdateTopic,
)
from frigate.config.env import FRIGATE_ENV_VARS
from frigate.config.env import substitute_frigate_vars
from frigate.util.builtin import clean_camera_user_pass
from frigate.util.camera_cleanup import cleanup_camera_db, cleanup_camera_files
from frigate.util.config import find_config_file
@ -126,7 +126,7 @@ def go2rtc_add_stream(request: Request, stream_name: str, src: str = ""):
params = {"name": stream_name}
if src:
try:
params["src"] = src.format(**FRIGATE_ENV_VARS)
params["src"] = substitute_frigate_vars(src)
except KeyError:
params["src"] = src
@ -1224,6 +1224,15 @@ def camera_set(
status_code=400,
)
if not sub_command and feature in _SUB_COMMAND_FEATURES:
return JSONResponse(
content={
"success": False,
"message": f"Feature '{feature}' requires a sub-command (e.g. mask or zone name)",
},
status_code=400,
)
if camera_name == "*":
cameras = list(frigate_config.cameras.keys())
elif camera_name not in frigate_config.cameras:

View File

@ -3,9 +3,11 @@
import base64
import json
import logging
import operator
import time
from datetime import datetime
from typing import Any, Dict, Generator, List, Optional
from functools import reduce
from typing import Any, Dict, List, Optional
import cv2
from fastapi import APIRouter, Body, Depends, Request
@ -17,6 +19,14 @@ from frigate.api.auth import (
get_allowed_cameras_for_filter,
require_camera_access,
)
from frigate.api.chat_util import (
chunk_content,
distance_to_score,
format_events_with_local_time,
fuse_scores,
hydrate_event,
parse_iso_to_timestamp,
)
from frigate.api.defs.query.events_query_parameters import EventsQueryParams
from frigate.api.defs.request.chat_body import ChatCompletionRequest
from frigate.api.defs.response.chat_response import (
@ -32,55 +42,13 @@ from frigate.jobs.vlm_watch import (
start_vlm_watch_job,
stop_vlm_watch_job,
)
from frigate.models import Event
logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.chat])
def _chunk_content(content: str, chunk_size: int = 80) -> Generator[str, None, None]:
"""Yield content in word-aware chunks for streaming."""
if not content:
return
words = content.split(" ")
current: List[str] = []
current_len = 0
for w in words:
current.append(w)
current_len += len(w) + 1
if current_len >= chunk_size:
yield " ".join(current) + " "
current = []
current_len = 0
if current:
yield " ".join(current)
def _format_events_with_local_time(
events_list: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
"""Add human-readable local start/end times to each event for the LLM."""
result = []
for evt in events_list:
if not isinstance(evt, dict):
result.append(evt)
continue
copy_evt = dict(evt)
try:
start_ts = evt.get("start_time")
end_ts = evt.get("end_time")
if start_ts is not None:
dt_start = datetime.fromtimestamp(start_ts)
copy_evt["start_time_local"] = dt_start.strftime("%Y-%m-%d %I:%M:%S %p")
if end_ts is not None:
dt_end = datetime.fromtimestamp(end_ts)
copy_evt["end_time_local"] = dt_end.strftime("%Y-%m-%d %I:%M:%S %p")
except (TypeError, ValueError, OSError):
pass
result.append(copy_evt)
return result
class ToolExecuteRequest(BaseModel):
"""Request model for tool execution."""
@ -158,6 +126,76 @@ def get_tool_definitions() -> List[Dict[str, Any]]:
"required": [],
},
},
{
"type": "function",
"function": {
"name": "find_similar_objects",
"description": (
"Find tracked objects that are visually and semantically similar "
"to a specific past event. Use this when the user references a "
"particular object they have seen and wants to find other "
"sightings of the same or similar one ('that green car', 'the "
"person in the red jacket', 'the package that was delivered'). "
"Prefer this over search_objects whenever the user's intent is "
"'find more like this specific one.' Use search_objects first "
"only if you need to locate the anchor event. Requires semantic "
"search to be enabled."
),
"parameters": {
"type": "object",
"properties": {
"event_id": {
"type": "string",
"description": "The id of the anchor event to find similar objects to.",
},
"after": {
"type": "string",
"description": "Start time in ISO 8601 format (e.g., '2024-01-01T00:00:00Z').",
},
"before": {
"type": "string",
"description": "End time in ISO 8601 format (e.g., '2024-01-01T23:59:59Z').",
},
"cameras": {
"type": "array",
"items": {"type": "string"},
"description": "Optional list of cameras to restrict to. Defaults to all.",
},
"labels": {
"type": "array",
"items": {"type": "string"},
"description": "Optional list of labels to restrict to. Defaults to the anchor event's label.",
},
"sub_labels": {
"type": "array",
"items": {"type": "string"},
"description": "Optional list of sub_labels (names) to restrict to.",
},
"zones": {
"type": "array",
"items": {"type": "string"},
"description": "Optional list of zones. An event matches if any of its zones overlap.",
},
"similarity_mode": {
"type": "string",
"enum": ["visual", "semantic", "fused"],
"description": "Which similarity signal(s) to use. 'fused' (default) combines visual and semantic.",
"default": "fused",
},
"min_score": {
"type": "number",
"description": "Drop matches with a similarity score below this threshold (0.0-1.0).",
},
"limit": {
"type": "integer",
"description": "Maximum number of matches to return (default: 10).",
"default": 10,
},
},
"required": ["event_id"],
},
},
},
{
"type": "function",
"function": {
@ -407,7 +445,7 @@ async def _execute_search_objects(
query_params = EventsQueryParams(
cameras=arguments.get("camera", "all"),
labels=arguments.get("label", "all"),
sub_labels=arguments.get("sub_label", "all").lower(),
sub_labels=arguments.get("sub_label", "all"), # case-insensitive on the backend
zones=zones,
zone=zones,
after=after,
@ -434,6 +472,166 @@ async def _execute_search_objects(
)
async def _execute_find_similar_objects(
    request: Request,
    arguments: Dict[str, Any],
    allowed_cameras: List[str],
) -> Dict[str, Any]:
    """Execute the find_similar_objects tool.

    Looks up an anchor event, runs visual and/or semantic similarity
    searches against the embeddings context, intersects the candidate ids
    with structured filters (time window, cameras, labels, sub_labels,
    zones), then fuses per-modality scores and returns the top matches.

    Keys read from ``arguments``:
        event_id: required anchor event id.
        after / before: optional ISO-8601 bounds on Event.start_time.
        cameras: optional camera filter, intersected with allowed_cameras (RBAC).
        labels / sub_labels / zones: optional structured filters; labels
            defaults to the anchor's label.
        similarity_mode: "visual", "semantic", or "fused" (default).
        min_score: optional threshold on the fused score.
        limit: max results, clamped to 1..50.

    Returns a plain dict (not JSONResponse) so the chat loop can embed it
    directly in tool-result messages. Error cases return a dict carrying
    an "error" key rather than raising.
    """
    # 1. Semantic search enabled?
    config = request.app.frigate_config
    if not getattr(config.semantic_search, "enabled", False):
        return {
            "error": "semantic_search_disabled",
            "message": (
                "Semantic search must be enabled to find similar objects. "
                "Enable it in the Frigate config under semantic_search."
            ),
        }

    context = request.app.embeddings
    if context is None:
        return {
            "error": "semantic_search_disabled",
            "message": "Embeddings context is not available.",
        }

    # 2. Anchor lookup.
    event_id = arguments.get("event_id")
    if not event_id:
        return {"error": "missing_event_id", "message": "event_id is required."}

    try:
        anchor = Event.get(Event.id == event_id)
    except Event.DoesNotExist:
        return {
            "error": "anchor_not_found",
            "message": f"Could not find event {event_id}.",
        }

    # 3. Parse params.
    after = parse_iso_to_timestamp(arguments.get("after"))
    before = parse_iso_to_timestamp(arguments.get("before"))

    cameras = arguments.get("cameras")
    if cameras:
        # Respect RBAC: intersect with the user's allowed cameras.
        cameras = [c for c in cameras if c in allowed_cameras]
    else:
        cameras = list(allowed_cameras) if allowed_cameras else None

    # Default to the anchor's label so "more like this one" stays on-topic.
    labels = arguments.get("labels") or [anchor.label]
    sub_labels = arguments.get("sub_labels")
    zones = arguments.get("zones")

    similarity_mode = arguments.get("similarity_mode", "fused")
    if similarity_mode not in ("visual", "semantic", "fused"):
        similarity_mode = "fused"

    min_score = arguments.get("min_score")
    # Clamp to a sane window regardless of what the LLM sends.
    limit = int(arguments.get("limit", 10))
    limit = max(1, min(limit, 50))

    # 4. Run similarity searches. We deliberately do NOT pass event_ids into
    # the vec queries — the IN filter on sqlite-vec is broken in the installed
    # version (see frigate/embeddings/__init__.py). Mirror the pattern used by
    # frigate/api/event.py events_search: fetch top-k globally, then intersect
    # with the structured filters via Peewee.
    visual_distances: Dict[str, float] = {}
    description_distances: Dict[str, float] = {}
    try:
        if similarity_mode in ("visual", "fused"):
            rows = context.search_thumbnail(anchor)
            visual_distances = {row[0]: row[1] for row in rows}
        if similarity_mode in ("semantic", "fused"):
            # Use the richest text available for the anchor as the query.
            query_text = (
                (anchor.data or {}).get("description")
                or anchor.sub_label
                or anchor.label
            )
            rows = context.search_description(query_text)
            description_distances = {row[0]: row[1] for row in rows}
    except Exception:
        logger.exception("Similarity search failed")
        return {
            "error": "similarity_search_failed",
            "message": "Failed to run similarity search.",
        }

    vec_ids = set(visual_distances) | set(description_distances)
    vec_ids.discard(anchor.id)

    # vec layer returns up to k=100 per modality; flag when we hit that ceiling
    # so the LLM can mention there may be more matches beyond what we saw.
    candidate_truncated = (
        len(visual_distances) >= 100 or len(description_distances) >= 100
    )

    if not vec_ids:
        return {
            "anchor": hydrate_event(anchor),
            "results": [],
            "similarity_mode": similarity_mode,
            "candidate_truncated": candidate_truncated,
        }

    # 5. Apply structured filters, intersected with vec hits.
    clauses = [Event.id.in_(list(vec_ids))]
    if after is not None:
        clauses.append(Event.start_time >= after)
    if before is not None:
        clauses.append(Event.start_time <= before)
    if cameras:
        clauses.append(Event.camera.in_(cameras))
    if labels:
        clauses.append(Event.label.in_(labels))
    if sub_labels:
        clauses.append(Event.sub_label.in_(sub_labels))
    if zones:
        # Mirror the pattern used by frigate/api/event.py for JSON-array zone match.
        zone_clauses = [Event.zones.cast("text") % f'*"{zone}"*' for zone in zones]
        clauses.append(reduce(operator.or_, zone_clauses))

    eligible = {e.id: e for e in Event.select().where(reduce(operator.and_, clauses))}

    # 6. Fuse and rank.
    scored: List[tuple[str, float]] = []
    for eid in eligible:
        v_score = (
            distance_to_score(visual_distances[eid], context.thumb_stats)
            if eid in visual_distances
            else None
        )
        d_score = (
            distance_to_score(description_distances[eid], context.desc_stats)
            if eid in description_distances
            else None
        )
        fused = fuse_scores(v_score, d_score)
        if fused is None:
            continue
        if min_score is not None and fused < min_score:
            continue
        scored.append((eid, fused))

    scored.sort(key=lambda pair: pair[1], reverse=True)
    scored = scored[:limit]

    results = [hydrate_event(eligible[eid], score=score) for eid, score in scored]

    return {
        "anchor": hydrate_event(anchor),
        "results": results,
        "similarity_mode": similarity_mode,
        "candidate_truncated": candidate_truncated,
    }
@router.post(
"/chat/execute",
dependencies=[Depends(allow_any_authenticated())],
@ -459,6 +657,13 @@ async def execute_tool(
if tool_name == "search_objects":
return await _execute_search_objects(arguments, allowed_cameras)
if tool_name == "find_similar_objects":
result = await _execute_find_similar_objects(
request, arguments, allowed_cameras
)
status_code = 200 if "error" not in result else 400
return JSONResponse(content=result, status_code=status_code)
if tool_name == "set_camera_state":
result = await _execute_set_camera_state(request, arguments)
return JSONResponse(
@ -520,45 +725,14 @@ async def _execute_get_live_context(
"detections": list(tracked_objects_dict.values()),
}
# Grab live frame and handle based on provider configuration
# Grab live frame when the chat model supports vision
image_url = await _get_live_frame_image_url(request, camera, allowed_cameras)
if image_url:
genai_manager = request.app.genai_manager
if genai_manager.tool_client is genai_manager.vision_client:
# Same provider handles both roles — pass image URL so it can
# be injected as a user message (images can't be in tool results)
chat_client = request.app.genai_manager.chat_client
if chat_client is not None and chat_client.supports_vision:
# Pass image URL so it can be injected as a user message
# (images can't be in tool results)
result["_image_url"] = image_url
elif genai_manager.vision_client is not None:
# Separate vision provider — have it describe the image,
# providing detection context so it knows what to focus on
frame_bytes = _decode_data_url(image_url)
if frame_bytes:
detections = result.get("detections", [])
if detections:
detection_lines = []
for d in detections:
parts = [d.get("label", "unknown")]
if d.get("sub_label"):
parts.append(f"({d['sub_label']})")
if d.get("zones"):
parts.append(f"in {', '.join(d['zones'])}")
detection_lines.append(" ".join(parts))
context = (
"The following objects are currently being tracked: "
+ "; ".join(detection_lines)
+ "."
)
else:
context = "No objects are currently being tracked."
description = genai_manager.vision_client._send(
f"Describe what you see in this security camera image. "
f"{context} Focus on the scene, any visible activity, "
f"and details about the tracked objects.",
[frame_bytes],
)
if description:
result["image_description"] = description
return result
@ -609,17 +783,6 @@ async def _get_live_frame_image_url(
return None
def _decode_data_url(data_url: str) -> Optional[bytes]:
"""Decode a base64 data URL to raw bytes."""
try:
# Format: data:image/jpeg;base64,<data>
_, encoded = data_url.split(",", 1)
return base64.b64decode(encoded)
except (ValueError, Exception) as e:
logger.debug("Failed to decode data URL: %s", e)
return None
async def _execute_set_camera_state(
request: Request,
arguments: Dict[str, Any],
@ -684,6 +847,8 @@ async def _execute_tool_internal(
except (json.JSONDecodeError, AttributeError) as e:
logger.warning(f"Failed to extract tool result: {e}")
return {"error": "Failed to parse tool result"}
elif tool_name == "find_similar_objects":
return await _execute_find_similar_objects(request, arguments, allowed_cameras)
elif tool_name == "set_camera_state":
return await _execute_set_camera_state(request, arguments)
elif tool_name == "get_live_context":
@ -706,8 +871,9 @@ async def _execute_tool_internal(
return _execute_get_recap(arguments, allowed_cameras)
else:
logger.error(
"Tool call failed: unknown tool %r. Expected one of: search_objects, get_live_context, "
"start_camera_watch, stop_camera_watch, get_profile_status, get_recap. Arguments received: %s",
"Tool call failed: unknown tool %r. Expected one of: search_objects, find_similar_objects, "
"get_live_context, start_camera_watch, stop_camera_watch, get_profile_status, get_recap. "
"Arguments received: %s",
tool_name,
json.dumps(arguments),
)
@ -734,9 +900,9 @@ async def _execute_start_camera_watch(
await require_camera_access(camera, request=request)
genai_manager = request.app.genai_manager
vision_client = genai_manager.vision_client or genai_manager.tool_client
if vision_client is None:
return {"error": "No vision/GenAI provider configured."}
chat_client = genai_manager.chat_client
if chat_client is None or not chat_client.supports_vision:
return {"error": "VLM watch requires a chat model with vision support."}
try:
job_id = start_vlm_watch_job(
@ -969,7 +1135,7 @@ async def _execute_pending_tools(
json.dumps(tool_args),
)
if tool_name == "search_objects" and isinstance(tool_result, list):
tool_result = _format_events_with_local_time(tool_result)
tool_result = format_events_with_local_time(tool_result)
_keys = {
"id",
"camera",
@ -1070,7 +1236,7 @@ async def chat_completion(
6. Repeats until final answer
7. Returns response to user
"""
genai_client = request.app.genai_manager.tool_client
genai_client = request.app.genai_manager.chat_client
if not genai_client:
return JSONResponse(
content={
@ -1122,7 +1288,9 @@ Do not start your response with phrases like "I will check...", "Let me see...",
Always present times to the user in the server's local timezone. When tool results include start_time_local and end_time_local, use those exact strings when listing or describing detection times—do not convert or invent timestamps. Do not use UTC or ISO format with Z for the user-facing answer unless the tool result only provides Unix timestamps without local time fields.
When users ask about "today", "yesterday", "this week", etc., use the current date above as reference.
When searching for objects or events, use ISO 8601 format for dates (e.g., {current_date_str}T00:00:00Z for the start of today).
Always be accurate with time calculations based on the current date provided.{cameras_section}"""
Always be accurate with time calculations based on the current date provided.
When a user refers to a specific object they have seen or describe with identifying details ("that green car", "the person in the red jacket", "a package left today"), prefer the find_similar_objects tool over search_objects. Use search_objects first only to locate the anchor event, then pass its id to find_similar_objects. For generic queries like "show me all cars today", keep using search_objects. If a user message begins with [attached_event:<id>], treat that event id as the anchor for any similarity or "tell me more" request in the same message and call find_similar_objects with that id.{cameras_section}"""
conversation.append(
{
@ -1160,6 +1328,9 @@ Always be accurate with time calculations based on the current date provided.{ca
async def stream_body_llm():
nonlocal conversation, stream_tool_calls, stream_iterations
while stream_iterations < max_iterations:
if await request.is_disconnected():
logger.debug("Client disconnected, stopping chat stream")
return
logger.debug(
f"Streaming LLM (iteration {stream_iterations + 1}/{max_iterations}) "
f"with {len(conversation)} message(s)"
@ -1169,6 +1340,9 @@ Always be accurate with time calculations based on the current date provided.{ca
tools=tools if tools else None,
tool_choice="auto",
):
if await request.is_disconnected():
logger.debug("Client disconnected, stopping chat stream")
return
kind, value = event
if kind == "content_delta":
yield (
@ -1198,6 +1372,11 @@ Always be accurate with time calculations based on the current date provided.{ca
msg.get("content"), pending
)
)
if await request.is_disconnected():
logger.debug(
"Client disconnected before tool execution"
)
return
(
executed_calls,
tool_results,
@ -1282,7 +1461,7 @@ Always be accurate with time calculations based on the current date provided.{ca
+ b"\n"
)
# Stream content in word-sized chunks for smooth UX
for part in _chunk_content(final_content):
for part in chunk_content(final_content):
yield (
json.dumps({"type": "content", "delta": part}).encode(
"utf-8"
@ -1381,12 +1560,12 @@ async def start_vlm_monitor(
await require_camera_access(body.camera, request=request)
vision_client = genai_manager.vision_client or genai_manager.tool_client
if vision_client is None:
chat_client = genai_manager.chat_client
if chat_client is None or not chat_client.supports_vision:
return JSONResponse(
content={
"success": False,
"message": "No vision/GenAI provider configured.",
"message": "VLM watch requires a chat model with vision support.",
},
status_code=400,
)

135
frigate/api/chat_util.py Normal file
View File

@ -0,0 +1,135 @@
"""Pure, stateless helpers used by the chat tool dispatchers.
These were extracted from frigate/api/chat.py to keep that module focused on
route handlers, tool dispatchers, and streaming loop internals. Nothing in
this file touches the FastAPI request, the embeddings context, or the chat
loop state all inputs and outputs are plain data.
"""
import logging
import math
import time
from datetime import datetime
from typing import Any, Dict, Generator, List, Optional
from frigate.embeddings.util import ZScoreNormalization
from frigate.models import Event
logger = logging.getLogger(__name__)
# Similarity fusion weights for find_similar_objects.
# Visual dominates because the feature's primary use case is "same specific object."
# If these change, update the test in test_chat_find_similar_objects.py.
VISUAL_WEIGHT = 0.65
DESCRIPTION_WEIGHT = 0.35
def chunk_content(content: str, chunk_size: int = 80) -> Generator[str, None, None]:
    """Stream ``content`` as word-aligned chunks.

    Words (split on single spaces) are accumulated until the running
    length reaches ``chunk_size``; each flushed chunk keeps a trailing
    space so concatenating all chunks reproduces the original text.
    An empty string yields nothing.
    """
    if not content:
        return
    buffer: List[str] = []
    buffered_chars = 0
    for word in content.split(" "):
        buffer.append(word)
        buffered_chars += len(word) + 1  # +1 for the joining space
        if buffered_chars >= chunk_size:
            yield " ".join(buffer) + " "
            buffer = []
            buffered_chars = 0
    # Flush whatever remains after the final word.
    if buffer:
        yield " ".join(buffer)
def format_events_with_local_time(
    events_list: List[Dict[str, Any]],
) -> List[Dict[str, Any]]:
    """Return a copy of each event dict with local-time strings attached.

    For every dict entry, ``start_time_local`` / ``end_time_local`` are
    added (strftime "%Y-%m-%d %I:%M:%S %p", server-local time) when the
    matching unix timestamp is present. Non-dict entries and events with
    malformed timestamps pass through unchanged; inputs are not mutated.
    """
    formatted: List[Dict[str, Any]] = []
    for event in events_list:
        if not isinstance(event, dict):
            formatted.append(event)
            continue
        enriched = dict(event)
        try:
            # start first, then end — matches the original ordering so a
            # failure on end_time still leaves start_time_local in place.
            for source_key, local_key in (
                ("start_time", "start_time_local"),
                ("end_time", "end_time_local"),
            ):
                ts = event.get(source_key)
                if ts is not None:
                    enriched[local_key] = datetime.fromtimestamp(ts).strftime(
                        "%Y-%m-%d %I:%M:%S %p"
                    )
        except (TypeError, ValueError, OSError):
            pass
        formatted.append(enriched)
    return formatted
def distance_to_score(distance: float, stats: ZScoreNormalization) -> float:
    """Map a cosine distance onto a [0, 1] similarity score.

    The distance is z-normalized against the deployment-wide running stats
    maintained by EmbeddingsContext, then pushed through a sigmoid so a
    smaller distance produces a higher score. When the stats are still
    uninitialized (stddev == 0) a neutral 0.5 is returned so that fallback
    ordering by raw distance dominates downstream.
    """
    if stats.stddev == 0:
        return 0.5
    z_value = (distance - stats.mean) / stats.stddev
    # exp(z) grows with distance, so the score shrinks as distance grows.
    return 1.0 / (1.0 + math.exp(z_value))
def fuse_scores(
    visual_score: Optional[float],
    description_score: Optional[float],
) -> Optional[float]:
    """Blend visual and description similarity into a single score.

    When both signals exist the module-level VISUAL_WEIGHT /
    DESCRIPTION_WEIGHT combination is returned. When exactly one is
    missing (e.g. no description embedding for the event), the available
    score is returned alone with no penalty. When both are missing,
    returns None and the caller should drop the event.
    """
    if visual_score is not None and description_score is not None:
        return VISUAL_WEIGHT * visual_score + DESCRIPTION_WEIGHT * description_score
    if visual_score is not None:
        return visual_score
    # Either the description score alone, or None when both were missing.
    return description_score
def parse_iso_to_timestamp(value: Optional[str]) -> Optional[float]:
    """Interpret an ISO-8601 string as server-local time -> unix timestamp.

    Mirrors the parsing _execute_search_objects uses so both tools accept
    the same format from the LLM. A trailing "Z" is stripped and anything
    beyond seconds precision (fractions, UTC offsets) is truncated away;
    the remaining wall-clock value is converted via time.mktime. Returns
    None for a None input, or (with a warning) for unparseable input.
    """
    if value is None:
        return None
    try:
        normalized = value.replace("Z", "").strip()[:19]
        parsed = datetime.strptime(normalized, "%Y-%m-%dT%H:%M:%S")
        return time.mktime(parsed.timetuple())
    except (ValueError, AttributeError, TypeError):
        logger.warning("Invalid timestamp format: %s", value)
        return None
def hydrate_event(event: Event, score: Optional[float] = None) -> Dict[str, Any]:
    """Project an Event row into the dict shape find_similar_objects returns.

    Only identifying, timing, and zone fields are exposed; the similarity
    ``score`` key is attached only when a score is provided.
    """
    hydrated: Dict[str, Any] = {
        "id": event.id,
        "camera": event.camera,
        "label": event.label,
        "sub_label": event.sub_label,
        "start_time": event.start_time,
        "end_time": event.end_time,
        "zones": event.zones,
    }
    if score is not None:
        hydrated["score"] = score
    return hydrated

View File

@ -0,0 +1,65 @@
from typing import List, Optional
from pydantic import BaseModel, Field, model_validator
MAX_BATCH_EXPORT_ITEMS = 50
class BatchExportItem(BaseModel):
    # A single export request inside a batch: one camera plus the time range
    # to export. Time ordering (end_time > start_time) is enforced by the
    # enclosing BatchExportBody validator, not here.
    # No class docstring on purpose: pydantic would surface it as the schema
    # description and change the generated OpenAPI output.
    camera: str = Field(title="Camera name")
    # start_time/end_time are floats — presumably unix seconds; confirm
    # against the export job consumer.
    start_time: float = Field(title="Start time")
    end_time: float = Field(title="End time")
    image_path: Optional[str] = Field(
        default=None,
        title="Existing thumbnail path",
        description="Optional existing image to use as the export thumbnail",
    )
    friendly_name: Optional[str] = Field(
        default=None,
        title="Friendly name",
        max_length=256,
        description="Optional friendly name for this specific export item",
    )
    client_item_id: Optional[str] = Field(
        default=None,
        title="Client item ID",
        max_length=128,
        description="Optional opaque client identifier echoed back in results",
    )
class BatchExportBody(BaseModel):
    # Request body for starting a batch export: 1..MAX_BATCH_EXPORT_ITEMS
    # items, optionally attached to an existing case (export_case_id) or a
    # newly created one (new_case_name / new_case_description).
    items: List[BatchExportItem] = Field(
        title="Items",
        min_length=1,
        max_length=MAX_BATCH_EXPORT_ITEMS,
        description="List of export items. Each item has its own camera and time range.",
    )
    export_case_id: Optional[str] = Field(
        default=None,
        title="Export case ID",
        max_length=30,
        description=(
            "Existing export case ID to assign all exports to. Attaching to an "
            "existing case is temporarily admin-only until case-level ACLs exist."
        ),
    )
    new_case_name: Optional[str] = Field(
        default=None,
        title="New case name",
        max_length=100,
        description="Name of a new export case to create when export_case_id is omitted",
    )
    new_case_description: Optional[str] = Field(
        default=None,
        title="New case description",
        description="Optional description for a newly created export case",
    )

    @model_validator(mode="after")
    def validate_case_target(self) -> "BatchExportBody":
        # NOTE(review): despite the name, this validator only checks per-item
        # time ordering; no case-target consistency (e.g. export_case_id vs.
        # new_case_name mutual exclusivity) is validated here — confirm
        # whether that is enforced by the route handler.
        for item in self.items:
            if item.end_time <= item.start_time:
                raise ValueError("end_time must be after start_time")
        return self

View File

@ -0,0 +1,24 @@
"""Request bodies for bulk export operations."""
from typing import Optional
from pydantic import BaseModel, Field, conlist, constr
class ExportBulkDeleteBody(BaseModel):
    """Request body for bulk deleting exports."""

    # Non-empty list of export IDs; every element must itself be non-empty.
    # conlist/constr express the same constraints surfaced in the OpenAPI
    # schema (minItems: 1, per-item minLength: 1).
    ids: conlist(constr(min_length=1), min_length=1)
class ExportBulkReassignBody(BaseModel):
    """Request body for bulk reassigning exports to a case."""

    # Non-empty list of export IDs; every element must itself be non-empty.
    ids: conlist(constr(min_length=1), min_length=1)
    # None (or omitted) unassigns the exports from their current case.
    export_case_id: Optional[str] = Field(
        default=None,
        max_length=30,
        description="Case ID to assign to, or null to unassign from current case",
    )

View File

@ -23,13 +23,3 @@ class ExportCaseUpdateBody(BaseModel):
description: Optional[str] = Field(
default=None, description="Updated description of the export case"
)
class ExportCaseAssignBody(BaseModel):
"""Request body for assigning or unassigning an export to a case."""
export_case_id: Optional[str] = Field(
default=None,
max_length=30,
description="Case ID to assign to the export, or null to unassign",
)

View File

@ -1,4 +1,4 @@
from typing import List, Optional
from typing import Any, List, Optional
from pydantic import BaseModel, Field
@ -28,6 +28,96 @@ class StartExportResponse(BaseModel):
export_id: Optional[str] = Field(
default=None, description="The export ID if successfully started"
)
status: Optional[str] = Field(
default=None,
description="Queue status for the export job",
)
class BatchExportResultModel(BaseModel):
    """Per-item result for a batch export request."""

    # Exactly one of export_id / error is expected to be meaningful
    # depending on `success` — presumably set by the batch route; confirm.
    camera: str = Field(description="Camera name for this export attempt")
    export_id: Optional[str] = Field(
        default=None,
        description="The export ID when the export was successfully queued",
    )
    success: bool = Field(description="Whether the export was successfully queued")
    status: Optional[str] = Field(
        default=None,
        description="Queue status for this camera export",
    )
    error: Optional[str] = Field(
        default=None,
        description="Validation or queueing error for this item, if any",
    )
    item_index: Optional[int] = Field(
        default=None,
        description="Zero-based index of this result within the request items list",
    )
    client_item_id: Optional[str] = Field(
        default=None,
        description="Opaque client-supplied item identifier echoed from the request",
    )
class BatchExportResponse(BaseModel):
    """Response model for starting an export batch."""

    export_case_id: Optional[str] = Field(
        default=None,
        description="Export case ID associated with the batch",
    )
    # export_ids is the success-only subset; `results` carries one entry per
    # requested item, including failures.
    export_ids: List[str] = Field(description="Export IDs successfully queued")
    results: List[BatchExportResultModel] = Field(
        description="Per-item batch export results"
    )
class ExportJobModel(BaseModel):
    """Model representing a queued or running export job."""

    id: str = Field(description="Unique identifier for the export job")
    job_type: str = Field(description="Job type")
    status: str = Field(description="Current job status")
    camera: str = Field(description="Camera associated with this export job")
    name: Optional[str] = Field(
        default=None,
        description="Friendly name for the export",
    )
    export_case_id: Optional[str] = Field(
        default=None,
        description="ID of the export case this export belongs to",
    )
    # request_* is the footage window the caller asked for; start_time /
    # end_time below are the job's own execution timestamps.
    request_start_time: float = Field(description="Requested export start time")
    request_end_time: float = Field(description="Requested export end time")
    start_time: Optional[float] = Field(
        default=None,
        description="Unix timestamp when execution started",
    )
    end_time: Optional[float] = Field(
        default=None,
        description="Unix timestamp when execution completed",
    )
    error_message: Optional[str] = Field(
        default=None,
        description="Error message for failed jobs",
    )
    results: Optional[dict[str, Any]] = Field(
        default=None,
        description="Result metadata for completed jobs",
    )
    current_step: str = Field(
        default="queued",
        description="Current execution step (queued, preparing, encoding, encoding_retry, finalizing)",
    )
    progress_percent: float = Field(
        default=0.0,
        description="Progress percentage of the current step (0.0 - 100.0)",
    )
ExportJobsResponse = List[ExportJobModel]
ExportsResponse = List[ExportModel]

View File

@ -199,13 +199,18 @@ def events(
sub_label_clauses.append((Event.sub_label.is_null()))
for label in filtered_sub_labels:
lowered = label.lower()
sub_label_clauses.append(
(Event.sub_label.cast("text") == label)
) # include exact matches
(fn.LOWER(Event.sub_label.cast("text")) == lowered)
) # include exact matches (case-insensitive)
# include this label when part of a list
sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))
# include this label when part of a list (LIKE is case-insensitive in sqlite for ASCII)
sub_label_clauses.append(
(fn.LOWER(Event.sub_label.cast("text")) % f"*{lowered},*")
)
sub_label_clauses.append(
(fn.LOWER(Event.sub_label.cast("text")) % f"*, {lowered}*")
)
sub_label_clause = reduce(operator.or_, sub_label_clauses)
clauses.append((sub_label_clause))
@ -609,13 +614,18 @@ def events_search(
sub_label_clauses.append((Event.sub_label.is_null()))
for label in filtered_sub_labels:
lowered = label.lower()
sub_label_clauses.append(
(Event.sub_label.cast("text") == label)
) # include exact matches
(fn.LOWER(Event.sub_label.cast("text")) == lowered)
) # include exact matches (case-insensitive)
# include this label when part of a list
sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))
# include this label when part of a list (LIKE is case-insensitive in sqlite for ASCII)
sub_label_clauses.append(
(fn.LOWER(Event.sub_label.cast("text")) % f"*{lowered},*")
)
sub_label_clauses.append(
(fn.LOWER(Event.sub_label.cast("text")) % f"*, {lowered}*")
)
event_filters.append((reduce(operator.or_, sub_label_clauses)))

File diff suppressed because it is too large Load Diff

View File

@ -746,7 +746,7 @@ async def set_not_reviewed(
description="Use GenAI to summarize review items over a period of time.",
)
def generate_review_summary(request: Request, start_ts: float, end_ts: float):
if not request.app.genai_manager.vision_client:
if not request.app.genai_manager.description_client:
return JSONResponse(
content=(
{

View File

@ -52,6 +52,7 @@ from frigate.embeddings import EmbeddingProcess, EmbeddingsContext
from frigate.events.audio import AudioProcessor
from frigate.events.cleanup import EventCleanup
from frigate.events.maintainer import EventProcessor
from frigate.jobs.export import reap_stale_exports
from frigate.jobs.motion_search import stop_all_motion_search_jobs
from frigate.log import _stop_logging
from frigate.models import (
@ -188,17 +189,6 @@ class FrigateApp:
except PermissionError:
logger.error("Unable to write to /config to save DB state")
def cleanup_timeline_db(db: SqliteExtDatabase) -> None:
db.execute_sql(
"DELETE FROM timeline WHERE source_id NOT IN (SELECT id FROM event);"
)
try:
with open(f"{CONFIG_DIR}/.timeline", "w") as f:
f.write(str(datetime.datetime.now().timestamp()))
except PermissionError:
logger.error("Unable to write to /config to save DB state")
# Migrate DB schema
migrate_db = SqliteExtDatabase(self.config.database.path)
@ -215,11 +205,6 @@ class FrigateApp:
router.run()
# this is a temporary check to clean up user DB from beta
# will be removed before final release
if not os.path.exists(f"{CONFIG_DIR}/.timeline"):
cleanup_timeline_db(migrate_db)
# check if vacuum needs to be run
if os.path.exists(f"{CONFIG_DIR}/.vacuum"):
with open(f"{CONFIG_DIR}/.vacuum") as f:
@ -611,6 +596,11 @@ class FrigateApp:
# Clean up any stale replay camera artifacts (filesystem + DB)
cleanup_replay_cameras()
# Reap any Export rows still marked in_progress from a previous
# session (crash, kill, broken migration). Runs synchronously before
# uvicorn binds so no API request can observe a stale row.
reap_stale_exports()
self.init_inter_process_communicator()
self.start_detectors()
self.init_dispatcher()

View File

@ -118,10 +118,21 @@ class Dispatcher:
try:
if command_type == "set":
# Commands that require a sub-command (mask/zone name)
sub_command_required = {
"motion_mask",
"object_mask",
"zone",
}
if sub_command:
self._camera_settings_handlers[command](
camera_name, sub_command, payload
)
elif command in sub_command_required:
logger.error(
"Command %s requires a sub-command (mask/zone name)",
command,
)
else:
self._camera_settings_handlers[command](camera_name, payload)
elif command_type == "ptz":

View File

@ -18,8 +18,8 @@ class GenAIProviderEnum(str, Enum):
class GenAIRoleEnum(str, Enum):
tools = "tools"
vision = "vision"
chat = "chat"
descriptions = "descriptions"
embeddings = "embeddings"
@ -49,21 +49,21 @@ class GenAIConfig(FrigateBaseModel):
roles: list[GenAIRoleEnum] = Field(
default_factory=lambda: [
GenAIRoleEnum.embeddings,
GenAIRoleEnum.vision,
GenAIRoleEnum.tools,
GenAIRoleEnum.descriptions,
GenAIRoleEnum.chat,
],
title="Roles",
description="GenAI roles (tools, vision, embeddings); one provider per role.",
description="GenAI roles (chat, descriptions, embeddings); one provider per role.",
)
provider_options: dict[str, Any] = Field(
default={},
title="Provider options",
description="Additional provider-specific options to pass to the GenAI client.",
json_schema_extra={"additionalProperties": {"type": "string"}},
json_schema_extra={"additionalProperties": {}},
)
runtime_options: dict[str, Any] = Field(
default={},
title="Runtime options",
description="Runtime options passed to the provider for each inference call.",
json_schema_extra={"additionalProperties": {"type": "string"}},
json_schema_extra={"additionalProperties": {}},
)

View File

@ -92,6 +92,12 @@ class RecordExportConfig(FrigateBaseModel):
title="Export hwaccel args",
description="Hardware acceleration args to use for export/transcode operations.",
)
max_concurrent: int = Field(
default=3,
ge=1,
title="Maximum concurrent exports",
description="Maximum number of export jobs to process at the same time.",
)
class RecordConfig(FrigateBaseModel):

View File

@ -730,6 +730,9 @@ class FrigateConfig(FrigateBaseModel):
)
if need_detect_dimensions:
logger.info(
f"detect.width and detect.height not set for {camera_config.name}, probing detect stream to determine resolution."
)
stream_info = {"width": 0, "height": 0, "fourcc": None}
try:
stream_info = stream_info_retriever.get_stream_info(

View File

@ -1,4 +1,5 @@
import os
import re
from pathlib import Path
from typing import Annotated
@ -15,8 +16,77 @@ if os.path.isdir(secrets_dir) and os.access(secrets_dir, os.R_OK):
)
# Matches a FRIGATE_* identifier following an opening brace.
_FRIGATE_IDENT_RE = re.compile(r"FRIGATE_[A-Za-z0-9_]+")
def substitute_frigate_vars(value: str) -> str:
"""Substitute `{FRIGATE_*}` placeholders in *value*.
Reproduces the subset of `str.format()` brace semantics that Frigate's
config has historically supported, while leaving unrelated brace content
(e.g. ffmpeg `%{localtime\\:...}` expressions) untouched:
* `{{` and `}}` collapse to literal `{` / `}` (the documented escape).
* `{FRIGATE_NAME}` is replaced from `FRIGATE_ENV_VARS`; an unknown name
raises `KeyError` to preserve the existing "Invalid substitution"
error path.
* A `{` that begins `{FRIGATE_` but is not a well-formed
`{FRIGATE_NAME}` placeholder raises `ValueError` (malformed
placeholder). Callers that catch `KeyError` to allow unknown-var
passthrough will still surface malformed syntax as an error.
* Any other `{` or `}` is treated as a literal and passed through.
"""
out: list[str] = []
i = 0
n = len(value)
while i < n:
ch = value[i]
if ch == "{":
# Escaped literal `{{`.
if i + 1 < n and value[i + 1] == "{":
out.append("{")
i += 2
continue
# Possible `{FRIGATE_*}` placeholder.
if value.startswith("{FRIGATE_", i):
ident_match = _FRIGATE_IDENT_RE.match(value, i + 1)
if (
ident_match is not None
and ident_match.end() < n
and value[ident_match.end()] == "}"
):
key = ident_match.group(0)
if key not in FRIGATE_ENV_VARS:
raise KeyError(key)
out.append(FRIGATE_ENV_VARS[key])
i = ident_match.end() + 1
continue
# Looks like a FRIGATE placeholder but is malformed
# (no closing brace, illegal char, format spec, etc.).
raise ValueError(
f"Malformed FRIGATE_ placeholder near {value[i : i + 32]!r}"
)
# Plain `{` — pass through (e.g. `%{localtime\:...}`).
out.append("{")
i += 1
continue
if ch == "}":
# Escaped literal `}}`.
if i + 1 < n and value[i + 1] == "}":
out.append("}")
i += 2
continue
out.append("}")
i += 1
continue
out.append(ch)
i += 1
return "".join(out)
def validate_env_string(v: str) -> str:
return v.format(**FRIGATE_ENV_VARS)
return substitute_frigate_vars(v)
EnvString = Annotated[str, AfterValidator(validate_env_string)]

View File

@ -44,6 +44,22 @@ DEFAULT_ATTRIBUTE_LABEL_MAP = {
],
"motorcycle": ["license_plate"],
}
# Human-friendly display names for snake_case carrier/brand attribute labels.
# Consumers look labels up with .get(label, fallback), so any label missing
# here simply falls back to the caller's default rendering.
ATTRIBUTE_LABEL_DISPLAY_MAP = {
    "amazon": "Amazon",
    "an_post": "An Post",
    "canada_post": "Canada Post",
    "dhl": "DHL",
    "dpd": "DPD",
    "fedex": "FedEx",
    "gls": "GLS",
    "nzpost": "NZ Post",
    "postnl": "PostNL",
    "postnord": "PostNord",
    "purolator": "Purolator",
    "royal_mail": "Royal Mail",
    "ups": "UPS",
    "usps": "USPS",
}
LABEL_CONSOLIDATION_MAP = {
"car": 0.8,
"face": 0.5,

View File

@ -16,7 +16,7 @@ from frigate.config import CameraConfig, FrigateConfig
from frigate.const import CLIPS_DIR, UPDATE_EVENT_DESCRIPTION
from frigate.data_processing.post.semantic_trigger import SemanticTriggerProcessor
from frigate.data_processing.types import PostProcessDataEnum
from frigate.genai import GenAIClient
from frigate.genai.manager import GenAIClientManager
from frigate.models import Event
from frigate.types import TrackedObjectUpdateTypesEnum
from frigate.util.builtin import EventsPerSecond, InferenceSpeed
@ -41,7 +41,7 @@ class ObjectDescriptionProcessor(PostProcessorApi):
embeddings: "Embeddings",
requestor: InterProcessRequestor,
metrics: DataProcessorMetrics,
client: GenAIClient,
genai_manager: GenAIClientManager,
semantic_trigger_processor: SemanticTriggerProcessor | None,
):
super().__init__(config, metrics, None)
@ -49,7 +49,7 @@ class ObjectDescriptionProcessor(PostProcessorApi):
self.embeddings = embeddings
self.requestor = requestor
self.metrics = metrics
self.genai_client = client
self.genai_manager = genai_manager
self.semantic_trigger_processor = semantic_trigger_processor
self.tracked_events: dict[str, list[Any]] = {}
self.early_request_sent: dict[str, bool] = {}
@ -198,6 +198,9 @@ class ObjectDescriptionProcessor(PostProcessorApi):
if data_type != PostProcessDataEnum.tracked_object:
return
if self.genai_manager.description_client is None:
return
state: str | None = frame_data.get("state", None)
if state is not None:
@ -329,7 +332,12 @@ class ObjectDescriptionProcessor(PostProcessorApi):
"""Embed the description for an event."""
start = datetime.datetime.now().timestamp()
camera_config = self.config.cameras[str(event.camera)]
description = self.genai_client.generate_object_description(
client = self.genai_manager.description_client
if client is None:
return
description = client.generate_object_description(
camera_config, thumbnails, event
)

View File

@ -19,9 +19,15 @@ from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.config.camera import CameraConfig
from frigate.config.camera.review import GenAIReviewConfig, ImageSourceEnum
from frigate.const import CACHE_DIR, CLIPS_DIR, UPDATE_REVIEW_DESCRIPTION
from frigate.const import (
ATTRIBUTE_LABEL_DISPLAY_MAP,
CACHE_DIR,
CLIPS_DIR,
UPDATE_REVIEW_DESCRIPTION,
)
from frigate.data_processing.types import PostProcessDataEnum
from frigate.genai import GenAIClient
from frigate.genai.manager import GenAIClientManager
from frigate.models import Recordings, ReviewSegment
from frigate.util.builtin import EventsPerSecond, InferenceSpeed
from frigate.util.image import get_image_from_recording
@ -41,12 +47,12 @@ class ReviewDescriptionProcessor(PostProcessorApi):
config: FrigateConfig,
requestor: InterProcessRequestor,
metrics: DataProcessorMetrics,
client: GenAIClient,
genai_manager: GenAIClientManager,
):
super().__init__(config, metrics, None)
self.requestor = requestor
self.metrics = metrics
self.genai_client = client
self.genai_manager = genai_manager
self.review_desc_speed = InferenceSpeed(self.metrics.review_desc_speed)
self.review_desc_dps = EventsPerSecond()
self.review_desc_dps.start()
@ -63,7 +69,12 @@ class ReviewDescriptionProcessor(PostProcessorApi):
Estimates ~1 token per 1250 pixels. Targets 98% context utilization with safety margin.
Capped at 20 frames.
"""
context_size = self.genai_client.get_context_size()
client = self.genai_manager.description_client
if client is None:
return 3
context_size = client.get_context_size()
camera_config = self.config.cameras[camera]
detect_width = camera_config.detect.width
@ -111,6 +122,9 @@ class ReviewDescriptionProcessor(PostProcessorApi):
if data_type != PostProcessDataEnum.review:
return
if self.genai_manager.description_client is None:
return
camera = data["after"]["camera"]
camera_config = self.config.cameras[camera]
@ -200,7 +214,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
target=run_analysis,
args=(
self.requestor,
self.genai_client,
self.genai_manager.description_client,
self.review_desc_speed,
camera_config,
final_data,
@ -316,7 +330,12 @@ class ReviewDescriptionProcessor(PostProcessorApi):
os.path.join(CLIPS_DIR, "genai-requests", f"{start_ts}-{end_ts}")
).mkdir(parents=True, exist_ok=True)
return self.genai_client.generate_review_summary(
client = self.genai_manager.description_client
if client is None:
return None
return client.generate_review_summary(
start_ts,
end_ts,
events_with_context,
@ -542,10 +561,11 @@ def run_analysis(
if "-verified" in label:
continue
elif label in labelmap_objects:
object_type = titlecase(label.replace("_", " "))
object_type = label.replace("_", " ")
if label in attribute_labels:
unified_objects.append(f"{object_type} (delivery/service)")
display_name = ATTRIBUTE_LABEL_DISPLAY_MAP.get(label, object_type)
unified_objects.append(f"{display_name} (delivery/service)")
else:
unified_objects.append(object_type)

View File

@ -1,8 +1,12 @@
"""Local only processors for handling real time object processing."""
import logging
import threading
from abc import ABC, abstractmethod
from typing import Any
from collections import deque
from concurrent.futures import Future
from queue import Empty, Full, Queue
from typing import Any, Callable
import numpy as np
@ -74,3 +78,123 @@ class RealTimeProcessorApi(ABC):
payload: The updated configuration object.
"""
pass
def drain_results(self) -> list[dict[str, Any]]:
    """Collect staged results that require IPC side-effects.

    Deferred processors accumulate results on a worker thread; the
    maintainer polls this method each loop iteration and performs the
    actual publishes on the main thread. Synchronous processors never
    stage anything, so this default implementation yields no results.
    """
    nothing_pending: list[dict[str, Any]] = []
    return nothing_pending
def shutdown(self) -> None:
    """Stop background work and release resources.

    Invoked when the processor is removed or the maintainer is shutting
    down. Synchronous processors own no background resources, so the
    default implementation deliberately does nothing.
    """
    return None
class DeferredRealtimeProcessorApi(RealTimeProcessorApi):
    """Base class for processors that offload heavy work to a background thread.

    Subclasses implement:
      - process_frame(): do cheap gating + crop + copy, then call _enqueue_task()
      - _process_task(task): heavy work (inference, consensus) on the worker thread
      - handle_request(): optionally use _enqueue_request() for sync request/response
      - expire_object(): call _enqueue_task() with a control message

    The worker thread owns all processor state. No locks are needed because
    only the worker mutates state. Results that need IPC are placed in
    _pending_results via _emit_result(), and the maintainer drains them
    each loop iteration.
    """

    def __init__(
        self,
        config: FrigateConfig,
        metrics: DataProcessorMetrics,
        max_queue: int = 8,
    ) -> None:
        super().__init__(config, metrics)
        # Bounded queue: producers drop rather than block when it fills
        # (see _enqueue_task), keeping frame processing responsive.
        self._task_queue: Queue = Queue(maxsize=max_queue)
        # Results staged by the worker for the maintainer to publish.
        self._pending_results: deque[dict[str, Any]] = deque()
        # Guards _pending_results — the only state shared across threads.
        self._results_lock = threading.Lock()
        self._stop_event = threading.Event()
        # Daemon thread so a stuck worker cannot block interpreter exit.
        self._worker = threading.Thread(
            target=self._drain_loop,
            daemon=True,
            name=f"{type(self).__name__}_worker",
        )
        self._worker.start()

    def _drain_loop(self) -> None:
        """Worker thread main loop — drains the task queue until stopped."""
        while not self._stop_event.is_set():
            try:
                # Short timeout so the stop event is observed promptly.
                task = self._task_queue.get(timeout=0.5)
            except Empty:
                continue
            if (
                isinstance(task, tuple)
                and len(task) == 2
                and isinstance(task[1], Future)
            ):
                # Request/response: (callable_and_args, future)
                (func, args), future = task
                try:
                    result = func(args)
                    future.set_result(result)
                except Exception as e:
                    # Surface the failure to the caller blocked in
                    # _enqueue_request instead of killing the worker.
                    future.set_exception(e)
            else:
                try:
                    self._process_task(task)
                except Exception:
                    # One bad task must not terminate the drain loop.
                    logger.exception("Error processing deferred task")

    def _enqueue_task(self, task: Any) -> bool:
        """Enqueue a task for the worker. Returns False if queue is full (dropped)."""
        try:
            self._task_queue.put_nowait(task)
            return True
        except Full:
            logger.debug("Deferred processor queue full, dropping task")
            return False

    def _enqueue_request(self, func: Callable, args: Any, timeout: float = 10.0) -> Any:
        """Enqueue a request and block until the worker returns a result.

        NOTE(review): must not be called from the worker thread itself —
        it would wait on a future only that same thread can resolve.
        """
        future: Future = Future()
        self._task_queue.put(((func, args), future), timeout=timeout)
        return future.result(timeout=timeout)

    def _emit_result(self, result: dict[str, Any]) -> None:
        """Called by the worker thread to stage a result for the maintainer."""
        with self._results_lock:
            self._pending_results.append(result)

    def drain_results(self) -> list[dict[str, Any]]:
        """Called by the maintainer on the main thread to collect pending results."""
        with self._results_lock:
            # Snapshot-and-clear under the lock so no result is lost or
            # delivered twice.
            results = list(self._pending_results)
            self._pending_results.clear()
        return results

    def shutdown(self) -> None:
        """Signal the worker to stop and wait for it to finish."""
        self._stop_event.set()
        # Bounded join: the worker polls the queue with a 0.5s timeout, so
        # it should observe the stop event well within 5 seconds.
        self._worker.join(timeout=5.0)

    @abstractmethod
    def _process_task(self, task: Any) -> None:
        """Process a single task on the worker thread.

        Subclasses implement inference, consensus, training image saves here.
        Call _emit_result() to stage results for the maintainer to publish.
        """
        pass

View File

@ -1,7 +1,6 @@
"""Real time processor that works with classification tflite models."""
import datetime
import json
import logging
import os
from typing import Any
@ -10,25 +9,18 @@ import cv2
import numpy as np
from frigate.comms.embeddings_updater import EmbeddingsRequestEnum
from frigate.comms.event_metadata_updater import (
EventMetadataPublisher,
EventMetadataTypeEnum,
)
from frigate.comms.event_metadata_updater import EventMetadataPublisher
from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.config.classification import (
CustomClassificationConfig,
ObjectClassificationType,
)
from frigate.config.classification import CustomClassificationConfig
from frigate.const import CLIPS_DIR, MODEL_CACHE_DIR
from frigate.log import suppress_stderr_during
from frigate.types import TrackedObjectUpdateTypesEnum
from frigate.util.builtin import EventsPerSecond, InferenceSpeed, load_labels
from frigate.util.image import calculate_region
from frigate.util.object import box_overlaps
from ..types import DataProcessorMetrics
from .api import RealTimeProcessorApi
from .api import DeferredRealtimeProcessorApi
try:
from tflite_runtime.interpreter import Interpreter
@ -40,7 +32,7 @@ logger = logging.getLogger(__name__)
MAX_OBJECT_CLASSIFICATIONS = 16
class CustomStateClassificationProcessor(RealTimeProcessorApi):
class CustomStateClassificationProcessor(DeferredRealtimeProcessorApi):
def __init__(
self,
config: FrigateConfig,
@ -48,7 +40,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
requestor: InterProcessRequestor,
metrics: DataProcessorMetrics,
):
super().__init__(config, metrics)
super().__init__(config, metrics, max_queue=4)
self.model_config = model_config
if not self.model_config.name:
@ -259,14 +251,34 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
)
return
frame = rgb[y1:y2, x1:x2]
cropped_frame = rgb[y1:y2, x1:x2]
try:
resized_frame = cv2.resize(frame, (224, 224))
resized_frame = cv2.resize(cropped_frame, (224, 224))
except Exception:
logger.warning("Failed to resize image for state classification")
return
# Copy for training image saves on worker thread
crop_bgr = cv2.cvtColor(cropped_frame, cv2.COLOR_RGB2BGR)
self._enqueue_task(("classify", camera, now, resized_frame, crop_bgr))
def _process_task(self, task: Any) -> None:
    """Dispatch one queued worker task: a model reload or a classify request."""
    if task[0] == "reload":
        self.__build_detector()
    elif task[0] == "classify":
        _, camera, timestamp, resized_frame, crop_bgr = task
        self._classify_state(camera, timestamp, resized_frame, crop_bgr)
def _classify_state(
self,
camera: str,
timestamp: float,
resized_frame: np.ndarray,
crop_bgr: np.ndarray,
) -> None:
if self.interpreter is None:
# When interpreter is None, always save (score is 0.0, which is < 1.0)
if self._should_save_image(camera, "unknown", 0.0):
@ -277,15 +289,18 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
)
write_classification_attempt(
self.train_dir,
cv2.cvtColor(frame, cv2.COLOR_RGB2BGR),
crop_bgr,
"none-none",
now,
timestamp,
"unknown",
0.0,
max_files=save_attempts,
)
return
if not self.tensor_input_details or not self.tensor_output_details:
return
input = np.expand_dims(resized_frame, axis=0)
self.interpreter.set_tensor(self.tensor_input_details[0]["index"], input)
self.interpreter.invoke()
@ -298,7 +313,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
)
best_id = int(np.argmax(probs))
score = round(probs[best_id], 2)
self.__update_metrics(datetime.datetime.now().timestamp() - now)
self.__update_metrics(datetime.datetime.now().timestamp() - timestamp)
detected_state = self.labelmap[best_id]
@ -310,9 +325,9 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
)
write_classification_attempt(
self.train_dir,
cv2.cvtColor(frame, cv2.COLOR_RGB2BGR),
crop_bgr,
"none-none",
now,
timestamp,
detected_state,
score,
max_files=save_attempts,
@ -327,9 +342,14 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
verified_state = self.verify_state_change(camera, detected_state)
if verified_state is not None:
self.requestor.send_data(
f"{camera}/classification/{self.model_config.name}",
verified_state,
self._emit_result(
{
"type": "classification",
"processor": "state",
"model_name": self.model_config.name,
"camera": camera,
"state": verified_state,
}
)
def handle_request(
@ -337,6 +357,8 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
) -> dict[str, Any] | None:
if topic == EmbeddingsRequestEnum.reload_classification_model.value:
if request_data.get("model_name") == self.model_config.name:
def _do_reload(data: dict[str, Any]) -> dict[str, Any]:
self.__build_detector()
logger.info(
f"Successfully loaded updated model for {self.model_config.name}"
@ -345,6 +367,9 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
"success": True,
"message": f"Loaded {self.model_config.name} model.",
}
result: dict[str, Any] = self._enqueue_request(_do_reload, request_data)
return result
else:
return None
else:
@ -354,7 +379,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
pass
class CustomObjectClassificationProcessor(RealTimeProcessorApi):
class CustomObjectClassificationProcessor(DeferredRealtimeProcessorApi):
def __init__(
self,
config: FrigateConfig,
@ -363,7 +388,7 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
requestor: InterProcessRequestor,
metrics: DataProcessorMetrics,
):
super().__init__(config, metrics)
super().__init__(config, metrics, max_queue=8)
self.model_config = model_config
if not self.model_config.name:
@ -536,18 +561,41 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
)
rgb = cv2.cvtColor(frame, cv2.COLOR_YUV2RGB_I420)
crop = rgb[
y:y2,
x:x2,
]
crop = rgb[y:y2, x:x2]
if crop.shape != (224, 224):
try:
resized_crop = cv2.resize(crop, (224, 224))
except Exception:
logger.warning("Failed to resize image for state classification")
logger.warning("Failed to resize image for object classification")
return
# Copy crop for training images (will be used on worker thread)
crop_bgr = cv2.cvtColor(crop, cv2.COLOR_RGB2BGR)
self._enqueue_task(
("classify", object_id, obj_data["camera"], now, resized_crop, crop_bgr)
)
def _process_task(self, task: Any) -> None:
    """Dispatch one queued worker task: classify, expire, or reload."""
    kind = task[0]
    if kind == "classify":
        (_, object_id, camera, timestamp, resized_crop, crop_bgr) = task
        self._classify_object(object_id, camera, timestamp, resized_crop, crop_bgr)
    elif kind == "expire":
        # Drop any accumulated consensus history for the expired object;
        # pop with a default is a no-op when the id was never tracked.
        self.classification_history.pop(task[1], None)
    elif kind == "reload":
        self.__build_detector()
def _classify_object(
self,
object_id: str,
camera: str,
timestamp: float,
resized_crop: np.ndarray,
crop_bgr: np.ndarray,
) -> None:
if self.interpreter is None:
save_attempts = (
self.model_config.save_attempts
@ -556,9 +604,9 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
)
write_classification_attempt(
self.train_dir,
cv2.cvtColor(crop, cv2.COLOR_RGB2BGR),
crop_bgr,
object_id,
now,
timestamp,
"unknown",
0.0,
max_files=save_attempts,
@ -569,7 +617,10 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
if object_id not in self.classification_history:
self.classification_history[object_id] = []
self.classification_history[object_id].append(("unknown", 0.0, now))
self.classification_history[object_id].append(("unknown", 0.0, timestamp))
return
if not self.tensor_input_details or not self.tensor_output_details:
return
input = np.expand_dims(resized_crop, axis=0)
@ -584,7 +635,7 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
)
best_id = int(np.argmax(probs))
score = round(probs[best_id], 2)
self.__update_metrics(datetime.datetime.now().timestamp() - now)
self.__update_metrics(datetime.datetime.now().timestamp() - timestamp)
save_attempts = (
self.model_config.save_attempts
@ -593,9 +644,9 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
)
write_classification_attempt(
self.train_dir,
cv2.cvtColor(crop, cv2.COLOR_RGB2BGR),
crop_bgr,
object_id,
now,
timestamp,
self.labelmap[best_id],
score,
max_files=save_attempts,
@ -610,76 +661,39 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
sub_label = self.labelmap[best_id]
logger.debug(
f"{self.model_config.name}: Object {object_id} (label={obj_data['label']}) passed threshold with sub_label={sub_label}, score={score}"
f"{self.model_config.name}: Object {object_id} passed threshold with sub_label={sub_label}, score={score}"
)
consensus_label, consensus_score = self.get_weighted_score(
object_id, sub_label, score, now
object_id, sub_label, score, timestamp
)
logger.debug(
f"{self.model_config.name}: get_weighted_score returned consensus_label={consensus_label}, consensus_score={consensus_score} for {object_id}"
)
if consensus_label is not None:
camera = obj_data["camera"]
logger.debug(
f"{self.model_config.name}: Publishing sub_label={consensus_label} for {obj_data['label']} object {object_id} on {camera}"
)
if (
self.model_config.object_config.classification_type
== ObjectClassificationType.sub_label
):
self.sub_label_publisher.publish(
(object_id, consensus_label, consensus_score),
EventMetadataTypeEnum.sub_label,
)
self.requestor.send_data(
"tracked_object_update",
json.dumps(
if consensus_label is not None and self.model_config.object_config is not None:
self._emit_result(
{
"type": TrackedObjectUpdateTypesEnum.classification,
"id": object_id,
"type": "classification",
"processor": "object",
"model_name": self.model_config.name,
"classification_type": self.model_config.object_config.classification_type,
"object_id": object_id,
"camera": camera,
"timestamp": now,
"model": self.model_config.name,
"sub_label": consensus_label,
"timestamp": timestamp,
"label": consensus_label,
"score": consensus_score,
}
),
)
elif (
self.model_config.object_config.classification_type
== ObjectClassificationType.attribute
):
self.sub_label_publisher.publish(
(
object_id,
self.model_config.name,
consensus_label,
consensus_score,
),
EventMetadataTypeEnum.attribute.value,
)
self.requestor.send_data(
"tracked_object_update",
json.dumps(
{
"type": TrackedObjectUpdateTypesEnum.classification,
"id": object_id,
"camera": camera,
"timestamp": now,
"model": self.model_config.name,
"attribute": consensus_label,
"score": consensus_score,
}
),
)
def handle_request(self, topic: str, request_data: dict) -> dict | None:
def handle_request(
self, topic: str, request_data: dict[str, Any]
) -> dict[str, Any] | None:
if topic == EmbeddingsRequestEnum.reload_classification_model.value:
if request_data.get("model_name") == self.model_config.name:
def _do_reload(data: dict[str, Any]) -> dict[str, Any]:
self.__build_detector()
logger.info(
f"Successfully loaded updated model for {self.model_config.name}"
@ -688,14 +702,16 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
"success": True,
"message": f"Loaded {self.model_config.name} model.",
}
result: dict[str, Any] = self._enqueue_request(_do_reload, request_data)
return result
else:
return None
else:
return None
def expire_object(self, object_id: str, camera: str) -> None:
if object_id in self.classification_history:
self.classification_history.pop(object_id)
self._enqueue_task(("expire", object_id))
def write_classification_attempt(

View File

@ -2,6 +2,7 @@
import base64
import datetime
import json
import logging
import threading
from multiprocessing.synchronize import Event as MpEvent
@ -33,6 +34,7 @@ from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdateSubscriber,
)
from frigate.config.classification import ObjectClassificationType
from frigate.data_processing.common.license_plate.model import (
LicensePlateModelRunner,
)
@ -61,6 +63,7 @@ from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.events.types import EventTypeEnum, RegenerateDescriptionEnum
from frigate.genai import GenAIClientManager
from frigate.models import Event, Recordings, ReviewSegment, Trigger
from frigate.types import TrackedObjectUpdateTypesEnum
from frigate.util.builtin import serialize
from frigate.util.file import get_event_thumbnail_bytes
from frigate.util.image import SharedMemoryFrameManager
@ -92,6 +95,7 @@ class EmbeddingMaintainer(threading.Thread):
CameraConfigUpdateEnum.add,
CameraConfigUpdateEnum.remove,
CameraConfigUpdateEnum.object_genai,
CameraConfigUpdateEnum.review,
CameraConfigUpdateEnum.review_genai,
CameraConfigUpdateEnum.semantic_search,
],
@ -202,15 +206,13 @@ class EmbeddingMaintainer(threading.Thread):
# post processors
self.post_processors: list[PostProcessorApi] = []
if self.genai_manager.vision_client is not None and any(
c.review.genai.enabled_in_config for c in self.config.cameras.values()
):
if any(c.review.genai.enabled_in_config for c in self.config.cameras.values()):
self.post_processors.append(
ReviewDescriptionProcessor(
self.config,
self.requestor,
self.metrics,
self.genai_manager.vision_client,
self.genai_manager,
)
)
@ -248,16 +250,14 @@ class EmbeddingMaintainer(threading.Thread):
)
self.post_processors.append(semantic_trigger_processor)
if self.genai_manager.vision_client is not None and any(
c.objects.genai.enabled_in_config for c in self.config.cameras.values()
):
if any(c.objects.genai.enabled_in_config for c in self.config.cameras.values()):
self.post_processors.append(
ObjectDescriptionProcessor(
self.config,
self.embeddings,
self.requestor,
self.metrics,
self.genai_manager.vision_client,
self.genai_manager,
semantic_trigger_processor,
)
)
@ -277,10 +277,15 @@ class EmbeddingMaintainer(threading.Thread):
self._process_recordings_updates()
self._process_review_updates()
self._process_frame_updates()
self._process_deferred_results()
self._expire_dedicated_lpr()
self._process_finalized()
self._process_event_metadata()
# Shutdown deferred processors
for processor in self.realtime_processors:
processor.shutdown()
self.config_updater.stop()
self.enrichment_config_subscriber.stop()
self.event_subscriber.stop()
@ -305,6 +310,10 @@ class EmbeddingMaintainer(threading.Thread):
self._handle_custom_classification_update(topic, payload)
return
if topic == "config/genai":
self.config.genai = payload
self.genai_manager.update_config(self.config)
# Broadcast to all processors — each decides if the topic is relevant
for processor in self.realtime_processors:
processor.update_config(topic, payload)
@ -319,10 +328,9 @@ class EmbeddingMaintainer(threading.Thread):
model_name = topic.split("/")[-1]
if model_config is None:
self.realtime_processors = [
processor
for processor in self.realtime_processors
if not (
remaining = []
for processor in self.realtime_processors:
if (
isinstance(
processor,
(
@ -331,8 +339,11 @@ class EmbeddingMaintainer(threading.Thread):
),
)
and processor.model_config.name == model_name
)
]
):
processor.shutdown()
else:
remaining.append(processor)
self.realtime_processors = remaining
logger.info(
f"Successfully removed classification processor for model: {model_name}"
@ -700,6 +711,68 @@ class EmbeddingMaintainer(threading.Thread):
self.frame_manager.close(frame_name)
def _process_deferred_results(self) -> None:
    """Drain results from deferred processors and perform IPC side-effects."""
    for processor in self.realtime_processors:
        results = processor.drain_results()
        for result in results:
            # Only classification results are handled here.
            if result.get("type") != "classification":
                continue
            if result["processor"] == "state":
                # State classifiers publish the new state on a
                # per-camera/per-model topic.
                self.requestor.send_data(
                    f"{result['camera']}/classification/{result['model_name']}",
                    result["state"],
                )
            elif result["processor"] == "object":
                object_id = result["object_id"]
                camera = result["camera"]
                timestamp = result["timestamp"]
                model_name = result["model_name"]
                label = result["label"]
                score = result["score"]
                classification_type = result["classification_type"]
                if classification_type == ObjectClassificationType.sub_label:
                    # NOTE(review): this branch publishes the enum member
                    # (EventMetadataTypeEnum.sub_label) while the attribute
                    # branch below publishes `.value` — confirm the
                    # subscriber accepts both forms.
                    self.event_metadata_publisher.publish(
                        (object_id, label, score),
                        EventMetadataTypeEnum.sub_label,
                    )
                    self.requestor.send_data(
                        "tracked_object_update",
                        json.dumps(
                            {
                                "type": TrackedObjectUpdateTypesEnum.classification,
                                "id": object_id,
                                "camera": camera,
                                "timestamp": timestamp,
                                "model": model_name,
                                "sub_label": label,
                                "score": score,
                            }
                        ),
                    )
                elif classification_type == ObjectClassificationType.attribute:
                    self.event_metadata_publisher.publish(
                        (object_id, model_name, label, score),
                        EventMetadataTypeEnum.attribute.value,
                    )
                    self.requestor.send_data(
                        "tracked_object_update",
                        json.dumps(
                            {
                                "type": TrackedObjectUpdateTypesEnum.classification,
                                "id": object_id,
                                "camera": camera,
                                "timestamp": timestamp,
                                "model": model_name,
                                "attribute": label,
                                "score": score,
                            }
                        ),
                    )
def _embed_thumbnail(self, event_id: str, thumbnail: bytes) -> None:
"""Embed the thumbnail for an event."""
if not self.config.semantic_search.enabled:

View File

@ -106,8 +106,8 @@ When forming your description:
## Response Field Guidelines
Respond with a JSON object matching the provided schema. Field-specific guidance:
- `scene`: Describe how the sequence begins, then the progression of events all significant movements and actions in order. For example, if a vehicle arrives and then a person exits, describe both sequentially. Always use subject names from "Objects in Scene" do not replace named subjects with generic terms like "a person" or "the individual". Your description should align with and support the threat level you assign.
- `title`: Characterize **what took place and where** interpret the overall purpose or outcome, do not simply compress the scene description into fewer words. Include the relevant location (zone, area, or entry point). Always include subject names from "Objects in Scene" do not replace named subjects with generic terms. No editorial qualifiers like "routine" or "suspicious."
- `scene`: Describe how the sequence begins, then the progression of events all significant movements and actions in order. For example, if a vehicle arrives and then a person exits, describe both sequentially. For named subjects (those with a `` separator in "Objects in Scene"), always use their name do not replace them with generic terms. For unnamed objects (e.g., "person", "car"), refer to them naturally with articles (e.g., "a person", "the car"). Your description should align with and support the threat level you assign.
- `title`: Characterize **what took place and where** interpret the overall purpose or outcome, do not simply compress the scene description into fewer words. Include the relevant location (zone, area, or entry point). For named subjects, always use their name. For unnamed objects, refer to them naturally with articles. No editorial qualifiers like "routine" or "suspicious."
- `potential_threat_level`: Must be consistent with your scene description and the activity patterns above.
{get_concern_prompt()}
@ -190,6 +190,7 @@ Each line represents a detection state, not necessarily unique individuals. The
if any("" in obj for obj in review_data["unified_objects"]):
metadata.potential_threat_level = 0
metadata.title = metadata.title[0].upper() + metadata.title[1:]
metadata.time = review_data["start"]
return metadata
except Exception as e:
@ -199,6 +200,9 @@ Each line represents a detection state, not necessarily unique individuals. The
)
return None
else:
logger.debug(
f"Invalid response received from GenAI provider for review description on {review_data['camera']}. Response: {response}",
)
return None
def generate_review_summary(
@ -320,6 +324,22 @@ Guidelines:
"""Submit a request to the provider."""
return None
@property
def supports_vision(self) -> bool:
    """Whether the model supports vision/image input.

    Defaults to True for cloud providers. Providers that can detect
    capability at runtime (e.g. llama.cpp) should override this.
    """
    return True
def list_models(self) -> list[str]:
    """Return the list of model names available from this provider.

    Providers should override this to query their backend; the base
    implementation returns an empty list.
    """
    return []
def get_context_size(self) -> int:
    """Get the context window size for this provider in tokens.

    Base default is 4096; providers override with backend-specific values.
    """
    return 4096

View File

@ -82,6 +82,14 @@ class OpenAIClient(GenAIClient):
return str(result.choices[0].message.content.strip())
return None
def list_models(self) -> list[str]:
    """Return the IDs of all models exposed by Azure OpenAI, sorted alphabetically."""
    try:
        available = [model.id for model in self.provider.models.list().data]
        return sorted(available)
    except Exception as e:
        logger.warning("Failed to list Azure OpenAI models: %s", e)
        return []
def get_context_size(self) -> int:
    """Get the context window size for Azure OpenAI.

    Hard-coded to 128k tokens — NOTE(review): confirm this matches the
    deployed model, since Azure deployments can have smaller contexts.
    """
    return 128000

View File

@ -87,6 +87,14 @@ class GeminiClient(GenAIClient):
return None
return description
def list_models(self) -> list[str]:
    """Return available model names from Gemini, sorted alphabetically."""
    try:
        names = [model.name or "" for model in self.provider.models.list()]
        return sorted(names)
    except Exception as e:
        logger.warning("Failed to list Gemini models: %s", e)
        return []
def get_context_size(self) -> int:
"""Get the context window size for Gemini."""
# Gemini Pro Vision has a 1M token context window

View File

@ -38,18 +38,122 @@ class LlamaCppClient(GenAIClient):
provider: str | None # base_url
provider_options: dict[str, Any]
_context_size: int | None
_supports_vision: bool
_supports_audio: bool
_supports_tools: bool
def _init_provider(self) -> str | None:
    """Initialize the client and query model metadata from the server.

    Returns the normalized base URL on success, or None when no base URL
    is configured or the configured model is known to be absent from the
    server. (Fix: removes interleaved old-version diff residue — the stale
    one-line docstring and dangling `return (` — that made this method
    syntactically invalid.)
    """
    self.provider_options = {
        **self.genai_config.provider_options,
    }
    # Capability defaults; overwritten below if the /props query succeeds.
    self._context_size = None
    self._supports_vision = False
    self._supports_audio = False
    self._supports_tools = False

    base_url = (
        self.genai_config.base_url.rstrip("/")
        if self.genai_config.base_url
        else None
    )
    if base_url is None:
        return None

    configured_model = self.genai_config.model

    # Query /v1/models to validate the configured model exists
    try:
        response = requests.get(
            f"{base_url}/v1/models",
            timeout=10,
        )
        response.raise_for_status()
        models_data = response.json()

        model_found = False
        for model in models_data.get("data", []):
            # A model matches by primary id or by any of its aliases.
            model_ids = {model.get("id")}
            for alias in model.get("aliases", []):
                model_ids.add(alias)
            if configured_model in model_ids:
                model_found = True
                break

        if not model_found:
            available = []
            for m in models_data.get("data", []):
                available.append(m.get("id", "unknown"))
                for alias in m.get("aliases", []):
                    available.append(alias)
            logger.error(
                "Model '%s' not found on llama.cpp server. Available models: %s",
                configured_model,
                available,
            )
            return None
    except Exception as e:
        # Validation is best-effort; an unreachable /v1/models endpoint
        # does not prevent initialization.
        logger.warning(
            "Failed to query llama.cpp /v1/models endpoint: %s. "
            "Model validation skipped.",
            e,
        )

    # Query /props for context size, modalities, and tool support.
    # The standard /props?model=<name> endpoint works with llama-server.
    # If it fails, try the llama-swap per-model passthrough endpoint which
    # returns props for a specific model without requiring it to be loaded.
    try:
        try:
            response = requests.get(
                f"{base_url}/props",
                params={"model": configured_model},
                timeout=10,
            )
            response.raise_for_status()
            props = response.json()
        except Exception:
            response = requests.get(
                f"{base_url}/upstream/{configured_model}/props",
                timeout=10,
            )
            response.raise_for_status()
            props = response.json()

        # Context size from server runtime config
        default_settings = props.get("default_generation_settings", {})
        n_ctx = default_settings.get("n_ctx")
        if n_ctx:
            self._context_size = int(n_ctx)

        # Modalities (vision, audio)
        modalities = props.get("modalities", {})
        self._supports_vision = modalities.get("vision", False)
        self._supports_audio = modalities.get("audio", False)

        # Tool support from chat template capabilities
        chat_caps = props.get("chat_template_caps", {})
        self._supports_tools = chat_caps.get("supports_tools", False)

        logger.info(
            "llama.cpp model '%s' initialized — context: %s, vision: %s, audio: %s, tools: %s",
            configured_model,
            self._context_size or "unknown",
            self._supports_vision,
            self._supports_audio,
            self._supports_tools,
        )
    except Exception as e:
        logger.warning(
            "Failed to query llama.cpp /props endpoint: %s. "
            "Using defaults for context size and capabilities.",
            e,
        )

    return base_url
def _send(
self,
prompt: str,
@ -117,9 +221,56 @@ class LlamaCppClient(GenAIClient):
logger.warning("llama.cpp returned an error: %s", str(e))
return None
@property
def supports_vision(self) -> bool:
    """Whether the loaded model supports vision/image input.

    Populated from the server's /props "modalities" response during
    _init_provider; False if that query failed.
    """
    return self._supports_vision
@property
def supports_audio(self) -> bool:
    """Whether the loaded model supports audio input.

    Populated from the server's /props "modalities" response during
    _init_provider; False if that query failed.
    """
    return self._supports_audio
@property
def supports_tools(self) -> bool:
    """Whether the loaded model supports tool/function calling.

    Populated from the server's /props "chat_template_caps" response
    during _init_provider; False if that query failed.
    """
    return self._supports_tools
def list_models(self) -> list[str]:
    """Return available model IDs from the llama.cpp server."""
    # Prefer the base URL resolved at init (stored in self.provider); fall
    # back to the raw config value so listing still works even when
    # _init_provider returned None (e.g. configured model not found).
    base_url = self.provider or (
        self.genai_config.base_url.rstrip("/")
        if self.genai_config.base_url
        else None
    )
    if base_url is None:
        return []
    try:
        response = requests.get(f"{base_url}/v1/models", timeout=10)
        response.raise_for_status()
        models = []
        for m in response.json().get("data", []):
            # Each entry contributes its primary id plus any aliases.
            models.append(m.get("id", "unknown"))
            for alias in m.get("aliases", []):
                models.append(alias)
        return sorted(models)
    except Exception as e:
        logger.warning("Failed to list llama.cpp models: %s", e)
        return []
def get_context_size(self) -> int:
    """Get the context window size for llama.cpp.

    Resolution order:
    1. provider_options["context_size"] (user override)
    2. Value queried from the llama.cpp server at init (/props n_ctx)
    3. Default fallback of 4096

    (Fix: removes the stale one-line implementation left interleaved as
    diff residue, which duplicated the old `provider_options.get(...)`
    body and broke the method.)
    """
    if "context_size" in self.provider_options:
        return int(self.provider_options["context_size"])
    if self._context_size is not None:
        return self._context_size
    return 4096
def _build_payload(
self,

View File

@ -1,15 +1,15 @@
"""GenAI client manager for Frigate.
Manages GenAI provider clients from Frigate config. Configuration is read only
in _update_config(); no other code should read config.genai. Exposes clients
by role: tool_client, vision_client, embeddings_client.
Manages GenAI provider clients from Frigate config. Clients are created lazily
on first access so that providers whose roles are never used (e.g. chat when
no chat feature is active) are never initialized.
"""
import logging
from typing import TYPE_CHECKING, Optional
from frigate.config import FrigateConfig
from frigate.config.camera.genai import GenAIRoleEnum
from frigate.config.camera.genai import GenAIConfig, GenAIRoleEnum
if TYPE_CHECKING:
from frigate.genai import GenAIClient
@ -21,68 +21,98 @@ class GenAIClientManager:
"""Manages GenAI provider clients from Frigate config."""
def __init__(self, config: FrigateConfig) -> None:
self._tool_client: Optional[GenAIClient] = None
self._vision_client: Optional[GenAIClient] = None
self._embeddings_client: Optional[GenAIClient] = None
self._configs: dict[str, GenAIConfig] = {}
self._role_map: dict[GenAIRoleEnum, str] = {}
self._clients: dict[str, "GenAIClient"] = {}
self.update_config(config)
def update_config(self, config: FrigateConfig) -> None:
"""Build role clients from current Frigate config.genai.
"""Store provider configs and build the role→name mapping.
Called from __init__ and can be called again when config is reloaded.
Each role (tools, vision, embeddings) gets the client for the provider
that has that role in its roles list.
Clients are not created here; they are instantiated lazily on first
access via a role property or list_models().
"""
from frigate.genai import PROVIDERS, load_providers
self._tool_client = None
self._vision_client = None
self._embeddings_client = None
self._configs = {}
self._role_map = {}
self._clients = {}
if not config.genai:
return
load_providers()
for _name, genai_cfg in config.genai.items():
for name, genai_cfg in config.genai.items():
if not genai_cfg.provider:
continue
provider_cls = PROVIDERS.get(genai_cfg.provider)
if not provider_cls:
if genai_cfg.provider not in PROVIDERS:
logger.warning(
"Unknown GenAI provider %s in config, skipping.",
genai_cfg.provider,
)
continue
self._configs[name] = genai_cfg
for role in genai_cfg.roles:
self._role_map[role] = name
def _get_client(self, name: str) -> "Optional[GenAIClient]":
    """Return the client for *name*, creating it on first access.

    Returns None when the name is unknown, the entry has no provider,
    the provider is not registered, or the provider constructor raises.
    Successful creations are cached in self._clients.

    (Fix: removes interleaved old-version diff residue — the stray
    `continue` and the removed role-assignment loop — that made this
    method syntactically invalid.)
    """
    if name in self._clients:
        return self._clients[name]

    from frigate.genai import PROVIDERS

    genai_cfg = self._configs.get(name)
    if not genai_cfg:
        return None

    if not genai_cfg.provider:
        return None

    provider_cls = PROVIDERS.get(genai_cfg.provider)
    if not provider_cls:
        return None

    try:
        client: "GenAIClient" = provider_cls(genai_cfg)
    except Exception as e:
        logger.exception(
            "Failed to create GenAI client for provider %s: %s",
            genai_cfg.provider,
            e,
        )
        return None

    self._clients[name] = client
    return client
@property
def tool_client(self) -> "Optional[GenAIClient]":
"""Client configured for the tools role (e.g. chat with function calling)."""
return self._tool_client
def chat_client(self) -> "Optional[GenAIClient]":
"""Client configured for the chat role (e.g. chat with function calling)."""
name = self._role_map.get(GenAIRoleEnum.chat)
return self._get_client(name) if name else None
@property
def vision_client(self) -> "Optional[GenAIClient]":
"""Client configured for the vision role (e.g. review descriptions, object descriptions)."""
return self._vision_client
def description_client(self) -> "Optional[GenAIClient]":
"""Client configured for the descriptions role (e.g. review descriptions, object descriptions)."""
name = self._role_map.get(GenAIRoleEnum.descriptions)
return self._get_client(name) if name else None
@property
def embeddings_client(self) -> "Optional[GenAIClient]":
    """Client configured for the embeddings role, created lazily.

    (Fix: removes the stale `return self._embeddings_client` line left
    interleaved as diff residue from the pre-lazy implementation.)
    """
    name = self._role_map.get(GenAIRoleEnum.embeddings)
    return self._get_client(name) if name else None
def list_models(self) -> dict[str, list[str]]:
    """Return available models keyed by config entry name."""
    result: dict[str, list[str]] = {}
    for name in self._configs:
        client = self._get_client(name)
        # Entries whose client failed to initialize are skipped entirely
        # rather than reported with an empty list.
        if client:
            result[name] = client.list_models()
    return result

View File

@ -113,6 +113,15 @@ class OllamaClient(GenAIClient):
schema = response_format.get("json_schema", {}).get("schema")
if schema:
ollama_options["format"] = self._clean_schema_for_ollama(schema)
logger.debug(
"Ollama generate request: model=%s, prompt_len=%s, image_count=%s, "
"has_format=%s, options=%s",
self.genai_config.model,
len(prompt),
len(images) if images else 0,
"format" in ollama_options,
{k: v for k, v in ollama_options.items() if k != "format"},
)
result = self.provider.generate(
self.genai_config.model,
prompt,
@ -120,9 +129,24 @@ class OllamaClient(GenAIClient):
**ollama_options,
)
logger.debug(
f"Ollama tokens used: eval_count={result.get('eval_count')}, prompt_eval_count={result.get('prompt_eval_count')}"
"Ollama generate response: done=%s, done_reason=%s, eval_count=%s, "
"prompt_eval_count=%s, response_len=%s",
result.get("done"),
result.get("done_reason"),
result.get("eval_count"),
result.get("prompt_eval_count"),
len(result.get("response", "") or ""),
)
return str(result["response"]).strip()
response_text = str(result["response"]).strip()
if not response_text:
logger.warning(
"Ollama returned a blank response for model %s (done_reason=%s, "
"eval_count=%s). Check model output, ensure thinking is disabled.",
self.genai_config.model,
result.get("done_reason"),
result.get("eval_count"),
)
return response_text
except (
TimeoutException,
ResponseError,
@ -132,6 +156,29 @@ class OllamaClient(GenAIClient):
logger.warning("Ollama returned an error: %s", str(e))
return None
def list_models(self) -> list[str]:
    """Return available model names from the Ollama server."""
    client = self.provider
    if client is None:
        # Provider init may have failed due to invalid model, but we can
        # still list available models with a fresh client.
        if not self.genai_config.base_url:
            return []
        try:
            client = ApiClient(
                host=self.genai_config.base_url, timeout=self.timeout
            )
        except Exception:
            return []
    try:
        response = client.list()
        # Each entry may expose its name under "name" or "model";
        # prefer "name" and fall back to "model".
        return sorted(
            m.get("name", m.get("model", "")) for m in response.get("models", [])
        )
    except Exception as e:
        logger.warning("Failed to list Ollama models: %s", e)
        return []
def get_context_size(self) -> int:
"""Get the context window size for Ollama."""
return int(

View File

@ -80,12 +80,36 @@ class OpenAIClient(GenAIClient):
and hasattr(result, "choices")
and len(result.choices) > 0
):
return str(result.choices[0].message.content.strip())
message = result.choices[0].message
content = message.content
if not content:
# When reasoning is enabled for some OpenAI backends the actual response
# is incorrectly placed in reasoning_content instead of content.
# This is buggy/incorrect behavior — reasoning should not be
# enabled for these models.
reasoning_content = getattr(message, "reasoning_content", None)
if reasoning_content:
logger.warning(
"Response content was empty but reasoning_content was provided; "
"reasoning appears to be enabled and should be disabled for this model."
)
content = reasoning_content
return str(content.strip()) if content else None
return None
except (TimeoutException, Exception) as e:
logger.warning("OpenAI returned an error: %s", str(e))
return None
def list_models(self) -> list[str]:
    """Return the IDs of all models exposed by the OpenAI-compatible API, sorted."""
    try:
        available = [model.id for model in self.provider.models.list().data]
        return sorted(available)
    except Exception as e:
        logger.warning("Failed to list OpenAI models: %s", e)
        return []
def get_context_size(self) -> int:
"""Get the context window size for OpenAI."""
if self.context_size is not None:

504
frigate/jobs/export.py Normal file
View File

@ -0,0 +1,504 @@
"""Export job management with queued background execution."""
import logging
import os
import threading
import time
from dataclasses import dataclass
from pathlib import Path
from queue import Full, Queue
from typing import Any, Callable, Optional
from peewee import DoesNotExist
from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.const import UPDATE_JOB_STATE
from frigate.jobs.job import Job
from frigate.models import Export
from frigate.record.export import PlaybackSourceEnum, RecordingExporter
from frigate.types import JobStatusTypesEnum
logger = logging.getLogger(__name__)
# Maximum number of jobs that can sit in the queue waiting to run.
# Prevents a runaway client from unbounded memory growth.
MAX_QUEUED_EXPORT_JOBS = 100
# Minimum interval between progress broadcasts. FFmpeg can emit progress
# events many times per second; we coalesce them so the WebSocket isn't
# flooded with redundant updates.
PROGRESS_BROADCAST_MIN_INTERVAL = 1.0
# Delay before removing a completed job from the in-memory map. Gives the
# frontend a chance to receive the final state via WebSocket before SWR
# polling takes over.
COMPLETED_JOB_CLEANUP_DELAY = 5.0
class ExportQueueFullError(RuntimeError):
    """Raised when the export queue is at capacity.

    Thrown by ExportJobManager.enqueue() when put_nowait() hits the
    bounded queue's limit (MAX_QUEUED_EXPORT_JOBS by default).
    """
@dataclass
class ExportJob(Job):
    """Job state for export operations."""

    # Discriminator used in aggregate job_state payloads.
    job_type: str = "export"
    # Camera the export covers.
    camera: str = ""
    # Optional user-facing export name.
    name: Optional[str] = None
    # Optional image path forwarded to RecordingExporter (internal only).
    image_path: Optional[str] = None
    # Case this export belongs to, if any.
    export_case_id: Optional[str] = None
    # Requested export window — presumably epoch seconds (cast to int in
    # run_job); TODO confirm units against callers.
    request_start_time: float = 0.0
    request_end_time: float = 0.0
    # Playback source, stored as the PlaybackSourceEnum value string.
    playback_source: str = PlaybackSourceEnum.recordings.value
    # Optional ffmpeg argument overrides (internal only).
    ffmpeg_input_args: Optional[str] = None
    ffmpeg_output_args: Optional[str] = None
    # Whether the exporter should fall back to CPU (internal only).
    cpu_fallback: bool = False
    # Progress fields updated by the exporter's on_progress callback.
    current_step: str = "queued"
    progress_percent: float = 0.0

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for API responses.

        Only exposes fields that are part of the public ExportJobModel schema.
        Internal execution details (image_path, ffmpeg args, cpu_fallback) are
        intentionally omitted so they don't leak through the API.
        """
        return {
            "id": self.id,
            "job_type": self.job_type,
            "status": self.status,
            "camera": self.camera,
            "name": self.name,
            "export_case_id": self.export_case_id,
            "request_start_time": self.request_start_time,
            "request_end_time": self.request_end_time,
            "start_time": self.start_time,
            "end_time": self.end_time,
            "error_message": self.error_message,
            "results": self.results,
            "current_step": self.current_step,
            "progress_percent": self.progress_percent,
        }
class ExportQueueWorker(threading.Thread):
    """Daemon thread that pulls export jobs off the shared queue and runs them."""

    def __init__(self, manager: "ExportJobManager", worker_index: int) -> None:
        super().__init__(daemon=True, name=f"export_queue_worker_{worker_index}")
        self.manager = manager

    def run(self) -> None:
        # Loop forever: block on the queue, execute each job, and always
        # mark the queue item done so unfinished_tasks accounting stays
        # correct even when run_job raises.
        while True:
            next_job = self.manager.queue.get()
            try:
                self.manager.run_job(next_job)
            except Exception:
                logger.exception(
                    "Export queue worker failed while processing %s", next_job.id
                )
            finally:
                self.manager.queue.task_done()
class JobStatePublisher:
    """Publishes a single job state payload to the dispatcher.

    Each call opens a short-lived :py:class:`InterProcessRequestor`, sends
    the payload, and closes the socket. The short-lived design avoids
    REQ/REP state corruption that would arise from sharing a single REQ
    socket across the API thread and worker threads (REQ sockets must
    strictly alternate send/recv).

    With the 1s broadcast throttle in place, socket creation overhead is
    negligible. The class also exists so tests can substitute a no-op
    instance instead of stubbing ZMQ — see ``BaseTestHttp.setUp``.
    """

    def publish(self, payload: dict[str, Any]) -> None:
        """Best-effort publish of *payload* on the UPDATE_JOB_STATE topic."""
        try:
            requestor = InterProcessRequestor()
        except Exception as err:
            logger.warning("Failed to open job state requestor: %s", err)
            return
        try:
            requestor.send_data(UPDATE_JOB_STATE, payload)
        except Exception as err:
            # Send failures are logged at debug: the next broadcast retries.
            logger.debug("Job state broadcast failed: %s", err)
        finally:
            # Always close the short-lived socket, even on failure.
            try:
                requestor.stop()
            except Exception:
                pass
class ExportJobManager:
    """Concurrency-limited manager for queued export jobs.

    Owns the bounded job queue, the worker threads that drain it, the
    in-memory job map used by the API, and throttled broadcasts of
    aggregate job state.
    """

    def __init__(
        self,
        config: FrigateConfig,
        max_concurrent: int,
        max_queued: int = MAX_QUEUED_EXPORT_JOBS,
        publisher: Optional[JobStatePublisher] = None,
    ) -> None:
        self.config = config
        # Clamp both values to at least 1 so the queue always drains.
        self.max_concurrent = max(1, max_concurrent)
        self.queue: Queue[ExportJob] = Queue(maxsize=max(1, max_queued))
        # All known jobs (queued, running, recently finished) keyed by ID.
        self.jobs: dict[str, ExportJob] = {}
        # Guards self.jobs, self.workers, and self.started.
        self.lock = threading.Lock()
        self.workers: list[ExportQueueWorker] = []
        self.started = False
        # Injectable so tests can pass a no-op publisher.
        self.publisher = publisher if publisher is not None else JobStatePublisher()
        self._last_broadcast_monotonic: float = 0.0
        self._broadcast_throttle_lock = threading.Lock()

    def _broadcast_all_jobs(self, force: bool = False) -> None:
        """Publish aggregate export job state via the job_state WS topic.

        When ``force`` is False, broadcasts within
        ``PROGRESS_BROADCAST_MIN_INTERVAL`` of the previous one are skipped
        to avoid flooding the WebSocket with rapid progress updates.
        ``force`` bypasses the throttle and is used for status transitions
        (enqueue/start/finish) where the frontend needs the latest state.
        """
        now = time.monotonic()
        with self._broadcast_throttle_lock:
            if (
                not force
                and now - self._last_broadcast_monotonic
                < PROGRESS_BROADCAST_MIN_INTERVAL
            ):
                return
            self._last_broadcast_monotonic = now
        with self.lock:
            # Only queued/running jobs are broadcast; finished jobs age out
            # via _schedule_job_cleanup.
            active = [
                j
                for j in self.jobs.values()
                if j.status in (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)
            ]
            any_running = any(j.status == JobStatusTypesEnum.running for j in active)
            payload: dict[str, Any] = {
                "job_type": "export",
                "status": "running" if any_running else "queued",
                "results": {"jobs": [j.to_dict() for j in active]},
            }
        # Publish outside self.lock so a slow publisher can't block job APIs.
        try:
            self.publisher.publish(payload)
        except Exception as err:
            logger.warning("Publisher raised during job state broadcast: %s", err)

    def _make_progress_callback(self, job: ExportJob) -> Callable[[str, float], None]:
        """Build a callback the exporter can invoke during execution."""

        def on_progress(step: str, percent: float) -> None:
            # Mutate job state in place, then broadcast (throttled).
            job.current_step = step
            job.progress_percent = percent
            self._broadcast_all_jobs()

        return on_progress

    def _schedule_job_cleanup(self, job_id: str) -> None:
        """Drop a completed job from ``self.jobs`` after a short delay."""

        def cleanup() -> None:
            with self.lock:
                self.jobs.pop(job_id, None)

        timer = threading.Timer(COMPLETED_JOB_CLEANUP_DELAY, cleanup)
        timer.daemon = True
        timer.start()

    def ensure_started(self) -> None:
        """Ensure worker threads are started exactly once."""
        with self.lock:
            if self.started:
                # Already running: just replace any workers that died.
                self._restart_dead_workers_locked()
                return
            for index in range(self.max_concurrent):
                worker = ExportQueueWorker(self, index)
                worker.start()
                self.workers.append(worker)
            self.started = True

    def _restart_dead_workers_locked(self) -> None:
        # Caller must hold self.lock (hence the "_locked" suffix).
        for index, worker in enumerate(self.workers):
            if worker.is_alive():
                continue
            logger.error(
                "Export queue worker %s died unexpectedly, restarting", worker.name
            )
            replacement = ExportQueueWorker(self, index)
            replacement.start()
            self.workers[index] = replacement

    def enqueue(self, job: ExportJob) -> str:
        """Queue a job for background execution.

        Raises ExportQueueFullError if the queue is at capacity.
        """
        self.ensure_started()
        try:
            self.queue.put_nowait(job)
        except Full as err:
            raise ExportQueueFullError(
                "Export queue is full; try again once current exports finish"
            ) from err
        with self.lock:
            self.jobs[job.id] = job
        self._broadcast_all_jobs(force=True)
        return job.id

    def get_job(self, job_id: str) -> Optional[ExportJob]:
        """Get a job by ID."""
        with self.lock:
            return self.jobs.get(job_id)

    def list_active_jobs(self) -> list[ExportJob]:
        """List queued and running jobs."""
        with self.lock:
            return [
                job
                for job in self.jobs.values()
                if job.status in (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)
            ]

    def cancel_queued_jobs_for_case(self, case_id: str) -> list[ExportJob]:
        """Cancel queued export jobs assigned to a deleted case."""
        cancelled_jobs: list[ExportJob] = []
        with self.lock:
            # Reach into Queue internals under its mutex to filter out
            # matching jobs while preserving the order of the rest.
            with self.queue.mutex:
                retained_jobs: list[ExportJob] = []
                while self.queue.queue:
                    job = self.queue.queue.popleft()
                    if (
                        job.export_case_id == case_id
                        and job.status == JobStatusTypesEnum.queued
                    ):
                        job.status = JobStatusTypesEnum.cancelled
                        job.end_time = time.time()
                        cancelled_jobs.append(job)
                        continue
                    retained_jobs.append(job)
                self.queue.queue.extend(retained_jobs)
                if cancelled_jobs:
                    # Removed items will never get a task_done() from a
                    # worker, so fix up the accounting and wake waiters.
                    self.queue.unfinished_tasks = max(
                        0,
                        self.queue.unfinished_tasks - len(cancelled_jobs),
                    )
                    if self.queue.unfinished_tasks == 0:
                        self.queue.all_tasks_done.notify_all()
                    self.queue.not_full.notify_all()
        return cancelled_jobs

    def available_slots(self) -> int:
        """Approximate number of additional jobs that could be queued right now.

        Uses Queue.qsize() which is best-effort; callers should treat the
        result as advisory since another thread could enqueue between
        checking and enqueueing.
        """
        return max(0, self.queue.maxsize - self.queue.qsize())

    def run_job(self, job: ExportJob) -> None:
        """Execute a queued export job."""
        job.status = JobStatusTypesEnum.running
        job.start_time = time.time()
        self._broadcast_all_jobs(force=True)
        exporter = RecordingExporter(
            self.config,
            job.id,
            job.camera,
            job.name,
            job.image_path,
            int(job.request_start_time),
            int(job.request_end_time),
            PlaybackSourceEnum(job.playback_source),
            job.export_case_id,
            job.ffmpeg_input_args,
            job.ffmpeg_output_args,
            job.cpu_fallback,
            on_progress=self._make_progress_callback(job),
        )
        try:
            exporter.run()
            # Outcome is derived from the Export row — presumably the
            # exporter flips in_progress to False on success; TODO confirm
            # against RecordingExporter.run().
            export = Export.get_or_none(Export.id == job.id)
            if export is None:
                job.status = JobStatusTypesEnum.failed
                job.error_message = "Export failed"
            elif export.in_progress:
                job.status = JobStatusTypesEnum.failed
                job.error_message = "Export did not complete"
            else:
                job.status = JobStatusTypesEnum.success
                job.results = {
                    "export_id": export.id,
                    "export_case_id": export.export_case_id,
                    "video_path": export.video_path,
                    "thumb_path": export.thumb_path,
                }
        except DoesNotExist:
            job.status = JobStatusTypesEnum.failed
            job.error_message = "Export not found"
        except Exception as err:
            logger.exception("Export job %s failed: %s", job.id, err)
            job.status = JobStatusTypesEnum.failed
            job.error_message = str(err)
        finally:
            job.end_time = time.time()
            self._broadcast_all_jobs(force=True)
            self._schedule_job_cleanup(job.id)
_job_manager: Optional[ExportJobManager] = None
_job_manager_lock = threading.Lock()
def _get_max_concurrent(config: FrigateConfig) -> int:
    """Read the configured cap on concurrently running export jobs."""
    export_settings = config.record.export
    return int(export_settings.max_concurrent)
def reap_stale_exports() -> None:
    """Sweep Export rows stuck with in_progress=True from previous sessions.

    On Frigate startup no export job is alive yet, so any in_progress=True
    row must be a leftover from a previous session that crashed, was killed
    mid-export, or returned early from RecordingExporter.run() without
    flipping the flag. For each stale row we either:

    - delete the row (and any thumb) if the video file is missing or empty,
      since there is nothing worth recovering
    - flip in_progress to False if the video file exists on disk and is
      non-empty, treating it as a completed export the user can manage
      through the normal UI

    Must only be called when the export job manager is certain to have no
    active jobs — i.e., at Frigate startup, before any worker runs.

    All exceptions are caught and logged; the caller does not need to wrap
    this in a try/except. A failure on a single row will not stop the rest
    of the sweep, and a failure in the top-level query will log and return.
    """
    try:
        stale_exports = list(Export.select().where(Export.in_progress == True))  # noqa: E712
    except Exception:
        logger.exception("Failed to query stale in-progress exports")
        return

    if not stale_exports:
        logger.debug("No stale in-progress exports found on startup")
        return

    flipped = 0
    deleted = 0
    errored = 0
    for export in stale_exports:
        try:
            video_path = export.video_path
            has_usable_file = False
            if video_path:
                try:
                    # getsize raises OSError when the file is missing.
                    has_usable_file = os.path.getsize(video_path) > 0
                except OSError:
                    has_usable_file = False
            if has_usable_file:
                # Unassign from any case on recovery: the user should
                # re-triage a recovered export rather than have it silently
                # reappear inside a case they curated.
                Export.update(
                    {Export.in_progress: False, Export.export_case: None}
                ).where(Export.id == export.id).execute()
                flipped += 1
                logger.info(
                    "Recovered stale in-progress export %s (file intact on disk)",
                    export.id,
                )
                continue
            # No usable video: remove thumb, any partial video, and the row.
            if export.thumb_path:
                Path(export.thumb_path).unlink(missing_ok=True)
            if video_path:
                Path(video_path).unlink(missing_ok=True)
            Export.delete().where(Export.id == export.id).execute()
            deleted += 1
            logger.info(
                "Deleted stale in-progress export %s (no usable file on disk)",
                export.id,
            )
        except Exception:
            errored += 1
            logger.exception("Failed to reap stale export %s", export.id)

    logger.info(
        "Stale export cleanup complete: %d recovered, %d deleted, %d errored",
        flipped,
        deleted,
        errored,
    )
def get_export_job_manager(config: FrigateConfig) -> ExportJobManager:
    """Get or create the singleton export job manager.

    Creation and lookup are serialized by ``_job_manager_lock`` so two
    threads cannot race to build separate managers. ``ensure_started`` is
    invoked on every call, not only at creation time.
    """
    global _job_manager
    with _job_manager_lock:
        manager = _job_manager
        if manager is None:
            manager = ExportJobManager(config, _get_max_concurrent(config))
            _job_manager = manager
        manager.ensure_started()
        return manager
def start_export_job(config: FrigateConfig, job: ExportJob) -> str:
    """Queue an export job on the singleton manager and return its ID."""
    manager = get_export_job_manager(config)
    return manager.enqueue(job)
def get_export_job(config: FrigateConfig, job_id: str) -> Optional[ExportJob]:
    """Look up a queued or completed export job by its ID."""
    manager = get_export_job_manager(config)
    return manager.get_job(job_id)
def list_active_export_jobs(config: FrigateConfig) -> list[ExportJob]:
    """Return the export jobs that are currently queued or running."""
    manager = get_export_job_manager(config)
    return manager.list_active_jobs()
def cancel_queued_export_jobs_for_case(
    config: FrigateConfig, case_id: str
) -> list[ExportJob]:
    """Cancel queued export jobs that still point at a deleted case."""
    manager = get_export_job_manager(config)
    return manager.cancel_queued_jobs_for_case(case_id)
def available_export_queue_slots(config: FrigateConfig) -> int:
    """Approximate how many additional export jobs could be queued now."""
    manager = get_export_job_manager(config)
    return manager.available_slots()

View File

@ -121,11 +121,12 @@ class VLMWatchRunner(threading.Thread):
def _run_iteration(self) -> float:
"""Run one VLM analysis iteration. Returns seconds until next run."""
vision_client = (
self.genai_manager.vision_client or self.genai_manager.tool_client
chat_client = self.genai_manager.chat_client
if chat_client is None or not chat_client.supports_vision:
logger.warning(
"VLM watch job %s: no chat client with vision support available",
self.job.id,
)
if vision_client is None:
logger.warning("VLM watch job %s: no vision client available", self.job.id)
return 30
frame = self.frame_processor.get_current_frame(self.job.camera, {})
@ -163,7 +164,7 @@ class VLMWatchRunner(threading.Thread):
}
)
response = vision_client.chat_with_tools(
response = chat_client.chat_with_tools(
messages=self.conversation,
tools=None,
tool_choice=None,

View File

@ -152,21 +152,12 @@ class OnvifController:
cam = self.camera_configs[cam_name]
try:
user = cam.onvif.user
password = cam.onvif.password
if user is not None and isinstance(user, bytes):
user = user.decode("utf-8")
if password is not None and isinstance(password, bytes):
password = password.decode("utf-8")
self.cams[cam_name] = {
"onvif": ONVIFCamera(
cam.onvif.host,
cam.onvif.port,
user,
password,
cam.onvif.user,
cam.onvif.password,
wsdl_dir=str(Path(find_spec("onvif").origin).parent / "wsdl"),
adjust_time=cam.onvif.ignore_time_mismatch,
encrypt=not cam.onvif.tls_insecure,
@ -459,15 +450,15 @@ class OnvifController:
presets = []
for preset in presets:
# Ensure preset name is a Unicode string and handle UTF-8 characters correctly
preset_name = getattr(preset, "Name") or f"preset {preset['token']}"
if isinstance(preset_name, bytes):
preset_name = preset_name.decode("utf-8")
# Convert to lowercase while preserving UTF-8 characters
preset_name_lower = preset_name.lower()
self.cams[camera_name]["presets"][preset_name_lower] = preset["token"]
# Some cameras (e.g. Reolink) return UTF-8 bytes that zeep decodes
# as latin-1, producing mojibake. Detect that and repair it by
# round-tripping through latin-1 -> utf-8.
try:
preset_name = preset_name.encode("latin-1").decode("utf-8")
except (UnicodeEncodeError, UnicodeDecodeError):
pass
self.cams[camera_name]["presets"][preset_name.lower()] = preset["token"]
# get list of supported features
supported_features = []
@ -695,9 +686,6 @@ class OnvifController:
self.cams[camera_name]["active"] = False
async def _move_to_preset(self, camera_name: str, preset: str) -> None:
if isinstance(preset, bytes):
preset = preset.decode("utf-8")
preset = preset.lower()
if preset not in self.cams[camera_name]["presets"]:

View File

@ -4,13 +4,14 @@ import datetime
import logging
import os
import random
import re
import shutil
import string
import subprocess as sp
import threading
from enum import Enum
from pathlib import Path
from typing import Optional
from typing import Callable, Optional
from peewee import DoesNotExist
@ -36,22 +37,24 @@ logger = logging.getLogger(__name__)
DEFAULT_TIME_LAPSE_FFMPEG_ARGS = "-vf setpts=0.04*PTS -r 30"
TIMELAPSE_DATA_INPUT_ARGS = "-an -skip_frame nokey"
# ffmpeg flags that can read from or write to arbitrary files.
# filter flags are blocked because source filters like movie= and
# amovie= can read arbitrary files from the filesystem.
# Matches the setpts factor used in timelapse exports (e.g. setpts=0.04*PTS).
# Captures the floating-point factor so we can scale expected duration.
SETPTS_FACTOR_RE = re.compile(r"setpts=([0-9]*\.?[0-9]+)\*PTS")
# ffmpeg flags that can read from or write to arbitrary files
BLOCKED_FFMPEG_ARGS = frozenset(
{
"-i",
"-filter_script",
"-vstats_file",
"-passlogfile",
"-sdp_file",
"-dump_attachment",
"-filter_complex",
"-lavfi",
"-vf",
"-af",
"-filter",
"-vstats_file",
"-passlogfile",
"-sdp_file",
"-dump_attachment",
"-attach",
}
)
@ -62,8 +65,11 @@ def validate_ffmpeg_args(args: str) -> tuple[bool, str]:
Blocks:
- The -i flag and other flags that read/write arbitrary files
- Filter flags (can read files via movie=/amovie= source filters)
- Absolute/relative file paths (potential extra outputs)
- URLs and ffmpeg protocol references (data exfiltration)
Admin users skip this validation entirely since they are trusted.
"""
if not args or not args.strip():
return True, ""
@ -115,6 +121,7 @@ class RecordingExporter(threading.Thread):
ffmpeg_input_args: Optional[str] = None,
ffmpeg_output_args: Optional[str] = None,
cpu_fallback: bool = False,
on_progress: Optional[Callable[[str, float], None]] = None,
) -> None:
super().__init__()
self.config = config
@ -129,10 +136,213 @@ class RecordingExporter(threading.Thread):
self.ffmpeg_input_args = ffmpeg_input_args
self.ffmpeg_output_args = ffmpeg_output_args
self.cpu_fallback = cpu_fallback
self.on_progress = on_progress
# ensure export thumb dir
Path(os.path.join(CLIPS_DIR, "export")).mkdir(exist_ok=True)
def _emit_progress(self, step: str, percent: float) -> None:
"""Invoke the progress callback if one was supplied."""
if self.on_progress is None:
return
try:
self.on_progress(step, max(0.0, min(100.0, percent)))
except Exception:
logger.exception("Export progress callback failed")
def _expected_output_duration_seconds(self) -> float:
    """Estimate the output video duration in seconds for progress math.

    The requested [start, end] span frequently overstates the real work:
    only the seconds actually saved in the Recordings/Previews tables get
    encoded, so progress would otherwise plateau early. Prefer the summed
    on-disk figure when it is available and positive, falling back to the
    requested span. Timelapse exports scale the estimate by the setpts
    factor found in the output args.
    """
    requested = max(0.0, float(self.end_time - self.start_time))
    recorded = self._sum_source_duration_seconds()
    if recorded is not None and recorded > 0:
        base = recorded
    else:
        base = requested
    output_args = self.ffmpeg_output_args
    if not output_args:
        return base
    match = SETPTS_FACTOR_RE.search(output_args)
    if match is None:
        return base
    try:
        factor = float(match.group(1))
    except ValueError:
        return base
    # A non-positive factor would zero or invert the estimate; ignore it.
    return base * factor if factor > 0 else base
def _sum_source_duration_seconds(self) -> Optional[float]:
    """Sum saved-video seconds inside [start_time, end_time].

    Queries Recordings or Previews depending on the playback source,
    clamps each overlapping segment to the requested range, and returns
    the total. Returns ``None`` on any error so the caller can fall back
    to the requested-range duration without losing progress reporting.
    """
    try:
        # Both tables expose the same start_time/end_time/camera columns,
        # so pick the model once instead of duplicating the overlap query
        # per table (the two branches were previously copy-pasted).
        model = (
            Recordings
            if self.playback_source == PlaybackSourceEnum.recordings
            else Previews
        )
        rows = (
            model.select(model.start_time, model.end_time)
            .where(
                # segment starts inside the range, ends inside the range,
                # or fully contains the range
                model.start_time.between(self.start_time, self.end_time)
                | model.end_time.between(self.start_time, self.end_time)
                | (
                    (self.start_time > model.start_time)
                    & (self.end_time < model.end_time)
                )
            )
            .where(model.camera == self.camera)
            .iterator()
        )
    except Exception:
        logger.exception(
            "Failed to sum source duration for export %s", self.export_id
        )
        return None
    total = 0.0
    try:
        for row in rows:
            # Clip each segment to the requested window before summing.
            clipped_start = max(float(row.start_time), float(self.start_time))
            clipped_end = min(float(row.end_time), float(self.end_time))
            if clipped_end > clipped_start:
                total += clipped_end - clipped_start
    except Exception:
        logger.exception(
            "Failed to read recording rows for export %s", self.export_id
        )
        return None
    return total
def _inject_progress_flags(self, ffmpeg_cmd: list[str]) -> list[str]:
"""Insert FFmpeg progress reporting flags before the output path.
``-progress pipe:2`` writes structured key=value lines to stderr,
``-nostats`` suppresses the noisy default stats output.
"""
if not ffmpeg_cmd:
return ffmpeg_cmd
return ffmpeg_cmd[:-1] + ["-progress", "pipe:2", "-nostats", ffmpeg_cmd[-1]]
def _run_ffmpeg_with_progress(
    self,
    ffmpeg_cmd: list[str],
    playlist_lines: str | list[str],
    step: str = "encoding",
) -> tuple[int, str]:
    """Run an FFmpeg export command, parsing progress events from stderr.

    Returns ``(returncode, captured_stderr)``. Stdout is left attached to
    the parent process so we don't have to drain it (and risk a deadlock
    if the buffer fills). Progress percent is computed against the
    expected output duration; values are clamped to [0, 100] inside
    :py:meth:`_emit_progress`.
    """
    # Run at reduced priority so exports don't starve live pipelines.
    cmd = ["nice", "-n", str(PROCESS_PRIORITY_LOW)] + self._inject_progress_flags(
        ffmpeg_cmd
    )
    # The playlist is fed to FFmpeg over stdin; accept it pre-joined or
    # as a list of lines.
    if isinstance(playlist_lines, list):
        stdin_payload = "\n".join(playlist_lines)
    else:
        stdin_payload = playlist_lines
    expected_duration = self._expected_output_duration_seconds()
    self._emit_progress(step, 0.0)
    proc = sp.Popen(
        cmd,
        stdin=sp.PIPE,
        stderr=sp.PIPE,
        text=True,
        encoding="ascii",
        errors="replace",
    )
    assert proc.stdin is not None
    assert proc.stderr is not None
    try:
        proc.stdin.write(stdin_payload)
    except (BrokenPipeError, OSError):
        # FFmpeg may have rejected the input early; still wait for it
        # to terminate so the returncode is meaningful.
        pass
    finally:
        # Close stdin regardless of write outcome so FFmpeg sees EOF.
        try:
            proc.stdin.close()
        except (BrokenPipeError, OSError):
            pass
    captured: list[str] = []
    try:
        for raw_line in proc.stderr:
            captured.append(raw_line)
            line = raw_line.strip()
            if not line:
                continue
            if line.startswith("out_time_us="):
                # out_time_us is the encoded output position in microseconds
                # as reported by -progress.
                if expected_duration <= 0:
                    continue
                try:
                    out_time_us = int(line.split("=", 1)[1])
                except (ValueError, IndexError):
                    continue
                if out_time_us < 0:
                    continue
                out_seconds = out_time_us / 1_000_000.0
                percent = (out_seconds / expected_duration) * 100.0
                self._emit_progress(step, percent)
            elif line == "progress=end":
                self._emit_progress(step, 100.0)
                break
    except Exception:
        logger.exception("Failed reading FFmpeg progress for %s", self.export_id)
    # NOTE(review): wait() runs before the final stderr drain below; if
    # FFmpeg wrote a large stderr burst after "progress=end" it could fill
    # the pipe and stall exit — confirm this cannot happen in practice.
    proc.wait()
    # Drain any remaining stderr so callers can log it on failure.
    try:
        remaining = proc.stderr.read()
        if remaining:
            captured.append(remaining)
    except Exception:
        pass
    return proc.returncode, "".join(captured)
def get_datetime_from_timestamp(self, timestamp: int) -> str:
    """Render a unix timestamp as a local-time 'YYYY-MM-DD HH:MM:SS' string.

    Note this is space-separated, not strict ISO-8601 ('T'-separated).
    """
    moment = datetime.datetime.fromtimestamp(timestamp)
    return moment.strftime("%Y-%m-%d %H:%M:%S")
@ -405,6 +615,7 @@ class RecordingExporter(threading.Thread):
logger.debug(
f"Beginning export for {self.camera} from {self.start_time} to {self.end_time}"
)
self._emit_progress("preparing", 0.0)
export_name = (
self.user_provided_name
or f"{self.camera.replace('_', ' ')} {self.get_datetime_from_timestamp(self.start_time)} {self.get_datetime_from_timestamp(self.end_time)}"
@ -442,16 +653,23 @@ class RecordingExporter(threading.Thread):
except DoesNotExist:
return
p = sp.run(
["nice", "-n", str(PROCESS_PRIORITY_LOW)] + ffmpeg_cmd,
input="\n".join(playlist_lines),
encoding="ascii",
capture_output=True,
# When neither custom ffmpeg arg is set the default path uses
# `-c copy` (stream copy — no re-encoding). Report that as a
# distinct step so the UI doesn't mislabel a remux as encoding.
# The retry branch below always re-encodes because cpu_fallback
# requires custom args; it stays "encoding_retry".
is_stream_copy = (
self.ffmpeg_input_args is None and self.ffmpeg_output_args is None
)
initial_step = "copying" if is_stream_copy else "encoding"
returncode, stderr = self._run_ffmpeg_with_progress(
ffmpeg_cmd, playlist_lines, step=initial_step
)
# If export failed and cpu_fallback is enabled, retry without hwaccel
if (
p.returncode != 0
returncode != 0
and self.cpu_fallback
and self.ffmpeg_input_args is not None
and self.ffmpeg_output_args is not None
@ -469,23 +687,21 @@ class RecordingExporter(threading.Thread):
video_path, use_hwaccel=False
)
p = sp.run(
["nice", "-n", str(PROCESS_PRIORITY_LOW)] + ffmpeg_cmd,
input="\n".join(playlist_lines),
encoding="ascii",
capture_output=True,
returncode, stderr = self._run_ffmpeg_with_progress(
ffmpeg_cmd, playlist_lines, step="encoding_retry"
)
if p.returncode != 0:
if returncode != 0:
logger.error(
f"Failed to export {self.playback_source.value} for command {' '.join(ffmpeg_cmd)}"
)
logger.error(p.stderr)
logger.error(stderr)
Path(video_path).unlink(missing_ok=True)
Export.delete().where(Export.id == self.export_id).execute()
Path(thumb_path).unlink(missing_ok=True)
return
else:
self._emit_progress("finalizing", 100.0)
Export.update({Export.in_progress: False}).where(
Export.id == self.export_id
).execute()

View File

@ -372,6 +372,7 @@ class RecordingMaintainer(threading.Thread):
)
record_config = self.config.cameras[camera].record
segment_stats: SegmentInfo | None = None
highest = None
if record_config.continuous.days > 0:
@ -401,8 +402,18 @@ class RecordingMaintainer(threading.Thread):
if highest == "continuous"
else RetainModeEnum.motion
)
segment_stats = self.segment_stats(camera, start_time, end_time)
# Here we only check if we should move the segment based on non-object recording retention
# we will always want to check for overlapping review items below before dropping the segment
if not segment_stats.should_discard_segment(record_mode):
return await self.move_segment(
camera, start_time, end_time, duration, cache_path, record_mode
camera,
start_time,
end_time,
duration,
cache_path,
segment_stats,
)
# we fell through the continuous / motion check, so we need to check the review items
@ -435,6 +446,11 @@ class RecordingMaintainer(threading.Thread):
if review.severity == "alert"
else record_config.detections.retain.mode
)
if segment_stats is None:
segment_stats = self.segment_stats(camera, start_time, end_time)
if not segment_stats.should_discard_segment(record_mode):
# move from cache to recordings immediately
return await self.move_segment(
camera,
@ -442,12 +458,18 @@ class RecordingMaintainer(threading.Thread):
end_time,
duration,
cache_path,
record_mode,
segment_stats,
)
# if it doesn't overlap with an review item, go ahead and drop the segment
# if it ends more than the configured pre_capture for the camera
# BUT only if continuous/motion is NOT enabled (otherwise wait for processing)
elif highest is None:
else:
self.drop_segment(cache_path)
return None
# if it doesn't overlap with a review item, drop the segment once it
# ends more than event_pre_capture before the most recently processed
# frame. at this point we've already decided not to keep it for
# continuous/motion retention (either disabled or segment_stats said
# discard), so waiting longer just fills the cache.
else:
camera_info = self.object_recordings_info[camera]
most_recently_processed_frame_time = (
camera_info[-1][0] if len(camera_info) > 0 else 0
@ -455,6 +477,7 @@ class RecordingMaintainer(threading.Thread):
retain_cutoff = datetime.datetime.fromtimestamp(
most_recently_processed_frame_time - record_config.event_pre_capture
).astimezone(datetime.timezone.utc)
if end_time < retain_cutoff:
self.drop_segment(cache_path)
@ -578,15 +601,8 @@ class RecordingMaintainer(threading.Thread):
end_time: datetime.datetime,
duration: float,
cache_path: str,
store_mode: RetainModeEnum,
segment_info: SegmentInfo,
) -> Optional[dict[str, Any]]:
segment_info = self.segment_stats(camera, start_time, end_time)
# check if the segment shouldn't be stored
if segment_info.should_discard_segment(store_mode):
self.drop_segment(cache_path)
return None
# directory will be in utc due to start_time being in utc
directory = os.path.join(
RECORD_DIR,

View File

@ -197,7 +197,7 @@ class StorageMaintainer(threading.Thread):
# check if need to delete retained segments
if deleted_segments_size < hourly_bandwidth:
logger.error(
f"Could not clear {hourly_bandwidth} MB, currently {deleted_segments_size} MB have been cleared. Retained recordings must be deleted."
f"Could not clear {hourly_bandwidth} MB, currently {deleted_segments_size:.2f} MB have been cleared. Retained recordings must be deleted."
)
recordings = (
Recordings.select(
@ -225,7 +225,7 @@ class StorageMaintainer(threading.Thread):
# this file was not found so we must assume no space was cleaned up
pass
else:
logger.info(f"Cleaned up {deleted_segments_size} MB of recordings")
logger.info(f"Cleaned up {deleted_segments_size:.2f} MB of recordings")
logger.debug(f"Expiring {len(deleted_recordings)} recordings")
# delete up to 100,000 at a time

View File

@ -2,6 +2,7 @@ import datetime
import logging
import os
import unittest
from unittest.mock import patch
from fastapi import Request
from fastapi.testclient import TestClient
@ -14,6 +15,7 @@ from frigate.api.fastapi_app import create_fastapi_app
from frigate.config import FrigateConfig
from frigate.const import BASE_DIR, CACHE_DIR
from frigate.debug_replay import DebugReplayManager
from frigate.jobs.export import JobStatePublisher
from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
@ -44,6 +46,19 @@ class BaseTestHttp(unittest.TestCase):
self.db = SqliteQueueDatabase(TEST_DB)
self.db.bind(models)
# The export job manager broadcasts via JobStatePublisher on
# enqueue/start/finish. There is no dispatcher process bound to
# the IPC socket in tests, so a real publish() would block on
# recv_json forever. Replace publish with a no-op for the
# lifetime of this test; the lookup goes through the class so any
# already-instantiated publisher (the singleton manager's) picks
# up the no-op too.
publisher_patch = patch.object(
JobStatePublisher, "publish", lambda self, payload: None
)
publisher_patch.start()
self.addCleanup(publisher_patch.stop)
self.minimal_config = {
"mqtt": {"host": "mqtt"},
"cameras": {

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,303 @@
"""Tests for the find_similar_objects chat tool."""
import asyncio
import os
import tempfile
import unittest
from types import SimpleNamespace
from unittest.mock import MagicMock
from playhouse.sqlite_ext import SqliteExtDatabase
from frigate.api.chat import (
_execute_find_similar_objects,
get_tool_definitions,
)
from frigate.api.chat_util import (
DESCRIPTION_WEIGHT,
VISUAL_WEIGHT,
distance_to_score,
fuse_scores,
)
from frigate.embeddings.util import ZScoreNormalization
from frigate.models import Event
def _run(coro):
return asyncio.new_event_loop().run_until_complete(coro)
class TestDistanceToScore(unittest.TestCase):
    """Unit tests for distance_to_score normalization behavior."""

    def test_lower_distance_gives_higher_score(self):
        norm = ZScoreNormalization()
        # Seed with a small distribution so the stddev is non-zero.
        norm._update([0.1, 0.2, 0.3, 0.4, 0.5])
        near = distance_to_score(0.1, norm)
        far = distance_to_score(0.5, norm)
        self.assertGreater(near, far)
        # Both scores must land inside the [0, 1] interval.
        for score in (near, far):
            self.assertGreaterEqual(score, 0.0)
            self.assertLessEqual(score, 1.0)

    def test_uninitialized_stats_returns_neutral_score(self):
        # With no samples (n == 0, stddev == 0) the score is neutral.
        self.assertEqual(distance_to_score(0.3, ZScoreNormalization()), 0.5)
class TestFuseScores(unittest.TestCase):
    """Unit tests for visual/description score fusion."""

    def test_weights_sum_to_one(self):
        self.assertAlmostEqual(VISUAL_WEIGHT + DESCRIPTION_WEIGHT, 1.0)

    def test_fuses_both_sides(self):
        combined = fuse_scores(visual_score=0.8, description_score=0.4)
        # Fusion is a weighted average of the two component scores.
        self.assertAlmostEqual(
            combined, VISUAL_WEIGHT * 0.8 + DESCRIPTION_WEIGHT * 0.4
        )

    def test_missing_description_uses_visual_only(self):
        self.assertAlmostEqual(
            fuse_scores(visual_score=0.7, description_score=None), 0.7
        )

    def test_missing_visual_uses_description_only(self):
        self.assertAlmostEqual(
            fuse_scores(visual_score=None, description_score=0.6), 0.6
        )

    def test_both_missing_returns_none(self):
        self.assertIsNone(fuse_scores(visual_score=None, description_score=None))
class TestToolDefinition(unittest.TestCase):
    """Checks find_similar_objects tool registration and parameter schema."""

    @staticmethod
    def _find_tool():
        # Locate the tool definition by its function name, or None.
        for tool in get_tool_definitions():
            if tool["function"]["name"] == "find_similar_objects":
                return tool
        return None

    def test_find_similar_objects_is_registered(self):
        self.assertIsNotNone(self._find_tool())

    def test_find_similar_objects_schema(self):
        tool = self._find_tool()
        params = tool["function"]["parameters"]["properties"]
        # Every documented filter must be present in the schema.
        for name in (
            "event_id",
            "after",
            "before",
            "cameras",
            "labels",
            "sub_labels",
            "zones",
            "similarity_mode",
            "min_score",
            "limit",
        ):
            self.assertIn(name, params)
        # Only the anchor event is mandatory.
        self.assertEqual(tool["function"]["parameters"]["required"], ["event_id"])
        self.assertEqual(
            params["similarity_mode"]["enum"], ["visual", "semantic", "fused"]
        )
class TestExecuteFindSimilarObjects(unittest.TestCase):
    """End-to-end tests for _execute_find_similar_objects against a real
    on-disk SQLite Event table, with the embeddings layer mocked."""

    def setUp(self):
        # A real file-backed DB (delete=False so the path survives close();
        # tearDown removes it).
        self.tmp = tempfile.NamedTemporaryFile(suffix=".db", delete=False)
        self.tmp.close()
        self.db = SqliteExtDatabase(self.tmp.name)
        Event.bind(self.db, bind_refs=False, bind_backrefs=False)
        self.db.connect()
        self.db.create_tables([Event])

        # Insert an anchor plus two candidates.
        def make(event_id, label="car", camera="driveway", start=1_700_000_100):
            # Create a minimal but fully-populated Event row; only id,
            # label, camera and the time range vary per test.
            Event.create(
                id=event_id,
                label=label,
                sub_label=None,
                camera=camera,
                start_time=start,
                end_time=start + 10,
                top_score=0.9,
                score=0.9,
                false_positive=False,
                zones=[],
                thumbnail="",
                has_clip=True,
                has_snapshot=True,
                region=[0, 0, 1, 1],
                box=[0, 0, 1, 1],
                area=1,
                retain_indefinitely=False,
                ratio=1.0,
                plus_id="",
                model_hash="",
                detector_type="",
                model_type="",
                data={"description": "a green sedan"},
            )

        make("anchor", start=1_700_000_200)
        make("cand_a", start=1_700_000_100)
        make("cand_b", start=1_700_000_150)
        # Expose the factory so individual tests can add more events.
        self.make = make

    def tearDown(self):
        self.db.close()
        os.unlink(self.tmp.name)

    def _make_request(self, semantic_enabled=True, embeddings=None):
        # Build the minimal request.app surface the executor reads:
        # app.embeddings and app.frigate_config.semantic_search.enabled.
        app = SimpleNamespace(
            embeddings=embeddings,
            frigate_config=SimpleNamespace(
                semantic_search=SimpleNamespace(enabled=semantic_enabled),
            ),
        )
        return SimpleNamespace(app=app)

    def test_semantic_search_disabled_returns_error(self):
        """Disabled semantic search short-circuits with an error payload."""
        req = self._make_request(semantic_enabled=False)
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor"},
                allowed_cameras=["driveway"],
            )
        )
        self.assertEqual(result["error"], "semantic_search_disabled")

    def test_anchor_not_found_returns_error(self):
        """An unknown anchor event id yields anchor_not_found."""
        embeddings = MagicMock()
        req = self._make_request(embeddings=embeddings)
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "nope"},
                allowed_cameras=["driveway"],
            )
        )
        self.assertEqual(result["error"], "anchor_not_found")

    def test_empty_candidates_returns_empty_results(self):
        """No candidate events in scope returns an empty, untruncated result."""
        embeddings = MagicMock()
        req = self._make_request(embeddings=embeddings)
        # Filter to a camera with no other events.
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor", "cameras": ["nonexistent_cam"]},
                allowed_cameras=["nonexistent_cam"],
            )
        )
        self.assertEqual(result["results"], [])
        self.assertFalse(result["candidate_truncated"])
        self.assertEqual(result["anchor"]["id"], "anchor")

    def test_fused_calls_both_searches_and_ranks(self):
        """Default (fused) mode queries both indexes and ranks by weighted score."""
        embeddings = MagicMock()
        # cand_a visually closer, cand_b semantically closer.
        embeddings.search_thumbnail.return_value = [
            ("cand_a", 0.10),
            ("cand_b", 0.40),
        ]
        embeddings.search_description.return_value = [
            ("cand_a", 0.50),
            ("cand_b", 0.20),
        ]
        embeddings.thumb_stats = ZScoreNormalization()
        embeddings.thumb_stats._update([0.1, 0.2, 0.3, 0.4, 0.5])
        embeddings.desc_stats = ZScoreNormalization()
        embeddings.desc_stats._update([0.1, 0.2, 0.3, 0.4, 0.5])
        req = self._make_request(embeddings=embeddings)
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor"},
                allowed_cameras=["driveway"],
            )
        )
        embeddings.search_thumbnail.assert_called_once()
        embeddings.search_description.assert_called_once()
        # cand_a should rank first because visual is weighted higher.
        self.assertEqual(result["results"][0]["id"], "cand_a")
        self.assertIn("score", result["results"][0])
        self.assertEqual(result["similarity_mode"], "fused")

    def test_visual_mode_only_calls_thumbnail(self):
        """similarity_mode=visual must not touch the description index."""
        embeddings = MagicMock()
        embeddings.search_thumbnail.return_value = [("cand_a", 0.1)]
        embeddings.thumb_stats = ZScoreNormalization()
        embeddings.thumb_stats._update([0.1, 0.2, 0.3])
        req = self._make_request(embeddings=embeddings)
        _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor", "similarity_mode": "visual"},
                allowed_cameras=["driveway"],
            )
        )
        embeddings.search_thumbnail.assert_called_once()
        embeddings.search_description.assert_not_called()

    def test_semantic_mode_only_calls_description(self):
        """similarity_mode=semantic must not touch the thumbnail index."""
        embeddings = MagicMock()
        embeddings.search_description.return_value = [("cand_a", 0.1)]
        embeddings.desc_stats = ZScoreNormalization()
        embeddings.desc_stats._update([0.1, 0.2, 0.3])
        req = self._make_request(embeddings=embeddings)
        _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor", "similarity_mode": "semantic"},
                allowed_cameras=["driveway"],
            )
        )
        embeddings.search_description.assert_called_once()
        embeddings.search_thumbnail.assert_not_called()

    def test_min_score_drops_low_scoring_results(self):
        """Results scoring below min_score are filtered out."""
        embeddings = MagicMock()
        embeddings.search_thumbnail.return_value = [
            ("cand_a", 0.10),
            ("cand_b", 0.90),
        ]
        embeddings.search_description.return_value = []
        embeddings.thumb_stats = ZScoreNormalization()
        embeddings.thumb_stats._update([0.1, 0.2, 0.3, 0.4, 0.5])
        embeddings.desc_stats = ZScoreNormalization()
        req = self._make_request(embeddings=embeddings)
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor", "similarity_mode": "visual", "min_score": 0.6},
                allowed_cameras=["driveway"],
            )
        )
        ids = [r["id"] for r in result["results"]]
        self.assertIn("cand_a", ids)
        self.assertNotIn("cand_b", ids)

    def test_labels_defaults_to_anchor_label(self):
        """With no labels filter, candidates keep the anchor's label ('car'),
        so a 'person' event must not appear in the results."""
        self.make("person_a", label="person")
        embeddings = MagicMock()
        embeddings.search_thumbnail.return_value = [
            ("cand_a", 0.1),
            ("cand_b", 0.2),
        ]
        embeddings.search_description.return_value = []
        embeddings.thumb_stats = ZScoreNormalization()
        embeddings.thumb_stats._update([0.1, 0.2, 0.3])
        embeddings.desc_stats = ZScoreNormalization()
        req = self._make_request(embeddings=embeddings)
        result = _run(
            _execute_find_similar_objects(
                req,
                {"event_id": "anchor", "similarity_mode": "visual"},
                allowed_cameras=["driveway"],
            )
        )
        ids = [r["id"] for r in result["results"]]
        self.assertNotIn("person_a", ids)
if __name__ == "__main__":
unittest.main()

View File

@ -0,0 +1,211 @@
"""Tests for DeferredRealtimeProcessorApi."""
import sys
import time
import unittest
from typing import Any
from unittest.mock import MagicMock, patch
import numpy as np
from frigate.data_processing.real_time.api import DeferredRealtimeProcessorApi
# Mock TFLite before importing classification module
_MOCK_MODULES = [
"tflite_runtime",
"tflite_runtime.interpreter",
"ai_edge_litert",
"ai_edge_litert.interpreter",
]
for mod in _MOCK_MODULES:
if mod not in sys.modules:
sys.modules[mod] = MagicMock()
from frigate.data_processing.real_time.custom_classification import ( # noqa: E402
CustomObjectClassificationProcessor,
)
class StubDeferredProcessor(DeferredRealtimeProcessorApi):
    """Minimal concrete subclass for testing the deferred base."""

    def __init__(self, max_queue: int = 8):
        # The base class only needs config/metrics objects to exist here,
        # so MagicMocks suffice.
        config = MagicMock()
        metrics = MagicMock()
        super().__init__(config, metrics, max_queue=max_queue)
        # Records what the worker processed, in order, for assertions.
        self.processed_items: list[tuple] = []

    def process_frame(self, obj_data: dict[str, Any], frame: np.ndarray) -> None:
        """Enqueue every call — no gating logic in the stub."""
        # frame.copy(): the task holds its own buffer, decoupled from the
        # caller's array.
        self._enqueue_task(("frame", obj_data, frame.copy()))

    def _process_task(self, task: tuple) -> None:
        # Runs on the worker thread; dispatch on the task's kind tag.
        kind = task[0]
        if kind == "frame":
            _, obj_data, frame = task
            self.processed_items.append((obj_data["id"], frame.shape))
            self._emit_result(
                {
                    "type": "test_result",
                    "id": obj_data["id"],
                    "label": "cat",
                    "score": 0.95,
                }
            )
        elif kind == "expire":
            _, object_id = task
            self.processed_items.append(("expired", object_id))

    def handle_request(
        self, topic: str, request_data: dict[str, Any]
    ) -> dict[str, Any] | None:
        # Only the "reload" topic is handled; it is forwarded to the worker
        # via _enqueue_request so the response comes from the worker thread.
        if topic == "reload":

            def _do_reload(data):
                return {"success": True, "model": data.get("name")}

            return self._enqueue_request(_do_reload, request_data)
        return None

    def expire_object(self, object_id: str, camera: str) -> None:
        # Expiry goes through the same queue so it is serialized with
        # frame work; camera is unused by the stub.
        self._enqueue_task(("expire", object_id))
class TestDeferredProcessorBase(unittest.TestCase):
    """Behavioral tests for the deferred-worker base via StubDeferredProcessor."""

    def test_enqueue_and_drain(self):
        """Tasks enqueued on main thread are processed by worker, results are drainable."""
        proc = StubDeferredProcessor()
        frame = np.zeros((100, 100, 3), dtype=np.uint8)
        proc.process_frame({"id": "obj1"}, frame)
        proc.process_frame({"id": "obj2"}, frame)
        # Give the worker time to process
        time.sleep(0.1)
        results = proc.drain_results()
        self.assertEqual(len(results), 2)
        # Order is preserved: the queue is FIFO.
        self.assertEqual(results[0]["id"], "obj1")
        self.assertEqual(results[1]["id"], "obj2")
        # Second drain should be empty
        self.assertEqual(len(proc.drain_results()), 0)

    def test_backpressure_drops_tasks(self):
        """When queue is full, new tasks are silently dropped."""
        proc = StubDeferredProcessor(max_queue=2)
        frame = np.zeros((10, 10, 3), dtype=np.uint8)
        for i in range(10):
            proc.process_frame({"id": f"obj{i}"}, frame)
        time.sleep(0.2)
        results = proc.drain_results()
        # The key property: no crash, no unbounded growth
        # (exact count depends on worker/main thread interleaving).
        self.assertLessEqual(len(results), 10)
        self.assertGreater(len(results), 0)

    def test_handle_request_through_worker(self):
        """handle_request blocks until the worker processes it and returns a response."""
        proc = StubDeferredProcessor()
        result = proc.handle_request("reload", {"name": "my_model"})
        self.assertEqual(result, {"success": True, "model": "my_model"})

    def test_expire_object_serialized_with_work(self):
        """expire_object goes through the queue, serialized with inference work."""
        proc = StubDeferredProcessor()
        frame = np.zeros((10, 10, 3), dtype=np.uint8)
        proc.process_frame({"id": "obj1"}, frame)
        proc.expire_object("obj1", "front_door")
        time.sleep(0.1)
        # Both should have been processed in order
        self.assertEqual(len(proc.processed_items), 2)
        self.assertEqual(proc.processed_items[0][0], "obj1")
        self.assertEqual(proc.processed_items[1], ("expired", "obj1"))

    def test_shutdown_joins_worker(self):
        """shutdown() signals the worker to stop and joins the thread."""
        proc = StubDeferredProcessor()
        proc.shutdown()
        self.assertFalse(proc._worker.is_alive())

    def test_drain_results_returns_list(self):
        """drain_results returns a plain list, not a deque."""
        proc = StubDeferredProcessor()
        results = proc.drain_results()
        self.assertIsInstance(results, list)
class TestCustomObjectClassificationDeferred(unittest.TestCase):
    """Test that CustomObjectClassificationProcessor uses the deferred pattern correctly."""

    def _make_processor(self):
        # Build a processor with mocked config/model/publisher wiring and
        # the TFLite detector construction patched out (name-mangled
        # __build_detector), then install a fake labelmap.
        config = MagicMock()
        model_config = MagicMock()
        model_config.name = "test_breed"
        model_config.object_config = MagicMock()
        model_config.object_config.objects = ["dog"]
        model_config.threshold = 0.5
        model_config.save_attempts = 10
        model_config.object_config.classification_type = "sub_label"
        publisher = MagicMock()
        requestor = MagicMock()
        metrics = MagicMock()
        metrics.classification_speeds = {}
        metrics.classification_cps = {}
        with patch.object(
            CustomObjectClassificationProcessor,
            "_CustomObjectClassificationProcessor__build_detector",
        ):
            proc = CustomObjectClassificationProcessor(
                config, model_config, publisher, requestor, metrics
            )
        # No real interpreter: tests exercise the queueing paths only.
        proc.interpreter = None
        proc.tensor_input_details = [{"index": 0}]
        proc.tensor_output_details = [{"index": 0}]
        proc.labelmap = {0: "labrador", 1: "poodle", 2: "none"}
        return proc

    def test_is_deferred_processor(self):
        """CustomObjectClassificationProcessor should be a DeferredRealtimeProcessorApi."""
        proc = self._make_processor()
        self.assertIsInstance(proc, DeferredRealtimeProcessorApi)

    def test_expire_clears_history(self):
        """expire_object should clear classification history for the object."""
        proc = self._make_processor()
        proc.classification_history["obj1"] = [("labrador", 0.9, 1.0)]
        proc.expire_object("obj1", "front")
        # Expiry is handled asynchronously on the worker thread.
        time.sleep(0.1)
        self.assertNotIn("obj1", proc.classification_history)

    def test_drain_results_empty_when_no_model(self):
        """With no interpreter, process_frame saves training images but emits no results."""
        proc = self._make_processor()
        proc.interpreter = None
        frame = np.zeros((150, 100), dtype=np.uint8)
        obj_data = {
            "id": "obj1",
            "label": "dog",
            "false_positive": False,
            "end_time": None,
            "box": [10, 10, 50, 50],
            "camera": "front",
        }
        # Patch the attempt writer so no files are written during the test.
        with patch(
            "frigate.data_processing.real_time.custom_classification.write_classification_attempt"
        ):
            proc.process_frame(obj_data, frame)
            time.sleep(0.1)
        results = proc.drain_results()
        self.assertEqual(len(results), 0)
# Allow invoking this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@ -2,6 +2,7 @@
import os
import unittest
from unittest.mock import MagicMock, patch
from frigate.config.env import (
FRIGATE_ENV_VARS,
@ -10,6 +11,71 @@ from frigate.config.env import (
)
class TestGo2RtcAddStreamSubstitution(unittest.TestCase):
    """Covers the API path: PUT /go2rtc/streams/{stream_name}.

    The route shells out to go2rtc via `requests.put`; we mock the HTTP call
    and assert that the substituted `src` parameter handles the same mixed
    {FRIGATE_*} + literal-brace strings as the config-loading path.
    """

    def setUp(self):
        # Snapshot the process-wide env-var map so tests may mutate it freely.
        self._original_env_vars = dict(FRIGATE_ENV_VARS)

    def tearDown(self):
        # Restore the snapshot so no test leaks FRIGATE_* entries.
        FRIGATE_ENV_VARS.clear()
        FRIGATE_ENV_VARS.update(self._original_env_vars)

    def _call_route(self, src: str) -> str:
        """Invoke go2rtc_add_stream and return the substituted src param."""
        # Imported lazily so import errors surface per-test rather than at
        # collection time.
        from frigate.api import camera as camera_api

        captured = {}

        def fake_put(url, params=None, timeout=None):
            # Record the query params the route would have sent to go2rtc.
            captured["params"] = params
            resp = MagicMock()
            resp.ok = True
            resp.text = ""
            resp.status_code = 200
            return resp

        with patch.object(camera_api.requests, "put", side_effect=fake_put):
            camera_api.go2rtc_add_stream(
                request=MagicMock(), stream_name="cam1", src=src
            )
        return captured["params"]["src"]

    def test_mixed_localtime_and_frigate_var(self):
        """%{localtime\\:...} alongside {FRIGATE_USER} substitutes only the var."""
        FRIGATE_ENV_VARS["FRIGATE_USER"] = "admin"
        src = (
            "ffmpeg:rtsp://host/s#raw=-vf "
            "drawtext=text=%{localtime\\:%Y-%m-%d}:user={FRIGATE_USER}"
        )
        self.assertEqual(
            self._call_route(src),
            "ffmpeg:rtsp://host/s#raw=-vf "
            "drawtext=text=%{localtime\\:%Y-%m-%d}:user=admin",
        )

    def test_unknown_var_falls_back_to_raw_src(self):
        """Existing route behavior: unknown {FRIGATE_*} keeps raw src."""
        src = "rtsp://host/{FRIGATE_NONEXISTENT}/stream"
        self.assertEqual(self._call_route(src), src)

    def test_malformed_placeholder_rejected_via_api(self):
        """Malformed FRIGATE placeholders raise (not silently passed through).

        Regression: previously camera.py caught any KeyError and fell back
        to the raw src, so `{FRIGATE_FOO:>5}` was silently accepted via the
        API while config loading rejected it. The helper now raises
        ValueError for malformed syntax to keep the two paths consistent.
        """
        with self.assertRaises(ValueError):
            self._call_route("rtsp://host/{FRIGATE_FOO:>5}/stream")
class TestEnvString(unittest.TestCase):
def setUp(self):
self._original_env_vars = dict(FRIGATE_ENV_VARS)
@ -43,6 +109,72 @@ class TestEnvString(unittest.TestCase):
with self.assertRaises(KeyError):
validate_env_string("{FRIGATE_NONEXISTENT_VAR}")
def test_non_frigate_braces_passthrough(self):
    """Non-FRIGATE brace constructs must survive substitution untouched.

    Regression test for ffmpeg drawtext expressions like
    "%{localtime\\:%Y-%m-%d}" being mangled by str.format().
    """
    expr = (
        "ffmpeg:rtsp://127.0.0.1/src#raw=-vf "
        "drawtext=text=%{localtime\\:%Y-%m-%d_%H\\:%M\\:%S}"
        ":x=5:fontcolor=white"
    )
    self.assertEqual(expr, validate_env_string(expr))
def test_double_brace_escape_preserved(self):
    """`{{output}}` collapses to `{output}` (documented go2rtc escape)."""
    source = "exec:ffmpeg -i /media/file.mp4 -f rtsp {{output}}"
    expected = "exec:ffmpeg -i /media/file.mp4 -f rtsp {output}"
    self.assertEqual(validate_env_string(source), expected)
def test_double_brace_around_frigate_var(self):
    """`{{FRIGATE_FOO}}` stays literal — escape takes precedence."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    collapsed = validate_env_string("{{FRIGATE_FOO}}")
    self.assertEqual(collapsed, "{FRIGATE_FOO}")
def test_mixed_frigate_var_and_braces(self):
    """A FRIGATE_ var alongside literal single braces substitutes only the var."""
    FRIGATE_ENV_VARS["FRIGATE_USER"] = "admin"
    substituted = validate_env_string(
        "drawtext=text=%{localtime}:user={FRIGATE_USER}:x=5"
    )
    self.assertEqual("drawtext=text=%{localtime}:user=admin:x=5", substituted)
def test_triple_braces_around_frigate_var(self):
    """`{{{FRIGATE_FOO}}}` collapses like str.format(): `{bar}`."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    collapsed = validate_env_string("{{{FRIGATE_FOO}}}")
    self.assertEqual(collapsed, "{bar}")
def test_trailing_double_brace_after_var(self):
    """`{FRIGATE_FOO}}}` collapses like str.format(): `bar}`."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    collapsed = validate_env_string("{FRIGATE_FOO}}}")
    self.assertEqual(collapsed, "bar}")
def test_leading_double_brace_then_var(self):
    """`{{{FRIGATE_FOO}` collapses like str.format(): `{bar`."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    collapsed = validate_env_string("{{{FRIGATE_FOO}")
    self.assertEqual(collapsed, "{bar")
def test_malformed_unterminated_placeholder_raises(self):
    """`{FRIGATE_FOO` (no closing brace) raises like str.format() did."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    self.assertRaises(ValueError, validate_env_string, "prefix-{FRIGATE_FOO")
def test_malformed_format_spec_raises(self):
    """`{FRIGATE_FOO:>5}` (format spec) raises like str.format() did."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    self.assertRaises(ValueError, validate_env_string, "{FRIGATE_FOO:>5}")
def test_malformed_conversion_raises(self):
    """`{FRIGATE_FOO!r}` (conversion) raises like str.format() did."""
    FRIGATE_ENV_VARS["FRIGATE_FOO"] = "bar"
    self.assertRaises(ValueError, validate_env_string, "{FRIGATE_FOO!r}")
class TestEnvVars(unittest.TestCase):
def setUp(self):

View File

@ -0,0 +1,385 @@
"""Tests for export progress tracking, broadcast, and FFmpeg parsing."""
import io
import unittest
from unittest.mock import MagicMock, patch
from frigate.jobs.export import (
PROGRESS_BROADCAST_MIN_INTERVAL,
ExportJob,
ExportJobManager,
)
from frigate.record.export import PlaybackSourceEnum, RecordingExporter
from frigate.types import JobStatusTypesEnum
def _make_exporter(
    end_minus_start: int = 100,
    ffmpeg_input_args=None,
    ffmpeg_output_args=None,
    on_progress=None,
) -> RecordingExporter:
    """Construct a RecordingExporter while bypassing its real __init__,
    which would create directories and demand a full FrigateConfig."""
    rec = RecordingExporter.__new__(RecordingExporter)
    # Identity / configuration stubs.
    rec.config = MagicMock()
    rec.export_id = "test_export"
    rec.camera = "front"
    rec.user_provided_name = None
    rec.user_provided_image = None
    rec.export_case_id = None
    # Requested window: duration controlled by end_minus_start.
    rec.start_time = 1_000
    rec.end_time = 1_000 + end_minus_start
    rec.playback_source = PlaybackSourceEnum.recordings
    # Per-test ffmpeg argument and progress-callback overrides.
    rec.ffmpeg_input_args = ffmpeg_input_args
    rec.ffmpeg_output_args = ffmpeg_output_args
    rec.cpu_fallback = False
    rec.on_progress = on_progress
    return rec
class TestExportJobToDict(unittest.TestCase):
    """Serialization of ExportJob progress fields via to_dict()."""

    def test_to_dict_includes_progress_fields(self) -> None:
        job = ExportJob(camera="front", request_start_time=0, request_end_time=10)
        payload = job.to_dict()
        # Fresh jobs expose the progress fields with their defaults.
        assert "current_step" in payload
        assert "progress_percent" in payload
        assert payload["current_step"] == "queued"
        assert payload["progress_percent"] == 0.0

    def test_to_dict_reflects_updated_progress(self) -> None:
        job = ExportJob(camera="front", request_start_time=0, request_end_time=10)
        job.current_step = "encoding"
        job.progress_percent = 42.5
        payload = job.to_dict()
        assert payload["current_step"] == "encoding"
        assert payload["progress_percent"] == 42.5
class TestExpectedOutputDuration(unittest.TestCase):
    """How RecordingExporter derives the expected output duration."""

    def test_normal_export_uses_input_duration(self) -> None:
        rec = _make_exporter(end_minus_start=600)
        assert rec._expected_output_duration_seconds() == 600.0

    def test_timelapse_uses_setpts_factor(self) -> None:
        # 1000s of input scaled by setpts=0.04 yields 40s of output.
        rec = _make_exporter(
            end_minus_start=1000,
            ffmpeg_input_args="-y",
            ffmpeg_output_args="-vf setpts=0.04*PTS -r 30",
        )
        assert rec._expected_output_duration_seconds() == 40.0

    def test_unknown_factor_falls_back_to_input_duration(self) -> None:
        rec = _make_exporter(
            end_minus_start=300,
            ffmpeg_input_args="-y",
            ffmpeg_output_args="-c:v libx264 -preset veryfast",
        )
        assert rec._expected_output_duration_seconds() == 300.0

    def test_zero_factor_falls_back_to_input_duration(self) -> None:
        rec = _make_exporter(
            end_minus_start=300,
            ffmpeg_input_args="-y",
            ffmpeg_output_args="-vf setpts=0*PTS",
        )
        assert rec._expected_output_duration_seconds() == 300.0

    def test_uses_actual_recorded_seconds_when_available(self) -> None:
        """If the DB shows only 120s of saved recordings inside a 1h
        requested range, progress should be computed against 120s."""
        rec = _make_exporter(end_minus_start=3600)
        rec._sum_source_duration_seconds = lambda: 120.0  # type: ignore[method-assign]
        assert rec._expected_output_duration_seconds() == 120.0

    def test_actual_recorded_seconds_scaled_by_setpts(self) -> None:
        """Recorded duration must still be scaled by the timelapse factor."""
        rec = _make_exporter(
            end_minus_start=3600,
            ffmpeg_input_args="-y",
            ffmpeg_output_args="-vf setpts=0.04*PTS -r 30",
        )
        rec._sum_source_duration_seconds = lambda: 600.0  # type: ignore[method-assign]
        # 600s * 0.04 = 24s of output.
        assert rec._expected_output_duration_seconds() == 24.0

    def test_db_failure_falls_back_to_requested_range(self) -> None:
        rec = _make_exporter(end_minus_start=300)
        rec._sum_source_duration_seconds = lambda: None  # type: ignore[method-assign]
        assert rec._expected_output_duration_seconds() == 300.0
class TestProgressFlagInjection(unittest.TestCase):
    """Placement of the -progress/-nostats flags in the ffmpeg command."""

    def test_inserts_before_output_path(self) -> None:
        rec = _make_exporter()
        cmd = ["ffmpeg", "-i", "input.m3u8", "-c", "copy", "/tmp/output.mp4"]
        injected = rec._inject_progress_flags(cmd)
        # The flags must land immediately before the trailing output path.
        expected = cmd[:-1] + ["-progress", "pipe:2", "-nostats"] + cmd[-1:]
        assert injected == expected

    def test_handles_empty_cmd(self) -> None:
        assert _make_exporter()._inject_progress_flags([]) == []
class TestFfmpegProgressParsing(unittest.TestCase):
    """Verify percentage calculation from FFmpeg ``-progress`` output."""

    def _run_with_stderr(
        self,
        stderr_text: str,
        expected_duration_seconds: int = 90,
    ) -> list[tuple[str, float]]:
        """Helper: run _run_ffmpeg_with_progress against a mocked Popen
        whose stderr emits the supplied text. Returns the list of
        (step, percent) tuples that the on_progress callback received."""
        captured: list[tuple[str, float]] = []

        def on_progress(step: str, percent: float) -> None:
            captured.append((step, percent))

        exporter = _make_exporter(
            end_minus_start=expected_duration_seconds,
            on_progress=on_progress,
        )
        # Fake Popen whose stderr replays the canned -progress output.
        fake_proc = MagicMock()
        fake_proc.stdin = io.StringIO()
        fake_proc.stderr = io.StringIO(stderr_text)
        fake_proc.returncode = 0
        fake_proc.wait = MagicMock(return_value=0)
        with patch("frigate.record.export.sp.Popen", return_value=fake_proc):
            returncode, _stderr = exporter._run_ffmpeg_with_progress(
                ["ffmpeg", "-i", "x.m3u8", "/tmp/out.mp4"], "playlist", step="encoding"
            )
        assert returncode == 0
        return captured

    def test_parses_out_time_us_into_percent(self) -> None:
        # 90s duration; 45s out_time => 50%
        stderr = "out_time_us=45000000\nprogress=continue\n"
        captured = self._run_with_stderr(stderr, expected_duration_seconds=90)
        # The first call is the synchronous 0.0 emit before Popen runs.
        assert captured[0] == ("encoding", 0.0)
        assert any(percent == 50.0 for step, percent in captured if step == "encoding")

    def test_progress_end_emits_100_percent(self) -> None:
        # Only 10s of 90s reported, but progress=end must still finish at 100%.
        stderr = "out_time_us=10000000\nprogress=end\n"
        captured = self._run_with_stderr(stderr, expected_duration_seconds=90)
        assert captured[-1] == ("encoding", 100.0)

    def test_clamps_overshoot_at_100(self) -> None:
        # 150s of output reported against 90s expected duration.
        stderr = "out_time_us=150000000\nprogress=continue\n"
        captured = self._run_with_stderr(stderr, expected_duration_seconds=90)
        encoding_values = [p for s, p in captured if s == "encoding" and p > 0]
        assert all(p <= 100.0 for p in encoding_values)
        assert encoding_values[-1] == 100.0

    def test_ignores_garbage_lines(self) -> None:
        # Mix stats noise and a malformed out_time_us in with a valid line.
        stderr = (
            "frame= 120 fps= 30 q=23.0 size= 512kB\n"
            "out_time_us=not-a-number\n"
            "out_time_us=30000000\n"
            "progress=continue\n"
        )
        captured = self._run_with_stderr(stderr, expected_duration_seconds=90)
        # We expect 0.0 (from initial emit) plus the 30s/90s = 33.33...% step
        encoding_percents = sorted({round(p, 2) for s, p in captured})
        assert 0.0 in encoding_percents
        assert any(abs(p - (30 / 90 * 100)) < 0.01 for p in encoding_percents)
class TestBroadcastAggregation(unittest.TestCase):
    """Verify ExportJobManager broadcast payload shape and throttling."""

    def _make_manager(self) -> tuple[ExportJobManager, MagicMock]:
        """Build a manager with an injected mock publisher. Returns
        ``(manager, publisher)`` so tests can assert on broadcast payloads
        without touching ZMQ at all."""
        config = MagicMock()
        publisher = MagicMock()
        manager = ExportJobManager(
            config, max_concurrent=2, max_queued=10, publisher=publisher
        )
        return manager, publisher

    @staticmethod
    def _last_payload(publisher: MagicMock) -> dict:
        # First positional argument of the most recent publish() call.
        return publisher.publish.call_args.args[0]

    def test_empty_jobs_broadcasts_empty_list(self) -> None:
        manager, publisher = self._make_manager()
        manager._broadcast_all_jobs(force=True)
        publisher.publish.assert_called_once()
        payload = self._last_payload(publisher)
        assert payload["job_type"] == "export"
        assert payload["status"] == "queued"
        assert payload["results"]["jobs"] == []

    def test_single_running_job_payload(self) -> None:
        manager, publisher = self._make_manager()
        job = ExportJob(camera="front", request_start_time=0, request_end_time=10)
        job.status = JobStatusTypesEnum.running
        job.current_step = "encoding"
        job.progress_percent = 75.0
        manager.jobs[job.id] = job
        manager._broadcast_all_jobs(force=True)
        payload = self._last_payload(publisher)
        assert payload["status"] == "running"
        assert len(payload["results"]["jobs"]) == 1
        broadcast_job = payload["results"]["jobs"][0]
        assert broadcast_job["current_step"] == "encoding"
        assert broadcast_job["progress_percent"] == 75.0

    def test_multiple_jobs_broadcast(self) -> None:
        manager, publisher = self._make_manager()
        # One queued and one running job; the aggregate reads "running".
        for i, status in enumerate(
            (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)
        ):
            job = ExportJob(
                id=f"job_{i}",
                camera="front",
                request_start_time=0,
                request_end_time=10,
            )
            job.status = status
            manager.jobs[job.id] = job
        manager._broadcast_all_jobs(force=True)
        payload = self._last_payload(publisher)
        assert payload["status"] == "running"
        assert len(payload["results"]["jobs"]) == 2

    def test_completed_jobs_are_excluded(self) -> None:
        manager, publisher = self._make_manager()
        active = ExportJob(id="active", camera="front")
        active.status = JobStatusTypesEnum.running
        finished = ExportJob(id="done", camera="front")
        finished.status = JobStatusTypesEnum.success
        manager.jobs[active.id] = active
        manager.jobs[finished.id] = finished
        manager._broadcast_all_jobs(force=True)
        payload = self._last_payload(publisher)
        ids = [j["id"] for j in payload["results"]["jobs"]]
        assert ids == ["active"]

    def test_throttle_skips_rapid_unforced_broadcasts(self) -> None:
        manager, publisher = self._make_manager()
        job = ExportJob(camera="front")
        job.status = JobStatusTypesEnum.running
        manager.jobs[job.id] = job
        manager._broadcast_all_jobs(force=True)
        # Immediately following non-forced broadcasts should be skipped.
        for _ in range(5):
            manager._broadcast_all_jobs(force=False)
        assert publisher.publish.call_count == 1

    def test_throttle_allows_broadcast_after_interval(self) -> None:
        manager, publisher = self._make_manager()
        job = ExportJob(camera="front")
        job.status = JobStatusTypesEnum.running
        manager.jobs[job.id] = job
        # Drive time.monotonic directly so the test never has to sleep.
        with patch("frigate.jobs.export.time.monotonic") as mock_mono:
            mock_mono.return_value = 100.0
            manager._broadcast_all_jobs(force=True)
            mock_mono.return_value = 100.0 + PROGRESS_BROADCAST_MIN_INTERVAL + 0.01
            manager._broadcast_all_jobs(force=False)
        assert publisher.publish.call_count == 2

    def test_force_bypasses_throttle(self) -> None:
        manager, publisher = self._make_manager()
        job = ExportJob(camera="front")
        job.status = JobStatusTypesEnum.running
        manager.jobs[job.id] = job
        manager._broadcast_all_jobs(force=True)
        manager._broadcast_all_jobs(force=True)
        assert publisher.publish.call_count == 2

    def test_publisher_exceptions_do_not_propagate(self) -> None:
        """A failing publisher must not break the manager: broadcasts are
        best-effort since the dispatcher may not be available (tests,
        startup races)."""
        manager, publisher = self._make_manager()
        publisher.publish.side_effect = RuntimeError("comms down")
        job = ExportJob(camera="front")
        job.status = JobStatusTypesEnum.running
        manager.jobs[job.id] = job
        # Swallow our own RuntimeError if the manager doesn't; the real
        # JobStatePublisher handles its own exceptions internally, so the
        # manager can stay naive. But if something bubbles up it should
        # not escape _broadcast_all_jobs — enforce that contract here.
        try:
            manager._broadcast_all_jobs(force=True)
        except RuntimeError:
            self.fail("_broadcast_all_jobs must tolerate publisher failures")

    def test_progress_callback_updates_job_and_broadcasts(self) -> None:
        manager, _publisher = self._make_manager()
        job = ExportJob(camera="front")
        job.status = JobStatusTypesEnum.running
        manager.jobs[job.id] = job
        callback = manager._make_progress_callback(job)
        callback("encoding", 33.0)
        assert job.current_step == "encoding"
        assert job.progress_percent == 33.0
class TestSchedulesCleanup(unittest.TestCase):
    """Deferred removal of finished jobs via threading.Timer."""

    def test_schedule_job_cleanup_removes_after_delay(self) -> None:
        manager = ExportJobManager(MagicMock(), max_concurrent=1, max_queued=1)
        job = ExportJob(id="cleanup_me", camera="front")
        manager.jobs[job.id] = job
        with patch("frigate.jobs.export.threading.Timer") as timer_cls:
            manager._schedule_job_cleanup(job.id)
        timer_cls.assert_called_once()
        delay, callback = timer_cls.call_args.args
        assert delay > 0
        # Run the scheduled callback by hand and confirm the job vanishes.
        callback()
        assert job.id not in manager.jobs
# Allow invoking this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@ -1,3 +1,4 @@
import datetime
import sys
import unittest
from unittest.mock import MagicMock, patch
@ -74,6 +75,46 @@ class TestMaintainer(unittest.IsolatedAsyncioTestCase):
f"Expected a single warning for unexpected files, got {len(matching)}",
)
async def test_drops_quiet_segment_when_only_motion_retention(self):
    """A motionless, review-free segment must still be dropped from cache.

    Regression: when motion retention is enabled but a segment has no
    motion and no review overlaps it, the segment must still be dropped.
    Otherwise it sits in cache forever, accumulates, and triggers the
    "Unable to keep up with recording segments in cache" warning every
    ~10s as the overflow trim in move_files discards the oldest one.
    """
    config = MagicMock(spec=FrigateConfig)
    camera_config = MagicMock()
    camera_config.record.enabled = True
    # continuous.days == 0 -> motion retention is the only active mode.
    camera_config.record.continuous.days = 0
    camera_config.record.motion.days = 1
    camera_config.record.event_pre_capture = 5
    config.cameras = {"test_cam": camera_config}
    stop_event = MagicMock()
    maintainer = RecordingMaintainer(config, stop_event)
    now = datetime.datetime.now(datetime.timezone.utc)
    start_time = now - datetime.timedelta(seconds=20)
    end_time = now - datetime.timedelta(seconds=10)
    cache_path = "/tmp/cache/test_cam@20260417150000+0000.mp4"
    maintainer.end_time_cache = {cache_path: (end_time, 10.0)}
    # Single processed frame well past end_time with no motion/objects.
    maintainer.object_recordings_info["test_cam"] = [(now.timestamp(), [], [], [])]
    maintainer.audio_recordings_info["test_cam"] = []
    maintainer.drop_segment = MagicMock()
    maintainer.recordings_publisher = MagicMock()
    result = await maintainer.validate_and_move_segment(
        "test_cam",
        reviews=[],
        recording={"start_time": start_time, "cache_path": cache_path},
    )
    # Nothing kept or moved; the segment is explicitly dropped instead.
    self.assertIsNone(result)
    maintainer.drop_segment.assert_called_once_with(cache_path)
# Allow invoking this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@ -116,6 +116,8 @@ class TimelineProcessor(threading.Thread):
),
"attribute": "",
"score": event_data["score"],
"computed_score": event_data.get("computed_score"),
"top_score": event_data.get("top_score"),
},
}

View File

@ -400,6 +400,7 @@ class TrackedObject:
"start_time": self.obj_data["start_time"],
"end_time": self.obj_data.get("end_time", None),
"score": self.obj_data["score"],
"computed_score": self.computed_score,
"box": self.obj_data["box"],
"area": self.obj_data["area"],
"ratio": self.obj_data["ratio"],

View File

@ -62,11 +62,12 @@ def get_camera_regions_grid(
.where((Event.false_positive == None) | (Event.false_positive == False))
.where(Event.start_time > last_update)
)
valid_event_ids = [e["id"] for e in events.dicts()]
logger.debug(f"Found {len(valid_event_ids)} new events for {name}")
event_count = events.count()
logger.debug(f"Found {event_count} new events for {name}")
# no new events, return as is
if not valid_event_ids:
if event_count == 0:
return grid
new_update = datetime.datetime.now().timestamp()
@ -78,7 +79,7 @@ def get_camera_regions_grid(
Timeline.data,
]
)
.where(Timeline.source_id << valid_event_ids)
.where(Timeline.source_id << events)
.limit(10000)
.dicts()
)

View File

@ -807,10 +807,15 @@ async def get_video_properties(
) -> dict[str, Any]:
async def probe_with_ffprobe(
url: str,
rtsp_transport: Optional[str] = None,
) -> tuple[bool, int, int, Optional[str], float]:
"""Fallback using ffprobe: returns (valid, width, height, codec, duration)."""
cmd = [
ffmpeg.ffprobe_path,
cmd = [ffmpeg.ffprobe_path]
if rtsp_transport:
cmd += ["-rtsp_transport", rtsp_transport]
cmd += [
"-rw_timeout",
"5000000",
"-v",
"quiet",
"-print_format",
@ -872,13 +877,27 @@ async def get_video_properties(
cap.release()
return valid, width, height, fourcc, duration
# try cv2 first
is_rtsp = url.startswith("rtsp://")
if is_rtsp:
# skip cv2 for RTSP: its FFmpeg backend has a hardcoded ~30s internal
# timeout that cannot be shortened per-call, and ffprobe bounded by
# -rw_timeout handles RTSP probing reliably
has_video, width, height, fourcc, duration = await probe_with_ffprobe(url)
else:
# try cv2 first for local files, HTTP, RTMP
has_video, width, height, fourcc, duration = probe_with_cv2(url)
# fallback to ffprobe if needed
if not has_video or (get_duration and duration < 0):
has_video, width, height, fourcc, duration = await probe_with_ffprobe(url)
# last resort for RTSP: try TCP transport, since default UDP may be blocked
if (not has_video or (get_duration and duration < 0)) and is_rtsp:
has_video, width, height, fourcc, duration = await probe_with_ffprobe(
url, rtsp_transport="tcp"
)
result: dict[str, Any] = {"has_valid_video": has_video}
if has_video:
result.update({"width": width, "height": height})

View File

@ -471,8 +471,16 @@ class CameraWatchdog(threading.Thread):
p["cmd"], self.logger, p["logpipe"], ffmpeg_process=p["process"]
)
# Update stall metrics based on last processed frame timestamp
# Prune expired reconnect timestamps
now = datetime.now().timestamp()
while (
self.reconnect_timestamps and self.reconnect_timestamps[0] < now - 3600
):
self.reconnect_timestamps.popleft()
if self.reconnects:
self.reconnects.value = len(self.reconnect_timestamps)
# Update stall metrics based on last processed frame timestamp
processed_ts = (
float(self.detection_frame.value) if self.detection_frame else 0.0
)

View File

@ -0,0 +1,116 @@
/**
* Global allowlist of regex patterns that the error collector ignores.
*
* Each entry MUST include a comment explaining what it silences and why.
* The allowlist is filtered at collection time, so failure messages list
* only unfiltered errors.
*
* Per-spec additions go through the `expectedErrors` test fixture parameter
* (see error-collector.ts), not by editing this file. That keeps allowlist
* drift visible per-PR rather than buried in shared infrastructure.
*
* NOTE ON CONSOLE vs REQUEST ERRORS:
* When a network request returns a 5xx response, the browser emits two
* events that the error collector captures:
* [request] "500 Internal Server Error <url>" from onResponse (URL included)
* [console] "Failed to load resource: ..." from onConsole (URL NOT included)
*
* The request-level message includes the URL, so those patterns are specific.
* The console-level message text (from ConsoleMessage.text()) does NOT include
the URL — the URL is stored separately in e.url. Therefore the console
* pattern for HTTP 500s cannot be URL-discriminated, and a single pattern
* covers all such browser echoes. This is safe because every such console
* error is already caught (and specifically matched) by its paired [request]
* entry below.
*/
/**
 * Regex patterns silenced globally by the error collector.
 * See the file-header comment for the console-vs-request pairing rationale.
 */
export const GLOBAL_ALLOWLIST: RegExp[] = [
  // -------------------------------------------------------------------------
  // Browser echo of HTTP 5xx responses (console mirror of [request] events).
  //
  // Whenever the browser receives a 5xx response it emits a console error:
  //   "Failed to load resource: the server responded with a status of 500
  //    (Internal Server Error)"
  // The URL is NOT part of ConsoleMessage.text() — it is stored separately.
  // Every console error of this form is therefore paired with a specific
  // [request] 500 entry below that names the exact endpoint. Allowlisting
  // this pattern here silences the browser echo; the request-level entries
  // enforce specificity.
  // -------------------------------------------------------------------------
  /Failed to load resource: the server responded with a status of 500/,

  // -------------------------------------------------------------------------
  // Mock infrastructure gaps — API endpoints not yet covered by ApiMocker.
  //
  // These produce 500s because Vite's preview server has no handler for them.
  // Each is a TODO(real-bug): the mock should be extended so these endpoints
  // return sensible fixture data in tests.
  //
  // Only [request] patterns are listed here; the paired [console] mirror is
  // covered by the "Failed to load resource" entry above.
  // -------------------------------------------------------------------------

  // TODO(real-bug): ApiMocker registers "**/api/reviews**" (plural) but the
  // app fetches /api/review (singular) for the review list and timeline.
  // Affects: review.spec.ts, navigation.spec.ts, live.spec.ts, auth.spec.ts.
  // Fix: add route handlers for /api/review and /api/review/** in api-mocker.ts.
  /500 Internal Server Error.*\/api\/review(\?|\/|$)/,

  // TODO(real-bug): /api/stats/history is not mocked; the system page fetches
  // it for the detector/process history charts.
  // Fix: add route handler for /api/stats/history in api-mocker.ts.
  /500 Internal Server Error.*\/api\/stats\/history/,

  // TODO(real-bug): /api/event_ids is not mocked; the explore/search page
  // fetches it to resolve event IDs for display.
  // Fix: add route handler for /api/event_ids in api-mocker.ts.
  /500 Internal Server Error.*\/api\/event_ids/,

  // TODO(real-bug): /api/sub_labels?split_joined=1 returns 500; the mock
  // registers "**/api/sub_labels" which may not match when a query string is
  // present, or route registration order causes the catch-all to win first.
  // Fix: change the mock route to "**/api/sub_labels**" in api-mocker.ts.
  /500 Internal Server Error.*\/api\/sub_labels/,

  // TODO(real-bug): MediaMocker handles /api/*/latest.jpg but the app also
  // requests /api/*/latest.webp (webp format) for camera snapshots.
  // Affects: live.spec.ts, review.spec.ts, auth.spec.ts, navigation.spec.ts.
  // Fix: add route handler for /api/*/latest.webp in MediaMocker.install().
  /500 Internal Server Error.*\/api\/[^/]+\/latest\.webp/,
  /failed: net::ERR_ABORTED.*\/api\/[^/]+\/latest\.webp/,

  // -------------------------------------------------------------------------
  // Mock infrastructure gap — WebSocket streams.
  //
  // Playwright's page.route() does not intercept WebSocket connections.
  // The jsmpeg live-stream WS connections to /live/jsmpeg/* always fail
  // with a 500 handshake error because the Vite preview server has no WS
  // handler. TODO(real-bug): add WsMocker support for jsmpeg WebSocket
  // connections, or suppress the connection attempt in the test environment.
  // Affects: live.spec.ts (single camera view), auth.spec.ts.
  // -------------------------------------------------------------------------
  /WebSocket connection to '.*\/live\/jsmpeg\/.*' failed/,

  // -------------------------------------------------------------------------
  // Benign — lazy-loaded chunk aborts during navigation.
  //
  // When a test navigates away from a page while the browser is still
  // fetching lazily-split JS/CSS asset chunks, the in-flight fetch is
  // cancelled (net::ERR_ABORTED). This is normal browser behaviour on
  // navigation and does not indicate a real error; the assets load fine
  // on a stable connection.
  // -------------------------------------------------------------------------
  /failed: net::ERR_ABORTED.*\/assets\//,

  // -------------------------------------------------------------------------
  // Real app bug — Radix UI DialogContent missing accessible title.
  //
  // TODO(real-bug): A dialog somewhere in the app renders <DialogContent>
  // without a <DialogTitle>, violating Radix UI's accessibility contract.
  // The warning originates from the bundled main-*.js. Investigate which
  // dialog component is missing the title and add a VisuallyHidden DialogTitle.
  // Likely candidate: face-library or search-detail dialog in explore page.
  // See: https://radix-ui.com/primitives/docs/components/dialog
  // -------------------------------------------------------------------------
  /`DialogContent` requires a `DialogTitle`/,
];

View File

@ -0,0 +1,122 @@
/**
* Collects console errors, page errors, and failed network requests
* during a Playwright test, with regex-based allowlist filtering.
*
* Usage:
* const collector = installErrorCollector(page, [...GLOBAL_ALLOWLIST]);
* // ... run test ...
* collector.assertClean(); // throws if any non-allowlisted error
*
* The collector is wired into the `frigateApp` fixture so every test
* gets it for free. Tests that intentionally trigger an error pass
* additional regexes via the `expectedErrors` fixture parameter.
*/
import type { Page, Request, Response, ConsoleMessage } from "@playwright/test";
/** A single captured error event, normalized across the three listeners. */
export type CollectedError = {
  /** Which listener produced this entry. */
  kind: "console" | "pageerror" | "request";
  /** Console text, Error.message, or a "status statusText url" line. */
  message: string;
  /** Associated URL, when the source event carried one. */
  url?: string;
  /** First stack frame for page errors, when a stack was available. */
  stack?: string;
};
/** Handle returned by installErrorCollector. */
export type ErrorCollector = {
  /** Errors collected so far, already filtered through the allowlist. */
  errors: CollectedError[];
  /** Fails (throws) if any non-allowlisted error was collected. */
  assertClean(): void;
};
/** True when any allowlist pattern matches the given message. */
function isAllowlisted(message: string, allowlist: RegExp[]): boolean {
  for (const pattern of allowlist) {
    if (pattern.test(message)) {
      return true;
    }
  }
  return false;
}
/**
 * Extracts the first "at ..." frame from an Error stack string.
 * Returns undefined when there is no stack or no frame line.
 */
function firstStackFrame(stack: string | undefined): string | undefined {
  if (!stack) return undefined;
  for (const raw of stack.split("\n")) {
    const line = raw.trim();
    // The error-message line(s) come first; frames are the "at ..." lines.
    if (line.startsWith("at ")) {
      return line;
    }
  }
  return undefined;
}
/**
 * Same-origin check between a request URL and the configured baseURL.
 * With no baseURL configured every URL counts as same-origin; URLs that
 * fail to parse count as cross-origin.
 */
function isSameOrigin(url: string, baseURL: string | undefined): boolean {
  if (!baseURL) return true;
  let origin: string;
  let baseOrigin: string;
  try {
    origin = new URL(url).origin;
    baseOrigin = new URL(baseURL).origin;
  } catch {
    return false;
  }
  return origin === baseOrigin;
}
/**
 * Wires console/pageerror/response/requestfailed listeners onto the page
 * and records every event not matched by the allowlist.
 *
 * Captured events:
 * - console: only messages of type "error"
 * - pageerror: uncaught exceptions; records the first stack frame
 * - response: same-origin responses with status >= 500
 * - requestfailed: same-origin requests that never completed
 */
export function installErrorCollector(
  page: Page,
  allowlist: RegExp[],
): ErrorCollector {
  const collected: CollectedError[] = [];
  // baseURL lives on the context's private options bag; there is no public accessor.
  const baseURL = (
    page.context() as unknown as { _options?: { baseURL?: string } }
  )._options?.baseURL;

  // Central funnel: allowlisted messages are dropped, everything else recorded.
  const record = (entry: CollectedError) => {
    if (!isAllowlisted(entry.message, allowlist)) {
      collected.push(entry);
    }
  };

  page.on("console", (msg: ConsoleMessage) => {
    if (msg.type() === "error") {
      record({ kind: "console", message: msg.text(), url: msg.location().url });
    }
  });
  page.on("pageerror", (err: Error) => {
    record({
      kind: "pageerror",
      message: err.message,
      stack: firstStackFrame(err.stack),
    });
  });
  page.on("response", (response: Response) => {
    if (response.status() < 500) return;
    const url = response.url();
    if (!isSameOrigin(url, baseURL)) return;
    record({
      kind: "request",
      message: `${response.status()} ${response.statusText()} ${url}`,
      url,
    });
  });
  page.on("requestfailed", (request: Request) => {
    const url = request.url();
    if (!isSameOrigin(url, baseURL)) return;
    record({
      kind: "request",
      message: `failed: ${request.failure()?.errorText ?? "unknown"} ${url}`,
      url,
    });
  });

  return {
    errors: collected,
    assertClean() {
      if (collected.length === 0) return;
      const formatted = collected
        .map((e, i) => {
          const stack = e.stack ? `\n ${e.stack}` : "";
          const url = e.url && e.url !== e.message ? ` (${e.url})` : "";
          return ` ${i + 1}. [${e.kind}] ${e.message}${url}${stack}`;
        })
        .join("\n");
      throw new Error(
        `Page emitted ${collected.length} unexpected error${collected.length === 1 ? "" : "s"}:\n${formatted}`,
      );
    },
  };
}

View File

@ -0,0 +1,120 @@
/* eslint-disable react-hooks/rules-of-hooks */
/**
* Extended Playwright test fixture with FrigateApp.
*
* Every test imports `test` and `expect` from this file instead of
* @playwright/test directly. The `frigateApp` fixture provides a
* fully mocked Frigate frontend ready for interaction.
*
* The fixture also installs the error collector (see error-collector.ts).
* Any console error, page error, or same-origin failed request that is
* not on the global allowlist or the test's `expectedErrors` list will
* fail the test in the fixture's teardown.
*
* CRITICAL: All route/WS handlers are registered before page.goto()
* to prevent AuthProvider from redirecting to login.html.
*/
import { test as base, expect, type Page } from "@playwright/test";
import {
ApiMocker,
MediaMocker,
type ApiMockOverrides,
} from "../helpers/api-mocker";
import { WsMocker } from "../helpers/ws-mocker";
import { installErrorCollector, type ErrorCollector } from "./error-collector";
import { GLOBAL_ALLOWLIST } from "./error-allowlist";
/**
 * Facade over the mocked Frigate frontend: owns the API, media, and WS
 * mockers plus the Playwright page, and provides navigation helpers.
 */
export class FrigateApp {
  public api: ApiMocker;
  public media: MediaMocker;
  public ws: WsMocker;
  public page: Page;
  // Viewport mode, derived from the Playwright project name in the constructor.
  private isDesktop: boolean;

  constructor(page: Page, projectName: string) {
    this.page = page;
    this.api = new ApiMocker(page);
    this.media = new MediaMocker(page);
    this.ws = new WsMocker();
    this.isDesktop = projectName === "desktop";
  }

  get isMobile() {
    return !this.isDesktop;
  }

  /** Install all mocks with default data. Call before goto(). */
  async installDefaults(overrides?: ApiMockOverrides) {
    // Mock i18n locale files to prevent 404s
    await this.page.route("**/locales/**", async (route) => {
      // Let the request through to the built files
      // NOTE(review): route.fallback() only defers to the next matching
      // handler, so with no other handler this route is a pass-through —
      // confirm this actually prevents 404s rather than being a no-op.
      return route.fallback();
    });
    // Order matters: WS, media, and API mocks are all registered here,
    // before any navigation, so the app never hits an unmocked endpoint.
    await this.ws.install(this.page);
    await this.media.install();
    await this.api.install(overrides);
  }

  /** Navigate to a page. Always call installDefaults() first. */
  async goto(path: string) {
    await this.page.goto(path);
    // Wait for the app to render past the loading indicator
    await this.page.waitForSelector("#pageRoot", { timeout: 10_000 });
  }

  /** Navigate to a page that may show a loading indicator */
  async gotoAndWait(path: string, selector: string) {
    await this.page.goto(path);
    await this.page.waitForSelector(selector, { timeout: 10_000 });
  }
}
type FrigateFixtures = {
  /** Fully mocked Frigate frontend, installed before each test navigates. */
  frigateApp: FrigateApp;
  /**
   * Per-test additional allowlist regex patterns. Tests that intentionally
   * trigger errors (e.g. error-state tests that hit a mocked 500) declare
   * their expected errors here so the collector ignores them.
   *
   * Default is `[]`; most tests should not need this.
   */
  expectedErrors: RegExp[];
  /** Error collector handle; exposed so tests can inspect collected errors. */
  errorCollector: ErrorCollector;
};
export const test = base.extend<FrigateFixtures>({
  // `option: true` lets tests override via test.use({ expectedErrors: [...] }).
  expectedErrors: [[], { option: true }],
  errorCollector: async ({ page, expectedErrors }, use, testInfo) => {
    const collector = installErrorCollector(page, [
      ...GLOBAL_ALLOWLIST,
      ...expectedErrors,
    ]);
    await use(collector);
    // Everything below runs as fixture teardown, after the test body.
    if (process.env.E2E_STRICT_ERRORS === "1") {
      collector.assertClean();
    } else if (collector.errors.length > 0) {
      // Soft mode: attach errors to the test report so they're visible
      // without failing the run.
      await testInfo.attach("collected-errors.txt", {
        body: collector.errors
          .map((e) => `[${e.kind}] ${e.message}${e.url ? ` (${e.url})` : ""}`)
          .join("\n"),
        contentType: "text/plain",
      });
    }
  },
  frigateApp: async ({ page, errorCollector }, use, testInfo) => {
    // Reference the collector so its `use()` runs and teardown fires
    void errorCollector;
    const app = new FrigateApp(page, testInfo.project.name);
    await app.installDefaults();
    await use(app);
  },
});

export { expect };

View File

@ -0,0 +1,77 @@
/**
* Camera activity WebSocket payload factory.
*
* The camera_activity topic payload is double-serialized:
* the WS message contains { topic: "camera_activity", payload: JSON.stringify(activityMap) }
*/
/** Per-camera entry in the camera_activity WS topic payload. */
export interface CameraActivityState {
  /** Feature-toggle snapshot for the camera, mirrored from its config. */
  config: {
    enabled: boolean;
    detect: boolean;
    record: boolean;
    snapshots: boolean;
    audio: boolean;
    audio_transcription: boolean;
    notifications: boolean;
    /** Unix timestamp until which notifications are suspended; 0 = not suspended. */
    notifications_suspended: number;
    autotracking: boolean;
    alerts: boolean;
    detections: boolean;
    object_descriptions: boolean;
    review_descriptions: boolean;
  };
  /** Whether motion is currently detected on the camera. */
  motion: boolean;
  /** Currently tracked objects. box/region coordinates are relative (0-1). */
  objects: Array<{
    label: string;
    score: number;
    box: [number, number, number, number];
    area: number;
    ratio: number;
    region: [number, number, number, number];
    current_zones: string[];
    id: string;
  }>;
  /** Currently active audio detections. */
  audio_detections: Array<{
    label: string;
    score: number;
  }>;
}
/**
 * Baseline per-camera activity: camera enabled with detect/record/snapshots
 * on, no motion, and no active object or audio detections.
 */
function defaultCameraActivity(): CameraActivityState {
  const config: CameraActivityState["config"] = {
    enabled: true,
    detect: true,
    record: true,
    snapshots: true,
    audio: false,
    audio_transcription: false,
    notifications: false,
    notifications_suspended: 0,
    autotracking: false,
    alerts: true,
    detections: true,
    object_descriptions: false,
    review_descriptions: false,
  };
  return { config, motion: false, objects: [], audio_detections: [] };
}

/**
 * Builds the camera_activity WS payload for the given cameras. Per-camera
 * overrides are shallow-merged over the defaults.
 */
export function cameraActivityPayload(
  cameras: string[],
  overrides?: Partial<Record<string, Partial<CameraActivityState>>>,
): string {
  const activity: Record<string, CameraActivityState> = {};
  cameras.forEach((name) => {
    const patch = overrides?.[name] ?? {};
    activity[name] = {
      ...defaultCameraActivity(),
      ...patch,
    } as CameraActivityState;
  });
  // Double-serialize: the WS payload is a JSON string
  return JSON.stringify(activity);
}

View File

@ -0,0 +1 @@
[{"id": "case-001", "name": "Package Theft Investigation", "description": "Review of suspicious activity near the front porch", "created_at": 1775407931.3863528, "updated_at": 1775483531.3863528}]

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,76 @@
/**
* FrigateConfig factory for E2E tests.
*
* Uses a real config snapshot generated from the Python backend's FrigateConfig
* model. This guarantees all fields are present and match what the app expects.
* Tests override specific fields via DeepPartial.
*/
import { readFileSync } from "node:fs";
import { resolve, dirname } from "node:path";
import { fileURLToPath } from "node:url";
// Resolve paths relative to this module (ESM has no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));
// Full FrigateConfig snapshot generated by generate-mock-data.py; read once
// synchronously at module import time.
const configSnapshot = JSON.parse(
  readFileSync(resolve(__dirname, "config-snapshot.json"), "utf-8"),
);
/** Recursively optional version of T, used for test-side config overrides. */
export type DeepPartial<T> = {
  [P in keyof T]?: T[P] extends object ? DeepPartial<T[P]> : T[P];
};

/**
 * Non-mutating recursive merge: plain-object values are merged key by key,
 * while arrays and primitives from `overrides` replace the base value.
 * `undefined` override values are ignored so they never erase base fields.
 */
function deepMerge<T extends Record<string, unknown>>(
  base: T,
  overrides?: DeepPartial<T>,
): T {
  if (!overrides) return base;
  const merged = { ...base };
  for (const [key, val] of Object.entries(overrides) as [keyof T, unknown][]) {
    if (val === undefined) continue;
    const current = merged[key];
    const bothPlainObjects =
      typeof val === "object" &&
      val !== null &&
      !Array.isArray(val) &&
      typeof current === "object" &&
      current !== null &&
      !Array.isArray(current);
    merged[key] = bothPlainObjects
      ? (deepMerge(
          current as Record<string, unknown>,
          val as DeepPartial<Record<string, unknown>>,
        ) as T[keyof T])
      : (val as T[keyof T]);
  }
  return merged;
}
// The base config is a real snapshot from the Python backend.
// Apply test-specific overrides: friendly names, camera groups, version.
export const BASE_CONFIG = {
  ...configSnapshot,
  // Distinct version string so tests can tell mock config from a real one.
  version: "0.15.0-test",
  cameras: {
    ...configSnapshot.cameras,
    // Friendly names are layered on here rather than baked into the
    // snapshot, so the generated snapshot stays a faithful backend dump.
    front_door: {
      ...configSnapshot.cameras.front_door,
      friendly_name: "Front Door",
    },
    backyard: {
      ...configSnapshot.cameras.backyard,
      friendly_name: "Backyard",
    },
    garage: {
      ...configSnapshot.cameras.garage,
      friendly_name: "Garage",
    },
  },
};
/**
 * Returns BASE_CONFIG with the given deep-partial overrides merged in.
 * With no overrides the shared BASE_CONFIG object is returned unchanged
 * (callers must not mutate the result in that case).
 */
export function configFactory(
  overrides?: DeepPartial<typeof BASE_CONFIG>,
): typeof BASE_CONFIG {
  return deepMerge(BASE_CONFIG, overrides);
}

View File

@ -0,0 +1,54 @@
/**
* Debug replay status factory.
*
* The Replay page polls /api/debug_replay/status every 1s via SWR.
* The no-session state shows an empty state; the active state
* renders the live camera image + debug toggles + objects/messages
* tabs. Used by replay.spec.ts.
*/
/** Response shape of /api/debug_replay/status (polled every 1s by the Replay page). */
export type DebugReplayStatus = {
  /** True while a replay session is running. */
  active: boolean;
  /** Camera the replay is rendered on; null when no session. */
  replay_camera: string | null;
  /** Camera the original footage came from; null when no session. */
  source_camera: string | null;
  /** Replay window start, unix seconds; null when no session. */
  start_time: number | null;
  /** Replay window end, unix seconds; null when no session. */
  end_time: number | null;
  /** True once the live replay stream can be displayed. */
  live_ready: boolean;
};
/** Status payload for "no replay session": inactive with every field nulled out. */
export function noSessionStatus(): DebugReplayStatus {
  const idle: DebugReplayStatus = {
    active: false,
    replay_camera: null,
    source_camera: null,
    start_time: null,
    end_time: null,
    live_ready: false,
  };
  return idle;
}
/**
 * Status payload for an active replay session. Defaults to a front_door
 * replay covering the half hour that ended 30 minutes ago, with the live
 * stream ready.
 */
export function activeSessionStatus(
  opts: {
    camera?: string;
    sourceCamera?: string;
    startTime?: number;
    endTime?: number;
    liveReady?: boolean;
  } = {},
): DebugReplayStatus {
  const nowSec = Date.now() / 1000;
  return {
    active: true,
    replay_camera: opts.camera ?? "front_door",
    source_camera: opts.sourceCamera ?? "front_door",
    start_time: opts.startTime ?? nowSec - 3600,
    end_time: opts.endTime ?? nowSec - 1800,
    live_ready: opts.liveReady ?? true,
  };
}

View File

@ -0,0 +1 @@
[{"id": "event-person-001", "label": "person", "sub_label": null, "camera": "front_door", "start_time": 1775487131.3863528, "end_time": 1775487161.3863528, "false_positive": false, "zones": ["front_yard"], "thumbnail": null, "has_clip": true, "has_snapshot": true, "retain_indefinitely": false, "plus_id": null, "model_hash": "abc123", "detector_type": "cpu", "model_type": "ssd", "data": {"top_score": 0.92, "score": 0.92, "region": [0.1, 0.1, 0.5, 0.8], "box": [0.2, 0.15, 0.45, 0.75], "area": 0.18, "ratio": 0.6, "type": "object", "description": "A person walking toward the front door", "average_estimated_speed": 1.2, "velocity_angle": 45.0, "path_data": [[[0.2, 0.5], 0.0], [[0.3, 0.5], 1.0]]}}, {"id": "event-car-001", "label": "car", "sub_label": null, "camera": "backyard", "start_time": 1775483531.3863528, "end_time": 1775483576.3863528, "false_positive": false, "zones": ["driveway"], "thumbnail": null, "has_clip": true, "has_snapshot": true, "retain_indefinitely": false, "plus_id": null, "model_hash": "def456", "detector_type": "cpu", "model_type": "ssd", "data": {"top_score": 0.87, "score": 0.87, "region": [0.3, 0.2, 0.9, 0.7], "box": [0.35, 0.25, 0.85, 0.65], "area": 0.2, "ratio": 1.25, "type": "object", "description": "A car parked in the driveway", "average_estimated_speed": 0.0, "velocity_angle": 0.0, "path_data": []}}, {"id": "event-person-002", "label": "person", "sub_label": null, "camera": "garage", "start_time": 1775479931.3863528, "end_time": 1775479951.3863528, "false_positive": false, "zones": [], "thumbnail": null, "has_clip": false, "has_snapshot": true, "retain_indefinitely": false, "plus_id": null, "model_hash": "ghi789", "detector_type": "cpu", "model_type": "ssd", "data": {"top_score": 0.78, "score": 0.78, "region": [0.0, 0.0, 0.6, 0.9], "box": [0.1, 0.05, 0.5, 0.85], "area": 0.32, "ratio": 0.5, "type": "object", "description": null, "average_estimated_speed": 0.5, "velocity_angle": 90.0, "path_data": [[[0.1, 0.4], 0.0]]}}]

View File

@ -0,0 +1 @@
[{"id": "export-001", "camera": "front_door", "name": "Front Door - Person Alert", "date": 1775490731.3863528, "video_path": "/exports/export-001.mp4", "thumb_path": "/exports/export-001-thumb.jpg", "in_progress": false, "export_case_id": null}, {"id": "export-002", "camera": "backyard", "name": "Backyard - Car Detection", "date": 1775483531.3863528, "video_path": "/exports/export-002.mp4", "thumb_path": "/exports/export-002-thumb.jpg", "in_progress": false, "export_case_id": "case-001"}, {"id": "export-003", "camera": "garage", "name": "Garage - In Progress", "date": 1775492531.3863528, "video_path": "/exports/export-003.mp4", "thumb_path": "/exports/export-003-thumb.jpg", "in_progress": true, "export_case_id": null}]

View File

@ -0,0 +1,45 @@
/**
* Face library factories.
*
* The /api/faces endpoint returns a record keyed by collection name
* with the list of face image filenames. Grouped training attempts
* live under the "train" key with filenames of the form
* `${event_id}-${timestamp}-${label}-${score}.webp`.
*
* Used by face-library.spec.ts and chat.spec.ts (attachment chip).
*/
/** Map of face-collection name -> list of face image filenames. */
export type FacesMock = Record<string, string[]>;

/** Three named collections with a handful of face images each. */
export function basicFacesMock(): FacesMock {
  const faces: FacesMock = {};
  faces.alice = ["alice-1.webp", "alice-2.webp"];
  faces.bob = ["bob-1.webp"];
  faces.charlie = ["charlie-1.webp"];
  return faces;
}

/** A face library with no collections at all. */
export function emptyFacesMock(): FacesMock {
  return {};
}
/**
* Adds a grouped recent-recognition training attempt to an existing
* faces mock. The grouping key on the backend is the event id so
* images with the same event-id prefix render as one dialog-able card.
*/
/**
 * Adds a grouped recent-recognition training attempt to an existing faces
 * mock. Training filenames have the form
 * `${eventId}-${timestamp}-${label}-${score}.webp`; the backend groups by
 * the event-id prefix so same-event images render as one card.
 * Returns a new mock; the input is not mutated.
 */
export function withGroupedTrainingAttempt(
  base: FacesMock,
  opts: {
    eventId: string;
    attempts: Array<{ timestamp: number; label: string; score: number }>;
  },
): FacesMock {
  const existing = base.train ?? [];
  const added: string[] = [];
  for (const { timestamp, label, score } of opts.attempts) {
    added.push(`${opts.eventId}-${timestamp}-${label}-${score}.webp`);
  }
  return { ...base, train: [...existing, ...added] };
}

View File

@ -0,0 +1,426 @@
#!/usr/bin/env python3
"""Generate E2E mock data from backend Pydantic and Peewee models.
Run from the repo root:
PYTHONPATH=/workspace/frigate python3 web/e2e/fixtures/mock-data/generate-mock-data.py
Strategy:
- FrigateConfig: instantiate the Pydantic config model, then model_dump()
- API responses: instantiate Pydantic response models (ReviewSegmentResponse,
EventResponse, ExportModel, ExportCaseModel) to validate all required fields
- If the backend adds a required field, this script fails at instantiation time
- The Peewee model field list is checked to detect new columns that would
appear in .dicts() API responses but aren't in our mock data
"""
import json
import sys
import time
import warnings
from datetime import datetime, timedelta
from pathlib import Path
# Silence deprecation noise from the backend imports; this script only
# cares about the generated JSON output.
warnings.filterwarnings("ignore")

# Mock JSON files are written next to this script.
OUTPUT_DIR = Path(__file__).parent
# Anchor timestamp: all mock events are fixed offsets from "now".
NOW = time.time()
HOUR = 3600  # seconds
CAMERAS = ["front_door", "backyard", "garage"]
def check_pydantic_fields(pydantic_class, mock_keys, model_name):
    """Verify mock data covers all fields declared in the Pydantic response model.

    The Pydantic response model is what the frontend actually receives.
    Peewee models may have extra legacy columns that are filtered out by
    FastAPI's response_model validation.

    Args:
        pydantic_class: Pydantic model class whose ``model_fields`` define the schema.
        mock_keys: set of keys present in the generated mock data record.
        model_name: human-readable name used in diagnostics.

    Exits the process with status 1 when the mock data is missing declared
    fields; extra mock-only fields are reported but tolerated.
    """
    # model_fields maps field name -> FieldInfo; only the names matter here,
    # so build the set directly instead of looping over .items().
    required_fields = set(pydantic_class.model_fields)
    missing = required_fields - mock_keys
    if missing:
        print(
            f" ERROR: {model_name} response model has fields not in mock data: {missing}",
            file=sys.stderr,
        )
        # Plain string: the previous f-string had no placeholders.
        print(
            " Add these fields to the mock data in this script.",
            file=sys.stderr,
        )
        sys.exit(1)
    extra = mock_keys - required_fields
    if extra:
        print(
            f" NOTE: {model_name} mock data has extra fields (not in response model): {extra}",
        )
def generate_config():
    """Generate FrigateConfig from the Python backend model.

    Builds a minimal three-camera config, validates it through the real
    FrigateConfig Pydantic model, and returns the full model_dump() so every
    field the frontend expects is present.
    """
    # Imported lazily so the script fails with a clear error only when run
    # without the backend on PYTHONPATH.
    from frigate.config import FrigateConfig

    config = FrigateConfig.model_validate_json(
        json.dumps(
            {
                "mqtt": {"host": "mqtt"},
                "cameras": {
                    cam: {
                        "ffmpeg": {
                            "inputs": [
                                {
                                    "path": f"rtsp://10.0.0.{i+1}:554/video",
                                    "roles": ["detect"],
                                }
                            ]
                        },
                        "detect": {"height": 720, "width": 1280, "fps": 5},
                    }
                    for i, cam in enumerate(CAMERAS)
                },
                "camera_groups": {
                    "default": {
                        "cameras": CAMERAS,
                        "icon": "generic",
                        "order": 0,
                    },
                    "outdoor": {
                        "cameras": ["front_door", "backyard"],
                        "icon": "generic",
                        "order": 1,
                    },
                },
            }
        )
    )
    # model_dump() of the full config can emit serialization warnings for
    # runtime-only types; suppress them locally.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        snapshot = config.model_dump()
    # Runtime-computed fields not in the Pydantic dump
    all_attrs = set()
    for attrs in snapshot.get("model", {}).get("attributes_map", {}).values():
        all_attrs.update(attrs)
    snapshot["model"]["all_attributes"] = sorted(all_attrs)
    snapshot["model"]["colormap"] = {}
    return snapshot
def generate_reviews():
    """Generate ReviewSegmentResponse[] validated against Pydantic + Peewee.

    Two alerts and two detections across the three mock cameras; one alert
    is already reviewed so the UI shows both reviewed and unreviewed states.
    Instantiating the response model validates all required fields, so any
    backend schema change fails here at generation time.
    """
    from frigate.api.defs.response.review_response import ReviewSegmentResponse

    reviews = [
        ReviewSegmentResponse(
            id="review-alert-001",
            camera="front_door",
            severity="alert",
            start_time=datetime.fromtimestamp(NOW - 2 * HOUR),
            end_time=datetime.fromtimestamp(NOW - 2 * HOUR + 30),
            has_been_reviewed=False,
            thumb_path="/clips/front_door/review-alert-001-thumb.jpg",
            # The `data` column is stored as a JSON string, not a dict.
            data=json.dumps(
                {
                    "audio": [],
                    "detections": ["person-abc123"],
                    "objects": ["person"],
                    "sub_labels": [],
                    "significant_motion_areas": [],
                    "zones": ["front_yard"],
                }
            ),
        ),
        ReviewSegmentResponse(
            id="review-alert-002",
            camera="backyard",
            severity="alert",
            start_time=datetime.fromtimestamp(NOW - 3 * HOUR),
            end_time=datetime.fromtimestamp(NOW - 3 * HOUR + 45),
            has_been_reviewed=True,
            thumb_path="/clips/backyard/review-alert-002-thumb.jpg",
            data=json.dumps(
                {
                    "audio": [],
                    "detections": ["car-def456"],
                    "objects": ["car"],
                    "sub_labels": [],
                    "significant_motion_areas": [],
                    "zones": ["driveway"],
                }
            ),
        ),
        ReviewSegmentResponse(
            id="review-detect-001",
            camera="garage",
            severity="detection",
            start_time=datetime.fromtimestamp(NOW - 4 * HOUR),
            end_time=datetime.fromtimestamp(NOW - 4 * HOUR + 20),
            has_been_reviewed=False,
            thumb_path="/clips/garage/review-detect-001-thumb.jpg",
            data=json.dumps(
                {
                    "audio": [],
                    "detections": ["person-ghi789"],
                    "objects": ["person"],
                    "sub_labels": [],
                    "significant_motion_areas": [],
                    "zones": [],
                }
            ),
        ),
        ReviewSegmentResponse(
            id="review-detect-002",
            camera="front_door",
            severity="detection",
            start_time=datetime.fromtimestamp(NOW - 5 * HOUR),
            end_time=datetime.fromtimestamp(NOW - 5 * HOUR + 15),
            has_been_reviewed=False,
            thumb_path="/clips/front_door/review-detect-002-thumb.jpg",
            data=json.dumps(
                {
                    "audio": [],
                    "detections": ["car-jkl012"],
                    "objects": ["car"],
                    "sub_labels": [],
                    "significant_motion_areas": [],
                    "zones": ["front_yard"],
                }
            ),
        ),
    ]
    # mode="json" converts datetimes to ISO strings, matching the API wire format.
    result = [r.model_dump(mode="json") for r in reviews]
    # Verify mock data covers all Pydantic response model fields
    check_pydantic_fields(
        ReviewSegmentResponse, set(result[0].keys()), "ReviewSegment"
    )
    return result
def generate_events():
    """Generate EventResponse[] validated against Pydantic + Peewee.

    Three tracked-object events (two people, one car) whose ids line up
    with the detections referenced by the review segments. Instantiating
    the response model validates all required fields.
    """
    from frigate.api.defs.response.event_response import EventResponse

    events = [
        EventResponse(
            id="event-person-001",
            label="person",
            sub_label=None,
            camera="front_door",
            start_time=NOW - 2 * HOUR,
            end_time=NOW - 2 * HOUR + 30,
            false_positive=False,
            zones=["front_yard"],
            # Thumbnails are mocked separately as media responses.
            thumbnail=None,
            has_clip=True,
            has_snapshot=True,
            retain_indefinitely=False,
            plus_id=None,
            model_hash="abc123",
            detector_type="cpu",
            model_type="ssd",
            # box/region are relative (0-1) coordinates.
            data={
                "top_score": 0.92,
                "score": 0.92,
                "region": [0.1, 0.1, 0.5, 0.8],
                "box": [0.2, 0.15, 0.45, 0.75],
                "area": 0.18,
                "ratio": 0.6,
                "type": "object",
                "description": "A person walking toward the front door",
                "average_estimated_speed": 1.2,
                "velocity_angle": 45.0,
                "path_data": [[[0.2, 0.5], 0.0], [[0.3, 0.5], 1.0]],
            },
        ),
        EventResponse(
            id="event-car-001",
            label="car",
            sub_label=None,
            camera="backyard",
            start_time=NOW - 3 * HOUR,
            end_time=NOW - 3 * HOUR + 45,
            false_positive=False,
            zones=["driveway"],
            thumbnail=None,
            has_clip=True,
            has_snapshot=True,
            retain_indefinitely=False,
            plus_id=None,
            model_hash="def456",
            detector_type="cpu",
            model_type="ssd",
            data={
                "top_score": 0.87,
                "score": 0.87,
                "region": [0.3, 0.2, 0.9, 0.7],
                "box": [0.35, 0.25, 0.85, 0.65],
                "area": 0.2,
                "ratio": 1.25,
                "type": "object",
                "description": "A car parked in the driveway",
                "average_estimated_speed": 0.0,
                "velocity_angle": 0.0,
                "path_data": [],
            },
        ),
        EventResponse(
            id="event-person-002",
            label="person",
            sub_label=None,
            camera="garage",
            start_time=NOW - 4 * HOUR,
            end_time=NOW - 4 * HOUR + 20,
            false_positive=False,
            zones=[],
            thumbnail=None,
            # No clip: exercises the snapshot-only UI path.
            has_clip=False,
            has_snapshot=True,
            retain_indefinitely=False,
            plus_id=None,
            model_hash="ghi789",
            detector_type="cpu",
            model_type="ssd",
            data={
                "top_score": 0.78,
                "score": 0.78,
                "region": [0.0, 0.0, 0.6, 0.9],
                "box": [0.1, 0.05, 0.5, 0.85],
                "area": 0.32,
                "ratio": 0.5,
                "type": "object",
                "description": None,
                "average_estimated_speed": 0.5,
                "velocity_angle": 90.0,
                "path_data": [[[0.1, 0.4], 0.0]],
            },
        ),
    ]
    result = [e.model_dump(mode="json") for e in events]
    check_pydantic_fields(EventResponse, set(result[0].keys()), "Event")
    return result
def generate_exports():
    """Generate ExportModel[] validated against Pydantic + Peewee.

    Two finished exports (one attached to case-001) and one still
    in-progress export to exercise the pending-export UI state.
    """
    from frigate.api.defs.response.export_response import ExportModel

    exports = [
        ExportModel(
            id="export-001",
            camera="front_door",
            name="Front Door - Person Alert",
            date=NOW - 1 * HOUR,
            video_path="/exports/export-001.mp4",
            thumb_path="/exports/export-001-thumb.jpg",
            in_progress=False,
            export_case_id=None,
        ),
        ExportModel(
            id="export-002",
            camera="backyard",
            name="Backyard - Car Detection",
            date=NOW - 3 * HOUR,
            video_path="/exports/export-002.mp4",
            thumb_path="/exports/export-002-thumb.jpg",
            in_progress=False,
            # Linked to the case generated by generate_cases().
            export_case_id="case-001",
        ),
        ExportModel(
            id="export-003",
            camera="garage",
            name="Garage - In Progress",
            date=NOW - 0.5 * HOUR,
            video_path="/exports/export-003.mp4",
            thumb_path="/exports/export-003-thumb.jpg",
            in_progress=True,
            export_case_id=None,
        ),
    ]
    result = [e.model_dump(mode="json") for e in exports]
    check_pydantic_fields(ExportModel, set(result[0].keys()), "Export")
    return result
def generate_cases():
    """Generate ExportCaseModel[] validated against Pydantic + Peewee.

    A single case referenced by export-002 in generate_exports().
    """
    from frigate.api.defs.response.export_case_response import ExportCaseModel

    cases = [
        ExportCaseModel(
            id="case-001",
            name="Package Theft Investigation",
            description="Review of suspicious activity near the front porch",
            created_at=NOW - 24 * HOUR,
            updated_at=NOW - 3 * HOUR,
        ),
    ]
    result = [c.model_dump(mode="json") for c in cases]
    check_pydantic_fields(ExportCaseModel, set(result[0].keys()), "ExportCase")
    return result
def generate_review_summary():
    """Generate ReviewSummary entries for today and yesterday (calendar filter)."""

    def day_entry(day, reviewed_alert, reviewed_detection, total_alert, total_detection):
        # One calendar-day bucket of reviewed/total counts.
        return {
            "day": day,
            "reviewed_alert": reviewed_alert,
            "reviewed_detection": reviewed_detection,
            "total_alert": total_alert,
            "total_detection": total_detection,
        }

    now = datetime.now()
    today = now.strftime("%Y-%m-%d")
    yesterday = (now - timedelta(days=1)).strftime("%Y-%m-%d")
    return {
        today: day_entry(today, 1, 0, 2, 2),
        yesterday: day_entry(yesterday, 3, 2, 3, 4),
    }
def write_json(filename, data):
    """Serialize data to OUTPUT_DIR/filename and print a size summary.

    default=str covers values json can't natively encode (e.g. datetimes
    surviving a model_dump).
    """
    path = OUTPUT_DIR / filename
    path.write_text(json.dumps(data, default=str))
    print(f" {path.name} ({path.stat().st_size} bytes)")
def main():
    """Regenerate every mock JSON file, failing fast on backend schema drift."""
    print("Generating E2E mock data from backend models...")
    print(" Validating against Pydantic response models + Peewee DB columns")
    print()
    # Each generator validates its data by constructing the real backend
    # models, so a schema change aborts the run before stale files are kept.
    write_json("config-snapshot.json", generate_config())
    write_json("reviews.json", generate_reviews())
    write_json("events.json", generate_events())
    write_json("exports.json", generate_exports())
    write_json("cases.json", generate_cases())
    write_json("review-summary.json", generate_review_summary())
    print()
    print("All mock data validated against backend schemas.")
    print("If this script fails, update the mock data to match the new schema.")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,39 @@
/**
* User profile factories for E2E tests.
*/
/** Shape of the user profile the frontend receives for auth/permission checks. */
export interface UserProfile {
  username: string;
  role: string;
  /** Cameras this user may view; null means unrestricted access. */
  allowed_cameras: string[] | null;
}
/** Profile for the built-in admin user; unrestricted camera access. */
export function adminProfile(overrides?: Partial<UserProfile>): UserProfile {
  const base: UserProfile = {
    username: "admin",
    role: "admin",
    allowed_cameras: null,
  };
  return { ...base, ...overrides };
}

/** Profile for a viewer-role user with access to every camera. */
export function viewerProfile(overrides?: Partial<UserProfile>): UserProfile {
  const base: UserProfile = {
    username: "viewer",
    role: "viewer",
    allowed_cameras: null,
  };
  return { ...base, ...overrides };
}

/** Profile for a viewer restricted to an explicit camera list. */
export function restrictedProfile(
  cameras: string[],
  overrides?: Partial<UserProfile>,
): UserProfile {
  const base: UserProfile = {
    username: "restricted",
    role: "viewer",
    allowed_cameras: cameras,
  };
  return { ...base, ...overrides };
}

View File

@ -0,0 +1 @@
{"2026-04-06": {"day": "2026-04-06", "reviewed_alert": 1, "reviewed_detection": 0, "total_alert": 2, "total_detection": 2}, "2026-04-05": {"day": "2026-04-05", "reviewed_alert": 3, "reviewed_detection": 2, "total_alert": 3, "total_detection": 4}}

View File

@ -0,0 +1 @@
[{"id": "review-alert-001", "camera": "front_door", "start_time": "2026-04-06T09:52:11.386353", "end_time": "2026-04-06T09:52:41.386353", "has_been_reviewed": false, "severity": "alert", "thumb_path": "/clips/front_door/review-alert-001-thumb.jpg", "data": {"audio": [], "detections": ["person-abc123"], "objects": ["person"], "sub_labels": [], "significant_motion_areas": [], "zones": ["front_yard"]}}, {"id": "review-alert-002", "camera": "backyard", "start_time": "2026-04-06T08:52:11.386353", "end_time": "2026-04-06T08:52:56.386353", "has_been_reviewed": true, "severity": "alert", "thumb_path": "/clips/backyard/review-alert-002-thumb.jpg", "data": {"audio": [], "detections": ["car-def456"], "objects": ["car"], "sub_labels": [], "significant_motion_areas": [], "zones": ["driveway"]}}, {"id": "review-detect-001", "camera": "garage", "start_time": "2026-04-06T07:52:11.386353", "end_time": "2026-04-06T07:52:31.386353", "has_been_reviewed": false, "severity": "detection", "thumb_path": "/clips/garage/review-detect-001-thumb.jpg", "data": {"audio": [], "detections": ["person-ghi789"], "objects": ["person"], "sub_labels": [], "significant_motion_areas": [], "zones": []}}, {"id": "review-detect-002", "camera": "front_door", "start_time": "2026-04-06T06:52:11.386353", "end_time": "2026-04-06T06:52:26.386353", "has_been_reviewed": false, "severity": "detection", "thumb_path": "/clips/front_door/review-detect-002-thumb.jpg", "data": {"audio": [], "detections": ["car-jkl012"], "objects": ["car"], "sub_labels": [], "significant_motion_areas": [], "zones": ["front_yard"]}}]

View File

@ -0,0 +1,76 @@
/**
* FrigateStats factory for E2E tests.
*/
import type { DeepPartial } from "./config";
/** Per-camera stats entry with healthy defaults (5 fps, excellent connection). */
function cameraStats(_name: string) {
  return {
    audio_dBFPS: 0,
    audio_rms: 0,
    camera_fps: 5.0,
    capture_pid: 100,
    detection_enabled: 1,
    detection_fps: 5.0,
    ffmpeg_pid: 101,
    pid: 102,
    process_fps: 5.0,
    skipped_fps: 0,
    connection_quality: "excellent" as const,
    expected_fps: 5,
    reconnects_last_hour: 0,
    stalls_last_hour: 0,
  };
}

/** Baseline FrigateStats payload matching the three default mock cameras. */
export const BASE_STATS = {
  cameras: {
    front_door: cameraStats("front_door"),
    backyard: cameraStats("backyard"),
    garage: cameraStats("garage"),
  },
  cpu_usages: {
    "1": { cmdline: "frigate.app", cpu: "5.0", cpu_average: "4.5", mem: "2.1" },
  },
  detectors: {
    cpu: {
      detection_start: 0,
      inference_speed: 75.5,
      pid: 200,
    },
  },
  gpu_usages: {},
  npu_usages: {},
  processes: {},
  service: {
    last_updated: Date.now() / 1000,
    storage: {
      "/media/frigate/recordings": {
        free: 50000000000,
        total: 100000000000,
        used: 50000000000,
        mount_type: "ext4",
      },
      "/tmp/cache": {
        free: 500000000,
        total: 1000000000,
        used: 500000000,
        mount_type: "tmpfs",
      },
    },
    uptime: 86400,
    latest_version: "0.15.0",
    version: "0.15.0-test",
  },
  camera_fps: 15.0,
  process_fps: 15.0,
  skipped_fps: 0,
  detection_fps: 15.0,
};

/**
 * Non-mutating recursive merge: plain-object override values are merged
 * key by key; arrays and primitives replace the base value; `undefined`
 * override values are ignored so they never erase base fields.
 */
function deepMergeStats(
  base: Record<string, unknown>,
  overrides: Record<string, unknown>,
): Record<string, unknown> {
  const result: Record<string, unknown> = { ...base };
  for (const [key, val] of Object.entries(overrides)) {
    if (val === undefined) continue;
    const cur = result[key];
    if (
      typeof val === "object" &&
      val !== null &&
      !Array.isArray(val) &&
      typeof cur === "object" &&
      cur !== null &&
      !Array.isArray(cur)
    ) {
      result[key] = deepMergeStats(
        cur as Record<string, unknown>,
        val as Record<string, unknown>,
      );
    } else {
      result[key] = val;
    }
  }
  return result;
}

/**
 * Returns BASE_STATS with the given deep-partial overrides applied.
 * With no overrides the shared BASE_STATS object is returned unchanged.
 *
 * Fix: the overrides are declared DeepPartial, but the previous shallow
 * spread replaced whole subtrees — e.g. `{ service: { uptime: 1 } }`
 * silently dropped every other `service` field despite the `as typeof
 * BASE_STATS` cast claiming a complete object. Now nested overrides are
 * deep-merged into the base, honoring the declared contract.
 */
export function statsFactory(
  overrides?: DeepPartial<typeof BASE_STATS>,
): typeof BASE_STATS {
  if (!overrides) return BASE_STATS;
  return deepMergeStats(
    BASE_STATS,
    overrides as Record<string, unknown>,
  ) as typeof BASE_STATS;
}

Some files were not shown because too many files have changed in this diff Show More