Compare commits

..

46 Commits

Author SHA1 Message Date
Hosted Weblate
b8089fa8f0
Update translation files
Updated by "Squash Git commits" add-on in Weblate.

Translation: Frigate NVR/common
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/
2026-03-04 09:27:00 +01:00
Hosted Weblate
962c042e70
Translated using Weblate (Cantonese (Traditional Han script))
Currently translated at 0.2% (1 of 464 strings)

Added translation using Weblate (Cantonese (Traditional Han script))

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: beginner2047 <leoywng44@gmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/yue_Hant/
Translation: Frigate NVR/Config - Cameras
2026-03-04 09:27:00 +01:00
Hosted Weblate
91385ce652
Added translation using Weblate (Norwegian Bokmål)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:27:00 +01:00
Hosted Weblate
6296f0acd1
Added translation using Weblate (Chinese (Simplified Han script))
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:59 +01:00
Hosted Weblate
131b3b5a7b
Added translation using Weblate (Chinese (Traditional Han script))
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:59 +01:00
Hosted Weblate
cd26d99b52
Added translation using Weblate (Uzbek)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:58 +01:00
Hosted Weblate
3ef60cc15c
Added translation using Weblate (Urdu)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:58 +01:00
Hosted Weblate
724cabee38
Added translation using Weblate (Slovenian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:58 +01:00
Hosted Weblate
0158c5f37b
Added translation using Weblate (Slovak)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:58 +01:00
Hosted Weblate
f774e33d1f
Added translation using Weblate (Korean)
Translated using Weblate (Korean)

Currently translated at 86.2% (50 of 58 strings)

Translated using Weblate (Korean)

Currently translated at 99.5% (227 of 228 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: John <john@akfn.net>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ko/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/ko/
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
2026-03-04 09:26:57 +01:00
Hosted Weblate
53b03018b3
Added translation using Weblate (Serbian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:57 +01:00
Hosted Weblate
11e401340b
Added translation using Weblate (Finnish)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:57 +01:00
Hosted Weblate
fbc41ae39a
Added translation using Weblate (Persian)
Translated using Weblate (Persian)

Currently translated at 99.0% (215 of 217 strings)

Co-authored-by: Amir reza Irani ali poor <amir1376irani@yahoo.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/fa/
Translation: Frigate NVR/common
2026-03-04 09:26:57 +01:00
Hosted Weblate
ac5e59af7e
Added translation using Weblate (Swedish)
Translated using Weblate (Swedish)

Currently translated at 96.5% (56 of 58 strings)

Translated using Weblate (Swedish)

Currently translated at 92.5% (136 of 147 strings)

Translated using Weblate (Swedish)

Currently translated at 95.1% (217 of 228 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: ThomasW <thomas.wursig@remote24.se>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/sv/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/sv/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/sv/
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-system
2026-03-04 09:26:56 +01:00
Hosted Weblate
bec4692f92
Added translation using Weblate (French)
Translated using Weblate (French)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (French)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (French)

Currently translated at 93.8% (138 of 147 strings)

Translated using Weblate (French)

Currently translated at 96.4% (220 of 228 strings)

Translated using Weblate (French)

Currently translated at 81.0% (716 of 883 strings)

Co-authored-by: Apocoloquintose <bertrand.moreux@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/fr/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/fr/
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-03-04 09:26:56 +01:00
Hosted Weblate
e409e27554
Added translation using Weblate (Spanish)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:56 +01:00
Hosted Weblate
4f32a53bd9
Added translation using Weblate (Dutch)
Translated using Weblate (Dutch)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Dutch)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Dutch)

Currently translated at 74.9% (671 of 895 strings)

Translated using Weblate (Dutch)

Currently translated at 93.1% (137 of 147 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: Marijn <168113859+Marijn0@users.noreply.github.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/nl/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/nl/
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-03-04 09:26:56 +01:00
Hosted Weblate
8e57faca4b
Added translation using Weblate (Indonesian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:55 +01:00
Hosted Weblate
02ff89a20f
Added translation using Weblate (Arabic)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:54 +01:00
Hosted Weblate
cf88791463
Added translation using Weblate (Italian)
Translated using Weblate (Italian)

Currently translated at 100.0% (228 of 228 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Italian)

Currently translated at 100.0% (147 of 147 strings)

Co-authored-by: Gringo <ita.translations@tiscali.it>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/it/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/it/
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-system
2026-03-04 09:26:54 +01:00
Hosted Weblate
1c05e9f242
Translated using Weblate (Polish)
Currently translated at 16.5% (77 of 464 strings)

Added translation using Weblate (Polish)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: J P <jpoloczek24@gmail.com>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/pl/
Translation: Frigate NVR/Config - Cameras
2026-03-04 09:26:53 +01:00
Hosted Weblate
eb052fa0d4
Added translation using Weblate (Malayalam)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:53 +01:00
Hosted Weblate
b7daa81e39
Added translation using Weblate (Hebrew)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:53 +01:00
Hosted Weblate
31a789ec2c
Added translation using Weblate (Hindi)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:52 +01:00
Hosted Weblate
305c080276
Added translation using Weblate (Hungarian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:52 +01:00
Hosted Weblate
820582fdf1
Added translation using Weblate (Croatian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:52 +01:00
Hosted Weblate
353df1715c
Added translation using Weblate (Icelandic)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:52 +01:00
Hosted Weblate
395602394c
Added translation using Weblate (Vietnamese)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:52 +01:00
Hosted Weblate
41ecc646cc
Translated using Weblate (Portuguese)
Currently translated at 89.6% (52 of 58 strings)

Added translation using Weblate (Portuguese)

Translated using Weblate (Portuguese)

Currently translated at 89.6% (52 of 58 strings)

Translated using Weblate (Portuguese)

Currently translated at 90.7% (49 of 54 strings)

Translated using Weblate (Portuguese)

Currently translated at 29.5% (36 of 122 strings)

Translated using Weblate (Portuguese)

Currently translated at 43.4% (10 of 23 strings)

Co-authored-by: Abel Nunes <lidh08@gmail.com>
Co-authored-by: Hellyson Rodrigo Parteka <hellysonrp@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/pt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/pt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/pt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/pt/
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
2026-03-04 09:26:51 +01:00
Hosted Weblate
c90cd65496
Added translation using Weblate (Czech)
Translated using Weblate (Czech)

Currently translated at 100.0% (217 of 217 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: vaclav <zahorec@orcave.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/cs/
Translation: Frigate NVR/common
2026-03-04 09:26:51 +01:00
Hosted Weblate
76ed2dad1b
Update translation files
Updated by "Squash Git commits" add-on in Weblate.

Added translation using Weblate (Catalan)

Added translation using Weblate (Catalan)

Added translation using Weblate (Catalan)

Translated using Weblate (Catalan)

Currently translated at 100.0% (464 of 464 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (136 of 136 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (895 of 895 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (118 of 118 strings)

Added translation using Weblate (Catalan)

Translated using Weblate (Catalan)

Currently translated at 100.0% (895 of 895 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (228 of 228 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (122 of 122 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (883 of 883 strings)

Translated using Weblate (Catalan)

Currently translated at 100.0% (147 of 147 strings)

Co-authored-by: Eduardo Pastor Fernández <123eduardoneko123@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Josh Hawkins <joshhawk2003@yahoo.com>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/objects/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-classificationmodel/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ca/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ca/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/objects
Translation: Frigate NVR/views-classificationmodel
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-03-04 09:26:50 +01:00
Hosted Weblate
4697d9d7b8
Added translation using Weblate (Japanese)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:50 +01:00
Hosted Weblate
9810be1933
Added translation using Weblate (Ukrainian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:50 +01:00
Hosted Weblate
7737a05e3e
Added translation using Weblate (Bulgarian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:50 +01:00
Hosted Weblate
4374f49abf
Added translation using Weblate (Romanian)
Added translation using Weblate (Romanian)

Added translation using Weblate (Romanian)

Added translation using Weblate (Romanian)

Translated using Weblate (Romanian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (46 of 46 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (74 of 74 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (49 of 49 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (147 of 147 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (228 of 228 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (58 of 58 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (136 of 136 strings)

Translated using Weblate (Romanian)

Currently translated at 100.0% (895 of 895 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: lukasig <lukasig@hotmail.com>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-camera/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-filter/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-explore/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-search/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/ro/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-system/ro/
Translation: Frigate NVR/common
Translation: Frigate NVR/components-camera
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/components-filter
Translation: Frigate NVR/views-explore
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-search
Translation: Frigate NVR/views-settings
Translation: Frigate NVR/views-system
2026-03-04 09:26:49 +01:00
Hosted Weblate
c35b016f47
Added translation using Weblate (Russian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:49 +01:00
Hosted Weblate
0708d2adaf
Added translation using Weblate (Estonian)
Translated using Weblate (Estonian)

Currently translated at 100.0% (228 of 228 strings)

Translated using Weblate (Estonian)

Currently translated at 99.5% (227 of 228 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Co-authored-by: Priit Jõerüüt <jrthwlate@users.noreply.hosted.weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/et/
Translation: Frigate NVR/common
2026-03-04 09:26:49 +01:00
Hosted Weblate
696c59cdcf
Added translation using Weblate (Greek)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:48 +01:00
Hosted Weblate
7fe9058bc6
Added translation using Weblate (Danish)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:48 +01:00
Hosted Weblate
9ac535f2ab
Added translation using Weblate (German)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:48 +01:00
Hosted Weblate
0190daec35
Translated using Weblate (Portuguese (Brazil))
Currently translated at 96.5% (56 of 58 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (54 of 54 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (98 of 98 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (228 of 228 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 2.3% (11 of 464 strings)

Translated using Weblate (Portuguese (Brazil))

Currently translated at 50.5% (452 of 895 strings)

Added translation using Weblate (Portuguese (Brazil))

Co-authored-by: Hellyson Rodrigo Parteka <hellysonrp@gmail.com>
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/common/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/config-cameras/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-live/pt_BR/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/pt_BR/
Translation: Frigate NVR/Config - Cameras
Translation: Frigate NVR/common
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-live
Translation: Frigate NVR/views-settings
2026-03-04 09:26:48 +01:00
Hosted Weblate
67d324d3b0
Added translation using Weblate (Thai)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:47 +01:00
Hosted Weblate
9868d49763
Added translation using Weblate (Lithuanian)
Translated using Weblate (Lithuanian)

Currently translated at 62.1% (556 of 895 strings)

Translated using Weblate (Lithuanian)

Currently translated at 96.5% (56 of 58 strings)

Translated using Weblate (Lithuanian)

Currently translated at 100.0% (23 of 23 strings)

Translated using Weblate (Lithuanian)

Currently translated at 100.0% (54 of 54 strings)

Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Karolis Jeicenas <jeicenas@gmail.com>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/components-dialog/lt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-exports/lt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-facelibrary/lt/
Translate-URL: https://hosted.weblate.org/projects/frigate-nvr/views-settings/lt/
Translation: Frigate NVR/components-dialog
Translation: Frigate NVR/views-exports
Translation: Frigate NVR/views-facelibrary
Translation: Frigate NVR/views-settings
2026-03-04 09:26:47 +01:00
Hosted Weblate
8b670bf577
Added translation using Weblate (Latvian)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:47 +01:00
Hosted Weblate
1eaaa9b64e
Added translation using Weblate (Turkish)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:26:47 +01:00
Hosted Weblate
2675fec16a
Added translation using Weblate (Galician)
Co-authored-by: Hosted Weblate <hosted@weblate.org>
Co-authored-by: Languages add-on <noreply-addon-languages@weblate.org>
2026-03-04 09:24:50 +01:00
73 changed files with 524 additions and 4919 deletions

View File

@@ -324,12 +324,6 @@ try:
value = await sensor.read()
except Exception: # ❌ Too broad
logger.error("Failed")
# Returning exceptions in JSON responses
except ValueError as e:
return JSONResponse(
content={"success": False, "message": str(e)},
)
```
### ✅ Use These Instead
@@ -359,16 +353,6 @@ try:
value = await sensor.read()
except SensorException as err: # ✅ Specific
logger.exception("Failed to read sensor")
# Safe error responses
except ValueError:
logger.exception("Invalid parameters for API request")
return JSONResponse(
content={
"success": False,
"message": "Invalid request parameters",
},
)
```
## Project-Specific Conventions

View File

@@ -75,4 +75,4 @@ Many providers also have a public facing chat interface for their models. Downlo
- OpenAI - [ChatGPT](https://chatgpt.com)
- Gemini - [Google AI Studio](https://aistudio.google.com)
- Ollama - [Open WebUI](https://docs.openwebui.com/)
- Ollama - [Open WebUI](https://docs.openwebui.com/)

View File

@@ -49,13 +49,12 @@ from frigate.stats.prometheus import get_metrics, update_metrics
from frigate.types import JobStatusTypesEnum
from frigate.util.builtin import (
clean_camera_user_pass,
deep_merge,
flatten_config_data,
load_labels,
process_config_query_string,
update_yaml_file_bulk,
)
from frigate.util.config import apply_section_update, find_config_file
from frigate.util.config import find_config_file
from frigate.util.schema import get_config_schema
from frigate.util.services import (
get_nvidia_driver_info,
@@ -423,100 +422,9 @@ def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
)
def _config_set_in_memory(request: Request, body: AppConfigSetBody) -> JSONResponse:
"""Apply config changes in-memory only, without writing to YAML.
Used for temporary config changes like debug replay camera tuning.
Updates the in-memory Pydantic config and publishes ZMQ updates,
bypassing YAML parsing entirely.
"""
try:
updates = {}
if body.config_data:
updates = flatten_config_data(body.config_data)
updates = {k: ("" if v is None else v) for k, v in updates.items()}
if not updates:
return JSONResponse(
content={"success": False, "message": "No configuration data provided"},
status_code=400,
)
config: FrigateConfig = request.app.frigate_config
# Group flat key paths into nested per-camera, per-section dicts
grouped: dict[str, dict[str, dict]] = {}
for key_path, value in updates.items():
parts = key_path.split(".")
if len(parts) < 3 or parts[0] != "cameras":
continue
cam, section = parts[1], parts[2]
grouped.setdefault(cam, {}).setdefault(section, {})
# Build nested dict from remaining path (e.g. "filters.person.threshold")
target = grouped[cam][section]
for part in parts[3:-1]:
target = target.setdefault(part, {})
if len(parts) > 3:
target[parts[-1]] = value
elif isinstance(value, dict):
grouped[cam][section] = deep_merge(
grouped[cam][section], value, override=True
)
else:
grouped[cam][section] = value
# Apply each section update
for cam_name, sections in grouped.items():
camera_config = config.cameras.get(cam_name)
if not camera_config:
return JSONResponse(
content={
"success": False,
"message": f"Camera '{cam_name}' not found",
},
status_code=400,
)
for section_name, update in sections.items():
err = apply_section_update(camera_config, section_name, update)
if err is not None:
return JSONResponse(
content={"success": False, "message": err},
status_code=400,
)
# Publish ZMQ updates so processing threads pick up changes
if body.update_topic and body.update_topic.startswith("config/cameras/"):
_, _, camera, field = body.update_topic.split("/")
settings = getattr(config.cameras.get(camera, None), field, None)
if settings is not None:
request.app.config_publisher.publish_update(
CameraConfigUpdateTopic(CameraConfigUpdateEnum[field], camera),
settings,
)
return JSONResponse(
content={"success": True, "message": "Config applied in-memory"},
status_code=200,
)
except Exception as e:
logger.error(f"Error applying config in-memory: {e}")
return JSONResponse(
content={"success": False, "message": "Error applying config"},
status_code=500,
)
@router.put("/config/set", dependencies=[Depends(require_role(["admin"]))])
def config_set(request: Request, body: AppConfigSetBody):
config_file = find_config_file()
if body.skip_save:
return _config_set_in_memory(request, body)
lock = FileLock(f"{config_file}.lock", timeout=5)
try:

View File

@@ -1,176 +0,0 @@
"""Debug replay API endpoints."""
import asyncio
import logging
from datetime import datetime
from fastapi import APIRouter, Depends, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
from frigate.api.auth import require_role
from frigate.api.defs.tags import Tags
logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.app])
class DebugReplayStartBody(BaseModel):
"""Request body for starting a debug replay session."""
camera: str = Field(title="Source camera name")
start_time: float = Field(title="Start timestamp")
end_time: float = Field(title="End timestamp")
class DebugReplayStartResponse(BaseModel):
"""Response for starting a debug replay session."""
success: bool
replay_camera: str
class DebugReplayStatusResponse(BaseModel):
"""Response for debug replay status."""
active: bool
replay_camera: str | None = None
source_camera: str | None = None
start_time: float | None = None
end_time: float | None = None
live_ready: bool = False
class DebugReplayStopResponse(BaseModel):
"""Response for stopping a debug replay session."""
success: bool
@router.post(
"/debug_replay/start",
response_model=DebugReplayStartResponse,
dependencies=[Depends(require_role(["admin"]))],
summary="Start debug replay",
description="Start a debug replay session from camera recordings.",
)
async def start_debug_replay(request: Request, body: DebugReplayStartBody):
"""Start a debug replay session."""
replay_manager = request.app.replay_manager
if replay_manager.active:
return JSONResponse(
content={
"success": False,
"message": "A replay session is already active",
},
status_code=409,
)
try:
replay_camera = await asyncio.to_thread(
replay_manager.start,
source_camera=body.camera,
start_ts=body.start_time,
end_ts=body.end_time,
frigate_config=request.app.frigate_config,
config_publisher=request.app.config_publisher,
)
except ValueError:
logger.exception("Invalid parameters for debug replay start request")
return JSONResponse(
content={
"success": False,
"message": "Invalid debug replay request parameters",
},
status_code=400,
)
except RuntimeError:
logger.exception("Error while starting debug replay session")
return JSONResponse(
content={
"success": False,
"message": "An internal error occurred while starting debug replay",
},
status_code=500,
)
return DebugReplayStartResponse(
success=True,
replay_camera=replay_camera,
)
@router.get(
"/debug_replay/status",
response_model=DebugReplayStatusResponse,
dependencies=[Depends(require_role(["admin"]))],
summary="Get debug replay status",
description="Get the status of the current debug replay session.",
)
def get_debug_replay_status(request: Request):
"""Get the current replay session status."""
replay_manager = request.app.replay_manager
live_ready = False
replay_camera = replay_manager.replay_camera_name
if replay_manager.active and replay_camera:
frame_processor = request.app.detected_frames_processor
frame = frame_processor.get_current_frame(replay_camera)
if frame is not None:
frame_time = frame_processor.get_current_frame_time(replay_camera)
camera_config = request.app.frigate_config.cameras.get(replay_camera)
retry_interval = 10
if camera_config is not None:
retry_interval = float(camera_config.ffmpeg.retry_interval or 10)
live_ready = datetime.now().timestamp() <= frame_time + retry_interval
return DebugReplayStatusResponse(
active=replay_manager.active,
replay_camera=replay_camera,
source_camera=replay_manager.source_camera,
start_time=replay_manager.start_ts,
end_time=replay_manager.end_ts,
live_ready=live_ready,
)
@router.post(
    "/debug_replay/stop",
    response_model=DebugReplayStopResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Stop debug replay",
    description="Stop the active debug replay session and clean up all artifacts.",
)
async def stop_debug_replay(request: Request):
    """Tear down the running replay session.

    Returns 400 when no session is active and 500 when shutdown fails;
    otherwise a success response.
    """
    manager = request.app.replay_manager

    if not manager.active:
        # Nothing to stop — report a client error rather than a silent no-op.
        return JSONResponse(
            content={"success": False, "message": "No active replay session"},
            status_code=400,
        )

    try:
        # stop() performs blocking filesystem and DB work; run it off the
        # event loop so the API stays responsive.
        await asyncio.to_thread(
            manager.stop,
            frigate_config=request.app.frigate_config,
            config_publisher=request.app.config_publisher,
        )
    except (ValueError, RuntimeError, OSError) as err:
        logger.error("Error stopping replay: %s", err)
        return JSONResponse(
            content={
                "success": False,
                "message": "Failed to stop replay session due to an internal error.",
            },
            status_code=500,
        )

    return DebugReplayStopResponse(success=True)

View File

@ -7,7 +7,6 @@ class AppConfigSetBody(BaseModel):
requires_restart: int = 1
update_topic: str | None = None
config_data: Optional[Dict[str, Any]] = None
skip_save: bool = False
class AppPutPasswordBody(BaseModel):

View File

@ -18,7 +18,6 @@ from frigate.api import (
camera,
chat,
classification,
debug_replay,
event,
export,
media,
@ -33,7 +32,6 @@ from frigate.comms.event_metadata_updater import (
)
from frigate.config import FrigateConfig
from frigate.config.camera.updater import CameraConfigUpdatePublisher
from frigate.debug_replay import DebugReplayManager
from frigate.embeddings import EmbeddingsContext
from frigate.genai import GenAIClientManager
from frigate.ptz.onvif import OnvifController
@ -67,7 +65,6 @@ def create_fastapi_app(
stats_emitter: StatsEmitter,
event_metadata_updater: EventMetadataPublisher,
config_publisher: CameraConfigUpdatePublisher,
replay_manager: DebugReplayManager,
enforce_default_admin: bool = True,
):
logger.info("Starting FastAPI app")
@ -136,7 +133,6 @@ def create_fastapi_app(
app.include_router(event.router)
app.include_router(media.router)
app.include_router(record.router)
app.include_router(debug_replay.router)
# App Properties
app.frigate_config = frigate_config
app.genai_manager = GenAIClientManager(frigate_config)
@ -148,7 +144,6 @@ def create_fastapi_app(
app.stats_emitter = stats_emitter
app.event_metadata_updater = event_metadata_updater
app.config_publisher = config_publisher
app.replay_manager = replay_manager
if frigate_config.auth.enabled:
secret = get_jwt_secret()

View File

@ -43,10 +43,6 @@ from frigate.const import (
)
from frigate.data_processing.types import DataProcessorMetrics
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.debug_replay import (
DebugReplayManager,
cleanup_replay_cameras,
)
from frigate.embeddings import EmbeddingProcess, EmbeddingsContext
from frigate.events.audio import AudioProcessor
from frigate.events.cleanup import EventCleanup
@ -143,9 +139,6 @@ class FrigateApp:
else:
logger.debug(f"Skipping directory: {d}")
def init_debug_replay_manager(self) -> None:
self.replay_manager = DebugReplayManager()
def init_camera_metrics(self) -> None:
# create camera_metrics
for camera_name in self.config.cameras.keys():
@ -538,7 +531,6 @@ class FrigateApp:
set_file_limit()
# Start frigate services.
self.init_debug_replay_manager()
self.init_camera_metrics()
self.init_queues()
self.init_database()
@ -549,10 +541,6 @@ class FrigateApp:
self.init_embeddings_manager()
self.bind_database()
self.check_db_data_migrations()
# Clean up any stale replay camera artifacts (filesystem + DB)
cleanup_replay_cameras()
self.init_inter_process_communicator()
self.start_detectors()
self.init_dispatcher()
@ -584,7 +572,6 @@ class FrigateApp:
self.stats_emitter,
self.event_metadata_updater,
self.inter_config_updater,
self.replay_manager,
),
host="127.0.0.1",
port=5001,
@ -650,7 +637,6 @@ class FrigateApp:
self.record_cleanup.join()
self.stats_emitter.join()
self.frigate_watchdog.join()
self.camera_maintainer.join()
self.db.stop()
# Save embeddings stats to disk

View File

@ -57,9 +57,6 @@ class CameraActivityManager:
all_objects: list[dict[str, Any]] = []
for camera in new_activity.keys():
if camera not in self.config.cameras:
continue
# handle cameras that were added dynamically
if camera not in self.camera_all_object_counts:
self.__init_camera(self.config.cameras[camera])
@ -127,11 +124,7 @@ class CameraActivityManager:
any_changed = False
# run through each object and check what topics need to be updated
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
for label in camera_config.objects.track:
for label in self.config.cameras[camera].objects.track:
if label in self.config.model.non_logo_attributes:
continue
@ -181,9 +174,6 @@ class AudioActivityManager:
now = datetime.datetime.now().timestamp()
for camera in new_activity.keys():
if camera not in self.config.cameras:
continue
# handle cameras that were added dynamically
if camera not in self.current_audio_detections:
self.__init_camera(self.config.cameras[camera])
@ -203,11 +193,7 @@ class AudioActivityManager:
def compare_audio_activity(
self, camera: str, new_detections: list[tuple[str, float]], now: float
) -> None:
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False
max_not_heard = camera_config.audio.max_not_heard
max_not_heard = self.config.cameras[camera].audio.max_not_heard
current = self.current_audio_detections[camera]
any_changed = False

View File

@ -55,20 +55,8 @@ class CameraMaintainer(threading.Thread):
self.shm_count = self.__calculate_shm_frame_count()
self.camera_processes: dict[str, mp.Process] = {}
self.capture_processes: dict[str, mp.Process] = {}
self.camera_stop_events: dict[str, MpEvent] = {}
self.metrics_manager = metrics_manager
def __ensure_camera_stop_event(self, camera: str) -> MpEvent:
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is None:
camera_stop_event = mp.Event()
self.camera_stop_events[camera] = camera_stop_event
else:
camera_stop_event.clear()
return camera_stop_event
def __init_historical_regions(self) -> None:
# delete region grids for removed or renamed cameras
cameras = list(self.config.cameras.keys())
@ -111,8 +99,6 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Camera processor not started for disabled camera {name}")
return
camera_stop_event = self.__ensure_camera_stop_event(name)
if runtime:
self.camera_metrics[name] = CameraMetrics(self.metrics_manager)
self.ptz_metrics[name] = PTZMetrics(autotracker_enabled=False)
@ -149,7 +135,7 @@ class CameraMaintainer(threading.Thread):
self.camera_metrics[name],
self.ptz_metrics[name],
self.region_grids[name],
camera_stop_event,
self.stop_event,
self.config.logger,
)
self.camera_processes[config.name] = camera_process
@ -164,8 +150,6 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Capture process not started for disabled camera {name}")
return
camera_stop_event = self.__ensure_camera_stop_event(name)
# pre-create shms
count = 10 if runtime else self.shm_count
for i in range(count):
@ -176,7 +160,7 @@ class CameraMaintainer(threading.Thread):
config,
count,
self.camera_metrics[name],
camera_stop_event,
self.stop_event,
self.config.logger,
)
capture_process.daemon = True
@ -186,36 +170,18 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Capture process started for {name}: {capture_process.pid}")
def __stop_camera_capture_process(self, camera: str) -> None:
capture_process = self.capture_processes.get(camera)
capture_process = self.capture_processes[camera]
if capture_process is not None:
logger.info(f"Waiting for capture process for {camera} to stop")
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is not None:
camera_stop_event.set()
capture_process.join(timeout=10)
if capture_process.is_alive():
logger.warning(
f"Capture process for {camera} didn't exit, forcing termination"
)
capture_process.terminate()
capture_process.join()
capture_process.terminate()
capture_process.join()
def __stop_camera_process(self, camera: str) -> None:
camera_process = self.camera_processes.get(camera)
camera_process = self.camera_processes[camera]
if camera_process is not None:
logger.info(f"Waiting for process for {camera} to stop")
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is not None:
camera_stop_event.set()
camera_process.join(timeout=10)
if camera_process.is_alive():
logger.warning(f"Process for {camera} didn't exit, forcing termination")
camera_process.terminate()
camera_process.join()
camera_process.terminate()
camera_process.join()
logger.info(f"Closing frame queue for {camera}")
empty_and_close_queue(self.camera_metrics[camera].frame_queue)
@ -233,12 +199,6 @@ class CameraMaintainer(threading.Thread):
for update_type, updated_cameras in updates.items():
if update_type == CameraConfigUpdateEnum.add.name:
for camera in updated_cameras:
if (
camera in self.camera_processes
or camera in self.capture_processes
):
continue
self.__start_camera_processor(
camera,
self.update_subscriber.camera_configs[camera],
@ -250,22 +210,15 @@ class CameraMaintainer(threading.Thread):
runtime=True,
)
elif update_type == CameraConfigUpdateEnum.remove.name:
for camera in updated_cameras:
self.__stop_camera_capture_process(camera)
self.__stop_camera_process(camera)
self.capture_processes.pop(camera, None)
self.camera_processes.pop(camera, None)
self.camera_stop_events.pop(camera, None)
self.region_grids.pop(camera, None)
self.camera_metrics.pop(camera, None)
self.ptz_metrics.pop(camera, None)
self.__stop_camera_capture_process(camera)
self.__stop_camera_process(camera)
# ensure the capture processes are done
for camera in self.capture_processes.keys():
for camera in self.camera_processes.keys():
self.__stop_camera_capture_process(camera)
# ensure the camera processors are done
for camera in self.camera_processes.keys():
for camera in self.capture_processes.keys():
self.__stop_camera_process(camera)
self.update_subscriber.stop()

View File

@ -26,8 +26,8 @@ class ConfigPublisher:
def stop(self) -> None:
self.stop_event.set()
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()
class ConfigSubscriber:
@ -55,5 +55,5 @@ class ConfigSubscriber:
return (None, None)
def stop(self) -> None:
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()

View File

@ -110,9 +110,6 @@ class Dispatcher:
payload: str,
sub_command: str | None = None,
) -> None:
if camera_name not in self.config.cameras:
return
try:
if command_type == "set":
if sub_command:
@ -134,9 +131,6 @@ class Dispatcher:
def handle_request_region_grid() -> Any:
camera = payload
if camera not in self.config.cameras:
return None
grid = get_camera_regions_grid(
camera,
self.config.cameras[camera].detect,
@ -249,11 +243,7 @@ class Dispatcher:
self.publish("birdseye_layout", json.dumps(self.birdseye_layout.copy()))
def handle_on_connect() -> None:
camera_status = {
camera: status
for camera, status in self.camera_activity.last_camera_activity.copy().items()
if camera in self.config.cameras
}
camera_status = self.camera_activity.last_camera_activity.copy()
audio_detections = self.audio_activity.current_audio_detections.copy()
cameras_with_status = camera_status.keys()
@ -356,8 +346,7 @@ class Dispatcher:
# example /cam_name/notifications/suspend payload=duration
camera_name = parts[-3]
command = parts[-2]
if camera_name in self.config.cameras:
self._on_camera_notification_suspend(camera_name, payload)
self._on_camera_notification_suspend(camera_name, payload)
except IndexError:
logger.error(
f"Received invalid {topic.split('/')[-1]} command: {topic}"

View File

@ -61,8 +61,8 @@ class InterProcessCommunicator(Communicator):
def stop(self) -> None:
self.stop_event.set()
self.reader_thread.join()
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()
class InterProcessRequestor:
@ -82,5 +82,5 @@ class InterProcessRequestor:
return ""
def stop(self) -> None:
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()

View File

@ -43,7 +43,7 @@ class ZmqProxy:
def stop(self) -> None:
# destroying the context will tell the proxy to stop
self.context.destroy(linger=0)
self.context.destroy()
self.runner.join()
@ -66,8 +66,8 @@ class Publisher(Generic[T]):
self.socket.send_string(f"{self.topic}{sub_topic} {json.dumps(payload)}")
def stop(self) -> None:
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()
class Subscriber(Generic[T]):
@ -96,8 +96,8 @@ class Subscriber(Generic[T]):
return self._return_object("", None)
def stop(self) -> None:
self.socket.close(linger=0)
self.context.destroy(linger=0)
self.socket.close()
self.context.destroy()
def _return_object(self, topic: str, payload: T | None) -> T | None:
return payload

View File

@ -80,8 +80,8 @@ class CameraConfigUpdateSubscriber:
self.camera_configs[camera] = updated_config
return
elif update_type == CameraConfigUpdateEnum.remove:
self.config.cameras.pop(camera, None)
self.camera_configs.pop(camera, None)
self.config.cameras.pop(camera)
self.camera_configs.pop(camera)
return
config = self.camera_configs.get(camera)

View File

@ -14,8 +14,6 @@ RECORD_DIR = f"{BASE_DIR}/recordings"
TRIGGER_DIR = f"{CLIPS_DIR}/triggers"
BIRDSEYE_PIPE = "/tmp/cache/birdseye"
CACHE_DIR = "/tmp/cache"
REPLAY_CAMERA_PREFIX = "_replay_"
REPLAY_DIR = os.path.join(CACHE_DIR, "replay")
PLUS_ENV_VAR = "PLUS_API_KEY"
PLUS_API_HOST = "https://api.frigate.video"

View File

@ -1,443 +0,0 @@
"""Debug replay camera management for replaying recordings with detection overlays."""
import logging
import os
import shutil
import subprocess as sp
import threading
from ruamel.yaml import YAML
from frigate.config import FrigateConfig
from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdatePublisher,
CameraConfigUpdateTopic,
)
from frigate.const import (
CLIPS_DIR,
RECORD_DIR,
REPLAY_CAMERA_PREFIX,
REPLAY_DIR,
THUMB_DIR,
)
from frigate.models import Event, Recordings, ReviewSegment, Timeline
from frigate.util.config import find_config_file
logger = logging.getLogger(__name__)
class DebugReplayManager:
    """Manages a single debug replay session.

    At most one replay can be active at a time; `_lock` serializes
    start/stop so concurrent API calls cannot interleave session state.
    """

    def __init__(self) -> None:
        self._lock = threading.Lock()
        # All fields below are None while no session is active.
        self.replay_camera_name: str | None = None
        self.source_camera: str | None = None
        self.clip_path: str | None = None
        self.start_ts: float | None = None
        self.end_ts: float | None = None

    @property
    def active(self) -> bool:
        """Whether a replay session is currently active."""
        return self.replay_camera_name is not None

    def start(
        self,
        source_camera: str,
        start_ts: float,
        end_ts: float,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> str:
        """Start a debug replay session.

        Args:
            source_camera: Name of the source camera to replay
            start_ts: Start timestamp
            end_ts: End timestamp
            frigate_config: Current Frigate configuration
            config_publisher: Publisher for camera config updates

        Returns:
            The replay camera name

        Raises:
            ValueError: If a session is already active or parameters are invalid
            RuntimeError: If clip generation fails
        """
        with self._lock:
            return self._start_locked(
                source_camera, start_ts, end_ts, frigate_config, config_publisher
            )

    def _start_locked(
        self,
        source_camera: str,
        start_ts: float,
        end_ts: float,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> str:
        # Caller (start) holds self._lock. Validate before any side effects.
        if self.active:
            raise ValueError("A replay session is already active")

        if source_camera not in frigate_config.cameras:
            raise ValueError(f"Camera '{source_camera}' not found")

        if end_ts <= start_ts:
            raise ValueError("End time must be after start time")

        # Query recordings for the source camera in the time range: segments
        # that start or end inside [start_ts, end_ts], or that fully contain it.
        recordings = (
            Recordings.select(
                Recordings.path,
                Recordings.start_time,
                Recordings.end_time,
            )
            .where(
                Recordings.start_time.between(start_ts, end_ts)
                | Recordings.end_time.between(start_ts, end_ts)
                | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
            )
            .where(Recordings.camera == source_camera)
            .order_by(Recordings.start_time.asc())
        )

        if not recordings.count():
            raise ValueError(
                f"No recordings found for camera '{source_camera}' in the specified time range"
            )

        # Create replay directory
        os.makedirs(REPLAY_DIR, exist_ok=True)

        # Generate replay camera name
        replay_name = f"{REPLAY_CAMERA_PREFIX}{source_camera}"

        # Build concat file for ffmpeg (one "file '<path>'" line per segment)
        concat_file = os.path.join(REPLAY_DIR, f"{replay_name}_concat.txt")
        clip_path = os.path.join(REPLAY_DIR, f"{replay_name}.mp4")

        with open(concat_file, "w") as f:
            for recording in recordings:
                f.write(f"file '{recording.path}'\n")

        # Concatenate recordings into a single clip with -c copy (fast)
        ffmpeg_cmd = [
            frigate_config.ffmpeg.ffmpeg_path,
            "-hide_banner",
            "-y",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            concat_file,
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            clip_path,
        ]

        logger.info(
            "Generating replay clip for %s (%.1f - %.1f)",
            source_camera,
            start_ts,
            end_ts,
        )

        try:
            result = sp.run(
                ffmpeg_cmd,
                capture_output=True,
                text=True,
                timeout=120,
            )

            if result.returncode != 0:
                logger.error("FFmpeg error: %s", result.stderr)
                raise RuntimeError(
                    f"Failed to generate replay clip: {result.stderr[-500:]}"
                )
        except sp.TimeoutExpired:
            raise RuntimeError("Clip generation timed out")
        finally:
            # Clean up concat file regardless of success/failure
            if os.path.exists(concat_file):
                os.remove(concat_file)

        if not os.path.exists(clip_path):
            raise RuntimeError("Clip file was not created")

        # Build camera config dict for the replay camera
        source_config = frigate_config.cameras[source_camera]
        camera_dict = self._build_camera_config_dict(
            source_config, replay_name, clip_path
        )

        # Build an in-memory config with the replay camera added; the YAML
        # on disk is read but never written back (replay cameras are
        # memory-only and disappear on restart).
        config_file = find_config_file()
        yaml_parser = YAML()

        with open(config_file, "r") as f:
            config_data = yaml_parser.load(f)

        if "cameras" not in config_data or config_data["cameras"] is None:
            config_data["cameras"] = {}

        config_data["cameras"][replay_name] = camera_dict

        try:
            new_config = FrigateConfig.parse_object(config_data)
        except Exception as e:
            raise RuntimeError(f"Failed to validate replay camera config: {e}")

        # Update the running config
        frigate_config.cameras[replay_name] = new_config.cameras[replay_name]

        # Publish the add event
        config_publisher.publish_update(
            CameraConfigUpdateTopic(CameraConfigUpdateEnum.add, replay_name),
            new_config.cameras[replay_name],
        )

        # Store session state (only after everything above succeeded)
        self.replay_camera_name = replay_name
        self.source_camera = source_camera
        self.clip_path = clip_path
        self.start_ts = start_ts
        self.end_ts = end_ts

        logger.info("Debug replay started: %s -> %s", source_camera, replay_name)
        return replay_name

    def stop(
        self,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> None:
        """Stop the active replay session and clean up all artifacts.

        Args:
            frigate_config: Current Frigate configuration
            config_publisher: Publisher for camera config updates
        """
        with self._lock:
            self._stop_locked(frigate_config, config_publisher)

    def _stop_locked(
        self,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> None:
        # Caller (stop) holds self._lock.
        if not self.active:
            logger.warning("No active replay session to stop")
            return

        replay_name = self.replay_camera_name

        # Publish remove event so subscribers stop and remove from their config
        if replay_name in frigate_config.cameras:
            config_publisher.publish_update(
                CameraConfigUpdateTopic(CameraConfigUpdateEnum.remove, replay_name),
                frigate_config.cameras[replay_name],
            )
            # Do NOT pop here — let subscribers handle removal from the shared
            # config dict when they process the ZMQ message to avoid race conditions

        # Defensive DB cleanup
        self._cleanup_db(replay_name)

        # Remove filesystem artifacts
        self._cleanup_files(replay_name)

        # Reset state
        self.replay_camera_name = None
        self.source_camera = None
        self.clip_path = None
        self.start_ts = None
        self.end_ts = None

        logger.info("Debug replay stopped and cleaned up: %s", replay_name)

    def _build_camera_config_dict(
        self,
        source_config,
        replay_name: str,
        clip_path: str,
    ) -> dict:
        """Build a camera config dictionary for the replay camera.

        The source camera's detect/objects/zones/motion settings are copied
        (with runtime-computed fields stripped so the dict re-parses cleanly),
        while anything that would persist artifacts — record, snapshots,
        review, birdseye, audio, lpr, face recognition — is disabled.

        Args:
            source_config: Source camera's CameraConfig
            replay_name: Name for the replay camera
            clip_path: Path to the replay clip file

        Returns:
            Camera config as a dictionary
        """
        # Extract detect config (exclude computed fields)
        detect_dict = source_config.detect.model_dump(
            exclude={"min_initialized", "max_disappeared", "enabled_in_config"}
        )

        # Extract objects config, using .dict() on filters to convert
        # RuntimeFilterConfig ndarray masks back to string coordinates
        objects_dict = {
            "track": source_config.objects.track,
            "mask": {
                mask_id: (
                    mask_cfg.model_dump(
                        exclude={"raw_coordinates", "enabled_in_config"}
                    )
                    if mask_cfg is not None
                    else None
                )
                for mask_id, mask_cfg in source_config.objects.mask.items()
            }
            if source_config.objects.mask
            else {},
            "filters": {
                name: filt.dict() if hasattr(filt, "dict") else filt.model_dump()
                for name, filt in source_config.objects.filters.items()
            },
        }

        # Extract zones (exclude_defaults avoids serializing empty defaults
        # like distances=[] that fail validation on re-parse)
        zones_dict = {}
        for zone_name, zone_config in source_config.zones.items():
            zone_dump = zone_config.model_dump(
                exclude={"contour", "color"}, exclude_defaults=True
            )
            # Always include required fields
            zone_dump.setdefault("coordinates", zone_config.coordinates)
            zones_dict[zone_name] = zone_dump

        # Extract motion config (exclude runtime fields)
        motion_dict = {}
        if source_config.motion is not None:
            motion_dict = source_config.motion.model_dump(
                exclude={
                    "frame_shape",
                    "raw_mask",
                    "mask",
                    "improved_contrast_enabled",
                    "rasterized_mask",
                }
            )

        return {
            "enabled": True,
            "ffmpeg": {
                # -re paces input at native frame rate, -stream_loop -1 loops
                # the clip indefinitely, +genpts regenerates timestamps so the
                # looped stream stays monotonic.
                "inputs": [
                    {
                        "path": clip_path,
                        "roles": ["detect"],
                        "input_args": "-re -stream_loop -1 -fflags +genpts",
                    }
                ],
                "hwaccel_args": [],
            },
            "detect": detect_dict,
            "objects": objects_dict,
            "zones": zones_dict,
            "motion": motion_dict,
            "record": {"enabled": False},
            "snapshots": {"enabled": False},
            "review": {
                "alerts": {"enabled": False},
                "detections": {"enabled": False},
            },
            "birdseye": {"enabled": False},
            "audio": {"enabled": False},
            "lpr": {"enabled": False},
            "face_recognition": {"enabled": False},
        }

    def _cleanup_db(self, camera_name: str) -> None:
        """Defensively remove any database rows for the replay camera."""
        # Each table is cleaned independently so one failure doesn't block
        # cleanup of the others.
        try:
            Event.delete().where(Event.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay events: %s", e)

        try:
            Timeline.delete().where(Timeline.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay timeline: %s", e)

        try:
            Recordings.delete().where(Recordings.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay recordings: %s", e)

        try:
            ReviewSegment.delete().where(ReviewSegment.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay review segments: %s", e)

    def _cleanup_files(self, camera_name: str) -> None:
        """Remove filesystem artifacts for the replay camera."""
        dirs_to_clean = [
            os.path.join(RECORD_DIR, camera_name),
            os.path.join(CLIPS_DIR, camera_name),
            os.path.join(THUMB_DIR, camera_name),
        ]

        for dir_path in dirs_to_clean:
            if os.path.exists(dir_path):
                try:
                    shutil.rmtree(dir_path)
                    logger.debug("Removed replay directory: %s", dir_path)
                except Exception as e:
                    logger.error("Failed to remove %s: %s", dir_path, e)

        # Remove replay clip and any related files
        if os.path.exists(REPLAY_DIR):
            try:
                shutil.rmtree(REPLAY_DIR)
                logger.debug("Removed replay cache directory")
            except Exception as e:
                logger.error("Failed to remove replay cache: %s", e)
def cleanup_replay_cameras() -> None:
    """Remove any stale replay camera artifacts on startup.

    Since replay cameras are memory-only and never written to YAML, they
    won't appear in the config after a restart. This function cleans up
    filesystem and database artifacts from any replay that was running when
    the process stopped.

    Must be called AFTER the database is bound.
    """
    stale_cameras: set[str] = set()

    # Scan filesystem for leftover replay artifacts to derive camera names
    for dir_path in [RECORD_DIR, CLIPS_DIR, THUMB_DIR]:
        if os.path.isdir(dir_path):
            for entry in os.listdir(dir_path):
                if entry.startswith(REPLAY_CAMERA_PREFIX):
                    stale_cameras.add(entry)

    if os.path.isdir(REPLAY_DIR):
        for entry in os.listdir(REPLAY_DIR):
            if entry.startswith(REPLAY_CAMERA_PREFIX) and entry.endswith(".mp4"):
                stale_cameras.add(entry.removesuffix(".mp4"))

    if stale_cameras:
        logger.info(
            "Cleaning up stale replay camera artifacts: %s", list(stale_cameras)
        )

        manager = DebugReplayManager()
        for camera_name in stale_cameras:
            manager._cleanup_db(camera_name)
            manager._cleanup_files(camera_name)

    # Always clear the replay cache dir, even when no camera names were
    # derived: it can still hold leftover concat .txt files or partial clips
    # that don't match the "<prefix>*.mp4" pattern scanned above. (The
    # previous early return on empty stale_cameras left those behind.)
    if os.path.exists(REPLAY_DIR):
        try:
            shutil.rmtree(REPLAY_DIR)
        except Exception as e:
            logger.error("Failed to remove replay cache directory: %s", e)

View File

@ -421,9 +421,7 @@ class EmbeddingMaintainer(threading.Thread):
if self.config.semantic_search.enabled:
self.embeddings.update_stats()
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
camera_config = self.config.cameras[camera]
# no need to process updated objects if no processors are active
if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
@ -641,10 +639,7 @@ class EmbeddingMaintainer(threading.Thread):
if not camera or camera not in self.config.cameras:
return
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
camera_config = self.config.cameras[camera]
dedicated_lpr_enabled = (
camera_config.type == CameraTypeEnum.lpr
and "license_plate" not in camera_config.objects.track

View File

@ -7,7 +7,6 @@ from typing import Dict
from frigate.comms.events_updater import EventEndPublisher, EventUpdateSubscriber
from frigate.config import FrigateConfig
from frigate.config.classification import ObjectClassificationType
from frigate.const import REPLAY_CAMERA_PREFIX
from frigate.events.types import EventStateEnum, EventTypeEnum
from frigate.models import Event
from frigate.util.builtin import to_relative_box
@ -147,9 +146,7 @@ class EventProcessor(threading.Thread):
if should_update_db(self.events_in_process[event_data["id"]], event_data):
updated_db = True
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
camera_config = self.config.cameras[camera]
width = camera_config.detect.width
height = camera_config.detect.height
first_detector = list(self.config.detectors.values())[0]
@ -286,10 +283,6 @@ class EventProcessor(threading.Thread):
def handle_external_detection(
self, event_type: EventStateEnum, event_data: Event
) -> None:
# Skip replay cameras
if event_data.get("camera", "").startswith(REPLAY_CAMERA_PREFIX):
return
if event_type == EventStateEnum.start:
event = {
Event.id: event_data["id"],

View File

@ -420,8 +420,7 @@ class BirdsEyeFrameManager:
[
cam
for cam, cam_data in self.cameras.items()
if cam in self.config.cameras
and self.config.cameras[cam].birdseye.enabled
if self.config.cameras[cam].birdseye.enabled
and self.config.cameras[cam].enabled_in_config
and self.config.cameras[cam].enabled
and cam_data["last_active_frame"] > 0
@ -724,11 +723,8 @@ class BirdsEyeFrameManager:
Update birdseye for a specific camera with new frame data.
Returns (frame_changed, layout_changed) to indicate if the frame or layout changed.
"""
# don't process if camera was removed or birdseye is disabled
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False, False
# don't process if birdseye is disabled for this camera
camera_config = self.config.cameras[camera]
force_update = False
# disabling birdseye is a little tricky

View File

@ -22,12 +22,7 @@ from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdateSubscriber,
)
from frigate.const import (
CACHE_DIR,
CLIPS_DIR,
PROCESS_PRIORITY_MED,
REPLAY_CAMERA_PREFIX,
)
from frigate.const import CACHE_DIR, CLIPS_DIR, PROCESS_PRIORITY_MED
from frigate.output.birdseye import Birdseye
from frigate.output.camera import JsmpegCamera
from frigate.output.preview import PreviewRecorder
@ -84,32 +79,6 @@ class OutputProcess(FrigateProcess):
)
self.config = config
def is_debug_replay_camera(self, camera: str) -> bool:
return camera.startswith(REPLAY_CAMERA_PREFIX)
def add_camera(
self,
camera: str,
websocket_server: WSGIServer,
jsmpeg_cameras: dict[str, JsmpegCamera],
preview_recorders: dict[str, PreviewRecorder],
preview_write_times: dict[str, float],
birdseye: Birdseye | None,
) -> None:
camera_config = self.config.cameras[camera]
jsmpeg_cameras[camera] = JsmpegCamera(
camera_config, self.stop_event, websocket_server
)
preview_recorders[camera] = PreviewRecorder(camera_config)
preview_write_times[camera] = 0
if (
birdseye is not None
and self.config.birdseye.enabled
and camera_config.birdseye.enabled
):
birdseye.add_camera(camera)
def run(self) -> None:
self.pre_run_setup(self.config.logger)
@ -149,17 +118,14 @@ class OutputProcess(FrigateProcess):
move_preview_frames("cache")
for camera, cam_config in self.config.cameras.items():
if not cam_config.enabled_in_config or self.is_debug_replay_camera(camera):
if not cam_config.enabled_in_config:
continue
self.add_camera(
camera,
websocket_server,
jsmpeg_cameras,
preview_recorders,
preview_write_times,
birdseye,
jsmpeg_cameras[camera] = JsmpegCamera(
cam_config, self.stop_event, websocket_server
)
preview_recorders[camera] = PreviewRecorder(cam_config)
preview_write_times[camera] = 0
if self.config.birdseye.enabled:
birdseye = Birdseye(self.config, self.stop_event, websocket_server)
@ -172,15 +138,19 @@ class OutputProcess(FrigateProcess):
if CameraConfigUpdateEnum.add in updates:
for camera in updates["add"]:
if not self.is_debug_replay_camera(camera):
self.add_camera(
camera,
websocket_server,
jsmpeg_cameras,
preview_recorders,
preview_write_times,
birdseye,
)
jsmpeg_cameras[camera] = JsmpegCamera(
self.config.cameras[camera], self.stop_event, websocket_server
)
preview_recorders[camera] = PreviewRecorder(
self.config.cameras[camera]
)
preview_write_times[camera] = 0
if (
self.config.birdseye.enabled
and self.config.cameras[camera].birdseye.enabled
):
birdseye.add_camera(camera)
(topic, data) = detection_subscriber.check_for_update(timeout=1)
now = datetime.datetime.now().timestamp()
@ -204,11 +174,7 @@ class OutputProcess(FrigateProcess):
_,
) = data
if (
camera not in self.config.cameras
or not self.config.cameras[camera].enabled
or self.is_debug_replay_camera(camera)
):
if not self.config.cameras[camera].enabled:
continue
frame = frame_manager.get(

View File

@ -287,12 +287,11 @@ class RecordingMaintainer(threading.Thread):
)
# publish most recently available recording time and None if disabled
camera_cfg = self.config.cameras.get(camera)
self.recordings_publisher.publish(
(
camera,
recordings[0]["start_time"].timestamp()
if camera_cfg and camera_cfg.record.enabled
if self.config.cameras[camera].record.enabled
else None,
None,
),
@ -316,8 +315,9 @@ class RecordingMaintainer(threading.Thread):
) -> Optional[Recordings]:
cache_path: str = recording["cache_path"]
start_time: datetime.datetime = recording["start_time"]
record_config = self.config.cameras[camera].record
# Just delete files if camera removed or recordings are turned off
# Just delete files if recordings are turned off
if (
camera not in self.config.cameras
or not self.config.cameras[camera].record.enabled

View File

@ -652,9 +652,6 @@ class ReviewSegmentMaintainer(threading.Thread):
if camera not in self.indefinite_events:
self.indefinite_events[camera] = {}
if camera not in self.config.cameras:
continue
if (
not self.config.cameras[camera].enabled
or not self.config.cameras[camera].record.enabled

View File

@ -340,9 +340,6 @@ def stats_snapshot(
stats["cameras"] = {}
for name, camera_stats in camera_metrics.items():
if name not in config.cameras:
continue
total_camera_fps += camera_stats.camera_fps.value
total_process_fps += camera_stats.process_fps.value
total_skipped_fps += camera_stats.skipped_fps.value

View File

@ -8,7 +8,7 @@ from pathlib import Path
from peewee import SQL, fn
from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, REPLAY_CAMERA_PREFIX
from frigate.const import RECORD_DIR
from frigate.models import Event, Recordings
from frigate.util.builtin import clear_and_unlink
@ -32,10 +32,6 @@ class StorageMaintainer(threading.Thread):
def calculate_camera_bandwidth(self) -> None:
"""Calculate an average MB/hr for each camera."""
for camera in self.config.cameras.keys():
# Skip replay cameras
if camera.startswith(REPLAY_CAMERA_PREFIX):
continue
# cameras with < 50 segments should be refreshed to keep size accurate
# when few segments are available
if self.camera_storage_stats.get(camera, {}).get("needs_refresh", True):
@ -81,10 +77,6 @@ class StorageMaintainer(threading.Thread):
usages: dict[str, dict] = {}
for camera in self.config.cameras.keys():
# Skip replay cameras
if camera.startswith(REPLAY_CAMERA_PREFIX):
continue
camera_storage = (
Recordings.select(fn.SUM(Recordings.segment_size))
.where(Recordings.camera == camera, Recordings.segment_size != 0)

View File

@ -13,7 +13,6 @@ from pydantic import Json
from frigate.api.fastapi_app import create_fastapi_app
from frigate.config import FrigateConfig
from frigate.const import BASE_DIR, CACHE_DIR
from frigate.debug_replay import DebugReplayManager
from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
@ -142,7 +141,6 @@ class BaseTestHttp(unittest.TestCase):
stats,
event_metadata_publisher,
None,
DebugReplayManager(),
enforce_default_admin=False,
)

View File

@ -22,32 +22,3 @@ class TestHttpApp(BaseTestHttp):
response = client.get("/stats")
response_json = response.json()
assert response_json == self.test_stats
def test_config_set_in_memory_replaces_objects_track_list(self):
self.minimal_config["cameras"]["front_door"]["objects"] = {
"track": ["person", "car"],
}
app = super().create_app()
app.config_publisher = Mock()
with AuthTestClient(app) as client:
response = client.put(
"/config/set",
json={
"requires_restart": 0,
"skip_save": True,
"update_topic": "config/cameras/front_door/objects",
"config_data": {
"cameras": {
"front_door": {
"objects": {
"track": ["person"],
}
}
}
},
},
)
assert response.status_code == 200
assert app.frigate_config.cameras["front_door"].objects.track == ["person"]

View File

@ -151,22 +151,6 @@ class TestConfig(unittest.TestCase):
frigate_config = FrigateConfig(**config)
assert "dog" in frigate_config.cameras["back"].objects.track
def test_deep_merge_override_replaces_list_values(self):
base = {"objects": {"track": ["person", "face"]}}
update = {"objects": {"track": ["person"]}}
merged = deep_merge(base, update, override=True)
assert merged["objects"]["track"] == ["person"]
def test_deep_merge_merge_lists_still_appends(self):
base = {"track": ["person"]}
update = {"track": ["face"]}
merged = deep_merge(base, update, override=True, merge_lists=True)
assert merged["track"] == ["person", "face"]
def test_override_birdseye(self):
config = {
"mqtt": {"host": "mqtt"},

View File

@ -86,9 +86,7 @@ class TimelineProcessor(threading.Thread):
event_data: dict[Any, Any],
) -> bool:
"""Handle object detection."""
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False
camera_config = self.config.cameras[camera]
event_id = event_data["id"]
# Base timeline entry data that all entries will share

View File

@ -690,13 +690,9 @@ class TrackedObjectProcessor(threading.Thread):
self.create_camera_state(camera)
elif "remove" in updated_topics:
for camera in updated_topics["remove"]:
removed_camera_state = self.camera_states[camera]
removed_camera_state.shutdown()
camera_state = self.camera_states[camera]
camera_state.shutdown()
self.camera_states.pop(camera)
self.camera_activity.pop(camera, None)
self.last_motion_detected.pop(camera, None)
self.requestor.send_data(UPDATE_CAMERA_ACTIVITY, self.camera_activity)
# manage camera disabled state
for camera, config in self.config.cameras.items():
@ -704,10 +700,6 @@ class TrackedObjectProcessor(threading.Thread):
continue
current_enabled = config.enabled
camera_state = self.camera_states.get(camera)
if camera_state is None:
continue
camera_state = self.camera_states[camera]
if camera_state.prev_enabled and not current_enabled:
@ -760,11 +752,7 @@ class TrackedObjectProcessor(threading.Thread):
except queue.Empty:
continue
camera_config = self.config.cameras.get(camera)
if camera_config is None:
continue
if not camera_config.enabled:
if not self.config.cameras[camera].enabled:
logger.debug(f"Camera {camera} disabled, skipping update")
continue

View File

@ -16,7 +16,7 @@ from frigate.config import (
SnapshotsConfig,
UIConfig,
)
from frigate.const import CLIPS_DIR, REPLAY_CAMERA_PREFIX, THUMB_DIR
from frigate.const import CLIPS_DIR, THUMB_DIR
from frigate.detectors.detector_config import ModelConfig
from frigate.review.types import SeverityEnum
from frigate.util.builtin import sanitize_float
@ -621,9 +621,6 @@ class TrackedObject:
if not self.camera_config.name:
return
if self.camera_config.name.startswith(REPLAY_CAMERA_PREFIX):
return
directory = os.path.join(THUMB_DIR, self.camera_config.name)
if not os.path.exists(directory):

View File

@ -84,8 +84,7 @@ def deep_merge(dct1: dict, dct2: dict, override=False, merge_lists=False) -> dic
"""
:param dct1: First dict to merge
:param dct2: Second dict to merge
:param override: if same key exists in both dictionaries, should override? otherwise ignore.
:param merge_lists: if True, lists will be merged.
:param override: if same key exists in both dictionaries, should override? otherwise ignore. (default=True)
:return: The merge dictionary
"""
merged = copy.deepcopy(dct1)
@ -97,8 +96,6 @@ def deep_merge(dct1: dict, dct2: dict, override=False, merge_lists=False) -> dic
elif isinstance(v1, list) and isinstance(v2, list):
if merge_lists:
merged[k] = v1 + v2
elif override:
merged[k] = copy.deepcopy(v2)
else:
if override:
merged[k] = copy.deepcopy(v2)

View File

@ -9,7 +9,6 @@ from typing import Any, Optional, Union
from ruamel.yaml import YAML
from frigate.const import CONFIG_DIR, EXPORT_DIR
from frigate.util.builtin import deep_merge
from frigate.util.services import get_video_properties
logger = logging.getLogger(__name__)
@ -689,78 +688,3 @@ class StreamInfoRetriever:
info = asyncio.run(get_video_properties(ffmpeg, path))
self.stream_cache[path] = info
return info
def apply_section_update(camera_config, section: str, update: dict) -> Optional[str]:
"""Merge an update dict into a camera config section and rebuild runtime variants.
For motion and object filter sections, the plain Pydantic models are rebuilt
as RuntimeMotionConfig / RuntimeFilterConfig so that rasterized numpy masks
are recomputed. This mirrors the logic in FrigateConfig.post_validation.
Args:
camera_config: The CameraConfig instance to update.
section: Config section name (e.g. "motion", "objects").
update: Nested dict of field updates to merge.
Returns:
None on success, or an error message string on failure.
"""
from frigate.config.config import RuntimeFilterConfig, RuntimeMotionConfig
current = getattr(camera_config, section, None)
if current is None:
return f"Section '{section}' not found on camera '{camera_config.name}'"
try:
frame_shape = camera_config.frame_shape
if section == "motion":
merged = deep_merge(
current.model_dump(exclude_unset=True, exclude={"rasterized_mask"}),
update,
override=True,
)
camera_config.motion = RuntimeMotionConfig(
frame_shape=frame_shape, **merged
)
elif section == "objects":
merged = deep_merge(
current.model_dump(
exclude={"filters": {"__all__": {"rasterized_mask"}}}
),
update,
override=True,
)
new_objects = current.__class__.model_validate(merged)
# Preserve private _all_objects from original config
try:
new_objects._all_objects = current._all_objects
except AttributeError:
pass
# Rebuild RuntimeFilterConfig with merged global + per-object masks
for obj_name, filt in new_objects.filters.items():
merged_mask = dict(filt.mask)
if new_objects.mask:
for gid, gmask in new_objects.mask.items():
merged_mask[f"global_{gid}"] = gmask
new_objects.filters[obj_name] = RuntimeFilterConfig(
frame_shape=frame_shape,
mask=merged_mask,
**filt.model_dump(exclude_unset=True, exclude={"mask", "raw_mask"}),
)
camera_config.objects = new_objects
else:
merged = deep_merge(current.model_dump(), update, override=True)
setattr(camera_config, section, current.__class__.model_validate(merged))
except Exception:
logger.exception("Config validation error")
return "Validation error. Check logs for details."
return None

View File

@ -117,7 +117,6 @@
"button": {
"add": "Add",
"apply": "Apply",
"applying": "Applying…",
"reset": "Reset",
"undo": "Undo",
"done": "Done",
@ -253,7 +252,6 @@
"review": "Review",
"explore": "Explore",
"export": "Export",
"actions": "Actions",
"uiPlayground": "UI Playground",
"faceLibrary": "Face Library",
"classification": "Classification",

View File

@ -216,10 +216,6 @@
},
"hideObjectDetails": {
"label": "Hide object path"
},
"debugReplay": {
"label": "Debug replay",
"aria": "View this tracked object in the debug replay view"
}
},
"dialog": {

View File

@ -1,54 +0,0 @@
{
"title": "Debug Replay",
"description": "Replay camera recordings for debugging. The object list shows a time-delayed summary of detected objects and the Messages tab shows a stream of Frigate's internal messages from the replay footage.",
"websocket_messages": "Messages",
"dialog": {
"title": "Start Debug Replay",
"description": "Create a temporary replay camera that loops historical footage for debugging object detection and tracking issues. The replay camera will have the same detection configuration as the source camera. Choose a time range to begin.",
"camera": "Source Camera",
"timeRange": "Time Range",
"preset": {
"1m": "Last 1 Minute",
"5m": "Last 5 Minutes",
"timeline": "From Timeline",
"custom": "Custom"
},
"startButton": "Start Replay",
"selectFromTimeline": "Select",
"starting": "Starting replay...",
"startLabel": "Start",
"endLabel": "End",
"toast": {
"success": "Debug replay started successfully",
"error": "Failed to start debug replay: {{error}}",
"alreadyActive": "A replay session is already active",
"stopped": "Debug replay stopped",
"stopError": "Failed to stop debug replay: {{error}}",
"goToReplay": "Go to Replay"
}
},
"page": {
"noSession": "No Active Replay Session",
"noSessionDesc": "Start a debug replay from the History view by clicking the Debug Replay button in the toolbar.",
"goToRecordings": "Go to History",
"sourceCamera": "Source Camera",
"replayCamera": "Replay Camera",
"initializingReplay": "Initializing replay...",
"stoppingReplay": "Stopping replay...",
"stopReplay": "Stop Replay",
"confirmStop": {
"title": "Stop Debug Replay?",
"description": "This will stop the replay session and clean up all temporary data. Are you sure?",
"confirm": "Stop Replay",
"cancel": "Cancel"
},
"activity": "Activity",
"objects": "Object List",
"audioDetections": "Audio Detections",
"noActivity": "No activity detected",
"activeTracking": "Active tracking",
"noActiveTracking": "No active tracking",
"configuration": "Configuration",
"configurationDesc": "Fine tune motion detection and object tracking settings for the debug replay camera. No changes are saved to your Frigate configuration file."
}
}

View File

@ -1392,7 +1392,6 @@
},
"toast": {
"success": "Settings saved successfully",
"applied": "Settings applied successfully",
"successRestartRequired": "Settings saved successfully. Restart Frigate to apply your changes.",
"error": "Failed to save settings",
"validationError": "Validation failed: {{message}}",

View File

@ -7,39 +7,12 @@
"logs": {
"frigate": "Frigate Logs - Frigate",
"go2rtc": "Go2RTC Logs - Frigate",
"nginx": "Nginx Logs - Frigate",
"websocket": "Messages Logs - Frigate"
"nginx": "Nginx Logs - Frigate"
}
},
"title": "System",
"metrics": "System metrics",
"logs": {
"websocket": {
"label": "Messages",
"pause": "Pause",
"resume": "Resume",
"clear": "Clear",
"filter": {
"all": "All topics",
"topics": "Topics",
"events": "Events",
"reviews": "Reviews",
"classification": "Classification",
"face_recognition": "Face Recognition",
"lpr": "LPR",
"camera_activity": "Camera activity",
"system": "System",
"camera": "Camera",
"all_cameras": "All cameras",
"cameras_count_one": "{{count}} Camera",
"cameras_count_other": "{{count}} Cameras"
},
"empty": "No messages captured yet",
"count": "{{count}} messages",
"expanded": {
"payload": "Payload"
}
},
"download": {
"label": "Download Logs"
},
@ -216,8 +189,7 @@
"cameraIsOffline": "{{camera}} is offline",
"detectIsSlow": "{{detect}} is slow ({{speed}} ms)",
"detectIsVerySlow": "{{detect}} is very slow ({{speed}} ms)",
"shmTooLow": "/dev/shm allocation ({{total}} MB) should be increased to at least {{min}} MB.",
"debugReplayActive": "Debug replay session is active"
"shmTooLow": "/dev/shm allocation ({{total}} MB) should be increased to at least {{min}} MB."
},
"enrichments": {
"title": "Enrichments",

View File

@ -132,14 +132,7 @@
"continue": "Continuer",
"add": "Ajouter",
"undo": "Annuler",
"copiedToClipboard": "Copié dans le presse-papiers",
"modified": "Modifié",
"overridden": "Surpassé",
"resetToGlobal": "Réinitialiser aux réglages globaux",
"resetToDefault": "Réinitialiser aux réglages par défaut",
"saveAll": "Tout enregistrer",
"savingAll": "Enregistrement de tout en cours…",
"undoAll": "Tout annuler"
"copiedToClipboard": "Copié dans le presse-papiers"
},
"menu": {
"configuration": "Configuration",

View File

@ -1,320 +1 @@
{
"name": {
"label": "Nom de la caméra",
"description": "Le nom de la caméra est requis"
},
"friendly_name": {
"label": "Nom convivial",
"description": "Nom convivial de la caméra utilisé dans l'IU Frigate"
},
"enabled": {
"label": "Activé",
"description": "Activé"
},
"audio": {
"label": "Événements audio",
"description": "Réglages pour la détection des événements audio de cette caméra.",
"enabled": {
"label": "Activer la détection audio",
"description": "Activer ou désactiver la détection des événements audio pour cette caméra."
},
"max_not_heard": {
"description": "Nombre de secondes sans le type audio configuré avant que l'événement audio se termine.",
"label": "Délai d'inactivité"
},
"min_volume": {
"label": "Volume minimal",
"description": "Seuil minimal d'activation du volume en moyenne quadratique requis pour exécuter la détection audio. Des valeurs plus faibles augmentent la sensibilité (p. ex. 200 est élevé, 500 est moyen et 1000 est faible)."
},
"listen": {
"label": "Types d'écoute",
"description": "Liste des types d'événements audio à détecter (p. ex. bark, fire_alarm, scream, speech, yell)."
},
"filters": {
"label": "Filtres audio",
"description": "Réglages des filtres par type audio, tels que seuils de confiance utilisé afin de réduire les faux positifs."
},
"enabled_in_config": {
"label": "État audio original",
"description": "Indique si la détection audio était initialement activée dans le fichier de configuration statique."
},
"num_threads": {
"label": "Fils d'exécution pour la détection",
"description": "Nombre de fils d'éxécution à utiliser pour le traitement de la détection audio."
}
},
"audio_transcription": {
"label": "Transcription audio",
"description": "Réglages pour la transcription audio et vocale utilisée pour les événements et les sous-titres en temps réel.",
"enabled": {
"label": "Activer la transcription",
"description": "Activer ou désactiver le déclenchement manuel de la transcription des événements audio."
},
"enabled_in_config": {
"label": "État original de la transcription"
},
"live_enabled": {
"label": "Transcription en temps réel",
"description": "Activer la diffusion de la transcription en temps réel pour le flux sonore dès sa réception."
}
},
"birdseye": {
"label": "À vol d'oiseau",
"description": "Réglages pour la vue composée à vol d'oiseau qui combine plusieurs flux de caméras dans une simple disposition.",
"enabled": {
"label": "Activer la vue à vol d'oiseau",
"description": "Activer ou désactiver la fonctionalité de vue à vol d'oiseau."
},
"mode": {
"label": "Mode de suivi",
"description": "Mode pour l'inclusion des caméras dans la vue à vol d'oiseau: 'objects', 'motion', ou 'continuous'."
},
"order": {
"label": "Emplacement",
"description": "Emplacement numérique contrôlant l'ordre de la caméra dans la disposition en vue à vol d'oiseau."
}
},
"detect": {
"label": "Détection d'objets",
"description": "Réglages pour la détection ou le rôle de détection utilisé pour exécuter la détection des objets et initialiser les traceurs.",
"enabled": {
"label": "Détection activée",
"description": "Activer ou désactiver la détection des objets pour cette caméra. La détection doit être activée pour que le suivi des objets fonctionne."
},
"height": {
"label": "Hauteur de détection",
"description": "Hauteur (en pixels) des images utilisées pour le flux de détection; garder vide pour utiliser la résolution native du flux."
},
"width": {
"label": "Largeur de détection",
"description": "Largeur (en pixels) des images utilisées pour le flux de détection; garder vide pour utiliser la résolution native du flux."
},
"fps": {
"label": "IPS de la détection",
"description": "Nombre cible d'images par seconde à utiliser pour la détection; des valeurs plus faibles réduisent l'utilisation de l'UCT (la valeur recommandée est 5, ne la définir à une valeur supérieure - au maximum 10, uniquement lors du suivi d'objets se déplaçant extrêmement rapidement)."
},
"min_initialized": {
"label": "Minimum d'images d'initialisation",
"description": "Nombre de détections consécutives requises avant de créer un objet suivi. Augmenter pour réduire les initialisations erronées. La valeur par défaut est fps divisé par 2."
},
"max_disappeared": {
"label": "Nombre maximal d'images disparues",
"description": "Nombre d'images sans détection avant qu'un objet suivi est considéré comme étant disparu."
},
"stationary": {
"label": "Configuration des objets stationnaires",
"description": "Réglages pour la détection et la gestion des objets qui restent stationnaires pendant un certain temps.",
"interval": {
"label": "Intervalle stationnaire",
"description": "À quelle fréquence (en images) effectuer une détection pour la confirmation d'un objet stationnaire."
},
"threshold": {
"label": "Seuil d'activation stationnaire",
"description": "Nombre d'images sans changement d'emplacement requis pour marquer un objet en tant que stationnaire."
},
"max_frames": {
"label": "Nombre max. d'images",
"description": "Limite le temps pour lequel les objets stationnaires sont suivis avant d'être supprimés.",
"default": {
"label": "Nombre max. d'images par défaut",
"description": "Nombre maximal d'images pour suivre un objet stationnaire avant d'arrêter."
},
"objects": {
"label": "Nombre max. d'images pour l'objet",
"description": "Remplacement des réglages par défaut par objet pour le nombre maximal d'images requis pour suivre les objets stationnaires."
}
},
"classifier": {
"label": "Activer le classificateur visuel",
"description": "Utiliser un classificateur visuel pour détecter les objets véritablement stationnaires même lorsque les boîtes englobantes tremblent."
}
},
"annotation_offset": {
"label": "Décalage de l'annotation",
"description": "Millisecondes pour le décalage des annotations afin de mieux aligner les boîtes englobantes de la ligne du temps avec les enregistrements; peut être positif ou négatif."
}
},
"face_recognition": {
"label": "Reconnaissance faciale",
"description": "Réglages pour la détection et reconnaissance faciale pour cette caméra.",
"enabled": {
"label": "Activer la reconnaissance faciale",
"description": "Activer ou désactiver la reconnaissance faciale."
},
"min_area": {
"label": "Surface minimale du visage",
"description": "Surface minimale (en pixels) d'une boîte faciale détectée requise pour tenter la reconnaissance."
}
},
"ffmpeg": {
"label": "FFmpeg",
"description": "Réglages de FFmpeg incluant l'emplacement du fichier binaire, les arguments, les options pour hwaccel et les arguments de sortie par rôle.",
"path": {
"label": "Emplacement de FFmpeg",
"description": "Emplacement du fichier binaire de FFmpeg à utiliser ou un alias de version (peut être «5.0» ou «7.0»)."
},
"global_args": {
"label": "Arguments globaux de FFmpeg",
"description": "Arguments globaux transmis aux processus de FFmpeg."
},
"hwaccel_args": {
"label": "Arguments pour l'accélération matérielle",
"description": "Arguments de l'accélération matérielle pour FFmpeg. Les préréglages spécifiques au fournisseur sont recommandés."
},
"input_args": {
"label": "Arguments d'entrée",
"description": "Arguments d'entrée appliqués aux flux d'entrée FFmpeg."
},
"output_args": {
"label": "Arguments de sortie",
"description": "Arguments de sortie par défaut utilisés pour les différents rôles FFmpeg, tels que detect et record.",
"detect": {
"label": "Détecter les arguments de sortie",
"description": "Arguments de sortie par défaut pour les flux du rôle detect."
},
"record": {
"label": "Arguments de sortie pour l'enregistrement",
"description": "Arguments de sortie par défaut pour les flux du rôle record."
}
},
"retry_interval": {
"label": "Temps de réessai FFmpeg",
"description": "Nombre de secondes à attendre avant de tenter de reconnecter un flux de caméra après un échec. La valeur par défaut est 10."
},
"apple_compatibility": {
"label": "Compatibilité avec Apple",
"description": "Activer l'étiquetage HEVC pour une meilleure compatibilité avec les lecteurs Apple lors de l'enregistrement H.265."
},
"gpu": {
"label": "Index de l'UTG",
"description": "Index par défaut de l'UTG utilisé pour l'accélération matérielle si disponible."
},
"inputs": {
"label": "Entrées des caméras",
"description": "Liste des définitions des flux entrants (emplacements et rôles) pour cette caméra.",
"path": {
"label": "Emplacement d'entrée",
"description": "URL ou emplacement du flux d'entrée de la caméra."
},
"roles": {
"label": "Rôles d'entrée",
"description": "Rôles pour ce flux entrant."
},
"global_args": {
"label": "Arguments globaux de FFmpeg",
"description": "Arguments globaux de FFmpeg pour ce flux entrant."
},
"hwaccel_args": {
"label": "Arguments pour l'accélération matérielle",
"description": "Arguments de l'accélération matérielle pour ce flux entrant."
},
"input_args": {
"label": "Arguments d'entrée",
"description": "Arguments d'entrée spéficiques à ce flux."
}
}
},
"live": {
"label": "Lecture en direct",
"description": "Réglages utilisés par l'IU Web afin de contrôler la sélection, la résolution et la qualité des flux en direct.",
"streams": {
"label": "Nom des flux en direct",
"description": "Mappage des noms des flux configurés vers les noms de restream et go2rtc utilisés pour la lecture en direct."
},
"height": {
"label": "Hauteur de la diffusion en direct",
"description": "Hauteur (en pixels) à laquelle afficher le flux en direct jsmpeg dans l'IU Web; doit être inférieure ou égale à la hauteur détectée du flux."
},
"quality": {
"label": "Qualité de la diffusion en direct",
"description": "Qualité de l'encodage pour le flux jsmpeg (1 étant la plus élevée, 31 la plus faible)."
}
},
"lpr": {
"label": "Reconnaissance des plaques d'immatriculation",
"description": "Réglages de la reconnaissance des plaques d'immatriculation incluant les seuils de détection, le formatage et les plaques connues.",
"enabled": {
"label": "Activer la RPI",
"description": "Activer ou désactiver la RPI sur cette caméra."
},
"expire_time": {
"label": "Expiration en secondes",
"description": "Temps en secondes après lequel une plaque non vue expire du système de suivi (seulement pour les caméras dédiées à la RPI)."
},
"min_area": {
"label": "Surface minimale de la plaque",
"description": "Surface minimale de la plaque (en pixels) requise pour tenter la reconnaissance."
},
"enhancement": {
"label": "Niveau de l'enrichissement",
"description": "Niveau de l'enrichissement (de 0 à 10) à appliquer aux recadrages des plaques avant la ROC. Des valeurs plus élevées n'améliorent pas nécessairement les résultats, les niveaux supérieurs à 5 peuvent ne fonctionner qu'avec des plaques la nuit et doivent être utilisés avec prudence."
}
},
"motion": {
"label": "Détection du mouvement",
"description": "Réglages par défaut de la détection de mouvement pour cette caméra.",
"enabled": {
"label": "Activer la détection de mouvement",
"description": "Activer ou désactiver la détection de mouvement pour cette caméra."
},
"threshold": {
"label": "Seuil de détection du mouvement",
"description": "Seuil de différence de pixels utilisé par le détecteur de mouvement; les valeurs plus élevées réduisent la sensibilité (plage de 1 à 255)."
},
"lightning_threshold": {
"label": "Seuil d'éclairage",
"description": "Seuil permettant de détecter et d'ignorer les brusques pointes d'éclairage (plus la valeur est faible, plus la sensibilité est élevée, valeurs comprises entre 0.3 et 1.0)."
},
"improve_contrast": {
"label": "Améliorer le contraste",
"description": "Appliquer les amélioration du contraste aux images avant l'analyse de mouvement afin d'améliorer la détection."
},
"contour_area": {
"label": "Zone de contour",
"description": "Aire de la zone de contour minimale en pixels requise pour qu'un contour de mouvement soit comptabilisé."
},
"delta_alpha": {
"label": "Delta pour alpha",
"description": "Facteur de mélange alpha utilisé dans la différenciation d'images pour le calcul du mouvement."
},
"frame_alpha": {
"label": "Alpha pour l'image",
"description": "Valeur alpha utilisée lors du mélange d'images pour le prétraitement du mouvement."
},
"frame_height": {
"label": "Hauteur de l'image",
"description": "Hauteur en pixels à laquelle mettre à l'échelle les images lors du traitement du mouvement."
},
"mask": {
"label": "Moordonnées du masque",
"description": "Coordonnées ordonnés x et y définissant le polygone du masque de mouvement utilisé pour inclure ou exclure des aires."
},
"mqtt_off_delay": {
"label": "Délai de désactivation de MQTT",
"description": "Nombre de secondes à attendre après le dernier mouvement avant de publier un état « off » MQTT."
},
"enabled_in_config": {
"label": "État original du mouvement",
"description": "Indique si la détection de mouvement a été activée dans la configuration originale statique."
},
"raw_mask": {
"label": "Masque brut"
}
},
"objects": {
"label": "Objets",
"description": "Réglages par défaut pour le suivi des objets incluant les étiquettes à suivre et les filtres par objets.",
"track": {
"label": "Objets à suivre",
"description": "Liste des étiquettes d'objets à suivre pour cette caméra."
},
"filters": {
"label": "Filtres d'objets",
"description": "Filtres appliqués aux objets détectés afin de réduire les faux positifs (aire, rapport, facteur de confiance).",
"min_area": {
"label": "Aire minimal de l'objet"
}
}
},
"label": "ConfigurationCamera"
}
{}

View File

@ -21,7 +21,7 @@
"ui": "Interface utilisateur",
"classification": "Classification",
"masksAndZones": "Masques / Zones",
"motionTuner": "Ajusteur de la détection de mouvement",
"motionTuner": "Réglage de la détection de mouvement",
"debug": "Débogage",
"cameras": "Paramètres des caméras",
"users": "Utilisateurs",
@ -99,7 +99,7 @@
"noCamera": "Aucune caméra"
},
"general": {
"title": "Paramètres du profil",
"title": "Paramètres de l'interface utilisateur",
"liveDashboard": {
"title": "Tableau de bord en direct",
"automaticLiveView": {
@ -781,7 +781,7 @@
"readTheDocumentation": "Lire la documentation",
"reindexNow": {
"label": "Réindexer maintenant",
"desc": "La réindexation va régénérer les ingrations pour tous les objets suivis. Ce processus s'exécute en arrière-plan et peut saturer votre processeur et prendre un temps considérable en fonction du nombre d'objets suivis.",
"desc": "La réindexation va régénérer les embeddings pour tous les objets suivis. Ce processus s'exécute en arrière-plan et peut saturer votre processeur et prendre un temps considérable en fonction du nombre d'objets suivis.",
"confirmTitle": "Confirmer la réindexation",
"confirmButton": "Réindexer",
"success": "La réindexation a démarré avec succès.",
@ -1387,8 +1387,7 @@
"label": "Champ"
},
"value": {
"label": "Nouvelle valeur",
"reset": "Réinitialiser"
"label": "Nouvelle valeur"
}
}
}

View File

@ -167,17 +167,6 @@
"error": {
"unableToProbeCamera": "Impossible d'interroger la caméra : {{errorMessage}}"
}
},
"connectionQuality": {
"title": "Qualité de la connexion",
"excellent": "Excellente",
"fair": "Acceptable",
"poor": "Médiocre",
"unusable": "Inutilisable",
"fps": "IPS",
"expectedFps": "IPS attendues",
"reconnectsLastHour": "Reconnexions (dernière heure)",
"stallsLastHour": "Baisses de qualité (dernière heure)"
}
},
"lastRefreshed": "Dernier rafraichissement : ",

View File

@ -30,7 +30,6 @@ const Classification = lazy(() => import("@/pages/ClassificationModel"));
const Chat = lazy(() => import("@/pages/Chat"));
const Logs = lazy(() => import("@/pages/Logs"));
const AccessDenied = lazy(() => import("@/pages/AccessDenied"));
const Replay = lazy(() => import("@/pages/Replay"));
function App() {
const { data: config } = useSWR<FrigateConfig>("config", {
@ -109,8 +108,7 @@ function DefaultAppView() {
<Route path="/faces" element={<FaceLibrary />} />
<Route path="/classification" element={<Classification />} />
<Route path="/chat" element={<Chat />} />
<Route path="/playground" element={<UIPlayground />} />{" "}
<Route path="/replay" element={<Replay />} />{" "}
<Route path="/playground" element={<UIPlayground />} />
</Route>
<Route path="/unauthorized" element={<AccessDenied />} />
<Route path="*" element={<Redirect to="/" />} />

View File

@ -1,5 +1,5 @@
import { baseUrl } from "./baseUrl";
import { useCallback, useEffect, useRef, useState } from "react";
import { useCallback, useEffect, useState } from "react";
import useWebSocket, { ReadyState } from "react-use-websocket";
import {
EmbeddingsReindexProgressType,
@ -17,13 +17,6 @@ import { FrigateStats } from "@/types/stats";
import { createContainer } from "react-tracked";
import useDeepMemo from "@/hooks/use-deep-memo";
export type WsFeedMessage = {
topic: string;
payload: unknown;
timestamp: number;
id: string;
};
type Update = {
topic: string;
payload: unknown;
@ -36,9 +29,6 @@ type WsState = {
type useValueReturn = [WsState, (update: Update) => void];
const wsMessageSubscribers = new Set<(msg: WsFeedMessage) => void>();
let wsMessageIdCounter = 0;
function useValue(): useValueReturn {
const wsUrl = `${baseUrl.replace(/^http/, "ws")}ws`;
@ -53,13 +43,8 @@ function useValue(): useValueReturn {
return;
}
let cameraActivity: { [key: string]: Partial<FrigateCameraState> };
try {
cameraActivity = JSON.parse(activityValue);
} catch {
return;
}
const cameraActivity: { [key: string]: FrigateCameraState } =
JSON.parse(activityValue);
if (Object.keys(cameraActivity).length === 0) {
return;
@ -68,12 +53,6 @@ function useValue(): useValueReturn {
const cameraStates: WsState = {};
Object.entries(cameraActivity).forEach(([name, state]) => {
const cameraConfig = state?.config;
if (!cameraConfig) {
return;
}
const {
record,
detect,
@ -88,7 +67,7 @@ function useValue(): useValueReturn {
detections,
object_descriptions,
review_descriptions,
} = cameraConfig;
} = state["config"];
cameraStates[`${name}/recordings/state`] = record ? "ON" : "OFF";
cameraStates[`${name}/enabled/state`] = enabled ? "ON" : "OFF";
cameraStates[`${name}/detect/state`] = detect ? "ON" : "OFF";
@ -136,17 +115,6 @@ function useValue(): useValueReturn {
...prevState,
[data.topic]: data.payload,
}));
// Notify feed subscribers
if (wsMessageSubscribers.size > 0) {
const feedMsg: WsFeedMessage = {
topic: data.topic,
payload: data.payload,
timestamp: Date.now(),
id: String(wsMessageIdCounter++),
};
wsMessageSubscribers.forEach((cb) => cb(feedMsg));
}
}
},
onOpen: () => {
@ -772,16 +740,3 @@ export function useJobStatus(
return { payload: currentJob as Job | null };
}
/**
 * Subscribe to the raw websocket message feed for the lifetime of the
 * calling component. The subscription is registered once on mount and
 * removed on unmount; the most recent callback is always invoked through a
 * ref, so callers do not need to memoize the callback they pass in.
 */
export function useWsMessageSubscribe(callback: (msg: WsFeedMessage) => void) {
  // Keep the latest callback without re-running the subscription effect.
  const latestCallback = useRef(callback);
  latestCallback.current = callback;

  useEffect(() => {
    const forward = (msg: WsFeedMessage) => latestCallback.current(msg);
    wsMessageSubscribers.add(forward);
    return () => {
      wsMessageSubscribers.delete(forward);
    };
  }, []);
}

View File

@ -26,8 +26,7 @@ export default function CameraImage({
const containerRef = useRef<HTMLDivElement | null>(null);
const imgRef = useRef<HTMLImageElement | null>(null);
const cameraConfig = config?.cameras?.[camera];
const { name } = cameraConfig ?? { name: camera };
const { name } = config ? config.cameras[camera] : "";
const { payload: enabledState } = useEnabledState(camera);
const enabled = enabledState ? enabledState === "ON" : true;
@ -35,15 +34,15 @@ export default function CameraImage({
useResizeObserver(containerRef);
const requestHeight = useMemo(() => {
if (!cameraConfig || containerHeight == 0) {
if (!config || containerHeight == 0) {
return 360;
}
return Math.min(
cameraConfig.detect.height,
config.cameras[camera].detect.height,
Math.round(containerHeight * (isDesktop ? 1.1 : 1.25)),
);
}, [cameraConfig, containerHeight]);
}, [config, camera, containerHeight]);
const [isPortraitImage, setIsPortraitImage] = useState(false);

View File

@ -44,30 +44,6 @@ const motion: SectionConfigOverrides = {
camera: {
restartRequired: ["frame_height"],
},
replay: {
restartRequired: [],
fieldOrder: [
"threshold",
"contour_area",
"lightning_threshold",
"improve_contrast",
],
fieldGroups: {
sensitivity: ["threshold", "contour_area"],
algorithm: ["improve_contrast"],
},
hiddenFields: [
"enabled",
"enabled_in_config",
"mask",
"raw_mask",
"mqtt_off_delay",
"delta_alpha",
"frame_alpha",
"frame_height",
],
advancedFields: ["lightning_threshold"],
},
};
export default motion;

View File

@ -99,28 +99,6 @@ const objects: SectionConfigOverrides = {
camera: {
restartRequired: [],
},
replay: {
restartRequired: [],
fieldOrder: ["track", "filters"],
fieldGroups: {
tracking: ["track"],
filtering: ["filters"],
},
hiddenFields: [
"enabled_in_config",
"alert",
"detect",
"mask",
"raw_mask",
"genai",
"genai.enabled_in_config",
"filters.*.mask",
"filters.*.raw_mask",
"filters.mask",
"filters.raw_mask",
],
advancedFields: [],
},
};
export default objects;

View File

@ -4,5 +4,4 @@ export type SectionConfigOverrides = {
base?: SectionConfig;
global?: Partial<SectionConfig>;
camera?: Partial<SectionConfig>;
replay?: Partial<SectionConfig>;
};

View File

@ -95,9 +95,9 @@ export interface SectionConfig {
}
export interface BaseSectionProps {
/** Whether this is at global, camera, or replay level */
level: "global" | "camera" | "replay";
/** Camera name (required if level is "camera" or "replay") */
/** Whether this is at global or camera level */
level: "global" | "camera";
/** Camera name (required if level is "camera") */
cameraName?: string;
/** Whether to show override indicator badge */
showOverrideIndicator?: boolean;
@ -117,10 +117,6 @@ export interface BaseSectionProps {
defaultCollapsed?: boolean;
/** Whether to show the section title (default: false for global, true for camera) */
showTitle?: boolean;
/** If true, apply config in-memory only without writing to YAML */
skipSave?: boolean;
/** If true, buttons are not sticky at the bottom */
noStickyButtons?: boolean;
/** Callback when section status changes */
onStatusChange?: (status: {
hasChanges: boolean;
@ -160,16 +156,12 @@ export function ConfigSection({
collapsible = false,
defaultCollapsed = true,
showTitle,
skipSave = false,
noStickyButtons = false,
onStatusChange,
pendingDataBySection,
onPendingDataChange,
}: ConfigSectionProps) {
// For replay level, treat as camera-level config access
const effectiveLevel = level === "replay" ? "camera" : level;
const { t, i18n } = useTranslation([
effectiveLevel === "camera" ? "config/cameras" : "config/global",
level === "camera" ? "config/cameras" : "config/global",
"config/cameras",
"views/settings",
"common",
@ -182,10 +174,10 @@ export function ConfigSection({
// Create a key for this section's pending data
const pendingDataKey = useMemo(
() =>
effectiveLevel === "camera" && cameraName
level === "camera" && cameraName
? `${cameraName}::${sectionPath}`
: sectionPath,
[effectiveLevel, cameraName, sectionPath],
[level, cameraName, sectionPath],
);
// Use pending data from parent if available, otherwise use local state
@ -230,20 +222,20 @@ export function ConfigSection({
const lastPendingDataKeyRef = useRef<string | null>(null);
const updateTopic =
effectiveLevel === "camera" && cameraName
level === "camera" && cameraName
? cameraUpdateTopicMap[sectionPath]
? `config/cameras/${cameraName}/${cameraUpdateTopicMap[sectionPath]}`
: undefined
: `config/${sectionPath}`;
// Default: show title for camera level (since it might be collapsible), hide for global
const shouldShowTitle = showTitle ?? effectiveLevel === "camera";
const shouldShowTitle = showTitle ?? level === "camera";
// Fetch config
const { data: config, mutate: refreshConfig } =
useSWR<FrigateConfig>("config");
// Get section schema using cached hook
const sectionSchema = useSectionSchema(sectionPath, effectiveLevel);
const sectionSchema = useSectionSchema(sectionPath, level);
// Apply special case handling for sections with problematic schema defaults
const modifiedSchema = useMemo(
@ -255,7 +247,7 @@ export function ConfigSection({
// Get override status
const { isOverridden, globalValue, cameraValue } = useConfigOverride({
config,
cameraName: effectiveLevel === "camera" ? cameraName : undefined,
cameraName: level === "camera" ? cameraName : undefined,
sectionPath,
compareFields: sectionConfig.overrideFields,
});
@ -264,12 +256,12 @@ export function ConfigSection({
const rawSectionValue = useMemo(() => {
if (!config) return undefined;
if (effectiveLevel === "camera" && cameraName) {
if (level === "camera" && cameraName) {
return get(config.cameras?.[cameraName], sectionPath);
}
return get(config, sectionPath);
}, [config, cameraName, sectionPath, effectiveLevel]);
}, [config, level, cameraName, sectionPath]);
const rawFormData = useMemo(() => {
if (!config) return {};
@ -336,10 +328,9 @@ export function ConfigSection({
[rawFormData, sanitizeSectionData],
);
// Clear pendingData whenever the section/camera key changes (e.g., switching
// cameras) or when there is no pending data yet (initialization).
// This prevents RJSF's initial onChange call from being treated as a user edit.
// Only clear if pendingData is managed locally (not by parent).
// Clear pendingData whenever formData changes (e.g., from server refresh)
// This prevents RJSF's initial onChange call from being treated as a user edit
// Only clear if pendingData is managed locally (not by parent)
useEffect(() => {
const pendingKeyChanged = lastPendingDataKeyRef.current !== pendingDataKey;
@ -348,16 +339,15 @@ export function ConfigSection({
isInitializingRef.current = true;
setPendingOverrides(undefined);
setDirtyOverrides(undefined);
// Reset local pending data when switching sections/cameras
if (onPendingDataChange === undefined) {
setPendingData(null);
}
} else if (!pendingData) {
isInitializingRef.current = true;
setPendingOverrides(undefined);
setDirtyOverrides(undefined);
}
if (onPendingDataChange === undefined) {
setPendingData(null);
}
}, [
onPendingDataChange,
pendingData,
@ -494,7 +484,7 @@ export function ConfigSection({
setIsSaving(true);
try {
const basePath =
effectiveLevel === "camera" && cameraName
level === "camera" && cameraName
? `cameras.${cameraName}.${sectionPath}`
: sectionPath;
const rawData = sanitizeSectionData(rawFormData);
@ -505,7 +495,7 @@ export function ConfigSection({
);
const sanitizedOverrides = sanitizeOverridesForSection(
sectionPath,
effectiveLevel,
level,
overrides,
);
@ -518,26 +508,16 @@ export function ConfigSection({
return;
}
const needsRestart = skipSave
? false
: requiresRestartForOverrides(sanitizedOverrides);
const needsRestart = requiresRestartForOverrides(sanitizedOverrides);
const configData = buildConfigDataForPath(basePath, sanitizedOverrides);
await axios.put("config/set", {
requires_restart: needsRestart ? 1 : 0,
update_topic: updateTopic,
config_data: configData,
...(skipSave ? { skip_save: true } : {}),
});
if (skipSave) {
toast.success(
t("toast.applied", {
ns: "views/settings",
defaultValue: "Settings applied successfully",
}),
);
} else if (needsRestart) {
if (needsRestart) {
statusBar?.addMessage(
"config_restart_required",
t("configForm.restartRequiredFooter", {
@ -616,7 +596,7 @@ export function ConfigSection({
}, [
sectionPath,
pendingData,
effectiveLevel,
level,
cameraName,
t,
refreshConfig,
@ -628,16 +608,15 @@ export function ConfigSection({
updateTopic,
setPendingData,
requiresRestartForOverrides,
skipSave,
]);
// Handle reset to global/defaults - removes camera-level override or resets global to defaults
const handleResetToGlobal = useCallback(async () => {
if (effectiveLevel === "camera" && !cameraName) return;
if (level === "camera" && !cameraName) return;
try {
const basePath =
effectiveLevel === "camera" && cameraName
level === "camera" && cameraName
? `cameras.${cameraName}.${sectionPath}`
: sectionPath;
@ -653,7 +632,7 @@ export function ConfigSection({
t("toast.resetSuccess", {
ns: "views/settings",
defaultValue:
effectiveLevel === "global"
level === "global"
? "Reset to defaults"
: "Reset to global defaults",
}),
@ -672,7 +651,7 @@ export function ConfigSection({
}
}, [
sectionPath,
effectiveLevel,
level,
cameraName,
requiresRestart,
t,
@ -682,8 +661,8 @@ export function ConfigSection({
]);
const sectionValidation = useMemo(
() => getSectionValidation({ sectionPath, level: effectiveLevel, t }),
[sectionPath, effectiveLevel, t],
() => getSectionValidation({ sectionPath, level, t }),
[sectionPath, level, t],
);
const customValidate = useMemo(() => {
@ -754,7 +733,7 @@ export function ConfigSection({
// nested under the section name (e.g., `audio.label`). For global-level
// sections, keys are nested under the section name in `config/global`.
const configNamespace =
effectiveLevel === "camera" ? "config/cameras" : "config/global";
level === "camera" ? "config/cameras" : "config/global";
const title = t(`${sectionPath}.label`, {
ns: configNamespace,
defaultValue: defaultTitle,
@ -790,7 +769,7 @@ export function ConfigSection({
i18nNamespace={configNamespace}
customValidate={customValidate}
formContext={{
level: effectiveLevel,
level,
cameraName,
globalValue,
cameraValue,
@ -805,7 +784,7 @@ export function ConfigSection({
onFormDataChange: (data: ConfigSectionData) => handleChange(data),
// For widgets that need access to full camera config (e.g., zone names)
fullCameraConfig:
effectiveLevel === "camera" && cameraName
level === "camera" && cameraName
? config?.cameras?.[cameraName]
: undefined,
fullConfig: config,
@ -825,12 +804,7 @@ export function ConfigSection({
}}
/>
<div
className={cn(
"w-full border-t border-secondary bg-background pb-5 pt-0",
!noStickyButtons && "sticky bottom-0 z-50",
)}
>
<div className="sticky bottom-0 z-50 w-full border-t border-secondary bg-background pb-5 pt-0">
<div
className={cn(
"flex flex-col items-center gap-4 pt-2 md:flex-row",
@ -848,17 +822,15 @@ export function ConfigSection({
</div>
)}
<div className="flex w-full items-center gap-2 md:w-auto">
{((effectiveLevel === "camera" && isOverridden) ||
effectiveLevel === "global") &&
!hasChanges &&
!skipSave && (
{((level === "camera" && isOverridden) || level === "global") &&
!hasChanges && (
<Button
onClick={() => setIsResetDialogOpen(true)}
variant="outline"
disabled={isSaving || disabled}
className="flex flex-1 gap-2"
>
{effectiveLevel === "global"
{level === "global"
? t("button.resetToDefault", {
ns: "common",
defaultValue: "Reset to Default",
@ -890,18 +862,11 @@ export function ConfigSection({
{isSaving ? (
<>
<ActivityIndicator className="h-4 w-4" />
{skipSave
? t("button.applying", {
ns: "common",
defaultValue: "Applying...",
})
: t("button.saving", {
ns: "common",
defaultValue: "Saving...",
})}
{t("button.saving", {
ns: "common",
defaultValue: "Saving...",
})}
</>
) : skipSave ? (
t("button.apply", { ns: "common", defaultValue: "Apply" })
) : (
t("button.save", { ns: "common", defaultValue: "Save" })
)}
@ -933,7 +898,7 @@ export function ConfigSection({
setIsResetDialogOpen(false);
}}
>
{effectiveLevel === "global"
{level === "global"
? t("button.resetToDefault", { ns: "common" })
: t("button.resetToGlobal", { ns: "common" })}
</AlertDialogAction>
@ -958,7 +923,7 @@ export function ConfigSection({
)}
<Heading as="h4">{title}</Heading>
{showOverrideIndicator &&
effectiveLevel === "camera" &&
level === "camera" &&
isOverridden && (
<Badge variant="secondary" className="text-xs">
{t("button.overridden", {
@ -1002,7 +967,7 @@ export function ConfigSection({
<div className="flex items-center gap-3">
<Heading as="h4">{title}</Heading>
{showOverrideIndicator &&
effectiveLevel === "camera" &&
level === "camera" &&
isOverridden && (
<Badge
variant="secondary"

View File

@ -93,7 +93,7 @@ export function AudioLabelSwitchesWidget(props: WidgetProps) {
getDisplayLabel: getAudioLabelDisplayName,
i18nKey: "audioLabels",
listClassName:
"relative max-h-none overflow-visible md:max-h-64 md:overflow-y-auto md:overscroll-contain md:scrollbar-container",
"max-h-none overflow-visible md:max-h-64 md:overflow-y-auto md:overscroll-contain md:scrollbar-container",
enableSearch: true,
}}
/>

View File

@ -94,7 +94,7 @@ export function ObjectLabelSwitchesWidget(props: WidgetProps) {
getDisplayLabel: getObjectLabelDisplayName,
i18nKey: "objectLabels",
listClassName:
"relative max-h-none overflow-visible md:max-h-64 md:overflow-y-auto md:overscroll-contain md:scrollbar-container",
"max-h-none overflow-visible md:max-h-64 md:overflow-y-auto md:overscroll-contain md:scrollbar-container",
}}
/>
);

View File

@ -42,7 +42,7 @@ export function ZoneSwitchesWidget(props: WidgetProps) {
getEntities: getZoneNames,
getDisplayLabel: getZoneDisplayName,
i18nKey: "zoneNames",
listClassName: "relative max-h-64 overflow-y-auto scrollbar-container",
listClassName: "max-h-64 overflow-y-auto scrollbar-container",
}}
/>
);

View File

@ -1,11 +1,11 @@
import { useState, ReactNode, useCallback } from "react";
import { useState, ReactNode } from "react";
import { SearchResult } from "@/types/search";
import { FrigateConfig } from "@/types/frigateConfig";
import { baseUrl } from "@/api/baseUrl";
import { toast } from "sonner";
import axios from "axios";
import { FiMoreVertical } from "react-icons/fi";
import { Button, buttonVariants } from "@/components/ui/button";
import { buttonVariants } from "@/components/ui/button";
import {
ContextMenu,
ContextMenuContent,
@ -32,7 +32,6 @@ import useSWR from "swr";
import { Trans, useTranslation } from "react-i18next";
import BlurredIconButton from "../button/BlurredIconButton";
import { useIsAdmin } from "@/hooks/use-is-admin";
import { useNavigate } from "react-router-dom";
type SearchResultActionsProps = {
searchResult: SearchResult;
@ -53,10 +52,8 @@ export default function SearchResultActions({
isContextMenu = false,
children,
}: SearchResultActionsProps) {
const { t } = useTranslation(["views/explore", "views/replay", "common"]);
const { t } = useTranslation(["views/explore"]);
const isAdmin = useIsAdmin();
const navigate = useNavigate();
const [isStarting, setIsStarting] = useState(false);
const { data: config } = useSWR<FrigateConfig>("config");
@ -87,59 +84,6 @@ export default function SearchResultActions({
});
};
const handleDebugReplay = useCallback(
(event: SearchResult) => {
setIsStarting(true);
axios
.post("debug_replay/start", {
camera: event.camera,
start_time: event.start_time,
end_time: event.end_time,
})
.then((response) => {
if (response.status === 200) {
toast.success(t("dialog.toast.success", { ns: "views/replay" }), {
position: "top-center",
});
navigate("/replay");
}
})
.catch((error) => {
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
if (error.response?.status === 409) {
toast.error(
t("dialog.toast.alreadyActive", { ns: "views/replay" }),
{
position: "top-center",
closeButton: true,
dismissible: false,
action: (
<a href="/replay" target="_blank" rel="noopener noreferrer">
<Button>
{t("dialog.toast.goToReplay", { ns: "views/replay" })}
</Button>
</a>
),
},
);
} else {
toast.error(t("dialog.toast.error", { error: errorMessage }), {
position: "top-center",
});
}
})
.finally(() => {
setIsStarting(false);
});
},
[navigate, t],
);
const MenuItem = isContextMenu ? ContextMenuItem : DropdownMenuItem;
const menuItems = (
@ -205,20 +149,6 @@ export default function SearchResultActions({
<span>{t("itemMenu.addTrigger.label")}</span>
</MenuItem>
)}
{searchResult.has_clip && (
<MenuItem
className="cursor-pointer"
aria-label={t("itemMenu.debugReplay.aria")}
disabled={isStarting}
onSelect={() => {
handleDebugReplay(searchResult);
}}
>
{isStarting
? t("dialog.starting", { ns: "views/replay" })
: t("itemMenu.debugReplay.label")}
</MenuItem>
)}
{isAdmin && (
<MenuItem
aria-label={t("itemMenu.deleteTrackedObject.label")}

View File

@ -1,46 +0,0 @@
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "../ui/dropdown-menu";
import { Button } from "../ui/button";
import { useTranslation } from "react-i18next";
import { FaFilm } from "react-icons/fa6";
type ActionsDropdownProps = {
  // Invoked when the "debug replay" menu entry is selected.
  onDebugReplayClick: () => void;
  // Invoked when the "export" menu entry is selected.
  onExportClick: () => void;
};

/**
 * Small "Actions" dropdown button exposing two entries: export and debug
 * replay. The parent owns the actual behavior via the two callbacks.
 */
export default function ActionsDropdown({
  onDebugReplayClick,
  onExportClick,
}: ActionsDropdownProps) {
  const { t } = useTranslation(["components/dialog", "views/replay", "common"]);

  return (
    <DropdownMenu>
      <DropdownMenuTrigger asChild>
        <Button
          className="flex items-center gap-2"
          aria-label={t("menu.actions", { ns: "common" })}
          size="sm"
        >
          <FaFilm className="size-5 text-secondary-foreground" />
          <div className="text-primary">
            {t("menu.actions", { ns: "common" })}
          </div>
        </Button>
      </DropdownMenuTrigger>
      <DropdownMenuContent align="end">
        <DropdownMenuItem onClick={onExportClick}>
          {t("menu.export", { ns: "common" })}
        </DropdownMenuItem>
        {/* Label reuses the replay view's page title as the entry text. */}
        <DropdownMenuItem onClick={onDebugReplayClick}>
          {t("title", { ns: "views/replay" })}
        </DropdownMenuItem>
      </DropdownMenuContent>
    </DropdownMenu>
  );
}

View File

@ -1,240 +0,0 @@
import { useMemo, useState } from "react";
import { Button } from "../ui/button";
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
import { SelectSeparator } from "../ui/select";
import { TimeRange } from "@/types/timeline";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import { getUTCOffset } from "@/utils/dateUtil";
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
import { FaArrowRight, FaCalendarAlt } from "react-icons/fa";
import { isDesktop, isIOS } from "react-device-detect";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { useTranslation } from "react-i18next";
type CustomTimeSelectorProps = {
  // Most recent available time (unix seconds); used to default the range.
  latestTime: number;
  // Currently selected range (unix seconds), if any.
  range?: TimeRange;
  // Called with the updated range whenever either endpoint changes.
  setRange: (range: TimeRange | undefined) => void;
  // aria-label for the start-time trigger button.
  startLabel: string;
  // aria-label for the end-time trigger button.
  endLabel: string;
};
/**
 * Start/end picker for a custom time range: two popover calendars plus
 * native time inputs. Displayed times are shifted so they render as wall
 * time in the configured ui.timezone rather than the browser's local zone.
 */
export function CustomTimeSelector({
  latestTime,
  range,
  setRange,
  startLabel,
  endLabel,
}: CustomTimeSelectorProps) {
  const { t } = useTranslation(["common"]);
  const { data: config } = useSWR<FrigateConfig>("config");

  // times
  // UTC offset (minutes) of the configured timezone, if one is set.
  const timezoneOffset = useMemo(
    () =>
      config?.ui.timezone
        ? Math.round(getUTCOffset(new Date(), config.ui.timezone))
        : undefined,
    [config?.ui.timezone],
  );
  // UTC offset (minutes) of the browser's local timezone.
  const localTimeOffset = useMemo(
    () =>
      Math.round(
        getUTCOffset(
          new Date(),
          Intl.DateTimeFormat().resolvedOptions().timeZone,
        ),
      ),
    [],
  );

  // Default start is one hour before latestTime. When a configured timezone
  // exists, the epoch value is shifted by the offset delta so formatting
  // with local Date methods below shows config-timezone wall time.
  // NOTE(review): `||` treats after === 0 as unset — presumably fine since
  // epoch 0 is not a realistic range start; confirm with callers.
  const startTime = useMemo(() => {
    let time = range?.after || latestTime - 3600;
    if (timezoneOffset) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);
  // Same shifting logic for the end of the range, defaulting to latestTime.
  const endTime = useMemo(() => {
    let time = range?.before || latestTime;
    if (timezoneOffset) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);

  // Human-readable labels for the two trigger buttons, honoring the
  // configured 12/24-hour preference.
  const formattedStart = useFormattedTimestamp(
    startTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour")
      : t("time.formattedTimestamp.12hour"),
  );
  const formattedEnd = useFormattedTimestamp(
    endTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour")
      : t("time.formattedTimestamp.12hour"),
  );

  // "HH:MM:SS" strings feeding the native <input type="time"> values.
  const startClock = useMemo(() => {
    const date = new Date(startTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [startTime]);
  const endClock = useMemo(() => {
    const date = new Date(endTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [endTime]);

  // calendars
  // Popover open state; each button closes the other popover when opened.
  const [startOpen, setStartOpen] = useState(false);
  const [endOpen, setEndOpen] = useState(false);

  return (
    <div
      className={`mt-3 flex items-center rounded-lg bg-secondary text-secondary-foreground ${isDesktop ? "mx-8 gap-2 px-2" : "pl-2"}`}
    >
      <FaCalendarAlt />
      <div className="flex flex-wrap items-center">
        <Popover
          open={startOpen}
          onOpenChange={(open) => {
            {/* Only handle close here; opening is driven by the button. */}
            if (!open) {
              setStartOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={startLabel}
              variant={startOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                setStartOpen(true);
                setEndOpen(false);
              }}
            >
              {formattedStart}
            </Button>
          </PopoverTrigger>
          <PopoverContent className="flex flex-col items-center" disablePortal>
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(startTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                {/* NOTE(review): +1s offsets the selected midnight —
                    presumably to keep the start strictly inside the day;
                    confirm intent. */}
                setRange({
                  before: endTime,
                  after: day.getTime() / 1000 + 1,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="startTime"
              type="time"
              value={startClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const clock = e.target.value;
                {/* iOS time inputs omit seconds, so pad with "00". */}
                const [hour, minute, second] = isIOS
                  ? [...clock.split(":"), "00"]
                  : clock.split(":");
                const start = new Date(startTime * 1000);
                start.setHours(
                  parseInt(hour),
                  parseInt(minute),
                  parseInt(second ?? 0),
                  0,
                );
                setRange({
                  before: endTime,
                  after: start.getTime() / 1000,
                });
              }}
            />
          </PopoverContent>
        </Popover>
        <FaArrowRight className="size-4 text-primary" />
        <Popover
          open={endOpen}
          onOpenChange={(open) => {
            if (!open) {
              setEndOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={endLabel}
              variant={endOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                setEndOpen(true);
                setStartOpen(false);
              }}
            >
              {formattedEnd}
            </Button>
          </PopoverTrigger>
          <PopoverContent className="flex flex-col items-center" disablePortal>
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(endTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                setRange({
                  after: startTime,
                  before: day.getTime() / 1000,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="endTime"
              type="time"
              value={endClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const clock = e.target.value;
                const [hour, minute, second] = isIOS
                  ? [...clock.split(":"), "00"]
                  : clock.split(":");
                const end = new Date(endTime * 1000);
                end.setHours(
                  parseInt(hour),
                  parseInt(minute),
                  parseInt(second ?? 0),
                  0,
                );
                setRange({
                  before: end.getTime() / 1000,
                  after: startTime,
                });
              }}
            />
          </PopoverContent>
        </Popover>
      </div>
    </div>
  );
}

View File

@ -1,367 +0,0 @@
import { useCallback, useState } from "react";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "../ui/dialog";
import { Label } from "../ui/label";
import { RadioGroup, RadioGroupItem } from "../ui/radio-group";
import { Button } from "../ui/button";
import axios from "axios";
import { toast } from "sonner";
import { isDesktop } from "react-device-detect";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import { useNavigate } from "react-router-dom";
import { useTranslation } from "react-i18next";
import { SelectSeparator } from "../ui/select";
import ActivityIndicator from "../indicators/activity-indicator";
import { LuBug, LuPlay, LuX } from "react-icons/lu";
import { ExportMode } from "@/types/filter";
import { TimeRange } from "@/types/timeline";
import { cn } from "@/lib/utils";
import { CustomTimeSelector } from "./CustomTimeSelector";
// Preset replay durations in minutes, plus "timeline" (pick from the
// timeline scrubber) and "custom" (explicit start/end selection).
const REPLAY_TIME_OPTIONS = ["1", "5", "timeline", "custom"] as const;
type ReplayTimeOption = (typeof REPLAY_TIME_OPTIONS)[number];

type DebugReplayContentProps = {
  // Current playhead position (unix seconds); anchors the "timeline" preset.
  currentTime: number;
  // Most recent available time (unix seconds); anchors minute presets.
  latestTime: number;
  // Selected replay range, if any.
  range?: TimeRange;
  // Which preset radio option is selected.
  selectedOption: ReplayTimeOption;
  // True while the start request is in flight (disables the start button).
  isStarting: boolean;
  onSelectedOptionChange: (option: ReplayTimeOption) => void;
  onStart: () => void;
  onCancel: () => void;
  setRange: (range: TimeRange | undefined) => void;
  setMode: (mode: ExportMode) => void;
};
/**
 * Body of the debug-replay dialog/drawer: a radio group of time presets,
 * an optional custom start/end selector, and cancel/start footer buttons.
 * Selecting "timeline" hands control back to the timeline scrubber instead
 * of starting immediately.
 */
export function DebugReplayContent({
  currentTime,
  latestTime,
  range,
  selectedOption,
  isStarting,
  onSelectedOptionChange,
  onStart,
  onCancel,
  setRange,
  setMode,
}: DebugReplayContentProps) {
  const { t } = useTranslation(["views/replay"]);

  return (
    <div className="w-full">
      {/* Desktop dialog shows its own header; mobile drawer does not. */}
      {isDesktop && (
        <>
          <DialogHeader>
            <DialogTitle>{t("dialog.title")}</DialogTitle>
            <DialogDescription>{t("dialog.description")}</DialogDescription>
          </DialogHeader>
          <SelectSeparator className="my-4 bg-secondary" />
        </>
      )}

      {/* Time range */}
      <div className="mt-4 flex flex-col gap-2">
        <RadioGroup
          className="mt-2 flex flex-col gap-4"
          value={selectedOption}
          onValueChange={(value) =>
            onSelectedOptionChange(value as ReplayTimeOption)
          }
        >
          {REPLAY_TIME_OPTIONS.map((opt) => (
            <div key={opt} className="flex items-center gap-2">
              <RadioGroupItem
                className={
                  opt === selectedOption
                    ? "bg-selected from-selected/50 to-selected/90 text-selected"
                    : "bg-secondary from-secondary/50 to-secondary/90 text-secondary"
                }
                id={`replay-${opt}`}
                value={opt}
              />
              {/* Numeric presets use the "<n>m" translation keys. */}
              <Label className="cursor-pointer" htmlFor={`replay-${opt}`}>
                {opt === "custom"
                  ? t("dialog.preset.custom")
                  : opt === "timeline"
                    ? t("dialog.preset.timeline")
                    : t(`dialog.preset.${opt}m`)}
              </Label>
            </div>
          ))}
        </RadioGroup>
      </div>

      {/* Custom time inputs */}
      {selectedOption === "custom" && (
        <CustomTimeSelector
          latestTime={latestTime}
          range={range}
          setRange={setRange}
          startLabel={t("dialog.startLabel")}
          endLabel={t("dialog.endLabel")}
        />
      )}

      {isDesktop && <SelectSeparator className="my-4 bg-secondary" />}
      <DialogFooter
        className={isDesktop ? "" : "mt-3 flex flex-col-reverse gap-4"}
      >
        {/* NOTE(review): cancel label pulls from the "common" namespace,
            which is not listed in useTranslation above — presumably loaded
            globally; verify. */}
        <div
          className={`cursor-pointer p-2 text-center ${isDesktop ? "" : "w-full"}`}
          onClick={onCancel}
        >
          {t("button.cancel", { ns: "common" })}
        </div>
        <Button
          className={isDesktop ? "" : "w-full"}
          variant="select"
          size="sm"
          disabled={isStarting}
          onClick={() => {
            {/* "timeline": seed a +/-30s range around the playhead and
                switch to timeline-selection mode instead of starting. */}
            if (selectedOption === "timeline") {
              setRange({
                after: currentTime - 30,
                before: currentTime + 30,
              });
              setMode("timeline");
            } else {
              onStart();
            }
          }}
        >
          {isStarting ? <ActivityIndicator className="mr-2" /> : null}
          {isStarting
            ? t("dialog.starting")
            : selectedOption === "timeline"
              ? t("dialog.selectFromTimeline")
              : t("dialog.startButton")}
        </Button>
      </DialogFooter>
    </div>
  );
}
type DebugReplayDialogProps = {
  // Camera whose footage will be replayed.
  camera: string;
  // Current playhead position (unix seconds).
  currentTime: number;
  // Most recent available time (unix seconds).
  latestTime: number;
  // Selected replay range, if any.
  range?: TimeRange;
  // Dialog state machine: "none" | "select" | "timeline" (ExportMode reuse).
  mode: ExportMode;
  setRange: (range: TimeRange | undefined) => void;
  setMode: (mode: ExportMode) => void;
};
/**
 * Debug-replay entry point: renders a dialog (desktop) or drawer (mobile)
 * for choosing a time range, posts to `debug_replay/start`, and navigates
 * to /replay on success. Also renders the floating save/cancel overlay used
 * while selecting a range from the timeline ("timeline" mode).
 */
export default function DebugReplayDialog({
  camera,
  currentTime,
  latestTime,
  range,
  mode,
  setRange,
  setMode,
}: DebugReplayDialogProps) {
  const { t } = useTranslation(["views/replay"]);
  const navigate = useNavigate();
  const [selectedOption, setSelectedOption] = useState<ReplayTimeOption>("1");
  const [isStarting, setIsStarting] = useState(false);

  // Minute presets immediately set the range to [latest - n min, latest];
  // "custom" and "timeline" defer range selection to other UI.
  const handleTimeOptionChange = useCallback(
    (option: ReplayTimeOption) => {
      setSelectedOption(option);
      if (option === "custom" || option === "timeline") {
        return;
      }
      const minutes = parseInt(option, 10);
      const end = latestTime;
      setRange({ after: end - minutes * 60, before: end });
    },
    [latestTime, setRange],
  );

  // Validate the range, then kick off the replay via the HTTP API.
  // 409 means a replay is already active; other failures show the message
  // from the response body when available.
  const handleStart = useCallback(() => {
    if (!range || range.before <= range.after) {
      toast.error(
        t("dialog.toast.error", { error: "End time must be after start time" }),
        { position: "top-center" },
      );
      return;
    }
    setIsStarting(true);
    axios
      .post("debug_replay/start", {
        camera: camera,
        start_time: range.after,
        end_time: range.before,
      })
      .then((response) => {
        if (response.status === 200) {
          toast.success(t("dialog.toast.success"), {
            position: "top-center",
          });
          // Reset dialog state before leaving for the replay view.
          setMode("none");
          setRange(undefined);
          navigate("/replay");
        }
      })
      .catch((error) => {
        const errorMessage =
          error.response?.data?.message ||
          error.response?.data?.detail ||
          "Unknown error";
        if (error.response?.status === 409) {
          // Replay already running: offer a shortcut to the replay page.
          toast.error(t("dialog.toast.alreadyActive"), {
            position: "top-center",
            closeButton: true,
            dismissible: false,
            action: (
              <a href="/replay" target="_blank" rel="noopener noreferrer">
                <Button>{t("dialog.toast.goToReplay")}</Button>
              </a>
            ),
          });
        } else {
          toast.error(t("dialog.toast.error", { error: errorMessage }), {
            position: "top-center",
          });
        }
      })
      .finally(() => {
        setIsStarting(false);
      });
  }, [camera, range, navigate, setMode, setRange, t]);

  // Close the dialog/overlay and discard any selected range.
  const handleCancel = useCallback(() => {
    setMode("none");
    setRange(undefined);
  }, [setMode, setRange]);

  // Desktop uses a modal dialog; mobile uses a bottom drawer.
  const Overlay = isDesktop ? Dialog : Drawer;
  const Trigger = isDesktop ? DialogTrigger : DrawerTrigger;
  const Content = isDesktop ? DialogContent : DrawerContent;

  return (
    <>
      {/* Floating save/cancel bar shown while picking from the timeline. */}
      <SaveDebugReplayOverlay
        className="pointer-events-none absolute left-1/2 top-8 z-50 -translate-x-1/2"
        show={mode == "timeline"}
        isStarting={isStarting}
        onSave={handleStart}
        onCancel={handleCancel}
      />
      <Overlay
        open={mode == "select"}
        onOpenChange={(open) => {
          if (!open) {
            setMode("none");
          }
        }}
      >
        {/* Mobile-only trigger; desktop opens the dialog externally by
            setting mode to "select". */}
        {!isDesktop && (
          <Trigger asChild>
            <Button
              className="flex items-center gap-2"
              aria-label={t("title")}
              size="sm"
              onClick={() => {
                {/* Default to the last 1 minute when opening. */}
                const end = latestTime;
                setRange({ after: end - 60, before: end });
                setSelectedOption("1");
                setMode("select");
              }}
            >
              <LuBug className="size-5 rounded-md bg-secondary-foreground fill-secondary stroke-secondary p-1" />
              {/* NOTE(review): this branch is inside a !isDesktop guard, so
                  isDesktop && ... can never render — dead code or the label
                  was meant to show on mobile; verify intent. */}
              {isDesktop && <div className="text-primary">{t("title")}</div>}
            </Button>
          </Trigger>
        )}
        <Content
          className={
            isDesktop
              ? "max-h-[90dvh] w-auto max-w-2xl overflow-visible sm:rounded-lg md:rounded-2xl"
              : "max-h-[75dvh] overflow-y-auto rounded-lg px-4 pb-4 md:rounded-2xl"
          }
        >
          <DebugReplayContent
            currentTime={currentTime}
            latestTime={latestTime}
            range={range}
            selectedOption={selectedOption}
            isStarting={isStarting}
            onSelectedOptionChange={handleTimeOptionChange}
            onStart={handleStart}
            onCancel={handleCancel}
            setRange={setRange}
            setMode={setMode}
          />
        </Content>
      </Overlay>
    </>
  );
}
type SaveDebugReplayOverlayProps = {
  // Positioning classes supplied by the parent (overlay is absolutely placed).
  className: string;
  // Whether the overlay is visible (animated in) or hidden.
  show: boolean;
  // True while the start request is in flight; disables both buttons.
  isStarting: boolean;
  onSave: () => void;
  onCancel: () => void;
};
/**
 * Floating cancel/start button pair shown over the timeline while the user
 * is selecting a debug-replay range in "timeline" mode. Hidden (but still
 * mounted) when `show` is false so the slide-in animation can replay.
 */
export function SaveDebugReplayOverlay({
  className,
  show,
  isStarting,
  onSave,
  onCancel,
}: SaveDebugReplayOverlayProps) {
  const { t } = useTranslation(["views/replay"]);

  return (
    <div className={className}>
      {/* Outer wrapper is pointer-events-none; re-enable here so the
          buttons remain clickable. */}
      <div
        className={cn(
          "pointer-events-auto flex items-center justify-center gap-2 rounded-lg px-2",
          show ? "duration-500 animate-in slide-in-from-top" : "invisible",
          "mx-auto mt-5 text-center",
        )}
      >
        <Button
          className="flex items-center gap-1 text-primary"
          aria-label={t("button.cancel", { ns: "common" })}
          size="sm"
          disabled={isStarting}
          onClick={onCancel}
        >
          <LuX />
          {t("button.cancel", { ns: "common" })}
        </Button>
        <Button
          className="flex items-center gap-1"
          aria-label={t("dialog.startButton")}
          variant="select"
          size="sm"
          disabled={isStarting}
          onClick={onSave}
        >
          {/* Swap the play icon for a spinner while the request runs. */}
          {isStarting ? <ActivityIndicator className="size-4" /> : <LuPlay />}
          {isStarting ? t("dialog.starting") : t("dialog.startButton")}
        </Button>
      </div>
    </div>
  );
}

View File

@ -1,4 +1,4 @@
import { useCallback, useState } from "react";
import { useCallback, useMemo, useState } from "react";
import {
Dialog,
DialogContent,
@ -12,12 +12,16 @@ import { Label } from "../ui/label";
import { RadioGroup, RadioGroupItem } from "../ui/radio-group";
import { Button } from "../ui/button";
import { ExportMode } from "@/types/filter";
import { FaArrowDown } from "react-icons/fa";
import { FaArrowDown, FaArrowRight, FaCalendarAlt } from "react-icons/fa";
import axios from "axios";
import { toast } from "sonner";
import { Input } from "../ui/input";
import { TimeRange } from "@/types/timeline";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
import {
Select,
SelectContent,
@ -26,15 +30,15 @@ import {
SelectTrigger,
SelectValue,
} from "../ui/select";
import { isDesktop, isMobile } from "react-device-detect";
import { isDesktop, isIOS, isMobile } from "react-device-detect";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import SaveExportOverlay from "./SaveExportOverlay";
import { getUTCOffset } from "@/utils/dateUtil";
import { baseUrl } from "@/api/baseUrl";
import { cn } from "@/lib/utils";
import { GenericVideoPlayer } from "../player/GenericVideoPlayer";
import { useTranslation } from "react-i18next";
import { ExportCase } from "@/types/export";
import { CustomTimeSelector } from "./CustomTimeSelector";
const EXPORT_OPTIONS = [
"1",
@ -163,33 +167,31 @@ export default function ExportDialog({
}
}}
>
{!isDesktop && (
<Trigger asChild>
<Button
className="flex items-center gap-2"
aria-label={t("menu.export", { ns: "common" })}
size="sm"
onClick={() => {
const now = new Date(latestTime * 1000);
let start = 0;
now.setHours(now.getHours() - 1);
start = now.getTime() / 1000;
setRange({
before: latestTime,
after: start,
});
setMode("select");
}}
>
<FaArrowDown className="rounded-md bg-secondary-foreground fill-secondary p-1" />
{isDesktop && (
<div className="text-primary">
{t("menu.export", { ns: "common" })}
</div>
)}
</Button>
</Trigger>
)}
<Trigger asChild>
<Button
className="flex items-center gap-2"
aria-label={t("menu.export", { ns: "common" })}
size="sm"
onClick={() => {
const now = new Date(latestTime * 1000);
let start = 0;
now.setHours(now.getHours() - 1);
start = now.getTime() / 1000;
setRange({
before: latestTime,
after: start,
});
setMode("select");
}}
>
<FaArrowDown className="rounded-md bg-secondary-foreground fill-secondary p-1" />
{isDesktop && (
<div className="text-primary">
{t("menu.export", { ns: "common" })}
</div>
)}
</Button>
</Trigger>
<Content
className={
isDesktop
@ -330,8 +332,6 @@ export function ExportContent({
latestTime={latestTime}
range={range}
setRange={setRange}
startLabel={t("export.time.start.title")}
endLabel={t("export.time.end.title")}
/>
)}
<Input
@ -414,6 +414,234 @@ export function ExportContent({
);
}
type CustomTimeSelectorProps = {
  latestTime: number;
  range?: TimeRange;
  setRange: (range: TimeRange | undefined) => void;
};

/**
 * Start/end range picker for the export dialog. Each side renders a
 * button showing the formatted timestamp that opens a popover with a
 * timezone-aware calendar and a time-of-day input. The chosen range is
 * pushed up via `setRange` as epoch seconds.
 *
 * Display times are shifted by the difference between the
 * Frigate-configured timezone offset and the browser's local offset so
 * the controls render in the configured timezone.
 */
function CustomTimeSelector({
  latestTime,
  range,
  setRange,
}: CustomTimeSelectorProps) {
  const { t } = useTranslation(["components/dialog"]);
  const { data: config } = useSWR<FrigateConfig>("config");

  // times

  // UTC offset (minutes) of the configured timezone, or undefined when
  // no timezone is configured (fall back to browser-local display).
  const timezoneOffset = useMemo(
    () =>
      config?.ui.timezone
        ? Math.round(getUTCOffset(new Date(), config.ui.timezone))
        : undefined,
    [config?.ui.timezone],
  );
  // UTC offset (minutes) of the browser's local timezone.
  const localTimeOffset = useMemo(
    () =>
      Math.round(
        getUTCOffset(
          new Date(),
          Intl.DateTimeFormat().resolvedOptions().timeZone,
        ),
      ),
    [],
  );

  const startTime = useMemo(() => {
    let time = range?.after ?? latestTime - 3600;
    // Compare against undefined explicitly: a UTC-configured instance
    // has offset 0, which is falsy and previously skipped the shift.
    if (timezoneOffset !== undefined) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);
  const endTime = useMemo(() => {
    let time = range?.before ?? latestTime;
    if (timezoneOffset !== undefined) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);

  const formattedStart = useFormattedTimestamp(
    startTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour", { ns: "common" })
      : t("time.formattedTimestamp.12hour", { ns: "common" }),
  );
  const formattedEnd = useFormattedTimestamp(
    endTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour", { ns: "common" })
      : t("time.formattedTimestamp.12hour", { ns: "common" }),
  );

  // "HH:MM:SS" values feeding the <input type="time"> controls.
  const startClock = useMemo(() => {
    const date = new Date(startTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [startTime]);
  const endClock = useMemo(() => {
    const date = new Date(endTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [endTime]);

  // Parse a time-input value into [hours, minutes, seconds]. iOS time
  // inputs only report "HH:MM", so seconds are padded with "00".
  const parseClock = (clock: string): [number, number, number] => {
    const [hour, minute, second] = isIOS
      ? [...clock.split(":"), "00"]
      : clock.split(":");
    return [
      Number.parseInt(hour, 10),
      Number.parseInt(minute, 10),
      Number.parseInt(second ?? "0", 10),
    ];
  };

  // calendars
  const [startOpen, setStartOpen] = useState(false);
  const [endOpen, setEndOpen] = useState(false);

  return (
    <div
      className={`mt-3 flex items-center rounded-lg bg-secondary text-secondary-foreground ${isDesktop ? "mx-8 gap-2 px-2" : "pl-2"}`}
    >
      <FaCalendarAlt />
      <div className="flex flex-wrap items-center">
        <Popover
          modal={false}
          open={startOpen}
          onOpenChange={(open) => {
            if (!open) {
              setStartOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={t("export.time.start.title")}
              variant={startOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                // only one of the two popovers may be open at a time
                setStartOpen(true);
                setEndOpen(false);
              }}
            >
              {formattedStart}
            </Button>
          </PopoverTrigger>
          <PopoverContent
            disablePortal={isDesktop}
            className="flex flex-col items-center"
          >
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(startTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                // +1s keeps the start just inside the selected day
                setRange({
                  before: endTime,
                  after: day.getTime() / 1000 + 1,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="startTime"
              type="time"
              value={startClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const [hour, minute, second] = parseClock(e.target.value);
                const start = new Date(startTime * 1000);
                start.setHours(hour, minute, second, 0);
                setRange({
                  before: endTime,
                  after: start.getTime() / 1000,
                });
              }}
            />
          </PopoverContent>
        </Popover>
        <FaArrowRight className="size-4 text-primary" />
        <Popover
          modal={false}
          open={endOpen}
          onOpenChange={(open) => {
            if (!open) {
              setEndOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={t("export.time.end.title")}
              variant={endOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                setEndOpen(true);
                setStartOpen(false);
              }}
            >
              {formattedEnd}
            </Button>
          </PopoverTrigger>
          <PopoverContent
            disablePortal={isDesktop}
            className="flex flex-col items-center"
          >
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(endTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                setRange({
                  after: startTime,
                  before: day.getTime() / 1000,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="endTime"
              type="time"
              value={endClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const [hour, minute, second] = parseClock(e.target.value);
                const end = new Date(endTime * 1000);
                end.setHours(hour, minute, second, 0);
                setRange({
                  before: end.getTime() / 1000,
                  after: startTime,
                });
              }}
            />
          </PopoverContent>
        </Popover>
      </div>
    </div>
  );
}
type ExportPreviewDialogProps = {
camera: string;
range?: TimeRange;

View File

@ -2,13 +2,8 @@ import { useCallback, useState } from "react";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import { Button } from "../ui/button";
import { FaArrowDown, FaCalendarAlt, FaCog, FaFilter } from "react-icons/fa";
import { LuBug } from "react-icons/lu";
import { TimeRange } from "@/types/timeline";
import { ExportContent, ExportPreviewDialog } from "./ExportDialog";
import {
DebugReplayContent,
SaveDebugReplayOverlay,
} from "./DebugReplayDialog";
import { ExportMode, GeneralFilter } from "@/types/filter";
import ReviewActivityCalendar from "./ReviewActivityCalendar";
import { SelectSeparator } from "../ui/select";
@ -21,32 +16,19 @@ import {
import { getEndOfDayTimestamp } from "@/utils/dateUtil";
import { GeneralFilterContent } from "../filter/ReviewFilterGroup";
import { toast } from "sonner";
import axios, { AxiosError } from "axios";
import axios from "axios";
import SaveExportOverlay from "./SaveExportOverlay";
import { isIOS, isMobile } from "react-device-detect";
import { useTranslation } from "react-i18next";
import { useNavigate } from "react-router-dom";
type DrawerMode =
| "none"
| "select"
| "export"
| "calendar"
| "filter"
| "debug-replay";
type DrawerMode = "none" | "select" | "export" | "calendar" | "filter";
const DRAWER_FEATURES = [
"export",
"calendar",
"filter",
"debug-replay",
] as const;
const DRAWER_FEATURES = ["export", "calendar", "filter"] as const;
export type DrawerFeatures = (typeof DRAWER_FEATURES)[number];
const DEFAULT_DRAWER_FEATURES: DrawerFeatures[] = [
"export",
"calendar",
"filter",
"debug-replay",
];
type MobileReviewSettingsDrawerProps = {
@ -63,10 +45,6 @@ type MobileReviewSettingsDrawerProps = {
recordingsSummary?: RecordingsSummary;
allLabels: string[];
allZones: string[];
debugReplayMode?: ExportMode;
debugReplayRange?: TimeRange;
setDebugReplayMode?: (mode: ExportMode) => void;
setDebugReplayRange?: (range: TimeRange | undefined) => void;
onUpdateFilter: (filter: ReviewFilter) => void;
setRange: (range: TimeRange | undefined) => void;
setMode: (mode: ExportMode) => void;
@ -86,26 +64,13 @@ export default function MobileReviewSettingsDrawer({
recordingsSummary,
allLabels,
allZones,
debugReplayMode = "none",
debugReplayRange,
setDebugReplayMode = () => {},
setDebugReplayRange = () => {},
onUpdateFilter,
setRange,
setMode,
setShowExportPreview,
}: MobileReviewSettingsDrawerProps) {
const { t } = useTranslation([
"views/recording",
"components/dialog",
"views/replay",
]);
const navigate = useNavigate();
const { t } = useTranslation(["views/recording", "components/dialog"]);
const [drawerMode, setDrawerMode] = useState<DrawerMode>("none");
const [selectedReplayOption, setSelectedReplayOption] = useState<
"1" | "5" | "custom" | "timeline"
>("1");
const [isDebugReplayStarting, setIsDebugReplayStarting] = useState(false);
// exports
@ -175,76 +140,6 @@ export default function MobileReviewSettingsDrawer({
});
}, [camera, name, range, selectedCaseId, setRange, setName, setMode, t]);
const onStartDebugReplay = useCallback(async () => {
if (
!debugReplayRange ||
debugReplayRange.before <= debugReplayRange.after
) {
toast.error(
t("dialog.toast.error", {
error: "End time must be after start time",
ns: "views/replay",
}),
{ position: "top-center" },
);
return;
}
setIsDebugReplayStarting(true);
try {
const response = await axios.post("debug_replay/start", {
camera: camera,
start_time: debugReplayRange.after,
end_time: debugReplayRange.before,
});
if (response.status === 200) {
toast.success(t("dialog.toast.success", { ns: "views/replay" }), {
position: "top-center",
});
setDebugReplayMode("none");
setDebugReplayRange(undefined);
setDrawerMode("none");
navigate("/replay");
}
} catch (error) {
const axiosError = error as AxiosError<{
message?: string;
detail?: string;
}>;
const errorMessage =
axiosError.response?.data?.message ||
axiosError.response?.data?.detail ||
"Unknown error";
if (axiosError.response?.status === 409) {
toast.error(t("dialog.toast.alreadyActive", { ns: "views/replay" }), {
position: "top-center",
});
} else {
toast.error(
t("dialog.toast.error", {
error: errorMessage,
ns: "views/replay",
}),
{
position: "top-center",
},
);
}
} finally {
setIsDebugReplayStarting(false);
}
}, [
camera,
debugReplayRange,
navigate,
setDebugReplayMode,
setDebugReplayRange,
t,
]);
// filters
const [currentFilter, setCurrentFilter] = useState<GeneralFilter>({
@ -301,26 +196,6 @@ export default function MobileReviewSettingsDrawer({
{t("filter")}
</Button>
)}
{features.includes("debug-replay") && (
<Button
className="flex w-full items-center justify-center gap-2"
aria-label={t("title", { ns: "views/replay" })}
onClick={() => {
const now = new Date(latestTime * 1000);
now.setHours(now.getHours() - 1);
setDebugReplayRange({
after: now.getTime() / 1000,
before: latestTime,
});
setSelectedReplayOption("1");
setDrawerMode("debug-replay");
setDebugReplayMode("select");
}}
>
<LuBug className="size-5 rounded-md bg-secondary-foreground fill-secondary stroke-secondary p-1" />
{t("title", { ns: "views/replay" })}
</Button>
)}
</div>
);
} else if (drawerMode == "export") {
@ -436,47 +311,6 @@ export default function MobileReviewSettingsDrawer({
/>
</div>
);
} else if (drawerMode == "debug-replay") {
const handleTimeOptionChange = (
option: "1" | "5" | "custom" | "timeline",
) => {
setSelectedReplayOption(option);
if (option === "custom" || option === "timeline") {
return;
}
const hours = parseInt(option);
const end = latestTime;
const now = new Date(end * 1000);
now.setHours(now.getHours() - hours);
setDebugReplayRange({ after: now.getTime() / 1000, before: end });
};
content = (
<DebugReplayContent
currentTime={currentTime}
latestTime={latestTime}
range={debugReplayRange}
selectedOption={selectedReplayOption}
isStarting={isDebugReplayStarting}
onSelectedOptionChange={handleTimeOptionChange}
onStart={onStartDebugReplay}
onCancel={() => {
setDebugReplayMode("none");
setDebugReplayRange(undefined);
setDrawerMode("select");
}}
setRange={setDebugReplayRange}
setMode={(mode) => {
setDebugReplayMode(mode);
if (mode == "timeline") {
setDrawerMode("none");
}
}}
/>
);
}
return (
@ -488,16 +322,6 @@ export default function MobileReviewSettingsDrawer({
onCancel={() => setMode("none")}
onPreview={() => setShowExportPreview(true)}
/>
<SaveDebugReplayOverlay
className="pointer-events-none absolute left-1/2 top-8 z-50 -translate-x-1/2"
show={debugReplayRange != undefined && debugReplayMode == "timeline"}
isStarting={isDebugReplayStarting}
onSave={onStartDebugReplay}
onCancel={() => {
setDebugReplayMode("none");
setDebugReplayRange(undefined);
}}
/>
<ExportPreviewDialog
camera={camera}
range={range}
@ -530,9 +354,7 @@ export default function MobileReviewSettingsDrawer({
/>
</Button>
</DrawerTrigger>
<DrawerContent
className={`mx-1 flex max-h-[80dvh] flex-col items-center gap-2 rounded-t-2xl px-4 pb-4 ${drawerMode == "export" || drawerMode == "debug-replay" ? "overflow-visible" : "overflow-hidden"}`}
>
<DrawerContent className="mx-1 flex max-h-[80dvh] flex-col items-center gap-2 overflow-hidden rounded-t-2xl px-4 pb-4">
{content}
</DrawerContent>
</Drawer>

View File

@ -1,7 +1,6 @@
import useSWR from "swr";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useResizeObserver } from "@/hooks/resize-observer";
import { useFullscreen } from "@/hooks/use-fullscreen";
import { Event } from "@/types/event";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import { TrackingDetailsSequence } from "@/types/timeline";
@ -244,8 +243,6 @@ export function TrackingDetails({
}, [manualOverride, currentTime, annotationOffset]);
const containerRef = useRef<HTMLDivElement | null>(null);
const { fullscreen, toggleFullscreen, supportsFullScreen } =
useFullscreen(containerRef);
const timelineContainerRef = useRef<HTMLDivElement | null>(null);
const rowRefs = useRef<(HTMLDivElement | null)[]>([]);
const [_selectedZone, setSelectedZone] = useState("");
@ -562,15 +559,14 @@ export function TrackingDetails({
visible={true}
currentSource={videoSource}
hotKeys={false}
supportsFullscreen={supportsFullScreen}
fullscreen={fullscreen}
supportsFullscreen={false}
fullscreen={false}
frigateControls={true}
onTimeUpdate={handleTimeUpdate}
onSeekToTime={handleSeekToTime}
onUploadFrame={onUploadFrameToPlus}
onPlaying={() => setIsVideoLoading(false)}
setFullResolution={setFullResolution}
toggleFullscreen={toggleFullscreen}
isDetailMode={true}
camera={event.camera}
currentTimeOverride={currentTime}

View File

@ -12,11 +12,8 @@ import { useNavigate } from "react-router-dom";
import { useTranslation } from "react-i18next";
import { Event } from "@/types/event";
import { FrigateConfig } from "@/types/frigateConfig";
import { useCallback, useState } from "react";
import { useState } from "react";
import { useIsAdmin } from "@/hooks/use-is-admin";
import axios from "axios";
import { toast } from "sonner";
import { Button } from "../ui/button";
type EventMenuProps = {
event: Event;
@ -37,10 +34,9 @@ export default function EventMenu({
}: EventMenuProps) {
const apiHost = useApiHost();
const navigate = useNavigate();
const { t } = useTranslation(["views/explore", "views/replay"]);
const { t } = useTranslation("views/explore");
const [isOpen, setIsOpen] = useState(false);
const isAdmin = useIsAdmin();
const [isStarting, setIsStarting] = useState(false);
const handleObjectSelect = () => {
if (isSelected) {
@ -50,59 +46,6 @@ export default function EventMenu({
}
};
const handleDebugReplay = useCallback(
(event: Event) => {
setIsStarting(true);
axios
.post("debug_replay/start", {
camera: event.camera,
start_time: event.start_time,
end_time: event.end_time,
})
.then((response) => {
if (response.status === 200) {
toast.success(t("dialog.toast.success", { ns: "views/replay" }), {
position: "top-center",
});
navigate("/replay");
}
})
.catch((error) => {
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
if (error.response?.status === 409) {
toast.error(
t("dialog.toast.alreadyActive", { ns: "views/replay" }),
{
position: "top-center",
closeButton: true,
dismissible: false,
action: (
<a href="/replay" target="_blank" rel="noopener noreferrer">
<Button>
{t("dialog.toast.goToReplay", { ns: "views/replay" })}
</Button>
</a>
),
},
);
} else {
toast.error(t("dialog.toast.error", { error: errorMessage }), {
position: "top-center",
});
}
})
.finally(() => {
setIsStarting(false);
});
},
[navigate, t],
);
return (
<>
<span tabIndex={0} className="sr-only" />
@ -174,19 +117,6 @@ export default function EventMenu({
{t("itemMenu.findSimilar.label")}
</DropdownMenuItem>
)}
{event.has_clip && (
<DropdownMenuItem
className="cursor-pointer"
disabled={isStarting}
onSelect={() => {
handleDebugReplay(event);
}}
>
{isStarting
? t("dialog.starting", { ns: "views/replay" })
: t("itemMenu.debugReplay.label")}
</DropdownMenuItem>
)}
</DropdownMenuContent>
</DropdownMenuPortal>
</DropdownMenu>

View File

@ -1,608 +0,0 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import useSWR from "swr";
import { WsFeedMessage } from "@/api/ws";
import { useWsMessageBuffer } from "@/hooks/use-ws-message-buffer";
import WsMessageRow from "./WsMessageRow";
import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge";
import { FaEraser, FaFilter, FaPause, FaPlay, FaVideo } from "react-icons/fa";
import { FrigateConfig } from "@/types/frigateConfig";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { Drawer, DrawerContent, DrawerTrigger } from "@/components/ui/drawer";
import FilterSwitch from "@/components/filter/FilterSwitch";
import { isMobile } from "react-device-detect";
import { isReplayCamera } from "@/utils/cameraUtil";
type TopicCategory =
  | "events"
  | "camera_activity"
  | "system"
  | "reviews"
  | "classification"
  | "face_recognition"
  | "lpr";

// Display order of the topic filter switches.
const ALL_TOPIC_CATEGORIES: TopicCategory[] = [
  "events",
  "reviews",
  "classification",
  "face_recognition",
  "lpr",
  "camera_activity",
  "system",
];

// Exact topic names belonging to each filter category.
const PRESET_TOPICS: Record<TopicCategory, Set<string>> = {
  events: new Set(["events", "triggers"]),
  reviews: new Set(["reviews"]),
  classification: new Set(["tracked_object_update"]),
  face_recognition: new Set(["tracked_object_update"]),
  lpr: new Set(["tracked_object_update"]),
  camera_activity: new Set(["camera_activity", "audio_detections"]),
  system: new Set([
    "stats",
    "model_state",
    "job_state",
    "embeddings_reindex_progress",
    "audio_transcription_state",
    "birdseye_layout",
  ]),
};

// tracked_object_update payloads carry a "type" field that decides
// which category the message belongs to.
const TRACKED_UPDATE_TYPE_MAP: Record<string, TopicCategory> = {
  classification: "classification",
  face: "face_recognition",
  lpr: "lpr",
};

// Suffix patterns of per-camera topics that count as camera activity.
const CAMERA_ACTIVITY_TOPIC_PATTERNS = [
  "/motion",
  "/audio",
  "/detect",
  "/recordings",
  "/enabled",
  "/snapshots",
  "/ptz",
];

/**
 * Decide whether a websocket message passes the active topic filter.
 * `categories === undefined` means "no filter": everything matches.
 */
function matchesCategories(
  msg: WsFeedMessage,
  categories: TopicCategory[] | undefined,
): boolean {
  if (categories === undefined) {
    return true;
  }
  const { topic, payload } = msg;

  if (topic === "tracked_object_update") {
    // The payload may arrive as a JSON string or an already-parsed object.
    let parsed: unknown = payload;
    if (typeof parsed === "string") {
      try {
        parsed = JSON.parse(parsed);
      } catch {
        // not valid JSON; treated as a generic "events" update below
      }
    }
    const subType =
      parsed && typeof parsed === "object" && "type" in parsed
        ? (parsed as { type: string }).type
        : undefined;
    // Other update types (e.g. "description") count as "events".
    const mapped: TopicCategory =
      subType && subType in TRACKED_UPDATE_TYPE_MAP
        ? TRACKED_UPDATE_TYPE_MAP[subType]
        : "events";
    return categories.includes(mapped);
  }

  return categories.some(
    (category) =>
      PRESET_TOPICS[category].has(topic) ||
      (category === "camera_activity" &&
        CAMERA_ACTIVITY_TOPIC_PATTERNS.some((pattern) =>
          topic.includes(pattern),
        )),
  );
}
type WsMessageFeedProps = {
  // maximum number of messages retained in the rolling buffer
  maxSize?: number;
  // camera pre-selected in the camera filter (user may change it)
  defaultCamera?: string;
  // when set, the feed is pinned to this camera and the camera filter is hidden
  lockedCamera?: string;
  // whether each row shows which camera the message relates to
  showCameraBadge?: boolean;
};

/**
 * Live feed of websocket messages with topic/camera filtering,
 * pause/resume, clear, and sticky auto-scroll (the view follows the
 * newest message until the user scrolls away from the bottom).
 */
export default function WsMessageFeed({
  maxSize = 500,
  defaultCamera,
  lockedCamera,
  showCameraBadge = true,
}: WsMessageFeedProps) {
  const { t } = useTranslation(["views/system"]);
  const [paused, setPaused] = useState(false);
  // undefined = all topics
  const [selectedTopics, setSelectedTopics] = useState<
    TopicCategory[] | undefined
  >(undefined);
  // undefined = all cameras
  const [selectedCameras, setSelectedCameras] = useState<string[] | undefined>(
    () => {
      if (lockedCamera) return [lockedCamera];
      if (defaultCamera) return [defaultCamera];
      return undefined;
    },
  );
  // camera filtering is applied at the buffer level; topic filtering below
  const { messages, clear } = useWsMessageBuffer(maxSize, paused, {
    cameraFilter: selectedCameras,
  });
  const { data: config } = useSWR<FrigateConfig>("config", {
    revalidateOnFocus: false,
  });
  // cameras offered in the filter: configured, enabled, non-replay
  const availableCameras = useMemo(() => {
    if (!config?.cameras) return [];
    return Object.keys(config.cameras)
      .filter((name) => {
        const cam = config.cameras[name];
        return !isReplayCamera(name) && cam.enabled_in_config;
      })
      .sort();
  }, [config]);
  const filteredMessages = useMemo(() => {
    return messages.filter((msg: WsFeedMessage) => {
      if (!matchesCategories(msg, selectedTopics)) return false;
      return true;
    });
  }, [messages, selectedTopics]);

  // Auto-scroll logic
  const scrollContainerRef = useRef<HTMLDivElement>(null);
  // true while the view is pinned to the bottom of the feed
  const autoScrollRef = useRef(true);
  const handleScroll = useCallback(() => {
    const el = scrollContainerRef.current;
    if (!el) return;
    // within 40px of the bottom still counts as "at the bottom"
    const atBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
    autoScrollRef.current = atBottom;
  }, []);
  useEffect(() => {
    const el = scrollContainerRef.current;
    if (!el || !autoScrollRef.current) return;
    // jump to the newest message whenever the list grows (if pinned)
    el.scrollTop = el.scrollHeight;
  }, [filteredMessages.length]);

  return (
    <div className="flex size-full flex-col">
      {/* Toolbar */}
      <div className="flex flex-row flex-wrap items-center justify-between gap-2 border-b border-secondary p-2">
        <div className="flex flex-row flex-wrap items-center gap-1">
          <TopicFilterButton
            selectedTopics={selectedTopics}
            updateTopicFilter={setSelectedTopics}
          />
          {!lockedCamera && (
            <WsCamerasFilterButton
              allCameras={availableCameras}
              selectedCameras={selectedCameras}
              updateCameraFilter={setSelectedCameras}
            />
          )}
        </div>
        <div className="flex flex-row items-center gap-2">
          <Badge variant="secondary" className="text-xs text-primary-variant">
            {t("logs.websocket.count", {
              count: filteredMessages.length,
            })}
          </Badge>
          <div className="flex items-center gap-1">
            <Button
              variant="outline"
              size="sm"
              className="h-7 gap-1 px-2 text-xs"
              onClick={() => setPaused(!paused)}
              aria-label={
                paused ? t("logs.websocket.resume") : t("logs.websocket.pause")
              }
            >
              {paused ? (
                <FaPlay className="size-2.5" />
              ) : (
                <FaPause className="size-2.5" />
              )}
              {paused ? t("logs.websocket.resume") : t("logs.websocket.pause")}
            </Button>
            <Button
              variant="outline"
              size="sm"
              className="h-7 gap-1 px-2 text-xs"
              onClick={clear}
              aria-label={t("logs.websocket.clear")}
            >
              <FaEraser className="size-2.5" />
              {t("logs.websocket.clear")}
            </Button>
          </div>
        </div>
      </div>
      {/* Feed area */}
      <div
        ref={scrollContainerRef}
        onScroll={handleScroll}
        className="scrollbar-container flex-1 overflow-y-auto"
      >
        {filteredMessages.length === 0 ? (
          <div className="flex size-full items-center justify-center p-8 text-sm text-muted-foreground">
            {t("logs.websocket.empty")}
          </div>
        ) : (
          filteredMessages.map((msg: WsFeedMessage) => (
            <WsMessageRow
              key={msg.id}
              message={msg}
              showCameraBadge={showCameraBadge}
            />
          ))
        )}
      </div>
    </div>
  );
}
// Topic Filter Button

type TopicFilterButtonProps = {
  selectedTopics: TopicCategory[] | undefined;
  updateTopicFilter: (topics: TopicCategory[] | undefined) => void;
};

/**
 * Filter button that opens the topic-category picker in a dropdown
 * (desktop) or drawer (mobile). Selections are staged locally and only
 * committed via Apply; dismissing the picker reverts the staging.
 */
function TopicFilterButton({
  selectedTopics,
  updateTopicFilter,
}: TopicFilterButtonProps) {
  const { t } = useTranslation(["views/system"]);
  const [open, setOpen] = useState(false);
  const [currentTopics, setCurrentTopics] = useState<
    TopicCategory[] | undefined
  >(selectedTopics);

  // keep the staged selection in sync with the committed filter
  useEffect(() => {
    setCurrentTopics(selectedTopics);
  }, [selectedTopics]);

  const isFiltered = selectedTopics !== undefined;

  // discard staged edits whenever the picker is dismissed
  const handleOpenChange = (nextOpen: boolean) => {
    if (!nextOpen) {
      setCurrentTopics(selectedTopics);
    }
    setOpen(nextOpen);
  };

  const trigger = (
    <Button
      variant={isFiltered ? "select" : "outline"}
      size="sm"
      className="h-7 gap-1 px-2 text-xs"
      aria-label={t("logs.websocket.filter.all")}
    >
      <FaFilter
        className={`size-2.5 ${isFiltered ? "text-selected-foreground" : "text-secondary-foreground"}`}
      />
      <span className={isFiltered ? "text-selected-foreground" : ""}>
        {t("logs.websocket.filter.topics")}
      </span>
    </Button>
  );

  const content = (
    <TopicFilterContent
      currentTopics={currentTopics}
      setCurrentTopics={setCurrentTopics}
      onApply={() => {
        updateTopicFilter(currentTopics);
        setOpen(false);
      }}
      onReset={() => {
        setCurrentTopics(undefined);
        updateTopicFilter(undefined);
      }}
    />
  );

  if (isMobile) {
    return (
      <Drawer open={open} onOpenChange={handleOpenChange}>
        <DrawerTrigger asChild>{trigger}</DrawerTrigger>
        <DrawerContent className="max-h-[75dvh] overflow-hidden">
          {content}
        </DrawerContent>
      </Drawer>
    );
  }

  return (
    <DropdownMenu modal={false} open={open} onOpenChange={handleOpenChange}>
      <DropdownMenuTrigger asChild>{trigger}</DropdownMenuTrigger>
      <DropdownMenuContent>{content}</DropdownMenuContent>
    </DropdownMenu>
  );
}
type TopicFilterContentProps = {
  currentTopics: TopicCategory[] | undefined;
  setCurrentTopics: (topics: TopicCategory[] | undefined) => void;
  onApply: () => void;
  onReset: () => void;
};

/**
 * Body of the topic filter picker: an "all" switch, one switch per
 * category, and Apply/Reset actions. Unchecking the last explicit
 * category collapses the selection back to undefined ("all").
 */
function TopicFilterContent({
  currentTopics,
  setCurrentTopics,
  onApply,
  onReset,
}: TopicFilterContentProps) {
  const { t } = useTranslation(["views/system", "common"]);

  // toggle a single category on/off in the staged selection
  const toggleCategory = (cat: TopicCategory, enabled: boolean) => {
    if (enabled) {
      setCurrentTopics(currentTopics ? [...currentTopics, cat] : [cat]);
      return;
    }
    const remaining = currentTopics
      ? currentTopics.filter((c) => c !== cat)
      : [];
    setCurrentTopics(remaining.length === 0 ? undefined : remaining);
  };

  return (
    <>
      <div className="flex flex-col gap-2.5 p-4">
        <FilterSwitch
          isChecked={currentTopics === undefined}
          label={t("logs.websocket.filter.all")}
          onCheckedChange={(isChecked) => {
            if (isChecked) {
              setCurrentTopics(undefined);
            }
          }}
        />
        <DropdownMenuSeparator />
        {ALL_TOPIC_CATEGORIES.map((cat) => (
          <FilterSwitch
            key={cat}
            isChecked={currentTopics?.includes(cat) ?? false}
            label={t(`logs.websocket.filter.${cat}`)}
            onCheckedChange={(isChecked) => toggleCategory(cat, isChecked)}
          />
        ))}
      </div>
      <DropdownMenuSeparator />
      <div className="flex items-center justify-evenly p-2">
        <Button
          aria-label={t("button.apply", { ns: "common" })}
          variant="select"
          size="sm"
          onClick={onApply}
        >
          {t("button.apply", { ns: "common" })}
        </Button>
        <Button
          aria-label={t("button.reset", { ns: "common" })}
          size="sm"
          onClick={onReset}
        >
          {t("button.reset", { ns: "common" })}
        </Button>
      </div>
    </>
  );
}
// Camera Filter Button

type WsCamerasFilterButtonProps = {
  allCameras: string[];
  selectedCameras: string[] | undefined;
  updateCameraFilter: (cameras: string[] | undefined) => void;
};

/**
 * Filter button that opens the camera picker in a dropdown (desktop) or
 * drawer (mobile). Edits are staged locally and only committed via
 * Apply; dismissing the picker reverts the staged selection.
 */
function WsCamerasFilterButton({
  allCameras,
  selectedCameras,
  updateCameraFilter,
}: WsCamerasFilterButtonProps) {
  const { t } = useTranslation(["views/system", "common"]);
  const [open, setOpen] = useState(false);
  const [currentCameras, setCurrentCameras] = useState<string[] | undefined>(
    selectedCameras,
  );

  // re-sync the staged cameras whenever the committed filter changes
  useEffect(() => {
    setCurrentCameras(selectedCameras);
  }, [selectedCameras]);

  const isFiltered = selectedCameras !== undefined;

  // drop staged edits when the picker closes without applying
  const handleOpenChange = (nextOpen: boolean) => {
    if (!nextOpen) {
      setCurrentCameras(selectedCameras);
    }
    setOpen(nextOpen);
  };

  const trigger = (
    <Button
      variant={isFiltered ? "select" : "outline"}
      size="sm"
      className="h-7 gap-1 px-2 text-xs"
      aria-label={t("logs.websocket.filter.all_cameras")}
    >
      <FaVideo
        className={`size-2.5 ${isFiltered ? "text-selected-foreground" : "text-secondary-foreground"}`}
      />
      <span className={isFiltered ? "text-selected-foreground" : ""}>
        {!selectedCameras
          ? t("logs.websocket.filter.all_cameras")
          : t("logs.websocket.filter.cameras_count", {
              count: selectedCameras.length,
            })}
      </span>
    </Button>
  );

  const content = (
    <WsCamerasFilterContent
      allCameras={allCameras}
      currentCameras={currentCameras}
      setCurrentCameras={setCurrentCameras}
      onApply={() => {
        updateCameraFilter(currentCameras);
        setOpen(false);
      }}
      onReset={() => {
        setCurrentCameras(undefined);
        updateCameraFilter(undefined);
      }}
    />
  );

  if (isMobile) {
    return (
      <Drawer open={open} onOpenChange={handleOpenChange}>
        <DrawerTrigger asChild>{trigger}</DrawerTrigger>
        <DrawerContent className="max-h-[75dvh] overflow-hidden">
          {content}
        </DrawerContent>
      </Drawer>
    );
  }

  return (
    <DropdownMenu modal={false} open={open} onOpenChange={handleOpenChange}>
      <DropdownMenuTrigger asChild>{trigger}</DropdownMenuTrigger>
      <DropdownMenuContent>{content}</DropdownMenuContent>
    </DropdownMenu>
  );
}
type WsCamerasFilterContentProps = {
  allCameras: string[];
  currentCameras: string[] | undefined;
  setCurrentCameras: (cameras: string[] | undefined) => void;
  onApply: () => void;
  onReset: () => void;
};

/**
 * Body of the camera filter picker: an "all cameras" switch, one switch
 * per camera, and Apply/Reset actions. An explicit selection always
 * keeps at least one camera, and Apply is disabled for an empty one.
 */
function WsCamerasFilterContent({
  allCameras,
  currentCameras,
  setCurrentCameras,
  onApply,
  onReset,
}: WsCamerasFilterContentProps) {
  const { t } = useTranslation(["views/system", "common"]);

  // toggle one camera in the staged selection, never emptying it
  const toggleCamera = (cam: string, enabled: boolean) => {
    const next = currentCameras ? [...currentCameras] : [];
    if (enabled) {
      if (!next.includes(cam)) {
        next.push(cam);
      }
      setCurrentCameras(next);
    } else if (next.length > 1) {
      next.splice(next.indexOf(cam), 1);
      setCurrentCameras(next);
    }
  };

  return (
    <>
      <div className="scrollbar-container flex max-h-[60dvh] flex-col gap-2.5 overflow-y-auto p-4">
        <FilterSwitch
          isChecked={currentCameras === undefined}
          label={t("logs.websocket.filter.all_cameras")}
          onCheckedChange={(isChecked) => {
            if (isChecked) {
              setCurrentCameras(undefined);
            }
          }}
        />
        <DropdownMenuSeparator />
        {allCameras.map((cam) => (
          <FilterSwitch
            key={cam}
            isChecked={currentCameras?.includes(cam) ?? false}
            label={cam}
            type="camera"
            onCheckedChange={(isChecked) => toggleCamera(cam, isChecked)}
          />
        ))}
      </div>
      <DropdownMenuSeparator />
      <div className="flex items-center justify-evenly p-2">
        <Button
          aria-label={t("button.apply", { ns: "common" })}
          variant="select"
          size="sm"
          disabled={currentCameras?.length === 0}
          onClick={onApply}
        >
          {t("button.apply", { ns: "common" })}
        </Button>
        <Button
          aria-label={t("button.reset", { ns: "common" })}
          size="sm"
          onClick={onReset}
        >
          {t("button.reset", { ns: "common" })}
        </Button>
      </div>
    </>
  );
}

View File

@ -1,433 +0,0 @@
import { memo, useCallback, useState } from "react";
import { WsFeedMessage } from "@/api/ws";
import { cn } from "@/lib/utils";
import { ChevronRight } from "lucide-react";
import { useTranslation } from "react-i18next";
import { extractCameraName } from "@/utils/wsUtil";
import { getIconForLabel } from "@/utils/iconUtil";
import { LuCheck, LuCopy } from "react-icons/lu";
// Broad grouping used to colorize the topic badge of a feed row.
type TopicCategory = "events" | "camera_activity" | "system" | "other";

// Tailwind badge classes for the topic badge, keyed by category.
const TOPIC_CATEGORY_COLORS: Record<TopicCategory, string> = {
  events: "bg-blue-500/20 text-blue-700 dark:text-blue-300 border-blue-500/30",
  camera_activity:
    "bg-green-500/20 text-green-700 dark:text-green-300 border-green-500/30",
  system:
    "bg-purple-500/20 text-purple-700 dark:text-purple-300 border-purple-500/30",
  other: "bg-gray-500/20 text-gray-700 dark:text-gray-300 border-gray-500/30",
};
// Badge colors keyed by event lifecycle type ("start" | "update" | "end").
const EVENT_TYPE_COLORS: Record<string, string> = {
  start:
    "bg-green-500/20 text-green-700 dark:text-green-300 border-green-500/30",
  update: "bg-cyan-500/20 text-cyan-700 dark:text-cyan-300 border-cyan-500/30",
  end: "bg-red-500/20 text-red-700 dark:text-red-300 border-red-500/30",
};

// Badge colors for tracked_object_update sub-types.
const TRACKED_OBJECT_UPDATE_COLORS: Record<string, string> = {
  description:
    "bg-amber-500/20 text-amber-700 dark:text-amber-300 border-amber-500/30",
  face: "bg-pink-500/20 text-pink-700 dark:text-pink-300 border-pink-500/30",
  lpr: "bg-yellow-500/20 text-yellow-700 dark:text-yellow-300 border-yellow-500/30",
  classification:
    "bg-violet-500/20 text-violet-700 dark:text-violet-300 border-violet-500/30",
};

// Shared fallback badge color for unrecognized types.
const UNKNOWN_TYPE_COLOR =
  "bg-orange-500/20 text-orange-700 dark:text-orange-300 border-orange-500/30";

// Resolve the badge color for an event lifecycle type.
function getEventTypeColor(eventType: string): string {
  return EVENT_TYPE_COLORS[eventType] ?? UNKNOWN_TYPE_COLOR;
}

// Resolve the badge color for a tracked_object_update sub-type.
function getTrackedObjectTypeColor(objectType: string): string {
  return TRACKED_OBJECT_UPDATE_COLORS[objectType] ?? UNKNOWN_TYPE_COLOR;
}
// Topics describing tracked-object / review lifecycle messages.
const EVENT_TOPICS = new Set([
  "events",
  "reviews",
  "tracked_object_update",
  "triggers",
]);

// Topics carrying Frigate service/system state.
const SYSTEM_TOPICS = new Set([
  "stats",
  "model_state",
  "job_state",
  "embeddings_reindex_progress",
  "audio_transcription_state",
  "birdseye_layout",
]);

// Per-camera topic suffixes (e.g. "front/motion") that mark camera activity.
const CAMERA_TOPIC_FRAGMENTS = [
  "/motion",
  "/audio",
  "/detect",
  "/recordings",
  "/enabled",
  "/snapshots",
  "/ptz",
];

// Classify a websocket topic into the category used for badge coloring.
function getTopicCategory(topic: string): TopicCategory {
  if (EVENT_TOPICS.has(topic)) {
    return "events";
  }
  if (SYSTEM_TOPICS.has(topic)) {
    return "system";
  }
  const isCameraTopic =
    topic === "camera_activity" ||
    topic === "audio_detections" ||
    CAMERA_TOPIC_FRAGMENTS.some((fragment) => topic.includes(fragment));
  return isCameraTopic ? "camera_activity" : "other";
}
// Format an epoch-ms timestamp as local-time "HH:MM:SS.mmm".
function formatTimestamp(ts: number): string {
  const date = new Date(ts);
  const pad = (value: number, width: number) =>
    String(value).padStart(width, "0");
  return (
    `${pad(date.getHours(), 2)}:${pad(date.getMinutes(), 2)}:` +
    `${pad(date.getSeconds(), 2)}.${pad(date.getMilliseconds(), 3)}`
  );
}
// Produce a short, single-line summary of a message payload for the
// collapsed feed row. hideType suppresses the "type: …" prefix when the
// caller already renders a separate type badge.
function getPayloadSummary(
  topic: string,
  payload: unknown,
  hideType: boolean = false,
): string {
  if (payload === null || payload === undefined) {
    return "";
  }
  const truncate = (value: unknown): string => {
    const str = String(value);
    return str.length > 80 ? str.slice(0, 80) + "…" : str;
  };
  try {
    const data = typeof payload === "string" ? JSON.parse(payload) : payload;
    if (typeof data !== "object" || data === null) {
      return truncate(data);
    }
    // Topic-specific summary handlers
    if (topic === "tracked_object_update") {
      return getTrackedObjectUpdateSummary(data);
    }
    // Event-shaped payloads: { type, after: { label, sub_label? } }
    if ("type" in data && "label" in (data.after || data)) {
      const after = data.after || data;
      const parts: string[] = [];
      if (!hideType) {
        parts.push(`type: ${data.type}`);
      }
      parts.push(`label: ${after.label || "?"}`);
      // Events additionally surface the sub_label when present.
      if (topic === "events" && after.sub_label) {
        parts.push(`sub_label: ${after.sub_label}`);
      }
      return parts.join(", ");
    }
    if ("type" in data && "camera" in data) {
      return hideType
        ? `camera: ${data.camera}`
        : `type: ${data.type}, camera: ${data.camera}`;
    }
    // Generic objects: list up to 3 keys (with scalar values inline),
    // otherwise just report the key count.
    const keys = Object.keys(data);
    if (keys.length > 3) {
      return `{${keys.length} keys}`;
    }
    return keys
      .map((k) => {
        const v = data[k];
        return typeof v === "string" || typeof v === "number"
          ? `${k}: ${v}`
          : k;
      })
      .join(", ");
  } catch {
    // Unparseable string payloads fall back to a truncated raw rendering.
    return truncate(payload);
  }
}

// Summarize a tracked_object_update payload according to its sub-type.
function getTrackedObjectUpdateSummary(data: unknown): string {
  if (typeof data !== "object" || data === null) {
    return "";
  }
  const obj = data as Record<string, unknown>;
  const type = obj.type as string;
  switch (type) {
    case "description":
      return obj.description ? `${obj.description}` : "no description";
    case "face":
      return (obj.name as string | undefined) || "unknown";
    case "lpr":
      return (
        (obj.name as string | undefined) ||
        (obj.plate as string | undefined) ||
        "unknown"
      );
    case "classification": {
      // Render only the fields that are present, in a fixed order.
      const fields: Array<[string, string | undefined]> = [
        ["model", obj.model as string | undefined],
        ["sub_label", obj.sub_label as string | undefined],
        ["attribute", obj.attribute as string | undefined],
      ];
      const parts = fields
        .filter(([, value]) => value)
        .map(([key, value]) => `${key}: ${value}`);
      return parts.length > 0 ? parts.join(", ") : "classification";
    }
    default:
      return type || "unknown";
  }
}
// Pull the "type" field out of a payload (parsing JSON strings as needed)
// for display as a badge; returns null when absent or unparseable.
function extractTypeForBadge(payload: unknown): string | null {
  if (payload == null) {
    return null;
  }
  try {
    const data = typeof payload === "string" ? JSON.parse(payload) : payload;
    if (typeof data === "object" && data !== null && "type" in data) {
      return data.type as string;
    }
  } catch {
    // unparseable payload — no badge
  }
  return null;
}
// A type badge is rendered only when a non-empty type string was extracted.
function shouldShowTypeBadge(type: string | null): boolean {
  return Boolean(type);
}

// The inline payload summary is hidden for "reviews" messages.
function shouldShowSummary(topic: string): boolean {
  return topic !== "reviews";
}
// Minimal HTML escaping for text injected via dangerouslySetInnerHTML.
// "&" must be replaced first so existing entities are not double-escaped.
function escapeHtml(s: string): string {
  let escaped = s.replace(/&/g, "&amp;");
  escaped = escaped.replace(/</g, "&lt;");
  return escaped.replace(/>/g, "&gt;");
}

// Syntax-highlight a JSON value as an HTML string: keys, string values,
// keywords, and numbers each receive a tailwind color class. JSON strings
// are parsed first so raw websocket payloads render as structured objects.
function highlightJson(value: unknown): string {
  if (typeof value === "string") {
    try {
      const parsed = JSON.parse(value);
      if (typeof parsed === "object" && parsed !== null) {
        value = parsed;
      }
    } catch {
      // not JSON — highlight the raw string
    }
  }
  const raw = JSON.stringify(value, null, 2) ?? String(value);
  // Single regex pass over the pretty-printed JSON; the alternation captures
  // (1) object keys, (2) other strings, (3) keywords, (4) numbers.
  const tokenPattern =
    /("(?:[^"\\]|\\.)*")\s*:|("(?:[^"\\]|\\.)*")|(true|false|null)|(-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?)/g;
  return raw.replace(
    tokenPattern,
    (match, key: string, str: string, keyword: string, num: string) => {
      if (key) {
        return `<span class="text-indigo-400">${escapeHtml(key)}</span>:`;
      }
      if (str) {
        return `<span class="text-green-500">${escapeHtml(str)}</span>`;
      }
      if (keyword) {
        return `<span class="text-orange-500">${keyword}</span>`;
      }
      return num ? `<span class="text-cyan-500">${num}</span>` : match;
    },
  );
}
function CopyJsonButton({ payload }: { payload: unknown }) {
const [copied, setCopied] = useState(false);
const handleCopy = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation();
const text =
typeof payload === "string"
? payload
: JSON.stringify(payload, null, 2);
navigator.clipboard.writeText(text).then(() => {
setCopied(true);
setTimeout(() => setCopied(false), 2000);
});
},
[payload],
);
return (
<button
onClick={handleCopy}
className="rounded p-1 text-muted-foreground transition-colors hover:bg-secondary hover:text-foreground"
aria-label="Copy JSON"
>
{copied ? (
<LuCheck className="size-3.5 text-green-500" />
) : (
<LuCopy className="size-3.5" />
)}
</button>
);
}
// Props for a single websocket message row in the feed.
type WsMessageRowProps = {
  // The websocket message to render.
  message: WsFeedMessage;
  // Hide the camera badge when the feed is already scoped to one camera.
  showCameraBadge?: boolean;
};

// One expandable row of the websocket message feed: a compact line with
// timestamp, topic badge, optional type/camera badges, a label icon, and a
// payload summary; clicking expands a syntax-highlighted JSON payload with
// a copy button. Memoized so feed re-renders only touch changed rows.
const WsMessageRow = memo(function WsMessageRow({
  message,
  showCameraBadge = true,
}: WsMessageRowProps) {
  const { t } = useTranslation(["views/system"]);
  const [expanded, setExpanded] = useState(false);
  const category = getTopicCategory(message.topic);
  const cameraName = extractCameraName(message);
  const messageType = extractTypeForBadge(message.payload);
  const showTypeBadge = shouldShowTypeBadge(messageType);
  const summary = getPayloadSummary(message.topic, message.payload);
  // Object label (after.label or label) used to pick a per-label icon.
  const eventLabel = (() => {
    try {
      const data =
        typeof message.payload === "string"
          ? JSON.parse(message.payload)
          : message.payload;
      if (typeof data === "object" && data !== null) {
        return (data.after?.label as string) || (data.label as string) || null;
      }
    } catch {
      // ignore
    }
    return null;
  })();
  // Payload parsed once, shared by the expanded JSON view and copy button;
  // falls back to the raw payload when it is not valid JSON.
  const parsedPayload = (() => {
    try {
      return typeof message.payload === "string"
        ? JSON.parse(message.payload)
        : message.payload;
    } catch {
      return message.payload;
    }
  })();
  const handleToggle = useCallback(() => {
    setExpanded((prev) => !prev);
  }, []);
  // Determine which color function to use based on topic
  const getTypeBadgeColor = (type: string | null) => {
    if (!type) return "";
    if (message.topic === "tracked_object_update") {
      return getTrackedObjectTypeColor(type);
    }
    return getEventTypeColor(type);
  };
  return (
    <div className="border-b border-secondary/50">
      {/* Collapsed summary line; the whole row toggles expansion. */}
      <div
        className={cn(
          "flex cursor-pointer items-center gap-2 px-2 py-1.5 transition-colors hover:bg-muted/50",
          expanded && "bg-muted/30",
        )}
        onClick={handleToggle}
      >
        <ChevronRight
          className={cn(
            "size-3.5 shrink-0 text-muted-foreground transition-transform",
            expanded && "rotate-90",
          )}
        />
        <span className="shrink-0 font-mono text-xs text-muted-foreground">
          {formatTimestamp(message.timestamp)}
        </span>
        <span
          className={cn(
            "shrink-0 rounded border px-1.5 py-0.5 font-mono text-xs",
            TOPIC_CATEGORY_COLORS[category],
          )}
        >
          {message.topic}
        </span>
        {showTypeBadge && messageType && (
          <span
            className={cn(
              "shrink-0 rounded border px-1.5 py-0.5 text-xs",
              getTypeBadgeColor(messageType),
            )}
          >
            {messageType}
          </span>
        )}
        {showCameraBadge && cameraName && (
          <span className="shrink-0 rounded bg-secondary px-1.5 py-0.5 text-xs text-secondary-foreground">
            {cameraName}
          </span>
        )}
        {eventLabel && (
          <span className="shrink-0">
            {getIconForLabel(
              eventLabel,
              "object",
              "size-3.5 text-primary-variant",
            )}
          </span>
        )}
        {shouldShowSummary(message.topic) && (
          <span className="min-w-0 truncate text-xs text-muted-foreground">
            {summary}
          </span>
        )}
      </div>
      {/* Expanded view: highlighted JSON payload plus copy-to-clipboard. */}
      {expanded && (
        <div className="border-t border-secondary/30 bg-background_alt/50 px-4 py-2">
          <div className="mb-1 flex items-center justify-between">
            <span className="text-xs font-medium uppercase tracking-wider text-muted-foreground">
              {t("logs.websocket.expanded.payload")}
            </span>
            <CopyJsonButton payload={parsedPayload} />
          </div>
          {/* highlightJson escapes payload text before building the HTML. */}
          <pre
            className="scrollbar-container max-h-[60vh] overflow-auto rounded bg-background p-2 font-mono text-[11px] leading-relaxed"
            dangerouslySetInnerHTML={{ __html: highlightJson(parsedPayload) }}
          />
        </div>
      )}
    </div>
  );
});

export default WsMessageRow;

View File

@ -2,7 +2,6 @@ import { useContext } from "react";
import { AuthContext } from "@/context/auth-context";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { isReplayCamera } from "@/utils/cameraUtil";
export function useAllowedCameras() {
const { auth } = useContext(AuthContext);
@ -15,11 +14,9 @@ export function useAllowedCameras() {
auth.user?.role === "admin" ||
!auth.isAuthenticated // anonymous internal port
) {
// return all cameras, excluding replay cameras
return config?.cameras
? Object.keys(config.cameras).filter((name) => !isReplayCamera(name))
: [];
// return all cameras
return config?.cameras ? Object.keys(config.cameras) : [];
}
return (auth.allowedCameras || []).filter((name) => !isReplayCamera(name));
return auth.allowedCameras || [];
}

View File

@ -26,7 +26,7 @@ type useCameraActivityReturn = {
};
export function useCameraActivity(
camera: CameraConfig | undefined,
camera: CameraConfig,
revalidateOnFocus: boolean = true,
): useCameraActivityReturn {
const { data: config } = useSWR<FrigateConfig>("config", {
@ -47,7 +47,7 @@ export function useCameraActivity(
// init camera activity
const { payload: updatedCameraState } = useInitialCameraState(
camera?.name ?? "",
camera.name,
revalidateOnFocus,
);
useEffect(() => {
@ -60,7 +60,7 @@ export function useCameraActivity(
const memoizedAudioState = useDeepMemo(updatedAudioState);
useEffect(() => {
if (memoizedAudioState && camera?.name) {
if (memoizedAudioState) {
setAudioDetections(memoizedAudioState[camera.name]);
}
}, [memoizedAudioState, camera]);
@ -72,8 +72,8 @@ export function useCameraActivity(
[objects],
);
const { payload: cameraEnabled } = useEnabledState(camera?.name ?? "");
const { payload: detectingMotion } = useMotionActivity(camera?.name ?? "");
const { payload: cameraEnabled } = useEnabledState(camera.name);
const { payload: detectingMotion } = useMotionActivity(camera.name);
const { payload: event } = useFrigateEvents();
const updatedEvent = useDeepMemo(event);
@ -91,7 +91,7 @@ export function useCameraActivity(
return;
}
if (!camera?.name || updatedEvent.after.camera !== camera.name) {
if (updatedEvent.after.camera !== camera.name) {
return;
}
@ -158,10 +158,6 @@ export function useCameraActivity(
return false;
}
if (!camera?.name) {
return false;
}
return (
cameras[camera.name]?.camera_fps == 0 && stats["service"].uptime > 60
);

View File

@ -9,7 +9,6 @@ import { useMemo } from "react";
import useSWR from "swr";
import useDeepMemo from "./use-deep-memo";
import { capitalizeAll, capitalizeFirstLetter } from "@/utils/stringUtil";
import { isReplayCamera } from "@/utils/cameraUtil";
import { useFrigateStats } from "@/api/ws";
import { useTranslation } from "react-i18next";
@ -17,9 +16,6 @@ import { useTranslation } from "react-i18next";
export default function useStats(stats: FrigateStats | undefined) {
const { t } = useTranslation(["views/system"]);
const { data: config } = useSWR<FrigateConfig>("config");
const { data: debugReplayStatus } = useSWR("debug_replay/status", {
revalidateOnFocus: false,
});
const memoizedStats = useDeepMemo(stats);
@ -78,11 +74,6 @@ export default function useStats(stats: FrigateStats | undefined) {
return;
}
// Skip replay cameras
if (isReplayCamera(name)) {
return;
}
const cameraName = config.cameras?.[name]?.friendly_name ?? name;
if (config.cameras[name].enabled && cam["camera_fps"] == 0) {
problems.push({
@ -105,15 +96,7 @@ export default function useStats(stats: FrigateStats | undefined) {
);
const cameraName = config?.cameras?.[name]?.friendly_name ?? name;
// Skip ffmpeg warnings for replay cameras when debug replay is active
if (
!isNaN(ffmpegAvg) &&
ffmpegAvg >= CameraFfmpegThreshold.error &&
!(
debugReplayStatus?.active && debugReplayStatus?.replay_camera === name
)
) {
if (!isNaN(ffmpegAvg) && ffmpegAvg >= CameraFfmpegThreshold.error) {
problems.push({
text: t("stats.ffmpegHighCpuUsage", {
camera: capitalizeFirstLetter(capitalizeAll(cameraName)),
@ -136,19 +119,8 @@ export default function useStats(stats: FrigateStats | undefined) {
}
});
// Add message if debug replay is active
if (debugReplayStatus?.active) {
problems.push({
text: t("stats.debugReplayActive", {
defaultValue: "Debug replay session is active",
}),
color: "text-selected",
relevantLink: "/replay",
});
}
return problems;
}, [config, memoizedStats, t, debugReplayStatus]);
}, [config, memoizedStats, t]);
return { potentialProblems };
}

View File

@ -1,99 +0,0 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useWsMessageSubscribe, WsFeedMessage } from "@/api/ws";
import { extractCameraName } from "@/utils/wsUtil";
// Return shape of useWsMessageBuffer.
type UseWsMessageBufferReturn = {
  // Snapshot of the buffered messages (new array reference on each flush).
  messages: WsFeedMessage[];
  // Empties the buffer and triggers an immediate re-render.
  clear: () => void;
};

// Optional filtering applied before a message enters the buffer.
type MessageFilter = {
  cameraFilter?: string | string[]; // "all", specific camera name, or array of camera names (undefined in array = all)
};
// Collects websocket feed messages into a bounded in-memory buffer.
// Subscribes to the shared feed, applies an optional camera filter, and
// batches re-renders (a 200ms flush interval) so a chatty feed does not
// thrash React. Returns the buffered messages plus a clear() helper.
export function useWsMessageBuffer(
  maxSize: number = 2000,
  paused: boolean = false,
  filter?: MessageFilter,
): UseWsMessageBufferReturn {
  // Messages live in a mutable ref; state only holds a version counter
  // that is bumped to schedule re-renders.
  const bufferRef = useRef<WsFeedMessage[]>([]);
  const [version, setVersion] = useState(0);
  // Refs mirror the latest props so the stable subscription callback sees
  // current values without re-subscribing on every render.
  const pausedRef = useRef(paused);
  const filterRef = useRef(filter);
  pausedRef.current = paused;
  filterRef.current = filter;
  const batchTimerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  // Set when new messages arrived since the last flush.
  const dirtyRef = useRef(false);
  // Flush at most every 200ms, and only when something actually changed.
  useEffect(() => {
    batchTimerRef.current = setInterval(() => {
      if (dirtyRef.current) {
        dirtyRef.current = false;
        setVersion((v) => v + 1);
      }
    }, 200);
    return () => {
      if (batchTimerRef.current) {
        clearInterval(batchTimerRef.current);
      }
    };
  }, []);
  // Decide whether a message passes the current camera filter.
  const shouldIncludeMessage = useCallback((msg: WsFeedMessage): boolean => {
    const currentFilter = filterRef.current;
    if (!currentFilter) return true;
    // Check camera filter
    const cf = currentFilter.cameraFilter;
    if (cf !== undefined) {
      if (Array.isArray(cf)) {
        // Array of cameras: include messages matching any camera in the list
        const msgCamera = extractCameraName(msg);
        if (msgCamera && !cf.includes(msgCamera)) {
          return false;
        }
      } else if (cf !== "all") {
        // Single string camera filter
        const msgCamera = extractCameraName(msg);
        if (msgCamera !== cf) {
          return false;
        }
      }
    }
    return true;
  }, []);
  useWsMessageSubscribe(
    useCallback(
      (msg: WsFeedMessage) => {
        if (pausedRef.current) return;
        if (!shouldIncludeMessage(msg)) return;
        const buf = bufferRef.current;
        buf.push(msg);
        // Drop the oldest entries once the buffer exceeds maxSize.
        if (buf.length > maxSize) {
          buf.splice(0, buf.length - maxSize);
        }
        dirtyRef.current = true;
      },
      [shouldIncludeMessage, maxSize],
    ),
  );
  // Empty the buffer and re-render immediately (bypasses the batch timer).
  const clear = useCallback(() => {
    bufferRef.current = [];
    setVersion((v) => v + 1);
  }, []);
  // version is used to trigger re-renders; we spread the buffer
  // into a new array so that downstream useMemo dependencies
  // see a new reference and recompute.
  // eslint-disable-next-line react-hooks/exhaustive-deps
  const messages = useMemo(() => [...bufferRef.current], [version]);
  return { messages, clear };
}

View File

@ -642,4 +642,4 @@ function CaseAssignmentDialog({
);
}
export default Exports;
export default Exports;

View File

@ -35,12 +35,10 @@ import { isIOS, isMobile } from "react-device-detect";
import { isPWA } from "@/utils/isPWA";
import { isInIframe } from "@/utils/isIFrame";
import { useTranslation } from "react-i18next";
import WsMessageFeed from "@/components/ws/WsMessageFeed";
function Logs() {
const { t } = useTranslation(["views/system"]);
const [logService, setLogService] = useState<LogType>("frigate");
const isWebsocket = logService === "websocket";
const tabsRef = useRef<HTMLDivElement | null>(null);
const lazyLogWrapperRef = useRef<HTMLDivElement>(null);
const [logs, setLogs] = useState<string[]>([]);
@ -218,12 +216,6 @@ function Logs() {
}, [logService, filterSeverity, t]);
useEffect(() => {
if (isWebsocket) {
setIsLoading(false);
setLogs([]);
return;
}
setIsLoading(true);
setLogs([]);
lastFetchedIndexRef.current = -1;
@ -502,128 +494,116 @@ function Logs() {
data-nav-item={item}
aria-label={`Select ${item}`}
>
<div
className={item !== "websocket" ? "smart-capitalize" : ""}
>
{item === "websocket" ? t("logs.websocket.label") : item}
</div>
<div className="smart-capitalize">{item}</div>
</ToggleGroupItem>
))}
</ToggleGroup>
<ScrollBar orientation="horizontal" className="h-0" />
</div>
</ScrollArea>
{!isWebsocket && (
<div className="flex items-center gap-2">
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.copy.label")}
size="sm"
onClick={handleCopyLogs}
>
<FaCopy className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("logs.copy.label")}
</div>
</Button>
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.download.label")}
size="sm"
onClick={handleDownloadLogs}
>
<FaDownload className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("button.download", { ns: "common" })}
</div>
</Button>
<LogSettingsButton
selectedLabels={filterSeverity}
updateLabelFilter={setFilterSeverity}
logSettings={logSettings}
setLogSettings={setLogSettings}
/>
</div>
)}
<div className="flex items-center gap-2">
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.copy.label")}
size="sm"
onClick={handleCopyLogs}
>
<FaCopy className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("logs.copy.label")}
</div>
</Button>
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.download.label")}
size="sm"
onClick={handleDownloadLogs}
>
<FaDownload className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("button.download", { ns: "common" })}
</div>
</Button>
<LogSettingsButton
selectedLabels={filterSeverity}
updateLabelFilter={setFilterSeverity}
logSettings={logSettings}
setLogSettings={setLogSettings}
/>
</div>
</div>
{isWebsocket ? (
<div className="my-2 flex size-full flex-col overflow-hidden rounded-md border border-secondary bg-background_alt">
<WsMessageFeed maxSize={2000} />
</div>
) : (
<div className="relative my-2 flex size-full flex-col overflow-hidden whitespace-pre-wrap rounded-md border border-secondary bg-background_alt font-mono text-xs sm:p-1">
<div className="grid grid-cols-5 *:px-0 *:py-3 *:text-sm *:text-primary/40 md:grid-cols-12">
<div className="col-span-3 lg:col-span-2">
<div className="flex w-full flex-row items-center">
<div className="ml-1 min-w-16 smart-capitalize lg:min-w-20">
{t("logs.type.label")}
</div>
<div className="mr-3">{t("logs.type.timestamp")}</div>
<div className="relative my-2 flex size-full flex-col overflow-hidden whitespace-pre-wrap rounded-md border border-secondary bg-background_alt font-mono text-xs sm:p-1">
<div className="grid grid-cols-5 *:px-0 *:py-3 *:text-sm *:text-primary/40 md:grid-cols-12">
<div className="col-span-3 lg:col-span-2">
<div className="flex w-full flex-row items-center">
<div className="ml-1 min-w-16 smart-capitalize lg:min-w-20">
{t("logs.type.label")}
</div>
</div>
<div
className={cn(
"flex items-center",
logService == "frigate" ? "col-span-2" : "col-span-1",
)}
>
{t("logs.type.tag")}
</div>
<div
className={cn(
"col-span-5 flex items-center",
logService == "frigate"
? "md:col-span-7 lg:col-span-8"
: "md:col-span-8 lg:col-span-9",
)}
>
<div className="flex flex-1">{t("logs.type.message")}</div>
<div className="mr-3">{t("logs.type.timestamp")}</div>
</div>
</div>
<div ref={lazyLogWrapperRef} className="size-full">
{isLoading ? (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
) : (
<EnhancedScrollFollow
startFollowing={!isLoading}
onCustomScroll={handleScroll}
render={({ follow, onScroll }) => (
<>
{follow && !logSettings.disableStreaming && (
<div className="absolute right-1 top-3">
<Tooltip>
<TooltipTrigger>
<MdCircle className="mr-2 size-2 animate-pulse cursor-default text-selected shadow-selected drop-shadow-md" />
</TooltipTrigger>
<TooltipContent>{t("logs.tips")}</TooltipContent>
</Tooltip>
</div>
)}
<LazyLog
ref={lazyLogRef}
enableLineNumbers={false}
selectableLines
lineClassName="text-primary bg-background"
highlightLineClassName="bg-primary/20"
onRowClick={handleRowClick}
formatPart={formatPart}
text={logs.join("\n")}
follow={follow}
onScroll={onScroll}
loadingComponent={
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
}
loading={isLoading}
/>
</>
)}
/>
<div
className={cn(
"flex items-center",
logService == "frigate" ? "col-span-2" : "col-span-1",
)}
>
{t("logs.type.tag")}
</div>
<div
className={cn(
"col-span-5 flex items-center",
logService == "frigate"
? "md:col-span-7 lg:col-span-8"
: "md:col-span-8 lg:col-span-9",
)}
>
<div className="flex flex-1">{t("logs.type.message")}</div>
</div>
</div>
)}
<div ref={lazyLogWrapperRef} className="size-full">
{isLoading ? (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
) : (
<EnhancedScrollFollow
startFollowing={!isLoading}
onCustomScroll={handleScroll}
render={({ follow, onScroll }) => (
<>
{follow && !logSettings.disableStreaming && (
<div className="absolute right-1 top-3">
<Tooltip>
<TooltipTrigger>
<MdCircle className="mr-2 size-2 animate-pulse cursor-default text-selected shadow-selected drop-shadow-md" />
</TooltipTrigger>
<TooltipContent>{t("logs.tips")}</TooltipContent>
</Tooltip>
</div>
)}
<LazyLog
ref={lazyLogRef}
enableLineNumbers={false}
selectableLines
lineClassName="text-primary bg-background"
highlightLineClassName="bg-primary/20"
onRowClick={handleRowClick}
formatPart={formatPart}
text={logs.join("\n")}
follow={follow}
onScroll={onScroll}
loadingComponent={
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
}
loading={isLoading}
/>
</>
)}
/>
)}
</div>
</div>
</div>
);
}

View File

@ -1,725 +0,0 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Link, useNavigate } from "react-router-dom";
import { Trans, useTranslation } from "react-i18next";
import useSWR from "swr";
import axios from "axios";
import { toast } from "sonner";
import AutoUpdatingCameraImage from "@/components/camera/AutoUpdatingCameraImage";
import { Button, buttonVariants } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import {
Dialog,
DialogContent,
DialogHeader,
DialogTitle,
DialogDescription,
} from "@/components/ui/dialog";
import { useCameraActivity } from "@/hooks/use-camera-activity";
import { cn } from "@/lib/utils";
import Heading from "@/components/ui/heading";
import { Toaster } from "@/components/ui/sonner";
import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
import { getIconForLabel } from "@/utils/iconUtil";
import { getTranslatedLabel } from "@/utils/i18n";
import { ObjectType } from "@/types/ws";
import WsMessageFeed from "@/components/ws/WsMessageFeed";
import { ConfigSectionTemplate } from "@/components/config-form/sections/ConfigSectionTemplate";
import { LuExternalLink, LuInfo, LuSettings } from "react-icons/lu";
import { LuSquare } from "react-icons/lu";
import { MdReplay } from "react-icons/md";
import { isDesktop, isMobile } from "react-device-detect";
import Logo from "@/components/Logo";
import { Separator } from "@/components/ui/separator";
import { useDocDomain } from "@/hooks/use-doc-domain";
import DebugDrawingLayer from "@/components/overlay/DebugDrawingLayer";
import { IoMdArrowRoundBack } from "react-icons/io";
// Status payload polled from the "debug_replay/status" endpoint.
type DebugReplayStatus = {
  // Whether a replay session is currently running.
  active: boolean;
  // Name of the synthetic replay camera, when a session is active.
  replay_camera: string | null;
  // Name of the original camera whose footage is being replayed.
  source_camera: string | null;
  // Replay window bounds (epoch seconds), when a session is active.
  start_time: number | null;
  end_time: number | null;
  // True once the replay camera's live stream is ready to display.
  live_ready: boolean;
};

// Per-overlay debug drawing toggles for the replay view.
type DebugOptions = {
  bbox: boolean;
  timestamp: boolean;
  zones: boolean;
  mask: boolean;
  motion: boolean;
  regions: boolean;
  paths: boolean;
};
// Overlay configuration used when the replay page first loads.
const DEFAULT_OPTIONS: DebugOptions = {
  bbox: true,
  timestamp: false,
  zones: false,
  mask: false,
  motion: true,
  regions: false,
  paths: false,
};

// Stable iteration order for building query params and the settings UI.
const DEBUG_OPTION_KEYS: (keyof DebugOptions)[] = [
  "bbox",
  "timestamp",
  "zones",
  "mask",
  "motion",
  "regions",
  "paths",
];

// Translation key for each overlay toggle's label.
const DEBUG_OPTION_I18N_KEY: Record<keyof DebugOptions, string> = {
  bbox: "boundingBoxes",
  timestamp: "timestamp",
  zones: "zones",
  mask: "mask",
  motion: "motion",
  regions: "regions",
  paths: "paths",
};

// Maximum time the replay-initialization skeleton is shown before it is
// hidden, even if the stream has not reported ready.
const REPLAY_INIT_SKELETON_TIMEOUT_MS = 8000;
export default function Replay() {
const { t } = useTranslation(["views/replay", "views/settings", "common"]);
const navigate = useNavigate();
const { getLocaleDocUrl } = useDocDomain();
const {
data: status,
mutate: refreshStatus,
isLoading,
} = useSWR<DebugReplayStatus>("debug_replay/status", {
refreshInterval: 1000,
});
const [isInitializing, setIsInitializing] = useState(true);
// Refresh status immediately on mount to avoid showing "no session" briefly
useEffect(() => {
const initializeStatus = async () => {
await refreshStatus();
setIsInitializing(false);
};
initializeStatus();
}, [refreshStatus]);
useEffect(() => {
if (status?.live_ready) {
setShowReplayInitSkeleton(false);
}
}, [status?.live_ready]);
const [options, setOptions] = useState<DebugOptions>(DEFAULT_OPTIONS);
const [isStopping, setIsStopping] = useState(false);
const [configDialogOpen, setConfigDialogOpen] = useState(false);
const searchParams = useMemo(() => {
const params = new URLSearchParams();
for (const key of DEBUG_OPTION_KEYS) {
params.set(key, options[key] ? "1" : "0");
}
return params;
}, [options]);
const handleSetOption = useCallback(
(key: keyof DebugOptions, value: boolean) => {
setOptions((prev) => ({ ...prev, [key]: value }));
},
[],
);
const handleStop = useCallback(() => {
setIsStopping(true);
axios
.post("debug_replay/stop")
.then(() => {
toast.success(t("dialog.toast.stopped"), {
position: "top-center",
});
refreshStatus();
navigate("/review");
})
.catch((error) => {
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
toast.error(t("dialog.toast.stopError", { error: errorMessage }), {
position: "top-center",
});
})
.finally(() => {
setIsStopping(false);
});
}, [navigate, refreshStatus, t]);
// Camera activity for the replay camera
const { data: config } = useSWR<FrigateConfig>("config", {
revalidateOnFocus: false,
});
const replayCameraName = status?.replay_camera ?? "";
const replayCameraConfig = replayCameraName
? config?.cameras?.[replayCameraName]
: undefined;
const { objects } = useCameraActivity(replayCameraConfig);
const [showReplayInitSkeleton, setShowReplayInitSkeleton] = useState(false);
// debug draw
const containerRef = useRef<HTMLDivElement>(null);
const [debugDraw, setDebugDraw] = useState(false);
useEffect(() => {
if (!status?.active || !status.replay_camera) {
setShowReplayInitSkeleton(false);
return;
}
setShowReplayInitSkeleton(true);
const timeout = window.setTimeout(() => {
setShowReplayInitSkeleton(false);
}, REPLAY_INIT_SKELETON_TIMEOUT_MS);
return () => {
window.clearTimeout(timeout);
};
}, [status?.active, status?.replay_camera]);
useEffect(() => {
if (status?.live_ready) {
setShowReplayInitSkeleton(false);
}
}, [status?.live_ready]);
// Format time range for display
const timeRangeDisplay = useMemo(() => {
if (!status?.start_time || !status?.end_time) return "";
const start = new Date(status.start_time * 1000).toLocaleString();
const end = new Date(status.end_time * 1000).toLocaleString();
return `${start}${end}`;
}, [status]);
// Show loading state
if (isInitializing || (isLoading && !status?.active)) {
return (
<div className="flex size-full items-center justify-center">
<ActivityIndicator />
</div>
);
}
// No active session
if (!status?.active) {
return (
<div className="flex size-full flex-col items-center justify-center gap-4 p-8">
<MdReplay className="size-12" />
<Heading as="h2" className="text-center">
{t("page.noSession")}
</Heading>
<p className="max-w-md text-center text-muted-foreground">
{t("page.noSessionDesc")}
</p>
<Button variant="default" onClick={() => navigate("/review")}>
{t("page.goToRecordings")}
</Button>
</div>
);
}
return (
<div className="flex size-full flex-col overflow-hidden">
<Toaster position="top-center" closeButton={true} />
{/* Top bar */}
<div className="flex min-h-12 items-center justify-between border-b border-secondary px-2 py-2 md:min-h-16 md:px-3 md:py-3">
{isMobile && (
<Logo className="absolute inset-x-1/2 h-8 -translate-x-1/2" />
)}
<Button
className="flex items-center gap-2.5 rounded-lg"
aria-label={t("label.back", { ns: "common" })}
size="sm"
onClick={() => navigate(-1)}
>
<IoMdArrowRoundBack className="size-5 text-secondary-foreground" />
{isDesktop && (
<div className="text-primary">
{t("button.back", { ns: "common" })}
</div>
)}
</Button>
<div className="flex items-center gap-2">
<Button
variant="outline"
size="sm"
className="flex items-center gap-2"
onClick={() => setConfigDialogOpen(true)}
>
<LuSettings className="size-4" />
<span className="hidden md:inline">{t("page.configuration")}</span>
</Button>
<AlertDialog>
<AlertDialogTrigger asChild>
<Button
variant="destructive"
size="sm"
className="flex items-center gap-2 text-white"
disabled={isStopping}
>
{isStopping && <ActivityIndicator className="size-4" />}
<span className="hidden md:inline">{t("page.stopReplay")}</span>
<LuSquare className="size-4 md:hidden" />
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>
{t("page.confirmStop.title")}
</AlertDialogTitle>
<AlertDialogDescription>
{t("page.confirmStop.description")}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>
{t("page.confirmStop.cancel")}
</AlertDialogCancel>
<AlertDialogAction
onClick={handleStop}
className={cn(
buttonVariants({ variant: "destructive" }),
"text-white",
)}
>
{t("page.confirmStop.confirm")}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
</div>
{/* Main content */}
<div className="flex flex-1 flex-col overflow-hidden pb-2 md:flex-row">
{/* Camera feed */}
<div className="flex max-h-[40%] px-2 pt-2 md:h-dvh md:max-h-full md:w-7/12 md:grow md:px-4 md:pt-2">
{isStopping ? (
<div className="flex size-full items-center justify-center rounded-lg bg-background_alt">
<div className="flex flex-col items-center justify-center gap-2">
<ActivityIndicator className="size-8" />
<div className="text-secondary-foreground">
{t("page.stoppingReplay")}
</div>
</div>
</div>
) : (
status.replay_camera && (
<div className="relative size-full min-h-10" ref={containerRef}>
<AutoUpdatingCameraImage
className="size-full"
cameraClasses="relative w-full h-full flex flex-col justify-start"
searchParams={searchParams}
camera={status.replay_camera}
showFps={false}
/>
{debugDraw && (
<DebugDrawingLayer
containerRef={containerRef}
cameraWidth={
config?.cameras?.[status.source_camera ?? ""]?.detect
.width ?? 1280
}
cameraHeight={
config?.cameras?.[status.source_camera ?? ""]?.detect
.height ?? 720
}
/>
)}
{showReplayInitSkeleton && (
<div className="pointer-events-none absolute inset-0 z-10 size-full rounded-lg bg-background">
<Skeleton className="size-full rounded-lg" />
<div className="absolute left-1/2 top-1/2 flex -translate-x-1/2 -translate-y-1/2 flex-col items-center justify-center gap-2">
<ActivityIndicator className="size-8" />
<div className="text-secondary-foreground">
{t("page.initializingReplay")}
</div>
</div>
</div>
)}
</div>
)
)}
</div>
{/* Side panel */}
<div className="scrollbar-container order-last mb-2 mt-2 flex h-full w-full flex-col overflow-y-auto rounded-lg border-[1px] border-secondary-foreground bg-background_alt p-2 md:order-none md:mb-0 md:mr-2 md:mt-0 md:w-4/12">
<div className="mb-5 flex flex-col space-y-2">
<Heading as="h3" className="mb-0">
{t("title")}
</Heading>
<div className="flex flex-wrap items-center gap-2 text-xs text-muted-foreground">
<span className="smart-capitalize">{status.source_camera}</span>
{timeRangeDisplay && (
<>
<span className="hidden md:inline"></span>
<span className="hidden md:inline">{timeRangeDisplay}</span>
</>
)}
</div>
<div className="mb-5 space-y-3 text-sm text-muted-foreground">
<p>{t("description")}</p>
</div>
</div>
<Tabs defaultValue="debug" className="flex h-full w-full flex-col">
<TabsList className="grid w-full grid-cols-3">
<TabsTrigger value="debug">
{t("debug.debugging", { ns: "views/settings" })}
</TabsTrigger>
<TabsTrigger value="objects">{t("page.objects")}</TabsTrigger>
<TabsTrigger value="messages">
{t("websocket_messages")}
</TabsTrigger>
</TabsList>
<TabsContent value="debug" className="mt-2">
<div className="mt-2 space-y-6">
<div className="my-2.5 flex flex-col gap-2.5">
{DEBUG_OPTION_KEYS.map((key) => {
const i18nKey = DEBUG_OPTION_I18N_KEY[key];
return (
<div
key={key}
className="flex w-full flex-row items-center justify-between"
>
<div className="mb-1 flex flex-col">
<div className="flex items-center gap-2">
<Label
className="mb-0 cursor-pointer text-primary smart-capitalize"
htmlFor={`debug-${key}`}
>
{t(`debug.${i18nKey}.title`, {
ns: "views/settings",
})}
</Label>
{(key === "bbox" ||
key === "motion" ||
key === "regions" ||
key === "paths") && (
<Popover>
<PopoverTrigger asChild>
<div className="cursor-pointer p-0">
<LuInfo className="size-4" />
<span className="sr-only">
{t("button.info", { ns: "common" })}
</span>
</div>
</PopoverTrigger>
<PopoverContent className="w-80 text-sm">
{key === "bbox" ? (
<>
<p className="mb-2">
<strong>
{t(
"debug.boundingBoxes.colors.label",
{
ns: "views/settings",
},
)}
</strong>
</p>
<ul className="list-disc space-y-1 pl-5">
<Trans ns="views/settings">
debug.boundingBoxes.colors.info
</Trans>
</ul>
</>
) : (
<Trans ns="views/settings">
{`debug.${i18nKey}.tips`}
</Trans>
)}
</PopoverContent>
</Popover>
)}
</div>
<div className="mt-1 text-xs text-muted-foreground">
{t(`debug.${i18nKey}.desc`, {
ns: "views/settings",
})}
</div>
</div>
<Switch
id={`debug-${key}`}
className="ml-1"
checked={options[key]}
onCheckedChange={(checked) =>
handleSetOption(key, checked)
}
/>
</div>
);
})}
{isDesktop && (
<>
<Separator className="my-2" />
<div className="flex w-full flex-row items-center justify-between">
<div className="mb-2 flex flex-col">
<div className="flex items-center gap-2">
<Label
className="mb-0 cursor-pointer text-primary smart-capitalize"
htmlFor="debugdraw"
>
{t("debug.objectShapeFilterDrawing.title", {
ns: "views/settings",
})}
</Label>
<Popover>
<PopoverTrigger asChild>
<div className="cursor-pointer p-0">
<LuInfo className="size-4" />
<span className="sr-only">
{t("button.info", { ns: "common" })}
</span>
</div>
</PopoverTrigger>
<PopoverContent className="w-80 text-sm">
{t("debug.objectShapeFilterDrawing.tips", {
ns: "views/settings",
})}
<div className="mt-2 flex items-center text-primary">
<Link
to={getLocaleDocUrl(
"configuration/object_filters#object-shape",
)}
target="_blank"
rel="noopener noreferrer"
className="inline"
>
{t("readTheDocumentation", {
ns: "common",
})}
<LuExternalLink className="ml-2 inline-flex size-3" />
</Link>
</div>
</PopoverContent>
</Popover>
</div>
<div className="mt-1 text-xs text-muted-foreground">
{t("debug.objectShapeFilterDrawing.desc", {
ns: "views/settings",
})}
</div>
</div>
<Switch
key={"draw"}
className="ml-1"
id="debug_draw"
checked={debugDraw}
onCheckedChange={(isChecked) => {
setDebugDraw(isChecked);
}}
/>
</div>
</>
)}
</div>
</div>
</TabsContent>
<TabsContent value="objects" className="mt-2">
<ObjectList
cameraConfig={replayCameraConfig}
objects={objects}
config={config}
/>
</TabsContent>
<TabsContent
value="messages"
className="mt-2 flex min-h-0 flex-1 flex-col"
>
<div className="flex h-full flex-col overflow-hidden rounded-md border border-secondary">
<WsMessageFeed
maxSize={2000}
lockedCamera={status.replay_camera ?? undefined}
showCameraBadge={false}
/>
</div>
</TabsContent>
</Tabs>
</div>
</div>
<Dialog open={configDialogOpen} onOpenChange={setConfigDialogOpen}>
<DialogContent className="scrollbar-container max-h-[90dvh] overflow-y-auto sm:max-w-xl md:max-w-3xl lg:max-w-4xl">
<DialogHeader>
<DialogTitle>{t("page.configuration")}</DialogTitle>
<DialogDescription className="mb-5">
{t("page.configurationDesc")}
</DialogDescription>
</DialogHeader>
<div className="space-y-6">
<ConfigSectionTemplate
sectionKey="motion"
level="replay"
cameraName={status.replay_camera ?? undefined}
skipSave
noStickyButtons
requiresRestart={false}
collapsible
defaultCollapsed={false}
showTitle
showOverrideIndicator={false}
/>
<ConfigSectionTemplate
sectionKey="objects"
level="replay"
cameraName={status.replay_camera ?? undefined}
skipSave
noStickyButtons
requiresRestart={false}
collapsible
defaultCollapsed={false}
showTitle
showOverrideIndicator={false}
/>
</div>
</DialogContent>
</Dialog>
</div>
);
}
/** Props for the ObjectList panel shown in the replay view's "objects" tab. */
type ObjectListProps = {
  // Config of the camera the objects belong to; used to compute area as a
  // percentage of the detect resolution. Optional: area % is hidden without it.
  cameraConfig?: CameraConfig;
  // Currently tracked objects to render; empty/undefined shows a "no objects" note.
  objects?: ObjectType[];
  // Full Frigate config; used to resolve per-label colors from model.colormap.
  config?: FrigateConfig;
};
/**
 * Side-panel list of tracked objects for the replay camera.
 *
 * Renders one card per object with its label icon (tinted by the model
 * colormap, gray when stationary), score, shape ratio, and area in pixels
 * and as a percentage of the camera's detect resolution.
 *
 * Fix: the ratio/area rows previously used bare `obj.ratio && (…)` /
 * `obj.area && … && (…)` guards, which make React render a literal `0`
 * when the value is zero. Guards are now coerced to boolean with `!!`.
 */
function ObjectList({ cameraConfig, objects, config }: ObjectListProps) {
  const { t } = useTranslation(["views/settings"]);

  // Model colormap (label -> channel triple); undefined until config loads.
  const colormap = useMemo(() => {
    if (!config) {
      return;
    }
    return config.model?.colormap;
  }, [config]);

  // Channels are read in reverse order (indices 2,1,0) into rgb() — the
  // colormap appears to be BGR-ordered (OpenCV convention); TODO confirm.
  // Falls back to gray when the label has no colormap entry.
  const getColorForObjectName = useCallback(
    (objectName: string) => {
      return colormap && colormap[objectName]
        ? `rgb(${colormap[objectName][2]}, ${colormap[objectName][1]}, ${colormap[objectName][0]})`
        : "rgb(128, 128, 128)";
    },
    [colormap],
  );

  // Empty state: nothing tracked (or objects not yet loaded).
  if (!objects || objects.length === 0) {
    return (
      <div className="p-3 text-center text-sm text-muted-foreground">
        {t("debug.noObjects", { ns: "views/settings" })}
      </div>
    );
  }

  return (
    <div className="flex w-full flex-col gap-2">
      {objects.map((obj: ObjectType) => {
        return (
          <div
            key={obj.id}
            className="flex flex-col rounded-lg bg-secondary/30 p-2"
          >
            <div className="flex flex-row items-center gap-3 pb-1">
              {/* Icon tile: gray when stationary, otherwise label color. */}
              <div
                className="rounded-lg p-2"
                style={{
                  backgroundColor: obj.stationary
                    ? "rgb(110,110,110)"
                    : getColorForObjectName(obj.label),
                }}
              >
                {getIconForLabel(obj.label, "object", "size-4 text-white")}
              </div>
              <div className="text-sm font-medium">
                {getTranslatedLabel(obj.label)}
              </div>
            </div>
            <div className="flex flex-col gap-1 pl-1 text-xs text-primary-variant">
              <div className="flex items-center justify-between">
                <span>
                  {t("debug.objectShapeFilterDrawing.score", {
                    ns: "views/settings",
                  })}
                  :
                </span>
                <span className="text-primary">
                  {obj.score ? (obj.score * 100).toFixed(1) : "-"}%
                </span>
              </div>
              {/* !! prevents React from rendering a literal 0 when ratio === 0 */}
              {!!obj.ratio && (
                <div className="flex items-center justify-between">
                  <span>
                    {t("debug.objectShapeFilterDrawing.ratio", {
                      ns: "views/settings",
                    })}
                    :
                  </span>
                  <span className="text-primary">{obj.ratio.toFixed(2)}</span>
                </div>
              )}
              {/* !! prevents React from rendering a literal 0 when area === 0 */}
              {!!obj.area && cameraConfig && (
                <div className="flex items-center justify-between">
                  <span>
                    {t("debug.objectShapeFilterDrawing.area", {
                      ns: "views/settings",
                    })}
                    :
                  </span>
                  <span className="text-primary">
                    {obj.area} px (
                    {(
                      (obj.area /
                        (cameraConfig.detect.width *
                          cameraConfig.detect.height)) *
                      100
                    ).toFixed(2)}
                    %)
                  </span>
                </div>
              )}
            </div>
          </div>
        );
      })}
    </div>
  );
}

View File

@ -12,7 +12,7 @@ export type LogLine = {
content: string;
};
export const logTypes = ["frigate", "go2rtc", "nginx", "websocket"] as const;
export const logTypes = ["frigate", "go2rtc", "nginx"] as const;
export type LogType = (typeof logTypes)[number];
export type LogSettingsType = {

View File

@ -148,15 +148,3 @@ export function detectCameraAudioFeatures(
audioOutput: !!audioOutput,
};
}
const REPLAY_CAMERA_PREFIX = "_replay_";
/**
* Check if a camera name is a debug replay camera.
*
* @param name - The camera name to check
* @returns true if the camera is a replay camera
*/
export function isReplayCamera(name: string): boolean {
return name.startsWith(REPLAY_CAMERA_PREFIX);
}

View File

@ -514,18 +514,13 @@ const mergeSectionConfig = (
export function getSectionConfig(
sectionKey: string,
level: "global" | "camera" | "replay",
level: "global" | "camera",
): SectionConfig {
const entry = sectionConfigs[sectionKey];
if (!entry) {
return {};
}
const overrides =
level === "global"
? entry.global
: level === "replay"
? entry.replay
: entry.camera;
const overrides = level === "global" ? entry.global : entry.camera;
return mergeSectionConfig(entry.base, overrides);
}

View File

@ -1,53 +0,0 @@
import { WsFeedMessage } from "@/api/ws";
const EVENT_TOPICS = new Set([
"events",
"reviews",
"tracked_object_update",
"triggers",
]);
const SYSTEM_TOPICS = new Set([
"stats",
"model_state",
"job_state",
"embeddings_reindex_progress",
"audio_transcription_state",
"birdseye_layout",
]);
export function extractCameraName(message: WsFeedMessage): string | null {
// Try extracting from topic pattern: {camera}/motion, {camera}/audio/rms, etc.
const topicParts = message.topic.split("/");
if (
topicParts.length >= 2 &&
!EVENT_TOPICS.has(message.topic) &&
!SYSTEM_TOPICS.has(message.topic) &&
message.topic !== "camera_activity" &&
message.topic !== "audio_detections" &&
message.topic !== "restart" &&
message.topic !== "notification_test"
) {
return topicParts[0];
}
// Try extracting from payload
try {
const data =
typeof message.payload === "string"
? JSON.parse(message.payload)
: message.payload;
if (typeof data === "object" && data !== null) {
if ("camera" in data) return data.camera as string;
if ("after" in data && data.after?.camera)
return data.after.camera as string;
if ("before" in data && data.before?.camera)
return data.before.camera as string;
}
} catch {
// ignore parse errors
}
return null;
}

View File

@ -1,8 +1,6 @@
import ReviewCard from "@/components/card/ReviewCard";
import ReviewFilterGroup from "@/components/filter/ReviewFilterGroup";
import DebugReplayDialog from "@/components/overlay/DebugReplayDialog";
import ExportDialog from "@/components/overlay/ExportDialog";
import ActionsDropdown from "@/components/overlay/ActionsDropdown";
import PreviewPlayer, {
PreviewController,
} from "@/components/player/PreviewPlayer";
@ -201,11 +199,6 @@ export function RecordingView({
const [exportRange, setExportRange] = useState<TimeRange>();
const [showExportPreview, setShowExportPreview] = useState(false);
// debug replay
const [debugReplayMode, setDebugReplayMode] = useState<ExportMode>("none");
const [debugReplayRange, setDebugReplayRange] = useState<TimeRange>();
// move to next clip
const onClipEnded = useCallback(() => {
@ -276,7 +269,7 @@ export function RecordingView({
);
useEffect(() => {
if (scrubbing || exportRange || debugReplayRange) {
if (scrubbing || exportRange) {
if (
currentTime > currentTimeRange.before + 60 ||
currentTime < currentTimeRange.after - 60
@ -598,23 +591,6 @@ export function RecordingView({
selected={mainCamera}
onSelectCamera={onSelectCamera}
/>
{isDesktop && (
<DebugReplayDialog
camera={mainCamera}
currentTime={currentTime}
latestTime={timeRange.before}
mode={debugReplayMode}
range={debugReplayRange}
setRange={(range: TimeRange | undefined) => {
setDebugReplayRange(range);
if (range != undefined) {
mainControllerRef.current?.pause();
}
}}
setMode={setDebugReplayMode}
/>
)}
{isDesktop && (
<ExportDialog
camera={mainCamera}
@ -663,28 +639,6 @@ export function RecordingView({
setMotionOnly={() => {}}
/>
)}
{isDesktop && (
<ActionsDropdown
onDebugReplayClick={() => {
const now = new Date(timeRange.before * 1000);
now.setHours(now.getHours() - 1);
setDebugReplayRange({
after: now.getTime() / 1000,
before: timeRange.before,
});
setDebugReplayMode("select");
}}
onExportClick={() => {
const now = new Date(timeRange.before * 1000);
now.setHours(now.getHours() - 1);
setExportRange({
before: timeRange.before,
after: now.getTime() / 1000,
});
setExportMode("select");
}}
/>
)}
{isDesktop ? (
<ToggleGroup
className="*:rounded-md *:px-3 *:py-4"
@ -734,16 +688,6 @@ export function RecordingView({
showExportPreview={showExportPreview}
allLabels={reviewFilterList.labels}
allZones={reviewFilterList.zones}
debugReplayMode={debugReplayMode}
debugReplayRange={debugReplayRange}
setDebugReplayMode={setDebugReplayMode}
setDebugReplayRange={(range: TimeRange | undefined) => {
setDebugReplayRange(range);
if (range != undefined) {
mainControllerRef.current?.pause();
}
}}
onUpdateFilter={updateFilter}
setRange={setExportRange}
setMode={setExportMode}
@ -814,9 +758,7 @@ export function RecordingView({
timeRange={currentTimeRange}
cameraPreviews={allPreviews ?? []}
startTimestamp={playbackStart}
hotKeys={
exportMode != "select" && debugReplayMode != "select"
}
hotKeys={exportMode != "select"}
fullscreen={fullscreen}
onTimestampUpdate={(timestamp) => {
setPlayerTime(timestamp);
@ -830,11 +772,7 @@ export function RecordingView({
onControllerReady={(controller) => {
mainControllerRef.current = controller;
}}
isScrubbing={
scrubbing ||
exportMode == "timeline" ||
debugReplayMode == "timeline"
}
isScrubbing={scrubbing || exportMode == "timeline"}
supportsFullscreen={supportsFullScreen}
setFullResolution={setFullResolution}
toggleFullscreen={toggleFullscreen}
@ -902,29 +840,18 @@ export function RecordingView({
contentRef={contentRef}
mainCamera={mainCamera}
timelineType={
(exportRange == undefined && debugReplayRange == undefined
? timelineType
: "timeline") ?? "timeline"
(exportRange == undefined ? timelineType : "timeline") ??
"timeline"
}
timeRange={timeRange}
mainCameraReviewItems={mainCameraReviewItems}
activeReviewItem={activeReviewItem}
currentTime={currentTime}
exportRange={
exportMode == "timeline"
? exportRange
: debugReplayMode == "timeline"
? debugReplayRange
: undefined
}
exportRange={exportMode == "timeline" ? exportRange : undefined}
setCurrentTime={setCurrentTime}
manuallySetCurrentTime={manuallySetCurrentTime}
setScrubbing={setScrubbing}
setExportRange={
debugReplayMode == "timeline"
? setDebugReplayRange
: setExportRange
}
setExportRange={setExportRange}
onAnalysisOpen={onAnalysisOpen}
isPlaying={mainControllerRef?.current?.isPlaying() ?? false}
/>