diff --git a/web/src/routes/Cameras.jsx b/web/src/routes/Cameras.jsx
index 2298b992e..18128f738 100644
--- a/web/src/routes/Cameras.jsx
+++ b/web/src/routes/Cameras.jsx
@@ -5,24 +5,41 @@ import CameraImage from '../components/CameraImage';
import AudioIcon from '../icons/Audio';
import ClipIcon from '../icons/Clip';
import MotionIcon from '../icons/Motion';
+import SettingsIcon from '../icons/Settings';
import SnapshotIcon from '../icons/Snapshot';
import { useAudioState, useDetectState, useRecordingsState, useSnapshotsState } from '../api/ws';
import { useMemo } from 'preact/hooks';
import useSWR from 'swr';
+import { useRef, useState } from 'react';
+import { useResizeObserver } from '../hooks';
+import Dialog from '../components/Dialog';
+import Switch from '../components/Switch';
+import Heading from '../components/Heading';
+import Button from '../components/Button';
export default function Cameras() {
const { data: config } = useSWR('config');
+ const containerRef = useRef(null);
+ const [{ width: containerWidth }] = useResizeObserver(containerRef);
+ // Add scrollbar width (when visible) to the available observer width to eliminate screen juddering.
+ // https://github.com/blakeblackshear/frigate/issues/1657
+ let scrollBarWidth = 0;
+ if (window.innerWidth && document.body.offsetWidth) {
+ scrollBarWidth = window.innerWidth - document.body.offsetWidth;
+ }
+ const availableWidth = scrollBarWidth ? containerWidth + scrollBarWidth : containerWidth;
+
return !config ? (
-
+
+
);
}
-function SortedCameras({ config, unsortedCameras }) {
+function SortedCameras({ config, unsortedCameras, availableWidth }) {
const sortedCameras = useMemo(
() =>
Object.entries(unsortedCameras)
@@ -34,17 +51,20 @@ function SortedCameras({ config, unsortedCameras }) {
return (
{sortedCameras.map(([camera, conf]) => (
-
+
))}
);
}
-function Camera({ name, config }) {
+function Camera({ name, config, availableWidth }) {
const { payload: detectValue, send: sendDetect } = useDetectState(name);
const { payload: recordValue, send: sendRecordings } = useRecordingsState(name);
const { payload: snapshotValue, send: sendSnapshots } = useSnapshotsState(name);
const { payload: audioValue, send: sendAudio } = useAudioState(name);
+
+ const [cameraOptions, setCameraOptions] = useState('');
+
const href = `/cameras/${name}`;
const buttons = useMemo(() => {
return [
@@ -56,7 +76,15 @@ function Camera({ name, config }) {
return `${name.replaceAll('_', ' ')}`;
}, [name]);
const icons = useMemo(
- () => [
+ () => (availableWidth < 448 ? [
+ {
+ icon: SettingsIcon,
+ color: 'gray',
+ onClick: () => {
+ setCameraOptions(config.name);
+ },
+ },
+ ] : [
{
name: `Toggle detect ${detectValue === 'ON' ? 'off' : 'on'}`,
icon: MotionIcon,
@@ -95,17 +123,64 @@ function Camera({ name, config }) {
},
}
: null,
- ].filter((button) => button != null),
- [config, audioValue, sendAudio, detectValue, sendDetect, recordValue, sendRecordings, snapshotValue, sendSnapshots]
+ ]).filter((button) => button != null),
+ [config, availableWidth, setCameraOptions, audioValue, sendAudio, detectValue, sendDetect, recordValue, sendRecordings, snapshotValue, sendSnapshots]
);
return (
-
}
- />
+
+ {cameraOptions && (
+
+ )}
+
+ }
+ />
+
);
}
diff --git a/web/src/routes/__tests__/Cameras.test.jsx b/web/src/routes/__tests__/Cameras.test.jsx
index 7dfaa8d53..faa3b2bc9 100644
--- a/web/src/routes/__tests__/Cameras.test.jsx
+++ b/web/src/routes/__tests__/Cameras.test.jsx
@@ -1,5 +1,6 @@
import { h } from 'preact';
import * as CameraImage from '../../components/CameraImage';
+import * as Hooks from '../../hooks';
import * as WS from '../../api/ws';
import Cameras from '../Cameras';
import { fireEvent, render, screen, waitForElementToBeRemoved } from 'testing-library';
@@ -8,6 +9,7 @@ describe('Cameras Route', () => {
beforeEach(() => {
vi.spyOn(CameraImage, 'default').mockImplementation(() =>
);
vi.spyOn(WS, 'useWs').mockImplementation(() => ({ value: { payload: 'OFF' }, send: vi.fn() }));
+ vi.spyOn(Hooks, 'useResizeObserver').mockImplementation(() => [{ width: 1000 }]);
});
test('shows an ActivityIndicator if not yet loaded', async () => {
diff --git a/web/src/routes/__tests__/Recording.test.jsx b/web/src/routes/__tests__/Recording.test.jsx
index 8dc33fdaf..2351eaf81 100644
--- a/web/src/routes/__tests__/Recording.test.jsx
+++ b/web/src/routes/__tests__/Recording.test.jsx
@@ -1,6 +1,7 @@
import { h } from 'preact';
import * as CameraImage from '../../components/CameraImage';
import * as WS from '../../api/ws';
+import * as Hooks from '../../hooks';
import Cameras from '../Cameras';
import { render, screen, waitForElementToBeRemoved } from 'testing-library';
@@ -8,6 +9,7 @@ describe('Recording Route', () => {
beforeEach(() => {
vi.spyOn(CameraImage, 'default').mockImplementation(() =>
);
vi.spyOn(WS, 'useWs').mockImplementation(() => ({ value: { payload: 'OFF' }, send: jest.fn() }));
+ vi.spyOn(Hooks, 'useResizeObserver').mockImplementation(() => [{ width: 1000 }]);
});
test('shows an ActivityIndicator if not yet loaded', async () => {
From a82334ca1cd57bf3517df43552a58947ffcf6209 Mon Sep 17 00:00:00 2001
From: tpjanssen <25168870+tpjanssen@users.noreply.github.com>
Date: Mon, 9 Oct 2023 15:52:26 +0200
Subject: [PATCH 04/18] API enhancements (#8107)
---
docs/docs/integrations/api.md | 2 ++
frigate/http.py | 8 ++++++++
2 files changed, 10 insertions(+)
diff --git a/docs/docs/integrations/api.md b/docs/docs/integrations/api.md
index 27b760cab..7ddc773c3 100644
--- a/docs/docs/integrations/api.md
+++ b/docs/docs/integrations/api.md
@@ -172,6 +172,8 @@ Events from the database. Accepts the following query string parameters:
| `min_score` | float | Minimum score of the event |
| `max_score` | float | Maximum score of the event |
| `is_submitted` | int | Filter events that are submitted to Frigate+ (0 or 1) |
+| `min_length` | float | Minimum length of the event in seconds |
+| `max_length` | float | Maximum length of the event in seconds |
### `GET /api/timeline`
diff --git a/frigate/http.py b/frigate/http.py
index 3098285cc..64ba51ea0 100644
--- a/frigate/http.py
+++ b/frigate/http.py
@@ -805,6 +805,8 @@ def events():
min_score = request.args.get("min_score", type=float)
max_score = request.args.get("max_score", type=float)
is_submitted = request.args.get("is_submitted", type=int)
+ min_length = request.args.get("min_length", type=float)
+ max_length = request.args.get("max_length", type=float)
clauses = []
@@ -933,6 +935,12 @@ def events():
if min_score is not None:
clauses.append((Event.data["score"] >= min_score))
+ if min_length is not None:
+ clauses.append(((Event.end_time - Event.start_time) >= min_length))
+
+ if max_length is not None:
+ clauses.append(((Event.end_time - Event.start_time) <= max_length))
+
if is_submitted is not None:
if is_submitted == 0:
clauses.append((Event.plus_id.is_null()))
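
For reference, the two new parameters combine with the existing event filters. A quick illustration (host and port are placeholders, not part of this patch) of requesting only events lasting between 10 and 60 seconds:

```python
import requests

# Ask the events API for up to 25 events whose duration
# (end_time - start_time) falls between 10 and 60 seconds.
events = requests.get(
    "http://frigate.local:5000/api/events",
    params={"min_length": 10, "max_length": 60, "limit": 25},
    timeout=10,
).json()

for event in events:
    print(event["id"], event["label"], event["end_time"] - event["start_time"])
```
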
From 68ebd5542587757c89195b679b89f1c572efd873 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Mon, 9 Oct 2023 17:15:23 -0600
Subject: [PATCH 05/18] Lower min scores for person and car in plus docs
(#8114)
---
docs/docs/plus/index.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/docs/plus/index.md b/docs/docs/plus/index.md
index ced79174a..7d92fb531 100644
--- a/docs/docs/plus/index.md
+++ b/docs/docs/plus/index.md
@@ -70,10 +70,10 @@ objects:
fedex:
min_score: .75
person:
- min_score: .8
+ min_score: .65
threshold: .85
car:
- min_score: .8
+ min_score: .65
threshold: .85
```
From 97e5a98b95d4e918f6e23c24ac1d006f06bf8824 Mon Sep 17 00:00:00 2001
From: Felipe Santos
Date: Mon, 9 Oct 2023 20:16:05 -0300
Subject: [PATCH 06/18] Fix jetson docker build with old apt key (#8112)
* Fix jetson docker build with old apt key
* Fix CI
---
docker/main/Dockerfile | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/docker/main/Dockerfile b/docker/main/Dockerfile
index e0a67d1f7..c7fd149fd 100644
--- a/docker/main/Dockerfile
+++ b/docker/main/Dockerfile
@@ -121,13 +121,15 @@ RUN apt-get -qq update \
apt-transport-https \
gnupg \
wget \
- && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 648ACFD622F3D138 \
- && echo "deb http://deb.debian.org/debian bullseye main contrib non-free" | tee /etc/apt/sources.list.d/raspi.list \
+ # the key fingerprint can be obtained from https://ftp-master.debian.org/keys.html
+ && wget -qO- "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xA4285295FC7B1A81600062A9605C66F00D6C9793" | \
+ gpg --dearmor > /usr/share/keyrings/debian-archive-bullseye-stable.gpg \
+ && echo "deb [signed-by=/usr/share/keyrings/debian-archive-bullseye-stable.gpg] http://deb.debian.org/debian bullseye main contrib non-free" | \
+ tee /etc/apt/sources.list.d/debian-bullseye-nonfree.list \
&& apt-get -qq update \
&& apt-get -qq install -y \
python3.9 \
python3.9-dev \
- wget \
# opencv dependencies
build-essential cmake git pkg-config libgtk-3-dev \
libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
From d508088bd0237813630e4afff275363b3f38de18 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Mon, 9 Oct 2023 17:16:34 -0600
Subject: [PATCH 07/18] Add audio role to camera config docs (#8113)
---
docs/docs/configuration/cameras.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/docs/docs/configuration/cameras.md b/docs/docs/configuration/cameras.md
index 262811395..58bf2bdc6 100644
--- a/docs/docs/configuration/cameras.md
+++ b/docs/docs/configuration/cameras.md
@@ -13,8 +13,9 @@ Each role can only be assigned to one input per camera. The options for roles ar
| Role | Description |
| -------- | ---------------------------------------------------------------------------------------- |
-| `detect` | Main feed for object detection |
+| `detect` | Main feed for object detection. [docs](object_detectors.md) |
| `record` | Saves segments of the video feed based on configuration settings. [docs](record.md) |
+| `audio` | Feed for audio based detection. [docs](audio_detectors.md) |
| `rtmp` | Deprecated: Broadcast as an RTMP feed for other services to consume. [docs](restream.md) |
```yaml
From dcafcc13206c472cbacce5535003b6cc6f190e0a Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Tue, 10 Oct 2023 07:23:18 -0600
Subject: [PATCH 08/18] Fix Config success message (#8121)
* Fix saved message
* Fix save mask
---
web/src/routes/CameraMap.jsx | 2 +-
web/src/routes/Config.jsx | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/web/src/routes/CameraMap.jsx b/web/src/routes/CameraMap.jsx
index a508d80b2..6beaff2f5 100644
--- a/web/src/routes/CameraMap.jsx
+++ b/web/src/routes/CameraMap.jsx
@@ -135,7 +135,7 @@ export default function CameraMasks({ camera }) {
const endpoint = `config/set?${queryParameters}`;
const response = await axios.put(endpoint);
if (response.status === 200) {
- setSuccess(response.data);
+ setSuccess(response.data.message);
}
} catch (error) {
if (error.response) {
diff --git a/web/src/routes/Config.jsx b/web/src/routes/Config.jsx
index 33354b30b..21d2160a7 100644
--- a/web/src/routes/Config.jsx
+++ b/web/src/routes/Config.jsx
@@ -29,7 +29,7 @@ export default function Config() {
.then((response) => {
if (response.status === 200) {
setError('');
- setSuccess(response.data);
+ setSuccess(response.data.message);
}
})
.catch((error) => {
From 2379e6fd1be3be2ae23718b52e7451f7611498a6 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 05:14:24 -0600
Subject: [PATCH 09/18] Support TiB in storage calculation (#8142)
---
web/src/routes/Storage.jsx | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/web/src/routes/Storage.jsx b/web/src/routes/Storage.jsx
index 71fde1668..4e187ef86 100644
--- a/web/src/routes/Storage.jsx
+++ b/web/src/routes/Storage.jsx
@@ -27,8 +27,9 @@ export default function Storage() {
const getUnitSize = (MB) => {
if (isNaN(MB) || MB < 0) return 'Invalid number';
if (MB < 1024) return `${MB} MiB`;
+ if (MB < 1048576) return `${(MB / 1024).toFixed(2)} GiB`;
- return `${(MB / 1024).toFixed(2)} GiB`;
+ return `${(MB / 1048576).toFixed(2)} TiB`;
};
let storage_usage;
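
The thresholds above, sketched in Python for clarity (this mirrors `getUnitSize()` and is not part of the patch): values are MiB, 1024 MiB rolls over to GiB, and 1048576 MiB (1024 GiB) rolls over to TiB.

```python
import math

def get_unit_size(mib: float) -> str:
    # Mirrors the JS getUnitSize(): MiB below 1024, GiB below 1024 * 1024, TiB above.
    if math.isnan(mib) or mib < 0:
        return "Invalid number"
    if mib < 1024:
        return f"{mib} MiB"
    if mib < 1048576:
        return f"{mib / 1024:.2f} GiB"
    return f"{mib / 1048576:.2f} TiB"

print(get_unit_size(512))      # 512 MiB
print(get_unit_size(204800))   # 200.00 GiB
print(get_unit_size(2097152))  # 2.00 TiB
```
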
From 3869b274e2f70ac7f75176b5ce19613a2a7e7320 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 05:14:55 -0600
Subject: [PATCH 10/18] Add note about recording retention to manual event docs
(#8141)
---
docs/docs/integrations/api.md | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/docs/docs/integrations/api.md b/docs/docs/integrations/api.md
index 7ddc773c3..f080c0a36 100644
--- a/docs/docs/integrations/api.md
+++ b/docs/docs/integrations/api.md
@@ -322,6 +322,12 @@ Get PTZ info for the camera.
Create a manual event with a given `label` (ex: doorbell press) to capture a specific event besides an object being detected.
+:::caution
+
+Recording retention config still applies to manual events; if Frigate is configured with `mode: motion`, the manual event will only keep recording segments when motion occurred.
+
+:::
+
**Optional Body:**
```json
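
The caution above applies once the event exists; for reference, a hedged example of creating a manual event from Python. The endpoint path, body field, host, and camera/label names are assumptions based on the surrounding docs, not text from this patch:

```python
import requests

# Create a manual "doorbell_press" event on the front_door camera.
# Recording segments are still governed by the camera's retention mode,
# so with `mode: motion` only segments containing motion are kept.
resp = requests.post(
    "http://frigate.local:5000/api/events/front_door/doorbell_press/create",
    json={"duration": 30},  # optional body; adjust or omit as needed
    timeout=10,
)
print(resp.status_code, resp.json())
```
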
From 869bb2b1777b0d214ee8fe261c6beb3bca89849f Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 05:15:39 -0600
Subject: [PATCH 11/18] clarifications and fixes for live go2rtc example
(#8132)
* clarifications and fixes for live go2rtc example
* fix
---
docs/docs/configuration/live.md | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/docs/docs/configuration/live.md b/docs/docs/configuration/live.md
index 9ffd4114b..d5ed1f033 100644
--- a/docs/docs/configuration/live.md
+++ b/docs/docs/configuration/live.md
@@ -37,12 +37,12 @@ There may be some cameras that you would prefer to use the sub stream for live v
```yaml
go2rtc:
streams:
- rtsp_cam:
+ test_cam:
- rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio.
- - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to opus
- rtsp_cam_sub:
+ - "ffmpeg:test_cam#audio=opus" # <- copy of the stream which transcodes audio to opus for webrtc
+ test_cam_sub:
- rtsp://192.168.1.5:554/substream # <- stream which supports video & aac audio.
- - "ffmpeg:rtsp_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus
+ - "ffmpeg:test_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus for webrtc
cameras:
test_cam:
@@ -59,7 +59,7 @@ cameras:
roles:
- detect
live:
- stream_name: rtsp_cam_sub
+ stream_name: test_cam_sub
```
### WebRTC extra configuration:
From e19c0668e760d78cc98e06010f896e038090a6e3 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 05:16:36 -0600
Subject: [PATCH 12/18] Require init delay (#8126)
---
frigate/track/norfair_tracker.py | 7 +------
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/frigate/track/norfair_tracker.py b/frigate/track/norfair_tracker.py
index 42a2fde2f..ff63ba563 100644
--- a/frigate/track/norfair_tracker.py
+++ b/frigate/track/norfair_tracker.py
@@ -77,7 +77,7 @@ class NorfairTracker(ObjectTracker):
self.tracker = Tracker(
distance_function=frigate_distance,
distance_threshold=2.5,
- initialization_delay=0,
+ initialization_delay=config.detect.fps / 2,
hit_counter_max=self.max_disappeared,
)
if self.ptz_autotracker_enabled.value:
@@ -106,11 +106,6 @@ class NorfairTracker(ObjectTracker):
"ymax": self.detect_config.height,
}
- # start object with a hit count of `fps` to avoid quick detection -> loss
- next(
- (o for o in self.tracker.tracked_objects if o.global_id == track_id)
- ).hit_counter = self.camera_config.detect.fps
-
def deregister(self, id, track_id):
del self.tracked_objects[id]
del self.disappeared[id]
From e32bd4ab15360b00ed1f1e5c24588def1bd9c060 Mon Sep 17 00:00:00 2001
From: tpjanssen <25168870+tpjanssen@users.noreply.github.com>
Date: Fri, 13 Oct 2023 13:17:41 +0200
Subject: [PATCH 13/18] Added audio sensors to camera metrics and API stats
(#8109)
* Added audio sensor to camera metrics and API stats
* Update types.py
* Update app.py
---
frigate/app.py | 3 +++
frigate/events/audio.py | 9 ++++++++-
frigate/stats.py | 2 ++
frigate/types.py | 2 ++
4 files changed, 15 insertions(+), 1 deletion(-)
diff --git a/frigate/app.py b/frigate/app.py
index 3a5da0e80..a2e300526 100644
--- a/frigate/app.py
+++ b/frigate/app.py
@@ -163,6 +163,8 @@ class FrigateApp:
"frame_queue": mp.Queue(maxsize=2),
"capture_process": None,
"process": None,
+ "audio_rms": mp.Value("d", 0.0), # type: ignore[typeddict-item]
+ "audio_dBFS": mp.Value("d", 0.0), # type: ignore[typeddict-item]
}
self.ptz_metrics[camera_name] = {
"ptz_autotracker_enabled": mp.Value( # type: ignore[typeddict-item]
@@ -500,6 +502,7 @@ class FrigateApp:
args=(
self.config,
self.audio_recordings_info_queue,
+ self.camera_metrics,
self.feature_metrics,
self.inter_process_communicator,
),
diff --git a/frigate/events/audio.py b/frigate/events/audio.py
index f06b593c1..50e1d9ab0 100644
--- a/frigate/events/audio.py
+++ b/frigate/events/audio.py
@@ -26,7 +26,7 @@ from frigate.const import (
from frigate.ffmpeg_presets import parse_preset_input
from frigate.log import LogPipe
from frigate.object_detection import load_labels
-from frigate.types import FeatureMetricsTypes
+from frigate.types import CameraMetricsTypes, FeatureMetricsTypes
from frigate.util.builtin import get_ffmpeg_arg_list
from frigate.util.services import listen
from frigate.video import start_or_restart_ffmpeg, stop_ffmpeg
@@ -52,6 +52,7 @@ def get_ffmpeg_command(input_args: list[str], input_path: str) -> list[str]:
def listen_to_audio(
config: FrigateConfig,
recordings_info_queue: mp.Queue,
+ camera_metrics: dict[str, CameraMetricsTypes],
process_info: dict[str, FeatureMetricsTypes],
inter_process_communicator: InterProcessCommunicator,
) -> None:
@@ -80,6 +81,7 @@ def listen_to_audio(
audio = AudioEventMaintainer(
camera,
recordings_info_queue,
+ camera_metrics,
process_info,
stop_event,
inter_process_communicator,
@@ -153,6 +155,7 @@ class AudioEventMaintainer(threading.Thread):
self,
camera: CameraConfig,
recordings_info_queue: mp.Queue,
+ camera_metrics: dict[str, CameraMetricsTypes],
feature_metrics: dict[str, FeatureMetricsTypes],
stop_event: mp.Event,
inter_process_communicator: InterProcessCommunicator,
@@ -161,6 +164,7 @@ class AudioEventMaintainer(threading.Thread):
self.name = f"{camera.name}_audio_event_processor"
self.config = camera
self.recordings_info_queue = recordings_info_queue
+ self.camera_metrics = camera_metrics
self.feature_metrics = feature_metrics
self.inter_process_communicator = inter_process_communicator
self.detections: dict[dict[str, any]] = {}
@@ -184,6 +188,9 @@ class AudioEventMaintainer(threading.Thread):
audio_as_float = audio.astype(np.float32)
rms, dBFS = self.calculate_audio_levels(audio_as_float)
+ self.camera_metrics[self.config.name]["audio_rms"].value = rms
+ self.camera_metrics[self.config.name]["audio_dBFS"].value = dBFS
+
# only run audio detection when volume is above min_volume
if rms >= self.config.audio.min_volume:
# add audio info to recordings queue
diff --git a/frigate/stats.py b/frigate/stats.py
index da5eb358e..41ba2c367 100644
--- a/frigate/stats.py
+++ b/frigate/stats.py
@@ -266,6 +266,8 @@ def stats_snapshot(
"pid": pid,
"capture_pid": cpid,
"ffmpeg_pid": ffmpeg_pid,
+ "audio_rms": round(camera_stats["audio_rms"].value, 4),
+ "audio_dBFS": round(camera_stats["audio_dBFS"].value, 4),
}
stats["detectors"] = {}
diff --git a/frigate/types.py b/frigate/types.py
index 60e8d5a14..d97c0e53b 100644
--- a/frigate/types.py
+++ b/frigate/types.py
@@ -23,6 +23,8 @@ class CameraMetricsTypes(TypedDict):
process_fps: Synchronized
read_start: Synchronized
skipped_fps: Synchronized
+ audio_rms: Synchronized
+ audio_dBFS: Synchronized
class PTZMetricsTypes(TypedDict):
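
Once this lands, the per-camera audio levels surface through the stats API. A minimal sketch of reading them (host and port are placeholders, and the nesting of per-camera entries under a `cameras` key is an assumption that may differ between Frigate versions):

```python
import requests

stats = requests.get("http://frigate.local:5000/api/stats", timeout=10).json()

# Per-camera entries carry the new audio_rms / audio_dBFS values alongside fps, pids, etc.
for camera, camera_stats in stats.get("cameras", stats).items():
    if isinstance(camera_stats, dict) and "audio_rms" in camera_stats:
        print(f"{camera}: rms={camera_stats['audio_rms']} dBFS={camera_stats['audio_dBFS']}")
```
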
From 9b687d77cedd5fd7956384da2e2e942ca12f46fa Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 08:44:18 -0600
Subject: [PATCH 14/18] Add support for nvidia decoder and encoder utilization
stats (#8150)
* Add encoder and decoder stats to nvidia hwaccel stats
* Fix
* Fix
---
frigate/stats.py | 2 ++
frigate/util/services.py | 14 ++++++++++++++
web/src/routes/System.jsx | 4 ++++
3 files changed, 20 insertions(+)
diff --git a/frigate/stats.py b/frigate/stats.py
index 41ba2c367..8f8d03ed4 100644
--- a/frigate/stats.py
+++ b/frigate/stats.py
@@ -176,6 +176,8 @@ async def set_gpu_stats(
stats[nvidia_usage[i]["name"]] = {
"gpu": str(round(float(nvidia_usage[i]["gpu"]), 2)) + "%",
"mem": str(round(float(nvidia_usage[i]["mem"]), 2)) + "%",
+ "enc": str(round(float(nvidia_usage[i]["enc"]), 2)) + "%",
+ "dec": str(round(float(nvidia_usage[i]["dec"]), 2)) + "%",
}
else:
diff --git a/frigate/util/services.py b/frigate/util/services.py
index 2ffddcacf..94946434b 100644
--- a/frigate/util/services.py
+++ b/frigate/util/services.py
@@ -293,6 +293,8 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
handle = nvml.nvmlDeviceGetHandleByIndex(i)
meminfo = try_get_info(nvml.nvmlDeviceGetMemoryInfo, handle)
util = try_get_info(nvml.nvmlDeviceGetUtilizationRates, handle)
+ enc = try_get_info(nvml.nvmlDeviceGetEncoderUtilization, handle)
+ dec = try_get_info(nvml.nvmlDeviceGetDecoderUtilization, handle)
if util != "N/A":
gpu_util = util.gpu
else:
@@ -303,10 +305,22 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
else:
gpu_mem_util = -1
+ if enc != "N/A":
+ enc_util = enc[0]
+ else:
+ enc_util = -1
+
+ if dec != "N/A":
+ dec_util = dec[0]
+ else:
+ dec_util = -1
+
results[i] = {
"name": nvml.nvmlDeviceGetName(handle),
"gpu": gpu_util,
"mem": gpu_mem_util,
+ "enc": enc_util,
+ "dec": dec_util,
}
except Exception:
pass
diff --git a/web/src/routes/System.jsx b/web/src/routes/System.jsx
index 602522924..0074763fa 100644
--- a/web/src/routes/System.jsx
+++ b/web/src/routes/System.jsx
@@ -301,12 +301,16 @@ export default function System() {
| GPU % |
Memory % |
+ {'dec' in gpu_usages[gpu] && (Decoder % | )}
+ {'enc' in gpu_usages[gpu] && (Encoder % | )}
| {gpu_usages[gpu]['gpu']} |
{gpu_usages[gpu]['mem']} |
+ {'dec' in gpu_usages[gpu] && ({gpu_usages[gpu]['dec']} | )}
+ {'enc' in gpu_usages[gpu] && ({gpu_usages[gpu]['enc']} | )}
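
For context, the encoder/decoder figures come from NVML's per-device utilization queries. A standalone sketch of the same calls (not Frigate's code; it assumes the `pynvml` package and an NVIDIA GPU are available):

```python
import pynvml

pynvml.nvmlInit()
try:
    for i in range(pynvml.nvmlDeviceGetCount()):
        handle = pynvml.nvmlDeviceGetHandleByIndex(i)
        util = pynvml.nvmlDeviceGetUtilizationRates(handle)
        # Both calls return (utilization %, sampling period in us); only the first is used.
        enc_util, _ = pynvml.nvmlDeviceGetEncoderUtilization(handle)
        dec_util, _ = pynvml.nvmlDeviceGetDecoderUtilization(handle)
        print(f"gpu {i}: gpu={util.gpu}% mem={util.memory}% enc={enc_util}% dec={dec_util}%")
finally:
    pynvml.nvmlShutdown()
```
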
From 34b315cc8c7408328c397b41d29af097047ff668 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 16:03:04 -0600
Subject: [PATCH 15/18] Fix bug forcing rtsp for audio detection ffmpeg process
(#8156)
* Fix forcing rtsp for audio process
* Send as list
---
frigate/events/audio.py | 36 ++++++++++++++++++++++++------------
frigate/ffmpeg_presets.py | 7 -------
2 files changed, 24 insertions(+), 19 deletions(-)
diff --git a/frigate/events/audio.py b/frigate/events/audio.py
index 50e1d9ab0..b96e76af3 100644
--- a/frigate/events/audio.py
+++ b/frigate/events/audio.py
@@ -14,7 +14,7 @@ import requests
from setproctitle import setproctitle
from frigate.comms.inter_process import InterProcessCommunicator
-from frigate.config import CameraConfig, FrigateConfig
+from frigate.config import CameraConfig, CameraInput, FfmpegConfig, FrigateConfig
from frigate.const import (
AUDIO_DURATION,
AUDIO_FORMAT,
@@ -39,13 +39,29 @@ except ModuleNotFoundError:
logger = logging.getLogger(__name__)
-def get_ffmpeg_command(input_args: list[str], input_path: str) -> list[str]:
- return get_ffmpeg_arg_list(
- f"ffmpeg {{}} -i {{}} -f {AUDIO_FORMAT} -ar {AUDIO_SAMPLE_RATE} -ac 1 -y {{}}".format(
- " ".join(input_args),
- input_path,
+def get_ffmpeg_command(ffmpeg: FfmpegConfig) -> list[str]:
+ ffmpeg_input: CameraInput = [i for i in ffmpeg.inputs if "audio" in i.roles][0]
+ input_args = get_ffmpeg_arg_list(ffmpeg.global_args) + (
+ parse_preset_input(ffmpeg_input.input_args, 1)
+ or ffmpeg_input.input_args
+ or parse_preset_input(ffmpeg.input_args, 1)
+ or ffmpeg.input_args
+ )
+ return (
+ ["ffmpeg", "-vn"]
+ + input_args
+ + ["-i"]
+ + [ffmpeg_input.path]
+ + [
+ "-f",
+ f"{AUDIO_FORMAT}",
+ "-ar",
+ f"{AUDIO_SAMPLE_RATE}",
+ "-ac",
+ "1",
+ "-y",
"pipe:",
- )
+ ]
)
@@ -173,11 +189,7 @@ class AudioEventMaintainer(threading.Thread):
self.shape = (int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE)),)
self.chunk_size = int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE * 2))
self.logger = logging.getLogger(f"audio.{self.config.name}")
- self.ffmpeg_cmd = get_ffmpeg_command(
- get_ffmpeg_arg_list(self.config.ffmpeg.global_args)
- + parse_preset_input("preset-rtsp-audio-only", 1),
- [i.path for i in self.config.ffmpeg.inputs if "audio" in i.roles][0],
- )
+ self.ffmpeg_cmd = get_ffmpeg_command(self.config.ffmpeg)
self.logpipe = LogPipe(f"ffmpeg.{self.config.name}.audio")
self.audio_listener = None
diff --git a/frigate/ffmpeg_presets.py b/frigate/ffmpeg_presets.py
index a477c0404..fad68a670 100644
--- a/frigate/ffmpeg_presets.py
+++ b/frigate/ffmpeg_presets.py
@@ -256,13 +256,6 @@ PRESETS_INPUT = {
"-use_wallclock_as_timestamps",
"1",
],
- "preset-rtsp-audio-only": [
- "-rtsp_transport",
- "tcp",
- TIMEOUT_PARAM,
- "5000000",
- "-vn",
- ],
"preset-rtsp-restream": _user_agent_args
+ [
"-rtsp_transport",
From 4c7ea01137d5046879355e35fe9e260d1624d031 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 16:04:38 -0600
Subject: [PATCH 16/18] Don't print ffprobe stdout logs (#8153)
---
frigate/record/maintainer.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/frigate/record/maintainer.py b/frigate/record/maintainer.py
index 5e1889ad1..9aa60585d 100644
--- a/frigate/record/maintainer.py
+++ b/frigate/record/maintainer.py
@@ -355,6 +355,7 @@ class RecordingMaintainer(threading.Thread):
"+faststart",
file_path,
stderr=asyncio.subprocess.PIPE,
+ stdout=asyncio.subprocess.DEVNULL,
)
await p.wait()
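
The pattern above — discard the child's stdout while keeping stderr for error reporting — in a standalone asyncio sketch (not Frigate code; the command and path in the usage comment are placeholders):

```python
import asyncio

async def run_quiet(*cmd: str) -> int:
    # stdout is discarded entirely; stderr is captured so failures can still be logged.
    p = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.DEVNULL,
        stderr=asyncio.subprocess.PIPE,
    )
    _, stderr = await p.communicate()
    if p.returncode != 0:
        print(stderr.decode(errors="ignore"))
    return p.returncode

# asyncio.run(run_quiet("ffprobe", "-v", "error", "/media/frigate/recordings/example.mp4"))
```
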
From fa6c6c50d04ba30abf3fb57d4e7c94ab6d6fd437 Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Fri, 13 Oct 2023 18:17:09 -0600
Subject: [PATCH 17/18] Support ManagedMediaSource and update docs to reflect
iOS 17.1+ supports MSE (#8160)
* Refactor media source handling in MsePlayer.js and Birdseye.jsx to support ManagedMediaSource
* lint
* Update docs to reflect iOS supporting mse
---------
Co-authored-by: Sergey Krashevich
---
docs/docs/configuration/live.md | 10 +++----
web/src/components/MsePlayer.js | 46 +++++++++++++++++++--------------
web/src/routes/Birdseye.jsx | 4 +--
web/src/routes/Camera.jsx | 4 +--
4 files changed, 36 insertions(+), 28 deletions(-)
diff --git a/docs/docs/configuration/live.md b/docs/docs/configuration/live.md
index d5ed1f033..f061d65ff 100644
--- a/docs/docs/configuration/live.md
+++ b/docs/docs/configuration/live.md
@@ -9,11 +9,11 @@ Frigate has different live view options, some of which require the bundled `go2r
Live view options can be selected while viewing the live stream. The options are:
-| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
-| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | -------------------------------------------- |
-| jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
-| mse | low | native | native | yes (depends on audio codec) | yes | not supported on iOS, Firefox is h.264 only |
-| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
+| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
+| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | ------------------------------------------------- |
+| jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
+| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
+| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
### Audio Support
diff --git a/web/src/components/MsePlayer.js b/web/src/components/MsePlayer.js
index ac0062391..6607bc2f4 100644
--- a/web/src/components/MsePlayer.js
+++ b/web/src/components/MsePlayer.js
@@ -157,12 +157,9 @@ class VideoRTC extends HTMLElement {
if (this.ws) this.ws.send(JSON.stringify(value));
}
- codecs(type) {
- const test =
- type === 'mse'
- ? (codec) => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
- : (codec) => this.video.canPlayType(`video/mp4; codecs="${codec}"`);
- return this.CODECS.filter(test).join();
+ /** @param {Function} isSupported */
+ codecs(isSupported) {
+ return this.CODECS.filter(codec => isSupported(`video/mp4; codecs="${codec}"`)).join();
}
/**
@@ -311,7 +308,7 @@ class VideoRTC extends HTMLElement {
const modes = [];
- if (this.mode.indexOf('mse') >= 0 && 'MediaSource' in window) {
+ if (this.mode.indexOf('mse') >= 0 && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
// iPhone
modes.push('mse');
this.onmse();
@@ -363,18 +360,29 @@ class VideoRTC extends HTMLElement {
}
onmse() {
- const ms = new MediaSource();
- ms.addEventListener(
- 'sourceopen',
- () => {
- URL.revokeObjectURL(this.video.src);
- this.send({ type: 'mse', value: this.codecs('mse') });
- },
- { once: true }
- );
+ /** @type {MediaSource} */
+ let ms;
- this.video.src = URL.createObjectURL(ms);
- this.video.srcObject = null;
+ if ('ManagedMediaSource' in window) {
+ const MediaSource = window.ManagedMediaSource;
+
+ ms = new MediaSource();
+ ms.addEventListener('sourceopen', () => {
+ this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
+ }, {once: true});
+
+ this.video.disableRemotePlayback = true;
+ this.video.srcObject = ms;
+ } else {
+ ms = new MediaSource();
+ ms.addEventListener('sourceopen', () => {
+ URL.revokeObjectURL(this.video.src);
+ this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
+ }, {once: true});
+
+ this.video.src = URL.createObjectURL(ms);
+ this.video.srcObject = null;
+ }
this.play();
this.mseCodecs = '';
@@ -580,7 +588,7 @@ class VideoRTC extends HTMLElement {
video2.src = `data:video/mp4;base64,${VideoRTC.btoa(data)}`;
};
- this.send({ type: 'mp4', value: this.codecs('mp4') });
+ this.send({ type: 'mp4', value: this.codecs(this.video.canPlayType) });
}
static btoa(buffer) {
diff --git a/web/src/routes/Birdseye.jsx b/web/src/routes/Birdseye.jsx
index d41e04b04..3ff1ffac1 100644
--- a/web/src/routes/Birdseye.jsx
+++ b/web/src/routes/Birdseye.jsx
@@ -35,7 +35,7 @@ export default function Birdseye() {
let player;
const playerClass = ptzCameras.length || isMaxWidth ? 'w-full' : 'max-w-5xl xl:w-1/2';
if (viewSource == 'mse' && config.birdseye.restream) {
- if ('MediaSource' in window) {
+ if ('MediaSource' in window || 'ManagedMediaSource' in window) {
player = (
@@ -50,7 +50,7 @@ export default function Birdseye() {
player = (
- MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
+ MSE is only supported on iOS 17.1+. You'll need to update iOS if possible, or use the jsmpeg / webRTC streams. See the docs for more info.
);
diff --git a/web/src/routes/Camera.jsx b/web/src/routes/Camera.jsx
index a134209df..9260b41fc 100644
--- a/web/src/routes/Camera.jsx
+++ b/web/src/routes/Camera.jsx
@@ -116,7 +116,7 @@ export default function Camera({ camera }) {
let player;
if (viewMode === 'live') {
if (viewSource == 'mse' && restreamEnabled) {
- if ('MediaSource' in window) {
+ if ('MediaSource' in window || 'ManagedMediaSource' in window) {
player = (
@@ -133,7 +133,7 @@ export default function Camera({ camera }) {
player = (
- MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
+ MSE is only supported on iOS 17.1+. You'll need to update iOS if possible, or use the jsmpeg / webRTC streams. See the docs for more info.
);
From 9ea10f8541811078b9079cd577533bd3e3e27d59 Mon Sep 17 00:00:00 2001
From: Blake Blackshear
Date: Sat, 14 Oct 2023 08:05:44 -0400
Subject: [PATCH 18/18] Don't zero out motion during calibration (#8163)
* don't zero out motion boxes
* define detect resolution to speed up tests
---
frigate/motion/__init__.py | 4 ++
frigate/motion/frigate_motion.py | 3 +
frigate/motion/improved_motion.py | 4 +-
frigate/test/test_config.py | 110 ++++++++++++++++++++++++++++--
frigate/video.py | 27 +++-----
5 files changed, 125 insertions(+), 23 deletions(-)
diff --git a/frigate/motion/__init__.py b/frigate/motion/__init__.py
index 73d2ee1fd..248c37092 100644
--- a/frigate/motion/__init__.py
+++ b/frigate/motion/__init__.py
@@ -20,3 +20,7 @@ class MotionDetector(ABC):
@abstractmethod
def detect(self, frame):
pass
+
+ @abstractmethod
+ def is_calibrating(self):
+ pass
diff --git a/frigate/motion/frigate_motion.py b/frigate/motion/frigate_motion.py
index 71fb35981..dc9c6b9ec 100644
--- a/frigate/motion/frigate_motion.py
+++ b/frigate/motion/frigate_motion.py
@@ -38,6 +38,9 @@ class FrigateMotionDetector(MotionDetector):
self.threshold = threshold
self.contour_area = contour_area
+ def is_calibrating(self):
+ return False
+
def detect(self, frame):
motion_boxes = []
diff --git a/frigate/motion/improved_motion.py b/frigate/motion/improved_motion.py
index b281cbbeb..b9d72da29 100644
--- a/frigate/motion/improved_motion.py
+++ b/frigate/motion/improved_motion.py
@@ -49,6 +49,9 @@ class ImprovedMotionDetector(MotionDetector):
self.contrast_values[:, 1:2] = 255
self.contrast_values_index = 0
+ def is_calibrating(self):
+ return self.calibrating
+
def detect(self, frame):
motion_boxes = []
@@ -141,7 +144,6 @@ class ImprovedMotionDetector(MotionDetector):
# if calibrating or the motion contours are > 80% of the image area (lightning, ir, ptz) recalibrate
if self.calibrating or pct_motion > self.config.lightning_threshold:
- motion_boxes = []
self.calibrating = True
if self.save_images:
diff --git a/frigate/test/test_config.py b/frigate/test/test_config.py
index ba6ecdcf5..ac3c11866 100644
--- a/frigate/test/test_config.py
+++ b/frigate/test/test_config.py
@@ -1027,7 +1027,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
- }
+ },
+ "detect": {
+ "height": 720,
+ "width": 1280,
+ "fps": 5,
+ },
}
},
}
@@ -1082,6 +1087,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"snapshots": {
"height": 100,
},
@@ -1107,7 +1117,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
- }
+ },
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1132,6 +1147,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"snapshots": {
"height": 150,
"enabled": True,
@@ -1160,6 +1180,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1181,7 +1206,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
- }
+ },
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1205,6 +1235,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"rtmp": {
"enabled": True,
},
@@ -1234,6 +1269,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1257,6 +1297,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1278,7 +1323,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
- }
+ },
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1302,6 +1352,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"live": {
"quality": 7,
},
@@ -1329,6 +1384,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1350,7 +1410,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
- }
+ },
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1375,6 +1440,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"timestamp_style": {"position": "bl", "thickness": 4},
}
},
@@ -1400,6 +1470,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1423,6 +1498,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1450,6 +1530,11 @@ class TestConfig(unittest.TestCase):
},
],
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
}
},
}
@@ -1475,6 +1560,11 @@ class TestConfig(unittest.TestCase):
},
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"zones": {
"steps": {
"coordinates": "0,0,0,0",
@@ -1546,6 +1636,11 @@ class TestConfig(unittest.TestCase):
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"onvif": {"autotracking": {"movement_weights": "1.23, 2.34, 0.50"}},
}
},
@@ -1569,6 +1664,11 @@ class TestConfig(unittest.TestCase):
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
+ "detect": {
+ "height": 1080,
+ "width": 1920,
+ "fps": 5,
+ },
"onvif": {"autotracking": {"movement_weights": "1.234, 2.345a"}},
}
},
diff --git a/frigate/video.py b/frigate/video.py
index 97f903816..47e65811d 100755
--- a/frigate/video.py
+++ b/frigate/video.py
@@ -21,7 +21,6 @@ from frigate.log import LogPipe
from frigate.motion import MotionDetector
from frigate.motion.improved_motion import ImprovedMotionDetector
from frigate.object_detection import RemoteObjectDetector
-from frigate.ptz.autotrack import ptz_moving_at_frame_time
from frigate.track import ObjectTracker
from frigate.track.norfair_tracker import NorfairTracker
from frigate.types import PTZMetricsTypes
@@ -777,19 +776,8 @@ def process_frames(
logger.info(f"{camera_name}: frame {frame_time} is not in memory store.")
continue
- # look for motion if enabled and ptz is not moving
- # ptz_moving_at_frame_time() always returns False for
- # non ptz/autotracking cameras
- motion_boxes = (
- motion_detector.detect(frame)
- if motion_enabled.value
- and not ptz_moving_at_frame_time(
- frame_time,
- ptz_metrics["ptz_start_time"].value,
- ptz_metrics["ptz_stop_time"].value,
- )
- else []
- )
+ # look for motion if enabled
+ motion_boxes = motion_detector.detect(frame) if motion_enabled.value else []
regions = []
consolidated_detections = []
@@ -814,8 +802,10 @@ def process_frames(
)
# and it hasn't disappeared
and object_tracker.disappeared[obj["id"]] == 0
- # and it doesn't overlap with any current motion boxes
- and not intersects_any(obj["box"], motion_boxes)
+ # and it doesn't overlap with any current motion boxes when not calibrating
+ and not intersects_any(
+ obj["box"], [] if motion_detector.is_calibrating() else motion_boxes
+ )
]
# get tracked object boxes that aren't stationary
@@ -825,7 +815,10 @@ def process_frames(
if obj["id"] not in stationary_object_ids
]
- combined_boxes = motion_boxes + tracked_object_boxes
+ combined_boxes = tracked_object_boxes
+ # only add in the motion boxes when not calibrating
+ if not motion_detector.is_calibrating():
+ combined_boxes += motion_boxes
cluster_candidates = get_cluster_candidates(
frame_shape, region_min_size, combined_boxes