From dd7820e4ee97c33538751196915a30cd45caeef2 Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Mon, 10 Feb 2025 10:42:35 -0600 Subject: [PATCH] Improve live streaming (#16447) * config file changes * config migrator * stream selection on single camera live view * camera streaming settings dialog * manage persistent group streaming settings * apply streaming settings in camera groups * add ability to clear all streaming settings from settings * docs * update reference config * fixes * clarify docs * use first stream as default in dialog * ensure still image is visible after switching stream type to none * docs * clarify docs * add ability to continue playing stream in background * fix props * put stream selection inside dropdown on desktop * add capabilities to live mode hook * live context menu component * resize observer: only return new dimensions if they've actually changed * pass volume prop to players * fix slider bug, https://github.com/shadcn-ui/ui/issues/1448 * update react-grid-layout * prevent animated transitions on draggable grid layout * add context menu to dashboards * use provider * streaming dialog from context menu * docs * add jsmpeg warning to context menu * audio and two way talk indicators in single camera view * add link to debug view * don't use hook * create manual events from live camera view * maintain grow classes on grid items * fix initial volume state on default dashboard * fix pointer events causing context menu to end up underneath image on iOS * mobile drawer tweaks * stream stats * show settings menu for non-restreamed cameras * consistent settings icon * tweaks * optional stats to fix birdseye player * add toaster to live camera view * fix crash on initial save in streaming dialog * don't require restreaming for context menu streaming settings * add debug view to context menu * stats fixes * update docs * always show stream info when restreamed * update camera streaming dialog * make note of no h265 support for webrtc * docs clarity * ensure docs show streams as a dict * docs clarity * fix css file * tweaks --- docs/docs/configuration/live.md | 80 +- docs/docs/configuration/reference.md | 6 +- frigate/config/camera/live.py | 7 +- frigate/config/config.py | 23 +- frigate/util/config.py | 32 +- web/package-lock.json | 15 +- web/package.json | 2 +- .../dynamic/CameraFeatureToggle.tsx | 2 +- .../components/filter/CameraGroupSelector.tsx | 117 ++- web/src/components/menu/LiveContextMenu.tsx | 302 +++++++ .../overlay/detail/SearchDetailDialog.tsx | 3 +- .../components/player/BirdseyeLivePlayer.tsx | 1 + web/src/components/player/JSMpegPlayer.tsx | 61 +- web/src/components/player/LivePlayer.tsx | 66 +- web/src/components/player/MsePlayer.tsx | 105 ++- web/src/components/player/PlayerStats.tsx | 100 +++ web/src/components/player/WebRTCPlayer.tsx | 87 +- .../settings/CameraStreamingDialog.tsx | 371 +++++++++ web/src/components/ui/slider.tsx | 12 +- web/src/context/providers.tsx | 7 +- .../context/streaming-settings-provider.tsx | 68 ++ web/src/hooks/resize-observer.ts | 10 +- web/src/hooks/use-camera-live-mode.ts | 87 +- web/src/index.css | 5 + web/src/pages/Settings.tsx | 16 + web/src/types/frigateConfig.ts | 34 +- web/src/types/live.ts | 14 + web/src/views/live/DraggableGridLayout.tsx | 318 +++++-- web/src/views/live/LiveCameraView.tsx | 783 +++++++++++++++++- web/src/views/live/LiveDashboardView.tsx | 115 ++- web/src/views/settings/UiSettingsView.tsx | 51 +- 31 files changed, 2681 insertions(+), 219 deletions(-) 
create mode 100644 web/src/components/menu/LiveContextMenu.tsx
create mode 100644 web/src/components/player/PlayerStats.tsx
create mode 100644 web/src/components/settings/CameraStreamingDialog.tsx
create mode 100644 web/src/context/streaming-settings-provider.tsx

diff --git a/docs/docs/configuration/live.md b/docs/docs/configuration/live.md
index 22789181a..bc19d3caa 100644
--- a/docs/docs/configuration/live.md
+++ b/docs/docs/configuration/live.md
@@ -3,9 +3,9 @@ id: live
 title: Live View
 ---
 
-Frigate intelligently displays your camera streams on the Live view dashboard. Your camera images update once per minute when no detectable activity is occurring to conserve bandwidth and resources. As soon as any motion is detected, cameras seamlessly switch to a live stream.
+Frigate intelligently displays your camera streams on the Live view dashboard. By default, Frigate employs "smart streaming", where camera images update once per minute when no detectable activity is occurring to conserve bandwidth and resources. As soon as any motion or active objects are detected, cameras seamlessly switch to a live stream.
 
-## Live View technologies
+### Live View technologies
 
 Frigate intelligently uses three different streaming technologies to display your camera streams on the dashboard and the single camera view, switching between available modes based on network bandwidth, player errors, or required features like two-way talk. The highest quality and fluency of the Live view requires the bundled `go2rtc` to be configured as shown in the [step by step guide](/guides/configuring_go2rtc).
 
@@ -51,19 +51,32 @@ go2rtc:
     - ffmpeg:rtsp://192.168.1.5:554/live0#video=copy
 ```

-### Setting Stream For Live UI
+### Setting Streams For Live UI
 
-There may be some cameras that you would prefer to use the sub stream for live view, but the main stream for recording. This can be done via `live -> stream_name`.
+You can configure Frigate to allow manual selection of the stream you want to view in the Live UI. For example, you may want to view your camera's substream on mobile devices, but the full resolution stream on desktop devices. Setting the `live -> streams` list will populate a dropdown in the UI's Live view that allows you to choose between the streams. This stream setting is _per device_ and is saved in your browser's local storage.
+
+Additionally, when creating and editing camera groups in the UI, you can choose the stream you want to use for your camera group's Live dashboard.
+
+:::note
+
+Frigate's default dashboard ("All Cameras") will always use the first entry you've defined in `streams:` when playing live streams from your cameras.
+
+:::
+
+Configure the `streams` option with a "friendly name" for your stream followed by the go2rtc stream name.
+
+Using Frigate's internal version of go2rtc is required to use this feature. You cannot specify paths in the `streams` configuration, only go2rtc stream names.
 
 ```yaml
 go2rtc:
   streams:
     test_cam:
-      - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio.
+      - rtsp://192.168.1.5:554/live_main # <- stream which supports video & aac audio.
       - "ffmpeg:test_cam#audio=opus" # <- copy of the stream which transcodes audio to opus for webrtc
     test_cam_sub:
-      - rtsp://192.168.1.5:554/substream # <- stream which supports video & aac audio.
-      - "ffmpeg:test_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus for webrtc
+      - rtsp://192.168.1.5:554/live_sub # <- stream which supports video & aac audio.
+    test_cam_another_sub:
+      - rtsp://192.168.1.5:554/live_alt # <- stream which supports video & aac audio.
 
 cameras:
   test_cam:
@@ -80,7 +93,10 @@ cameras:
         roles:
           - detect
     live:
-      stream_name: test_cam_sub
+      streams: # <--- Multiple streams for Frigate 0.16 and later
+        Main Stream: test_cam # <--- Specify a "friendly name" followed by the go2rtc stream name
+        Sub Stream: test_cam_sub
+        Special Stream: test_cam_another_sub
 ```
 
 ### WebRTC extra configuration:
 
@@ -101,6 +117,7 @@ WebRTC works by creating a TCP or UDP connection on port `8555`. However, it req
   ```
 
 - For access through Tailscale, the Frigate system's Tailscale IP must be added as a WebRTC candidate. Tailscale IPs all start with `100.`, and are reserved within the `100.64.0.0/10` CIDR block.
+- Note that WebRTC does not support H.265.
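+- For reference, WebRTC candidates are configured under `go2rtc` in your Frigate config. A minimal sketch, assuming your Frigate host's LAN IP is 192.168.1.10 (substitute your own):
+
+  ```yaml
+  go2rtc:
+    webrtc:
+      candidates:
+        - 192.168.1.10:8555 # <- placeholder internal IP; use your Frigate host's IP
+        - stun:8555 # <- STUN, for access from outside the local network
+  ```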
 
 :::tip
 
@@ -148,3 +165,50 @@ For devices that support two way talk, Frigate can be configured to use the feat
 - For the Home Assistant Frigate card, [follow the docs](https://github.com/dermotduffy/frigate-hass-card?tab=readme-ov-file#using-2-way-audio) for the correct source.
 
 To use the Reolink Doorbell with two way talk, you should use the [recommended Reolink configuration](/configuration/camera_specific#reolink-doorbell)
+
+### Streaming options on camera group dashboards
+
+Frigate provides a dialog in the Camera Group Edit pane with several options for streaming on a camera group's dashboard. These settings are _per device_ and are saved in your device's local storage.
+
+- Stream selection using the `live -> streams` configuration option (see _Setting Streams For Live UI_ above)
+- Streaming type:
+  - _No streaming_: Camera images will only update once per minute and no live streaming will occur.
+  - _Smart Streaming_ (default, recommended setting): Smart streaming will update your camera image once per minute when no detectable activity is occurring to conserve bandwidth and resources, since a static picture is the same as a streaming image with no motion or objects. When motion or objects are detected, the image seamlessly switches to a live stream.
+  - _Continuous Streaming_: Camera image will always be a live stream when visible on the dashboard, even if no activity is being detected. Continuous streaming may cause high bandwidth usage and performance issues. **Use with caution.**
+- _Compatibility mode_: Enable this option only if your camera's live stream is displaying color artifacts and has a diagonal line on the right side of the image. Before enabling this, try setting your camera's `detect` width and height to a standard aspect ratio (for example: 640x352 becomes 640x360, 800x443 becomes 800x450, and 2688x1520 becomes 2688x1512). Depending on your browser and device, more than a few cameras in compatibility mode may not be supported, so only use this option if changing your config fails to resolve the color artifacts and diagonal line.
+
+:::note
+
+The default dashboard ("All Cameras") will always use Smart Streaming and the first entry set in your `streams` configuration, if defined. Use a camera group if you want to change any of these settings from the defaults.
+
+:::
+
+## Live view FAQ
+
+1. Why don't I have audio in my Live view?
+   You must use go2rtc to hear audio in your live streams. If you have go2rtc already configured, you need to ensure your camera is sending PCMA/PCMU or AAC audio. If you can't change your camera's audio codec, you need to [transcode the audio](https://github.com/AlexxIT/go2rtc?tab=readme-ov-file#source-ffmpeg) using go2rtc.
+
+   Note that the low bandwidth mode player is a video-only stream. You should not expect to hear audio when in low bandwidth mode, even if you've set up go2rtc.
+
+2. Frigate shows that my live stream is in "low bandwidth mode". What does this mean?
+   Frigate intelligently selects the live streaming technology based on a number of factors (user-selected modes like two-way talk, camera settings, browser capabilities, and available bandwidth) and prioritizes showing an actual up-to-date live view of your camera's stream as quickly as possible.
+
+   When you have go2rtc configured, Live view initially attempts to load and play back your stream with a clearer, fluent stream technology (MSE). An initial timeout, a low bandwidth condition that would cause buffering of the stream, or decoding errors in the stream will cause Frigate to switch to the stream defined by the `detect` role, using the jsmpeg format. This is what the UI labels as "low bandwidth mode". On Live dashboards, the mode will automatically reset when smart streaming is configured and activity stops. You can also try using the _Reset_ button to force a reload of your stream.
+
+   If you are still experiencing Frigate falling back to low bandwidth mode, you may need to adjust your camera's settings per the recommendations above or ensure you have enough bandwidth available.
+
+3. It doesn't seem like my cameras are streaming on the Live dashboard. Why?
+   On the default Live dashboard ("All Cameras"), your camera images will update once per minute when no detectable activity is occurring to conserve bandwidth and resources. As soon as any activity is detected, cameras seamlessly switch to a full-resolution live stream. If you want to customize this behavior, use a camera group.
+
+4. I see a strange diagonal line on my live view, but my recordings look fine. How can I fix it?
+   This is caused by incorrect dimensions set in your `detect` width or height (or dimensions that were incorrectly auto-detected), causing the jsmpeg player's rendering engine to display a slightly distorted image. You should enlarge the width and height of your `detect` resolution up to a standard aspect ratio (for example: 640x352 becomes 640x360, 800x443 becomes 800x450, and 2688x1520 becomes 2688x1512); a config sketch follows this item. If changing the resolution to match a standard (4:3, 16:9, or 32:9, etc.) aspect ratio does not solve the issue, you can enable "compatibility mode" in your camera group dashboard's stream settings. Depending on your browser and device, more than a few cameras in compatibility mode may not be supported, so only use this option if changing your `detect` width and height fails to resolve the color artifacts and diagonal line.
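+
+   For example, a minimal sketch of rounding a non-standard `detect` resolution up to a 16:9 size (the camera name here is a placeholder):
+
+   ```yaml
+   cameras:
+     test_cam:
+       detect:
+         width: 640 # unchanged
+         height: 360 # rounded up from 352 to a standard 16:9 height
+   ```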
+
+5. How does "smart streaming" work?
+   Because a static image of a scene looks exactly the same as a live stream with no motion or activity, smart streaming updates your camera images once per minute when no detectable activity is occurring to conserve bandwidth and resources. As soon as any activity (motion or object/audio detection) occurs, cameras seamlessly switch to a live stream.
+
+   This static image is pulled from the stream defined in your config with the `detect` role. When activity is detected, images from the `detect` stream immediately begin updating at ~5 frames per second so you can see the activity until the live player is loaded and begins playing. This usually only takes a second or two. If the live player times out, buffers, or has streaming errors, the jsmpeg player is loaded and plays a video-only stream from the `detect` role. When activity ends, the players are destroyed and a static image is displayed until activity is detected again, and the process repeats.
+
+   This is Frigate's default and recommended setting because it results in significant bandwidth savings, especially for high resolution cameras.
+
+6. I have unmuted some cameras on my dashboard, but I do not hear sound. Why?
+   If your camera is streaming (as indicated by a red dot in the upper right, or if it has been set to continuous streaming mode), your browser may be blocking audio until you interact with the page. This is an intentional browser limitation. See [this article](https://developer.mozilla.org/en-US/docs/Web/Media/Autoplay_guide#autoplay_availability). Many browsers have a whitelist feature to change this behavior.
diff --git a/docs/docs/configuration/reference.md b/docs/docs/configuration/reference.md
index 3c055fadf..30b14f687 100644
--- a/docs/docs/configuration/reference.md
+++ b/docs/docs/configuration/reference.md
@@ -572,10 +572,12 @@ go2rtc:
 
 # Optional: Live stream configuration for WebUI.
 # NOTE: Can be overridden at the camera level
 live:
-  # Optional: Set the name of the stream configured in go2rtc
+  # Optional: Set the streams configured in go2rtc
   # that should be used for live view in frigate WebUI. (default: name of camera)
   # NOTE: In most cases this should be set at the camera level only.
-  stream_name: camera_name
+  streams:
+    main_stream: main_stream_name
+    sub_stream: sub_stream_name
   # Optional: Set the height of the jsmpeg stream. (default: 720)
   # This must be less than or equal to the height of the detect stream. Lower resolutions
   # reduce bandwidth required for viewing the jsmpeg stream. Width is computed to match known aspect ratio.
diff --git a/frigate/config/camera/live.py b/frigate/config/camera/live.py
index 9f15f2645..13ae2d04f 100644
--- a/frigate/config/camera/live.py
+++ b/frigate/config/camera/live.py
@@ -1,3 +1,5 @@
+from typing import Dict
+
 from pydantic import Field
 
 from ..base import FrigateBaseModel
@@ -6,6 +8,9 @@ __all__ = ["CameraLiveConfig"]
 
 class CameraLiveConfig(FrigateBaseModel):
-    stream_name: str = Field(default="", title="Name of restream to use as live view.")
+    streams: Dict[str, str] = Field(
+        # a Dict-typed field needs a dict default, not a list
+        default_factory=dict,
+        title="Friendly names and restream names to use for live view.",
+    )
     height: int = Field(default=720, title="Live camera view height")
     quality: int = Field(default=8, ge=1, le=31, title="Live camera view quality")
diff --git a/frigate/config/config.py b/frigate/config/config.py
index c4c502d26..694a3389f 100644
--- a/frigate/config/config.py
+++ b/frigate/config/config.py
@@ -199,17 +199,18 @@ def verify_config_roles(camera_config: CameraConfig) -> None:
         )
 
-def verify_valid_live_stream_name(
+def verify_valid_live_stream_names(
     frigate_config: FrigateConfig, camera_config: CameraConfig
 ) -> ValueError | None:
     """Verify that a restream exists to use for live view."""
-    if (
-        camera_config.live.stream_name
-        not in frigate_config.go2rtc.model_dump().get("streams", {}).keys()
-    ):
-        return ValueError(
-            f"No restream with name {camera_config.live.stream_name} exists for camera {camera_config.name}."
-        )
+    for _, stream_name in camera_config.live.streams.items():
+        if (
+            stream_name
+            not in frigate_config.go2rtc.model_dump().get("streams", {}).keys()
+        ):
+            return ValueError(
+                f"No restream with name {stream_name} exists for camera {camera_config.name}."
+ ) def verify_recording_retention(camera_config: CameraConfig) -> None: @@ -586,15 +587,15 @@ class FrigateConfig(FrigateBaseModel): zone.generate_contour(camera_config.frame_shape) # Set live view stream if none is set - if not camera_config.live.stream_name: - camera_config.live.stream_name = name + if not camera_config.live.streams: + camera_config.live.streams = {name: name} # generate the ffmpeg commands camera_config.create_ffmpeg_cmds() self.cameras[name] = camera_config verify_config_roles(camera_config) - verify_valid_live_stream_name(self, camera_config) + verify_valid_live_stream_names(self, camera_config) verify_recording_retention(camera_config) verify_recording_segments_setup_with_reasonable_time(camera_config) verify_zone_objects_are_tracked(camera_config) diff --git a/frigate/util/config.py b/frigate/util/config.py index a8664ea4e..5b40fe37b 100644 --- a/frigate/util/config.py +++ b/frigate/util/config.py @@ -13,7 +13,7 @@ from frigate.util.services import get_video_properties logger = logging.getLogger(__name__) -CURRENT_CONFIG_VERSION = "0.15-1" +CURRENT_CONFIG_VERSION = "0.16-0" DEFAULT_CONFIG_FILE = "/config/config.yml" @@ -84,6 +84,13 @@ def migrate_frigate_config(config_file: str): yaml.dump(new_config, f) previous_version = "0.15-1" + if previous_version < "0.16-0": + logger.info(f"Migrating frigate config from {previous_version} to 0.16-0...") + new_config = migrate_016_0(config) + with open(config_file, "w") as f: + yaml.dump(new_config, f) + previous_version = "0.16-0" + logger.info("Finished frigate config migration...") @@ -289,6 +296,29 @@ def migrate_015_1(config: dict[str, dict[str, any]]) -> dict[str, dict[str, any] return new_config +def migrate_016_0(config: dict[str, dict[str, any]]) -> dict[str, dict[str, any]]: + """Handle migrating frigate config to 0.16-0""" + new_config = config.copy() + + for name, camera in config.get("cameras", {}).items(): + camera_config: dict[str, dict[str, any]] = camera.copy() + + live_config = camera_config.get("live", {}) + if "stream_name" in live_config: + # Migrate from live -> stream_name to live -> streams -> dict + stream_name = live_config["stream_name"] + live_config["streams"] = {stream_name: stream_name} + + del live_config["stream_name"] + + camera_config["live"] = live_config + + new_config["cameras"][name] = camera_config + + new_config["version"] = "0.16-0" + return new_config + + def get_relative_coordinates( mask: Optional[Union[str, list]], frame_shape: tuple[int, int] ) -> Union[str, list]: diff --git a/web/package-lock.json b/web/package-lock.json index 3ced33ffe..119fc79ea 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -54,7 +54,7 @@ "react-day-picker": "^8.10.1", "react-device-detect": "^2.2.3", "react-dom": "^18.3.1", - "react-grid-layout": "^1.4.4", + "react-grid-layout": "^1.5.0", "react-hook-form": "^7.52.1", "react-icons": "^5.2.1", "react-konva": "^18.2.10", @@ -5120,7 +5120,8 @@ "node_modules/fast-equals": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-4.0.3.tgz", - "integrity": "sha512-G3BSX9cfKttjr+2o1O22tYMLq0DPluZnYtq1rXumE1SpL/F/SLIfHx08WYQoWSIpeMYf8sRbJ8++71+v6Pnxfg==" + "integrity": "sha512-G3BSX9cfKttjr+2o1O22tYMLq0DPluZnYtq1rXumE1SpL/F/SLIfHx08WYQoWSIpeMYf8sRbJ8++71+v6Pnxfg==", + "license": "MIT" }, "node_modules/fast-glob": { "version": "3.3.2", @@ -7275,9 +7276,10 @@ } }, "node_modules/react-grid-layout": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/react-grid-layout/-/react-grid-layout-1.4.4.tgz", - 
"integrity": "sha512-7+Lg8E8O8HfOH5FrY80GCIR1SHTn2QnAYKh27/5spoz+OHhMmEhU/14gIkRzJOtympDPaXcVRX/nT1FjmeOUmQ==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/react-grid-layout/-/react-grid-layout-1.5.0.tgz", + "integrity": "sha512-WBKX7w/LsTfI99WskSu6nX2nbJAUD7GD6nIXcwYLyPpnslojtmql2oD3I2g5C3AK8hrxIarYT8awhuDIp7iQ5w==", + "license": "MIT", "dependencies": { "clsx": "^2.0.0", "fast-equals": "^4.0.3", @@ -7624,7 +7626,8 @@ "node_modules/resize-observer-polyfill": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz", - "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" + "integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==", + "license": "MIT" }, "node_modules/resolve": { "version": "1.22.8", diff --git a/web/package.json b/web/package.json index 6b3ec1e44..d0bdd01d4 100644 --- a/web/package.json +++ b/web/package.json @@ -60,7 +60,7 @@ "react-day-picker": "^8.10.1", "react-device-detect": "^2.2.3", "react-dom": "^18.3.1", - "react-grid-layout": "^1.4.4", + "react-grid-layout": "^1.5.0", "react-hook-form": "^7.52.1", "react-icons": "^5.2.1", "react-konva": "^18.2.10", diff --git a/web/src/components/dynamic/CameraFeatureToggle.tsx b/web/src/components/dynamic/CameraFeatureToggle.tsx index 4b9dabe95..8a284b82f 100644 --- a/web/src/components/dynamic/CameraFeatureToggle.tsx +++ b/web/src/components/dynamic/CameraFeatureToggle.tsx @@ -40,9 +40,9 @@ export default function CameraFeatureToggle({
("config"); + const { allGroupsStreamingSettings, setAllGroupsStreamingSettings } = + useStreamingSettings(); + + const [groupStreamingSettings, setGroupStreamingSettings] = + useState( + allGroupsStreamingSettings[editingGroup?.[0] ?? ""], + ); + + const [openCamera, setOpenCamera] = useState(); + const birdseyeConfig = useMemo(() => config?.birdseye, [config]); const formSchema = z.object({ @@ -656,6 +675,16 @@ export function CameraGroupEdit({ setIsLoading(true); + // update streaming settings + const updatedSettings: AllGroupsStreamingSettings = { + ...Object.fromEntries( + Object.entries(allGroupsStreamingSettings || {}).filter( + ([key]) => key !== editingGroup?.[0], + ), + ), + [values.name]: groupStreamingSettings, + }; + let renamingQuery = ""; if (editingGroup && editingGroup[0] !== values.name) { renamingQuery = `camera_groups.${editingGroup[0]}&`; @@ -679,7 +708,7 @@ export function CameraGroupEdit({ requires_restart: 0, }, ) - .then((res) => { + .then(async (res) => { if (res.status === 200) { toast.success(`Camera group (${values.name}) has been saved.`, { position: "top-center", @@ -688,6 +717,7 @@ export function CameraGroupEdit({ if (onSave) { onSave(); } + setAllGroupsStreamingSettings(updatedSettings); } else { toast.error(`Failed to save config changes: ${res.statusText}`, { position: "top-center", @@ -704,7 +734,16 @@ export function CameraGroupEdit({ setIsLoading(false); }); }, - [currentGroups, setIsLoading, onSave, updateConfig, editingGroup], + [ + currentGroups, + setIsLoading, + onSave, + updateConfig, + editingGroup, + groupStreamingSettings, + allGroupsStreamingSettings, + setAllGroupsStreamingSettings, + ], ); const form = useForm>({ @@ -762,16 +801,66 @@ export function CameraGroupEdit({ ), ].map((camera) => ( - { - const updatedCameras = checked - ? [...(field.value || []), camera] - : (field.value || []).filter((c) => c !== camera); - form.setValue("cameras", updatedCameras); - }} - /> +
+                      <div
+                        key={camera}
+                        className="flex w-full items-center justify-between gap-2"
+                      >
+                        {camera !== "birdseye" && (
+                          <Dialog
+                            open={openCamera === camera}
+                            onOpenChange={(isOpen) =>
+                              setOpenCamera(isOpen ? camera : null)
+                            }
+                          >
+                            <DialogTrigger asChild>
+                              <Button
+                                variant="ghost"
+                                size="icon"
+                                aria-label="Camera streaming settings"
+                              >
+                                <LuSettings className="size-4" />
+                              </Button>
+                            </DialogTrigger>
+                            <CameraStreamingDialog
+                              camera={camera}
+                              groupStreamingSettings={groupStreamingSettings}
+                              setGroupStreamingSettings={setGroupStreamingSettings}
+                              setIsDialogOpen={(isOpen) =>
+                                setOpenCamera(isOpen ? camera : null)
+                              }
+                            />
+                          </Dialog>
+                        )}
+                        <FilterSwitch
+                          isChecked={field.value?.includes(camera) ?? false}
+                          label={camera.replaceAll("_", " ")}
+                          onCheckedChange={(checked) => {
+                            const updatedCameras = checked
+                              ? [...(field.value || []), camera]
+                              : (field.value || []).filter((c) => c !== camera);
+                            form.setValue("cameras", updatedCameras);
+                          }}
+                        />
+                      </div>
))} diff --git a/web/src/components/menu/LiveContextMenu.tsx b/web/src/components/menu/LiveContextMenu.tsx new file mode 100644 index 000000000..f5222592d --- /dev/null +++ b/web/src/components/menu/LiveContextMenu.tsx @@ -0,0 +1,302 @@ +import { + ReactNode, + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from "react"; +import { + ContextMenu, + ContextMenuContent, + ContextMenuItem, + ContextMenuSeparator, + ContextMenuTrigger, +} from "@/components/ui/context-menu"; +import { + MdVolumeDown, + MdVolumeMute, + MdVolumeOff, + MdVolumeUp, +} from "react-icons/md"; +import { Dialog } from "@/components/ui/dialog"; +import { VolumeSlider } from "@/components/ui/slider"; +import { CameraStreamingDialog } from "../settings/CameraStreamingDialog"; +import { + AllGroupsStreamingSettings, + GroupStreamingSettings, +} from "@/types/frigateConfig"; +import { useStreamingSettings } from "@/context/streaming-settings-provider"; +import { IoIosWarning } from "react-icons/io"; +import { cn } from "@/lib/utils"; +import { useNavigate } from "react-router-dom"; + +type LiveContextMenuProps = { + className?: string; + camera: string; + streamName: string; + cameraGroup?: string; + preferredLiveMode: string; + isRestreamed: boolean; + supportsAudio: boolean; + audioState: boolean; + toggleAudio: () => void; + volumeState?: number; + setVolumeState: (volumeState: number) => void; + muteAll: () => void; + unmuteAll: () => void; + statsState: boolean; + toggleStats: () => void; + resetPreferredLiveMode: () => void; + children?: ReactNode; +}; +export default function LiveContextMenu({ + className, + camera, + streamName, + cameraGroup, + preferredLiveMode, + isRestreamed, + supportsAudio, + audioState, + toggleAudio, + volumeState, + setVolumeState, + muteAll, + unmuteAll, + statsState, + toggleStats, + resetPreferredLiveMode, + children, +}: LiveContextMenuProps) { + const [showSettings, setShowSettings] = useState(false); + + // streaming settings + + const { allGroupsStreamingSettings, setAllGroupsStreamingSettings } = + useStreamingSettings(); + + const [groupStreamingSettings, setGroupStreamingSettings] = + useState( + allGroupsStreamingSettings[cameraGroup ?? ""], + ); + + useEffect(() => { + if (cameraGroup) { + setGroupStreamingSettings(allGroupsStreamingSettings[cameraGroup]); + } + // set individual group when all groups changes + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [allGroupsStreamingSettings]); + + const onSave = useCallback( + (settings: GroupStreamingSettings) => { + if (!cameraGroup || !allGroupsStreamingSettings) { + return; + } + + const updatedSettings: AllGroupsStreamingSettings = { + ...Object.fromEntries( + Object.entries(allGroupsStreamingSettings || {}).filter( + ([key]) => key !== cameraGroup, + ), + ), + [cameraGroup]: { + ...Object.fromEntries( + Object.entries(settings).map(([cameraName, cameraSettings]) => [ + cameraName, + cameraName === camera + ? { + ...cameraSettings, + playAudio: audioState ?? cameraSettings.playAudio ?? false, + volume: volumeState ?? cameraSettings.volume ?? 1, + } + : cameraSettings, + ]), + ), + // Add the current camera if it doesn't exist + ...(!settings[camera] + ? { + [camera]: { + streamName: streamName, + streamType: "smart", + compatibilityMode: false, + playAudio: audioState, + volume: volumeState ?? 
1, + }, + } + : {}), + }, + }; + + setAllGroupsStreamingSettings?.(updatedSettings); + }, + [ + camera, + streamName, + cameraGroup, + allGroupsStreamingSettings, + setAllGroupsStreamingSettings, + audioState, + volumeState, + ], + ); + + // ui + + const audioControlsUsed = useRef(false); + + const VolumeIcon = useMemo(() => { + if (!volumeState || volumeState == 0.0 || !audioState) { + return MdVolumeOff; + } else if (volumeState <= 0.33) { + return MdVolumeMute; + } else if (volumeState <= 0.67) { + return MdVolumeDown; + } else { + return MdVolumeUp; + } + // only update when specific fields change + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [volumeState, audioState]); + + const handleVolumeIconClick = (e: React.MouseEvent) => { + e.stopPropagation(); + audioControlsUsed.current = true; + toggleAudio(); + }; + + const handleVolumeChange = (value: number[]) => { + audioControlsUsed.current = true; + setVolumeState(value[0]); + }; + + const handleOpenChange = (open: boolean) => { + if (!open && audioControlsUsed.current) { + onSave(groupStreamingSettings); + audioControlsUsed.current = false; + } + }; + + // navigate for debug view + + const navigate = useNavigate(); + + return ( +
+    <div className={cn("w-full", className)}>
+      <ContextMenu key={camera} onOpenChange={handleOpenChange}>
+        <ContextMenuTrigger>{children}</ContextMenuTrigger>
+        <ContextMenuContent>
+          <div className="flex flex-col items-start gap-1 py-1 pl-2">
+            <div className="text-md capitalize text-primary-variant">
+              {camera.replaceAll("_", " ")}
+            </div>
+            {preferredLiveMode == "jsmpeg" && isRestreamed && (
+              <div className="flex flex-row items-center gap-1">
+                <IoIosWarning className="mr-1 size-4 text-danger" />
+                <p className="mr-2 text-xs">Low-bandwidth mode</p>
+              </div>
+            )}
+          </div>
+          <ContextMenuSeparator />
+          {preferredLiveMode != "jsmpeg" && isRestreamed && supportsAudio && (
+            <>
+              <div className="flex flex-col gap-1 p-2">
+                <div className="flex items-center gap-2">
+                  <VolumeIcon
+                    className="size-5 cursor-pointer"
+                    onClick={handleVolumeIconClick}
+                  />
+                  <div className="text-primary">Audio</div>
+                </div>
+                <VolumeSlider
+                  disabled={!audioState}
+                  className="my-2"
+                  value={[volumeState ?? 0]}
+                  min={0}
+                  max={1}
+                  step={0.02}
+                  onValueChange={handleVolumeChange}
+                />
+              </div>
+              <ContextMenuSeparator />
+            </>
+          )}
+          <ContextMenuItem>
+            <div
+              className="flex w-full cursor-pointer items-center justify-start gap-2"
+              onClick={muteAll}
+            >
+              <div className="text-primary">Mute All Cameras</div>
+            </div>
+          </ContextMenuItem>
+          <ContextMenuItem>
+            <div
+              className="flex w-full cursor-pointer items-center justify-start gap-2"
+              onClick={unmuteAll}
+            >
+              <div className="text-primary">Unmute All Cameras</div>
+            </div>
+          </ContextMenuItem>
+          <ContextMenuSeparator />
+          <ContextMenuItem>
+            <div
+              className="flex w-full cursor-pointer items-center justify-start gap-2"
+              onClick={toggleStats}
+            >
+              <div className="text-primary">
+                {statsState ? "Hide" : "Show"} Stream Stats
+              </div>
+            </div>
+          </ContextMenuItem>
+          <ContextMenuSeparator />
+          <ContextMenuItem>
+            <div
+              className="flex w-full cursor-pointer items-center justify-start gap-2"
+              onClick={() => navigate(`/settings?page=debug&camera=${camera}`)}
+            >
+              <div className="text-primary">Debug View</div>
+            </div>
+          </ContextMenuItem>
+          {cameraGroup && cameraGroup !== "default" && (
+            <>
+              <ContextMenuSeparator />
+              <ContextMenuItem>
+                <div
+                  className="flex w-full cursor-pointer items-center justify-start gap-2"
+                  onClick={() => setShowSettings(true)}
+                >
+                  <div className="text-primary">Streaming Settings</div>
+                </div>
+              </ContextMenuItem>
+            </>
+          )}
+          {preferredLiveMode == "jsmpeg" && isRestreamed && (
+            <>
+              <ContextMenuSeparator />
+              <ContextMenuItem>
+                <div
+                  className="flex w-full cursor-pointer items-center justify-start gap-2"
+                  onClick={resetPreferredLiveMode}
+                >
+                  <div className="text-primary">Reset</div>
+                </div>
+              </ContextMenuItem>
+            </>
+          )}
+        </ContextMenuContent>
+      </ContextMenu>
+      <Dialog open={showSettings} onOpenChange={setShowSettings}>
+        <CameraStreamingDialog
+          camera={camera}
+          groupStreamingSettings={groupStreamingSettings}
+          setGroupStreamingSettings={setGroupStreamingSettings}
+          setIsDialogOpen={setShowSettings}
+          onSave={onSave}
+        />
+      </Dialog>
+    </div>
+ ); +} diff --git a/web/src/components/overlay/detail/SearchDetailDialog.tsx b/web/src/components/overlay/detail/SearchDetailDialog.tsx index 45aabb07c..f15627b71 100644 --- a/web/src/components/overlay/detail/SearchDetailDialog.tsx +++ b/web/src/components/overlay/detail/SearchDetailDialog.tsx @@ -673,7 +673,8 @@ export function ObjectSnapshotTab({ {search.data.type == "object" && search.plus_id !== "not_enabled" && - search.end_time && ( + search.end_time && + search.label != "on_demand" && (
diff --git a/web/src/components/player/BirdseyeLivePlayer.tsx b/web/src/components/player/BirdseyeLivePlayer.tsx index 2666ac9f7..286f19216 100644 --- a/web/src/components/player/BirdseyeLivePlayer.tsx +++ b/web/src/components/player/BirdseyeLivePlayer.tsx @@ -58,6 +58,7 @@ export default function BirdseyeLivePlayer({ height={birdseyeConfig.height} containerRef={containerRef} playbackEnabled={true} + useWebGL={true} /> ); } else { diff --git a/web/src/components/player/JSMpegPlayer.tsx b/web/src/components/player/JSMpegPlayer.tsx index 401e85869..3753a9e46 100644 --- a/web/src/components/player/JSMpegPlayer.tsx +++ b/web/src/components/player/JSMpegPlayer.tsx @@ -1,6 +1,7 @@ import { baseUrl } from "@/api/baseUrl"; import { useResizeObserver } from "@/hooks/resize-observer"; import { cn } from "@/lib/utils"; +import { PlayerStatsType } from "@/types/live"; // @ts-expect-error we know this doesn't have types import JSMpeg from "@cycjimmy/jsmpeg-player"; import React, { useEffect, useMemo, useRef, useState } from "react"; @@ -12,6 +13,8 @@ type JSMpegPlayerProps = { height: number; containerRef: React.MutableRefObject; playbackEnabled: boolean; + useWebGL: boolean; + setStats?: (stats: PlayerStatsType) => void; onPlaying?: () => void; }; @@ -22,6 +25,8 @@ export default function JSMpegPlayer({ className, containerRef, playbackEnabled, + useWebGL = false, + setStats, onPlaying, }: JSMpegPlayerProps) { const url = `${baseUrl.replace(/^http/, "ws")}live/jsmpeg/${camera}`; @@ -33,6 +38,9 @@ export default function JSMpegPlayer({ const [hasData, setHasData] = useState(false); const hasDataRef = useRef(hasData); const [dimensionsReady, setDimensionsReady] = useState(false); + const bytesReceivedRef = useRef(0); + const lastTimestampRef = useRef(Date.now()); + const statsIntervalRef = useRef(null); const selectedContainerRef = useMemo( () => (containerRef.current ? 
containerRef : internalContainerRef), @@ -111,6 +119,8 @@ export default function JSMpegPlayer({ const canvas = canvasRef.current; let videoElement: JSMpeg.VideoElement | null = null; + let frameCount = 0; + setHasData(false); if (videoWrapper && playbackEnabled) { @@ -123,21 +133,68 @@ export default function JSMpegPlayer({ { protocols: [], audio: false, - disableGl: camera != "birdseye", - disableWebAssembly: camera != "birdseye", + disableGl: !useWebGL, + disableWebAssembly: !useWebGL, videoBufferSize: 1024 * 1024 * 4, onVideoDecode: () => { if (!hasDataRef.current) { setHasData(true); onPlayingRef.current?.(); } + frameCount++; }, }, ); + + // Set up WebSocket message handler + if ( + videoElement.player && + videoElement.player.source && + videoElement.player.source.socket + ) { + const socket = videoElement.player.source.socket; + socket.addEventListener("message", (event: MessageEvent) => { + if (event.data instanceof ArrayBuffer) { + bytesReceivedRef.current += event.data.byteLength; + } + }); + } + + // Update stats every second + statsIntervalRef.current = setInterval(() => { + const currentTimestamp = Date.now(); + const timeDiff = (currentTimestamp - lastTimestampRef.current) / 1000; // in seconds + const bitrate = (bytesReceivedRef.current * 8) / timeDiff / 1000; // in kbps + + setStats?.({ + streamType: "jsmpeg", + bandwidth: Math.round(bitrate), + totalFrames: frameCount, + latency: undefined, + droppedFrames: undefined, + decodedFrames: undefined, + droppedFrameRate: undefined, + }); + + bytesReceivedRef.current = 0; + lastTimestampRef.current = currentTimestamp; + }, 1000); + + return () => { + if (statsIntervalRef.current) { + clearInterval(statsIntervalRef.current); + frameCount = 0; + statsIntervalRef.current = null; + } + }; }, 0); return () => { clearTimeout(initPlayer); + if (statsIntervalRef.current) { + clearInterval(statsIntervalRef.current); + statsIntervalRef.current = null; + } if (videoElement) { try { // this causes issues in react strict mode diff --git a/web/src/components/player/LivePlayer.tsx b/web/src/components/player/LivePlayer.tsx index abf908baa..4bd751469 100644 --- a/web/src/components/player/LivePlayer.tsx +++ b/web/src/components/player/LivePlayer.tsx @@ -11,6 +11,7 @@ import { useCameraActivity } from "@/hooks/use-camera-activity"; import { LivePlayerError, LivePlayerMode, + PlayerStatsType, VideoResolutionType, } from "@/types/live"; import { getIconForLabel } from "@/utils/iconUtil"; @@ -20,20 +21,26 @@ import { cn } from "@/lib/utils"; import { TbExclamationCircle } from "react-icons/tb"; import { TooltipPortal } from "@radix-ui/react-tooltip"; import { baseUrl } from "@/api/baseUrl"; +import { PlayerStats } from "./PlayerStats"; type LivePlayerProps = { cameraRef?: (ref: HTMLDivElement | null) => void; containerRef?: React.MutableRefObject; className?: string; cameraConfig: CameraConfig; + streamName: string; preferredLiveMode: LivePlayerMode; showStillWithoutActivity?: boolean; + useWebGL: boolean; windowVisible?: boolean; playAudio?: boolean; + volume?: number; + playInBackground: boolean; micEnabled?: boolean; // only webrtc supports mic iOSCompatFullScreen?: boolean; pip?: boolean; autoLive?: boolean; + showStats?: boolean; onClick?: () => void; setFullResolution?: React.Dispatch>; onError?: (error: LivePlayerError) => void; @@ -45,14 +52,19 @@ export default function LivePlayer({ containerRef, className, cameraConfig, + streamName, preferredLiveMode, showStillWithoutActivity = true, + useWebGL = false, windowVisible = true, playAudio = 
false, + volume, + playInBackground = false, micEnabled = false, iOSCompatFullScreen = false, pip, autoLive = true, + showStats = false, onClick, setFullResolution, onError, @@ -60,6 +72,18 @@ export default function LivePlayer({ }: LivePlayerProps) { const internalContainerRef = useRef(null); + // stats + + const [stats, setStats] = useState({ + streamType: "-", + bandwidth: 0, // in kbps + latency: undefined, // in seconds + totalFrames: 0, + droppedFrames: undefined, + decodedFrames: 0, + droppedFrameRate: 0, // percentage + }); + // camera activity const { activeMotion, activeTracking, objects, offline } = @@ -144,6 +168,25 @@ export default function LivePlayer({ setLiveReady(false); }, [preferredLiveMode]); + const [key, setKey] = useState(0); + + const resetPlayer = () => { + setLiveReady(false); + setKey((prevKey) => prevKey + 1); + }; + + useEffect(() => { + if (streamName) { + resetPlayer(); + } + }, [streamName]); + + useEffect(() => { + if (showStillWithoutActivity && !autoLive) { + setLiveReady(false); + } + }, [showStillWithoutActivity, autoLive]); + const playerIsPlaying = useCallback(() => { setLiveReady(true); }, []); @@ -153,15 +196,19 @@ export default function LivePlayer({ } let player; - if (!autoLive) { + if (!autoLive || !streamName) { player = null; } else if (preferredLiveMode == "webrtc") { player = ( @@ -293,7 +348,7 @@ export default function LivePlayer({ )} > )}
+      {showStats && (
+        <PlayerStats stats={stats} />
+      )}
     </div>
); } diff --git a/web/src/components/player/MsePlayer.tsx b/web/src/components/player/MsePlayer.tsx index 52cf8f99c..554eb5af1 100644 --- a/web/src/components/player/MsePlayer.tsx +++ b/web/src/components/player/MsePlayer.tsx @@ -1,5 +1,9 @@ import { baseUrl } from "@/api/baseUrl"; -import { LivePlayerError, VideoResolutionType } from "@/types/live"; +import { + LivePlayerError, + PlayerStatsType, + VideoResolutionType, +} from "@/types/live"; import { SetStateAction, useCallback, @@ -15,7 +19,11 @@ type MSEPlayerProps = { className?: string; playbackEnabled?: boolean; audioEnabled?: boolean; + volume?: number; + playInBackground?: boolean; pip?: boolean; + getStats?: boolean; + setStats?: (stats: PlayerStatsType) => void; onPlaying?: () => void; setFullResolution?: React.Dispatch>; onError?: (error: LivePlayerError) => void; @@ -26,7 +34,11 @@ function MSEPlayer({ className, playbackEnabled = true, audioEnabled = false, + volume, + playInBackground = false, pip = false, + getStats = false, + setStats, onPlaying, setFullResolution, onError, @@ -57,6 +69,7 @@ function MSEPlayer({ const [connectTS, setConnectTS] = useState(0); const [bufferTimeout, setBufferTimeout] = useState(); const [errorCount, setErrorCount] = useState(0); + const totalBytesLoaded = useRef(0); const videoRef = useRef(null); const wsRef = useRef(null); @@ -316,6 +329,8 @@ function MSEPlayer({ let bufLen = 0; ondataRef.current = (data) => { + totalBytesLoaded.current += data.byteLength; + if (sb?.updating || bufLen > 0) { const b = new Uint8Array(data); buf.set(b, bufLen); @@ -508,12 +523,22 @@ function MSEPlayer({ } }; - document.addEventListener("visibilitychange", listener); + if (!playInBackground) { + document.addEventListener("visibilitychange", listener); + } return () => { - document.removeEventListener("visibilitychange", listener); + if (!playInBackground) { + document.removeEventListener("visibilitychange", listener); + } }; - }, [playbackEnabled, visibilityCheck, onConnect, onDisconnect]); + }, [ + playbackEnabled, + visibilityCheck, + playInBackground, + onConnect, + onDisconnect, + ]); // control pip @@ -525,6 +550,16 @@ function MSEPlayer({ videoRef.current.requestPictureInPicture(); }, [pip, videoRef]); + // control volume + + useEffect(() => { + if (!videoRef.current || volume == undefined) { + return; + } + + videoRef.current.volume = volume; + }, [volume, videoRef]); + // ensure we disconnect for slower connections useEffect(() => { @@ -542,6 +577,68 @@ function MSEPlayer({ // eslint-disable-next-line react-hooks/exhaustive-deps }, [playbackEnabled]); + // stats + + useEffect(() => { + const video = videoRef.current; + let lastLoadedBytes = totalBytesLoaded.current; + let lastTimestamp = Date.now(); + + if (!getStats) return; + + const updateStats = () => { + if (video) { + const now = Date.now(); + const bytesLoaded = totalBytesLoaded.current; + const timeElapsed = (now - lastTimestamp) / 1000; // seconds + const bandwidth = (bytesLoaded - lastLoadedBytes) / timeElapsed / 1024; // kbps + + lastLoadedBytes = bytesLoaded; + lastTimestamp = now; + + const latency = + video.seekable.length > 0 + ? Math.max( + 0, + video.seekable.end(video.seekable.length - 1) - + video.currentTime, + ) + : 0; + + const videoQuality = video.getVideoPlaybackQuality(); + const { totalVideoFrames, droppedVideoFrames } = videoQuality; + const droppedFrameRate = totalVideoFrames + ? 
(droppedVideoFrames / totalVideoFrames) * 100 + : 0; + + setStats?.({ + streamType: "MSE", + bandwidth, + latency, + totalFrames: totalVideoFrames, + droppedFrames: droppedVideoFrames || undefined, + decodedFrames: totalVideoFrames - droppedVideoFrames, + droppedFrameRate, + }); + } + }; + + const interval = setInterval(updateStats, 1000); // Update every second + + return () => { + clearInterval(interval); + setStats?.({ + streamType: "-", + bandwidth: 0, + latency: undefined, + totalFrames: 0, + droppedFrames: undefined, + decodedFrames: 0, + droppedFrameRate: 0, + }); + }; + }, [setStats, getStats]); + return (