frigate/web/src/hooks/use-camera-live-mode.ts
Nicolas Mowen 78eace258e
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
Miscellaneous Fixes (0.17 Beta) (#21320)
* Exclude D-FINE from using CUDA Graphs

* fix objects count in detail stream

* Add debugging for classification models

* validate idb stored stream name and reset if invalid

fixes https://github.com/blakeblackshear/frigate/discussions/21311

* ensure jina loading takes place in the main thread to prevent lazily importing tensorflow in another thread later

reverts atexit changes in https://github.com/blakeblackshear/frigate/pull/21301 and fixes https://github.com/blakeblackshear/frigate/discussions/21306

* revert old atexit change in bird too

* revert types

* ensure we bail in the live mode hook for empty camera groups

prevent infinite rendering on camera groups with no cameras

---------

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
2025-12-16 22:35:43 -06:00

137 lines
4.3 KiB
TypeScript

import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
import { useCallback, useEffect, useState, useMemo } from "react";
import useSWR from "swr";
import { LivePlayerMode } from "@/types/live";
import useDeferredStreamMetadata from "./use-deferred-stream-metadata";
import { detectCameraAudioFeatures } from "@/utils/cameraUtil";
/**
 * Tracks per-camera live-playback state for a set of cameras.
 *
 * For each camera this derives:
 * - the preferred player mode ("mse"/"webrtc" for go2rtc-restreamed
 *   cameras depending on browser MSE support, "jsmpeg" otherwise),
 * - whether the camera is restreamed through go2rtc,
 * - audio-output support per restreamed stream (from deferred metadata).
 *
 * @param cameras cameras to track (an empty list leaves state untouched)
 * @param windowVisible used only to retrigger recomputation when the
 *   window visibility changes
 * @param activeStreams optional map of camera name -> currently active
 *   stream name; when present, metadata is fetched only for that stream
 */
export default function useCameraLiveMode(
  cameras: CameraConfig[],
  windowVisible: boolean,
  activeStreams?: { [cameraName: string]: string },
) {
  const { data: config } = useSWR<FrigateConfig>("config");

  // Names of go2rtc-restreamed streams whose metadata we need to fetch.
  const restreamedStreamNames = useMemo(() => {
    if (!cameras || !config) return [];

    const names = new Set<string>();
    const go2rtcStreams = Object.keys(config.go2rtc.streams || {});

    for (const camera of cameras) {
      const isRestreamed = go2rtcStreams.includes(
        Object.values(camera.live.streams)[0],
      );
      if (!isRestreamed) continue;

      const active = activeStreams?.[camera.name];
      if (active) {
        // Only the currently-active stream matters for this camera.
        names.add(active);
      } else {
        for (const streamName of Object.values(camera.live.streams)) {
          names.add(streamName);
        }
      }
    }
    return Array.from(names);
  }, [cameras, config, activeStreams]);

  // Metadata loads lazily so it never blocks the initial render.
  const streamMetadata = useDeferredStreamMetadata(restreamedStreamNames);

  const [preferredLiveModes, setPreferredLiveModes] = useState<{
    [key: string]: LivePlayerMode;
  }>({});
  const [isRestreamedStates, setIsRestreamedStates] = useState<{
    [key: string]: boolean;
  }>({});
  const [supportsAudioOutputStates, setSupportsAudioOutputStates] = useState<{
    [key: string]: { supportsAudio: boolean; cameraName: string };
  }>({});

  useEffect(() => {
    // Bail for empty camera groups (prevents infinite re-rendering).
    if (!cameras || cameras.length === 0) return;

    const mseSupported =
      "MediaSource" in window || "ManagedMediaSource" in window;
    // Restreamed cameras prefer MSE, falling back to WebRTC; everything
    // else plays via jsmpeg.
    const restreamMode: LivePlayerMode = mseSupported ? "mse" : "webrtc";

    const modes: { [key: string]: LivePlayerMode } = {};
    const restreamed: { [key: string]: boolean } = {};
    const audioSupport: {
      [key: string]: { supportsAudio: boolean; cameraName: string };
    } = {};

    for (const camera of cameras) {
      const isRestreamed =
        config &&
        Object.keys(config.go2rtc.streams || {}).includes(
          Object.values(camera.live.streams)[0],
        );

      restreamed[camera.name] = isRestreamed ?? false;
      modes[camera.name] = isRestreamed ? restreamMode : "jsmpeg";

      if (isRestreamed) {
        // Record audio-output capability for every stream of this camera,
        // keyed by stream name.
        for (const streamName of Object.values(camera.live.streams)) {
          const features = detectCameraAudioFeatures(
            streamMetadata[streamName],
          );
          audioSupport[streamName] = {
            supportsAudio: features.audioOutput,
            cameraName: camera.name,
          };
        }
      } else {
        // Non-restreamed cameras never carry audio; keyed by camera name.
        audioSupport[camera.name] = {
          supportsAudio: false,
          cameraName: camera.name,
        };
      }
    }

    setPreferredLiveModes(modes);
    setIsRestreamedStates(restreamed);
    setSupportsAudioOutputStates(audioSupport);
    // windowVisible is a deliberate dependency: visibility changes should
    // retrigger this recomputation even though it is not read in the body.
  }, [cameras, config, windowVisible, streamMetadata]);

  // Recompute the preferred mode for a single camera, e.g. after a
  // playback failure forces a fallback.
  const resetPreferredLiveMode = useCallback(
    (cameraName: string) => {
      const mseSupported =
        "MediaSource" in window || "ManagedMediaSource" in window;
      // NOTE(review): unlike the effect above, membership is checked by
      // camera name rather than the first live stream name — presumably
      // these coincide for restreamed cameras; confirm against callers.
      const isRestreamed =
        config && Object.keys(config.go2rtc.streams || {}).includes(cameraName);
      const mode: LivePlayerMode = isRestreamed
        ? mseSupported
          ? "mse"
          : "webrtc"
        : "jsmpeg";
      setPreferredLiveModes((prev) => ({ ...prev, [cameraName]: mode }));
    },
    [config],
  );

  return {
    preferredLiveModes,
    setPreferredLiveModes,
    resetPreferredLiveMode,
    isRestreamedStates,
    supportsAudioOutputStates,
    streamMetadata,
  };
}