import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
import { useCallback, useEffect, useState, useMemo } from "react";
import useSWR from "swr";
import { LivePlayerMode } from "@/types/live";
import useDeferredStreamMetadata from "./use-deferred-stream-metadata";

export default function useCameraLiveMode(
  cameras: CameraConfig[],
  windowVisible: boolean,
  activeStreams?: { [cameraName: string]: string },
) {
  const { data: config } = useSWR<FrigateConfig>("config");

  // Compute which streams need metadata (restreamed streams only)
  const restreamedStreamNames = useMemo(() => {
    if (!cameras || !config) return [];

    const streamNames = new Set<string>();

    cameras.forEach((camera) => {
      const isRestreamed = Object.keys(config.go2rtc.streams || {}).includes(
        Object.values(camera.live.streams)[0],
      );

      if (isRestreamed) {
        if (activeStreams && activeStreams[camera.name]) {
          // Only the currently active stream needs metadata
          streamNames.add(activeStreams[camera.name]);
        } else {
          // No active stream selected yet; request metadata for all streams
          Object.values(camera.live.streams).forEach((streamName) => {
            streamNames.add(streamName);
          });
        }
      }
    });

    return Array.from(streamNames);
  }, [cameras, config, activeStreams]);

  // Fetch stream metadata with deferred loading (doesn't block initial render)
  const streamMetadata = useDeferredStreamMetadata(restreamedStreamNames);

  // Compute live mode states
  const [preferredLiveModes, setPreferredLiveModes] = useState<{
    [key: string]: LivePlayerMode;
  }>({});
  const [isRestreamedStates, setIsRestreamedStates] = useState<{
    [key: string]: boolean;
  }>({});
  const [supportsAudioOutputStates, setSupportsAudioOutputStates] = useState<{
    [key: string]: {
      supportsAudio: boolean;
      cameraName: string;
    };
  }>({});

  useEffect(() => {
    if (!cameras) return;

    const mseSupported =
      "MediaSource" in window || "ManagedMediaSource" in window;

    const newPreferredLiveModes: { [key: string]: LivePlayerMode } = {};
    const newIsRestreamedStates: { [key: string]: boolean } = {};
    const newSupportsAudioOutputStates: {
      [key: string]: { supportsAudio: boolean; cameraName: string };
    } = {};

    cameras.forEach((camera) => {
      const isRestreamed =
        config &&
        Object.keys(config.go2rtc.streams || {}).includes(
          Object.values(camera.live.streams)[0],
        );

      newIsRestreamedStates[camera.name] = isRestreamed ?? false;

      if (!mseSupported) {
        newPreferredLiveModes[camera.name] = isRestreamed ? "webrtc" : "jsmpeg";
      } else {
        newPreferredLiveModes[camera.name] = isRestreamed ? "mse" : "jsmpeg";
      }

      // Check each stream for audio support
      if (isRestreamed) {
        Object.values(camera.live.streams).forEach((streamName) => {
          const metadata = streamMetadata[streamName];

          newSupportsAudioOutputStates[streamName] = {
            supportsAudio: metadata
              ? metadata.producers.find(
                  (prod) =>
                    prod.medias &&
                    prod.medias.find((media) =>
                      media.includes("audio, recvonly"),
                    ) !== undefined,
                ) !== undefined
              : false,
            cameraName: camera.name,
          };
        });
      } else {
        newSupportsAudioOutputStates[camera.name] = {
          supportsAudio: false,
          cameraName: camera.name,
        };
      }
    });

    setPreferredLiveModes(newPreferredLiveModes);
    setIsRestreamedStates(newIsRestreamedStates);
    setSupportsAudioOutputStates(newSupportsAudioOutputStates);
  }, [cameras, config, windowVisible, streamMetadata]);

  const resetPreferredLiveMode = useCallback(
    (cameraName: string) => {
      const mseSupported =
        "MediaSource" in window || "ManagedMediaSource" in window;
      const isRestreamed =
        config &&
        Object.keys(config.go2rtc.streams || {}).includes(cameraName);

      setPreferredLiveModes((prevModes) => {
        const newModes = { ...prevModes };

        if (!mseSupported) {
          newModes[cameraName] = isRestreamed ? "webrtc" : "jsmpeg";
        } else {
          newModes[cameraName] = isRestreamed ? "mse" : "jsmpeg";
        }

        return newModes;
      });
    },
    [config],
  );

  return {
    preferredLiveModes,
    setPreferredLiveModes,
    resetPreferredLiveMode,
    isRestreamedStates,
    supportsAudioOutputStates,
    streamMetadata,
  };
}