frigate/web/src/components/player/WebRTCPlayer.tsx

import { baseUrl } from "@/api/baseUrl";
import { LivePlayerError, PlayerStatsType } from "@/types/live";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

type WebRtcPlayerProps = {
className?: string;
camera: string;
playbackEnabled?: boolean;
audioEnabled?: boolean;
volume?: number;
microphoneEnabled?: boolean;
  iOSCompatFullScreen?: boolean; // iOS doesn't support fullscreening a div, so we fall back to the video element's native fullscreen
pip?: boolean;
getStats?: boolean;
setStats?: (stats: PlayerStatsType) => void;
onPlaying?: () => void;
onError?: (error: LivePlayerError) => void;
};
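
/**
 * Live camera player using WebRTC for playback and, optionally, two way talk.
 *
 * Hypothetical usage sketch (the camera name and handlers below are
 * illustrative, not taken from the source):
 *
 *   <WebRtcPlayer
 *     camera="front_door"
 *     audioEnabled
 *     volume={0.5}
 *     onPlaying={() => console.log("stream started")}
 *     onError={(error) => console.error("player error", error)}
 *   />
 */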
export default function WebRtcPlayer({
className,
camera,
playbackEnabled = true,
audioEnabled = false,
volume,
microphoneEnabled = false,
iOSCompatFullScreen = false,
pip = false,
getStats = false,
setStats,
onPlaying,
onError,
}: WebRtcPlayerProps) {
  // metadata: websocket URL for the WebRTC signaling endpoint
const wsURL = useMemo(() => {
return `${baseUrl.replace(/^http/, "ws")}live/webrtc/api/ws?src=${camera}`;
}, [camera]);

  // camera states: peer connection, video element, and watchdog timers
const pcRef = useRef<RTCPeerConnection | undefined>();
const videoRef = useRef<HTMLVideoElement | null>(null);
const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
const videoLoadTimeoutRef = useRef<NodeJS.Timeout>();
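
  // Build an RTCPeerConnection for the requested media string. "camera",
  // "microphone", "display", and "speaker" add sendonly transceivers from
  // local capture (used for two way talk); "video" and "audio" add recvonly
  // transceivers for the remote stream. All tracks end up in a single
  // MediaStream attached to the video element.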
const PeerConnection = useCallback(
async (media: string) => {
if (!videoRef.current) {
return;
}
const pc = new RTCPeerConnection({
bundlePolicy: "max-bundle",
iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
});
const localTracks = [];
if (/camera|microphone/.test(media)) {
const tracks = await getMediaTracks("user", {
video: media.indexOf("camera") >= 0,
audio: media.indexOf("microphone") >= 0,
});
tracks.forEach((track) => {
pc.addTransceiver(track, { direction: "sendonly" });
if (track.kind === "video") localTracks.push(track);
});
}
if (media.indexOf("display") >= 0) {
const tracks = await getMediaTracks("display", {
video: true,
audio: media.indexOf("speaker") >= 0,
});
tracks.forEach((track) => {
pc.addTransceiver(track, { direction: "sendonly" });
if (track.kind === "video") localTracks.push(track);
});
}
if (/video|audio/.test(media)) {
const tracks = ["video", "audio"]
.filter((kind) => media.indexOf(kind) >= 0)
.map(
(kind) =>
pc.addTransceiver(kind, { direction: "recvonly" }).receiver.track,
);
localTracks.push(...tracks);
}
videoRef.current.srcObject = new MediaStream(localTracks);
return pc;
},
[videoRef],
);
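
  // Request local capture tracks ("user" = camera/mic, "display" = screen
  // share), returning an empty array if capture fails or permission is
  // denied so playback can continue without two way talk.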
async function getMediaTracks(
media: string,
constraints: MediaStreamConstraints,
) {
try {
const stream =
media === "user"
? await navigator.mediaDevices.getUserMedia(constraints)
: await navigator.mediaDevices.getDisplayMedia(constraints);
return stream.getTracks();
} catch (e) {
return [];
}
}
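
  // Signaling: once the websocket opens, send our SDP offer and trickle
  // local ICE candidates; apply the remote answer and candidates as they
  // arrive from the server.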
const connect = useCallback(
async (aPc: Promise<RTCPeerConnection | undefined>) => {
if (!aPc) {
return;
}
pcRef.current = await aPc;
const ws = new WebSocket(wsURL);
ws.addEventListener("open", () => {
pcRef.current?.addEventListener("icecandidate", (ev) => {
if (!ev.candidate) return;
const msg = {
type: "webrtc/candidate",
value: ev.candidate.candidate,
};
ws.send(JSON.stringify(msg));
});
pcRef.current
?.createOffer()
.then((offer) => pcRef.current?.setLocalDescription(offer))
.then(() => {
const msg = {
type: "webrtc/offer",
value: pcRef.current?.localDescription?.sdp,
};
ws.send(JSON.stringify(msg));
});
});
ws.addEventListener("message", (ev) => {
const msg = JSON.parse(ev.data);
if (msg.type === "webrtc/candidate") {
pcRef.current?.addIceCandidate({ candidate: msg.value, sdpMid: "0" });
} else if (msg.type === "webrtc/answer") {
pcRef.current?.setRemoteDescription({
type: "answer",
sdp: msg.value,
});
}
});
},
[wsURL],
);
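
  // (Re)connect whenever the camera, playback state, or microphone changes;
  // close the previous peer connection on cleanup.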
useEffect(() => {
if (!videoRef.current) {
return;
}
if (!playbackEnabled) {
return;
}
const aPc = PeerConnection(
microphoneEnabled ? "video+audio+microphone" : "video+audio",
);
connect(aPc);
return () => {
if (pcRef.current) {
pcRef.current.close();
pcRef.current = undefined;
}
};
}, [
camera,
connect,
PeerConnection,
pcRef,
videoRef,
playbackEnabled,
microphoneEnabled,
]);

  // iOS compat: toggle the video element's native controls on tap, since
  // iOS doesn't support fullscreen on divs
const [iOSCompatControls, setiOSCompatControls] = useState(false);

  // control picture-in-picture
useEffect(() => {
if (!videoRef.current || !pip) {
return;
}
videoRef.current.requestPictureInPicture();
}, [pip, videoRef]);

  // control volume
useEffect(() => {
if (!videoRef.current || volume == undefined) {
return;
}
videoRef.current.volume = volume;
}, [volume, videoRef]);
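
  // Startup watchdog: report the stream as stalled if no media has loaded
  // within 5 seconds of mount.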
useEffect(() => {
videoLoadTimeoutRef.current = setTimeout(() => {
onError?.("stalled");
}, 5000);
return () => {
if (videoLoadTimeoutRef.current) {
clearTimeout(videoLoadTimeoutRef.current);
}
};
// we know that these deps are correct
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
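
  // First frame arrived: cancel the startup watchdog and notify the parent.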
const handleLoadedData = () => {
if (videoLoadTimeoutRef.current) {
clearTimeout(videoLoadTimeoutRef.current);
}
onPlaying?.();
};

  // stats: while playing, poll getStats() once per second and derive
  // bandwidth, latency, and frame counters for the overlay
useEffect(() => {
if (!pcRef.current || !getStats) return;
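
    // Keep the previous inbound-rtp sample so each tick can compute a
    // bitrate delta over the 1 second polling interval.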
let lastBytesReceived = 0;
let lastTimestamp = 0;
const interval = setInterval(async () => {
if (pcRef.current && videoRef.current && !videoRef.current.paused) {
const report = await pcRef.current.getStats();
let bytesReceived = 0;
let timestamp = 0;
let roundTripTime = 0;
let framesReceived = 0;
let framesDropped = 0;
let framesDecoded = 0;
report.forEach((stat) => {
if (stat.type === "inbound-rtp" && stat.kind === "video") {
bytesReceived = stat.bytesReceived;
timestamp = stat.timestamp;
framesReceived = stat.framesReceived;
framesDropped = stat.framesDropped;
framesDecoded = stat.framesDecoded;
}
if (stat.type === "candidate-pair" && stat.state === "succeeded") {
roundTripTime = stat.currentRoundTripTime;
}
});
const timeDiff = (timestamp - lastTimestamp) / 1000; // in seconds
const bitrate =
timeDiff > 0
? (bytesReceived - lastBytesReceived) / timeDiff / 1000
: 0; // in kbps
setStats?.({
streamType: "WebRTC",
bandwidth: Math.round(bitrate),
latency: roundTripTime,
totalFrames: framesReceived,
droppedFrames: framesDropped,
decodedFrames: framesDecoded,
droppedFrameRate:
framesReceived > 0 ? (framesDropped / framesReceived) * 100 : 0,
});
lastBytesReceived = bytesReceived;
lastTimestamp = timestamp;
}
}, 1000);
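
    // On cleanup, stop polling and reset the stats overlay to an empty state.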
return () => {
clearInterval(interval);
setStats?.({
streamType: "-",
bandwidth: 0,
latency: undefined,
totalFrames: 0,
droppedFrames: undefined,
decodedFrames: 0,
droppedFrameRate: 0,
});
};
// we need to listen on the value of the ref
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [pcRef, pcRef.current, getStats]);
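
  // The video element doubles as a buffering watchdog: every onProgress
  // event resets a 3 second timer, and if no progress happens while the
  // page is visible the stream is reported as stalled. Network media errors
  // are surfaced as startup failures.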
return (
<video
ref={videoRef}
className={className}
controls={iOSCompatControls}
autoPlay
playsInline
muted={!audioEnabled}
onLoadedData={handleLoadedData}
onProgress={
onError != undefined
? () => {
if (videoRef.current?.paused) {
return;
}
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
setBufferTimeout(
setTimeout(() => {
if (
document.visibilityState === "visible" &&
pcRef.current != undefined
) {
onError("stalled");
}
}, 3000),
);
}
: undefined
}
onClick={
iOSCompatFullScreen
? () => setiOSCompatControls(!iOSCompatControls)
: undefined
}
onError={(e) => {
if (
// @ts-expect-error code does exist
e.target.error.code == MediaError.MEDIA_ERR_NETWORK
) {
onError?.("startup");
}
}}
/>
);
}