import { baseUrl } from "@/api/baseUrl";
import { LivePlayerError, PlayerStatsType } from "@/types/live";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";

type WebRtcPlayerProps = {
  className?: string;
  camera: string;
  playbackEnabled?: boolean;
  audioEnabled?: boolean;
  volume?: number;
  microphoneEnabled?: boolean;
  iOSCompatFullScreen?: boolean; // ios doesn't support fullscreen divs so we must support the video element
  pip?: boolean;
  getStats?: boolean;
  setStats?: (stats: PlayerStatsType) => void;
  onPlaying?: () => void;
  onError?: (error: LivePlayerError) => void;
};

export default function WebRtcPlayer({
  className,
  camera,
  playbackEnabled = true,
  audioEnabled = false,
  volume,
  microphoneEnabled = false,
  iOSCompatFullScreen = false,
  pip = false,
  getStats = false,
  setStats,
  onPlaying,
  onError,
}: WebRtcPlayerProps) {
  // metadata
  const wsURL = useMemo(() => {
    return `${baseUrl.replace(/^http/, "ws")}live/webrtc/api/ws?src=${camera}`;
  }, [camera]);

  // error handler
  const handleError = useCallback(
    (error: LivePlayerError, description: string = "Unknown error") => {
      // eslint-disable-next-line no-console
      console.error(
        `${camera} - WebRTC error '${error}': ${description} See the documentation: https://docs.frigate.video/configuration/live/#live-player-error-messages`,
      );
      onError?.(error);
    },
    [camera, onError],
  );

  // camera states
  const pcRef = useRef<RTCPeerConnection | undefined>();
  const videoRef = useRef<HTMLVideoElement | null>(null);
  const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
  const videoLoadTimeoutRef = useRef<NodeJS.Timeout>();
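
  // build the RTCPeerConnection: attach any local capture tracks (camera/mic or
  // display) as send-only and add receive-only transceivers whose tracks feed the
  // video element's MediaStream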
  const PeerConnection = useCallback(
    async (media: string) => {
      if (!videoRef.current) {
        return;
      }

      const pc = new RTCPeerConnection({
        bundlePolicy: "max-bundle",
        iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
      });

      const localTracks = [];

      if (/camera|microphone/.test(media)) {
        const tracks = await getMediaTracks("user", {
          video: media.indexOf("camera") >= 0,
          audio: media.indexOf("microphone") >= 0,
        });
        tracks.forEach((track) => {
          pc.addTransceiver(track, { direction: "sendonly" });
          if (track.kind === "video") localTracks.push(track);
        });
      }

      if (media.indexOf("display") >= 0) {
        const tracks = await getMediaTracks("display", {
          video: true,
          audio: media.indexOf("speaker") >= 0,
        });
        tracks.forEach((track) => {
          pc.addTransceiver(track, { direction: "sendonly" });
          if (track.kind === "video") localTracks.push(track);
        });
      }

      if (/video|audio/.test(media)) {
        const tracks = ["video", "audio"]
          .filter((kind) => media.indexOf(kind) >= 0)
          .map(
            (kind) =>
              pc.addTransceiver(kind, { direction: "recvonly" }).receiver.track,
          );
        localTracks.push(...tracks);
      }

      videoRef.current.srcObject = new MediaStream(localTracks);
      return pc;
    },
    [videoRef],
  );
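
  // acquire local capture tracks via getUserMedia or getDisplayMedia; returns an
  // empty list if capture fails or permission is denied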
  async function getMediaTracks(
    media: string,
    constraints: MediaStreamConstraints,
  ) {
    try {
      const stream =
        media === "user"
          ? await navigator.mediaDevices.getUserMedia(constraints)
          : await navigator.mediaDevices.getDisplayMedia(constraints);
      return stream.getTracks();
    } catch (e) {
      return [];
    }
  }
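
  // perform signaling over the websocket: send the local SDP offer and ICE
  // candidates, and apply the answer and candidates received from the server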
  const connect = useCallback(
    async (aPc: Promise<RTCPeerConnection | undefined>) => {
      if (!aPc) {
        return;
      }

      pcRef.current = await aPc;
      const ws = new WebSocket(wsURL);

      ws.addEventListener("open", () => {
        pcRef.current?.addEventListener("icecandidate", (ev) => {
          if (!ev.candidate) return;
          const msg = {
            type: "webrtc/candidate",
            value: ev.candidate.candidate,
          };
          ws.send(JSON.stringify(msg));
        });

        pcRef.current
          ?.createOffer()
          .then((offer) => pcRef.current?.setLocalDescription(offer))
          .then(() => {
            const msg = {
              type: "webrtc/offer",
              value: pcRef.current?.localDescription?.sdp,
            };
            ws.send(JSON.stringify(msg));
          });
      });

      ws.addEventListener("message", (ev) => {
        const msg = JSON.parse(ev.data);
        if (msg.type === "webrtc/candidate") {
          pcRef.current?.addIceCandidate({ candidate: msg.value, sdpMid: "0" });
        } else if (msg.type === "webrtc/answer") {
          pcRef.current?.setRemoteDescription({
            type: "answer",
            sdp: msg.value,
          });
        }
      });
    },
    [wsURL],
  );
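
  // establish the connection whenever playback is enabled; close the peer
  // connection on cleanup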
  useEffect(() => {
    if (!videoRef.current) {
      return;
    }

    if (!playbackEnabled) {
      return;
    }

    const aPc = PeerConnection(
      microphoneEnabled ? "video+audio+microphone" : "video+audio",
    );
    connect(aPc);

    return () => {
      if (pcRef.current) {
        pcRef.current.close();
        pcRef.current = undefined;
      }
    };
  }, [
    camera,
    connect,
    PeerConnection,
    pcRef,
    videoRef,
    playbackEnabled,
    microphoneEnabled,
  ]);

  // ios compat
  const [iOSCompatControls, setiOSCompatControls] = useState(false);

  // control pip
  useEffect(() => {
    if (!videoRef.current || !pip) {
      return;
    }

    videoRef.current.requestPictureInPicture();
  }, [pip, videoRef]);

  // control volume
  useEffect(() => {
    if (!videoRef.current || volume == undefined) {
      return;
    }

    videoRef.current.volume = volume;
  }, [volume, videoRef]);
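
  // report a stalled error if no video data has loaded within 5 seconds; the
  // timeout is cleared by handleLoadedData once playback starts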
  useEffect(() => {
    videoLoadTimeoutRef.current = setTimeout(() => {
      handleError("stalled", "WebRTC connection timed out.");
    }, 5000);

    return () => {
      if (videoLoadTimeoutRef.current) {
        clearTimeout(videoLoadTimeoutRef.current);
      }
    };
    // we know that these deps are correct
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  const handleLoadedData = () => {
    if (videoLoadTimeoutRef.current) {
      clearTimeout(videoLoadTimeoutRef.current);
    }
    onPlaying?.();
  };

  // stats
  useEffect(() => {
    if (!pcRef.current || !getStats) return;

    let lastBytesReceived = 0;
    let lastTimestamp = 0;

    const interval = setInterval(async () => {
      if (pcRef.current && videoRef.current && !videoRef.current.paused) {
        const report = await pcRef.current.getStats();
        let bytesReceived = 0;
        let timestamp = 0;
        let roundTripTime = 0;
        let framesReceived = 0;
        let framesDropped = 0;
        let framesDecoded = 0;

        report.forEach((stat) => {
          if (stat.type === "inbound-rtp" && stat.kind === "video") {
            bytesReceived = stat.bytesReceived;
            timestamp = stat.timestamp;
            framesReceived = stat.framesReceived;
            framesDropped = stat.framesDropped;
            framesDecoded = stat.framesDecoded;
          }
          if (stat.type === "candidate-pair" && stat.state === "succeeded") {
            roundTripTime = stat.currentRoundTripTime;
          }
        });

        const timeDiff = (timestamp - lastTimestamp) / 1000; // in seconds
        const bitrate =
          timeDiff > 0
            ? (bytesReceived - lastBytesReceived) / timeDiff / 1000
            : 0; // in kBps

        setStats?.({
          streamType: "WebRTC",
          bandwidth: Math.round(bitrate),
          latency: roundTripTime,
          totalFrames: framesReceived,
          droppedFrames: framesDropped,
          decodedFrames: framesDecoded,
          droppedFrameRate:
            framesReceived > 0 ? (framesDropped / framesReceived) * 100 : 0,
        });

        lastBytesReceived = bytesReceived;
        lastTimestamp = timestamp;
      }
    }, 1000);

    return () => {
      clearInterval(interval);
      setStats?.({
        streamType: "-",
        bandwidth: 0,
        latency: undefined,
        totalFrames: 0,
        droppedFrames: undefined,
        decodedFrames: 0,
        droppedFrameRate: 0,
      });
    };
    // we need to listen on the value of the ref
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [pcRef, pcRef.current, getStats]);

  return (
    <video
      ref={videoRef}
      className={className}
      controls={iOSCompatControls}
      autoPlay
      playsInline
      muted={!audioEnabled}
      onLoadedData={handleLoadedData}
      onProgress={
        onError != undefined
          ? () => {
              if (videoRef.current?.paused) {
                return;
              }

              if (bufferTimeout) {
                clearTimeout(bufferTimeout);
                setBufferTimeout(undefined);
              }

              setBufferTimeout(
                setTimeout(() => {
                  if (
                    document.visibilityState === "visible" &&
                    pcRef.current != undefined
                  ) {
                    handleError(
                      "stalled",
                      "Media playback has stalled after 3 seconds due to insufficient buffering or a network interruption.",
                    );
                  }
                }, 3000),
              );
            }
          : undefined
      }
      onClick={
        iOSCompatFullScreen
          ? () => setiOSCompatControls(!iOSCompatControls)
          : undefined
      }
      onError={(e) => {
        if (
          // @ts-expect-error code does exist
          e.target.error.code == MediaError.MEDIA_ERR_NETWORK
        ) {
          handleError("startup", "Browser reported a network error.");
        }
      }}
    />
  );
}