frigate/web/src/components/MsePlayer.jsx
Nicolas Mowen d8123d2497
Add go2rtc and add restream role / live source (#4082)
* Pull go2rtc dependency

* Add go2rtc to local services and add to s6

* Add relay controller for go2rtc

* Add restream role

* Add restream role

* Add restream to nginx

* Add camera live source config

* Disable RTMP by default and use restream

* Use go2rtc for camera config

* Fix go2rtc move

* Start restream on frigate start

* Send restream to camera level

* Fix restream

* Make sure jsmpeg works as expected

* Make view respect live size config

* Tweak player options to fit live view

* Adjust VideoPlayer to accept live option which disables irrelevant controls

* Add multiple options for restream live view

* Add base for webrtc option

* Setup specific restream modules

* Make mp4 the default streaming for now

* Expose 8554 for rtsp relay from go2rtc

* Formatting

* Update docs to suggest new restream method.

* Update docs to reflect restream role

* Update docs to reflect restream role

* Add webrtc player

* Improvements to webRTC

* Support webrtc

* Cleanup

* Adjust rtmp test and add restream test

* Fix tests

* Add restream tests

* Add live view docs and show different options

* Small docs tweak

* Support all stream types

* Update to beta 9 of go2rtc

* Formatting

* Make jsmpeg the default

* Support wss if made from https

* Support wss if made from https

* Use useEffect

* Set url outside useEffect

* Fix passed deps

* Update docs about required host mode

* Try memo instead

* Close websocket on changing camera

* Formatting

* Close pc connection

* Set video source to null on cleanup

* Use full path since go2rtc can't see PATH var

* Adjust audio codec to enable browser audio by default

* Cleanup stream creation

* Add restream tests

* Format tests

* Mock requests

* Adjust paths

* Move stream configs to restream

* Remove live source

* Remove live config

* Use live persistence for which view to use on each camera

* Fix live sizes

* Only use jsmpeg sizes for jsmpeg live

* Set max live size

* Remove access of live config

* Add selector for live view source in web view

* Remove RTMP from default list of roles

* Update docs

* Fix tests

* Fix docs for live view modes

* Make default undefined to avoid race condition

* Wait until camera source is loaded to avoid race condition

* Fix tests

* Add config to go2rtc

* Work with config

* Set full path for config

* Set to use stun

* Check for mounted file

* Look for frigate-go2rtc

* Update docs to reflect webRTC configuration.

* Add link to go2rtc config

* Update docs to be more clear

* Update docs to be more clear

* Update format

Co-authored-by: Felipe Santos <felipecassiors@gmail.com>

* Update live docs

* Improve bash startup script

* Add option to force audio compatibility

* Formatting

* Fix mapping

* Fix broken link

* Update go2rtc version

* Get go2rtc webui working

* Add support for mse

* Remove mp4 option

* Undo changes to video player

* Update docs for new live view options

* Make separate path for mse

* Remove unused

* Remove mp4 path

* Try to get go2rtc proxy working

* Try to get go2rtc proxy working

* Remove unused callback

* Allow websocket on restream dashboard

* Make mse default stream option

* Fix mse sizing

* Don't assume roles is defined

* Remove nginx mapping to go2rtc ui

Co-authored-by: Felipe Santos <felipecassiors@gmail.com>
Co-authored-by: Blake Blackshear <blakeb@blakeshome.com>
2022-11-02 06:36:09 -05:00

import { h } from 'preact';
import { baseUrl } from '../api/baseUrl';
import { useEffect } from 'preact/hooks';

export default function MsePlayer({ camera, width, height }) {
  // go2rtc MSE endpoint proxied through nginx; https pages get wss automatically
  const url = `${baseUrl.replace(/^http/, 'ws')}live/mse/api/ws?src=${camera}`;

  useEffect(() => {
    const video = document.querySelector('#video');
    // support api_path
    const ws = new WebSocket(url);
    ws.binaryType = 'arraybuffer';

    let mediaSource;

    ws.onopen = () => {
      // https://web.dev/i18n/en/fast-playback-with-preload/#manual_buffering
      // https://developer.mozilla.org/en-US/docs/Web/API/Media_Source_Extensions_API
      mediaSource = new MediaSource();
      video.src = URL.createObjectURL(mediaSource);
      mediaSource.onsourceopen = () => {
        mediaSource.onsourceopen = null;
        URL.revokeObjectURL(video.src);
        // ask go2rtc to start streaming MSE data over this socket
        ws.send(JSON.stringify({ type: 'mse' }));
      };
    };

    let sourceBuffer,
      queueBuffer = [];

    ws.onmessage = (ev) => {
      if (typeof ev.data === 'string') {
        const data = JSON.parse(ev.data);
        if (data.type === 'mse') {
          // the text reply carries the codec string for the SourceBuffer
          sourceBuffer = mediaSource.addSourceBuffer(data.value);
          // important: segments supports TrackFragDecodeTime
          // sequence supports only TrackFragRunEntry Duration
          sourceBuffer.mode = 'segments';
          sourceBuffer.onupdateend = () => {
            if (!sourceBuffer.updating && queueBuffer.length > 0) {
              sourceBuffer.appendBuffer(queueBuffer.shift());
            }
          };
        }
      } else if (sourceBuffer.updating) {
        // queue segments that arrive while a previous append is still in flight
        queueBuffer.push(ev.data);
      } else {
        sourceBuffer.appendBuffer(ev.data);
      }
    };

    // keep playback close to the live edge by nudging playbackRate instead of seeking
    let offsetTime = 1,
      noWaiting = 0;
    const interval = setInterval(() => {
      if (video.paused || video.seekable.length === 0) return;
      if (noWaiting < 0) {
        // playback stalled recently, so back further away from the live edge
        offsetTime = Math.min(offsetTime * 1.1, 5);
      } else if (noWaiting >= 30) {
        // 30s without a stall: creep back toward the live edge
        noWaiting = 0;
        offsetTime = Math.max(offsetTime * 0.9, 0.5);
      }
      noWaiting += 1;

      const endTime = video.seekable.end(video.seekable.length - 1);
      let playbackRate = (endTime - video.currentTime) / offsetTime;
      if (playbackRate < 0.1) {
        // video.currentTime = endTime - offsetTime;
        playbackRate = 0.1;
      } else if (playbackRate > 10) {
        // video.currentTime = endTime - offsetTime;
        playbackRate = 10;
      }
      // https://github.com/GoogleChrome/developer.chrome.com/issues/135
      video.playbackRate = playbackRate;
    }, 1000);

    video.onwaiting = () => {
      // playback stalled: jump back to just behind the live edge
      const endTime = video.seekable.end(video.seekable.length - 1);
      video.currentTime = endTime - offsetTime;
      noWaiting = -1;
    };

    return () => {
      // clean up when the camera (url) changes or the player unmounts
      clearInterval(interval);
      ws.close();
      video.src = '';
    };
  }, [url]);

  return (
    <div>
      <video id="video" autoplay playsinline controls muted width={width} height={height} />
    </div>
  );
}
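
Usage sketch (not part of MsePlayer.jsx, only an assumption about how a caller might wire it up): a parent Preact component renders the player for a single camera. The `LiveExample` name, the `back_yard` camera name, the 1280x720 size, and the import path are all placeholders.

import { h } from 'preact';
import MsePlayer from './components/MsePlayer';

// Hypothetical live view wrapper; "back_yard" stands in for a camera name
// defined in the Frigate config.
export default function LiveExample() {
  return <MsePlayer camera="back_yard" width={1280} height={720} />;
}

Because the component looks up a hard-coded `#video` element with document.querySelector, only one MsePlayer should be mounted on a page at a time.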