Compare commits

..

No commits in common. "c01443e5551580120c4ca3b6f14365f267b87e1f" and "de704ef2116ee322a4397a98f670b6704c36114d" have entirely different histories.

6 changed files with 100 additions and 164 deletions

View File

@ -56,7 +56,6 @@ export function TrackingDetails({
const apiHost = useApiHost(); const apiHost = useApiHost();
const imgRef = useRef<HTMLImageElement | null>(null); const imgRef = useRef<HTMLImageElement | null>(null);
const [imgLoaded, setImgLoaded] = useState(false); const [imgLoaded, setImgLoaded] = useState(false);
const [isVideoLoading, setIsVideoLoading] = useState(true);
const [displaySource, _setDisplaySource] = useState<"video" | "image">( const [displaySource, _setDisplaySource] = useState<"video" | "image">(
"video", "video",
); );
@ -71,10 +70,6 @@ export function TrackingDetails({
(event.start_time ?? 0) + annotationOffset / 1000 - REVIEW_PADDING, (event.start_time ?? 0) + annotationOffset / 1000 - REVIEW_PADDING,
); );
useEffect(() => {
setIsVideoLoading(true);
}, [event.id]);
const { data: eventSequence } = useSWR<TrackingDetailsSequence[]>([ const { data: eventSequence } = useSWR<TrackingDetailsSequence[]>([
"timeline", "timeline",
{ {
@ -532,7 +527,6 @@ export function TrackingDetails({
)} )}
> >
{displaySource == "video" && ( {displaySource == "video" && (
<>
<HlsVideoPlayer <HlsVideoPlayer
videoRef={videoRef} videoRef={videoRef}
containerRef={containerRef} containerRef={containerRef}
@ -545,15 +539,10 @@ export function TrackingDetails({
onTimeUpdate={handleTimeUpdate} onTimeUpdate={handleTimeUpdate}
onSeekToTime={handleSeekToTime} onSeekToTime={handleSeekToTime}
onUploadFrame={onUploadFrameToPlus} onUploadFrame={onUploadFrameToPlus}
onPlaying={() => setIsVideoLoading(false)}
isDetailMode={true} isDetailMode={true}
camera={event.camera} camera={event.camera}
currentTimeOverride={currentTime} currentTimeOverride={currentTime}
/> />
{isVideoLoading && (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
)}
</>
)} )}
{displaySource == "image" && ( {displaySource == "image" && (
<> <>

View File

@ -130,8 +130,6 @@ export default function HlsVideoPlayer({
return; return;
} }
setLoadedMetadata(false);
const currentPlaybackRate = videoRef.current.playbackRate; const currentPlaybackRate = videoRef.current.playbackRate;
if (!useHlsCompat) { if (!useHlsCompat) {

View File

@ -309,7 +309,6 @@ function PreviewVideoPlayer({
playsInline playsInline
muted muted
disableRemotePlayback disableRemotePlayback
disablePictureInPicture
onSeeked={onPreviewSeeked} onSeeked={onPreviewSeeked}
onLoadedData={() => { onLoadedData={() => {
if (firstLoad) { if (firstLoad) {

View File

@ -2,10 +2,7 @@ import { Recording } from "@/types/record";
import { DynamicPlayback } from "@/types/playback"; import { DynamicPlayback } from "@/types/playback";
import { PreviewController } from "../PreviewPlayer"; import { PreviewController } from "../PreviewPlayer";
import { TimeRange, TrackingDetailsSequence } from "@/types/timeline"; import { TimeRange, TrackingDetailsSequence } from "@/types/timeline";
import { import { calculateInpointOffset } from "@/utils/videoUtil";
calculateInpointOffset,
calculateSeekPosition,
} from "@/utils/videoUtil";
type PlayerMode = "playback" | "scrubbing"; type PlayerMode = "playback" | "scrubbing";
@ -75,21 +72,39 @@ export class DynamicVideoController {
return; return;
} }
if (
this.recordings.length == 0 ||
time < this.recordings[0].start_time ||
time > this.recordings[this.recordings.length - 1].end_time
) {
this.setNoRecording(true);
return;
}
if (this.playerMode != "playback") { if (this.playerMode != "playback") {
this.playerMode = "playback"; this.playerMode = "playback";
} }
const seekSeconds = calculateSeekPosition( let seekSeconds = 0;
time, (this.recordings || []).every((segment) => {
this.recordings, // if the next segment is past the desired time, stop calculating
this.inpointOffset, if (segment.start_time > time) {
); return false;
if (seekSeconds === undefined) {
this.setNoRecording(true);
return;
} }
if (segment.end_time < time) {
seekSeconds += segment.end_time - segment.start_time;
return true;
}
seekSeconds +=
segment.end_time - segment.start_time - (segment.end_time - time);
return true;
});
// adjust for HLS inpoint offset
seekSeconds -= this.inpointOffset;
if (seekSeconds != 0) { if (seekSeconds != 0) {
this.playerController.currentTime = seekSeconds; this.playerController.currentTime = seekSeconds;

View File

@ -14,10 +14,7 @@ import { VideoResolutionType } from "@/types/live";
import axios from "axios"; import axios from "axios";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { import { calculateInpointOffset } from "@/utils/videoUtil";
calculateInpointOffset,
calculateSeekPosition,
} from "@/utils/videoUtil";
import { isFirefox } from "react-device-detect"; import { isFirefox } from "react-device-detect";
/** /**
@ -112,10 +109,10 @@ export default function DynamicVideoPlayer({
const [isLoading, setIsLoading] = useState(false); const [isLoading, setIsLoading] = useState(false);
const [isBuffering, setIsBuffering] = useState(false); const [isBuffering, setIsBuffering] = useState(false);
const [loadingTimeout, setLoadingTimeout] = useState<NodeJS.Timeout>(); const [loadingTimeout, setLoadingTimeout] = useState<NodeJS.Timeout>();
const [source, setSource] = useState<HlsSource>({
// Don't set source until recordings load - we need accurate startPosition playlist: `${apiHost}vod/${camera}/start/${timeRange.after}/end/${timeRange.before}/master.m3u8`,
// to avoid hls.js clamping to video end when startPosition exceeds duration startPosition: startTimestamp ? startTimestamp - timeRange.after : 0,
const [source, setSource] = useState<HlsSource | undefined>(undefined); });
// start at correct time // start at correct time
@ -187,7 +184,7 @@ export default function DynamicVideoPlayer({
); );
useEffect(() => { useEffect(() => {
if (!recordings?.length) { if (!controller || !recordings?.length) {
if (recordings?.length == 0) { if (recordings?.length == 0) {
setNoRecording(true); setNoRecording(true);
} }
@ -195,6 +192,10 @@ export default function DynamicVideoPlayer({
return; return;
} }
if (playerRef.current) {
playerRef.current.autoplay = !isScrubbing;
}
let startPosition = undefined; let startPosition = undefined;
if (startTimestamp) { if (startTimestamp) {
@ -202,12 +203,14 @@ export default function DynamicVideoPlayer({
recordingParams.after, recordingParams.after,
(recordings || [])[0], (recordings || [])[0],
); );
const idealStartPosition = Math.max(
startPosition = calculateSeekPosition( 0,
startTimestamp, startTimestamp - timeRange.after - inpointOffset,
recordings,
inpointOffset,
); );
if (idealStartPosition >= recordings[0].start_time - timeRange.after) {
startPosition = idealStartPosition;
}
} }
setSource({ setSource({
@ -215,18 +218,6 @@ export default function DynamicVideoPlayer({
startPosition, startPosition,
}); });
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [recordings]);
useEffect(() => {
if (!controller || !recordings?.length) {
return;
}
if (playerRef.current) {
playerRef.current.autoplay = !isScrubbing;
}
setLoadingTimeout(setTimeout(() => setIsLoading(true), 1000)); setLoadingTimeout(setTimeout(() => setIsLoading(true), 1000));
controller.newPlayback({ controller.newPlayback({
@ -234,7 +225,7 @@ export default function DynamicVideoPlayer({
timeRange, timeRange,
}); });
// we only want this to change when controller or recordings update // we only want this to change when recordings update
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [controller, recordings]); }, [controller, recordings]);
@ -272,7 +263,6 @@ export default function DynamicVideoPlayer({
return ( return (
<> <>
{source && (
<HlsVideoPlayer <HlsVideoPlayer
videoRef={playerRef} videoRef={playerRef}
containerRef={containerRef} containerRef={containerRef}
@ -313,7 +303,6 @@ export default function DynamicVideoPlayer({
camera={contextCamera || camera} camera={contextCamera || camera}
currentTimeOverride={currentTime} currentTimeOverride={currentTime}
/> />
)}
<PreviewPlayer <PreviewPlayer
className={cn( className={cn(
className, className,

View File

@ -24,57 +24,3 @@ export function calculateInpointOffset(
return 0; return 0;
} }
/**
 * Calculates the video player time (in seconds) for a given timestamp
 * by iterating through recording segments and summing their durations.
 * This accounts for the fact that the video is a concatenation of segments,
 * not a single continuous stream.
 *
 * @param timestamp - The target timestamp to seek to
 * @param recordings - Array of recording segments; assumed sorted by
 *                     start_time and non-overlapping — TODO confirm with callers
 * @param inpointOffset - HLS inpoint offset to subtract from the result
 * @returns The calculated seek position in seconds, or undefined if the
 *          timestamp is out of range (or the inpoint offset pushes the
 *          result negative)
 */
export function calculateSeekPosition(
  timestamp: number,
  recordings: Recording[],
  inpointOffset: number = 0,
): number | undefined {
  if (!recordings || recordings.length === 0) {
    return undefined;
  }

  // Reject timestamps outside the span covered by the recordings.
  if (
    timestamp < recordings[0].start_time ||
    timestamp > recordings[recordings.length - 1].end_time
  ) {
    return undefined;
  }

  let seekSeconds = 0;
  for (const segment of recordings) {
    if (segment.start_time > timestamp) {
      // Segments are ordered, so no later segment can contain the timestamp.
      break;
    }
    if (segment.end_time < timestamp) {
      // Segment ends before the target: contributes its full duration.
      seekSeconds += segment.end_time - segment.start_time;
    } else {
      // Target falls inside this segment: contributes the partial duration.
      // (Equivalent to end - start - (end - timestamp), simplified.)
      seekSeconds += timestamp - segment.start_time;
      break;
    }
  }

  // Adjust for HLS inpoint offset; a negative result means the timestamp
  // lands before the playable portion of the stream.
  seekSeconds -= inpointOffset;
  return seekSeconds >= 0 ? seekSeconds : undefined;
}