mirror of
https://github.com/blakeblackshear/frigate.git
synced 2026-03-17 21:58:22 +03:00
Fix loading spinners that spin forever on short Explore videos.
The core fix is switching the VOD request from /vod/clip/ (which forces HLS discontinuity markers) to /vod/, and extending the start time to align with the first recording segment's boundary (recordings[0].start_time). Previously, the backend applied a clipFrom offset to trim the first recording segment to the exact requested start time. For short events or cameras with large GOP intervals, this trimming could skip past all keyframes in the segment, leaving hls.js with no decodable starting frame — so it buffered forever. By aligning to the recording boundary, the full segment is included and keyframes are always available. The remaining changes adjust for the fact that the video now starts earlier (at the recording boundary rather than the padded event start). The timestampToVideoTime and videoTimeToTimestamp functions are simplified since there's no longer an inpoint offset to account for. The onPlayerLoaded callback uses a seek-then-play pattern (matching DynamicVideoPlayer's waitAndPlay) to skip past the extra content at the start and begin playback at the correct position. The player rendering is also gated on recordings being loaded so the timestamp mapping always has accurate data. Also fixes a stray "0" being rendered in the player overlay — the classic React falsy-rendering bug: the guard `currentTime &&` renders the number 0 when currentTime is 0, so it is changed to `currentTime != null &&`.
This commit is contained in:
parent
5a214eb0d1
commit
5d43ba8a67
@ -134,7 +134,9 @@ export function TrackingDetails({
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Convert a timeline timestamp to actual video player time, accounting for
|
// Convert a timeline timestamp to actual video player time, accounting for
|
||||||
// motion-only recording gaps. Uses the same algorithm as DynamicVideoController.
|
// motion-only recording gaps. The VOD start time is aligned to
|
||||||
|
// recordings[0].start_time (see videoSource), so video position 0
|
||||||
|
// corresponds to the start of the first recording segment.
|
||||||
const timestampToVideoTime = useCallback(
|
const timestampToVideoTime = useCallback(
|
||||||
(timestamp: number): number => {
|
(timestamp: number): number => {
|
||||||
if (!recordings || recordings.length === 0) {
|
if (!recordings || recordings.length === 0) {
|
||||||
@ -142,48 +144,20 @@ export function TrackingDetails({
|
|||||||
return timestamp - (eventStartRecord - REVIEW_PADDING);
|
return timestamp - (eventStartRecord - REVIEW_PADDING);
|
||||||
}
|
}
|
||||||
|
|
||||||
const videoStartTime = eventStartRecord - REVIEW_PADDING;
|
|
||||||
|
|
||||||
// If timestamp is before video start, return 0
|
|
||||||
if (timestamp < videoStartTime) return 0;
|
|
||||||
|
|
||||||
// Check if timestamp is before the first recording or after the last
|
// Check if timestamp is before the first recording or after the last
|
||||||
if (
|
if (timestamp <= recordings[0].start_time) {
|
||||||
timestamp < recordings[0].start_time ||
|
|
||||||
timestamp > recordings[recordings.length - 1].end_time
|
|
||||||
) {
|
|
||||||
// No recording available at this timestamp
|
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
if (timestamp > recordings[recordings.length - 1].end_time) {
|
||||||
// Calculate the inpoint offset - the HLS video may start partway through the first segment
|
return 0;
|
||||||
let inpointOffset = 0;
|
|
||||||
if (
|
|
||||||
videoStartTime > recordings[0].start_time &&
|
|
||||||
videoStartTime < recordings[0].end_time
|
|
||||||
) {
|
|
||||||
inpointOffset = videoStartTime - recordings[0].start_time;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let seekSeconds = 0;
|
let seekSeconds = 0;
|
||||||
for (const segment of recordings) {
|
for (const segment of recordings) {
|
||||||
// Skip segments that end before our timestamp
|
|
||||||
if (segment.end_time <= timestamp) {
|
if (segment.end_time <= timestamp) {
|
||||||
// Add this segment's duration, but subtract inpoint offset from first segment
|
seekSeconds += segment.duration;
|
||||||
if (segment === recordings[0]) {
|
|
||||||
seekSeconds += segment.duration - inpointOffset;
|
|
||||||
} else {
|
|
||||||
seekSeconds += segment.duration;
|
|
||||||
}
|
|
||||||
} else if (segment.start_time <= timestamp) {
|
} else if (segment.start_time <= timestamp) {
|
||||||
// The timestamp is within this segment
|
seekSeconds += timestamp - segment.start_time;
|
||||||
if (segment === recordings[0]) {
|
|
||||||
// For the first segment, account for the inpoint offset
|
|
||||||
seekSeconds +=
|
|
||||||
timestamp - Math.max(segment.start_time, videoStartTime);
|
|
||||||
} else {
|
|
||||||
seekSeconds += timestamp - segment.start_time;
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -194,7 +168,8 @@ export function TrackingDetails({
|
|||||||
);
|
);
|
||||||
|
|
||||||
// Convert video player time back to timeline timestamp, accounting for
|
// Convert video player time back to timeline timestamp, accounting for
|
||||||
// motion-only recording gaps. Reverse of timestampToVideoTime.
|
// motion-only recording gaps. Reverse of timestampToVideoTime. Video
|
||||||
|
// position 0 corresponds to recordings[0].start_time (see videoSource).
|
||||||
const videoTimeToTimestamp = useCallback(
|
const videoTimeToTimestamp = useCallback(
|
||||||
(playerTime: number): number => {
|
(playerTime: number): number => {
|
||||||
if (!recordings || recordings.length === 0) {
|
if (!recordings || recordings.length === 0) {
|
||||||
@ -203,39 +178,15 @@ export function TrackingDetails({
|
|||||||
return playerTime + videoStartTime;
|
return playerTime + videoStartTime;
|
||||||
}
|
}
|
||||||
|
|
||||||
const videoStartTime = eventStartRecord - REVIEW_PADDING;
|
let timestamp = recordings[0].start_time;
|
||||||
|
|
||||||
// Calculate the inpoint offset - the video may start partway through the first segment
|
|
||||||
let inpointOffset = 0;
|
|
||||||
if (
|
|
||||||
videoStartTime > recordings[0].start_time &&
|
|
||||||
videoStartTime < recordings[0].end_time
|
|
||||||
) {
|
|
||||||
inpointOffset = videoStartTime - recordings[0].start_time;
|
|
||||||
}
|
|
||||||
|
|
||||||
let timestamp = 0;
|
|
||||||
let totalTime = 0;
|
let totalTime = 0;
|
||||||
|
|
||||||
for (const segment of recordings) {
|
for (const segment of recordings) {
|
||||||
const segmentDuration =
|
if (totalTime + segment.duration > playerTime) {
|
||||||
segment === recordings[0]
|
timestamp = segment.start_time + (playerTime - totalTime);
|
||||||
? segment.duration - inpointOffset
|
|
||||||
: segment.duration;
|
|
||||||
|
|
||||||
if (totalTime + segmentDuration > playerTime) {
|
|
||||||
// The player time is within this segment
|
|
||||||
if (segment === recordings[0]) {
|
|
||||||
// For the first segment, add the inpoint offset
|
|
||||||
timestamp =
|
|
||||||
Math.max(segment.start_time, videoStartTime) +
|
|
||||||
(playerTime - totalTime);
|
|
||||||
} else {
|
|
||||||
timestamp = segment.start_time + (playerTime - totalTime);
|
|
||||||
}
|
|
||||||
break;
|
break;
|
||||||
} else {
|
} else {
|
||||||
totalTime += segmentDuration;
|
totalTime += segment.duration;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -522,16 +473,23 @@ export function TrackingDetails({
|
|||||||
const eventStartRec = event.start_time + sourceOffset / 1000;
|
const eventStartRec = event.start_time + sourceOffset / 1000;
|
||||||
const eventEndRec =
|
const eventEndRec =
|
||||||
(event.end_time ?? Date.now() / 1000) + sourceOffset / 1000;
|
(event.end_time ?? Date.now() / 1000) + sourceOffset / 1000;
|
||||||
const startTime = eventStartRec - REVIEW_PADDING;
|
// Use the first recording's start_time when available so the VOD
|
||||||
|
// request aligns to a recording boundary. This prevents clipFrom
|
||||||
|
// from trimming past keyframes in the first segment, which causes
|
||||||
|
// hls.js to stall on short clips.
|
||||||
|
const startTime =
|
||||||
|
recordings && recordings.length > 0
|
||||||
|
? Math.min(recordings[0].start_time, eventStartRec - REVIEW_PADDING)
|
||||||
|
: eventStartRec - REVIEW_PADDING;
|
||||||
const endTime = eventEndRec + REVIEW_PADDING;
|
const endTime = eventEndRec + REVIEW_PADDING;
|
||||||
const playlist = `${baseUrl}vod/clip/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;
|
const playlist = `${baseUrl}vod/${event.camera}/start/${startTime}/end/${endTime}/master.m3u8`;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
playlist,
|
playlist,
|
||||||
startPosition: 0,
|
startPosition: 0,
|
||||||
};
|
};
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||||
}, [event]);
|
}, [event, recordings]);
|
||||||
|
|
||||||
// Determine camera aspect ratio category
|
// Determine camera aspect ratio category
|
||||||
const cameraAspect = useMemo(() => {
|
const cameraAspect = useMemo(() => {
|
||||||
@ -620,7 +578,7 @@ export function TrackingDetails({
|
|||||||
cameraAspect === "tall" ? "h-full" : "w-full",
|
cameraAspect === "tall" ? "h-full" : "w-full",
|
||||||
)}
|
)}
|
||||||
>
|
>
|
||||||
{displaySource == "video" && (
|
{displaySource == "video" && recordings && (
|
||||||
<>
|
<>
|
||||||
<HlsVideoPlayer
|
<HlsVideoPlayer
|
||||||
videoRef={videoRef}
|
videoRef={videoRef}
|
||||||
@ -634,6 +592,24 @@ export function TrackingDetails({
|
|||||||
onTimeUpdate={handleTimeUpdate}
|
onTimeUpdate={handleTimeUpdate}
|
||||||
onSeekToTime={handleSeekToTime}
|
onSeekToTime={handleSeekToTime}
|
||||||
onUploadFrame={onUploadFrameToPlus}
|
onUploadFrame={onUploadFrameToPlus}
|
||||||
|
onPlayerLoaded={() => {
|
||||||
|
if (videoRef.current) {
|
||||||
|
const video = videoRef.current;
|
||||||
|
// The VOD starts at recordings[0].start_time (see
|
||||||
|
// videoSource), so seek to the desired start position
|
||||||
|
// within that segment, wait for the seek to complete,
|
||||||
|
// then play. Mirrors DynamicVideoController's waitAndPlay.
|
||||||
|
const startPos = timestampToVideoTime(
|
||||||
|
eventStartRecord - REVIEW_PADDING,
|
||||||
|
);
|
||||||
|
const onSeeked = () => {
|
||||||
|
video.removeEventListener("seeked", onSeeked);
|
||||||
|
video.play();
|
||||||
|
};
|
||||||
|
video.addEventListener("seeked", onSeeked, { once: true });
|
||||||
|
video.currentTime = startPos;
|
||||||
|
}
|
||||||
|
}}
|
||||||
onPlaying={() => setIsVideoLoading(false)}
|
onPlaying={() => setIsVideoLoading(false)}
|
||||||
setFullResolution={setFullResolution}
|
setFullResolution={setFullResolution}
|
||||||
toggleFullscreen={toggleFullscreen}
|
toggleFullscreen={toggleFullscreen}
|
||||||
|
|||||||
@ -357,7 +357,7 @@ export default function HlsVideoPlayer({
|
|||||||
{transformedOverlay}
|
{transformedOverlay}
|
||||||
{isDetailMode &&
|
{isDetailMode &&
|
||||||
camera &&
|
camera &&
|
||||||
currentTime &&
|
currentTime != null &&
|
||||||
loadedMetadata &&
|
loadedMetadata &&
|
||||||
videoDimensions.width > 0 &&
|
videoDimensions.width > 0 &&
|
||||||
videoDimensions.height > 0 && (
|
videoDimensions.height > 0 && (
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user