Tracked Object Details pane tweaks (#20762)

* normalize path and point sizes

* fix bounding box display to only show on actual points that have a box

* add support for using snapshots
Josh Hawkins, 2025-11-02 07:48:43 -06:00, committed by GitHub
parent aff82f809c
commit d44340eca6
2 changed files with 208 additions and 43 deletions


@@ -58,6 +58,47 @@ export default function ObjectTrackOverlay({
   const effectiveCurrentTime = currentTime - annotationOffset / 1000;

+  const {
+    pathStroke,
+    pointRadius,
+    pointStroke,
+    zoneStroke,
+    boxStroke,
+    highlightRadius,
+  } = useMemo(() => {
+    const BASE_WIDTH = 1280;
+    const BASE_HEIGHT = 720;
+    const BASE_PATH_STROKE = 5;
+    const BASE_POINT_RADIUS = 7;
+    const BASE_POINT_STROKE = 3;
+    const BASE_ZONE_STROKE = 5;
+    const BASE_BOX_STROKE = 5;
+    const BASE_HIGHLIGHT_RADIUS = 5;
+
+    const scale = Math.sqrt(
+      (videoWidth * videoHeight) / (BASE_WIDTH * BASE_HEIGHT),
+    );
+
+    const pathStroke = Math.max(1, Math.round(BASE_PATH_STROKE * scale));
+    const pointRadius = Math.max(2, Math.round(BASE_POINT_RADIUS * scale));
+    const pointStroke = Math.max(1, Math.round(BASE_POINT_STROKE * scale));
+    const zoneStroke = Math.max(1, Math.round(BASE_ZONE_STROKE * scale));
+    const boxStroke = Math.max(1, Math.round(BASE_BOX_STROKE * scale));
+    const highlightRadius = Math.max(
+      2,
+      Math.round(BASE_HIGHLIGHT_RADIUS * scale),
+    );
+
+    return {
+      pathStroke,
+      pointRadius,
+      pointStroke,
+      zoneStroke,
+      boxStroke,
+      highlightRadius,
+    };
+  }, [videoWidth, videoHeight]);
+
   // Fetch all event data in a single request (CSV ids)
   const { data: eventsData } = useSWR<Event[]>(
     selectedObjectIds.length > 0
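
The useMemo above scales every stroke width and radius by the square root of the area ratio between the actual frame and a 1280x720 baseline, so overlay geometry stays visually consistent across stream resolutions: a 4K frame gets roughly 3x thicker strokes rather than 9x. A minimal standalone sketch of the same idea (the helper name is illustrative, not part of this commit):

    // Hypothetical helper mirroring the useMemo above, for illustration only.
    const BASE_AREA = 1280 * 720;

    function scaleOverlaySizes(videoWidth: number, videoHeight: number) {
      // sqrt of the area ratio grows linearly with frame dimensions,
      // so strokes scale with perceived size rather than with pixel count
      const scale = Math.sqrt((videoWidth * videoHeight) / BASE_AREA);
      return {
        pathStroke: Math.max(1, Math.round(5 * scale)),
        pointRadius: Math.max(2, Math.round(7 * scale)),
      };
    }

    // e.g. 3840x2160 gives scale = 3, so pathStroke = 15, pointRadius = 21
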
@@ -198,16 +239,21 @@ export default function ObjectTrackOverlay({
           b.timestamp - a.timestamp,
       )[0]?.data?.zones || [];

-    // bounding box (with tolerance for browsers with seek precision by-design issues)
-    const boxCandidates = timelineData?.filter(
-      (event: TrackingDetailsSequence) =>
-        event.timestamp <= effectiveCurrentTime + TOLERANCE &&
-        event.data.box,
-    );
-    const currentBox = boxCandidates?.sort(
-      (a: TrackingDetailsSequence, b: TrackingDetailsSequence) =>
-        b.timestamp - a.timestamp,
-    )[0]?.data?.box;
+    // bounding box - only show if there's a timeline event at/near the current time with a box
+    // Search all timeline events (not just those before current time) to find one matching the seek position
+    const nearbyTimelineEvent = timelineData
+      ?.filter((event: TrackingDetailsSequence) => event.data.box)
+      .sort(
+        (a: TrackingDetailsSequence, b: TrackingDetailsSequence) =>
+          Math.abs(a.timestamp - effectiveCurrentTime) -
+          Math.abs(b.timestamp - effectiveCurrentTime),
+      )
+      .find(
+        (event: TrackingDetailsSequence) =>
+          Math.abs(event.timestamp - effectiveCurrentTime) <= TOLERANCE,
+      );
+
+    const currentBox = nearbyTimelineEvent?.data?.box;

     return {
       objectId,
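
The rewritten lookup above replaces "latest box at or before the current time" with "box nearest the current time, and only within TOLERANCE", so a bounding box is drawn only when the seek position actually lands on a timeline point that carries one. The pattern in isolation (a sketch; the type and helper names are illustrative):

    // Find the entry whose timestamp is closest to `target`, accepting it
    // only if it lies within `tolerance` seconds. Illustrative sketch.
    type Stamped = { timestamp: number };

    function nearestWithin<T extends Stamped>(
      items: T[],
      target: number,
      tolerance: number,
    ): T | undefined {
      return items
        .slice() // avoid mutating the caller's array
        .sort(
          (a, b) =>
            Math.abs(a.timestamp - target) - Math.abs(b.timestamp - target),
        )
        .find((item) => Math.abs(item.timestamp - target) <= tolerance);
    }
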
@@ -333,7 +379,7 @@ export default function ObjectTrackOverlay({
             points={zone.points}
             fill={zone.fill}
             stroke={zone.stroke}
-            strokeWidth="5"
+            strokeWidth={zoneStroke}
             opacity="0.7"
           />
         ))}
@@ -353,7 +399,7 @@ export default function ObjectTrackOverlay({
               d={generateStraightPath(absolutePositions)}
               fill="none"
               stroke={objData.color}
-              strokeWidth="5"
+              strokeWidth={pathStroke}
               strokeLinecap="round"
               strokeLinejoin="round"
             />
@@ -365,13 +411,13 @@ export default function ObjectTrackOverlay({
               <circle
                 cx={pos.x}
                 cy={pos.y}
-                r="7"
+                r={pointRadius}
                 fill={getPointColor(
                   objData.color,
                   pos.lifecycle_item?.class_type,
                 )}
                 stroke="white"
-                strokeWidth="3"
+                strokeWidth={pointStroke}
                 style={{ cursor: onSeekToTime ? "pointer" : "default" }}
                 onClick={() => handlePointClick(pos.timestamp)}
               />
@@ -400,7 +446,7 @@ export default function ObjectTrackOverlay({
                 height={objData.currentBox[3] * videoHeight}
                 fill="none"
                 stroke={objData.color}
-                strokeWidth="5"
+                strokeWidth={boxStroke}
                 opacity="0.9"
               />
               <circle
@@ -412,10 +458,10 @@ export default function ObjectTrackOverlay({
                   (objData.currentBox[1] + objData.currentBox[3]) *
                   videoHeight
                 }
-                r="5"
+                r={highlightRadius}
                 fill="rgb(255, 255, 0)" // yellow highlight
                 stroke={objData.color}
-                strokeWidth="5"
+                strokeWidth={boxStroke}
                 opacity="1"
               />
             </g>
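
In the box-drawing hunks above, objData.currentBox holds normalized [x, y, width, height] fractions that are multiplied by videoWidth and videoHeight to get pixels; the yellow highlight circle sits at the box's bottom-right corner, (x + width, y + height). A sketch of that conversion (the function is illustrative, not part of this commit):

    // Convert a normalized [x, y, w, h] box (0..1 fractions) to pixels.
    function toPixelRect(
      box: [number, number, number, number],
      videoWidth: number,
      videoHeight: number,
    ) {
      const [x, y, w, h] = box;
      return {
        left: x * videoWidth,
        top: y * videoHeight,
        width: w * videoWidth,
        height: h * videoHeight,
        // bottom-right corner, where the highlight circle is drawn
        cornerX: (x + w) * videoWidth,
        cornerY: (y + h) * videoHeight,
      };
    }
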


@@ -8,7 +8,7 @@ import Heading from "@/components/ui/heading";
 import { FrigateConfig } from "@/types/frigateConfig";
 import { formatUnixTimestampToDateTime } from "@/utils/dateUtil";
 import { getIconForLabel } from "@/utils/iconUtil";
-import { LuCircle, LuSettings } from "react-icons/lu";
+import { LuCircle, LuFolderX, LuSettings } from "react-icons/lu";
 import { cn } from "@/lib/utils";
 import {
   Tooltip,
@@ -37,9 +37,12 @@ import { HiDotsHorizontal } from "react-icons/hi";
 import axios from "axios";
 import { toast } from "sonner";
 import { useDetailStream } from "@/context/detail-stream-context";
-import { isDesktop, isIOS } from "react-device-detect";
+import { isDesktop, isIOS, isMobileOnly, isSafari } from "react-device-detect";
 import Chip from "@/components/indicators/Chip";
 import { FaDownload, FaHistory } from "react-icons/fa";
+import { useApiHost } from "@/api";
+import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator";
+import ObjectTrackOverlay from "../ObjectTrackOverlay";

 type TrackingDetailsProps = {
   className?: string;
@@ -56,9 +59,19 @@ export function TrackingDetails({
   const videoRef = useRef<HTMLVideoElement | null>(null);
   const { t } = useTranslation(["views/explore"]);
   const navigate = useNavigate();
+  const apiHost = useApiHost();
+  const imgRef = useRef<HTMLImageElement | null>(null);
+  const [imgLoaded, setImgLoaded] = useState(false);
+  const [displaySource, _setDisplaySource] = useState<"video" | "image">(
+    "video",
+  );

   const { setSelectedObjectIds, annotationOffset, setAnnotationOffset } =
     useDetailStream();

+  // manualOverride holds a record-stream timestamp explicitly chosen by the
+  // user (eg, clicking a lifecycle row). When null we display `currentTime`.
+  const [manualOverride, setManualOverride] = useState<number | null>(null);
+
   // event.start_time is detect time, convert to record, then subtract padding
   const [currentTime, setCurrentTime] = useState(
     (event.start_time ?? 0) + annotationOffset / 1000 - REVIEW_PADDING,
@@ -73,9 +86,13 @@ export function TrackingDetails({

   const { data: config } = useSWR<FrigateConfig>("config");

+  // Use manualOverride (set when seeking in image mode) if present so
+  // lifecycle rows and overlays follow image-mode seeks. Otherwise fall
+  // back to currentTime used for video mode.
   const effectiveTime = useMemo(() => {
-    return currentTime - annotationOffset / 1000;
-  }, [currentTime, annotationOffset]);
+    const displayedRecordTime = manualOverride ?? currentTime;
+    return displayedRecordTime - annotationOffset / 1000;
+  }, [manualOverride, currentTime, annotationOffset]);

   const containerRef = useRef<HTMLDivElement | null>(null);
   const [_selectedZone, setSelectedZone] = useState("");
@@ -118,20 +135,30 @@ export function TrackingDetails({

   const handleLifecycleClick = useCallback(
     (item: TrackingDetailsSequence) => {
-      if (!videoRef.current) return;
+      if (!videoRef.current && !imgRef.current) return;

       // Convert lifecycle timestamp (detect stream) to record stream time
       const targetTimeRecord = item.timestamp + annotationOffset / 1000;

-      // Convert to video-relative time for seeking
+      if (displaySource === "image") {
+        // For image mode: set a manual override timestamp and update
+        // currentTime so overlays render correctly.
+        setManualOverride(targetTimeRecord);
+        setCurrentTime(targetTimeRecord);
+        return;
+      }
+
+      // For video mode: convert to video-relative time and seek player
       const eventStartRecord =
         (event.start_time ?? 0) + annotationOffset / 1000;
       const videoStartTime = eventStartRecord - REVIEW_PADDING;
       const relativeTime = targetTimeRecord - videoStartTime;

-      videoRef.current.currentTime = relativeTime;
+      if (videoRef.current) {
+        videoRef.current.currentTime = relativeTime;
+      }
     },
-    [event.start_time, annotationOffset],
+    [event.start_time, annotationOffset, displaySource],
   );

   const formattedStart = config
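
handleLifecycleClick juggles two clocks: lifecycle timestamps live on the detect stream, annotationOffset (in ms) shifts them onto the record stream, and the loaded clip begins REVIEW_PADDING seconds before the event's record-stream start. The video-mode seek arithmetic in isolation (a sketch; the REVIEW_PADDING value here is a placeholder, not taken from this commit):

    // Sketch of the video-mode seek arithmetic used above.
    const REVIEW_PADDING = 5; // illustrative: seconds of clip before the event

    function toVideoRelativeTime(
      detectTimestamp: number, // lifecycle item time (detect stream)
      eventStartDetect: number, // event.start_time (detect stream)
      annotationOffsetMs: number,
    ): number {
      const offsetSec = annotationOffsetMs / 1000;
      const targetTimeRecord = detectTimestamp + offsetSec;
      const videoStartTime = eventStartDetect + offsetSec - REVIEW_PADDING;
      return targetTimeRecord - videoStartTime; // seconds into the clip
    }
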
@@ -172,11 +199,20 @@ export function TrackingDetails({
   }, [eventSequence]);

   useEffect(() => {
-    if (seekToTimestamp === null || !videoRef.current) return;
+    if (seekToTimestamp === null) return;
+
+    if (displaySource === "image") {
+      // For image mode, set the manual override so the snapshot updates to
+      // the exact record timestamp.
+      setManualOverride(seekToTimestamp);
+      setSeekToTimestamp(null);
+      return;
+    }

     // seekToTimestamp is a record stream timestamp
     // event.start_time is detect stream time, convert to record
     // The video clip starts at (eventStartRecord - REVIEW_PADDING)
+    if (!videoRef.current) return;
     const eventStartRecord = event.start_time + annotationOffset / 1000;
     const videoStartTime = eventStartRecord - REVIEW_PADDING;
     const relativeTime = seekToTimestamp - videoStartTime;
@@ -184,7 +220,14 @@ export function TrackingDetails({
       videoRef.current.currentTime = relativeTime;
     }
     setSeekToTimestamp(null);
-  }, [seekToTimestamp, event.start_time, annotationOffset]);
+  }, [
+    seekToTimestamp,
+    event.start_time,
+    annotationOffset,
+    apiHost,
+    event.camera,
+    displaySource,
+  ]);

   const isWithinEventRange =
     effectiveTime !== undefined &&
@@ -287,6 +330,27 @@ export function TrackingDetails({
     [event.start_time, annotationOffset],
   );

+  const [src, setSrc] = useState(
+    `${apiHost}api/${event.camera}/recordings/${currentTime + REVIEW_PADDING}/snapshot.jpg?height=500`,
+  );
+  const [hasError, setHasError] = useState(false);
+
+  // Derive the record timestamp to display: manualOverride if present,
+  // otherwise use currentTime.
+  const displayedRecordTime = manualOverride ?? currentTime;
+
+  useEffect(() => {
+    if (displayedRecordTime) {
+      const newSrc = `${apiHost}api/${event.camera}/recordings/${displayedRecordTime}/snapshot.jpg?height=500`;
+      setSrc(newSrc);
+    }
+    setImgLoaded(false);
+    setHasError(false);
+    // we know that these deps are correct
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [displayedRecordTime]);
+
   if (!config) {
     return <ActivityIndicator />;
   }
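
In image mode, "seeking" amounts to swapping the img src: the recordings snapshot endpoint is sampled at the displayed record timestamp, and resetting imgLoaded and hasError re-arms the loading indicator for each new frame. The URL construction on its own (a sketch; the helper name is illustrative):

    // Build the recordings snapshot URL used by the effect above.
    function snapshotUrl(
      apiHost: string, // e.g. "http://frigate.local:5000/"
      camera: string,
      recordTimestamp: number,
      height = 500,
    ): string {
      return `${apiHost}api/${camera}/recordings/${recordTimestamp}/snapshot.jpg?height=${height}`;
    }

    // usage: <img src={snapshotUrl(apiHost, event.camera, displayedRecordTime)} />
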
@@ -304,9 +368,10 @@ export function TrackingDetails({
         <div
           className={cn(
-            "flex w-full items-center justify-center",
+            "flex items-center justify-center",
             isDesktop && "overflow-hidden",
             cameraAspect === "tall" ? "max-h-[50dvh] lg:max-h-[70dvh]" : "w-full",
+            cameraAspect === "tall" && isMobileOnly && "w-full",
             cameraAspect !== "tall" && isDesktop && "flex-[3]",
           )}
           style={{ aspectRatio: aspectRatio }}
@@ -318,21 +383,75 @@ export function TrackingDetails({
             cameraAspect === "tall" ? "h-full" : "w-full",
           )}
         >
-          <HlsVideoPlayer
-            videoRef={videoRef}
-            containerRef={containerRef}
-            visible={true}
-            currentSource={videoSource}
-            hotKeys={false}
-            supportsFullscreen={false}
-            fullscreen={false}
-            frigateControls={true}
-            onTimeUpdate={handleTimeUpdate}
-            onSeekToTime={handleSeekToTime}
-            isDetailMode={true}
-            camera={event.camera}
-            currentTimeOverride={currentTime}
-          />
+          {displaySource == "video" && (
+            <HlsVideoPlayer
+              videoRef={videoRef}
+              containerRef={containerRef}
+              visible={true}
+              currentSource={videoSource}
+              hotKeys={false}
+              supportsFullscreen={false}
+              fullscreen={false}
+              frigateControls={true}
+              onTimeUpdate={handleTimeUpdate}
+              onSeekToTime={handleSeekToTime}
+              isDetailMode={true}
+              camera={event.camera}
+              currentTimeOverride={currentTime}
+            />
+          )}
+          {displaySource == "image" && (
+            <>
+              <ImageLoadingIndicator
+                className="absolute inset-0"
+                imgLoaded={imgLoaded}
+              />
+              {hasError && (
+                <div className="relative aspect-video">
+                  <div className="flex flex-col items-center justify-center p-20 text-center">
+                    <LuFolderX className="size-16" />
+                    {t("objectLifecycle.noImageFound")}
+                  </div>
+                </div>
+              )}
+              <div
+                className={cn("relative", imgLoaded ? "visible" : "invisible")}
+              >
+                <div className="absolute z-50 size-full">
+                  <ObjectTrackOverlay
+                    key={`overlay-${displayedRecordTime}`}
+                    camera={event.camera}
+                    showBoundingBoxes={true}
+                    currentTime={displayedRecordTime}
+                    videoWidth={imgRef?.current?.naturalWidth ?? 0}
+                    videoHeight={imgRef?.current?.naturalHeight ?? 0}
+                    className="absolute inset-0 z-10"
+                    onSeekToTime={handleSeekToTime}
+                  />
+                </div>
+                <img
+                  key={event.id}
+                  ref={imgRef}
+                  className={cn(
+                    "max-h-[50dvh] max-w-full select-none rounded-lg object-contain",
+                  )}
+                  loading={isSafari ? "eager" : "lazy"}
+                  style={
+                    isIOS
+                      ? {
+                          WebkitUserSelect: "none",
+                          WebkitTouchCallout: "none",
+                        }
+                      : undefined
+                  }
+                  draggable={false}
+                  src={src}
+                  onLoad={() => setImgLoaded(true)}
+                  onError={() => setHasError(true)}
+                />
+              </div>
+            </>
+          )}
           <div
             className={cn(
               "absolute top-2 z-[5] flex items-center gap-2",