From 6cb1c411479501b8782f5b743e09cb2319401c6e Mon Sep 17 00:00:00 2001
From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
Date: Tue, 7 Oct 2025 06:59:30 -0500
Subject: [PATCH] overlay for object tracking details in history view

---
 .../components/overlay/ObjectTrackOverlay.tsx | 372 ++++++++++++++++++
 1 file changed, 372 insertions(+)
 create mode 100644 web/src/components/overlay/ObjectTrackOverlay.tsx

diff --git a/web/src/components/overlay/ObjectTrackOverlay.tsx b/web/src/components/overlay/ObjectTrackOverlay.tsx
new file mode 100644
index 000000000..2c4414c30
--- /dev/null
+++ b/web/src/components/overlay/ObjectTrackOverlay.tsx
@@ -0,0 +1,372 @@
// ObjectTrackOverlay — SVG overlay for the history view that draws a tracked
// object's movement path, the zones it currently occupies, per-point tooltips,
// and a highlight box at the object's current position. Clicking a path point
// seeks playback to that point's timestamp via `onSeekToTime`.
//
// NOTE(review): this patch was corrupted in transit — every JSX element tag
// (<svg>, <polygon>, <path>, <circle>, Tooltip wrappers, ...) and the
// useSWR<...> generic type arguments were stripped, leaving only attribute
// remnants. Each gap is flagged with a NOTE(review) comment below. Restore the
// markup from the upstream commit before applying; do not hand-reconstruct.
import { useMemo, useCallback } from "react";
import { ObjectLifecycleSequence, LifecycleClassType } from "@/types/timeline";
import { FrigateConfig } from "@/types/frigateConfig";
import useSWR from "swr";
import {
  Tooltip,
  TooltipContent,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import { TooltipPortal } from "@radix-ui/react-tooltip";
import { cn } from "@/lib/utils";

type ObjectTrackOverlayProps = {
  camera: string; // camera name; used to look up zone and autotracking config
  selectedObjectId: string; // tracked object (event) id whose path is drawn
  currentTime: number; // current playback time (unix seconds)
  videoWidth: number; // rendered video width in px; normalized coords scale by this
  videoHeight: number; // rendered video height in px
  className?: string;
  onSeekToTime?: (timestamp: number) => void; // seek player when a point is clicked
};

export default function ObjectTrackOverlay({
  camera,
  selectedObjectId,
  currentTime,
  videoWidth,
  videoHeight,
  className,
  onSeekToTime,
}: ObjectTrackOverlayProps) {
  // NOTE(review): generic argument stripped — upstream this is
  // useSWR<FrigateConfig>("config"); `config` is untyped (likely `any`) as-is.
  const { data: config } = useSWR("config");

  // Fetch timeline data for the selected object
  // NOTE(review): generic argument stripped — presumably
  // useSWR<ObjectLifecycleSequence[]>; confirm against upstream.
  const { data: objectTimeline } = useSWR([
    "timeline",
    {
      source_id: selectedObjectId,
    },
  ]);

  // Fetch the full event data to get saved path points
  const { data: eventData } = useSWR(["event_ids", { ids: selectedObjectId }]);

  // Fixed RGB colors for lifecycle event markers, keyed by lifecycle class.
  const typeColorMap = useMemo(
    () => ({
      [LifecycleClassType.VISIBLE]: [0, 255, 0], // Green
      [LifecycleClassType.GONE]: [255, 0, 0], // Red
      [LifecycleClassType.ENTERED_ZONE]: [255, 165, 0], // Orange
      [LifecycleClassType.ATTRIBUTE]: [128, 0, 128], // Purple
      [LifecycleClassType.ACTIVE]: [255, 255, 0], // Yellow
      [LifecycleClassType.STATIONARY]: [128, 128, 128], // Gray
      [LifecycleClassType.HEARD]: [0, 255, 255], // Cyan
      [LifecycleClassType.EXTERNAL]: [165, 42, 42], // Brown
    }),
    [],
  );

  // Returns a CSS rgb() string for an object label from the model colormap.
  // The colormap triplet is reversed before use — presumably stored BGR and
  // converted to RGB here; confirm against config docs.
  // NOTE(review): `config?.model?.colormap[label]` will throw if `model` exists
  // but `colormap` is undefined — consider `colormap?.[label]` upstream.
  const getObjectColor = useMemo(() => {
    return (label: string) => {
      const objectColor = config?.model?.colormap[label];
      if (objectColor) {
        const reversed = [...objectColor].reverse();
        return `rgb(${reversed.join(",")})`;
      }
      return "rgb(255, 0, 0)"; // fallback red
    };
  }, [config]);

  // Returns a CSS rgb() string for a zone's configured color (reversed, as above).
  const getZoneColor = useCallback(
    (zoneName: string) => {
      const zoneColor = config?.cameras?.[camera]?.zones?.[zoneName]?.color;
      if (zoneColor) {
        const reversed = [...zoneColor].reverse();
        return `rgb(${reversed.join(",")})`;
      }
      return "rgb(255, 0, 0)"; // fallback red
    },
    [config, camera],
  );

  // Zones the object is in at `currentTime`, taken from the most recent
  // timeline entry at or before the playhead.
  const currentObjectZones = useMemo(() => {
    if (!objectTimeline) return [];

    // Find the most recent timeline event at or before current time
    const relevantEvents = objectTimeline
      .filter((event) => event.timestamp <= currentTime)
      .sort((a, b) => b.timestamp - a.timestamp); // Most recent first

    // Get zones from the most recent event
    return relevantEvents[0]?.data?.zones || [];
  }, [objectTimeline, currentTime]);

  // Config entries (name, coordinate string, render color) for only the zones
  // the object currently occupies.
  const zones = useMemo(() => {
    if (!config?.cameras?.[camera]?.zones || !currentObjectZones.length)
      return [];

    return Object.entries(config.cameras[camera].zones)
      .filter(([name]) => currentObjectZones.includes(name))
      .map(([name, zone]) => ({
        name,
        coordinates: zone.coordinates,
        color: getZoneColor(name),
      }));
  }, [config, camera, getZoneColor, currentObjectZones]);

  // Path points persisted on the event itself (`data.path_data` is a list of
  // ([x, y], timestamp) tuples in normalized 0-1 coordinates).
  // NOTE(review): `eventData?.[0].data` throws if eventData is an empty array —
  // should be `eventData?.[0]?.data`; flagging only, not fixed here.
  const savedPathPoints = useMemo(() => {
    return (
      eventData?.[0].data?.path_data?.map(
        ([coords, timestamp]: [number[], number]) => ({
          x: coords[0],
          y: coords[1],
          timestamp,
          lifecycle_item: undefined,
        }),
      ) || []
    );
  }, [eventData]);

  // Points derived from lifecycle/timeline entries that carry a bounding box;
  // the point is anchored at the bottom-center of the box (the object's "feet").
  const eventSequencePoints = useMemo(() => {
    return (
      objectTimeline
        ?.filter((event) => event.data.box !== undefined)
        .map((event) => {
          const [left, top, width, height] = event.data.box!;

          return {
            x: left + width / 2, // Center x-coordinate
            y: top + height, // Bottom y-coordinate
            timestamp: event.timestamp,
            lifecycle_item: event,
          };
        }) || []
    );
  }, [objectTimeline]);

  // final object path with timeline points included
  const pathPoints = useMemo(() => {
    // don't display a path for autotracking cameras
    // NOTE(review): `.onvif.autotracking` is accessed without optional
    // chaining — throws if a camera config lacks an `onvif` section; confirm
    // whether the config schema guarantees it.
    if (config?.cameras[camera]?.onvif.autotracking.enabled_in_config)
      return [];

    const combinedPoints = [...savedPathPoints, ...eventSequencePoints].sort(
      (a, b) => a.timestamp - b.timestamp,
    );

    // Filter points around current time (within a reasonable window)
    const timeWindow = 30; // 30 seconds window
    return combinedPoints.filter(
      (point) =>
        point.timestamp >= currentTime - timeWindow &&
        point.timestamp <= currentTime + timeWindow,
    );
  }, [savedPathPoints, eventSequencePoints, config, camera, currentTime]);

  // Scales normalized path points to pixel coordinates and attaches the
  // matching timeline entry (matched by timestamp within 100ms).
  // NOTE(review): `point.box` and `point.label` are not properties of the
  // objects built in savedPathPoints/eventSequencePoints above — this fallback
  // branch appears dead or relies on a shape not visible in this patch; verify
  // against upstream types.
  const getAbsolutePositions = useCallback(() => {
    if (!pathPoints) return [];
    return pathPoints.map((point) => {
      // Find the corresponding timeline entry for this point
      const timelineEntry = objectTimeline?.find(
        (entry) => Math.abs(entry.timestamp - point.timestamp) < 0.1,
      );

      return {
        x: point.x * videoWidth,
        y: point.y * videoHeight,
        timestamp: point.timestamp,
        lifecycle_item:
          timelineEntry ||
          (point.box
            ? {
                timestamp: point.timestamp,
                camera: camera,
                source: "tracked_object",
                source_id: selectedObjectId,
                class_type: "visible" as LifecycleClassType,
                data: {
                  camera: camera,
                  label: point.label,
                  sub_label: "",
                  box: point.box,
                  region: [0, 0, 0, 0], // placeholder
                  attribute: "",
                  zones: [],
                },
              }
            : undefined),
      };
    });
  }, [
    pathPoints,
    videoWidth,
    videoHeight,
    objectTimeline,
    camera,
    selectedObjectId,
  ]);

  // Builds an SVG path string ("M x y L x y ...") of straight segments
  // through the given pixel-space points.
  const generateStraightPath = useCallback(
    (points: { x: number; y: number }[]) => {
      if (!points || points.length < 2) return "";
      let path = `M ${points[0].x} ${points[0].y}`;
      for (let i = 1; i < points.length; i++) {
        path += ` L ${points[i].x} ${points[i].y}`;
      }
      return path;
    },
    [],
  );

  // Marker color for a point: lifecycle-class color when the point has a
  // class type, otherwise a slightly darkened version of the object color.
  const getPointColor = useCallback(
    (baseColor: number[], type?: string) => {
      if (type && typeColorMap[type as keyof typeof typeColorMap]) {
        const typeColor = typeColorMap[type as keyof typeof typeColorMap];
        if (typeColor) {
          return `rgb(${typeColor.join(",")})`;
        }
      }
      // normal path point
      return `rgb(${baseColor.map((c) => Math.max(0, c - 10)).join(",")})`;
    },
    [typeColorMap],
  );

  // Nothing to draw without path points or config (all hooks already ran).
  if (!pathPoints.length || !config) {
    return null;
  }

  // Get the object color from the first point's label
  // NOTE(review): `pathPoints[0]?.label` — see note on getAbsolutePositions;
  // `label` is not on the visible point shape. objectColor/objectColorArray
  // and getPointColor/generateStraightPath/cn appear unused in the surviving
  // text — presumably consumed by the stripped JSX attributes below.
  const objectColor = pathPoints[0]?.label
    ? getObjectColor(pathPoints[0].label)
    : "rgb(255, 0, 0)";
  const objectColorArray = pathPoints[0]?.label
    ? getObjectColor(pathPoints[0].label).match(/\d+/g)?.map(Number) || [
        255, 0, 0,
      ]
    : [255, 0, 0];

  const absolutePositions = getAbsolutePositions();

  return (
    // NOTE(review): opening JSX element stripped here — upstream this is an
    // <svg> sized/positioned over the video (likely using cn(...) with
    // `className`). Restore from upstream.

    {/* Render zones */}
    {zones.map((zone) => {
      // Convert zone coordinates from normalized (0-1) to pixel coordinates
      const points = zone.coordinates
        .split(",")
        .map(Number.parseFloat)
        .reduce((acc: string[], value, index) => {
          // coordinate string alternates x,y,x,y... — even index = x, odd = y
          const isXCoordinate = index % 2 === 0;
          const coordinate = isXCoordinate
            ? value * videoWidth
            : value * videoHeight;
          acc.push(coordinate.toString());
          return acc;
        }, [])
        .join(",");

      return (
        // NOTE(review): JSX stripped — upstream renders a <polygon> from
        // `points` colored with `zone.color`.
      );
    })}

    {/* Draw path connecting the points */}
    {absolutePositions.length > 1 && (
      // NOTE(review): JSX stripped — upstream renders a <path> whose `d` is
      // generateStraightPath(absolutePositions), stroked with objectColor.
    )}

    {/* Draw points with tooltips */}
    {absolutePositions.map((pos, index) => (
      // NOTE(review): JSX stripped — upstream wraps each point in
      // <Tooltip>/<TooltipTrigger asChild> around a <circle> at (pos.x,
      // pos.y); the fragment below is the remnant of that circle's onClick
      // handler and self-closing tag.
      
        {
          onSeekToTime?.(pos.timestamp);
        }}
      />
      // NOTE(review): JSX stripped — upstream wraps the text below in
      // <TooltipPortal>/<TooltipContent>.
      {pos.lifecycle_item
        ? `${pos.lifecycle_item.class_type.replace("_", " ")} at ${new Date(pos.timestamp * 1000).toLocaleTimeString()}`
        : "Tracked point"}
      {onSeekToTime && (
        // NOTE(review): JSX stripped — upstream wraps this hint in an element
        // (likely a <div>).
        Click to seek to this time
      )}
    ))}

    {/* Highlight current position with bounding box */}
    {(() => {
      if (!objectTimeline) return null;

      // Find the most recent timeline event at or before current time with a bounding box
      const relevantEvents = objectTimeline
        .filter((event) => event.timestamp <= currentTime && event.data.box)
        .sort((a, b) => b.timestamp - a.timestamp); // Most recent first

      const currentEvent = relevantEvents[0];

      if (currentEvent && currentEvent.data.box) {
        // box is normalized [left, top, width, height]; highlight anchors at
        // bottom-center, matching eventSequencePoints above.
        const [left, top, width, height] = currentEvent.data.box;
        const centerX = left + width / 2;
        const centerY = top + height;

        return (
          // NOTE(review): JSX stripped — upstream renders a group containing
          // a <rect> for the box and a <circle> at (centerX, centerY), scaled
          // by videoWidth/videoHeight.
          {/* Bounding box */}
          {/* Center point highlight */}
        );
      }
      return null;
    })()}
  );
}