import { useMemo, useCallback } from "react"; import { ObjectLifecycleSequence, LifecycleClassType } from "@/types/timeline"; import { FrigateConfig } from "@/types/frigateConfig"; import useSWR from "swr"; import { useDetailStream } from "@/context/detail-stream-context"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/ui/tooltip"; import { TooltipPortal } from "@radix-ui/react-tooltip"; import { cn } from "@/lib/utils"; import { useTranslation } from "react-i18next"; import { resolveZoneName } from "@/hooks/use-zone-friendly-name"; type ObjectTrackOverlayProps = { camera: string; selectedObjectId: string; showBoundingBoxes?: boolean; currentTime: number; videoWidth: number; videoHeight: number; className?: string; onSeekToTime?: (timestamp: number, play?: boolean) => void; objectTimeline?: ObjectLifecycleSequence[]; }; export default function ObjectTrackOverlay({ camera, selectedObjectId, showBoundingBoxes = false, currentTime, videoWidth, videoHeight, className, onSeekToTime, objectTimeline, }: ObjectTrackOverlayProps) { const { t } = useTranslation("views/events"); const { data: config } = useSWR("config"); const { annotationOffset } = useDetailStream(); const effectiveCurrentTime = currentTime - annotationOffset / 1000; // Fetch the full event data to get saved path points const { data: eventData } = useSWR(["event_ids", { ids: selectedObjectId }]); const typeColorMap = useMemo( () => ({ [LifecycleClassType.VISIBLE]: [0, 255, 0], // Green [LifecycleClassType.GONE]: [255, 0, 0], // Red [LifecycleClassType.ENTERED_ZONE]: [255, 165, 0], // Orange [LifecycleClassType.ATTRIBUTE]: [128, 0, 128], // Purple [LifecycleClassType.ACTIVE]: [255, 255, 0], // Yellow [LifecycleClassType.STATIONARY]: [128, 128, 128], // Gray [LifecycleClassType.HEARD]: [0, 255, 255], // Cyan [LifecycleClassType.EXTERNAL]: [165, 42, 42], // Brown }), [], ); const getObjectColor = useMemo(() => { return (label: string) => { const objectColor = config?.model?.colormap[label]; if (objectColor) { const reversed = [...objectColor].reverse(); return `rgb(${reversed.join(",")})`; } return "rgb(255, 0, 0)"; // fallback red }; }, [config]); const getZoneColor = useCallback( (zoneName: string) => { const zoneColor = config?.cameras?.[camera]?.zones?.[zoneName]?.color; if (zoneColor) { const reversed = [...zoneColor].reverse(); return `rgb(${reversed.join(",")})`; } return "rgb(255, 0, 0)"; // fallback red }, [config, camera], ); const currentObjectZones = useMemo(() => { if (!objectTimeline) return []; // Find the most recent timeline event at or before effective current time const relevantEvents = objectTimeline .filter((event) => event.timestamp <= effectiveCurrentTime) .sort((a, b) => b.timestamp - a.timestamp); // Most recent first // Get zones from the most recent event return relevantEvents[0]?.data?.zones || []; }, [objectTimeline, effectiveCurrentTime]); const zones = useMemo(() => { if (!config?.cameras?.[camera]?.zones || !currentObjectZones.length) return []; return Object.entries(config.cameras[camera].zones) .filter(([name]) => currentObjectZones.includes(name)) .map(([name, zone]) => ({ name, coordinates: zone.coordinates, color: getZoneColor(name), })); }, [config, camera, getZoneColor, currentObjectZones]); // get saved path points from event const savedPathPoints = useMemo(() => { return ( eventData?.[0].data?.path_data?.map( ([coords, timestamp]: [number[], number]) => ({ x: coords[0], y: coords[1], timestamp, lifecycle_item: undefined, }), ) || [] ); }, [eventData]); // timeline points 
  const eventSequencePoints = useMemo(() => {
    return (
      objectTimeline
        ?.filter((event) => event.data.box !== undefined)
        .map((event) => {
          const [left, top, width, height] = event.data.box!;

          event.data.zones_friendly_names = event?.data?.zones?.map((zone) => {
            return resolveZoneName(config, zone);
          });

          return {
            x: left + width / 2, // Center x
            y: top + height, // Bottom y
            timestamp: event.timestamp,
            lifecycle_item: event,
          };
        }) || []
    );
  }, [config, objectTimeline]);

  // final object path with timeline points included
  const pathPoints = useMemo(() => {
    // don't display a path for autotracking cameras
    if (config?.cameras[camera]?.onvif.autotracking.enabled_in_config)
      return [];

    const combinedPoints = [...savedPathPoints, ...eventSequencePoints].sort(
      (a, b) => a.timestamp - b.timestamp,
    );

    // Filter points around current time (within a reasonable window)
    const timeWindow = 30; // 30 seconds window
    return combinedPoints.filter(
      (point) =>
        point.timestamp >= currentTime - timeWindow &&
        point.timestamp <= currentTime + timeWindow,
    );
  }, [savedPathPoints, eventSequencePoints, config, camera, currentTime]);

  // get absolute positions on the svg canvas for each point
  const absolutePositions = useMemo(() => {
    if (!pathPoints) return [];

    return pathPoints.map((point) => {
      // Find the corresponding timeline entry for this point
      const timelineEntry = objectTimeline?.find(
        (entry) => entry.timestamp == point.timestamp,
      );

      return {
        x: point.x * videoWidth,
        y: point.y * videoHeight,
        timestamp: point.timestamp,
        lifecycle_item:
          timelineEntry ||
          (point.box // normal path point
            ? {
                timestamp: point.timestamp,
                camera: camera,
                source: "tracked_object",
                source_id: selectedObjectId,
                class_type: "visible" as LifecycleClassType,
                data: {
                  camera: camera,
                  label: point.label,
                  sub_label: "",
                  box: point.box,
                  region: [0, 0, 0, 0], // placeholder
                  attribute: "",
                  zones: [],
                },
              }
            : undefined),
      };
    });
  }, [
    pathPoints,
    videoWidth,
    videoHeight,
    objectTimeline,
    camera,
    selectedObjectId,
  ]);

  const generateStraightPath = useCallback(
    (points: { x: number; y: number }[]) => {
      if (!points || points.length < 2) return "";

      let path = `M ${points[0].x} ${points[0].y}`;
      for (let i = 1; i < points.length; i++) {
        path += ` L ${points[i].x} ${points[i].y}`;
      }
      return path;
    },
    [],
  );

  const getPointColor = useCallback(
    (baseColor: number[], type?: string) => {
      if (type && typeColorMap[type as keyof typeof typeColorMap]) {
        const typeColor = typeColorMap[type as keyof typeof typeColorMap];
        if (typeColor) {
          return `rgb(${typeColor.join(",")})`;
        }
      }
      // normal path point
      return `rgb(${baseColor.map((c) => Math.max(0, c - 10)).join(",")})`;
    },
    [typeColorMap],
  );

  const handlePointClick = useCallback(
    (timestamp: number) => {
      onSeekToTime?.(timestamp, false);
    },
    [onSeekToTime],
  );

  // render bounding box for object at current time if we have a timeline entry
  const currentBoundingBox = useMemo(() => {
    if (!objectTimeline) return null;

    // Find the most recent timeline event at or before effective current time with a bounding box
    const relevantEvents = objectTimeline
      .filter(
        (event) => event.timestamp <= effectiveCurrentTime && event.data.box,
      )
      .sort((a, b) => b.timestamp - a.timestamp); // Most recent first

    const currentEvent = relevantEvents[0];
    if (!currentEvent?.data.box) return null;

    const [left, top, width, height] = currentEvent.data.box;
    return {
      left,
      top,
      width,
      height,
      centerX: left + width / 2,
      centerY: top + height,
    };
  }, [objectTimeline, effectiveCurrentTime]);
  const objectColor = useMemo(() => {
    return pathPoints[0]?.label
      ? getObjectColor(pathPoints[0].label)
      : "rgb(255, 0, 0)";
  }, [pathPoints, getObjectColor]);

  const objectColorArray = useMemo(() => {
    return pathPoints[0]?.label
      ? getObjectColor(pathPoints[0].label)
          .match(/\d+/g)
          ?.map(Number) || [255, 0, 0]
      : [255, 0, 0];
  }, [pathPoints, getObjectColor]);

  // render any zones for object at current time
  const zonePolygons = useMemo(() => {
    return zones.map((zone) => {
      // Convert zone coordinates from normalized (0-1) to pixel coordinates
      const points = zone.coordinates
        .split(",")
        .map(Number.parseFloat)
        .reduce((acc: string[], value, index) => {
          const isXCoordinate = index % 2 === 0;
          const coordinate = isXCoordinate
            ? value * videoWidth
            : value * videoHeight;
          acc.push(coordinate.toString());
          return acc;
        }, [])
        .join(",");

      return {
        key: zone.name,
        points,
        fill: `rgba(${zone.color.replace("rgb(", "").replace(")", "")}, 0.3)`,
        stroke: zone.color,
      };
    });
  }, [zones, videoWidth, videoHeight]);

  if (!pathPoints.length || !config) {
    return null;
  }

  return (
    <svg
      className={cn("absolute inset-0 size-full", className)}
      viewBox={`0 0 ${videoWidth} ${videoHeight}`}
      preserveAspectRatio="none"
    >
      {/* zones the object currently occupies */}
      {zonePolygons.map((zone) => (
        <polygon
          key={zone.key}
          points={zone.points}
          fill={zone.fill}
          stroke={zone.stroke}
          strokeWidth="2"
        />
      ))}

      {/* object travel path */}
      {absolutePositions.length > 1 && (
        <path
          d={generateStraightPath(absolutePositions)}
          fill="none"
          stroke={objectColor}
          strokeWidth="2"
        />
      )}

      {/* clickable path points with lifecycle tooltips */}
      {absolutePositions.map((pos, index) => (
        <Tooltip key={`${pos.timestamp}-${index}`}>
          <TooltipTrigger asChild>
            <circle
              cx={pos.x}
              cy={pos.y}
              r="5"
              fill={getPointColor(
                objectColorArray,
                pos.lifecycle_item?.class_type,
              )}
              className="cursor-pointer"
              onClick={() => handlePointClick(pos.timestamp)}
            />
          </TooltipTrigger>
          <TooltipPortal>
            <TooltipContent>
              {pos.lifecycle_item
                ? `${pos.lifecycle_item.class_type.replace("_", " ")} at ${new Date(pos.timestamp * 1000).toLocaleTimeString()}`
                : t("objectTrack.trackedPoint")}
              {onSeekToTime && <div>{t("objectTrack.clickToSeek")}</div>}
            </TooltipContent>
          </TooltipPortal>
        </Tooltip>
      ))}

      {/* bounding box for the object at the current time */}
      {currentBoundingBox && showBoundingBoxes && (
        <rect
          x={currentBoundingBox.left * videoWidth}
          y={currentBoundingBox.top * videoHeight}
          width={currentBoundingBox.width * videoWidth}
          height={currentBoundingBox.height * videoHeight}
          fill="none"
          stroke={objectColor}
          strokeWidth="2"
        />
      )}
    </svg>
  );
}