// NOTE(review): this file looks like a newline-collapsed extraction of a .tsx
// component — everything between "<" and ">" (JSX markup and generic type
// parameters) was stripped, and several "//" comments had swallowed the code that
// originally followed them. Line breaks are restored only after those comments;
// all code tokens are unchanged. The missing JSX cannot be reconstructed from this
// view — recover the original file before editing any logic.

// --- imports (collapsed onto a single line by the extraction) ---
import useSWR from "swr"; import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Event } from "@/types/event"; import ActivityIndicator from "@/components/indicators/activity-indicator"; import { Button } from "@/components/ui/button"; import { TrackingDetailsSequence } from "@/types/timeline"; import Heading from "@/components/ui/heading"; import { FrigateConfig } from "@/types/frigateConfig"; import { formatUnixTimestampToDateTime } from "@/utils/dateUtil"; import { getIconForLabel } from "@/utils/iconUtil"; import { LuCircle, LuSettings } from "react-icons/lu"; import { cn } from "@/lib/utils"; import { Tooltip, TooltipContent, TooltipTrigger, } from "@/components/ui/tooltip"; import { AnnotationSettingsPane } from "./AnnotationSettingsPane"; import { TooltipPortal } from "@radix-ui/react-tooltip"; import HlsVideoPlayer from "@/components/player/HlsVideoPlayer"; import { baseUrl } from "@/api/baseUrl"; import { REVIEW_PADDING } from "@/types/review"; import { ASPECT_VERTICAL_LAYOUT, ASPECT_WIDE_LAYOUT } from "@/types/record"; import { DropdownMenu, DropdownMenuTrigger, DropdownMenuContent, DropdownMenuItem, DropdownMenuPortal, } from "@/components/ui/dropdown-menu"; import { Link, useNavigate } from "react-router-dom"; import { getLifecycleItemDescription } from "@/utils/lifecycleUtil"; import { useTranslation } from "react-i18next"; import { getTranslatedLabel } from "@/utils/i18n"; import { Badge } from "@/components/ui/badge"; import { HiDotsHorizontal } from "react-icons/hi"; import axios from "axios"; import { toast } from "sonner"; import { useDetailStream } from "@/context/detail-stream-context"; import { isDesktop, isIOS } from "react-device-detect"; import Chip from "@/components/indicators/Chip"; import { FaDownload, FaHistory } from "react-icons/fa";

// Props for the TrackingDetails view.
// NOTE(review): "fullscreen" is declared but is not destructured by the component
// below — presumably unused; confirm against the original file.
type TrackingDetailsProps = { className?: string; event: Event; fullscreen?: boolean; tabs?: React.ReactNode; };

// TrackingDetails: shows an object-tracking event's lifecycle timeline alongside an
// HLS clip of the recording. Two clocks are in play: detect-stream timestamps (what
// the event carries) and record-stream timestamps (what the clip uses).
// annotationOffset (milliseconds, from the detail-stream context) converts
// detect -> record, and the clip starts REVIEW_PADDING seconds before the
// converted event start.
export function TrackingDetails({ className, event, tabs, }: 
TrackingDetailsProps) { const videoRef = useRef(null); const { t } = useTranslation(["views/explore"]); const navigate = useNavigate(); const { setSelectedObjectIds, annotationOffset, setAnnotationOffset } = useDetailStream();
// event.start_time is detect time, convert to record, then subtract padding
const [currentTime, setCurrentTime] = useState( (event.start_time ?? 0) + annotationOffset / 1000 - REVIEW_PADDING, ); const { data: eventSequence } = useSWR([ "timeline", { source_id: event.id, }, ]); const { data: config } = useSWR("config"); const effectiveTime = useMemo(() => { return currentTime - annotationOffset / 1000; }, [currentTime, annotationOffset]); const containerRef = useRef(null); const [_selectedZone, setSelectedZone] = useState(""); const [_lifecycleZones, setLifecycleZones] = useState([]); const [showControls, setShowControls] = useState(false); const [showZones, setShowZones] = useState(true); const [seekToTimestamp, setSeekToTimestamp] = useState(null); const aspectRatio = useMemo(() => { if (!config) { return 16 / 9; } return ( config.cameras[event.camera].detect.width / config.cameras[event.camera].detect.height ); }, [config, event]); const label = event.sub_label ? 
event.sub_label : getTranslatedLabel(event.label); const getZoneColor = useCallback( (zoneName: string) => { const zoneColor = config?.cameras?.[event.camera]?.zones?.[zoneName]?.color; if (zoneColor) { const reversed = [...zoneColor].reverse(); return reversed; } }, [config, event], );
// Set the selected object ID in the context so ObjectTrackOverlay can display it
useEffect(() => { setSelectedObjectIds([event.id]); }, [event.id, setSelectedObjectIds]); const handleLifecycleClick = useCallback( (item: TrackingDetailsSequence) => { if (!videoRef.current) return;
// Convert lifecycle timestamp (detect stream) to record stream time
const targetTimeRecord = item.timestamp + annotationOffset / 1000;
// Convert to video-relative time for seeking
const eventStartRecord = (event.start_time ?? 0) + annotationOffset / 1000; const videoStartTime = eventStartRecord - REVIEW_PADDING; const relativeTime = targetTimeRecord - videoStartTime; videoRef.current.currentTime = relativeTime; }, [event.start_time, annotationOffset], ); const formattedStart = config ? formatUnixTimestampToDateTime(event.start_time ?? 0, { timezone: config.ui.timezone, date_format: config.ui.time_format == "24hour" ? t("time.formattedTimestamp.24hour", { ns: "common", }) : t("time.formattedTimestamp.12hour", { ns: "common", }), time_style: "medium", date_style: "medium", }) : ""; const formattedEnd = config ? formatUnixTimestampToDateTime(event.end_time ?? 0, { timezone: config.ui.timezone, date_format: config.ui.time_format == "24hour" ? 
t("time.formattedTimestamp.24hour", { ns: "common", }) : t("time.formattedTimestamp.12hour", { ns: "common", }), time_style: "medium", date_style: "medium", }) : ""; useEffect(() => { if (!eventSequence || eventSequence.length === 0) return; setLifecycleZones(eventSequence[0]?.data.zones); }, [eventSequence]); useEffect(() => { if (seekToTimestamp === null || !videoRef.current) return;
// seekToTimestamp is a record stream timestamp
// event.start_time is detect stream time, convert to record
// The video clip starts at (eventStartRecord - REVIEW_PADDING)
const eventStartRecord = event.start_time + annotationOffset / 1000; const videoStartTime = eventStartRecord - REVIEW_PADDING; const relativeTime = seekToTimestamp - videoStartTime; if (relativeTime >= 0) { videoRef.current.currentTime = relativeTime; } setSeekToTimestamp(null); }, [seekToTimestamp, event.start_time, annotationOffset]); const isWithinEventRange = effectiveTime !== undefined && event.start_time !== undefined && event.end_time !== undefined && effectiveTime >= event.start_time && effectiveTime <= event.end_time;
// Calculate how far down the blue line should extend based on effectiveTime
const calculateLineHeight = useCallback(() => { if (!eventSequence || eventSequence.length === 0 || !isWithinEventRange) { return 0; } const currentTime = effectiveTime ?? 0;
// Find which events have been passed
let lastPassedIndex = -1; for (let i = 0; i < eventSequence.length; i++) { if (currentTime >= (eventSequence[i].timestamp ?? 
0)) { lastPassedIndex = i; } else { break; } }
// No events passed yet
if (lastPassedIndex < 0) return 0;
// All events passed
if (lastPassedIndex >= eventSequence.length - 1) return 100;
// Calculate percentage based on item position, not time
// Each item occupies an equal visual space regardless of time gaps
const itemPercentage = 100 / (eventSequence.length - 1);
// Find progress between current and next event for smooth transition
const currentEvent = eventSequence[lastPassedIndex]; const nextEvent = eventSequence[lastPassedIndex + 1]; const currentTimestamp = currentEvent.timestamp ?? 0; const nextTimestamp = nextEvent.timestamp ?? 0;
// Calculate interpolation between the two events
const timeBetween = nextTimestamp - currentTimestamp; const timeElapsed = currentTime - currentTimestamp; const interpolation = timeBetween > 0 ? timeElapsed / timeBetween : 0;
// Base position plus interpolated progress to next item
return Math.min( 100, lastPassedIndex * itemPercentage + interpolation * itemPercentage, ); }, [eventSequence, effectiveTime, isWithinEventRange]); const blueLineHeight = calculateLineHeight(); const videoSource = useMemo(() => {
// event.start_time and event.end_time are in DETECT stream time
// Convert to record stream time, then create video clip with padding
const eventStartRecord = event.start_time + annotationOffset / 1000; const eventEndRecord = (event.end_time ?? 
Date.now() / 1000) + annotationOffset / 1000; const startTime = eventStartRecord - REVIEW_PADDING; const endTime = eventEndRecord + REVIEW_PADDING; const playlist = `${baseUrl}vod/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`; return { playlist, startPosition: 0, }; }, [event, annotationOffset]);
// Determine camera aspect ratio category
const cameraAspect = useMemo(() => { if (!aspectRatio) { return "normal"; } else if (aspectRatio > ASPECT_WIDE_LAYOUT) { return "wide"; } else if (aspectRatio < ASPECT_VERTICAL_LAYOUT) { return "tall"; } else { return "normal"; } }, [aspectRatio]); const handleSeekToTime = useCallback((timestamp: number, _play?: boolean) => {
// Set the target timestamp to seek to
setSeekToTimestamp(timestamp); }, []); const handleTimeUpdate = useCallback( (time: number) => {
// event.start_time is detect stream time, convert to record
const eventStartRecord = event.start_time + annotationOffset / 1000; const videoStartTime = eventStartRecord - REVIEW_PADDING; const absoluteTime = time + videoStartTime; setCurrentTime(absoluteTime); }, [event.start_time, annotationOffset], ); if (!config) { return ; } return ( // NOTE(review): the JSX return body below was stripped by the extraction; only attribute/children expression fragments remain.
{event && ( { if (event?.id) { const params = new URLSearchParams({ id: event.id, }).toString(); navigate(`/review?${params}`); } }} > {t("itemMenu.viewInHistory.label")} )} {t("button.download", { ns: "common" })}
{isDesktop && tabs &&
{tabs}
}
{t("trackingDetails.title")}
{t("trackingDetails.adjustAnnotationSettings")}
{t("trackingDetails.scrollViewTips")}
{t("trackingDetails.count", { first: eventSequence?.length ?? 0, second: eventSequence?.length ?? 0, })}
{config?.cameras[event.camera]?.onvif.autotracking .enabled_in_config && (
{t("trackingDetails.autoTrackingTips")}
)} {showControls && ( { if (typeof value === "function") { const newValue = value(annotationOffset); setAnnotationOffset(newValue); } else { setAnnotationOffset(value); } }} /> )}
{ e.stopPropagation();
// event.start_time is detect time, convert to record
handleSeekToTime( (event.start_time ?? 0) + annotationOffset / 1000, ); }} role="button" >
{getIconForLabel( event.sub_label ? event.label + "-verified" : event.label, "size-4 text-white", )}
{label} {formattedStart ?? ""} - {formattedEnd ?? ""} {event.data?.recognized_license_plate && ( <> ·
{event.data.recognized_license_plate}
)}
{!eventSequence ? ( ) : eventSequence.length === 0 ? (
{t("detail.noObjectDetailData", { ns: "views/events" })}
) : (
{isWithinEventRange && (
)}
{eventSequence.map((item, idx) => { const isActive = Math.abs( (effectiveTime ?? 0) - (item.timestamp ?? 0), ) <= 0.5; const formattedEventTimestamp = config ? formatUnixTimestampToDateTime(item.timestamp ?? 0, { timezone: config.ui.timezone, date_format: config.ui.time_format == "24hour" ? t( "time.formattedTimestampHourMinuteSecond.24hour", { ns: "common" }, ) : t( "time.formattedTimestampHourMinuteSecond.12hour", { ns: "common" }, ), time_style: "medium", date_style: "medium", }) : ""; const ratio = Array.isArray(item.data.box) && item.data.box.length >= 4 ? ( aspectRatio * (item.data.box[2] / item.data.box[3]) ).toFixed(2) : "N/A"; const areaPx = Array.isArray(item.data.box) && item.data.box.length >= 4 ? Math.round( (config.cameras[event.camera]?.detect?.width ?? 0) * (config.cameras[event.camera]?.detect ?.height ?? 0) * (item.data.box[2] * item.data.box[3]), ) : undefined; const areaPct = Array.isArray(item.data.box) && item.data.box.length >= 4 ? (item.data.box[2] * item.data.box[3]).toFixed(4) : undefined; return ( handleLifecycleClick(item)} setSelectedZone={setSelectedZone} getZoneColor={getZoneColor} effectiveTime={effectiveTime} isTimelineActive={isWithinEventRange} /> ); })}
)}
); } // end TrackingDetails (its stripped JSX body is above)

// Props for a single row of the lifecycle timeline list. "ratio"/"areaPx"/"areaPct"
// are precomputed by the parent from the item's bounding box; getZoneColor returns
// an [r, g, b] triple (or undefined) for a zone name.
type LifecycleIconRowProps = { item: TrackingDetailsSequence; isActive?: boolean; formattedEventTimestamp: string; ratio: string; areaPx?: number; areaPct?: string; onClick: () => void; setSelectedZone: (z: string) => void; getZoneColor: (zoneName: string) => number[] | undefined; effectiveTime?: number; isTimelineActive?: boolean; };

// LifecycleIconRow: renders one lifecycle entry — its description, bounding-box
// ratio and area, zone badges, and a formatted timestamp — plus a per-row menu that
// can submit the frame to Frigate+ (POST "/{camera}/plus/{timestamp}") or navigate
// to the masks/zones settings page pre-filled with this item's box as an object
// mask. The parent wires onClick to seek its video player to this item's timestamp.
function LifecycleIconRow({ item, isActive, formattedEventTimestamp, ratio, areaPx, areaPct, onClick, setSelectedZone, getZoneColor, effectiveTime, isTimelineActive, }: LifecycleIconRowProps) { const { t } = useTranslation(["views/explore", "components/player"]); const { data: config } = useSWR("config"); const [isOpen, setIsOpen] = useState(false); const navigate = useNavigate(); return ( // NOTE(review): the JSX body below was stripped by the extraction; fragments only.
= (item?.timestamp ?? 0)) && isTimelineActive && "fill-selected duration-300", )} />
{getLifecycleItemDescription(item)}
{t("trackingDetails.lifecycleItemDesc.header.ratio")} {ratio}
{t("trackingDetails.lifecycleItemDesc.header.area")} {areaPx !== undefined && areaPct !== undefined ? ( {t("information.pixels", { ns: "common", area: areaPx })} ·{" "} {areaPct}% ) : ( N/A )}
{item.data?.zones && item.data.zones.length > 0 && (
{item.data.zones.map((zone, zidx) => { const color = getZoneColor(zone)?.join(",") ?? "0,0,0"; return ( { e.stopPropagation(); setSelectedZone(zone); }} style={{ borderColor: `rgba(${color}, 0.6)`, background: `rgba(${color}, 0.08)`, }} > {zone.replaceAll("_", " ")} ); })}
)}
{formattedEventTimestamp}
{(config?.plus?.enabled || item.data.box) && (
{config?.plus?.enabled && ( { const resp = await axios.post( `/${item.camera}/plus/${item.timestamp}`, ); if (resp && resp.status == 200) { toast.success( t("toast.success.submittedFrigatePlus", { ns: "components/player", }), { position: "top-center", }, ); } else {
// NOTE(review): failure branch shows the error message via toast.success —
// this looks like it should be toast.error; confirm against the original file.
toast.success( t("toast.error.submitFrigatePlusFailed", { ns: "components/player", }), { position: "top-center", }, ); } }} > {t("itemMenu.submitToPlus.label")} )} {item.data.box && ( { setIsOpen(false); setTimeout(() => { navigate( `/settings?page=masksAndZones&camera=${item.camera}&object_mask=${item.data.box}`, ); }, 0); }} > {t("trackingDetails.createObjectMask")} )}
)}
); } // end LifecycleIconRow