2024-09-04 16:46:49 +03:00
|
|
|
import useSWR from "swr";
|
|
|
|
|
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
2025-11-17 17:12:05 +03:00
|
|
|
import { useResizeObserver } from "@/hooks/resize-observer";
|
2024-09-04 16:46:49 +03:00
|
|
|
import { Event } from "@/types/event";
|
|
|
|
|
import ActivityIndicator from "@/components/indicators/activity-indicator";
|
2025-10-26 21:12:20 +03:00
|
|
|
import { TrackingDetailsSequence } from "@/types/timeline";
|
2024-09-04 16:46:49 +03:00
|
|
|
import { FrigateConfig } from "@/types/frigateConfig";
|
|
|
|
|
import { formatUnixTimestampToDateTime } from "@/utils/dateUtil";
|
|
|
|
|
import { getIconForLabel } from "@/utils/iconUtil";
|
2025-11-06 19:22:52 +03:00
|
|
|
import { LuCircle, LuFolderX } from "react-icons/lu";
|
2024-09-04 16:46:49 +03:00
|
|
|
import { cn } from "@/lib/utils";
|
2025-11-01 17:19:30 +03:00
|
|
|
import HlsVideoPlayer from "@/components/player/HlsVideoPlayer";
|
|
|
|
|
import { baseUrl } from "@/api/baseUrl";
|
|
|
|
|
import { REVIEW_PADDING } from "@/types/review";
|
2025-11-19 01:33:42 +03:00
|
|
|
import {
|
|
|
|
|
ASPECT_VERTICAL_LAYOUT,
|
|
|
|
|
ASPECT_WIDE_LAYOUT,
|
|
|
|
|
Recording,
|
|
|
|
|
} from "@/types/record";
|
2025-10-26 21:12:20 +03:00
|
|
|
import {
|
|
|
|
|
DropdownMenu,
|
|
|
|
|
DropdownMenuTrigger,
|
|
|
|
|
DropdownMenuContent,
|
|
|
|
|
DropdownMenuItem,
|
|
|
|
|
DropdownMenuPortal,
|
|
|
|
|
} from "@/components/ui/dropdown-menu";
|
2025-10-26 15:27:07 +03:00
|
|
|
import { Link, useNavigate } from "react-router-dom";
|
2025-02-18 17:17:51 +03:00
|
|
|
import { getLifecycleItemDescription } from "@/utils/lifecycleUtil";
|
2025-03-16 18:36:20 +03:00
|
|
|
import { useTranslation } from "react-i18next";
|
2025-10-18 21:19:21 +03:00
|
|
|
import { getTranslatedLabel } from "@/utils/i18n";
|
2025-11-07 17:02:06 +03:00
|
|
|
import { resolveZoneName } from "@/hooks/use-zone-friendly-name";
|
2025-10-24 20:08:59 +03:00
|
|
|
import { Badge } from "@/components/ui/badge";
|
2025-10-26 21:12:20 +03:00
|
|
|
import { HiDotsHorizontal } from "react-icons/hi";
|
|
|
|
|
import axios from "axios";
|
|
|
|
|
import { toast } from "sonner";
|
2025-11-01 17:19:30 +03:00
|
|
|
import { useDetailStream } from "@/context/detail-stream-context";
|
2025-11-02 16:48:43 +03:00
|
|
|
import { isDesktop, isIOS, isMobileOnly, isSafari } from "react-device-detect";
|
|
|
|
|
import { useApiHost } from "@/api";
|
|
|
|
|
import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator";
|
|
|
|
|
import ObjectTrackOverlay from "../ObjectTrackOverlay";
|
2025-11-29 16:30:04 +03:00
|
|
|
import { useIsAdmin } from "@/hooks/use-is-admin";
|
2025-02-17 19:37:17 +03:00
|
|
|
|
2025-10-26 21:12:20 +03:00
|
|
|
type TrackingDetailsProps = {
|
2024-09-12 17:46:29 +03:00
|
|
|
className?: string;
|
2024-09-04 16:46:49 +03:00
|
|
|
event: Event;
|
2024-09-12 17:46:29 +03:00
|
|
|
fullscreen?: boolean;
|
2025-11-01 17:19:30 +03:00
|
|
|
tabs?: React.ReactNode;
|
2024-09-04 16:46:49 +03:00
|
|
|
};
|
|
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
export function TrackingDetails({
|
2024-09-12 17:46:29 +03:00
|
|
|
className,
|
2024-09-04 16:46:49 +03:00
|
|
|
event,
|
2025-11-01 17:19:30 +03:00
|
|
|
tabs,
|
2025-10-26 21:12:20 +03:00
|
|
|
}: TrackingDetailsProps) {
|
2025-11-01 17:19:30 +03:00
|
|
|
const videoRef = useRef<HTMLVideoElement | null>(null);
|
2025-03-16 18:36:20 +03:00
|
|
|
const { t } = useTranslation(["views/explore"]);
|
2025-11-02 16:48:43 +03:00
|
|
|
const apiHost = useApiHost();
|
|
|
|
|
const imgRef = useRef<HTMLImageElement | null>(null);
|
|
|
|
|
const [imgLoaded, setImgLoaded] = useState(false);
|
2025-11-22 00:40:58 +03:00
|
|
|
const [isVideoLoading, setIsVideoLoading] = useState(true);
|
2025-11-02 16:48:43 +03:00
|
|
|
const [displaySource, _setDisplaySource] = useState<"video" | "image">(
|
|
|
|
|
"video",
|
|
|
|
|
);
|
2025-11-06 19:22:52 +03:00
|
|
|
const { setSelectedObjectIds, annotationOffset } = useDetailStream();
|
2025-11-01 17:19:30 +03:00
|
|
|
|
2025-11-02 16:48:43 +03:00
|
|
|
// manualOverride holds a record-stream timestamp explicitly chosen by the
|
|
|
|
|
// user (eg, clicking a lifecycle row). When null we display `currentTime`.
|
|
|
|
|
const [manualOverride, setManualOverride] = useState<number | null>(null);
|
|
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
// event.start_time is detect time, convert to record, then subtract padding
|
|
|
|
|
const [currentTime, setCurrentTime] = useState(
|
|
|
|
|
(event.start_time ?? 0) + annotationOffset / 1000 - REVIEW_PADDING,
|
|
|
|
|
);
|
2025-03-16 18:36:20 +03:00
|
|
|
|
2025-11-22 00:40:58 +03:00
|
|
|
useEffect(() => {
|
|
|
|
|
setIsVideoLoading(true);
|
|
|
|
|
}, [event.id]);
|
|
|
|
|
|
2025-11-24 16:34:56 +03:00
|
|
|
const { data: eventSequence } = useSWR<TrackingDetailsSequence[]>(
|
|
|
|
|
["timeline", { source_id: event.id }],
|
|
|
|
|
null,
|
2024-09-04 16:46:49 +03:00
|
|
|
{
|
2025-11-24 16:34:56 +03:00
|
|
|
revalidateOnFocus: false,
|
|
|
|
|
revalidateOnReconnect: false,
|
|
|
|
|
dedupingInterval: 30000,
|
2024-09-04 16:46:49 +03:00
|
|
|
},
|
2025-11-24 16:34:56 +03:00
|
|
|
);
|
2024-09-04 16:46:49 +03:00
|
|
|
|
|
|
|
|
const { data: config } = useSWR<FrigateConfig>("config");
|
|
|
|
|
|
2025-11-19 01:33:42 +03:00
|
|
|
// Fetch recording segments for the event's time range to handle motion-only gaps
|
|
|
|
|
const eventStartRecord = useMemo(
|
|
|
|
|
() => (event.start_time ?? 0) + annotationOffset / 1000,
|
|
|
|
|
[event.start_time, annotationOffset],
|
|
|
|
|
);
|
|
|
|
|
const eventEndRecord = useMemo(
|
|
|
|
|
() => (event.end_time ?? Date.now() / 1000) + annotationOffset / 1000,
|
|
|
|
|
[event.end_time, annotationOffset],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const { data: recordings } = useSWR<Recording[]>(
|
|
|
|
|
event.camera
|
|
|
|
|
? [
|
|
|
|
|
`${event.camera}/recordings`,
|
|
|
|
|
{
|
|
|
|
|
after: eventStartRecord - REVIEW_PADDING,
|
|
|
|
|
before: eventEndRecord + REVIEW_PADDING,
|
|
|
|
|
},
|
|
|
|
|
]
|
|
|
|
|
: null,
|
2025-11-24 16:34:56 +03:00
|
|
|
null,
|
|
|
|
|
{
|
|
|
|
|
revalidateOnFocus: false,
|
|
|
|
|
revalidateOnReconnect: false,
|
|
|
|
|
dedupingInterval: 30000,
|
|
|
|
|
},
|
2025-11-19 01:33:42 +03:00
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Convert a timeline timestamp to actual video player time, accounting for
|
|
|
|
|
// motion-only recording gaps. Uses the same algorithm as DynamicVideoController.
|
|
|
|
|
const timestampToVideoTime = useCallback(
|
|
|
|
|
(timestamp: number): number => {
|
|
|
|
|
if (!recordings || recordings.length === 0) {
|
|
|
|
|
// Fallback to simple calculation if no recordings data
|
|
|
|
|
return timestamp - (eventStartRecord - REVIEW_PADDING);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const videoStartTime = eventStartRecord - REVIEW_PADDING;
|
|
|
|
|
|
|
|
|
|
// If timestamp is before video start, return 0
|
|
|
|
|
if (timestamp < videoStartTime) return 0;
|
|
|
|
|
|
|
|
|
|
// Check if timestamp is before the first recording or after the last
|
|
|
|
|
if (
|
|
|
|
|
timestamp < recordings[0].start_time ||
|
|
|
|
|
timestamp > recordings[recordings.length - 1].end_time
|
|
|
|
|
) {
|
|
|
|
|
// No recording available at this timestamp
|
|
|
|
|
return 0;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Calculate the inpoint offset - the HLS video may start partway through the first segment
|
|
|
|
|
let inpointOffset = 0;
|
|
|
|
|
if (
|
|
|
|
|
videoStartTime > recordings[0].start_time &&
|
|
|
|
|
videoStartTime < recordings[0].end_time
|
|
|
|
|
) {
|
|
|
|
|
inpointOffset = videoStartTime - recordings[0].start_time;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let seekSeconds = 0;
|
|
|
|
|
for (const segment of recordings) {
|
|
|
|
|
// Skip segments that end before our timestamp
|
|
|
|
|
if (segment.end_time <= timestamp) {
|
|
|
|
|
// Add this segment's duration, but subtract inpoint offset from first segment
|
|
|
|
|
if (segment === recordings[0]) {
|
|
|
|
|
seekSeconds += segment.duration - inpointOffset;
|
|
|
|
|
} else {
|
|
|
|
|
seekSeconds += segment.duration;
|
|
|
|
|
}
|
|
|
|
|
} else if (segment.start_time <= timestamp) {
|
|
|
|
|
// The timestamp is within this segment
|
|
|
|
|
if (segment === recordings[0]) {
|
|
|
|
|
// For the first segment, account for the inpoint offset
|
|
|
|
|
seekSeconds +=
|
|
|
|
|
timestamp - Math.max(segment.start_time, videoStartTime);
|
|
|
|
|
} else {
|
|
|
|
|
seekSeconds += timestamp - segment.start_time;
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return seekSeconds;
|
|
|
|
|
},
|
|
|
|
|
[recordings, eventStartRecord],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Convert video player time back to timeline timestamp, accounting for
|
|
|
|
|
// motion-only recording gaps. Reverse of timestampToVideoTime.
|
|
|
|
|
const videoTimeToTimestamp = useCallback(
|
|
|
|
|
(playerTime: number): number => {
|
|
|
|
|
if (!recordings || recordings.length === 0) {
|
|
|
|
|
// Fallback to simple calculation if no recordings data
|
|
|
|
|
const videoStartTime = eventStartRecord - REVIEW_PADDING;
|
|
|
|
|
return playerTime + videoStartTime;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const videoStartTime = eventStartRecord - REVIEW_PADDING;
|
|
|
|
|
|
|
|
|
|
// Calculate the inpoint offset - the video may start partway through the first segment
|
|
|
|
|
let inpointOffset = 0;
|
|
|
|
|
if (
|
|
|
|
|
videoStartTime > recordings[0].start_time &&
|
|
|
|
|
videoStartTime < recordings[0].end_time
|
|
|
|
|
) {
|
|
|
|
|
inpointOffset = videoStartTime - recordings[0].start_time;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let timestamp = 0;
|
|
|
|
|
let totalTime = 0;
|
|
|
|
|
|
|
|
|
|
for (const segment of recordings) {
|
|
|
|
|
const segmentDuration =
|
|
|
|
|
segment === recordings[0]
|
|
|
|
|
? segment.duration - inpointOffset
|
|
|
|
|
: segment.duration;
|
|
|
|
|
|
|
|
|
|
if (totalTime + segmentDuration > playerTime) {
|
|
|
|
|
// The player time is within this segment
|
|
|
|
|
if (segment === recordings[0]) {
|
|
|
|
|
// For the first segment, add the inpoint offset
|
|
|
|
|
timestamp =
|
|
|
|
|
Math.max(segment.start_time, videoStartTime) +
|
|
|
|
|
(playerTime - totalTime);
|
|
|
|
|
} else {
|
|
|
|
|
timestamp = segment.start_time + (playerTime - totalTime);
|
|
|
|
|
}
|
|
|
|
|
break;
|
|
|
|
|
} else {
|
|
|
|
|
totalTime += segmentDuration;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return timestamp;
|
|
|
|
|
},
|
|
|
|
|
[recordings, eventStartRecord],
|
|
|
|
|
);
|
|
|
|
|
|
2025-11-07 17:02:06 +03:00
|
|
|
eventSequence?.map((event) => {
|
|
|
|
|
event.data.zones_friendly_names = event.data?.zones?.map((zone) => {
|
|
|
|
|
return resolveZoneName(config, zone);
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
2025-11-02 16:48:43 +03:00
|
|
|
// Use manualOverride (set when seeking in image mode) if present so
|
|
|
|
|
// lifecycle rows and overlays follow image-mode seeks. Otherwise fall
|
|
|
|
|
// back to currentTime used for video mode.
|
2025-11-01 17:19:30 +03:00
|
|
|
const effectiveTime = useMemo(() => {
|
2025-11-02 16:48:43 +03:00
|
|
|
const displayedRecordTime = manualOverride ?? currentTime;
|
|
|
|
|
return displayedRecordTime - annotationOffset / 1000;
|
|
|
|
|
}, [manualOverride, currentTime, annotationOffset]);
|
2024-09-04 16:46:49 +03:00
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
const containerRef = useRef<HTMLDivElement | null>(null);
|
2025-11-17 17:12:05 +03:00
|
|
|
const timelineContainerRef = useRef<HTMLDivElement | null>(null);
|
|
|
|
|
const rowRefs = useRef<(HTMLDivElement | null)[]>([]);
|
2025-11-01 17:19:30 +03:00
|
|
|
const [_selectedZone, setSelectedZone] = useState("");
|
|
|
|
|
const [_lifecycleZones, setLifecycleZones] = useState<string[]>([]);
|
|
|
|
|
const [seekToTimestamp, setSeekToTimestamp] = useState<number | null>(null);
|
2025-11-17 17:12:05 +03:00
|
|
|
const [lineBottomOffsetPx, setLineBottomOffsetPx] = useState<number>(32);
|
|
|
|
|
const [lineTopOffsetPx, setLineTopOffsetPx] = useState<number>(8);
|
|
|
|
|
const [blueLineHeightPx, setBlueLineHeightPx] = useState<number>(0);
|
|
|
|
|
|
|
|
|
|
const [timelineSize] = useResizeObserver(timelineContainerRef);
|
2024-09-04 16:46:49 +03:00
|
|
|
|
2024-10-01 16:01:45 +03:00
|
|
|
const aspectRatio = useMemo(() => {
|
|
|
|
|
if (!config) {
|
|
|
|
|
return 16 / 9;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return (
|
|
|
|
|
config.cameras[event.camera].detect.width /
|
|
|
|
|
config.cameras[event.camera].detect.height
|
|
|
|
|
);
|
|
|
|
|
}, [config, event]);
|
|
|
|
|
|
2025-10-26 01:15:36 +03:00
|
|
|
const label = event.sub_label
|
|
|
|
|
? event.sub_label
|
|
|
|
|
: getTranslatedLabel(event.label);
|
|
|
|
|
|
2024-09-04 16:46:49 +03:00
|
|
|
const getZoneColor = useCallback(
|
|
|
|
|
(zoneName: string) => {
|
|
|
|
|
const zoneColor =
|
2024-09-12 17:46:29 +03:00
|
|
|
config?.cameras?.[event.camera]?.zones?.[zoneName]?.color;
|
2024-09-04 16:46:49 +03:00
|
|
|
if (zoneColor) {
|
|
|
|
|
const reversed = [...zoneColor].reverse();
|
|
|
|
|
return reversed;
|
|
|
|
|
}
|
|
|
|
|
},
|
2024-09-12 17:46:29 +03:00
|
|
|
[config, event],
|
2024-09-04 16:46:49 +03:00
|
|
|
);
|
|
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
// Set the selected object ID in the context so ObjectTrackOverlay can display it
|
|
|
|
|
useEffect(() => {
|
|
|
|
|
setSelectedObjectIds([event.id]);
|
|
|
|
|
}, [event.id, setSelectedObjectIds]);
|
2024-09-04 16:46:49 +03:00
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
const handleLifecycleClick = useCallback(
|
|
|
|
|
(item: TrackingDetailsSequence) => {
|
2025-11-02 16:48:43 +03:00
|
|
|
if (!videoRef.current && !imgRef.current) return;
|
2024-09-04 16:46:49 +03:00
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
// Convert lifecycle timestamp (detect stream) to record stream time
|
|
|
|
|
const targetTimeRecord = item.timestamp + annotationOffset / 1000;
|
2025-02-17 19:37:17 +03:00
|
|
|
|
2025-11-02 16:48:43 +03:00
|
|
|
if (displaySource === "image") {
|
|
|
|
|
// For image mode: set a manual override timestamp and update
|
|
|
|
|
// currentTime so overlays render correctly.
|
|
|
|
|
setManualOverride(targetTimeRecord);
|
|
|
|
|
setCurrentTime(targetTimeRecord);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-19 01:33:42 +03:00
|
|
|
// For video mode: convert to video-relative time (accounting for motion-only gaps)
|
|
|
|
|
const relativeTime = timestampToVideoTime(targetTimeRecord);
|
2025-02-17 19:37:17 +03:00
|
|
|
|
2025-11-02 16:48:43 +03:00
|
|
|
if (videoRef.current) {
|
|
|
|
|
videoRef.current.currentTime = relativeTime;
|
|
|
|
|
}
|
2024-09-04 16:46:49 +03:00
|
|
|
},
|
2025-11-19 01:33:42 +03:00
|
|
|
[annotationOffset, displaySource, timestampToVideoTime],
|
2025-02-17 19:37:17 +03:00
|
|
|
);
|
|
|
|
|
|
2025-10-18 21:19:21 +03:00
|
|
|
const formattedStart = config
|
|
|
|
|
? formatUnixTimestampToDateTime(event.start_time ?? 0, {
|
|
|
|
|
timezone: config.ui.timezone,
|
|
|
|
|
date_format:
|
|
|
|
|
config.ui.time_format == "24hour"
|
2025-10-26 15:27:07 +03:00
|
|
|
? t("time.formattedTimestamp.24hour", {
|
2025-10-18 21:19:21 +03:00
|
|
|
ns: "common",
|
|
|
|
|
})
|
2025-10-26 15:27:07 +03:00
|
|
|
: t("time.formattedTimestamp.12hour", {
|
2025-10-18 21:19:21 +03:00
|
|
|
ns: "common",
|
|
|
|
|
}),
|
|
|
|
|
time_style: "medium",
|
|
|
|
|
date_style: "medium",
|
|
|
|
|
})
|
|
|
|
|
: "";
|
|
|
|
|
|
2025-11-19 01:33:42 +03:00
|
|
|
const formattedEnd =
|
|
|
|
|
config && event.end_time != null
|
|
|
|
|
? formatUnixTimestampToDateTime(event.end_time, {
|
|
|
|
|
timezone: config.ui.timezone,
|
|
|
|
|
date_format:
|
|
|
|
|
config.ui.time_format == "24hour"
|
|
|
|
|
? t("time.formattedTimestamp.24hour", {
|
|
|
|
|
ns: "common",
|
|
|
|
|
})
|
|
|
|
|
: t("time.formattedTimestamp.12hour", {
|
|
|
|
|
ns: "common",
|
|
|
|
|
}),
|
|
|
|
|
time_style: "medium",
|
|
|
|
|
date_style: "medium",
|
|
|
|
|
})
|
|
|
|
|
: "";
|
2025-10-18 21:19:21 +03:00
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
|
if (!eventSequence || eventSequence.length === 0) return;
|
2025-11-01 17:19:30 +03:00
|
|
|
setLifecycleZones(eventSequence[0]?.data.zones);
|
|
|
|
|
}, [eventSequence]);
|
2025-10-18 21:19:21 +03:00
|
|
|
|
|
|
|
|
useEffect(() => {
|
2025-11-02 16:48:43 +03:00
|
|
|
if (seekToTimestamp === null) return;
|
|
|
|
|
|
|
|
|
|
if (displaySource === "image") {
|
|
|
|
|
// For image mode, set the manual override so the snapshot updates to
|
|
|
|
|
// the exact record timestamp.
|
|
|
|
|
setManualOverride(seekToTimestamp);
|
|
|
|
|
setSeekToTimestamp(null);
|
|
|
|
|
return;
|
|
|
|
|
}
|
2025-11-01 17:19:30 +03:00
|
|
|
|
|
|
|
|
// seekToTimestamp is a record stream timestamp
|
2025-11-19 01:33:42 +03:00
|
|
|
// Convert to video position (accounting for motion-only recording gaps)
|
2025-11-02 16:48:43 +03:00
|
|
|
if (!videoRef.current) return;
|
2025-11-19 01:33:42 +03:00
|
|
|
const relativeTime = timestampToVideoTime(seekToTimestamp);
|
2025-11-01 17:19:30 +03:00
|
|
|
if (relativeTime >= 0) {
|
|
|
|
|
videoRef.current.currentTime = relativeTime;
|
|
|
|
|
}
|
|
|
|
|
setSeekToTimestamp(null);
|
2025-11-19 01:33:42 +03:00
|
|
|
}, [seekToTimestamp, displaySource, timestampToVideoTime]);
|
2025-11-01 17:19:30 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
const isWithinEventRange = useMemo(() => {
|
|
|
|
|
if (effectiveTime === undefined || event.start_time === undefined) {
|
|
|
|
|
return false;
|
2025-10-18 21:19:21 +03:00
|
|
|
}
|
2025-11-17 17:12:05 +03:00
|
|
|
// If an event has not ended yet, fall back to last timestamp in eventSequence
|
|
|
|
|
let eventEnd = event.end_time;
|
|
|
|
|
if (eventEnd == null && eventSequence && eventSequence.length > 0) {
|
|
|
|
|
const last = eventSequence[eventSequence.length - 1];
|
|
|
|
|
if (last && last.timestamp !== undefined) {
|
|
|
|
|
eventEnd = last.timestamp;
|
2025-10-24 20:08:59 +03:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
if (eventEnd == null) {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
return effectiveTime >= event.start_time && effectiveTime <= eventEnd;
|
|
|
|
|
}, [effectiveTime, event.start_time, event.end_time, eventSequence]);
|
|
|
|
|
|
|
|
|
|
// Dynamically compute pixel offsets so the timeline line starts at the
|
|
|
|
|
// first row midpoint and ends at the last row midpoint. For accuracy,
|
|
|
|
|
// measure the center Y of each lifecycle row and interpolate the current
|
|
|
|
|
// effective time into a pixel position; then set the blue line height
|
|
|
|
|
// so it reaches the center dot at the same time the dot becomes active.
|
|
|
|
|
useEffect(() => {
|
|
|
|
|
if (!timelineContainerRef.current || !eventSequence) return;
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
const containerRect = timelineContainerRef.current.getBoundingClientRect();
|
|
|
|
|
const validRefs = rowRefs.current.filter((r) => r !== null);
|
|
|
|
|
if (validRefs.length === 0) return;
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
const centers = validRefs.map((n) => {
|
|
|
|
|
const r = n.getBoundingClientRect();
|
|
|
|
|
return r.top + r.height / 2 - containerRect.top;
|
|
|
|
|
});
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
const topOffset = Math.max(0, centers[0]);
|
|
|
|
|
const bottomOffset = Math.max(
|
|
|
|
|
0,
|
|
|
|
|
containerRect.height - centers[centers.length - 1],
|
|
|
|
|
);
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
setLineTopOffsetPx(Math.round(topOffset));
|
|
|
|
|
setLineBottomOffsetPx(Math.round(bottomOffset));
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
const eff = effectiveTime ?? 0;
|
|
|
|
|
const timestamps = eventSequence.map((s) => s.timestamp ?? 0);
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-17 17:12:05 +03:00
|
|
|
let pixelPos = centers[0];
|
|
|
|
|
if (eff <= timestamps[0]) {
|
|
|
|
|
pixelPos = centers[0];
|
|
|
|
|
} else if (eff >= timestamps[timestamps.length - 1]) {
|
|
|
|
|
pixelPos = centers[centers.length - 1];
|
|
|
|
|
} else {
|
|
|
|
|
for (let i = 0; i < timestamps.length - 1; i++) {
|
|
|
|
|
const t1 = timestamps[i];
|
|
|
|
|
const t2 = timestamps[i + 1];
|
|
|
|
|
if (eff >= t1 && eff <= t2) {
|
|
|
|
|
const ratio = t2 > t1 ? (eff - t1) / (t2 - t1) : 0;
|
|
|
|
|
pixelPos = centers[i] + ratio * (centers[i + 1] - centers[i]);
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const bluePx = Math.round(Math.max(0, pixelPos - topOffset));
|
|
|
|
|
setBlueLineHeightPx(bluePx);
|
|
|
|
|
}, [eventSequence, timelineSize.width, timelineSize.height, effectiveTime]);
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
const videoSource = useMemo(() => {
|
|
|
|
|
// event.start_time and event.end_time are in DETECT stream time
|
|
|
|
|
// Convert to record stream time, then create video clip with padding
|
|
|
|
|
const eventStartRecord = event.start_time + annotationOffset / 1000;
|
|
|
|
|
const eventEndRecord =
|
|
|
|
|
(event.end_time ?? Date.now() / 1000) + annotationOffset / 1000;
|
|
|
|
|
const startTime = eventStartRecord - REVIEW_PADDING;
|
|
|
|
|
const endTime = eventEndRecord + REVIEW_PADDING;
|
|
|
|
|
const playlist = `${baseUrl}vod/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
playlist,
|
|
|
|
|
startPosition: 0,
|
|
|
|
|
};
|
|
|
|
|
}, [event, annotationOffset]);
|
|
|
|
|
|
|
|
|
|
// Determine camera aspect ratio category
|
|
|
|
|
const cameraAspect = useMemo(() => {
|
|
|
|
|
if (!aspectRatio) {
|
|
|
|
|
return "normal";
|
|
|
|
|
} else if (aspectRatio > ASPECT_WIDE_LAYOUT) {
|
|
|
|
|
return "wide";
|
|
|
|
|
} else if (aspectRatio < ASPECT_VERTICAL_LAYOUT) {
|
|
|
|
|
return "tall";
|
|
|
|
|
} else {
|
|
|
|
|
return "normal";
|
|
|
|
|
}
|
|
|
|
|
}, [aspectRatio]);
|
|
|
|
|
|
|
|
|
|
const handleSeekToTime = useCallback((timestamp: number, _play?: boolean) => {
|
|
|
|
|
// Set the target timestamp to seek to
|
|
|
|
|
setSeekToTimestamp(timestamp);
|
|
|
|
|
}, []);
|
|
|
|
|
|
|
|
|
|
const handleTimeUpdate = useCallback(
|
|
|
|
|
(time: number) => {
|
2025-11-19 01:33:42 +03:00
|
|
|
// Convert video player time back to timeline timestamp
|
|
|
|
|
// accounting for motion-only recording gaps
|
|
|
|
|
const absoluteTime = videoTimeToTimestamp(time);
|
2025-11-01 17:19:30 +03:00
|
|
|
|
|
|
|
|
setCurrentTime(absoluteTime);
|
|
|
|
|
},
|
2025-11-19 01:33:42 +03:00
|
|
|
[videoTimeToTimestamp],
|
2025-11-01 17:19:30 +03:00
|
|
|
);
|
|
|
|
|
|
2025-11-02 16:48:43 +03:00
|
|
|
const [src, setSrc] = useState(
|
|
|
|
|
`${apiHost}api/${event.camera}/recordings/${currentTime + REVIEW_PADDING}/snapshot.jpg?height=500`,
|
|
|
|
|
);
|
|
|
|
|
const [hasError, setHasError] = useState(false);
|
|
|
|
|
|
|
|
|
|
// Derive the record timestamp to display: manualOverride if present,
|
|
|
|
|
// otherwise use currentTime.
|
|
|
|
|
const displayedRecordTime = manualOverride ?? currentTime;
|
|
|
|
|
|
|
|
|
|
useEffect(() => {
|
|
|
|
|
if (displayedRecordTime) {
|
|
|
|
|
const newSrc = `${apiHost}api/${event.camera}/recordings/${displayedRecordTime}/snapshot.jpg?height=500`;
|
|
|
|
|
setSrc(newSrc);
|
|
|
|
|
}
|
|
|
|
|
setImgLoaded(false);
|
|
|
|
|
setHasError(false);
|
|
|
|
|
|
|
|
|
|
// we know that these deps are correct
|
|
|
|
|
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
|
|
|
}, [displayedRecordTime]);
|
|
|
|
|
|
2025-11-12 02:00:54 +03:00
|
|
|
const onUploadFrameToPlus = useCallback(() => {
|
|
|
|
|
return axios.post(`/${event.camera}/plus/${currentTime}`);
|
|
|
|
|
}, [event.camera, currentTime]);
|
|
|
|
|
|
2025-10-18 21:19:21 +03:00
|
|
|
if (!config) {
|
2024-09-04 16:46:49 +03:00
|
|
|
return <ActivityIndicator />;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return (
|
2025-11-01 17:19:30 +03:00
|
|
|
<div
|
|
|
|
|
className={cn(
|
|
|
|
|
isDesktop
|
2025-11-06 19:22:52 +03:00
|
|
|
? "flex size-full justify-evenly gap-4 overflow-hidden"
|
2025-11-08 15:44:30 +03:00
|
|
|
: "flex flex-col gap-2",
|
|
|
|
|
!isDesktop && cameraAspect === "tall" && "size-full",
|
2025-11-01 17:19:30 +03:00
|
|
|
className,
|
2024-09-12 17:46:29 +03:00
|
|
|
)}
|
2025-11-01 17:19:30 +03:00
|
|
|
>
|
|
|
|
|
<span tabIndex={0} className="sr-only" />
|
2024-09-04 16:46:49 +03:00
|
|
|
|
2024-10-01 16:01:45 +03:00
|
|
|
<div
|
|
|
|
|
className={cn(
|
2025-11-02 16:48:43 +03:00
|
|
|
"flex items-center justify-center",
|
2025-11-01 17:19:30 +03:00
|
|
|
isDesktop && "overflow-hidden",
|
|
|
|
|
cameraAspect === "tall" ? "max-h-[50dvh] lg:max-h-[70dvh]" : "w-full",
|
2025-11-02 16:48:43 +03:00
|
|
|
cameraAspect === "tall" && isMobileOnly && "w-full",
|
2025-11-01 17:19:30 +03:00
|
|
|
cameraAspect !== "tall" && isDesktop && "flex-[3]",
|
2024-10-01 16:01:45 +03:00
|
|
|
)}
|
2025-11-01 17:19:30 +03:00
|
|
|
style={{ aspectRatio: aspectRatio }}
|
|
|
|
|
ref={containerRef}
|
2024-10-01 16:01:45 +03:00
|
|
|
>
|
2024-09-12 22:39:35 +03:00
|
|
|
<div
|
|
|
|
|
className={cn(
|
2025-11-01 17:19:30 +03:00
|
|
|
"relative",
|
|
|
|
|
cameraAspect === "tall" ? "h-full" : "w-full",
|
2024-09-12 22:39:35 +03:00
|
|
|
)}
|
|
|
|
|
>
|
2025-11-02 16:48:43 +03:00
|
|
|
{displaySource == "video" && (
|
2025-11-22 00:40:58 +03:00
|
|
|
<>
|
|
|
|
|
<HlsVideoPlayer
|
|
|
|
|
videoRef={videoRef}
|
|
|
|
|
containerRef={containerRef}
|
|
|
|
|
visible={true}
|
|
|
|
|
currentSource={videoSource}
|
|
|
|
|
hotKeys={false}
|
|
|
|
|
supportsFullscreen={false}
|
|
|
|
|
fullscreen={false}
|
|
|
|
|
frigateControls={true}
|
|
|
|
|
onTimeUpdate={handleTimeUpdate}
|
|
|
|
|
onSeekToTime={handleSeekToTime}
|
|
|
|
|
onUploadFrame={onUploadFrameToPlus}
|
|
|
|
|
onPlaying={() => setIsVideoLoading(false)}
|
|
|
|
|
isDetailMode={true}
|
|
|
|
|
camera={event.camera}
|
|
|
|
|
currentTimeOverride={currentTime}
|
|
|
|
|
/>
|
|
|
|
|
{isVideoLoading && (
|
|
|
|
|
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
|
|
|
|
|
)}
|
|
|
|
|
</>
|
2025-11-02 16:48:43 +03:00
|
|
|
)}
|
|
|
|
|
{displaySource == "image" && (
|
|
|
|
|
<>
|
|
|
|
|
<ImageLoadingIndicator
|
|
|
|
|
className="absolute inset-0"
|
|
|
|
|
imgLoaded={imgLoaded}
|
|
|
|
|
/>
|
|
|
|
|
{hasError && (
|
|
|
|
|
<div className="relative aspect-video">
|
|
|
|
|
<div className="flex flex-col items-center justify-center p-20 text-center">
|
|
|
|
|
<LuFolderX className="size-16" />
|
|
|
|
|
{t("objectLifecycle.noImageFound")}
|
|
|
|
|
</div>
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
|
|
|
|
<div
|
|
|
|
|
className={cn("relative", imgLoaded ? "visible" : "invisible")}
|
|
|
|
|
>
|
|
|
|
|
<div className="absolute z-50 size-full">
|
|
|
|
|
<ObjectTrackOverlay
|
|
|
|
|
key={`overlay-${displayedRecordTime}`}
|
|
|
|
|
camera={event.camera}
|
|
|
|
|
showBoundingBoxes={true}
|
|
|
|
|
currentTime={displayedRecordTime}
|
|
|
|
|
videoWidth={imgRef?.current?.naturalWidth ?? 0}
|
|
|
|
|
videoHeight={imgRef?.current?.naturalHeight ?? 0}
|
|
|
|
|
className="absolute inset-0 z-10"
|
|
|
|
|
onSeekToTime={handleSeekToTime}
|
|
|
|
|
/>
|
|
|
|
|
</div>
|
|
|
|
|
<img
|
|
|
|
|
key={event.id}
|
|
|
|
|
ref={imgRef}
|
|
|
|
|
className={cn(
|
|
|
|
|
"max-h-[50dvh] max-w-full select-none rounded-lg object-contain",
|
|
|
|
|
)}
|
|
|
|
|
loading={isSafari ? "eager" : "lazy"}
|
|
|
|
|
style={
|
|
|
|
|
isIOS
|
|
|
|
|
? {
|
|
|
|
|
WebkitUserSelect: "none",
|
|
|
|
|
WebkitTouchCallout: "none",
|
|
|
|
|
}
|
|
|
|
|
: undefined
|
|
|
|
|
}
|
|
|
|
|
draggable={false}
|
|
|
|
|
src={src}
|
|
|
|
|
onLoad={() => setImgLoaded(true)}
|
|
|
|
|
onError={() => setHasError(true)}
|
|
|
|
|
/>
|
|
|
|
|
</div>
|
|
|
|
|
</>
|
|
|
|
|
)}
|
2024-09-04 16:46:49 +03:00
|
|
|
</div>
|
|
|
|
|
</div>
|
|
|
|
|
|
2025-11-06 19:22:52 +03:00
|
|
|
<div
|
|
|
|
|
className={cn(
|
2025-12-02 17:21:15 +03:00
|
|
|
isDesktop && "justify-between overflow-hidden lg:basis-2/5",
|
2025-11-06 19:22:52 +03:00
|
|
|
)}
|
|
|
|
|
>
|
|
|
|
|
{isDesktop && tabs && (
|
2025-11-08 15:44:30 +03:00
|
|
|
<div className="mb-2 flex items-center justify-between">
|
2025-11-06 19:22:52 +03:00
|
|
|
<div className="flex-1">{tabs}</div>
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
2025-10-18 21:19:21 +03:00
|
|
|
<div
|
|
|
|
|
className={cn(
|
2025-11-01 17:19:30 +03:00
|
|
|
isDesktop && "scrollbar-container h-full overflow-y-auto",
|
2025-10-18 21:19:21 +03:00
|
|
|
)}
|
|
|
|
|
>
|
2025-11-01 17:19:30 +03:00
|
|
|
{config?.cameras[event.camera]?.onvif.autotracking
|
|
|
|
|
.enabled_in_config && (
|
2025-11-07 16:53:27 +03:00
|
|
|
<div className="mb-2 ml-3 text-sm text-danger">
|
2025-11-01 17:19:30 +03:00
|
|
|
{t("trackingDetails.autoTrackingTips")}
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
2025-10-18 21:19:21 +03:00
|
|
|
|
2025-11-01 17:19:30 +03:00
|
|
|
<div className="mt-4">
|
|
|
|
|
<div
|
2025-11-06 19:22:52 +03:00
|
|
|
className={cn("rounded-md bg-background_alt px-0 py-3 md:px-2")}
|
2025-11-01 17:19:30 +03:00
|
|
|
>
|
|
|
|
|
<div className="flex w-full items-center justify-between">
|
2025-10-24 20:08:59 +03:00
|
|
|
<div
|
2025-11-01 17:19:30 +03:00
|
|
|
className="flex items-center gap-2 font-medium"
|
|
|
|
|
onClick={(e) => {
|
|
|
|
|
e.stopPropagation();
|
|
|
|
|
// event.start_time is detect time, convert to record
|
|
|
|
|
handleSeekToTime(
|
|
|
|
|
(event.start_time ?? 0) + annotationOffset / 1000,
|
2025-10-24 20:08:59 +03:00
|
|
|
);
|
2025-11-01 17:19:30 +03:00
|
|
|
}}
|
|
|
|
|
role="button"
|
|
|
|
|
>
|
|
|
|
|
<div
|
|
|
|
|
className={cn(
|
|
|
|
|
"relative ml-2 rounded-full bg-muted-foreground p-2",
|
|
|
|
|
)}
|
|
|
|
|
>
|
|
|
|
|
{getIconForLabel(
|
|
|
|
|
event.sub_label ? event.label + "-verified" : event.label,
|
|
|
|
|
"size-4 text-white",
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
|
|
|
|
<div className="flex items-center gap-2">
|
|
|
|
|
<span className="capitalize">{label}</span>
|
2025-11-19 01:33:42 +03:00
|
|
|
<div className="md:text-md flex items-center text-xs text-secondary-foreground">
|
|
|
|
|
{formattedStart ?? ""}
|
|
|
|
|
{event.end_time != null ? (
|
|
|
|
|
<> - {formattedEnd}</>
|
|
|
|
|
) : (
|
|
|
|
|
<div className="inline-block">
|
|
|
|
|
<ActivityIndicator className="ml-3 size-4" />
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
2025-11-01 17:19:30 +03:00
|
|
|
{event.data?.recognized_license_plate && (
|
|
|
|
|
<>
|
|
|
|
|
<span className="text-secondary-foreground">·</span>
|
|
|
|
|
<div className="text-sm text-secondary-foreground">
|
|
|
|
|
<Link
|
|
|
|
|
to={`/explore?recognized_license_plate=${event.data.recognized_license_plate}`}
|
|
|
|
|
className="text-sm"
|
|
|
|
|
>
|
|
|
|
|
{event.data.recognized_license_plate}
|
|
|
|
|
</Link>
|
|
|
|
|
</div>
|
|
|
|
|
</>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
2025-10-24 20:08:59 +03:00
|
|
|
</div>
|
2025-10-18 21:19:21 +03:00
|
|
|
</div>
|
2025-11-01 17:19:30 +03:00
|
|
|
|
|
|
|
|
<div className="mt-2">
|
|
|
|
|
{!eventSequence ? (
|
|
|
|
|
<ActivityIndicator className="size-2" size={2} />
|
|
|
|
|
) : eventSequence.length === 0 ? (
|
|
|
|
|
<div className="py-2 text-muted-foreground">
|
|
|
|
|
{t("detail.noObjectDetailData", { ns: "views/events" })}
|
|
|
|
|
</div>
|
|
|
|
|
) : (
|
2025-11-17 17:12:05 +03:00
|
|
|
<div
|
|
|
|
|
className="-pb-2 relative mx-0"
|
|
|
|
|
ref={timelineContainerRef}
|
|
|
|
|
>
|
|
|
|
|
<div
|
|
|
|
|
className="absolute -top-2 left-6 z-0 w-0.5 -translate-x-1/2 bg-secondary-foreground"
|
|
|
|
|
style={{ bottom: lineBottomOffsetPx }}
|
|
|
|
|
/>
|
2025-11-01 17:19:30 +03:00
|
|
|
{isWithinEventRange && (
|
|
|
|
|
<div
|
2025-11-17 17:12:05 +03:00
|
|
|
className="absolute left-6 z-[5] w-0.5 -translate-x-1/2 bg-selected transition-all duration-300"
|
|
|
|
|
style={{
|
|
|
|
|
top: `${lineTopOffsetPx}px`,
|
|
|
|
|
height: `${blueLineHeightPx}px`,
|
|
|
|
|
}}
|
2025-11-01 17:19:30 +03:00
|
|
|
/>
|
|
|
|
|
)}
|
|
|
|
|
<div className="space-y-2">
|
|
|
|
|
{eventSequence.map((item, idx) => {
|
|
|
|
|
return (
|
2025-11-17 17:12:05 +03:00
|
|
|
<div
|
2025-11-01 17:19:30 +03:00
|
|
|
key={`${item.timestamp}-${item.source_id ?? ""}-${idx}`}
|
2025-11-17 17:12:05 +03:00
|
|
|
ref={(el) => {
|
|
|
|
|
rowRefs.current[idx] = el;
|
|
|
|
|
}}
|
|
|
|
|
>
|
|
|
|
|
<LifecycleIconRow
|
|
|
|
|
item={item}
|
2025-11-24 16:34:56 +03:00
|
|
|
event={event}
|
2025-11-17 17:12:05 +03:00
|
|
|
onClick={() => handleLifecycleClick(item)}
|
|
|
|
|
setSelectedZone={setSelectedZone}
|
|
|
|
|
getZoneColor={getZoneColor}
|
|
|
|
|
effectiveTime={effectiveTime}
|
|
|
|
|
isTimelineActive={isWithinEventRange}
|
|
|
|
|
/>
|
|
|
|
|
</div>
|
2025-11-01 17:19:30 +03:00
|
|
|
);
|
|
|
|
|
})}
|
|
|
|
|
</div>
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
|
|
|
|
</div>
|
2025-10-18 21:19:21 +03:00
|
|
|
</div>
|
|
|
|
|
</div>
|
2024-09-04 16:46:49 +03:00
|
|
|
</div>
|
2024-09-12 17:46:29 +03:00
|
|
|
</div>
|
2024-09-04 16:46:49 +03:00
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
2025-10-24 20:08:59 +03:00
|
|
|
/**
 * Props for a single row in the tracked object's lifecycle timeline
 * (rendered by {@link LifecycleIconRow}).
 */
type LifecycleIconRowProps = {
  // The lifecycle entry (timestamp, class_type, box/zone data, …) this row renders.
  item: TrackingDetailsSequence;
  // The tracked object the lifecycle belongs to; its camera is used to look up
  // detect resolution in the config.
  event: Event;
  // Click handler for the whole row.
  onClick: () => void;
  // Reports the zone name of a clicked zone badge to the parent.
  setSelectedZone: (z: string) => void;
  // Resolves a zone name to its configured RGB color triplet, if any.
  getZoneColor: (zoneName: string) => number[] | undefined;
  // Current playback time, used to highlight the row nearest the play head.
  effectiveTime?: number;
  // Whether the play head is within the event's range (enables progress styling).
  isTimelineActive?: boolean;
};
|
|
|
|
|
|
|
|
|
|
function LifecycleIconRow({
|
|
|
|
|
item,
|
2025-11-24 16:34:56 +03:00
|
|
|
event,
|
2025-10-24 20:08:59 +03:00
|
|
|
onClick,
|
|
|
|
|
setSelectedZone,
|
|
|
|
|
getZoneColor,
|
2025-11-01 17:19:30 +03:00
|
|
|
effectiveTime,
|
|
|
|
|
isTimelineActive,
|
2025-10-24 20:08:59 +03:00
|
|
|
}: LifecycleIconRowProps) {
|
2025-10-26 21:12:20 +03:00
|
|
|
const { t } = useTranslation(["views/explore", "components/player"]);
|
|
|
|
|
const { data: config } = useSWR<FrigateConfig>("config");
|
|
|
|
|
const [isOpen, setIsOpen] = useState(false);
|
|
|
|
|
const navigate = useNavigate();
|
2025-11-29 16:30:04 +03:00
|
|
|
const isAdmin = useIsAdmin();
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-11-24 16:34:56 +03:00
|
|
|
const aspectRatio = useMemo(() => {
|
|
|
|
|
if (!config) {
|
|
|
|
|
return 16 / 9;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return (
|
|
|
|
|
config.cameras[event.camera].detect.width /
|
|
|
|
|
config.cameras[event.camera].detect.height
|
|
|
|
|
);
|
|
|
|
|
}, [config, event]);
|
|
|
|
|
|
|
|
|
|
const isActive = useMemo(
|
|
|
|
|
() => Math.abs((effectiveTime ?? 0) - (item.timestamp ?? 0)) <= 0.5,
|
|
|
|
|
[effectiveTime, item.timestamp],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const formattedEventTimestamp = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
config
|
|
|
|
|
? formatUnixTimestampToDateTime(item.timestamp ?? 0, {
|
|
|
|
|
timezone: config.ui.timezone,
|
|
|
|
|
date_format:
|
|
|
|
|
config.ui.time_format == "24hour"
|
|
|
|
|
? t("time.formattedTimestampHourMinuteSecond.24hour", {
|
|
|
|
|
ns: "common",
|
|
|
|
|
})
|
|
|
|
|
: t("time.formattedTimestampHourMinuteSecond.12hour", {
|
|
|
|
|
ns: "common",
|
|
|
|
|
}),
|
|
|
|
|
time_style: "medium",
|
|
|
|
|
date_style: "medium",
|
|
|
|
|
})
|
|
|
|
|
: "",
|
|
|
|
|
[config, item.timestamp, t],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const ratio = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
Array.isArray(item.data.box) && item.data.box.length >= 4
|
|
|
|
|
? (aspectRatio * (item.data.box[2] / item.data.box[3])).toFixed(2)
|
|
|
|
|
: "N/A",
|
|
|
|
|
[aspectRatio, item.data.box],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const areaPx = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
Array.isArray(item.data.box) && item.data.box.length >= 4
|
|
|
|
|
? Math.round(
|
|
|
|
|
(config?.cameras[event.camera]?.detect?.width ?? 0) *
|
|
|
|
|
(config?.cameras[event.camera]?.detect?.height ?? 0) *
|
|
|
|
|
(item.data.box[2] * item.data.box[3]),
|
|
|
|
|
)
|
|
|
|
|
: undefined,
|
|
|
|
|
[config, event.camera, item.data.box],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const attributeAreaPx = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
Array.isArray(item.data.attribute_box) &&
|
|
|
|
|
item.data.attribute_box.length >= 4
|
|
|
|
|
? Math.round(
|
|
|
|
|
(config?.cameras[event.camera]?.detect?.width ?? 0) *
|
|
|
|
|
(config?.cameras[event.camera]?.detect?.height ?? 0) *
|
|
|
|
|
(item.data.attribute_box[2] * item.data.attribute_box[3]),
|
|
|
|
|
)
|
|
|
|
|
: undefined,
|
|
|
|
|
[config, event.camera, item.data.attribute_box],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const attributeAreaPct = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
Array.isArray(item.data.attribute_box) &&
|
|
|
|
|
item.data.attribute_box.length >= 4
|
|
|
|
|
? (item.data.attribute_box[2] * item.data.attribute_box[3]).toFixed(4)
|
|
|
|
|
: undefined,
|
|
|
|
|
[item.data.attribute_box],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const areaPct = useMemo(
|
|
|
|
|
() =>
|
|
|
|
|
Array.isArray(item.data.box) && item.data.box.length >= 4
|
|
|
|
|
? (item.data.box[2] * item.data.box[3]).toFixed(4)
|
|
|
|
|
: undefined,
|
|
|
|
|
[item.data.box],
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
const score = useMemo(() => {
|
|
|
|
|
if (item.data.score !== undefined) {
|
|
|
|
|
return (item.data.score * 100).toFixed(0) + "%";
|
|
|
|
|
}
|
|
|
|
|
return "N/A";
|
|
|
|
|
}, [item.data.score]);
|
|
|
|
|
|
2025-10-24 20:08:59 +03:00
|
|
|
return (
|
|
|
|
|
<div
|
|
|
|
|
role="button"
|
|
|
|
|
onClick={onClick}
|
|
|
|
|
className={cn(
|
2025-11-01 17:19:30 +03:00
|
|
|
"rounded-md p-2 pr-0 text-sm text-primary-variant",
|
2025-10-24 20:08:59 +03:00
|
|
|
isActive && "bg-secondary-highlight font-semibold text-primary",
|
|
|
|
|
!isActive && "duration-500",
|
|
|
|
|
)}
|
|
|
|
|
>
|
|
|
|
|
<div className="flex items-center gap-2">
|
2025-11-01 17:19:30 +03:00
|
|
|
<div className="relative ml-2 flex size-4 items-center justify-center">
|
2025-10-24 20:08:59 +03:00
|
|
|
<LuCircle
|
|
|
|
|
className={cn(
|
2025-11-01 17:19:30 +03:00
|
|
|
"relative z-10 size-2.5 fill-secondary-foreground stroke-none",
|
|
|
|
|
(isActive || (effectiveTime ?? 0) >= (item?.timestamp ?? 0)) &&
|
|
|
|
|
isTimelineActive &&
|
|
|
|
|
"fill-selected duration-300",
|
2025-10-24 20:08:59 +03:00
|
|
|
)}
|
|
|
|
|
/>
|
|
|
|
|
</div>
|
|
|
|
|
|
2025-10-26 15:27:07 +03:00
|
|
|
<div className="ml-2 flex w-full min-w-0 flex-1">
|
2025-10-24 20:08:59 +03:00
|
|
|
<div className="flex flex-col">
|
2025-10-26 15:27:07 +03:00
|
|
|
<div className="text-md flex items-start break-words text-left">
|
|
|
|
|
{getLifecycleItemDescription(item)}
|
|
|
|
|
</div>
|
2025-12-02 17:21:15 +03:00
|
|
|
{/* Only show Score/Ratio/Area for object events, not for audio (heard) or manual API (external) events */}
|
|
|
|
|
{item.class_type !== "heard" && item.class_type !== "external" && (
|
|
|
|
|
<div className="my-2 ml-2 flex flex-col flex-wrap items-start gap-1.5 text-xs text-secondary-foreground">
|
|
|
|
|
<div className="flex items-center gap-1.5">
|
|
|
|
|
<span className="text-primary-variant">
|
|
|
|
|
{t("trackingDetails.lifecycleItemDesc.header.score")}
|
2025-10-24 20:08:59 +03:00
|
|
|
</span>
|
2025-12-02 17:21:15 +03:00
|
|
|
<span className="font-medium text-primary">{score}</span>
|
|
|
|
|
</div>
|
|
|
|
|
<div className="flex items-center gap-1.5">
|
|
|
|
|
<span className="text-primary-variant">
|
|
|
|
|
{t("trackingDetails.lifecycleItemDesc.header.ratio")}
|
|
|
|
|
</span>
|
|
|
|
|
<span className="font-medium text-primary">{ratio}</span>
|
|
|
|
|
</div>
|
|
|
|
|
<div className="flex items-center gap-1.5">
|
|
|
|
|
<span className="text-primary-variant">
|
|
|
|
|
{t("trackingDetails.lifecycleItemDesc.header.area")}{" "}
|
|
|
|
|
{attributeAreaPx !== undefined &&
|
|
|
|
|
attributeAreaPct !== undefined && (
|
|
|
|
|
<span className="text-primary-variant">
|
|
|
|
|
({getTranslatedLabel(item.data.label)})
|
|
|
|
|
</span>
|
|
|
|
|
)}
|
|
|
|
|
</span>
|
|
|
|
|
{areaPx !== undefined && areaPct !== undefined ? (
|
2025-11-24 16:34:56 +03:00
|
|
|
<span className="font-medium text-primary">
|
2025-12-02 17:21:15 +03:00
|
|
|
{t("information.pixels", { ns: "common", area: areaPx })}{" "}
|
|
|
|
|
· {areaPct}%
|
2025-11-24 16:34:56 +03:00
|
|
|
</span>
|
2025-12-02 17:21:15 +03:00
|
|
|
) : (
|
|
|
|
|
<span>N/A</span>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
|
|
|
|
{attributeAreaPx !== undefined &&
|
|
|
|
|
attributeAreaPct !== undefined && (
|
|
|
|
|
<div className="flex items-center gap-1.5">
|
|
|
|
|
<span className="text-primary-variant">
|
|
|
|
|
{t("trackingDetails.lifecycleItemDesc.header.area")} (
|
|
|
|
|
{getTranslatedLabel(item.data.attribute)})
|
|
|
|
|
</span>
|
|
|
|
|
<span className="font-medium text-primary">
|
|
|
|
|
{t("information.pixels", {
|
|
|
|
|
ns: "common",
|
|
|
|
|
area: attributeAreaPx,
|
|
|
|
|
})}{" "}
|
|
|
|
|
· {attributeAreaPct}%
|
|
|
|
|
</span>
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
2025-10-24 20:08:59 +03:00
|
|
|
|
2025-12-02 17:21:15 +03:00
|
|
|
{item.data?.zones && item.data.zones.length > 0 && (
|
|
|
|
|
<div className="mt-1 flex flex-wrap items-center gap-2">
|
|
|
|
|
{item.data.zones.map((zone, zidx) => {
|
|
|
|
|
const color = getZoneColor(zone)?.join(",") ?? "0,0,0";
|
|
|
|
|
return (
|
|
|
|
|
<Badge
|
|
|
|
|
key={`${zone}-${zidx}`}
|
|
|
|
|
variant="outline"
|
|
|
|
|
className="inline-flex cursor-pointer items-center gap-2"
|
|
|
|
|
onClick={(e: React.MouseEvent) => {
|
|
|
|
|
e.stopPropagation();
|
|
|
|
|
setSelectedZone(zone);
|
|
|
|
|
}}
|
|
|
|
|
style={{
|
|
|
|
|
borderColor: `rgba(${color}, 0.6)`,
|
|
|
|
|
background: `rgba(${color}, 0.08)`,
|
|
|
|
|
}}
|
|
|
|
|
>
|
|
|
|
|
<span
|
|
|
|
|
className="size-1 rounded-full"
|
2025-10-24 20:08:59 +03:00
|
|
|
style={{
|
2025-12-02 17:21:15 +03:00
|
|
|
display: "inline-block",
|
|
|
|
|
width: 10,
|
|
|
|
|
height: 10,
|
|
|
|
|
backgroundColor: `rgb(${color})`,
|
2025-10-24 20:08:59 +03:00
|
|
|
}}
|
2025-12-02 17:21:15 +03:00
|
|
|
/>
|
|
|
|
|
<span
|
|
|
|
|
className={cn(
|
|
|
|
|
item.data?.zones_friendly_names?.[zidx] === zone &&
|
|
|
|
|
"smart-capitalize",
|
|
|
|
|
)}
|
2025-10-24 20:08:59 +03:00
|
|
|
>
|
2025-12-02 17:21:15 +03:00
|
|
|
{item.data?.zones_friendly_names?.[zidx]}
|
|
|
|
|
</span>
|
|
|
|
|
</Badge>
|
|
|
|
|
);
|
|
|
|
|
})}
|
|
|
|
|
</div>
|
|
|
|
|
)}
|
2025-10-24 20:08:59 +03:00
|
|
|
</div>
|
2025-10-26 15:27:07 +03:00
|
|
|
</div>
|
|
|
|
|
<div className="ml-3 flex-shrink-0 px-1 text-right text-xs text-primary-variant">
|
2025-10-26 21:12:20 +03:00
|
|
|
<div className="flex flex-row items-center gap-3">
|
|
|
|
|
<div className="whitespace-nowrap">{formattedEventTimestamp}</div>
|
2025-11-29 16:30:04 +03:00
|
|
|
{((isAdmin && config?.plus?.enabled) || item.data.box) && (
|
2025-10-26 21:12:20 +03:00
|
|
|
<DropdownMenu open={isOpen} onOpenChange={setIsOpen}>
|
|
|
|
|
<DropdownMenuTrigger>
|
|
|
|
|
<div className="rounded p-1 pr-2" role="button">
|
|
|
|
|
<HiDotsHorizontal className="size-4 text-muted-foreground" />
|
|
|
|
|
</div>
|
|
|
|
|
</DropdownMenuTrigger>
|
|
|
|
|
<DropdownMenuPortal>
|
|
|
|
|
<DropdownMenuContent>
|
2025-11-29 16:30:04 +03:00
|
|
|
{isAdmin && config?.plus?.enabled && (
|
2025-10-26 21:12:20 +03:00
|
|
|
<DropdownMenuItem
|
|
|
|
|
className="cursor-pointer"
|
|
|
|
|
onSelect={async () => {
|
|
|
|
|
const resp = await axios.post(
|
|
|
|
|
`/${item.camera}/plus/${item.timestamp}`,
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
if (resp && resp.status == 200) {
|
|
|
|
|
toast.success(
|
|
|
|
|
t("toast.success.submittedFrigatePlus", {
|
|
|
|
|
ns: "components/player",
|
|
|
|
|
}),
|
|
|
|
|
{
|
|
|
|
|
position: "top-center",
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
} else {
|
|
|
|
|
toast.success(
|
|
|
|
|
t("toast.error.submitFrigatePlusFailed", {
|
|
|
|
|
ns: "components/player",
|
|
|
|
|
}),
|
|
|
|
|
{
|
|
|
|
|
position: "top-center",
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
}}
|
|
|
|
|
>
|
|
|
|
|
{t("itemMenu.submitToPlus.label")}
|
|
|
|
|
</DropdownMenuItem>
|
|
|
|
|
)}
|
|
|
|
|
{item.data.box && (
|
|
|
|
|
<DropdownMenuItem
|
|
|
|
|
className="cursor-pointer"
|
|
|
|
|
onSelect={() => {
|
|
|
|
|
setIsOpen(false);
|
|
|
|
|
setTimeout(() => {
|
|
|
|
|
navigate(
|
|
|
|
|
`/settings?page=masksAndZones&camera=${item.camera}&object_mask=${item.data.box}`,
|
|
|
|
|
);
|
|
|
|
|
}, 0);
|
|
|
|
|
}}
|
|
|
|
|
>
|
|
|
|
|
{t("trackingDetails.createObjectMask")}
|
|
|
|
|
</DropdownMenuItem>
|
|
|
|
|
)}
|
|
|
|
|
</DropdownMenuContent>
|
|
|
|
|
</DropdownMenuPortal>
|
|
|
|
|
</DropdownMenu>
|
|
|
|
|
)}
|
|
|
|
|
</div>
|
2025-10-24 20:08:59 +03:00
|
|
|
</div>
|
|
|
|
|
</div>
|
|
|
|
|
</div>
|
|
|
|
|
);
|
|
|
|
|
}
|