Indicate in review timeline when motion was considered calibration.

This commit is contained in:
p-boon 2025-03-05 12:53:02 +01:00
parent d4946f19f5
commit 31dfd97493
17 changed files with 79 additions and 15 deletions

View File

@ -40,4 +40,5 @@ class ReviewSummaryResponse(BaseModel):
class ReviewActivityMotionResponse(BaseModel): class ReviewActivityMotionResponse(BaseModel):
start_time: int start_time: int
motion: float motion: float
is_calibrating: bool
camera: str camera: str

View File

@ -416,6 +416,7 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
Recordings.camera, Recordings.camera,
Recordings.start_time, Recordings.start_time,
Recordings.motion, Recordings.motion,
Recordings.is_calibrating,
) )
.where(reduce(operator.and_, clauses)) .where(reduce(operator.and_, clauses))
.order_by(Recordings.start_time.asc()) .order_by(Recordings.start_time.asc())
@ -424,7 +425,9 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
) )
# resample data using pandas to get activity on scaled basis # resample data using pandas to get activity on scaled basis
df = pd.DataFrame(data, columns=["start_time", "motion", "camera"]) df = pd.DataFrame(
data, columns=["start_time", "motion", "is_calibrating", "camera"]
)
if df.empty: if df.empty:
logger.warning("No motion data found for the requested time range") logger.warning("No motion data found for the requested time range")
@ -445,7 +448,8 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
.to_frame() .to_frame()
) )
cameras = df["camera"].resample(f"{scale}s").agg(lambda x: ",".join(set(x))) cameras = df["camera"].resample(f"{scale}s").agg(lambda x: ",".join(set(x)))
df = motion.join(cameras) calibrations = df["is_calibrating"].resample(f"{scale}s").apply(lambda x: all(x))
df = motion.join(cameras).join(calibrations)
length = df.shape[0] length = df.shape[0]
chunk = int(60 * (60 / scale)) chunk = int(60 * (60 / scale))

View File

@ -71,6 +71,7 @@ class Recordings(Model): # type: ignore[misc]
end_time = DateTimeField() end_time = DateTimeField()
duration = FloatField() duration = FloatField()
motion = IntegerField(null=True) motion = IntegerField(null=True)
is_calibrating = BooleanField(default=False)
objects = IntegerField(null=True) objects = IntegerField(null=True)
dBFS = IntegerField(null=True) dBFS = IntegerField(null=True)
segment_size = FloatField(default=0) # this should be stored as MB segment_size = FloatField(default=0) # this should be stored as MB

View File

@ -689,6 +689,7 @@ class TrackedObjectProcessor(threading.Thread):
current_tracked_objects, current_tracked_objects,
motion_boxes, motion_boxes,
regions, regions,
is_calibrating,
) = self.tracked_objects_queue.get(True, 1) ) = self.tracked_objects_queue.get(True, 1)
except queue.Empty: except queue.Empty:
continue continue
@ -714,6 +715,7 @@ class TrackedObjectProcessor(threading.Thread):
tracked_objects, tracked_objects,
motion_boxes, motion_boxes,
regions, regions,
is_calibrating,
) )
) )

View File

@ -93,6 +93,7 @@ def output_frames(
current_tracked_objects, current_tracked_objects,
motion_boxes, motion_boxes,
_, _,
_,
) = data ) = data
frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv) frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv)
@ -168,6 +169,7 @@ def output_frames(
current_tracked_objects, current_tracked_objects,
motion_boxes, motion_boxes,
regions, regions,
_,
) = data ) = data
frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv) frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv)

View File

@ -45,11 +45,13 @@ class SegmentInfo:
def __init__( def __init__(
self, self,
motion_count: int, motion_count: int,
is_calibrating: bool,
active_object_count: int, active_object_count: int,
region_count: int, region_count: int,
average_dBFS: int, average_dBFS: int,
) -> None: ) -> None:
self.motion_count = motion_count self.motion_count = motion_count
self.is_calibrating = is_calibrating
self.active_object_count = active_object_count self.active_object_count = active_object_count
self.region_count = region_count self.region_count = region_count
self.average_dBFS = average_dBFS self.average_dBFS = average_dBFS
@ -368,6 +370,7 @@ class RecordingMaintainer(threading.Thread):
active_count = 0 active_count = 0
region_count = 0 region_count = 0
motion_count = 0 motion_count = 0
is_calibrating = False
for frame in self.object_recordings_info[camera]: for frame in self.object_recordings_info[camera]:
# frame is after end time of segment # frame is after end time of segment
if frame[0] > end_time.timestamp(): if frame[0] > end_time.timestamp():
@ -385,7 +388,8 @@ class RecordingMaintainer(threading.Thread):
] ]
) )
motion_count += len(frame[2]) motion_count += len(frame[2])
region_count += len(frame[3]) is_calibrating = is_calibrating or frame[3]
region_count += len(frame[4])
audio_values = [] audio_values = []
for frame in self.audio_recordings_info[camera]: for frame in self.audio_recordings_info[camera]:
@ -406,7 +410,11 @@ class RecordingMaintainer(threading.Thread):
average_dBFS = 0 if not audio_values else np.average(audio_values) average_dBFS = 0 if not audio_values else np.average(audio_values)
return SegmentInfo( return SegmentInfo(
motion_count, active_count, region_count, round(average_dBFS) motion_count,
is_calibrating,
active_count,
region_count,
round(average_dBFS),
) )
async def move_segment( async def move_segment(
@ -492,6 +500,7 @@ class RecordingMaintainer(threading.Thread):
Recordings.end_time.name: end_time.timestamp(), Recordings.end_time.name: end_time.timestamp(),
Recordings.duration.name: duration, Recordings.duration.name: duration,
Recordings.motion.name: segment_info.motion_count, Recordings.motion.name: segment_info.motion_count,
Recordings.is_calibrating.name: segment_info.is_calibrating,
# TODO: update this to store list of active objects at some point # TODO: update this to store list of active objects at some point
Recordings.objects.name: segment_info.active_object_count, Recordings.objects.name: segment_info.active_object_count,
Recordings.regions.name: segment_info.region_count, Recordings.regions.name: segment_info.region_count,
@ -550,6 +559,7 @@ class RecordingMaintainer(threading.Thread):
current_tracked_objects, current_tracked_objects,
motion_boxes, motion_boxes,
regions, regions,
is_calibrating,
) = data ) = data
if self.config.cameras[camera].record.enabled: if self.config.cameras[camera].record.enabled:
@ -558,6 +568,7 @@ class RecordingMaintainer(threading.Thread):
frame_time, frame_time,
current_tracked_objects, current_tracked_objects,
motion_boxes, motion_boxes,
is_calibrating,
regions, regions,
) )
) )

View File

@ -474,6 +474,7 @@ class ReviewSegmentMaintainer(threading.Thread):
current_tracked_objects, current_tracked_objects,
_, _,
_, _,
_,
) = data ) = data
elif topic == DetectionTypeEnum.audio: elif topic == DetectionTypeEnum.audio:
( (

View File

@ -838,6 +838,7 @@ def process_frames(
detections, detections,
motion_boxes, motion_boxes,
regions, regions,
motion_detector.is_calibrating(),
) )
) )
camera_metrics.detection_fps.value = object_detector.fps.eps() camera_metrics.detection_fps.value = object_detector.fps.eps()

View File

@ -121,7 +121,7 @@ export function MotionReviewTimeline({
); );
const segmentMotion = const segmentMotion =
firstHalfMotionValue > 0 || secondHalfMotionValue > 0; firstHalfMotionValue.totalMotion > 0 || secondHalfMotionValue.totalMotion > 0;
if (segmentMotion && !overlappingReviewItems) { if (segmentMotion && !overlappingReviewItems) {
segments.push(segmentTime); segments.push(segmentTime);
} }

View File

@ -4,6 +4,7 @@ import { ReviewSegment } from "@/types/review";
import React, { useCallback, useEffect, useMemo, useRef } from "react"; import React, { useCallback, useEffect, useMemo, useRef } from "react";
import { MinimapBounds, Tick, Timestamp } from "./segment-metadata"; import { MinimapBounds, Tick, Timestamp } from "./segment-metadata";
import { useMotionSegmentUtils } from "@/hooks/use-motion-segment-utils"; import { useMotionSegmentUtils } from "@/hooks/use-motion-segment-utils";
import { MotionSegmentValue } from "@/hooks/use-motion-segment-utils";
import { isMobile } from "react-device-detect"; import { isMobile } from "react-device-detect";
import useTapUtils from "@/hooks/use-tap-utils"; import useTapUtils from "@/hooks/use-tap-utils";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
@ -13,8 +14,8 @@ type MotionSegmentProps = {
segmentTime: number; segmentTime: number;
segmentDuration: number; segmentDuration: number;
timestampSpread: number; timestampSpread: number;
firstHalfMotionValue: number; firstHalfMotionValue: MotionSegmentValue;
secondHalfMotionValue: number; secondHalfMotionValue: MotionSegmentValue;
motionOnly: boolean; motionOnly: boolean;
showMinimap: boolean; showMinimap: boolean;
minimapStartTime?: number; minimapStartTime?: number;
@ -77,12 +78,26 @@ export function MotionSegment({
}, []); }, []);
const firstHalfSegmentWidth = useMemo(() => { const firstHalfSegmentWidth = useMemo(() => {
return interpolateMotionAudioData(firstHalfMotionValue, maxSegmentWidth); return interpolateMotionAudioData(
}, [maxSegmentWidth, firstHalfMotionValue, interpolateMotionAudioData]); firstHalfMotionValue.totalMotion,
maxSegmentWidth,
);
}, [
maxSegmentWidth,
firstHalfMotionValue.totalMotion,
interpolateMotionAudioData,
]);
const secondHalfSegmentWidth = useMemo(() => { const secondHalfSegmentWidth = useMemo(() => {
return interpolateMotionAudioData(secondHalfMotionValue, maxSegmentWidth); return interpolateMotionAudioData(
}, [maxSegmentWidth, secondHalfMotionValue, interpolateMotionAudioData]); secondHalfMotionValue.totalMotion,
maxSegmentWidth,
);
}, [
maxSegmentWidth,
secondHalfMotionValue.totalMotion,
interpolateMotionAudioData,
]);
const alignedMinimapStartTime = useMemo( const alignedMinimapStartTime = useMemo(
() => alignStartDateToTimeline(minimapStartTime ?? 0), () => alignStartDateToTimeline(minimapStartTime ?? 0),
@ -222,6 +237,9 @@ export function MotionSegment({
secondHalfSegmentWidth secondHalfSegmentWidth
? "bg-motion_review" ? "bg-motion_review"
: "bg-muted-foreground", : "bg-muted-foreground",
secondHalfSegmentWidth && secondHalfMotionValue.isCalibrating
? "bg-motion_review_is_calibrating"
: "",
)} )}
style={{ style={{
width: secondHalfSegmentWidth || 1, width: secondHalfSegmentWidth || 1,
@ -241,6 +259,9 @@ export function MotionSegment({
firstHalfSegmentWidth firstHalfSegmentWidth
? "bg-motion_review" ? "bg-motion_review"
: "bg-muted-foreground", : "bg-muted-foreground",
firstHalfSegmentWidth && firstHalfMotionValue.isCalibrating
? "bg-motion_review_is_calibrating"
: "",
)} )}
style={{ style={{
width: firstHalfSegmentWidth || 1, width: firstHalfSegmentWidth || 1,

View File

@ -8,6 +8,7 @@ import React, {
} from "react"; } from "react";
import MotionSegment from "./MotionSegment"; import MotionSegment from "./MotionSegment";
import { ReviewSegment, MotionData } from "@/types/review"; import { ReviewSegment, MotionData } from "@/types/review";
import { MotionSegmentValue } from "@/hooks/use-motion-segment-utils";
type VirtualizedMotionSegmentsProps = { type VirtualizedMotionSegmentsProps = {
timelineRef: React.RefObject<HTMLDivElement>; timelineRef: React.RefObject<HTMLDivElement>;
@ -23,7 +24,7 @@ type VirtualizedMotionSegmentsProps = {
setHandlebarTime?: React.Dispatch<React.SetStateAction<number>>; setHandlebarTime?: React.Dispatch<React.SetStateAction<number>>;
dense: boolean; dense: boolean;
motionOnly: boolean; motionOnly: boolean;
getMotionSegmentValue: (timestamp: number) => number; getMotionSegmentValue: (timestamp: number) => MotionSegmentValue;
}; };
export interface VirtualizedMotionSegmentsRef { export interface VirtualizedMotionSegmentsRef {
@ -144,7 +145,8 @@ export const VirtualizedMotionSegments = forwardRef<
); );
const segmentMotion = const segmentMotion =
firstHalfMotionValue > 0 || secondHalfMotionValue > 0; firstHalfMotionValue.totalMotion > 0 ||
secondHalfMotionValue.totalMotion > 0;
const overlappingReviewItems = events.some( const overlappingReviewItems = events.some(
(item) => (item) =>
(item.start_time >= motionStart && item.start_time < motionEnd) || (item.start_time >= motionStart && item.start_time < motionEnd) ||

View File

@ -28,6 +28,8 @@ export const useEventSegmentUtils = (
const mapSeverityToNumber = useCallback((severity: string): number => { const mapSeverityToNumber = useCallback((severity: string): number => {
switch (severity) { switch (severity) {
case "calibration":
return -1;
case "significant_motion": case "significant_motion":
return 1; return 1;
case "detection": case "detection":

View File

@ -1,6 +1,11 @@
import { useCallback, useMemo } from "react"; import { useCallback, useMemo } from "react";
import { MotionData } from "@/types/review"; import { MotionData } from "@/types/review";
export type MotionSegmentValue = {
totalMotion: number;
isCalibrating: boolean;
};
export const useMotionSegmentUtils = ( export const useMotionSegmentUtils = (
segmentDuration: number, segmentDuration: number,
motion_events: MotionData[], motion_events: MotionData[],
@ -39,7 +44,7 @@ export const useMotionSegmentUtils = (
); );
const getMotionSegmentValue = useCallback( const getMotionSegmentValue = useCallback(
(time: number): number => { (time: number): MotionSegmentValue => {
const segmentStart = getSegmentStart(time); const segmentStart = getSegmentStart(time);
const segmentEnd = getSegmentEnd(time); const segmentEnd = getSegmentEnd(time);
const matchingEvents = motion_events.filter((event) => { const matchingEvents = motion_events.filter((event) => {
@ -52,8 +57,9 @@ export const useMotionSegmentUtils = (
(acc, curr) => acc + (curr.motion ?? 0), (acc, curr) => acc + (curr.motion ?? 0),
0, 0,
); );
const isCalibrating = matchingEvents.every((curr) => curr.is_calibrating);
return totalMotion; return { totalMotion: totalMotion, isCalibrating: isCalibrating };
}, },
[motion_events, getSegmentStart, getSegmentEnd], [motion_events, getSegmentStart, getSegmentEnd],
); );

View File

@ -57,6 +57,7 @@ export type RecordingsSummary = {
export type MotionData = { export type MotionData = {
start_time: number; start_time: number;
motion?: number; motion?: number;
is_calibrating?: boolean;
audio?: number; audio?: number;
camera: string; camera: string;
}; };

View File

@ -102,6 +102,9 @@ module.exports = {
DEFAULT: "hsl(var(--motion_review))", DEFAULT: "hsl(var(--motion_review))",
dimmed: "hsl(var(--motion_review_dimmed))", dimmed: "hsl(var(--motion_review_dimmed))",
}, },
motion_review_is_calibrating: {
DEFAULT: "hsl(var(--motion_review_is_calibrating))",
},
audio_review: { audio_review: {
DEFAULT: "hsl(var(--audio_review))", DEFAULT: "hsl(var(--audio_review))",
}, },

View File

@ -99,6 +99,9 @@
--motion_review_dimmed: hsl(44, 60%, 40%); --motion_review_dimmed: hsl(44, 60%, 40%);
--motion_review_dimmed: 44 60% 40%; --motion_review_dimmed: 44 60% 40%;
--motion_review_is_calibrating: hsl(44, 94%, 80%);
--motion_review_is_calibrating: 44 94% 80%;
--audio_review: hsl(228, 94%, 67%); --audio_review: hsl(228, 94%, 67%);
--audio_review: 228 94% 67%; --audio_review: 228 94% 67%;
} }

View File

@ -99,6 +99,9 @@
--motion_review_dimmed: hsl(44, 60%, 40%); --motion_review_dimmed: hsl(44, 60%, 40%);
--motion_review_dimmed: 44 60% 40%; --motion_review_dimmed: 44 60% 40%;
--motion_review_is_calibrating: hsl(44, 94%, 80%);
--motion_review_is_calibrating: 44 94% 80%;
--audio_review: hsl(228, 94%, 67%); --audio_review: hsl(228, 94%, 67%);
--audio_review: 228 94% 67%; --audio_review: 228 94% 67%;
} }