Indicate in the review timeline when motion was considered calibration.

This commit is contained in:
p-boon 2025-03-05 12:53:02 +01:00
parent d4946f19f5
commit 31dfd97493
17 changed files with 79 additions and 15 deletions

View File

@ -40,4 +40,5 @@ class ReviewSummaryResponse(BaseModel):
class ReviewActivityMotionResponse(BaseModel):
start_time: int
motion: float
is_calibrating: bool
camera: str

View File

@ -416,6 +416,7 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
Recordings.camera,
Recordings.start_time,
Recordings.motion,
Recordings.is_calibrating,
)
.where(reduce(operator.and_, clauses))
.order_by(Recordings.start_time.asc())
@ -424,7 +425,9 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
)
# resample data using pandas to get activity on scaled basis
df = pd.DataFrame(data, columns=["start_time", "motion", "camera"])
df = pd.DataFrame(
data, columns=["start_time", "motion", "is_calibrating", "camera"]
)
if df.empty:
logger.warning("No motion data found for the requested time range")
@ -445,7 +448,8 @@ def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
.to_frame()
)
cameras = df["camera"].resample(f"{scale}s").agg(lambda x: ",".join(set(x)))
df = motion.join(cameras)
calibrations = df["is_calibrating"].resample(f"{scale}s").apply(lambda x: all(x))
df = motion.join(cameras).join(calibrations)
length = df.shape[0]
chunk = int(60 * (60 / scale))

View File

@ -71,6 +71,7 @@ class Recordings(Model): # type: ignore[misc]
end_time = DateTimeField()
duration = FloatField()
motion = IntegerField(null=True)
is_calibrating = BooleanField(default=False)
objects = IntegerField(null=True)
dBFS = IntegerField(null=True)
segment_size = FloatField(default=0) # this should be stored as MB

View File

@ -689,6 +689,7 @@ class TrackedObjectProcessor(threading.Thread):
current_tracked_objects,
motion_boxes,
regions,
is_calibrating,
) = self.tracked_objects_queue.get(True, 1)
except queue.Empty:
continue
@ -714,6 +715,7 @@ class TrackedObjectProcessor(threading.Thread):
tracked_objects,
motion_boxes,
regions,
is_calibrating,
)
)

View File

@ -93,6 +93,7 @@ def output_frames(
current_tracked_objects,
motion_boxes,
_,
_,
) = data
frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv)
@ -168,6 +169,7 @@ def output_frames(
current_tracked_objects,
motion_boxes,
regions,
_,
) = data
frame = frame_manager.get(frame_name, config.cameras[camera].frame_shape_yuv)

View File

@ -45,11 +45,13 @@ class SegmentInfo:
def __init__(
self,
motion_count: int,
is_calibrating: bool,
active_object_count: int,
region_count: int,
average_dBFS: int,
) -> None:
self.motion_count = motion_count
self.is_calibrating = is_calibrating
self.active_object_count = active_object_count
self.region_count = region_count
self.average_dBFS = average_dBFS
@ -368,6 +370,7 @@ class RecordingMaintainer(threading.Thread):
active_count = 0
region_count = 0
motion_count = 0
is_calibrating = False
for frame in self.object_recordings_info[camera]:
# frame is after end time of segment
if frame[0] > end_time.timestamp():
@ -385,7 +388,8 @@ class RecordingMaintainer(threading.Thread):
]
)
motion_count += len(frame[2])
region_count += len(frame[3])
is_calibrating += frame[3]
region_count += len(frame[4])
audio_values = []
for frame in self.audio_recordings_info[camera]:
@ -406,7 +410,11 @@ class RecordingMaintainer(threading.Thread):
average_dBFS = 0 if not audio_values else np.average(audio_values)
return SegmentInfo(
motion_count, active_count, region_count, round(average_dBFS)
motion_count,
is_calibrating,
active_count,
region_count,
round(average_dBFS),
)
async def move_segment(
@ -492,6 +500,7 @@ class RecordingMaintainer(threading.Thread):
Recordings.end_time.name: end_time.timestamp(),
Recordings.duration.name: duration,
Recordings.motion.name: segment_info.motion_count,
Recordings.is_calibrating.name: segment_info.is_calibrating,
# TODO: update this to store list of active objects at some point
Recordings.objects.name: segment_info.active_object_count,
Recordings.regions.name: segment_info.region_count,
@ -550,6 +559,7 @@ class RecordingMaintainer(threading.Thread):
current_tracked_objects,
motion_boxes,
regions,
is_calibrating,
) = data
if self.config.cameras[camera].record.enabled:
@ -558,6 +568,7 @@ class RecordingMaintainer(threading.Thread):
frame_time,
current_tracked_objects,
motion_boxes,
is_calibrating,
regions,
)
)

View File

@ -474,6 +474,7 @@ class ReviewSegmentMaintainer(threading.Thread):
current_tracked_objects,
_,
_,
_,
) = data
elif topic == DetectionTypeEnum.audio:
(

View File

@ -838,6 +838,7 @@ def process_frames(
detections,
motion_boxes,
regions,
motion_detector.is_calibrating(),
)
)
camera_metrics.detection_fps.value = object_detector.fps.eps()

View File

@ -121,7 +121,7 @@ export function MotionReviewTimeline({
);
const segmentMotion =
firstHalfMotionValue > 0 || secondHalfMotionValue > 0;
firstHalfMotionValue.totalMotion > 0 || secondHalfMotionValue.totalMotion > 0;
if (segmentMotion && !overlappingReviewItems) {
segments.push(segmentTime);
}

View File

@ -4,6 +4,7 @@ import { ReviewSegment } from "@/types/review";
import React, { useCallback, useEffect, useMemo, useRef } from "react";
import { MinimapBounds, Tick, Timestamp } from "./segment-metadata";
import { useMotionSegmentUtils } from "@/hooks/use-motion-segment-utils";
import { MotionSegmentValue } from "@/hooks/use-motion-segment-utils";
import { isMobile } from "react-device-detect";
import useTapUtils from "@/hooks/use-tap-utils";
import { cn } from "@/lib/utils";
@ -13,8 +14,8 @@ type MotionSegmentProps = {
segmentTime: number;
segmentDuration: number;
timestampSpread: number;
firstHalfMotionValue: number;
secondHalfMotionValue: number;
firstHalfMotionValue: MotionSegmentValue;
secondHalfMotionValue: MotionSegmentValue;
motionOnly: boolean;
showMinimap: boolean;
minimapStartTime?: number;
@ -77,12 +78,26 @@ export function MotionSegment({
}, []);
const firstHalfSegmentWidth = useMemo(() => {
return interpolateMotionAudioData(firstHalfMotionValue, maxSegmentWidth);
}, [maxSegmentWidth, firstHalfMotionValue, interpolateMotionAudioData]);
return interpolateMotionAudioData(
firstHalfMotionValue.totalMotion,
maxSegmentWidth,
);
}, [
maxSegmentWidth,
firstHalfMotionValue.totalMotion,
interpolateMotionAudioData,
]);
const secondHalfSegmentWidth = useMemo(() => {
return interpolateMotionAudioData(secondHalfMotionValue, maxSegmentWidth);
}, [maxSegmentWidth, secondHalfMotionValue, interpolateMotionAudioData]);
return interpolateMotionAudioData(
secondHalfMotionValue.totalMotion,
maxSegmentWidth,
);
}, [
maxSegmentWidth,
secondHalfMotionValue.totalMotion,
interpolateMotionAudioData,
]);
const alignedMinimapStartTime = useMemo(
() => alignStartDateToTimeline(minimapStartTime ?? 0),
@ -222,6 +237,9 @@ export function MotionSegment({
secondHalfSegmentWidth
? "bg-motion_review"
: "bg-muted-foreground",
secondHalfSegmentWidth && secondHalfMotionValue.isCalibrating
? "bg-motion_review_is_calibrating"
: "",
)}
style={{
width: secondHalfSegmentWidth || 1,
@ -241,6 +259,9 @@ export function MotionSegment({
firstHalfSegmentWidth
? "bg-motion_review"
: "bg-muted-foreground",
firstHalfSegmentWidth && firstHalfMotionValue.isCalibrating
? "bg-motion_review_is_calibrating"
: "",
)}
style={{
width: firstHalfSegmentWidth || 1,

View File

@ -8,6 +8,7 @@ import React, {
} from "react";
import MotionSegment from "./MotionSegment";
import { ReviewSegment, MotionData } from "@/types/review";
import { MotionSegmentValue } from "@/hooks/use-motion-segment-utils";
type VirtualizedMotionSegmentsProps = {
timelineRef: React.RefObject<HTMLDivElement>;
@ -23,7 +24,7 @@ type VirtualizedMotionSegmentsProps = {
setHandlebarTime?: React.Dispatch<React.SetStateAction<number>>;
dense: boolean;
motionOnly: boolean;
getMotionSegmentValue: (timestamp: number) => number;
getMotionSegmentValue: (timestamp: number) => MotionSegmentValue;
};
export interface VirtualizedMotionSegmentsRef {
@ -144,7 +145,8 @@ export const VirtualizedMotionSegments = forwardRef<
);
const segmentMotion =
firstHalfMotionValue > 0 || secondHalfMotionValue > 0;
firstHalfMotionValue.totalMotion > 0 ||
secondHalfMotionValue.totalMotion > 0;
const overlappingReviewItems = events.some(
(item) =>
(item.start_time >= motionStart && item.start_time < motionEnd) ||

View File

@ -28,6 +28,8 @@ export const useEventSegmentUtils = (
const mapSeverityToNumber = useCallback((severity: string): number => {
switch (severity) {
case "calibration":
return -1;
case "significant_motion":
return 1;
case "detection":

View File

@ -1,6 +1,11 @@
import { useCallback, useMemo } from "react";
import { MotionData } from "@/types/review";
export type MotionSegmentValue = {
totalMotion: number;
isCalibrating: boolean;
};
export const useMotionSegmentUtils = (
segmentDuration: number,
motion_events: MotionData[],
@ -39,7 +44,7 @@ export const useMotionSegmentUtils = (
);
const getMotionSegmentValue = useCallback(
(time: number): number => {
(time: number): MotionSegmentValue => {
const segmentStart = getSegmentStart(time);
const segmentEnd = getSegmentEnd(time);
const matchingEvents = motion_events.filter((event) => {
@ -52,8 +57,9 @@ export const useMotionSegmentUtils = (
(acc, curr) => acc + (curr.motion ?? 0),
0,
);
const isCalibrating = matchingEvents.every((curr) => curr.is_calibrating);
return totalMotion;
return { totalMotion: totalMotion, isCalibrating: isCalibrating };
},
[motion_events, getSegmentStart, getSegmentEnd],
);

View File

@ -57,6 +57,7 @@ export type RecordingsSummary = {
export type MotionData = {
start_time: number;
motion?: number;
is_calibrating?: boolean;
audio?: number;
camera: string;
};

View File

@ -102,6 +102,9 @@ module.exports = {
DEFAULT: "hsl(var(--motion_review))",
dimmed: "hsl(var(--motion_review_dimmed))",
},
motion_review_is_calibrating: {
DEFAULT: "hsl(var(--motion_review_is_calibrating))",
},
audio_review: {
DEFAULT: "hsl(var(--audio_review))",
},

View File

@ -99,6 +99,9 @@
--motion_review_dimmed: hsl(44, 60%, 40%);
--motion_review_dimmed: 44 60% 40%;
--motion_review_is_calibrating: hsl(44, 94%, 80%);
--motion_review_is_calibrating: 44, 94%, 80%;
--audio_review: hsl(228, 94%, 67%);
--audio_review: 228 94% 67%;
}

View File

@ -99,6 +99,9 @@
--motion_review_dimmed: hsl(44, 60%, 40%);
--motion_review_dimmed: 44 60% 40%;
--motion_review_is_calibrating: hsl(44, 94%, 80%);
--motion_review_is_calibrating: 44, 94%, 80%;
--audio_review: hsl(228, 94%, 67%);
--audio_review: 228 94% 67%;
}