Improve motion review and add motion search (#22253)

* implement motion search and motion previews

* tweaks

* fix merge issue

* fix copilot instructions
This commit is contained in:
Josh Hawkins 2026-03-05 17:53:48 -06:00 committed by GitHub
parent 229436c94a
commit 2babfd2ec9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 6089 additions and 384 deletions

View File

@ -11,6 +11,7 @@ class Tags(Enum):
classification = "Classification"
logs = "Logs"
media = "Media"
motion_search = "Motion Search"
notifications = "Notifications"
preview = "Preview"
recordings = "Recordings"

View File

@ -22,6 +22,7 @@ from frigate.api import (
event,
export,
media,
motion_search,
notification,
preview,
record,
@ -135,6 +136,7 @@ def create_fastapi_app(
app.include_router(export.router)
app.include_router(event.router)
app.include_router(media.router)
app.include_router(motion_search.router)
app.include_router(record.router)
app.include_router(debug_replay.router)
# App Properties

View File

@ -0,0 +1,292 @@
"""Motion search API for detecting changes within a region of interest."""
import logging
from typing import Any, List, Optional
from fastapi import APIRouter, Depends, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
from frigate.api.auth import require_camera_access
from frigate.api.defs.tags import Tags
from frigate.jobs.motion_search import (
cancel_motion_search_job,
get_motion_search_job,
start_motion_search_job,
)
from frigate.types import JobStatusTypesEnum
logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.motion_search])
class MotionSearchRequest(BaseModel):
    """Request body for motion search.

    Polygon coordinates are normalized to the 0-1 range. Structural checks
    beyond the Field constraints (>= 3 polygon points, start < end) are
    performed in the endpoint handler, not by this model.
    """
    start_time: float = Field(description="Start timestamp for the search range")
    end_time: float = Field(description="End timestamp for the search range")
    polygon_points: List[List[float]] = Field(
        description="List of [x, y] normalized coordinates (0-1) defining the ROI polygon"
    )
    threshold: int = Field(
        default=30,
        ge=1,
        le=255,
        description="Pixel difference threshold (1-255)",
    )
    min_area: float = Field(
        default=5.0,
        ge=0.1,
        le=100.0,
        description="Minimum change area as a percentage of the ROI",
    )
    frame_skip: int = Field(
        default=5,
        ge=1,
        le=30,
        description="Process every Nth frame (1=all frames, 5=every 5th frame)",
    )
    parallel: bool = Field(
        default=False,
        description="Enable parallel scanning across segments",
    )
    max_results: int = Field(
        default=25,
        ge=1,
        le=200,
        description="Maximum number of search results to return",
    )
class MotionSearchResult(BaseModel):
    """A single search result with timestamp and change info.

    Serialized form of the backend's MotionSearchResult dataclass.
    """
    timestamp: float = Field(description="Timestamp where change was detected")
    change_percentage: float = Field(description="Percentage of ROI area that changed")
class MotionSearchMetricsResponse(BaseModel):
    """Metrics collected during motion search execution.

    Field-for-field mirror of the backend MotionSearchMetrics dataclass so
    its to_dict() output can be returned directly.
    """
    # Segments returned by the recordings query vs. fully handled.
    segments_scanned: int = 0
    segments_processed: int = 0
    # Segments skipped by the activity-metadata gate.
    metadata_inactive_segments: int = 0
    # Segments skipped because the stored heatmap missed the ROI.
    heatmap_roi_skip_segments: int = 0
    # Segments re-included when every segment was gated out.
    fallback_full_range_segments: int = 0
    frames_decoded: int = 0
    wall_time_seconds: float = 0.0
    segments_with_errors: int = 0
class MotionSearchStartResponse(BaseModel):
    """Response when motion search job starts.

    The returned job_id is used to poll the status endpoint.
    """
    success: bool
    message: str
    job_id: str
class MotionSearchStatusResponse(BaseModel):
    """Response containing job status and results.

    While the job is running, `results` may hold streamed partial results.
    """
    success: bool
    message: str
    status: str  # "queued", "running", "success", "failed", or "cancelled"
    results: Optional[List[MotionSearchResult]] = None
    total_frames_processed: Optional[int] = None
    # Populated only for failed jobs.
    error_message: Optional[str] = None
    metrics: Optional[MotionSearchMetricsResponse] = None
@router.post(
    "/{camera_name}/search/motion",
    response_model=MotionSearchStartResponse,
    dependencies=[Depends(require_camera_access)],
    summary="Start motion search job",
    description="""Starts an asynchronous search for significant motion changes within
    a user-defined Region of Interest (ROI) over a specified time range. Returns a job_id
    that can be used to poll for results.""",
)
async def start_motion_search(
    request: Request,
    camera_name: str,
    body: MotionSearchRequest,
):
    """Validate the request, then launch an asynchronous motion search job."""
    config = request.app.frigate_config

    def _reject(message: str, status_code: int) -> JSONResponse:
        # Uniform error envelope for all validation failures.
        return JSONResponse(
            content={"success": False, "message": message},
            status_code=status_code,
        )

    # Guard clauses: camera must exist, the polygon must be a real polygon,
    # and the time range must be non-empty.
    if camera_name not in config.cameras:
        return _reject(f"Camera {camera_name} not found", 404)
    if len(body.polygon_points) < 3:
        return _reject("Polygon must have at least 3 points", 400)
    if body.start_time >= body.end_time:
        return _reject("Start time must be before end time", 400)

    # Hand off to the jobs module; the search runs on a background thread.
    job_id = start_motion_search_job(
        config=config,
        camera_name=camera_name,
        start_time=body.start_time,
        end_time=body.end_time,
        polygon_points=body.polygon_points,
        threshold=body.threshold,
        min_area=body.min_area,
        frame_skip=body.frame_skip,
        parallel=body.parallel,
        max_results=body.max_results,
    )
    return JSONResponse(
        content={
            "success": True,
            "message": "Search job started",
            "job_id": job_id,
        }
    )
@router.get(
    "/{camera_name}/search/motion/{job_id}",
    response_model=MotionSearchStatusResponse,
    dependencies=[Depends(require_camera_access)],
    summary="Get motion search job status",
    description="Returns the status and results (if complete) of a motion search job.",
)
async def get_motion_search_status_endpoint(
    request: Request,
    camera_name: str,
    job_id: str,
):
    """Get the status of a motion search job.

    Polling target for the start endpoint: returns the job's current status,
    results (final on success, streamed partials while running), and
    execution metrics when available.
    """
    config = request.app.frigate_config
    if camera_name not in config.cameras:
        return JSONResponse(
            content={"success": False, "message": f"Camera {camera_name} not found"},
            status_code=404,
        )
    job = get_motion_search_job(job_id)
    if not job:
        return JSONResponse(
            content={"success": False, "message": "Job not found"},
            status_code=404,
        )
    api_status = job.status
    # Build response content
    # Only a failed job reports success=False; queued/running/cancelled are
    # still successful API calls.
    response_content: dict[str, Any] = {
        "success": api_status != JobStatusTypesEnum.failed,
        "status": api_status,
    }
    if api_status == JobStatusTypesEnum.failed:
        response_content["message"] = job.error_message or "Search failed"
        response_content["error_message"] = job.error_message
    elif api_status == JobStatusTypesEnum.cancelled:
        response_content["message"] = "Search cancelled"
        response_content["total_frames_processed"] = job.total_frames_processed
    elif api_status == JobStatusTypesEnum.success:
        response_content["message"] = "Search complete"
        if job.results:
            response_content["results"] = job.results.get("results", [])
            response_content["total_frames_processed"] = job.results.get(
                "total_frames_processed", job.total_frames_processed
            )
        else:
            # Completed without stored results (e.g. nothing found).
            response_content["results"] = []
            response_content["total_frames_processed"] = job.total_frames_processed
    else:
        # queued or running
        response_content["message"] = "Job processing"
        response_content["total_frames_processed"] = job.total_frames_processed
        # Include partial results if available (streaming)
        if job.results:
            response_content["results"] = job.results.get("results", [])
            response_content["total_frames_processed"] = job.results.get(
                "total_frames_processed", job.total_frames_processed
            )
    # Include metrics if available
    if job.metrics:
        response_content["metrics"] = job.metrics.to_dict()
    return JSONResponse(content=response_content)
@router.post(
    "/{camera_name}/search/motion/{job_id}/cancel",
    dependencies=[Depends(require_camera_access)],
    summary="Cancel motion search job",
    description="Cancels an active motion search job if it is still processing.",
)
async def cancel_motion_search_endpoint(
    request: Request,
    camera_name: str,
    job_id: str,
):
    """Request cancellation of a motion search job.

    Unknown camera or job yields 404; a job already in a terminal state is
    reported as success without re-cancelling; a failed cancel yields 500.
    """
    config = request.app.frigate_config
    if camera_name not in config.cameras:
        return JSONResponse(
            content={"success": False, "message": f"Camera {camera_name} not found"},
            status_code=404,
        )

    job = get_motion_search_job(job_id)
    if job is None:
        return JSONResponse(
            content={"success": False, "message": "Job not found"},
            status_code=404,
        )

    # Terminal jobs need no cancellation; echo their status back.
    api_status = job.status
    active_states = (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)
    if api_status not in active_states:
        return JSONResponse(
            content={
                "success": True,
                "message": "Job already finished",
                "status": api_status,
            }
        )

    if cancel_motion_search_job(job_id):
        return JSONResponse(
            content={
                "success": True,
                "message": "Search cancelled",
                "status": "cancelled",
            }
        )
    return JSONResponse(
        content={
            "success": False,
            "message": "Failed to cancel job",
        },
        status_code=500,
    )

View File

@ -261,6 +261,7 @@ async def recordings(
Recordings.segment_size, Recordings.segment_size,
Recordings.motion, Recordings.motion,
Recordings.objects, Recordings.objects,
Recordings.motion_heatmap,
Recordings.duration, Recordings.duration,
) )
.where( .where(

View File

@ -51,6 +51,7 @@ from frigate.embeddings import EmbeddingProcess, EmbeddingsContext
from frigate.events.audio import AudioProcessor from frigate.events.audio import AudioProcessor
from frigate.events.cleanup import EventCleanup from frigate.events.cleanup import EventCleanup
from frigate.events.maintainer import EventProcessor from frigate.events.maintainer import EventProcessor
from frigate.jobs.motion_search import stop_all_motion_search_jobs
from frigate.log import _stop_logging from frigate.log import _stop_logging
from frigate.models import ( from frigate.models import (
Event, Event,
@ -599,6 +600,9 @@ class FrigateApp:
# used by the docker healthcheck # used by the docker healthcheck
Path("/dev/shm/.frigate-is-stopping").touch() Path("/dev/shm/.frigate-is-stopping").touch()
# Cancel any running motion search jobs before setting stop_event
stop_all_motion_search_jobs()
self.stop_event.set() self.stop_event.set()
# set an end_time on entries without an end_time before exiting # set an end_time on entries without an end_time before exiting

View File

@ -0,0 +1,864 @@
"""Motion search job management with background execution and parallel verification."""
import logging
import os
import threading
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from dataclasses import asdict, dataclass, field
from datetime import datetime
from typing import Any, Optional
import cv2
import numpy as np
from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.const import UPDATE_JOB_STATE
from frigate.jobs.job import Job
from frigate.jobs.manager import (
get_job_by_id,
set_current_job,
)
from frigate.models import Recordings
from frigate.types import JobStatusTypesEnum
logger = logging.getLogger(__name__)
# Constants
# Motion heatmaps are stored sparsely over a 16x16 grid (256 cells); a cell
# is addressed as str(row * HEATMAP_GRID_SIZE + col). Must stay in sync with
# the Recordings.motion_heatmap column format (16x16 grid, values 0-255).
HEATMAP_GRID_SIZE = 16
@dataclass
class MotionSearchMetrics:
    """Metrics collected during motion search execution.

    Mutated in place by MotionSearchRunner while the job runs and surfaced
    to the API via to_dict().
    """
    # Segments returned by the recordings query vs. fully handled
    # (skipped by a gate, decoded, or errored).
    segments_scanned: int = 0
    segments_processed: int = 0
    # Segments skipped by the activity-metadata gate.
    metadata_inactive_segments: int = 0
    # Segments skipped because their motion heatmap missed the ROI.
    heatmap_roi_skip_segments: int = 0
    # Segments re-included when every segment was filtered out (full rescan).
    fallback_full_range_segments: int = 0
    frames_decoded: int = 0
    wall_time_seconds: float = 0.0
    segments_with_errors: int = 0

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary."""
        return asdict(self)
@dataclass
class MotionSearchResult:
    """A single search result with timestamp and change info."""
    # Timestamp (recording timeline) where the change was detected.
    timestamp: float
    # Changed area as a percentage of the ROI mask area, rounded to 2 places.
    change_percentage: float

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary."""
        return asdict(self)
@dataclass
class MotionSearchJob(Job):
    """Job state for motion search operations."""
    # Job-manager discriminator for this job family.
    job_type: str = "motion_search"
    # Search parameters (mirror the API request body).
    camera: str = ""
    start_time_range: float = 0.0
    end_time_range: float = 0.0
    polygon_points: list[list[float]] = field(default_factory=list)
    threshold: int = 30
    min_area: float = 5.0
    frame_skip: int = 5
    parallel: bool = False
    max_results: int = 25
    # Track progress
    total_frames_processed: int = 0
    # Metrics for observability
    metrics: Optional[MotionSearchMetrics] = None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary for WebSocket transmission."""
        d = asdict(self)
        if self.metrics:
            # asdict() already recursed into the nested dataclass; re-setting
            # through its own to_dict() keeps the wire format explicit.
            d["metrics"] = self.metrics.to_dict()
        return d
def create_polygon_mask(
    polygon_points: list[list[float]], frame_width: int, frame_height: int
) -> np.ndarray:
    """Rasterize normalized polygon coordinates into a binary uint8 mask.

    Pixels inside the polygon are set to 255, everything else stays 0.
    """
    mask = np.zeros((frame_height, frame_width), dtype=np.uint8)
    vertices = np.array(
        [[int(p[0] * frame_width), int(p[1] * frame_height)] for p in polygon_points],
        dtype=np.int32,
    )
    cv2.fillPoly(mask, [vertices], 255)
    return mask
def compute_roi_bbox_normalized(
    polygon_points: list[list[float]],
) -> tuple[float, float, float, float]:
    """Compute the axis-aligned bounding box of the ROI polygon.

    Returns (x_min, y_min, x_max, y_max) in normalized (0-1) coordinates;
    an empty polygon yields the full frame.
    """
    if not polygon_points:
        return (0.0, 0.0, 1.0, 1.0)
    xs, ys = zip(*((p[0], p[1]) for p in polygon_points))
    return (min(xs), min(ys), max(xs), max(ys))
def heatmap_overlaps_roi(
    heatmap: dict[str, int], roi_bbox: tuple[float, float, float, float]
) -> bool:
    """Check if a sparse motion heatmap has any overlap with the ROI bbox.

    Args:
        heatmap: Sparse dict mapping cell index (str) to intensity (1-255).
        roi_bbox: (x_min, y_min, x_max, y_max) in normalized coordinates (0-1).

    Returns:
        True if any active heatmap cell falls inside the ROI region. A
        malformed (non-dict) heatmap conservatively counts as overlapping.
    """
    if not isinstance(heatmap, dict):
        return True
    x_min, y_min, x_max, y_max = roi_bbox
    # Map the normalized bbox onto grid cells, clamped to 0..grid-1.
    col_lo = max(0, int(x_min * HEATMAP_GRID_SIZE))
    row_lo = max(0, int(y_min * HEATMAP_GRID_SIZE))
    col_hi = min(HEATMAP_GRID_SIZE - 1, int(x_max * HEATMAP_GRID_SIZE))
    row_hi = min(HEATMAP_GRID_SIZE - 1, int(y_max * HEATMAP_GRID_SIZE))
    # Any active cell inside the bbox means overlap.
    return any(
        str(row * HEATMAP_GRID_SIZE + col) in heatmap
        for row in range(row_lo, row_hi + 1)
        for col in range(col_lo, col_hi + 1)
    )
def segment_passes_activity_gate(recording: Recordings) -> bool:
    """Check if a segment passes the activity gate.

    Returns True if any of motion, objects, or regions is non-zero/non-null;
    also True when all three are null (old segments recorded before these
    columns existed), so legacy footage stays searchable.
    """
    indicators = (recording.motion, recording.objects, recording.regions)
    if all(value is None for value in indicators):
        # Old segments without metadata - pass through (conservative)
        return True
    return any(bool(value) for value in indicators)
def segment_passes_heatmap_gate(
    recording: Recordings, roi_bbox: tuple[float, float, float, float]
) -> bool:
    """Check if a segment passes the heatmap overlap gate.

    Segments persisted before heatmaps existed carry no data; they pass so
    the activity gate remains the only filter for them. Otherwise the gate
    passes only when the stored heatmap overlaps the ROI bbox.
    """
    heatmap = getattr(recording, "motion_heatmap", None)
    return True if heatmap is None else heatmap_overlaps_roi(heatmap, roi_bbox)
class MotionSearchRunner(threading.Thread):
    """Thread-based runner for motion search jobs with parallel verification.

    Owns one job for its lifetime: queries candidate recording segments,
    filters them through cheap metadata gates, decodes survivors with
    OpenCV, and streams results/metrics to subscribers over IPC.
    """

    def __init__(
        self,
        job: MotionSearchJob,
        config: FrigateConfig,
        cancel_event: threading.Event,
    ) -> None:
        super().__init__(daemon=True, name=f"motion_search_{job.id}")
        self.job = job
        self.config = config
        # cancel_event is set externally (API cancel / shutdown);
        # internal_stop_event is set by the runner once max_results is hit.
        self.cancel_event = cancel_event
        self.internal_stop_event = threading.Event()
        self.requestor = InterProcessRequestor()
        self.metrics = MotionSearchMetrics()
        self.job.metrics = self.metrics
        # Worker cap: min(4, cpu_count)
        cpu_count = os.cpu_count() or 1
        self.max_workers = min(4, cpu_count)

    def run(self) -> None:
        """Execute the motion search job."""
        try:
            self.job.status = JobStatusTypesEnum.running
            self.job.start_time = datetime.now().timestamp()
            self._broadcast_status()
            results = self._execute_search()
            if self.cancel_event.is_set():
                self.job.status = JobStatusTypesEnum.cancelled
            else:
                self.job.status = JobStatusTypesEnum.success
            # Store whatever was found (even for cancelled jobs) so partial
            # results remain visible to pollers.
            self.job.results = {
                "results": [r.to_dict() for r in results],
                "total_frames_processed": self.job.total_frames_processed,
            }
            self.job.end_time = datetime.now().timestamp()
            self.metrics.wall_time_seconds = self.job.end_time - self.job.start_time
            self.job.metrics = self.metrics
            logger.debug(
                "Motion search job %s completed: status=%s, results=%d, frames=%d",
                self.job.id,
                self.job.status,
                len(results),
                self.job.total_frames_processed,
            )
            self._broadcast_status()
        except Exception as e:
            logger.exception("Motion search job %s failed: %s", self.job.id, e)
            self.job.status = JobStatusTypesEnum.failed
            self.job.error_message = str(e)
            self.job.end_time = datetime.now().timestamp()
            self.metrics.wall_time_seconds = self.job.end_time - (
                self.job.start_time or 0
            )
            self.job.metrics = self.metrics
            self._broadcast_status()
        finally:
            if self.requestor:
                self.requestor.stop()

    def _broadcast_status(self) -> None:
        """Broadcast job status update via IPC to WebSocket subscribers."""
        if self.job.status == JobStatusTypesEnum.running and self.job.start_time:
            # Keep wall time live while the job is still running.
            self.metrics.wall_time_seconds = (
                datetime.now().timestamp() - self.job.start_time
            )
        try:
            self.requestor.send_data(UPDATE_JOB_STATE, self.job.to_dict())
        except Exception as e:
            # Broadcasting is best-effort; never kill the search over IPC.
            logger.warning("Failed to broadcast motion search status: %s", e)

    def _should_stop(self) -> bool:
        """Check if processing should stop due to cancellation or internal limits."""
        return self.cancel_event.is_set() or self.internal_stop_event.is_set()

    def _execute_search(self) -> list[MotionSearchResult]:
        """Main search execution logic: gate segments, then decode them."""
        camera_name = self.job.camera
        camera_config = self.config.cameras.get(camera_name)
        if not camera_config:
            raise ValueError(f"Camera {camera_name} not found")
        frame_width = camera_config.detect.width
        frame_height = camera_config.detect.height
        # Create polygon mask
        polygon_mask = create_polygon_mask(
            self.job.polygon_points, frame_width, frame_height
        )
        if np.count_nonzero(polygon_mask) == 0:
            logger.warning("Polygon mask is empty for job %s", self.job.id)
            return []
        # Compute ROI bbox in normalized coordinates for heatmap gate
        roi_bbox = compute_roi_bbox_normalized(self.job.polygon_points)
        # Query recordings overlapping the search range: segment starts in
        # range, ends in range, or fully contains the range.
        recordings = list(
            Recordings.select()
            .where(
                (
                    Recordings.start_time.between(
                        self.job.start_time_range, self.job.end_time_range
                    )
                )
                | (
                    Recordings.end_time.between(
                        self.job.start_time_range, self.job.end_time_range
                    )
                )
                | (
                    (self.job.start_time_range > Recordings.start_time)
                    & (self.job.end_time_range < Recordings.end_time)
                )
            )
            .where(Recordings.camera == camera_name)
            .order_by(Recordings.start_time.asc())
        )
        if not recordings:
            logger.debug("No recordings found for motion search job %s", self.job.id)
            return []
        logger.debug(
            "Motion search job %s: queried %d recording segments for camera %s "
            "(range %.1f - %.1f)",
            self.job.id,
            len(recordings),
            camera_name,
            self.job.start_time_range,
            self.job.end_time_range,
        )
        self.metrics.segments_scanned = len(recordings)
        # Apply activity and heatmap gates
        filtered_recordings = []
        for recording in recordings:
            if not segment_passes_activity_gate(recording):
                self.metrics.metadata_inactive_segments += 1
                self.metrics.segments_processed += 1
                logger.debug(
                    "Motion search job %s: segment %s skipped by activity gate "
                    "(motion=%s, objects=%s, regions=%s)",
                    self.job.id,
                    recording.id,
                    recording.motion,
                    recording.objects,
                    recording.regions,
                )
                continue
            if not segment_passes_heatmap_gate(recording, roi_bbox):
                self.metrics.heatmap_roi_skip_segments += 1
                self.metrics.segments_processed += 1
                logger.debug(
                    "Motion search job %s: segment %s skipped by heatmap gate "
                    "(heatmap present=%s, roi_bbox=%s)",
                    self.job.id,
                    recording.id,
                    recording.motion_heatmap is not None,
                    roi_bbox,
                )
                continue
            filtered_recordings.append(recording)
        self._broadcast_status()
        # Fallback: if all segments were filtered out, scan all segments
        # This allows motion search to find things the detector missed
        if not filtered_recordings and recordings:
            logger.info(
                "All %d segments filtered by gates, falling back to full scan",
                len(recordings),
            )
            self.metrics.fallback_full_range_segments = len(recordings)
            filtered_recordings = recordings
        logger.debug(
            "Motion search job %s: %d/%d segments passed gates "
            "(activity_skipped=%d, heatmap_skipped=%d)",
            self.job.id,
            len(filtered_recordings),
            len(recordings),
            self.metrics.metadata_inactive_segments,
            self.metrics.heatmap_roi_skip_segments,
        )
        if self.job.parallel:
            return self._search_motion_parallel(filtered_recordings, polygon_mask)
        return self._search_motion_sequential(filtered_recordings, polygon_mask)

    def _search_motion_parallel(
        self,
        recordings: list[Recordings],
        polygon_mask: np.ndarray,
    ) -> list[MotionSearchResult]:
        """Search for motion in parallel across segments, streaming results."""
        all_results: list[MotionSearchResult] = []
        total_frames = 0
        next_recording_idx_to_merge = 0
        logger.debug(
            "Motion search job %s: starting motion search with %d workers "
            "across %d segments",
            self.job.id,
            self.max_workers,
            len(recordings),
        )
        # Initialize partial results on the job so they stream to the frontend
        self.job.results = {"results": [], "total_frames_processed": 0}
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            futures: dict[Future, int] = {}
            completed_segments: dict[int, tuple[list[MotionSearchResult], int]] = {}
            for idx, recording in enumerate(recordings):
                if self._should_stop():
                    break
                future = executor.submit(
                    self._process_recording_for_motion,
                    recording.path,
                    recording.start_time,
                    recording.end_time,
                    self.job.start_time_range,
                    self.job.end_time_range,
                    polygon_mask,
                    self.job.threshold,
                    self.job.min_area,
                    self.job.frame_skip,
                )
                futures[future] = idx
            for future in as_completed(futures):
                if self._should_stop():
                    # Cancel remaining futures
                    for f in futures:
                        f.cancel()
                    break
                recording_idx = futures[future]
                recording = recordings[recording_idx]
                try:
                    results, frames = future.result()
                    self.metrics.segments_processed += 1
                    completed_segments[recording_idx] = (results, frames)
                    # Merge in submission order so streamed results stay
                    # chronologically sorted for deduplication.
                    while next_recording_idx_to_merge in completed_segments:
                        segment_results, segment_frames = completed_segments.pop(
                            next_recording_idx_to_merge
                        )
                        all_results.extend(segment_results)
                        total_frames += segment_frames
                        self.job.total_frames_processed = total_frames
                        self.metrics.frames_decoded = total_frames
                        if segment_results:
                            deduped = self._deduplicate_results(all_results)
                            self.job.results = {
                                "results": [
                                    r.to_dict() for r in deduped[: self.job.max_results]
                                ],
                                "total_frames_processed": total_frames,
                            }
                            self._broadcast_status()
                        # `deduped` is bound in the same iteration when
                        # segment_results is truthy, so this guard is safe.
                        if segment_results and len(deduped) >= self.job.max_results:
                            self.internal_stop_event.set()
                            for pending_future in futures:
                                pending_future.cancel()
                            break
                        next_recording_idx_to_merge += 1
                    if self.internal_stop_event.is_set():
                        break
                except Exception as e:
                    self.metrics.segments_processed += 1
                    self.metrics.segments_with_errors += 1
                    self._broadcast_status()
                    logger.warning(
                        "Error processing segment %s: %s",
                        recording.path,
                        e,
                    )
        self.job.total_frames_processed = total_frames
        self.metrics.frames_decoded = total_frames
        logger.debug(
            "Motion search job %s: motion search complete, "
            "found %d raw results, decoded %d frames, %d segment errors",
            self.job.id,
            len(all_results),
            total_frames,
            self.metrics.segments_with_errors,
        )
        # Sort and deduplicate results
        all_results.sort(key=lambda x: x.timestamp)
        return self._deduplicate_results(all_results)[: self.job.max_results]

    def _search_motion_sequential(
        self,
        recordings: list[Recordings],
        polygon_mask: np.ndarray,
    ) -> list[MotionSearchResult]:
        """Search for motion sequentially across segments, streaming results."""
        all_results: list[MotionSearchResult] = []
        total_frames = 0
        logger.debug(
            "Motion search job %s: starting sequential motion search across %d segments",
            self.job.id,
            len(recordings),
        )
        self.job.results = {"results": [], "total_frames_processed": 0}
        for recording in recordings:
            if self.cancel_event.is_set():
                break
            try:
                results, frames = self._process_recording_for_motion(
                    recording.path,
                    recording.start_time,
                    recording.end_time,
                    self.job.start_time_range,
                    self.job.end_time_range,
                    polygon_mask,
                    self.job.threshold,
                    self.job.min_area,
                    self.job.frame_skip,
                )
                all_results.extend(results)
                total_frames += frames
                self.job.total_frames_processed = total_frames
                self.metrics.frames_decoded = total_frames
                self.metrics.segments_processed += 1
                if results:
                    all_results.sort(key=lambda x: x.timestamp)
                    deduped = self._deduplicate_results(all_results)[
                        : self.job.max_results
                    ]
                    self.job.results = {
                        "results": [r.to_dict() for r in deduped],
                        "total_frames_processed": total_frames,
                    }
                    self._broadcast_status()
                # Stop early once enough deduplicated results were found.
                if results and len(deduped) >= self.job.max_results:
                    break
            except Exception as e:
                self.metrics.segments_processed += 1
                self.metrics.segments_with_errors += 1
                self._broadcast_status()
                logger.warning("Error processing segment %s: %s", recording.path, e)
        self.job.total_frames_processed = total_frames
        self.metrics.frames_decoded = total_frames
        logger.debug(
            "Motion search job %s: sequential motion search complete, "
            "found %d raw results, decoded %d frames, %d segment errors",
            self.job.id,
            len(all_results),
            total_frames,
            self.metrics.segments_with_errors,
        )
        all_results.sort(key=lambda x: x.timestamp)
        return self._deduplicate_results(all_results)[: self.job.max_results]

    def _deduplicate_results(
        self, results: list[MotionSearchResult], min_gap: float = 1.0
    ) -> list[MotionSearchResult]:
        """Deduplicate results that are too close together.

        Assumes `results` is sorted by timestamp; keeps the first result of
        every cluster closer than `min_gap` seconds.
        """
        if not results:
            return results
        deduplicated: list[MotionSearchResult] = []
        last_timestamp = 0.0
        for result in results:
            if result.timestamp - last_timestamp >= min_gap:
                deduplicated.append(result)
                last_timestamp = result.timestamp
        return deduplicated

    def _process_recording_for_motion(
        self,
        recording_path: str,
        recording_start: float,
        recording_end: float,
        search_start: float,
        search_end: float,
        polygon_mask: np.ndarray,
        threshold: int,
        min_area: float,
        frame_skip: int,
    ) -> tuple[list[MotionSearchResult], int]:
        """Process a single recording file for motion detection.

        This method is designed to be called from a thread pool.

        Args:
            min_area: Minimum change area as a percentage of the ROI (0-100).
        """
        results: list[MotionSearchResult] = []
        frames_processed = 0
        if not os.path.exists(recording_path):
            logger.warning("Recording file not found: %s", recording_path)
            return results, frames_processed
        cap = cv2.VideoCapture(recording_path)
        if not cap.isOpened():
            logger.error("Could not open recording: %s", recording_path)
            return results, frames_processed
        try:
            fps = cap.get(cv2.CAP_PROP_FPS) or 30.0
            total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
            recording_duration = recording_end - recording_start
            # Calculate frame range (clip the search window to this segment)
            start_offset = max(0, search_start - recording_start)
            end_offset = min(recording_duration, search_end - recording_start)
            start_frame = int(start_offset * fps)
            end_frame = int(end_offset * fps)
            start_frame = max(0, min(start_frame, total_frames - 1))
            end_frame = max(0, min(end_frame, total_frames))
            if start_frame >= end_frame:
                return results, frames_processed
            cap.set(cv2.CAP_PROP_POS_FRAMES, start_frame)
            # Get ROI bounding box
            roi_bbox = cv2.boundingRect(polygon_mask)
            roi_x, roi_y, roi_w, roi_h = roi_bbox
            prev_frame_gray = None
            frame_step = max(frame_skip, 1)
            frame_idx = start_frame
            while frame_idx < end_frame:
                if self._should_stop():
                    break
                ret, frame = cap.read()
                if not ret:
                    frame_idx += 1
                    continue
                if (frame_idx - start_frame) % frame_step != 0:
                    frame_idx += 1
                    continue
                frames_processed += 1
                gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
                # Handle frame dimension changes (the decoded frame may not
                # match the detect resolution the mask was built for)
                if gray.shape != polygon_mask.shape:
                    # FIX: the third positional parameter of cv2.resize is
                    # `dst`, not the interpolation flag — it must be passed
                    # by keyword. Nearest-neighbor keeps the mask binary.
                    resized_mask = cv2.resize(
                        polygon_mask,
                        (gray.shape[1], gray.shape[0]),
                        interpolation=cv2.INTER_NEAREST,
                    )
                    current_bbox = cv2.boundingRect(resized_mask)
                else:
                    resized_mask = polygon_mask
                    current_bbox = roi_bbox
                roi_x, roi_y, roi_w, roi_h = current_bbox
                cropped_gray = gray[roi_y : roi_y + roi_h, roi_x : roi_x + roi_w]
                cropped_mask = resized_mask[
                    roi_y : roi_y + roi_h, roi_x : roi_x + roi_w
                ]
                cropped_mask_area = np.count_nonzero(cropped_mask)
                if cropped_mask_area == 0:
                    frame_idx += 1
                    continue
                # Convert percentage to pixel count for this ROI
                min_area_pixels = int((min_area / 100.0) * cropped_mask_area)
                masked_gray = cv2.bitwise_and(
                    cropped_gray, cropped_gray, mask=cropped_mask
                )
                # A mid-segment resolution change would make absdiff raise on
                # mismatched shapes; restart differencing from this frame.
                if (
                    prev_frame_gray is not None
                    and prev_frame_gray.shape != masked_gray.shape
                ):
                    prev_frame_gray = None
                if prev_frame_gray is not None:
                    diff = cv2.absdiff(prev_frame_gray, masked_gray)
                    diff_blurred = cv2.GaussianBlur(diff, (3, 3), 0)
                    _, thresh = cv2.threshold(
                        diff_blurred, threshold, 255, cv2.THRESH_BINARY
                    )
                    thresh_dilated = cv2.dilate(thresh, None, iterations=1)
                    thresh_masked = cv2.bitwise_and(
                        thresh_dilated, thresh_dilated, mask=cropped_mask
                    )
                    change_pixels = cv2.countNonZero(thresh_masked)
                    if change_pixels > min_area_pixels:
                        contours, _ = cv2.findContours(
                            thresh_masked, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
                        )
                        # Only contours that individually clear min_area count,
                        # suppressing scattered single-pixel noise.
                        total_change_area = sum(
                            cv2.contourArea(c)
                            for c in contours
                            if cv2.contourArea(c) >= min_area_pixels
                        )
                        if total_change_area > 0:
                            frame_time_offset = (frame_idx - start_frame) / fps
                            timestamp = (
                                recording_start + start_offset + frame_time_offset
                            )
                            change_percentage = (
                                total_change_area / cropped_mask_area
                            ) * 100
                            results.append(
                                MotionSearchResult(
                                    timestamp=timestamp,
                                    change_percentage=round(change_percentage, 2),
                                )
                            )
                prev_frame_gray = masked_gray
                frame_idx += 1
        finally:
            cap.release()
        logger.debug(
            "Motion search segment complete: %s, %d frames processed, %d results found",
            recording_path,
            frames_processed,
            len(results),
        )
        return results, frames_processed
# Module-level state for managing per-camera jobs
# Maps job_id -> (job, cancel_event). Guarded by _jobs_lock because entries
# are created/cancelled from API threads while runner threads read them.
_motion_search_jobs: dict[str, tuple[MotionSearchJob, threading.Event]] = {}
_jobs_lock = threading.Lock()
def stop_all_motion_search_jobs() -> None:
    """Cancel all running motion search jobs for clean shutdown.

    Only signals the cancel events; runner threads observe them and wind
    down on their own.
    """
    active_states = (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)
    with _jobs_lock:
        for job_id, (job, cancel_event) in _motion_search_jobs.items():
            if job.status not in active_states:
                continue
            cancel_event.set()
            logger.debug("Signalling motion search job %s to stop", job_id)
def start_motion_search_job(
    config: FrigateConfig,
    camera_name: str,
    start_time: float,
    end_time: float,
    polygon_points: list[list[float]],
    threshold: int = 30,
    min_area: float = 5.0,
    frame_skip: int = 5,
    parallel: bool = False,
    max_results: int = 25,
) -> str:
    """Create, register, and launch a background motion search job.

    Returns the job ID.
    """
    job = MotionSearchJob(
        camera=camera_name,
        start_time_range=start_time,
        end_time_range=end_time,
        polygon_points=polygon_points,
        threshold=threshold,
        min_area=min_area,
        frame_skip=frame_skip,
        parallel=parallel,
        max_results=max_results,
    )
    cancel_event = threading.Event()
    # Register before starting the thread so status polls can find the job
    # immediately.
    with _jobs_lock:
        _motion_search_jobs[job.id] = (job, cancel_event)
    set_current_job(job)
    MotionSearchRunner(job, config, cancel_event).start()
    logger.debug(
        "Started motion search job %s for camera %s: "
        "time_range=%.1f-%.1f, threshold=%d, min_area=%.1f%%, "
        "frame_skip=%d, parallel=%s, max_results=%d, polygon_points=%d vertices",
        job.id,
        camera_name,
        start_time,
        end_time,
        threshold,
        min_area,
        frame_skip,
        parallel,
        max_results,
        len(polygon_points),
    )
    return job.id
def get_motion_search_job(job_id: str) -> Optional[MotionSearchJob]:
    """Look up a motion search job by ID.

    Checks the in-memory registry first, then falls back to the shared job
    manager for jobs no longer tracked here.
    """
    with _jobs_lock:
        entry = _motion_search_jobs.get(job_id)
    if entry is not None:
        return entry[0]
    # Check completed jobs via manager
    return get_job_by_id("motion_search", job_id)
def cancel_motion_search_job(job_id: str) -> bool:
    """Request cancellation of a motion search job.

    Returns:
        True when cancellation was initiated (or the job had already
        finished), False when no job with the given ID exists.
    """
    active_states = (JobStatusTypesEnum.queued, JobStatusTypesEnum.running)

    with _jobs_lock:
        entry = _motion_search_jobs.get(job_id)
        if not entry:
            return False
        job, stop_signal = entry
        if job.status not in active_states:
            # Already reached a terminal state; nothing to cancel.
            return True
        stop_signal.set()
        job.status = JobStatusTypesEnum.cancelled
        payload = job.to_dict()

    logger.info("Cancelled motion search job %s", job_id)

    # Best-effort broadcast of the cancelled state to other processes.
    broadcaster: Optional[InterProcessRequestor] = None
    try:
        broadcaster = InterProcessRequestor()
        broadcaster.send_data(UPDATE_JOB_STATE, payload)
    except Exception as e:
        logger.warning(
            "Failed to broadcast cancelled motion search job %s: %s", job_id, e
        )
    finally:
        if broadcaster:
            broadcaster.stop()

    return True

View File

@ -78,6 +78,7 @@ class Recordings(Model):
dBFS = IntegerField(null=True) dBFS = IntegerField(null=True)
segment_size = FloatField(default=0) # this should be stored as MB segment_size = FloatField(default=0) # this should be stored as MB
regions = IntegerField(null=True) regions = IntegerField(null=True)
motion_heatmap = JSONField(null=True) # 16x16 grid, 256 values (0-255)
class ExportCase(Model): class ExportCase(Model):

View File

@ -50,11 +50,13 @@ class SegmentInfo:
active_object_count: int, active_object_count: int,
region_count: int, region_count: int,
average_dBFS: int, average_dBFS: int,
motion_heatmap: dict[str, int] | None = None,
) -> None: ) -> None:
self.motion_count = motion_count self.motion_count = motion_count
self.active_object_count = active_object_count self.active_object_count = active_object_count
self.region_count = region_count self.region_count = region_count
self.average_dBFS = average_dBFS self.average_dBFS = average_dBFS
self.motion_heatmap = motion_heatmap
def should_discard_segment(self, retain_mode: RetainModeEnum) -> bool: def should_discard_segment(self, retain_mode: RetainModeEnum) -> bool:
keep = False keep = False
@ -454,6 +456,59 @@ class RecordingMaintainer(threading.Thread):
if end_time < retain_cutoff: if end_time < retain_cutoff:
self.drop_segment(cache_path) self.drop_segment(cache_path)
def _compute_motion_heatmap(
self, camera: str, motion_boxes: list[tuple[int, int, int, int]]
) -> dict[str, int] | None:
"""Compute a 16x16 motion intensity heatmap from motion boxes.
Returns a sparse dict mapping cell index (as string) to intensity (1-255).
Only cells with motion are included.
Args:
camera: Camera name to get detect dimensions from.
motion_boxes: List of (x1, y1, x2, y2) pixel coordinates.
Returns:
Sparse dict like {"45": 3, "46": 5}, or None if no boxes.
"""
if not motion_boxes:
return None
camera_config = self.config.cameras.get(camera)
if not camera_config:
return None
frame_width = camera_config.detect.width
frame_height = camera_config.detect.height
if frame_width <= 0 or frame_height <= 0:
return None
GRID_SIZE = 16
counts: dict[int, int] = {}
for box in motion_boxes:
if len(box) < 4:
continue
x1, y1, x2, y2 = box
# Convert pixel coordinates to grid cells
grid_x1 = max(0, int((x1 / frame_width) * GRID_SIZE))
grid_y1 = max(0, int((y1 / frame_height) * GRID_SIZE))
grid_x2 = min(GRID_SIZE - 1, int((x2 / frame_width) * GRID_SIZE))
grid_y2 = min(GRID_SIZE - 1, int((y2 / frame_height) * GRID_SIZE))
for y in range(grid_y1, grid_y2 + 1):
for x in range(grid_x1, grid_x2 + 1):
idx = y * GRID_SIZE + x
counts[idx] = min(255, counts.get(idx, 0) + 1)
if not counts:
return None
# Convert to string keys for JSON storage
return {str(k): v for k, v in counts.items()}
def segment_stats( def segment_stats(
self, camera: str, start_time: datetime.datetime, end_time: datetime.datetime self, camera: str, start_time: datetime.datetime, end_time: datetime.datetime
) -> SegmentInfo: ) -> SegmentInfo:
@ -461,6 +516,8 @@ class RecordingMaintainer(threading.Thread):
active_count = 0 active_count = 0
region_count = 0 region_count = 0
motion_count = 0 motion_count = 0
all_motion_boxes: list[tuple[int, int, int, int]] = []
for frame in self.object_recordings_info[camera]: for frame in self.object_recordings_info[camera]:
# frame is after end time of segment # frame is after end time of segment
if frame[0] > end_time.timestamp(): if frame[0] > end_time.timestamp():
@ -479,6 +536,8 @@ class RecordingMaintainer(threading.Thread):
) )
motion_count += len(frame[2]) motion_count += len(frame[2])
region_count += len(frame[3]) region_count += len(frame[3])
# Collect motion boxes for heatmap computation
all_motion_boxes.extend(frame[2])
audio_values = [] audio_values = []
for frame in self.audio_recordings_info[camera]: for frame in self.audio_recordings_info[camera]:
@ -498,8 +557,14 @@ class RecordingMaintainer(threading.Thread):
average_dBFS = 0 if not audio_values else np.average(audio_values) average_dBFS = 0 if not audio_values else np.average(audio_values)
motion_heatmap = self._compute_motion_heatmap(camera, all_motion_boxes)
return SegmentInfo( return SegmentInfo(
motion_count, active_count, region_count, round(average_dBFS) motion_count,
active_count,
region_count,
round(average_dBFS),
motion_heatmap,
) )
async def move_segment( async def move_segment(
@ -590,6 +655,7 @@ class RecordingMaintainer(threading.Thread):
Recordings.regions.name: segment_info.region_count, Recordings.regions.name: segment_info.region_count,
Recordings.dBFS.name: segment_info.average_dBFS, Recordings.dBFS.name: segment_info.average_dBFS,
Recordings.segment_size.name: segment_size, Recordings.segment_size.name: segment_size,
Recordings.motion_heatmap.name: segment_info.motion_heatmap,
} }
except Exception as e: except Exception as e:
logger.error(f"Unable to store recording segment {cache_path}") logger.error(f"Unable to store recording segment {cache_path}")

View File

@ -0,0 +1,34 @@
"""Peewee migrations -- 035_add_motion_heatmap.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import peewee as pw
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
    # Add a nullable TEXT column for the per-segment motion heatmap
    # (JSON-serialized sparse 16x16 grid of cell index -> intensity).
    migrator.sql('ALTER TABLE "recordings" ADD COLUMN "motion_heatmap" TEXT NULL')
def rollback(migrator, database, fake=False, **kwargs):
    # Intentionally a no-op: the added column is nullable, so leaving it
    # in place on rollback is harmless.
    pass

63
web/package-lock.json generated
View File

@ -22,6 +22,7 @@
"@radix-ui/react-hover-card": "^1.1.6", "@radix-ui/react-hover-card": "^1.1.6",
"@radix-ui/react-label": "^2.1.2", "@radix-ui/react-label": "^2.1.2",
"@radix-ui/react-popover": "^1.1.6", "@radix-ui/react-popover": "^1.1.6",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.2.3", "@radix-ui/react-radio-group": "^1.2.3",
"@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-select": "^2.1.6", "@radix-ui/react-select": "^2.1.6",
@ -2922,6 +2923,68 @@
} }
} }
}, },
"node_modules/@radix-ui/react-progress": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.8.tgz",
"integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-context": "1.1.3",
"@radix-ui/react-primitive": "2.1.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.3.tgz",
"integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==",
"license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz",
"integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-slot": "1.2.4"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-radio-group": { "node_modules/@radix-ui/react-radio-group": {
"version": "1.3.8", "version": "1.3.8",
"resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz", "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz",

View File

@ -28,6 +28,7 @@
"@radix-ui/react-hover-card": "^1.1.6", "@radix-ui/react-hover-card": "^1.1.6",
"@radix-ui/react-label": "^2.1.2", "@radix-ui/react-label": "^2.1.2",
"@radix-ui/react-popover": "^1.1.6", "@radix-ui/react-popover": "^1.1.6",
"@radix-ui/react-progress": "^1.1.8",
"@radix-ui/react-radio-group": "^1.2.3", "@radix-ui/react-radio-group": "^1.2.3",
"@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3",
"@radix-ui/react-select": "^2.1.6", "@radix-ui/react-select": "^2.1.6",

View File

@ -61,5 +61,25 @@
"detected": "detected", "detected": "detected",
"normalActivity": "Normal", "normalActivity": "Normal",
"needsReview": "Needs review", "needsReview": "Needs review",
"securityConcern": "Security concern" "securityConcern": "Security concern",
"motionSearch": {
"menuItem": "Motion search",
"openMenu": "Camera options"
},
"motionPreviews": {
"menuItem": "View motion previews",
"title": "Motion previews: {{camera}}",
"mobileSettingsTitle": "Motion Preview Settings",
"mobileSettingsDesc": "Adjust playback speed and dimming, and choose a date to review motion-only clips.",
"dim": "Dim",
"dimAria": "Adjust dimming intensity",
"dimDesc": "Increase dimming to increase motion area visibility.",
"speed": "Speed",
"speedAria": "Select preview playback speed",
"speedDesc": "Choose how quickly preview clips play.",
"back": "Back",
"empty": "No previews available",
"noPreview": "Preview unavailable",
"seekAria": "Seek {{camera}} player to {{time}}"
}
} }

View File

@ -0,0 +1,75 @@
{
"documentTitle": "Motion Search - Frigate",
"title": "Motion Search",
"description": "Draw a polygon to define the region of interest, and specify a time range to search for motion changes within that region.",
"selectCamera": "Motion Search is loading",
"startSearch": "Start Search",
"searchStarted": "Search started",
"searchCancelled": "Search cancelled",
"cancelSearch": "Cancel",
"searching": "Search in progress.",
"searchComplete": "Search complete",
"noResultsYet": "Run a search to find motion changes in the selected region",
"noChangesFound": "No pixel changes detected in the selected region",
"changesFound_one": "Found {{count}} motion change",
"changesFound_other": "Found {{count}} motion changes",
"framesProcessed": "{{count}} frames processed",
"jumpToTime": "Jump to this time",
"results": "Results",
"showSegmentHeatmap": "Heatmap",
"newSearch": "New Search",
"clearResults": "Clear Results",
"clearROI": "Clear polygon",
"polygonControls": {
"points_one": "{{count}} point",
"points_other": "{{count}} points",
"undo": "Undo last point",
"reset": "Reset polygon"
},
"motionHeatmapLabel": "Motion Heatmap",
"dialog": {
"title": "Motion Search",
"cameraLabel": "Camera",
"previewAlt": "Camera preview for {{camera}}"
},
"timeRange": {
"title": "Search Range",
"start": "Start time",
"end": "End time"
},
"settings": {
"title": "Search Settings",
"parallelMode": "Parallel mode",
"parallelModeDesc": "Scan multiple recording segments at the same time (faster, but significantly more CPU intensive)",
"threshold": "Sensitivity Threshold",
"thresholdDesc": "Lower values detect smaller changes (1-255)",
"minArea": "Minimum Change Area",
"minAreaDesc": "Minimum percentage of the region of interest that must change to be considered significant",
"frameSkip": "Frame Skip",
"frameSkipDesc": "Process every Nth frame. Set this to your camera's frame rate to process one frame per second (e.g. 5 for a 5 FPS camera, 30 for a 30 FPS camera). Higher values will be faster, but may miss short motion events.",
"maxResults": "Maximum Results",
"maxResultsDesc": "Stop after this many matching timestamps"
},
"errors": {
"noCamera": "Please select a camera",
"noROI": "Please draw a region of interest",
"noTimeRange": "Please select a time range",
"invalidTimeRange": "End time must be after start time",
"searchFailed": "Search failed: {{message}}",
"polygonTooSmall": "Polygon must have at least 3 points",
"unknown": "Unknown error"
},
"changePercentage": "{{percentage}}% changed",
"metrics": {
"title": "Search Metrics",
"segmentsScanned": "Segments scanned",
"segmentsProcessed": "Processed",
"segmentsSkippedInactive": "Skipped (no activity)",
"segmentsSkippedHeatmap": "Skipped (no ROI overlap)",
"fallbackFullRange": "Fallback full-range scan",
"framesDecoded": "Frames decoded",
"wallTime": "Search time",
"segmentErrors": "Segment errors",
"seconds": "{{seconds}}s"
}
}

View File

@ -1,5 +1,6 @@
import { import {
MutableRefObject, MutableRefObject,
ReactNode,
useCallback, useCallback,
useEffect, useEffect,
useRef, useRef,
@ -57,6 +58,7 @@ type HlsVideoPlayerProps = {
isDetailMode?: boolean; isDetailMode?: boolean;
camera?: string; camera?: string;
currentTimeOverride?: number; currentTimeOverride?: number;
transformedOverlay?: ReactNode;
}; };
export default function HlsVideoPlayer({ export default function HlsVideoPlayer({
@ -81,6 +83,7 @@ export default function HlsVideoPlayer({
isDetailMode = false, isDetailMode = false,
camera, camera,
currentTimeOverride, currentTimeOverride,
transformedOverlay,
}: HlsVideoPlayerProps) { }: HlsVideoPlayerProps) {
const { t } = useTranslation("components/player"); const { t } = useTranslation("components/player");
const { data: config } = useSWR<FrigateConfig>("config"); const { data: config } = useSWR<FrigateConfig>("config");
@ -350,157 +353,162 @@ export default function HlsVideoPlayer({
height: isMobile ? "100%" : undefined, height: isMobile ? "100%" : undefined,
}} }}
> >
{isDetailMode && <div className="relative size-full">
camera && {transformedOverlay}
currentTime && {isDetailMode &&
loadedMetadata && camera &&
videoDimensions.width > 0 && currentTime &&
videoDimensions.height > 0 && ( loadedMetadata &&
<div videoDimensions.width > 0 &&
className={cn( videoDimensions.height > 0 && (
"absolute inset-0 z-50", <div
isDesktop className={cn(
? "size-full" "absolute inset-0 z-50",
: "mx-auto flex items-center justify-center portrait:max-h-[50dvh]", isDesktop
)} ? "size-full"
style={{ : "mx-auto flex items-center justify-center portrait:max-h-[50dvh]",
aspectRatio: `${videoDimensions.width} / ${videoDimensions.height}`, )}
}} style={{
> aspectRatio: `${videoDimensions.width} / ${videoDimensions.height}`,
<ObjectTrackOverlay
key={`overlay-${currentTime}`}
camera={camera}
showBoundingBoxes={!isPlaying}
currentTime={currentTime}
videoWidth={videoDimensions.width}
videoHeight={videoDimensions.height}
className="absolute inset-0 z-10"
onSeekToTime={(timestamp, play) => {
if (onSeekToTime) {
onSeekToTime(timestamp, play);
}
}} }}
/> >
</div> <ObjectTrackOverlay
)} key={`overlay-${currentTime}`}
<video camera={camera}
ref={videoRef} showBoundingBoxes={!isPlaying}
className={`size-full rounded-lg bg-black md:rounded-2xl ${loadedMetadata ? "" : "invisible"} cursor-pointer`} currentTime={currentTime}
preload="auto" videoWidth={videoDimensions.width}
autoPlay videoHeight={videoDimensions.height}
controls={!frigateControls} className="absolute inset-0 z-10"
playsInline onSeekToTime={(timestamp, play) => {
muted={muted} if (onSeekToTime) {
onClick={ onSeekToTime(timestamp, play);
isDesktop }
? () => { }}
if (zoomScale == 1.0) onPlayPause(!isPlaying); />
} </div>
: undefined )}
} <video
onVolumeChange={() => { ref={videoRef}
setVolume(videoRef.current?.volume ?? 1.0, true); className={`size-full rounded-lg bg-black md:rounded-2xl ${loadedMetadata ? "" : "invisible"} cursor-pointer`}
if (!frigateControls) { preload="auto"
setMuted(videoRef.current?.muted); autoPlay
} controls={!frigateControls}
}} playsInline
onPlay={() => { muted={muted}
setIsPlaying(true); onClick={
isDesktop
if (isMobile) { ? () => {
setControls(true); if (zoomScale == 1.0) onPlayPause(!isPlaying);
setMobileCtrlTimeout(setTimeout(() => setControls(false), 4000));
}
}}
onPlaying={onPlaying}
onPause={() => {
setIsPlaying(false);
clearTimeout(bufferTimeout);
if (isMobile && mobileCtrlTimeout) {
clearTimeout(mobileCtrlTimeout);
}
}}
onWaiting={() => {
if (onError != undefined) {
if (videoRef.current?.paused) {
return;
}
setBufferTimeout(
setTimeout(() => {
if (
document.visibilityState === "visible" &&
videoRef.current
) {
onError("stalled");
} }
}, 3000), : undefined
);
} }
}} onVolumeChange={() => {
onProgress={() => { setVolume(videoRef.current?.volume ?? 1.0, true);
if (onError != undefined) { if (!frigateControls) {
if (videoRef.current?.paused) { setMuted(videoRef.current?.muted);
}
}}
onPlay={() => {
setIsPlaying(true);
if (isMobile) {
setControls(true);
setMobileCtrlTimeout(
setTimeout(() => setControls(false), 4000),
);
}
}}
onPlaying={onPlaying}
onPause={() => {
setIsPlaying(false);
clearTimeout(bufferTimeout);
if (isMobile && mobileCtrlTimeout) {
clearTimeout(mobileCtrlTimeout);
}
}}
onWaiting={() => {
if (onError != undefined) {
if (videoRef.current?.paused) {
return;
}
setBufferTimeout(
setTimeout(() => {
if (
document.visibilityState === "visible" &&
videoRef.current
) {
onError("stalled");
}
}, 3000),
);
}
}}
onProgress={() => {
if (onError != undefined) {
if (videoRef.current?.paused) {
return;
}
if (bufferTimeout) {
clearTimeout(bufferTimeout);
setBufferTimeout(undefined);
}
}
}}
onTimeUpdate={() => {
if (!onTimeUpdate) {
return; return;
} }
if (bufferTimeout) { const frameTime = getVideoTime();
clearTimeout(bufferTimeout);
setBufferTimeout(undefined); if (frameTime) {
onTimeUpdate(frameTime);
} }
} }}
}} onLoadedData={() => {
onTimeUpdate={() => { onPlayerLoaded?.();
if (!onTimeUpdate) { handleLoadedMetadata();
return;
}
const frameTime = getVideoTime(); if (videoRef.current) {
if (playbackRate) {
videoRef.current.playbackRate = playbackRate;
}
if (frameTime) { if (volume) {
onTimeUpdate(frameTime); videoRef.current.volume = volume;
} }
}}
onLoadedData={() => {
onPlayerLoaded?.();
handleLoadedMetadata();
if (videoRef.current) {
if (playbackRate) {
videoRef.current.playbackRate = playbackRate;
} }
}}
if (volume) { onEnded={() => {
videoRef.current.volume = volume; if (onClipEnded) {
onClipEnded(getVideoTime() ?? 0);
} }
} }}
}} onError={(e) => {
onEnded={() => { if (
if (onClipEnded) { !hlsRef.current &&
onClipEnded(getVideoTime() ?? 0);
}
}}
onError={(e) => {
if (
!hlsRef.current &&
// @ts-expect-error code does exist
unsupportedErrorCodes.includes(e.target.error.code) &&
videoRef.current
) {
setLoadedMetadata(false);
setUseHlsCompat(true);
} else {
toast.error(
// @ts-expect-error code does exist // @ts-expect-error code does exist
`Failed to play recordings (error ${e.target.error.code}): ${e.target.error.message}`, unsupportedErrorCodes.includes(e.target.error.code) &&
{ videoRef.current
position: "top-center", ) {
}, setLoadedMetadata(false);
); setUseHlsCompat(true);
} } else {
}} toast.error(
/> // @ts-expect-error code does exist
`Failed to play recordings (error ${e.target.error.code}): ${e.target.error.message}`,
{
position: "top-center",
},
);
}
}}
/>
</div>
</TransformComponent> </TransformComponent>
</TransformWrapper> </TransformWrapper>
); );

View File

@ -1,4 +1,11 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import {
ReactNode,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from "react";
import { useApiHost } from "@/api"; import { useApiHost } from "@/api";
import useSWR from "swr"; import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig"; import { FrigateConfig } from "@/types/frigateConfig";
@ -40,6 +47,7 @@ type DynamicVideoPlayerProps = {
setFullResolution: React.Dispatch<React.SetStateAction<VideoResolutionType>>; setFullResolution: React.Dispatch<React.SetStateAction<VideoResolutionType>>;
toggleFullscreen: () => void; toggleFullscreen: () => void;
containerRef?: React.MutableRefObject<HTMLDivElement | null>; containerRef?: React.MutableRefObject<HTMLDivElement | null>;
transformedOverlay?: ReactNode;
}; };
export default function DynamicVideoPlayer({ export default function DynamicVideoPlayer({
className, className,
@ -58,6 +66,7 @@ export default function DynamicVideoPlayer({
setFullResolution, setFullResolution,
toggleFullscreen, toggleFullscreen,
containerRef, containerRef,
transformedOverlay,
}: DynamicVideoPlayerProps) { }: DynamicVideoPlayerProps) {
const { t } = useTranslation(["components/player"]); const { t } = useTranslation(["components/player"]);
const apiHost = useApiHost(); const apiHost = useApiHost();
@ -312,6 +321,7 @@ export default function DynamicVideoPlayer({
isDetailMode={isDetailMode} isDetailMode={isDetailMode}
camera={contextCamera || camera} camera={contextCamera || camera}
currentTimeOverride={currentTime} currentTimeOverride={currentTime}
transformedOverlay={transformedOverlay}
/> />
)} )}
<PreviewPlayer <PreviewPlayer

View File

@ -25,6 +25,7 @@ export type MotionReviewTimelineProps = {
timestampSpread: number; timestampSpread: number;
timelineStart: number; timelineStart: number;
timelineEnd: number; timelineEnd: number;
scrollToTime?: number;
showHandlebar?: boolean; showHandlebar?: boolean;
handlebarTime?: number; handlebarTime?: number;
setHandlebarTime?: React.Dispatch<React.SetStateAction<number>>; setHandlebarTime?: React.Dispatch<React.SetStateAction<number>>;
@ -58,6 +59,7 @@ export function MotionReviewTimeline({
timestampSpread, timestampSpread,
timelineStart, timelineStart,
timelineEnd, timelineEnd,
scrollToTime,
showHandlebar = false, showHandlebar = false,
handlebarTime, handlebarTime,
setHandlebarTime, setHandlebarTime,
@ -176,6 +178,15 @@ export function MotionReviewTimeline({
[], [],
); );
// allow callers to request the timeline center on a specific time
useEffect(() => {
if (scrollToTime == undefined) return;
setTimeout(() => {
scrollToSegment(alignStartDateToTimeline(scrollToTime), true, "auto");
}, 0);
}, [scrollToTime, scrollToSegment, alignStartDateToTimeline]);
// keep handlebar centered when zooming // keep handlebar centered when zooming
useEffect(() => { useEffect(() => {
setTimeout(() => { setTimeout(() => {

View File

@ -343,9 +343,12 @@ export function ReviewTimeline({
useEffect(() => { useEffect(() => {
if (onHandlebarDraggingChange) { if (onHandlebarDraggingChange) {
onHandlebarDraggingChange(isDraggingHandlebar); // Keep existing callback name but treat it as a generic dragging signal.
// This allows consumers (e.g. export-handle timelines) to correctly
// enable preview scrubbing while dragging export handles.
onHandlebarDraggingChange(isDragging);
} }
}, [isDraggingHandlebar, onHandlebarDraggingChange]); }, [isDragging, onHandlebarDraggingChange]);
const isHandlebarInNoRecordingPeriod = useMemo(() => { const isHandlebarInNoRecordingPeriod = useMemo(() => {
if (!getRecordingAvailability || handlebarTime === undefined) return false; if (!getRecordingAvailability || handlebarTime === undefined) return false;

View File

@ -0,0 +1,26 @@
import * as React from "react"
import * as ProgressPrimitive from "@radix-ui/react-progress"
import { cn } from "@/lib/utils"
// Horizontal progress bar built on Radix Progress. `value` is expected in
// the 0-100 range; the indicator spans the full width and is translated
// left by (100 - value)%, so the visible fill matches the percentage.
// A missing/zero value renders an empty bar.
const Progress = React.forwardRef<
  React.ElementRef<typeof ProgressPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof ProgressPrimitive.Root>
>(({ className, value, ...props }, ref) => (
  <ProgressPrimitive.Root
    ref={ref}
    className={cn(
      "relative h-4 w-full overflow-hidden rounded-full bg-secondary",
      className
    )}
    {...props}
  >
    <ProgressPrimitive.Indicator
      className="h-full w-full flex-1 bg-primary transition-all"
      style={{ transform: `translateX(-${100 - (value || 0)}%)` }}
    />
  </ProgressPrimitive.Root>
))
Progress.displayName = ProgressPrimitive.Root.displayName
export { Progress }

View File

@ -8,14 +8,19 @@ import {
import { CameraConfig, FrigateConfig } from "@/types/frigateConfig"; import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
import { MotionData, ReviewSegment } from "@/types/review"; import { MotionData, ReviewSegment } from "@/types/review";
import { useCallback, useEffect, useMemo, useState } from "react"; import { useCallback, useEffect, useMemo, useState } from "react";
import { useTimelineUtils } from "./use-timeline-utils";
import { AudioDetection, ObjectType } from "@/types/ws"; import { AudioDetection, ObjectType } from "@/types/ws";
import { useTimelineUtils } from "./use-timeline-utils";
import useDeepMemo from "./use-deep-memo"; import useDeepMemo from "./use-deep-memo";
import { isEqual } from "lodash"; import { isEqual } from "lodash";
import { useAutoFrigateStats } from "./use-stats"; import { useAutoFrigateStats } from "./use-stats";
import useSWR from "swr"; import useSWR from "swr";
import { getAttributeLabels } from "@/utils/iconUtil"; import { getAttributeLabels } from "@/utils/iconUtil";
export type MotionOnlyRange = {
start_time: number;
end_time: number;
};
type useCameraActivityReturn = { type useCameraActivityReturn = {
enabled?: boolean; enabled?: boolean;
activeTracking: boolean; activeTracking: boolean;
@ -204,9 +209,9 @@ export function useCameraMotionNextTimestamp(
return []; return [];
} }
const ranges = []; const ranges: [number, number][] = [];
let currentSegmentStart = null; let currentSegmentStart: number | null = null;
let currentSegmentEnd = null; let currentSegmentEnd: number | null = null;
// align motion start to timeline start // align motion start to timeline start
const offset = const offset =
@ -215,13 +220,19 @@ export function useCameraMotionNextTimestamp(
segmentDuration; segmentDuration;
const startIndex = Math.abs(Math.floor(offset / 15)); const startIndex = Math.abs(Math.floor(offset / 15));
const now = Date.now() / 1000;
for ( for (
let i = startIndex; let i = startIndex;
i < motionData.length; i < motionData.length;
i = i + segmentDuration / 15 i = i + segmentDuration / 15
) { ) {
const motionStart = motionData[i].start_time; const motionStart = motionData[i]?.start_time;
if (motionStart == undefined) {
continue;
}
const motionEnd = motionStart + segmentDuration; const motionEnd = motionStart + segmentDuration;
const segmentMotion = motionData const segmentMotion = motionData
@ -230,10 +241,10 @@ export function useCameraMotionNextTimestamp(
const overlappingReviewItems = reviewItems.some( const overlappingReviewItems = reviewItems.some(
(item) => (item) =>
(item.start_time >= motionStart && item.start_time < motionEnd) || (item.start_time >= motionStart && item.start_time < motionEnd) ||
((item.end_time ?? Date.now() / 1000) > motionStart && ((item.end_time ?? now) > motionStart &&
(item.end_time ?? Date.now() / 1000) <= motionEnd) || (item.end_time ?? now) <= motionEnd) ||
(item.start_time <= motionStart && (item.start_time <= motionStart &&
(item.end_time ?? Date.now() / 1000) >= motionEnd), (item.end_time ?? now) >= motionEnd),
); );
if (!segmentMotion || overlappingReviewItems) { if (!segmentMotion || overlappingReviewItems) {
@ -241,16 +252,14 @@ export function useCameraMotionNextTimestamp(
currentSegmentStart = motionStart; currentSegmentStart = motionStart;
} }
currentSegmentEnd = motionEnd; currentSegmentEnd = motionEnd;
} else { } else if (currentSegmentStart !== null && currentSegmentEnd !== null) {
if (currentSegmentStart !== null) { ranges.push([currentSegmentStart, currentSegmentEnd]);
ranges.push([currentSegmentStart, currentSegmentEnd]); currentSegmentStart = null;
currentSegmentStart = null; currentSegmentEnd = null;
currentSegmentEnd = null;
}
} }
} }
if (currentSegmentStart !== null) { if (currentSegmentStart !== null && currentSegmentEnd !== null) {
ranges.push([currentSegmentStart, currentSegmentEnd]); ranges.push([currentSegmentStart, currentSegmentEnd]);
} }
@ -304,3 +313,93 @@ export function useCameraMotionNextTimestamp(
return nextTimestamp; return nextTimestamp;
} }
// Derives merged time ranges that contain motion but no overlapping review
// items ("motion-only" activity), e.g. for motion-only preview playback.
export function useCameraMotionOnlyRanges(
  segmentDuration: number,
  reviewItems: ReviewSegment[],
  motionData: MotionData[],
) {
  const motionOnlyRanges = useMemo(() => {
    if (!motionData?.length || !reviewItems) {
      return [];
    }

    // Span assigned to each motion sample when projecting it to a time
    // range: half a timeline segment, floored at 1 second.
    const fallbackBucketDuration = Math.max(1, segmentDuration / 2);

    // Deduplicate samples that share a start_time (keeping the max motion
    // value), then sort chronologically.
    const normalizedMotionData = Array.from(
      motionData
        .reduce((accumulator, item) => {
          const currentMotion = accumulator.get(item.start_time) ?? 0;
          accumulator.set(
            item.start_time,
            Math.max(currentMotion, item.motion ?? 0),
          );
          return accumulator;
        }, new Map<number, number>())
        .entries(),
    )
      .map(([start_time, motion]) => ({ start_time, motion }))
      .sort((left, right) => left.start_time - right.start_time);

    const bucketRanges: MotionOnlyRange[] = [];
    // Single timestamp used for open-ended review items (end_time missing).
    const now = Date.now() / 1000;

    for (let i = 0; i < normalizedMotionData.length; i++) {
      const motionStart = normalizedMotionData[i].start_time;
      const motionEnd = motionStart + fallbackBucketDuration;

      // A bucket is disqualified when any review item overlaps it
      // (starts inside it, ends inside it, or fully spans it).
      const overlappingReviewItems = reviewItems.some(
        (item) =>
          (item.start_time >= motionStart && item.start_time < motionEnd) ||
          ((item.end_time ?? now) > motionStart &&
            (item.end_time ?? now) <= motionEnd) ||
          (item.start_time <= motionStart &&
            (item.end_time ?? now) >= motionEnd),
      );

      const isMotionOnlySegment =
        (normalizedMotionData[i].motion ?? 0) > 0 && !overlappingReviewItems;

      if (!isMotionOnlySegment) {
        continue;
      }

      bucketRanges.push({
        start_time: motionStart,
        end_time: motionEnd,
      });
    }

    if (!bucketRanges.length) {
      return [];
    }

    // Merge back-to-back buckets (boundaries within 1ms) into single
    // contiguous ranges; non-adjacent buckets start a new range.
    const mergedRanges = bucketRanges.reduce<MotionOnlyRange[]>(
      (ranges, range) => {
        if (!ranges.length) {
          return [range];
        }
        const previousRange = ranges[ranges.length - 1];
        const isContiguous =
          range.start_time <= previousRange.end_time + 0.001 &&
          range.start_time >= previousRange.end_time - 0.001;
        if (isContiguous) {
          previousRange.end_time = Math.max(
            previousRange.end_time,
            range.end_time,
          );
          return ranges;
        }
        ranges.push(range);
        return ranges;
      },
      [],
    );

    return mergedRanges;
  }, [motionData, reviewItems, segmentDuration]);

  return motionOnlyRanges;
}

View File

@ -1,5 +1,6 @@
import ActivityIndicator from "@/components/indicators/activity-indicator"; import ActivityIndicator from "@/components/indicators/activity-indicator";
import useApiFilter from "@/hooks/use-api-filter"; import useApiFilter from "@/hooks/use-api-filter";
import { useAllowedCameras } from "@/hooks/use-allowed-cameras";
import { useCameraPreviews } from "@/hooks/use-camera-previews"; import { useCameraPreviews } from "@/hooks/use-camera-previews";
import { useTimezone } from "@/hooks/use-date-utils"; import { useTimezone } from "@/hooks/use-date-utils";
import { useOverlayState, useSearchEffect } from "@/hooks/use-overlay-state"; import { useOverlayState, useSearchEffect } from "@/hooks/use-overlay-state";
@ -21,6 +22,7 @@ import {
getEndOfDayTimestamp, getEndOfDayTimestamp,
} from "@/utils/dateUtil"; } from "@/utils/dateUtil";
import EventView from "@/views/events/EventView"; import EventView from "@/views/events/EventView";
import MotionSearchView from "@/views/motion-search/MotionSearchView";
import { RecordingView } from "@/views/recording/RecordingView"; import { RecordingView } from "@/views/recording/RecordingView";
import axios from "axios"; import axios from "axios";
import { useCallback, useEffect, useMemo, useState } from "react"; import { useCallback, useEffect, useMemo, useState } from "react";
@ -34,6 +36,7 @@ export default function Events() {
revalidateOnFocus: false, revalidateOnFocus: false,
}); });
const timezone = useTimezone(config); const timezone = useTimezone(config);
const allowedCameras = useAllowedCameras();
// recordings viewer // recordings viewer
@ -52,6 +55,74 @@ export default function Events() {
undefined, undefined,
false, false,
); );
const [motionPreviewsCamera, setMotionPreviewsCamera] = useOverlayState<
string | undefined
>("motionPreviewsCamera", undefined);
const [motionSearchCamera, setMotionSearchCamera] = useState<string | null>(
null,
);
const [motionSearchDay, setMotionSearchDay] = useState<Date | undefined>(
undefined,
);
const motionSearchCameras = useMemo(() => {
if (!config?.cameras) {
return [] as string[];
}
return Object.keys(config.cameras).filter((cam) =>
allowedCameras.includes(cam),
);
}, [allowedCameras, config?.cameras]);
const selectedMotionSearchCamera = useMemo(() => {
if (!motionSearchCamera) {
return null;
}
if (motionSearchCameras.includes(motionSearchCamera)) {
return motionSearchCamera;
}
return motionSearchCameras[0] ?? null;
}, [motionSearchCamera, motionSearchCameras]);
const motionSearchTimeRange = useMemo(() => {
if (motionSearchDay) {
return {
after: getBeginningOfDayTimestamp(new Date(motionSearchDay)),
before: getEndOfDayTimestamp(new Date(motionSearchDay)),
};
}
const now = Date.now() / 1000;
return {
after: now - 86400,
before: now,
};
}, [motionSearchDay]);
const closeMotionSearch = useCallback(() => {
setMotionSearchCamera(null);
setMotionSearchDay(undefined);
setBeforeTs(Date.now() / 1000);
}, []);
const handleMotionSearchCameraSelect = useCallback((camera: string) => {
setMotionSearchCamera(camera);
}, []);
const handleMotionSearchDaySelect = useCallback((day: Date | undefined) => {
if (day == undefined) {
setMotionSearchDay(undefined);
return;
}
const normalizedDay = new Date(day);
normalizedDay.setHours(0, 0, 0, 0);
setMotionSearchDay(normalizedDay);
}, []);
const [notificationTab, setNotificationTab] = const [notificationTab, setNotificationTab] =
useState<TimelineType>("timeline"); useState<TimelineType>("timeline");
@ -508,7 +579,24 @@ export default function Events() {
); );
} }
} else { } else {
return ( return motionSearchCamera ? (
!config || !selectedMotionSearchCamera ? (
<ActivityIndicator />
) : (
<MotionSearchView
config={config}
cameras={motionSearchCameras}
selectedCamera={selectedMotionSearchCamera}
onCameraSelect={handleMotionSearchCameraSelect}
cameraLocked={true}
selectedDay={motionSearchDay}
onDaySelect={handleMotionSearchDaySelect}
timeRange={motionSearchTimeRange}
timezone={timezone}
onBack={closeMotionSearch}
/>
)
) : (
<EventView <EventView
reviewItems={reviewItems} reviewItems={reviewItems}
currentReviewItems={currentItems} currentReviewItems={currentItems}
@ -525,6 +613,11 @@ export default function Events() {
markItemAsReviewed={markItemAsReviewed} markItemAsReviewed={markItemAsReviewed}
markAllItemsAsReviewed={markAllItemsAsReviewed} markAllItemsAsReviewed={markAllItemsAsReviewed}
onOpenRecording={setRecording} onOpenRecording={setRecording}
motionPreviewsCamera={motionPreviewsCamera ?? null}
setMotionPreviewsCamera={(camera) =>
setMotionPreviewsCamera(camera ?? undefined)
}
setMotionSearchCamera={setMotionSearchCamera}
pullLatestData={reloadData} pullLatestData={reloadData}
updateFilter={onUpdateFilter} updateFilter={onUpdateFilter}
/> />

View File

@ -0,0 +1,112 @@
import { useEffect, useMemo, useState, useCallback } from "react";
import { useTranslation } from "react-i18next";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { useTimezone } from "@/hooks/use-date-utils";
import MotionSearchView from "@/views/motion-search/MotionSearchView";
import {
getBeginningOfDayTimestamp,
getEndOfDayTimestamp,
} from "@/utils/dateUtil";
import { useAllowedCameras } from "@/hooks/use-allowed-cameras";
import { useSearchEffect } from "@/hooks/use-overlay-state";
import ActivityIndicator from "@/components/indicators/activity-indicator";
export default function MotionSearch() {
  const { t } = useTranslation(["views/motionSearch"]);

  const { data: config } = useSWR<FrigateConfig>("config", {
    revalidateOnFocus: false,
  });

  const timezone = useTimezone(config);

  // Keep the browser tab title in sync with the active translation.
  useEffect(() => {
    document.title = t("documentTitle");
  }, [t]);

  // Cameras the current user is allowed to search, in config order.
  const allowedCameras = useAllowedCameras();

  const cameras = useMemo(() => {
    const configured = config?.cameras;
    if (!configured) {
      return [];
    }
    return Object.keys(configured).filter((name) =>
      allowedCameras.includes(name),
    );
  }, [config?.cameras, allowedCameras]);

  // Currently selected camera; locked when set via the ?camera= param.
  const [selectedCamera, setSelectedCamera] = useState<string | null>(null);
  const [cameraLocked, setCameraLocked] = useState(false);

  // Honor a ?camera= search param once the camera list is available.
  useSearchEffect("camera", (camera: string) => {
    if (cameras.length > 0 && cameras.includes(camera)) {
      setSelectedCamera(camera);
      setCameraLocked(true);
    }
    return false;
  });

  // Fall back to the first allowed camera when nothing is selected yet.
  useEffect(() => {
    if (cameras.length > 0 && !selectedCamera) {
      setSelectedCamera(cameras[0]);
    }
  }, [cameras, selectedCamera]);

  // Selected calendar day; undefined means "last 24 hours".
  const [selectedDay, setSelectedDay] = useState<Date | undefined>(undefined);

  const timeRange = useMemo(() => {
    if (selectedDay) {
      return {
        after: getBeginningOfDayTimestamp(new Date(selectedDay)),
        before: getEndOfDayTimestamp(new Date(selectedDay)),
      };
    }
    // NOTE(review): evaluated only when selectedDay changes, so the rolling
    // 24h window is anchored at that render — matches the sibling Events view.
    const nowSeconds = Date.now() / 1000;
    return {
      after: nowSeconds - 86400,
      before: nowSeconds,
    };
  }, [selectedDay]);

  const handleCameraSelect = useCallback(
    (camera: string) => setSelectedCamera(camera),
    [],
  );

  // Normalize a picked day to local midnight; undefined clears the filter.
  const handleDaySelect = useCallback((day: Date | undefined) => {
    if (day == undefined) {
      setSelectedDay(undefined);
    } else {
      const midnight = new Date(day);
      midnight.setHours(0, 0, 0, 0);
      setSelectedDay(midnight);
    }
  }, []);

  // Show a spinner until the config and camera list have loaded.
  if (!config || cameras.length === 0) {
    return (
      <div className="flex size-full items-center justify-center">
        <ActivityIndicator />
      </div>
    );
  }

  return (
    <MotionSearchView
      config={config}
      cameras={cameras}
      selectedCamera={selectedCamera ?? null}
      onCameraSelect={handleCameraSelect}
      cameraLocked={cameraLocked}
      selectedDay={selectedDay}
      onDaySelect={handleDaySelect}
      timeRange={timeRange}
      timezone={timezone}
    />
  );
}

View File

@ -0,0 +1,46 @@
/**
* Types for the Motion Search feature
*/
/** One frame flagged as changed by a motion search job. */
export interface MotionSearchResult {
  /** Frame time; presumably a Unix timestamp in seconds — TODO confirm units. */
  timestamp: number;
  /** How much of the searched region changed, as a percentage. */
  change_percentage: number;
}

/** Request body for starting a motion search job over a time range. */
export interface MotionSearchRequest {
  /** Start of the search window (epoch seconds). */
  start_time: number;
  /** End of the search window (epoch seconds). */
  end_time: number;
  /** Region-of-interest polygon vertices; assumed [x, y] pairs — TODO confirm coordinate space. */
  polygon_points: number[][];
  /** Optional tuning: run segment processing in parallel. */
  parallel?: boolean;
  /** Optional tuning: change-detection threshold. */
  threshold?: number;
  /** Optional tuning: minimum changed area to count as motion. */
  min_area?: number;
  /** Optional tuning: process every Nth frame to reduce work. */
  frame_skip?: number;
  /** Optional cap on the number of results returned. */
  max_results?: number;
}

/** Response from the job-start endpoint; job_id is used to poll for status. */
export interface MotionSearchStartResponse {
  success: boolean;
  message: string;
  job_id: string;
}

/** Per-job processing counters reported alongside results. */
export interface MotionSearchMetrics {
  segments_scanned: number;
  segments_processed: number;
  /** Segments skipped because recording metadata marked them inactive. */
  metadata_inactive_segments: number;
  /** Segments skipped via the motion-heatmap ROI shortcut. */
  heatmap_roi_skip_segments: number;
  /** Segments processed with a full-range fallback scan. */
  fallback_full_range_segments: number;
  frames_decoded: number;
  wall_time_seconds: number;
  segments_with_errors: number;
}

/** Polled job status; results/metrics are present once the job finishes. */
export interface MotionSearchStatusResponse {
  success: boolean;
  message: string;
  /** Job lifecycle state — mirrors the backend job status enum. */
  status: "queued" | "running" | "success" | "failed" | "cancelled";
  /** Matches found so far or in total; absent until available. */
  results?: MotionSearchResult[];
  total_frames_processed?: number;
  /** Populated when status is "failed". */
  error_message?: string;
  metrics?: MotionSearchMetrics;
}

View File

@ -11,6 +11,7 @@ export type Recording = {
duration: number; duration: number;
motion: number; motion: number;
objects: number; objects: number;
motion_heatmap?: Record<string, number> | null;
dBFS: number; dBFS: number;
}; };

View File

@ -1,11 +1,21 @@
import Logo from "@/components/Logo"; import Logo from "@/components/Logo";
import NewReviewData from "@/components/dynamic/NewReviewData"; import NewReviewData from "@/components/dynamic/NewReviewData";
import CalendarFilterButton from "@/components/filter/CalendarFilterButton";
import ReviewActionGroup from "@/components/filter/ReviewActionGroup"; import ReviewActionGroup from "@/components/filter/ReviewActionGroup";
import ReviewFilterGroup from "@/components/filter/ReviewFilterGroup"; import ReviewFilterGroup from "@/components/filter/ReviewFilterGroup";
import PreviewThumbnailPlayer from "@/components/player/PreviewThumbnailPlayer"; import PreviewThumbnailPlayer from "@/components/player/PreviewThumbnailPlayer";
import EventReviewTimeline from "@/components/timeline/EventReviewTimeline"; import EventReviewTimeline from "@/components/timeline/EventReviewTimeline";
import ActivityIndicator from "@/components/indicators/activity-indicator"; import ActivityIndicator from "@/components/indicators/activity-indicator";
import { ToggleGroup, ToggleGroupItem } from "@/components/ui/toggle-group"; import { ToggleGroup, ToggleGroupItem } from "@/components/ui/toggle-group";
import { VolumeSlider } from "@/components/ui/slider";
import {
Select,
SelectContent,
SelectItem,
SelectSeparator,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { useTimelineUtils } from "@/hooks/use-timeline-utils"; import { useTimelineUtils } from "@/hooks/use-timeline-utils";
import { useScrollLockout } from "@/hooks/use-mouse-listener"; import { useScrollLockout } from "@/hooks/use-mouse-listener";
import { FrigateConfig } from "@/types/frigateConfig"; import { FrigateConfig } from "@/types/frigateConfig";
@ -22,6 +32,7 @@ import {
ZoomLevel, ZoomLevel,
} from "@/types/review"; } from "@/types/review";
import { getChunkedTimeRange } from "@/utils/timelineUtil"; import { getChunkedTimeRange } from "@/utils/timelineUtil";
import { getEndOfDayTimestamp } from "@/utils/dateUtil";
import axios from "axios"; import axios from "axios";
import { import {
MutableRefObject, MutableRefObject,
@ -34,9 +45,18 @@ import {
import { isDesktop, isMobile, isMobileOnly } from "react-device-detect"; import { isDesktop, isMobile, isMobileOnly } from "react-device-detect";
import { LuFolderCheck, LuFolderX } from "react-icons/lu"; import { LuFolderCheck, LuFolderX } from "react-icons/lu";
import { MdCircle } from "react-icons/md"; import { MdCircle } from "react-icons/md";
import { FiMoreVertical } from "react-icons/fi";
import { IoMdArrowRoundBack } from "react-icons/io";
import useSWR from "swr"; import useSWR from "swr";
import MotionReviewTimeline from "@/components/timeline/MotionReviewTimeline"; import MotionReviewTimeline from "@/components/timeline/MotionReviewTimeline";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import BlurredIconButton from "@/components/button/BlurredIconButton";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import PreviewPlayer, { import PreviewPlayer, {
PreviewController, PreviewController,
} from "@/components/player/PreviewPlayer"; } from "@/components/player/PreviewPlayer";
@ -44,7 +64,10 @@ import SummaryTimeline from "@/components/timeline/SummaryTimeline";
import { RecordingStartingPoint } from "@/types/record"; import { RecordingStartingPoint } from "@/types/record";
import VideoControls from "@/components/player/VideoControls"; import VideoControls from "@/components/player/VideoControls";
import { TimeRange } from "@/types/timeline"; import { TimeRange } from "@/types/timeline";
import { useCameraMotionNextTimestamp } from "@/hooks/use-camera-activity"; import {
useCameraMotionNextTimestamp,
useCameraMotionOnlyRanges,
} from "@/hooks/use-camera-activity";
import useOptimisticState from "@/hooks/use-optimistic-state"; import useOptimisticState from "@/hooks/use-optimistic-state";
import { Skeleton } from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import scrollIntoView from "scroll-into-view-if-needed"; import scrollIntoView from "scroll-into-view-if-needed";
@ -56,6 +79,10 @@ import { GiSoundWaves } from "react-icons/gi";
import useKeyboardListener from "@/hooks/use-keyboard-listener"; import useKeyboardListener from "@/hooks/use-keyboard-listener";
import { useTimelineZoom } from "@/hooks/use-timeline-zoom"; import { useTimelineZoom } from "@/hooks/use-timeline-zoom";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { FaCog } from "react-icons/fa";
import ReviewActivityCalendar from "@/components/overlay/ReviewActivityCalendar";
import PlatformAwareDialog from "@/components/overlay/dialog/PlatformAwareDialog";
import MotionPreviewsPane from "./MotionPreviewsPane";
import { EmptyCard } from "@/components/card/EmptyCard"; import { EmptyCard } from "@/components/card/EmptyCard";
import { EmptyCardData } from "@/types/card"; import { EmptyCardData } from "@/types/card";
@ -75,6 +102,9 @@ type EventViewProps = {
markItemAsReviewed: (review: ReviewSegment) => void; markItemAsReviewed: (review: ReviewSegment) => void;
markAllItemsAsReviewed: (currentItems: ReviewSegment[]) => void; markAllItemsAsReviewed: (currentItems: ReviewSegment[]) => void;
onOpenRecording: (recordingInfo: RecordingStartingPoint) => void; onOpenRecording: (recordingInfo: RecordingStartingPoint) => void;
motionPreviewsCamera: string | null;
setMotionPreviewsCamera: (camera: string | null) => void;
setMotionSearchCamera: (camera: string) => void;
pullLatestData: () => void; pullLatestData: () => void;
updateFilter: (filter: ReviewFilter) => void; updateFilter: (filter: ReviewFilter) => void;
}; };
@ -94,6 +124,9 @@ export default function EventView({
markItemAsReviewed, markItemAsReviewed,
markAllItemsAsReviewed, markAllItemsAsReviewed,
onOpenRecording, onOpenRecording,
motionPreviewsCamera,
setMotionPreviewsCamera,
setMotionSearchCamera,
pullLatestData, pullLatestData,
updateFilter, updateFilter,
}: EventViewProps) { }: EventViewProps) {
@ -274,6 +307,15 @@ export default function EventView({
100, 100,
); );
const motionPreviewsOpen =
severity === "significant_motion" && motionPreviewsCamera != null;
useEffect(() => {
if (severity !== "significant_motion") {
setMotionPreviewsCamera(null);
}
}, [setMotionPreviewsCamera, severity]);
// review filter info // review filter info
const reviewFilterList = useMemo<FilterList>(() => { const reviewFilterList = useMemo<FilterList>(() => {
@ -301,124 +343,136 @@ export default function EventView({
return ( return (
<div className="flex size-full flex-col pt-2 md:py-2"> <div className="flex size-full flex-col pt-2 md:py-2">
<Toaster closeButton={true} /> <Toaster closeButton={true} />
<div className="relative mb-2 flex h-11 items-center justify-between pl-2 pr-2 md:pl-3"> {!motionPreviewsOpen && (
{isMobile && ( <div className="relative mb-2 flex h-11 items-center justify-between pl-2 pr-2 md:pl-3">
<Logo className="absolute inset-x-1/2 h-8 -translate-x-1/2" /> {isMobile && (
)} <Logo className="absolute inset-x-1/2 h-8 -translate-x-1/2" />
<ToggleGroup )}
className="*:rounded-md *:px-3 *:py-4" <ToggleGroup
type="single" className="*:rounded-md *:px-3 *:py-4"
size="sm" type="single"
value={severityToggle} size="sm"
onValueChange={(value: ReviewSeverity) => value={severityToggle}
value ? setSeverityToggle(value) : null onValueChange={(value: ReviewSeverity) =>
} // don't allow the severity to be unselected value ? setSeverityToggle(value) : null
> } // don't allow the severity to be unselected
<ToggleGroupItem
className={cn(severityToggle != "alert" && "text-muted-foreground")}
value="alert"
aria-label={t("alerts")}
> >
<div <ToggleGroupItem
className={cn( className={cn(
"flex size-6 items-center justify-center rounded text-severity_alert sm:hidden", severityToggle != "alert" && "text-muted-foreground",
severityToggle == "alert" ? "font-semibold" : "font-medium",
)} )}
value="alert"
aria-label={t("alerts")}
> >
{reviewCounts.alert > -1 ? ( <div
reviewCounts.alert className={cn(
) : ( "flex size-6 items-center justify-center rounded text-severity_alert sm:hidden",
<ActivityIndicator className="size-4" /> severityToggle == "alert" ? "font-semibold" : "font-medium",
)} )}
</div> >
<div className="hidden items-center sm:flex">
<MdCircle className="size-2 text-severity_alert md:mr-[10px]" />
<div className="hidden md:flex md:flex-row md:items-center">
{t("alerts")}
{reviewCounts.alert > -1 ? ( {reviewCounts.alert > -1 ? (
`${reviewCounts.alert}` reviewCounts.alert
) : ( ) : (
<ActivityIndicator className="ml-2 size-4" /> <ActivityIndicator className="size-4" />
)} )}
</div> </div>
</div> <div className="hidden items-center sm:flex">
</ToggleGroupItem> <MdCircle className="size-2 text-severity_alert md:mr-[10px]" />
<ToggleGroupItem <div className="hidden md:flex md:flex-row md:items-center">
className={cn( {t("alerts")}
severityToggle != "detection" && "text-muted-foreground", {reviewCounts.alert > -1 ? (
)} `${reviewCounts.alert}`
value="detection" ) : (
aria-label={t("detections")} <ActivityIndicator className="ml-2 size-4" />
> )}
<div </div>
</div>
</ToggleGroupItem>
<ToggleGroupItem
className={cn( className={cn(
"flex size-6 items-center justify-center rounded text-severity_detection sm:hidden", severityToggle != "detection" && "text-muted-foreground",
severityToggle == "detection" ? "font-semibold" : "font-medium",
)} )}
value="detection"
aria-label={t("detections")}
> >
{reviewCounts.detection > -1 ? ( <div
reviewCounts.detection className={cn(
) : ( "flex size-6 items-center justify-center rounded text-severity_detection sm:hidden",
<ActivityIndicator className="size-4" /> severityToggle == "detection"
)} ? "font-semibold"
</div> : "font-medium",
<div className="hidden items-center sm:flex"> )}
<MdCircle className="size-2 text-severity_detection md:mr-[10px]" /> >
<div className="hidden md:flex md:flex-row md:items-center">
{t("detections")}
{reviewCounts.detection > -1 ? ( {reviewCounts.detection > -1 ? (
`${reviewCounts.detection}` reviewCounts.detection
) : ( ) : (
<ActivityIndicator className="ml-2 size-4" /> <ActivityIndicator className="size-4" />
)} )}
</div> </div>
</div> <div className="hidden items-center sm:flex">
</ToggleGroupItem> <MdCircle className="size-2 text-severity_detection md:mr-[10px]" />
<ToggleGroupItem <div className="hidden md:flex md:flex-row md:items-center">
className={cn( {t("detections")}
"rounded-lg px-3 py-4", {reviewCounts.detection > -1 ? (
severityToggle != "significant_motion" && "text-muted-foreground", `${reviewCounts.detection}`
)} ) : (
value="significant_motion" <ActivityIndicator className="ml-2 size-4" />
aria-label={t("motion.label")} )}
> </div>
<GiSoundWaves className="size-6 rotate-90 text-severity_significant_motion sm:hidden" /> </div>
<div className="hidden items-center sm:flex"> </ToggleGroupItem>
<MdCircle className="size-2 text-severity_significant_motion md:mr-[10px]" /> <ToggleGroupItem
<div className="hidden md:block">{t("motion.label")}</div> className={cn(
</div> "rounded-lg px-3 py-4",
</ToggleGroupItem> severityToggle != "significant_motion" &&
</ToggleGroup> "text-muted-foreground",
)}
value="significant_motion"
aria-label={t("motion.label")}
>
<GiSoundWaves className="size-6 rotate-90 text-severity_significant_motion sm:hidden" />
<div className="hidden items-center sm:flex">
<MdCircle className="size-2 text-severity_significant_motion md:mr-[10px]" />
<div className="hidden md:block">{t("motion.label")}</div>
</div>
</ToggleGroupItem>
</ToggleGroup>
{selectedReviews.length <= 0 ? ( {selectedReviews.length <= 0 ? (
<ReviewFilterGroup <ReviewFilterGroup
filters={ filters={
severity == "significant_motion" severity == "significant_motion"
? ["cameras", "date", "motionOnly"] ? ["cameras", "date", "motionOnly"]
: ["cameras", "reviewed", "date", "general"] : ["cameras", "reviewed", "date", "general"]
} }
currentSeverity={severityToggle} currentSeverity={severityToggle}
reviewSummary={reviewSummary} reviewSummary={reviewSummary}
recordingsSummary={recordingsSummary} recordingsSummary={recordingsSummary}
filter={filter} filter={filter}
motionOnly={motionOnly} motionOnly={motionOnly}
filterList={reviewFilterList} filterList={reviewFilterList}
showReviewed={showReviewed} showReviewed={showReviewed}
setShowReviewed={setShowReviewed} setShowReviewed={setShowReviewed}
onUpdateFilter={updateFilter} onUpdateFilter={updateFilter}
setMotionOnly={setMotionOnly} setMotionOnly={setMotionOnly}
/> />
) : ( ) : (
<ReviewActionGroup <ReviewActionGroup
selectedReviews={selectedReviews} selectedReviews={selectedReviews}
setSelectedReviews={setSelectedReviews} setSelectedReviews={setSelectedReviews}
onExport={exportReview} onExport={exportReview}
pullLatestData={pullLatestData} pullLatestData={pullLatestData}
/> />
)}
</div>
)}
<div
className={cn(
"h-full min-h-0 overflow-hidden",
motionPreviewsOpen ? "flex flex-col" : "flex",
)} )}
</div> >
<div className="flex h-full overflow-hidden">
{severity != "significant_motion" && ( {severity != "significant_motion" && (
<DetectionReview <DetectionReview
contentRef={contentRef} contentRef={contentRef}
@ -447,10 +501,16 @@ export default function EventView({
contentRef={contentRef} contentRef={contentRef}
reviewItems={reviewItems} reviewItems={reviewItems}
relevantPreviews={relevantPreviews} relevantPreviews={relevantPreviews}
reviewSummary={reviewSummary}
recordingsSummary={recordingsSummary}
timeRange={timeRange} timeRange={timeRange}
startTime={startTime} startTime={startTime}
filter={filter} filter={filter}
motionOnly={motionOnly} motionOnly={motionOnly}
updateFilter={updateFilter}
motionPreviewsCamera={motionPreviewsCamera}
setMotionPreviewsCamera={setMotionPreviewsCamera}
setMotionSearchCamera={setMotionSearchCamera}
emptyCardData={emptyCardData} emptyCardData={emptyCardData}
onOpenRecording={onOpenRecording} onOpenRecording={onOpenRecording}
/> />
@ -898,10 +958,16 @@ type MotionReviewProps = {
significant_motion: ReviewSegment[]; significant_motion: ReviewSegment[];
}; };
relevantPreviews?: Preview[]; relevantPreviews?: Preview[];
reviewSummary?: ReviewSummary;
recordingsSummary?: RecordingsSummary;
timeRange: TimeRange; timeRange: TimeRange;
startTime?: number; startTime?: number;
filter?: ReviewFilter; filter?: ReviewFilter;
motionOnly?: boolean; motionOnly?: boolean;
updateFilter: (filter: ReviewFilter) => void;
motionPreviewsCamera: string | null;
setMotionPreviewsCamera: (camera: string | null) => void;
setMotionSearchCamera: (camera: string) => void;
emptyCardData: EmptyCardData; emptyCardData: EmptyCardData;
onOpenRecording: (data: RecordingStartingPoint) => void; onOpenRecording: (data: RecordingStartingPoint) => void;
}; };
@ -909,13 +975,20 @@ function MotionReview({
contentRef, contentRef,
reviewItems, reviewItems,
relevantPreviews, relevantPreviews,
reviewSummary,
recordingsSummary,
timeRange, timeRange,
startTime, startTime,
filter, filter,
motionOnly = false, motionOnly = false,
updateFilter,
motionPreviewsCamera,
setMotionPreviewsCamera,
setMotionSearchCamera,
emptyCardData, emptyCardData,
onOpenRecording, onOpenRecording,
}: MotionReviewProps) { }: MotionReviewProps) {
const { t } = useTranslation(["views/events", "common"]);
const segmentDuration = 30; const segmentDuration = 30;
const { data: config } = useSWR<FrigateConfig>("config"); const { data: config } = useSWR<FrigateConfig>("config");
@ -961,6 +1034,15 @@ function MotionReview({
}, },
]); ]);
const { data: overlapReviewSegments } = useSWR<ReviewSegment[]>([
"review",
{
before: alignedBefore,
after: alignedAfter,
cameras: filter?.cameras?.join(",") ?? null,
},
]);
// timeline time // timeline time
const timeRangeSegments = useMemo( const timeRangeSegments = useMemo(
@ -973,19 +1055,29 @@ function MotionReview({
return timeRangeSegments.ranges.length - 1; return timeRangeSegments.ranges.length - 1;
} }
return timeRangeSegments.ranges.findIndex( const index = timeRangeSegments.ranges.findIndex(
(seg) => seg.after <= startTime && seg.before >= startTime, (seg) => seg.after <= startTime && seg.before >= startTime,
); );
if (index === -1) {
return timeRangeSegments.ranges.length - 1;
}
return index;
// only render once // only render once
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);
const [selectedRangeIdx, setSelectedRangeIdx] = useState(initialIndex); const [selectedRangeIdx, setSelectedRangeIdx] = useState(initialIndex);
const [currentTime, setCurrentTime] = useState<number>( const [currentTime, setCurrentTime] = useState<number>(
startTime ?? timeRangeSegments.ranges[selectedRangeIdx]?.before, startTime ??
timeRangeSegments.ranges[selectedRangeIdx]?.before ??
timeRangeSegments.end,
); );
const currentTimeRange = useMemo( const currentTimeRange = useMemo(
() => timeRangeSegments.ranges[selectedRangeIdx], () =>
timeRangeSegments.ranges[selectedRangeIdx] ??
timeRangeSegments.ranges[timeRangeSegments.ranges.length - 1],
[selectedRangeIdx, timeRangeSegments], [selectedRangeIdx, timeRangeSegments],
); );
@ -1023,18 +1115,86 @@ function MotionReview({
const [playbackRate, setPlaybackRate] = useState(8); const [playbackRate, setPlaybackRate] = useState(8);
const [controlsOpen, setControlsOpen] = useState(false); const [controlsOpen, setControlsOpen] = useState(false);
const [dimStrength, setDimStrength] = useState(82);
const [isPreviewSettingsOpen, setIsPreviewSettingsOpen] = useState(false);
const objectReviewItems = useMemo(
() =>
(overlapReviewSegments ?? []).filter(
(item) =>
item.severity === "alert" ||
item.severity === "detection" ||
(item.data.detections?.length ?? 0) > 0 ||
(item.data.objects?.length ?? 0) > 0,
),
[overlapReviewSegments],
);
const nextTimestamp = useCameraMotionNextTimestamp( const nextTimestamp = useCameraMotionNextTimestamp(
timeRangeSegments.end, timeRangeSegments.end,
segmentDuration, segmentDuration,
motionOnly, motionOnly,
reviewItems?.all ?? [], objectReviewItems,
motionData ?? [], motionData ?? [],
currentTime, currentTime,
); );
const timeoutIdRef = useRef<NodeJS.Timeout | null>(null); const timeoutIdRef = useRef<NodeJS.Timeout | null>(null);
const selectedMotionPreviewCamera = useMemo(
() =>
reviewCameras.find((camera) => camera.name === motionPreviewsCamera) ??
null,
[motionPreviewsCamera, reviewCameras],
);
const onUpdateSelectedDay = useCallback(
(day?: Date) => {
updateFilter({
...filter,
after: day == undefined ? undefined : day.getTime() / 1000,
before: day == undefined ? undefined : getEndOfDayTimestamp(day),
});
},
[filter, updateFilter],
);
const selectedCameraMotionData = useMemo(() => {
if (!motionPreviewsCamera) {
return [];
}
return (motionData ?? []).filter((item) => {
const cameras = item.camera.split(",").map((camera) => camera.trim());
return cameras.includes(motionPreviewsCamera);
});
}, [motionData, motionPreviewsCamera]);
const selectedCameraReviewItems = useMemo(() => {
if (!motionPreviewsCamera) {
return [];
}
return objectReviewItems.filter(
(item) => item.camera === motionPreviewsCamera,
);
}, [motionPreviewsCamera, objectReviewItems]);
const motionPreviewRanges = useCameraMotionOnlyRanges(
segmentDuration,
selectedCameraReviewItems,
selectedCameraMotionData,
);
useEffect(() => {
if (
motionPreviewsCamera &&
!reviewCameras.some((camera) => camera.name === motionPreviewsCamera)
) {
setMotionPreviewsCamera(null);
}
}, [motionPreviewsCamera, reviewCameras, setMotionPreviewsCamera]);
useEffect(() => { useEffect(() => {
if (nextTimestamp) { if (nextTimestamp) {
if (!playing && timeoutIdRef.current != null) { if (!playing && timeoutIdRef.current != null) {
@ -1124,132 +1284,349 @@ function MotionReview({
return ( return (
<> <>
<div className="no-scrollbar flex flex-1 flex-wrap content-start gap-2 overflow-y-auto md:gap-4"> {motionPreviewsCamera && selectedMotionPreviewCamera ? (
<div <>
ref={contentRef} <div className="relative mb-2 flex h-11 items-center justify-between pl-2 pr-2 md:px-3">
className={cn( <Button
"no-scrollbar grid w-full grid-cols-1", className="flex items-center gap-2.5 rounded-lg"
isMobile && "landscape:grid-cols-2", aria-label={t("label.back", { ns: "common" })}
reviewCameras.length > 3 && size="sm"
isMobile && onClick={() => setMotionPreviewsCamera(null)}
"portrait:md:grid-cols-2 landscape:md:grid-cols-3", >
isDesktop && "grid-cols-2 lg:grid-cols-3", <IoMdArrowRoundBack className="size-5 text-secondary-foreground" />
"gap-2 overflow-auto px-1 md:mx-2 md:gap-4 xl:grid-cols-3 3xl:grid-cols-4", {isDesktop && (
)} <div className="text-primary">
> {t("button.back", { ns: "common" })}
{reviewCameras.map((camera) => { </div>
let grow; )}
let spans; </Button>
const aspectRatio = camera.detect.width / camera.detect.height;
if (aspectRatio > 2) { <div className="flex items-center gap-2">
grow = "aspect-wide"; {isDesktop && (
spans = "sm:col-span-2"; <CalendarFilterButton
} else if (aspectRatio < 1) { reviewSummary={reviewSummary}
grow = "h-full aspect-tall"; recordingsSummary={recordingsSummary}
spans = "md:row-span-2"; day={
} else { filter?.after == undefined
grow = "aspect-video"; ? undefined
} : new Date(filter.after * 1000)
const detectionType = getDetectionType(camera.name); }
return ( updateSelectedDay={onUpdateSelectedDay}
<div key={camera.name} className={`relative ${spans}`}> />
{motionData ? ( )}
<> <PlatformAwareDialog
<PreviewPlayer trigger={
className={`rounded-lg md:rounded-2xl ${spans} ${grow}`} <Button
camera={camera.name} className={cn(
timeRange={currentTimeRange} isDesktop ? "flex items-center gap-2" : "rounded-lg",
startTime={previewStart} )}
cameraPreviews={relevantPreviews} size="sm"
isScrubbing={scrubbing} aria-label={
onControllerReady={(controller) => { isDesktop
videoPlayersRef.current[camera.name] = controller; ? t("motionPreviews.mobileSettingsTitle")
}} : t("filters", { ns: "views/recording" })
onClick={() => }
onOpenRecording({ >
camera: camera.name, <FaCog className="text-secondary-foreground" />
startTime: Math.min( {isDesktop && t("motionPreviews.mobileSettingsTitle")}
currentTime, </Button>
Date.now() / 1000 - 30, }
), content={
severity: "significant_motion", <div className="space-y-4 py-2">
}) {!isDesktop && (
} <div className="space-y-1">
/> <div className="text-md">
<div {t("motionPreviews.mobileSettingsTitle")}
className={`review-item-ring pointer-events-none absolute inset-0 z-20 size-full rounded-lg outline outline-[3px] -outline-offset-[2.8px] ${detectionType ? `outline-severity_${detectionType} shadow-severity_${detectionType}` : "outline-transparent duration-500"}`} </div>
/> <div className="text-xs text-muted-foreground">
</> {t("motionPreviews.mobileSettingsDesc")}
) : ( </div>
<Skeleton </div>
className={`size-full rounded-lg md:rounded-2xl ${spans} ${grow}`} )}
/>
<div className="space-y-3">
<div className="space-y-0.5">
<div className="text-md">
{t("motionPreviews.speed")}
</div>
<div className="text-xs text-muted-foreground">
{t("motionPreviews.speedDesc")}
</div>
</div>
<Select
value={String(playbackRate)}
onValueChange={(value) =>
setPlaybackRate(Number(value))
}
>
<SelectTrigger
className="h-10 w-full"
aria-label={t("motionPreviews.speedAria")}
>
<SelectValue />
</SelectTrigger>
<SelectContent>
{[4, 8, 12, 16].map((speed) => (
<SelectItem key={speed} value={String(speed)}>
{speed}x
</SelectItem>
))}
</SelectContent>
</Select>
</div>
<div className="space-y-3">
<div className="space-y-0.5">
<div className="text-md">{t("motionPreviews.dim")}</div>
<div className="text-xs text-muted-foreground">
{t("motionPreviews.dimDesc")}
</div>
</div>
<div className="px-1 py-2">
<VolumeSlider
className="w-full"
min={25}
max={95}
step={1}
value={[dimStrength]}
aria-label={t("motionPreviews.dimAria")}
onValueChange={(values) => {
const nextValue = values[0];
if (nextValue == undefined) {
return;
}
setDimStrength(nextValue);
}}
/>
</div>
</div>
{!isDesktop && (
<>
<SelectSeparator />
<div className="flex w-full flex-row justify-center">
<ReviewActivityCalendar
recordingsSummary={recordingsSummary}
selectedDay={
filter?.after == undefined
? undefined
: new Date(filter.after * 1000)
}
onSelect={(day) => {
onUpdateSelectedDay(day);
setIsPreviewSettingsOpen(false);
}}
/>
</div>
<div className="flex items-center justify-center p-2">
<Button
aria-label={t("button.reset", { ns: "common" })}
onClick={() => {
onUpdateSelectedDay(undefined);
setIsPreviewSettingsOpen(false);
}}
>
{t("button.reset", { ns: "common" })}
</Button>
</div>
</>
)}
</div>
}
contentClassName={cn(
isDesktop
? "w-80"
: "scrollbar-container max-h-[75dvh] overflow-y-auto overflow-x-hidden px-4",
)} )}
</div> open={isPreviewSettingsOpen}
); onOpenChange={setIsPreviewSettingsOpen}
})} />
</div> </div>
</div> </div>
<div className="no-scrollbar w-[55px] overflow-y-auto md:w-[100px]">
{motionData ? ( <MotionPreviewsPane
<MotionReviewTimeline camera={selectedMotionPreviewCamera}
segmentDuration={segmentDuration}
timestampSpread={15}
timelineStart={timeRangeSegments.end}
timelineEnd={timeRangeSegments.start}
motionOnly={motionOnly}
showHandlebar
handlebarTime={currentTime}
setHandlebarTime={setCurrentTime}
events={reviewItems?.all ?? []}
motion_events={motionData ?? []}
contentRef={contentRef} contentRef={contentRef}
onHandlebarDraggingChange={(scrubbing) => { cameraPreviews={relevantPreviews}
if (playing && scrubbing) { motionRanges={motionPreviewRanges}
setPlaying(false); isLoadingMotionRanges={
} motionData == undefined || overlapReviewSegments == undefined
}
setScrubbing(scrubbing); playbackRate={playbackRate}
nonMotionAlpha={dimStrength / 100}
onSeek={(timestamp) => {
onOpenRecording({
camera: selectedMotionPreviewCamera.name,
startTime: timestamp,
severity: "significant_motion",
});
}} }}
dense={isMobileOnly}
isZooming={false}
zoomDirection={null}
alwaysShowMotionLine={true}
/> />
) : ( </>
<Skeleton className="size-full" /> ) : (
)} <div className="no-scrollbar flex min-w-0 flex-1 flex-wrap content-start gap-2 overflow-y-auto md:gap-4">
</div> <div
ref={contentRef}
className={cn(
"no-scrollbar grid w-full grid-cols-1",
isMobile && "landscape:grid-cols-2",
reviewCameras.length > 3 &&
isMobile &&
"portrait:md:grid-cols-2 landscape:md:grid-cols-3",
isDesktop && "grid-cols-2 lg:grid-cols-3",
"gap-2 overflow-auto px-1 md:mx-2 md:gap-4 xl:grid-cols-3 3xl:grid-cols-4",
)}
>
{reviewCameras.map((camera) => {
let grow;
let spans;
const aspectRatio = camera.detect.width / camera.detect.height;
if (aspectRatio > 2) {
grow = "aspect-wide";
spans = "sm:col-span-2";
} else if (aspectRatio < 1) {
grow = "h-full aspect-tall";
spans = "md:row-span-2";
} else {
grow = "aspect-video";
}
const detectionType = getDetectionType(camera.name);
return (
<div key={camera.name} className={`relative ${spans}`}>
{motionData ? (
<>
<PreviewPlayer
className={`rounded-lg md:rounded-2xl ${spans} ${grow}`}
camera={camera.name}
timeRange={currentTimeRange}
startTime={previewStart}
cameraPreviews={relevantPreviews}
isScrubbing={scrubbing}
onControllerReady={(controller) => {
videoPlayersRef.current[camera.name] = controller;
}}
onClick={() =>
onOpenRecording({
camera: camera.name,
startTime: Math.min(
currentTime,
Date.now() / 1000 - 30,
),
severity: "significant_motion",
})
}
/>
<div
className={`review-item-ring pointer-events-none absolute inset-0 z-20 size-full rounded-lg outline outline-[3px] -outline-offset-[2.8px] ${detectionType ? `outline-severity_${detectionType} shadow-severity_${detectionType}` : "outline-transparent duration-500"}`}
/>
<div className="absolute bottom-2 right-2 z-30">
<DropdownMenu>
<DropdownMenuTrigger asChild>
<BlurredIconButton
aria-label={t("motionSearch.openMenu")}
onClick={(e) => e.stopPropagation()}
>
<FiMoreVertical className="size-5" />
</BlurredIconButton>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuItem
onClick={(e) => {
e.stopPropagation();
setMotionPreviewsCamera(camera.name);
}}
>
{t("motionPreviews.menuItem")}
</DropdownMenuItem>
<DropdownMenuItem
onClick={(e) => {
e.stopPropagation();
setMotionSearchCamera(camera.name);
}}
>
{t("motionSearch.menuItem")}
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</div>
</>
) : (
<Skeleton
className={`size-full rounded-lg md:rounded-2xl ${spans} ${grow}`}
/>
)}
</div>
);
})}
</div>
</div>
)}
{!selectedMotionPreviewCamera && (
<div className="no-scrollbar w-[55px] overflow-y-auto md:w-[100px]">
{motionData ? (
<MotionReviewTimeline
segmentDuration={segmentDuration}
timestampSpread={15}
timelineStart={timeRangeSegments.end}
timelineEnd={timeRangeSegments.start}
motionOnly={motionOnly}
showHandlebar
handlebarTime={currentTime}
setHandlebarTime={setCurrentTime}
events={reviewItems?.all ?? []}
motion_events={motionData ?? []}
contentRef={contentRef}
onHandlebarDraggingChange={(scrubbing) => {
if (playing && scrubbing) {
setPlaying(false);
}
<VideoControls setScrubbing(scrubbing);
className="absolute bottom-16 left-1/2 -translate-x-1/2 bg-secondary" }}
features={{ dense={isMobileOnly}
volume: false, isZooming={false}
seek: true, zoomDirection={null}
playbackRate: true, alwaysShowMotionLine={true}
fullscreen: false, />
}} ) : (
isPlaying={playing} <Skeleton className="size-full" />
show={!scrubbing || controlsOpen} )}
playbackRates={[4, 8, 12, 16]} </div>
playbackRate={playbackRate} )}
setControlsOpen={setControlsOpen}
onPlayPause={setPlaying}
onSeek={(diff) => {
const wasPlaying = playing;
if (wasPlaying) { {!selectedMotionPreviewCamera && (
setPlaying(false); <VideoControls
} className="absolute bottom-16 left-1/2 -translate-x-1/2 bg-secondary"
features={{
volume: false,
seek: true,
playbackRate: true,
fullscreen: false,
}}
isPlaying={playing}
show={!scrubbing || controlsOpen}
playbackRates={[4, 8, 12, 16]}
playbackRate={playbackRate}
setControlsOpen={setControlsOpen}
onPlayPause={setPlaying}
onSeek={(diff) => {
const wasPlaying = playing;
setCurrentTime(currentTime + diff); if (wasPlaying) {
setPlaying(false);
}
if (wasPlaying) { setCurrentTime(currentTime + diff);
setTimeout(() => setPlaying(true), 100);
} if (wasPlaying) {
}} setTimeout(() => setPlaying(true), 100);
onSetPlaybackRate={setPlaybackRate} }
/> }}
onSetPlaybackRate={setPlaybackRate}
/>
)}
</> </>
); );
} }

View File

@ -0,0 +1,898 @@
import { MotionOnlyRange } from "@/hooks/use-camera-activity";
import { Preview } from "@/types/preview";
import {
MutableRefObject,
useCallback,
useEffect,
useMemo,
useRef,
useState,
} from "react";
import { isCurrentHour } from "@/utils/dateUtil";
import { useTranslation } from "react-i18next";
import { CameraConfig } from "@/types/frigateConfig";
import useSWR from "swr";
import { baseUrl } from "@/api/baseUrl";
import { Recording } from "@/types/record";
import { useResizeObserver } from "@/hooks/resize-observer";
import { Skeleton } from "@/components/ui/skeleton";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import TimeAgo from "@/components/dynamic/TimeAgo";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import { FrigateConfig } from "@/types/frigateConfig";
// Motion heatmaps are addressed as a fixed 16x16 grid of cells per frame
// (cell indexes 0..255 keyed as strings in the heatmap records).
const MOTION_HEATMAP_GRID_SIZE = 16;
// Floor for the dim-overlay alpha on the strongest-motion cells: even cells
// at maximum motion keep this slight darkening (see drawDimOverlay).
const MIN_MOTION_CELL_ALPHA = 0.06;
// Choose the preview clip that best covers a motion range for a camera.
// Among previews for this camera that overlap the range at all, the one with
// the largest overlap (in seconds) wins; ties keep the earlier candidate.
// Returns undefined when no preview overlaps the range.
function getPreviewForMotionRange(
  cameraPreviews: Preview[],
  cameraName: string,
  range: MotionOnlyRange,
) {
  const overlapSeconds = (preview: Preview) =>
    Math.max(
      0,
      Math.min(preview.end, range.end_time) -
        Math.max(preview.start, range.start_time),
    );

  let best: Preview | undefined;
  let bestOverlap = 0;
  for (const preview of cameraPreviews) {
    if (preview.camera !== cameraName) {
      continue;
    }
    // Skip previews entirely outside the range.
    if (preview.end <= range.start_time || preview.start >= range.end_time) {
      continue;
    }
    const overlap = overlapSeconds(preview);
    if (best === undefined || overlap > bestOverlap) {
      best = preview;
      bestOverlap = overlap;
    }
  }
  return best;
}
// Seconds of overlap between [rangeStart, rangeEnd] and
// [recordingStart, recordingEnd]; 0 when the intervals are disjoint or
// merely touch at an endpoint.
function getRangeOverlapSeconds(
  rangeStart: number,
  rangeEnd: number,
  recordingStart: number,
  recordingEnd: number,
) {
  const overlapStart = Math.max(rangeStart, recordingStart);
  const overlapEnd = Math.min(rangeEnd, recordingEnd);
  return overlapEnd > overlapStart ? overlapEnd - overlapStart : 0;
}
// Merge the per-recording motion heatmaps that overlap a motion range into a
// single heatmap, weighting each recording's cell levels by how many seconds
// it overlaps the range. Cell levels are normalized back to 0-255; returns
// null when no overlapping recording contributes any motion data.
function getMotionHeatmapForRange(
  recordings: Recording[],
  range: MotionOnlyRange,
) {
  const weightedLevels: Record<number, number> = {};
  let totalOverlapSeconds = 0;

  for (const recording of recordings) {
    const weight = getRangeOverlapSeconds(
      range.start_time,
      range.end_time,
      recording.start_time,
      recording.end_time,
    );
    if (weight <= 0) {
      continue;
    }
    // Recordings without heatmap data still add to the total weight, which
    // dilutes cells that were only active for part of the range.
    totalOverlapSeconds += weight;
    if (!recording.motion_heatmap) {
      continue;
    }
    for (const [cellIndex, intensity] of Object.entries(
      recording.motion_heatmap,
    )) {
      const index = Number(cellIndex);
      const level = Number(intensity);
      if (Number.isNaN(index) || Number.isNaN(level) || level <= 0) {
        continue;
      }
      weightedLevels[index] = (weightedLevels[index] ?? 0) + level * weight;
    }
  }

  if (!totalOverlapSeconds || Object.keys(weightedLevels).length === 0) {
    return null;
  }

  // Normalize each accumulated cell back to a clamped 0-255 level, dropping
  // cells that round down to zero.
  const mergedHeatmap: Record<string, number> = {};
  for (const [index, weightedLevel] of Object.entries(weightedLevels)) {
    const normalizedLevel = Math.max(
      0,
      Math.min(255, Math.round(weightedLevel / totalOverlapSeconds)),
    );
    if (normalizedLevel > 0) {
      mergedHeatmap[index] = normalizedLevel;
    }
  }
  return Object.keys(mergedHeatmap).length > 0 ? mergedHeatmap : null;
}
// Props for a single animated motion-preview tile.
type MotionPreviewClipProps = {
  // Camera this clip belongs to; used to build fallback frame URLs.
  cameraName: string;
  // Motion-only time range (epoch seconds) this clip represents.
  range: MotionOnlyRange;
  // Playback speed multiplier for the preview video / frame cycling.
  playbackRate: number;
  // Preview video covering the range, when one exists.
  preview?: Preview;
  // Preview-frame timestamps used to animate when no video preview exists.
  fallbackFrameTimes?: number[];
  // Per-cell motion levels (string cell index -> 0-255) for the dim overlay.
  motionHeatmap?: Record<string, number> | null;
  // Dim-overlay alpha applied to cells with no motion (0-1).
  nonMotionAlpha: number;
  // Whether the clip is on-screen; off-screen clips pause and skip loading.
  isVisible: boolean;
  // Invoked with the range's start timestamp when the clip is clicked.
  onSeek: (timestamp: number) => void;
};
// Renders one looping motion preview clip. Prefers the recorded preview video
// when one covers the range; otherwise cycles through individual preview
// frames (the current-hour fallback). When a motion heatmap is supplied, a
// canvas overlay dims the low-motion areas of the frame.
function MotionPreviewClip({
  cameraName,
  range,
  playbackRate,
  preview,
  fallbackFrameTimes,
  motionHeatmap,
  nonMotionAlpha,
  isVisible,
  onSeek,
}: MotionPreviewClipProps) {
  const { t } = useTranslation(["views/events", "common"]);
  const { data: config } = useSWR<FrigateConfig>("config");
  const videoRef = useRef<HTMLVideoElement | null>(null);
  const dimOverlayCanvasRef = useRef<HTMLCanvasElement | null>(null);
  const overlayContainerRef = useRef<HTMLDivElement | null>(null);
  // Container size drives the dim-overlay canvas dimensions.
  const [{ width: overlayWidth, height: overlayHeight }] =
    useResizeObserver(overlayContainerRef);
  const [videoLoaded, setVideoLoaded] = useState(false);
  const [videoPlaying, setVideoPlaying] = useState(false);
  const [fallbackImageLoaded, setFallbackImageLoaded] = useState(false);
  // Natural dimensions of the loaded video/image; used to compute the
  // object-contain letterboxed area the overlay should cover.
  const [mediaDimensions, setMediaDimensions] = useState<{
    width: number;
    height: number;
  } | null>(null);
  const [fallbackFrameIndex, setFallbackFrameIndex] = useState(0);
  const [fallbackFramesReady, setFallbackFramesReady] = useState(false);
  // Clip start time formatted per the user's configured time format/timezone.
  const formattedDate = useFormattedTimestamp(
    range.start_time,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestampMonthDayHourMinute.24hour", {
          ns: "common",
        })
      : t("time.formattedTimestampMonthDayHourMinute.12hour", {
          ns: "common",
        }),
    config?.ui.timezone,
  );
  // Thumbnail URLs for the fallback frame animation.
  const fallbackFrameSrcs = useMemo(() => {
    if (!fallbackFrameTimes || fallbackFrameTimes.length === 0) {
      return [] as string[];
    }
    return fallbackFrameTimes.map(
      (frameTime) =>
        `${baseUrl}api/preview/preview_${cameraName}-${frameTime}.webp/thumbnail.webp`,
    );
  }, [cameraName, fallbackFrameTimes]);
  // Restart the fallback animation whenever the range or frame list changes.
  useEffect(() => {
    setFallbackFrameIndex(0);
    setFallbackFramesReady(false);
  }, [range.start_time, range.end_time, fallbackFrameTimes]);
  // Preload every fallback frame before animating so cycling doesn't flicker.
  useEffect(() => {
    if (fallbackFrameSrcs.length === 0) {
      setFallbackFramesReady(false);
      return;
    }
    let cancelled = false;
    const preloadFrames = async () => {
      await Promise.allSettled(
        fallbackFrameSrcs.map(
          (src) =>
            new Promise<void>((resolve) => {
              const image = new Image();
              // Resolve on error too — a missing frame shouldn't block the
              // animation from ever starting.
              image.onload = () => resolve();
              image.onerror = () => resolve();
              image.src = src;
            }),
        ),
      );
      if (!cancelled) {
        setFallbackFramesReady(true);
      }
    };
    void preloadFrames();
    return () => {
      cancelled = true;
    };
  }, [fallbackFrameSrcs]);
  // Advance the fallback frame index on an interval scaled by playbackRate
  // (clamped to at most one frame per 50ms).
  useEffect(() => {
    if (!fallbackFramesReady || fallbackFrameSrcs.length <= 1 || !isVisible) {
      return;
    }
    const intervalMs = Math.max(
      50,
      Math.round(1000 / Math.max(1, playbackRate)),
    );
    const intervalId = window.setInterval(() => {
      setFallbackFrameIndex((previous) => {
        return (previous + 1) % fallbackFrameSrcs.length;
      });
    }, intervalMs);
    return () => {
      window.clearInterval(intervalId);
    };
  }, [fallbackFrameSrcs.length, fallbackFramesReady, isVisible, playbackRate]);
  // Currently displayed fallback frame URL.
  const fallbackFrameSrc = useMemo(() => {
    if (fallbackFrameSrcs.length === 0) {
      return undefined;
    }
    return fallbackFrameSrcs[fallbackFrameIndex] ?? fallbackFrameSrcs[0];
  }, [fallbackFrameIndex, fallbackFrameSrcs]);
  // Reset video state when the preview source changes.
  useEffect(() => {
    setVideoLoaded(false);
    setVideoPlaying(false);
    setMediaDimensions(null);
  }, [preview?.src]);
  // If the video element already had a source (or errored) before this
  // component re-mounted its state, mark it loaded so the skeleton clears.
  useEffect(() => {
    if (!preview || !isVisible || videoLoaded || !videoRef.current) {
      return;
    }
    if (videoRef.current.currentSrc || videoRef.current.error) {
      setVideoLoaded(true);
    }
  }, [isVisible, preview, videoLoaded]);
  // Reset image state when the fallback frame list changes.
  useEffect(() => {
    setFallbackImageLoaded(false);
    setMediaDimensions(null);
  }, [fallbackFrameSrcs]);
  // Once frames are preloaded and a frame is showing, the image is ready.
  useEffect(() => {
    if (!fallbackFrameSrc || !isVisible || !fallbackFramesReady) {
      return;
    }
    setFallbackImageLoaded(true);
  }, [fallbackFrameSrc, fallbackFramesReady, isVisible]);
  // Show the skeleton until the video starts playing or the fallback image
  // has loaded.
  const showLoadingIndicator =
    (preview != undefined && isVisible && !videoPlaying) ||
    (fallbackFrameSrc != undefined && isVisible && !fallbackImageLoaded);
  // Offset (seconds) into the preview video where the motion range begins.
  const clipStart = useMemo(() => {
    if (!preview) {
      return 0;
    }
    return Math.max(0, range.start_time - preview.start);
  }, [preview, range.start_time]);
  // Offset (seconds) where the loop should wrap; clamped to the preview
  // duration and kept at least 0.1s past clipStart.
  const clipEnd = useMemo(() => {
    if (!preview) {
      return 0;
    }
    const previewDuration = preview.end - preview.start;
    return Math.min(
      previewDuration,
      Math.max(clipStart + 0.1, range.end_time - preview.start),
    );
  }, [clipStart, preview, range.end_time]);
  // Rewind to the clip start and apply the current playback rate.
  const resetPlayback = useCallback(() => {
    if (!videoRef.current || !preview) {
      return;
    }
    videoRef.current.currentTime = clipStart;
    videoRef.current.playbackRate = playbackRate;
  }, [clipStart, playbackRate, preview]);
  // Pause and rewind off-screen clips; (re)start playback once visible and
  // enough data is buffered (readyState >= HAVE_CURRENT_DATA).
  useEffect(() => {
    if (!videoRef.current || !preview) {
      return;
    }
    if (!isVisible) {
      videoRef.current.pause();
      videoRef.current.currentTime = clipStart;
      return;
    }
    if (videoRef.current.readyState >= 2) {
      resetPlayback();
      void videoRef.current.play().catch(() => undefined);
    }
  }, [clipStart, isVisible, preview, resetPlayback]);
  // Paint the dim overlay: build a 16x16 alpha mask (one pixel per heatmap
  // cell) where stronger motion means less darkening, then scale it over the
  // rendered media area with smoothing for a soft heatmap look.
  const drawDimOverlay = useCallback(() => {
    if (!dimOverlayCanvasRef.current) {
      return;
    }
    const canvas = dimOverlayCanvasRef.current;
    const context = canvas.getContext("2d");
    if (!context) {
      return;
    }
    if (overlayWidth <= 0 || overlayHeight <= 0) {
      return;
    }
    const width = Math.max(1, overlayWidth);
    const height = Math.max(1, overlayHeight);
    // Render at device-pixel resolution to stay sharp on HiDPI screens.
    const dpr = window.devicePixelRatio || 1;
    const pixelWidth = Math.max(1, Math.round(width * dpr));
    const pixelHeight = Math.max(1, Math.round(height * dpr));
    if (canvas.width !== pixelWidth || canvas.height !== pixelHeight) {
      canvas.width = pixelWidth;
      canvas.height = pixelHeight;
    }
    canvas.style.width = `${width}px`;
    canvas.style.height = `${height}px`;
    context.setTransform(dpr, 0, 0, dpr, 0, 0);
    context.clearRect(0, 0, width, height);
    if (!motionHeatmap) {
      return;
    }
    // Calculate the actual rendered media area (object-contain letterboxing)
    let drawX = 0;
    let drawY = 0;
    let drawWidth = width;
    let drawHeight = height;
    if (
      mediaDimensions &&
      mediaDimensions.width > 0 &&
      mediaDimensions.height > 0
    ) {
      const containerAspect = width / height;
      const mediaAspect = mediaDimensions.width / mediaDimensions.height;
      if (mediaAspect < containerAspect) {
        // Portrait / tall: constrained by height, bars on left and right
        drawHeight = height;
        drawWidth = height * mediaAspect;
        drawX = (width - drawWidth) / 2;
        drawY = 0;
      } else {
        // Wide / landscape: constrained by width, bars on top and bottom
        drawWidth = width;
        drawHeight = width / mediaAspect;
        drawX = 0;
        drawY = (height - drawHeight) / 2;
      }
    }
    // Normalize against the strongest cell so the clip's own peak motion
    // always maps to the lightest overlay.
    const heatmapLevels = Object.values(motionHeatmap)
      .map((value) => Number(value))
      .filter((value) => Number.isFinite(value) && value > 0);
    const maxHeatmapLevel =
      heatmapLevels.length > 0 ? Math.max(...heatmapLevels) : 0;
    const maskCanvas = document.createElement("canvas");
    maskCanvas.width = MOTION_HEATMAP_GRID_SIZE;
    maskCanvas.height = MOTION_HEATMAP_GRID_SIZE;
    const maskContext = maskCanvas.getContext("2d");
    if (!maskContext) {
      return;
    }
    const imageData = maskContext.createImageData(
      MOTION_HEATMAP_GRID_SIZE,
      MOTION_HEATMAP_GRID_SIZE,
    );
    for (let index = 0; index < MOTION_HEATMAP_GRID_SIZE ** 2; index++) {
      const level = Number(motionHeatmap[index.toString()] ?? 0);
      const normalizedLevel =
        maxHeatmapLevel > 0
          ? Math.min(1, Math.max(0, level / maxHeatmapLevel))
          : 0;
      // sqrt boost lifts mid-range motion so it isn't dimmed as heavily.
      const boostedLevel = Math.sqrt(normalizedLevel);
      // Alpha interpolates from nonMotionAlpha (no motion) down to
      // MIN_MOTION_CELL_ALPHA (strongest motion).
      const alpha =
        nonMotionAlpha -
        boostedLevel * (nonMotionAlpha - MIN_MOTION_CELL_ALPHA);
      const pixelOffset = index * 4;
      imageData.data[pixelOffset] = 0;
      imageData.data[pixelOffset + 1] = 0;
      imageData.data[pixelOffset + 2] = 0;
      imageData.data[pixelOffset + 3] = Math.round(
        Math.max(0, Math.min(1, alpha)) * 255,
      );
    }
    maskContext.putImageData(imageData, 0, 0);
    // Smoothing blurs the 16x16 mask into a gradient as it is upscaled.
    context.imageSmoothingEnabled = true;
    context.imageSmoothingQuality = "high";
    context.drawImage(maskCanvas, drawX, drawY, drawWidth, drawHeight);
  }, [
    motionHeatmap,
    nonMotionAlpha,
    overlayHeight,
    overlayWidth,
    mediaDimensions,
  ]);
  // Redraw whenever any overlay input changes.
  useEffect(() => {
    drawDimOverlay();
  }, [drawDimOverlay]);
  return (
    <div
      ref={overlayContainerRef}
      className="relative aspect-video size-full cursor-pointer overflow-hidden rounded-lg bg-black md:rounded-2xl"
      onClick={() => onSeek(range.start_time)}
    >
      {showLoadingIndicator && (
        <Skeleton className="absolute inset-0 z-10 rounded-lg md:rounded-2xl" />
      )}
      {preview ? (
        <>
          <video
            ref={videoRef}
            className="size-full bg-black object-contain"
            playsInline
            preload={isVisible ? "metadata" : "none"}
            muted
            autoPlay={isVisible}
            onLoadedMetadata={() => {
              setVideoLoaded(true);
              if (videoRef.current) {
                setMediaDimensions({
                  width: videoRef.current.videoWidth,
                  height: videoRef.current.videoHeight,
                });
              }
              if (!isVisible) {
                return;
              }
              resetPlayback();
              if (videoRef.current) {
                void videoRef.current.play().catch(() => undefined);
              }
            }}
            onCanPlay={() => {
              setVideoLoaded(true);
              if (!isVisible) {
                return;
              }
              if (videoRef.current) {
                void videoRef.current.play().catch(() => undefined);
              }
            }}
            onPlay={() => setVideoPlaying(true)}
            onLoadedData={() => setVideoLoaded(true)}
            onError={() => {
              setVideoLoaded(true);
              setVideoPlaying(true);
            }}
            onTimeUpdate={() => {
              if (!videoRef.current || !preview || !isVisible) {
                return;
              }
              {/* loop back to the clip start once the range end is reached */}
              if (videoRef.current.currentTime >= clipEnd) {
                videoRef.current.currentTime = clipStart;
              }
            }}
          >
            {isVisible && (
              <source
                src={`${baseUrl}${preview.src.substring(1)}`}
                type={preview.type}
              />
            )}
          </video>
          {motionHeatmap && (
            <canvas
              ref={dimOverlayCanvasRef}
              className="pointer-events-none absolute inset-0"
              aria-hidden="true"
            />
          )}
        </>
      ) : fallbackFrameSrc ? (
        <>
          <img
            src={fallbackFrameSrc}
            className="size-full bg-black object-contain"
            loading="lazy"
            alt=""
            onLoad={(e) => {
              setFallbackImageLoaded(true);
              const img = e.currentTarget;
              if (img.naturalWidth > 0 && img.naturalHeight > 0) {
                setMediaDimensions({
                  width: img.naturalWidth,
                  height: img.naturalHeight,
                });
              }
            }}
            onError={() => setFallbackImageLoaded(true)}
          />
          {motionHeatmap && (
            <canvas
              ref={dimOverlayCanvasRef}
              className="pointer-events-none absolute inset-0"
              aria-hidden="true"
            />
          )}
        </>
      ) : (
        <div className="flex size-full items-center justify-center text-sm text-muted-foreground">
          {t("motionPreviews.noPreview")}
        </div>
      )}
      <div className="pointer-events-none absolute bottom-0 left-0 right-0 z-30 p-2">
        <div className="flex flex-col items-start text-xs text-white/90 drop-shadow-lg">
          {range.end_time ? (
            <TimeAgo time={range.start_time * 1000} dense />
          ) : (
            <ActivityIndicator size={14} />
          )}
          {formattedDate}
        </div>
      </div>
    </div>
  );
}
// Props for the grid of motion preview clips for one camera.
type MotionPreviewsPaneProps = {
  // Camera whose motion ranges are being previewed.
  camera: CameraConfig;
  // Shared ref exposing the scrollable clip container to the parent.
  contentRef: MutableRefObject<HTMLDivElement | null>;
  // Available preview clips (may include other cameras; filtered internally).
  cameraPreviews: Preview[];
  // Motion-only time ranges to render as clips.
  motionRanges: MotionOnlyRange[];
  // True while the parent is still computing the motion ranges.
  isLoadingMotionRanges?: boolean;
  // Playback speed multiplier passed through to each clip.
  playbackRate: number;
  // Dim-overlay alpha for no-motion cells (0-1), passed through to clips.
  nonMotionAlpha: number;
  // Invoked with a timestamp when a clip is clicked.
  onSeek: (timestamp: number) => void;
};
// Grid of per-range motion preview clips for a single camera. Fetches the
// recordings (for motion heatmaps) and the current hour's preview frames the
// clips need, and uses an IntersectionObserver so only on-screen clips
// animate.
export default function MotionPreviewsPane({
  camera,
  contentRef,
  cameraPreviews,
  motionRanges,
  isLoadingMotionRanges = false,
  playbackRate,
  nonMotionAlpha,
  onSeek,
}: MotionPreviewsPaneProps) {
  const { t } = useTranslation(["views/events"]);
  // Scroll container held in state (not just a ref) so the observer effect
  // re-runs when the node mounts.
  const [scrollContainer, setScrollContainer] = useState<HTMLDivElement | null>(
    null,
  );
  // Pause all clip animation while the browser tab is hidden.
  const [windowVisible, setWindowVisible] = useState(true);
  useEffect(() => {
    const visibilityListener = () => {
      setWindowVisible(document.visibilityState == "visible");
    };
    addEventListener("visibilitychange", visibilityListener);
    return () => {
      removeEventListener("visibilitychange", visibilityListener);
    };
  }, []);
  // Clip IDs currently intersecting the scroll container.
  const [visibleClips, setVisibleClips] = useState<string[]>([]);
  // False until the observer has reported at least once; before that the
  // first few clips are animated optimistically.
  const [hasVisibilityData, setHasVisibilityData] = useState(false);
  const clipObserver = useRef<IntersectionObserver | null>(null);
  // Overall time bounds spanned by the motion ranges; drives data fetches.
  const recordingTimeRange = useMemo(() => {
    if (!motionRanges.length) {
      return null;
    }
    return motionRanges.reduce(
      (bounds, range) => ({
        after: Math.min(bounds.after, range.start_time),
        before: Math.max(bounds.before, range.end_time),
      }),
      {
        after: motionRanges[0].start_time,
        before: motionRanges[0].end_time,
      },
    );
  }, [motionRanges]);
  // Recording segments over the bounds — the source of motion heatmaps.
  const { data: cameraRecordings } = useSWR<Recording[]>(
    recordingTimeRange
      ? [
          `${camera.name}/recordings`,
          {
            after: Math.floor(recordingTimeRange.after),
            before: Math.ceil(recordingTimeRange.before),
          },
        ]
      : null,
    {
      revalidateOnFocus: false,
      revalidateOnReconnect: false,
    },
  );
  // Individual preview frame filenames, used as the current-hour fallback
  // when no preview video exists yet.
  const { data: previewFrames } = useSWR<string[]>(
    recordingTimeRange
      ? `preview/${camera.name}/start/${Math.floor(recordingTimeRange.after)}/end/${Math.ceil(recordingTimeRange.before)}/frames`
      : null,
    {
      revalidateOnFocus: false,
      revalidateOnReconnect: false,
    },
  );
  // Parse the trailing epoch timestamp out of each frame filename.
  const previewFrameTimes = useMemo(() => {
    if (!previewFrames) {
      return [] as number[];
    }
    return previewFrames
      .map((frame) => {
        const timestampPart = frame.split("-").at(-1)?.replace(".webp", "");
        return timestampPart ? Number(timestampPart) : NaN;
      })
      .filter((value) => Number.isFinite(value))
      .sort((a, b) => a - b);
  }, [previewFrames]);
  // Frame timestamps to animate for a range with no preview video. Only the
  // current hour qualifies (older ranges should have a preview video).
  const getFallbackFrameTimesForRange = useCallback(
    (range: MotionOnlyRange) => {
      if (!isCurrentHour(range.end_time) || previewFrameTimes.length === 0) {
        return [] as number[];
      }
      const inRangeFrames = previewFrameTimes.filter(
        (frameTime) =>
          frameTime >= range.start_time && frameTime <= range.end_time,
      );
      // Use all in-range frames when enough data exists for natural animation
      if (inRangeFrames.length > 1) {
        return inRangeFrames;
      }
      // If sparse, keep the single in-range frame and add only the next 2 frames
      if (inRangeFrames.length === 1) {
        const inRangeFrame = inRangeFrames[0];
        const nextFrames = previewFrameTimes
          .filter((frameTime) => frameTime > inRangeFrame)
          .slice(0, 2);
        return [inRangeFrame, ...nextFrames];
      }
      const nextFramesFromStart = previewFrameTimes
        .filter((frameTime) => frameTime >= range.start_time)
        .slice(0, 3);
      // If no in-range frame exists, take up to 3 frames starting at clip start
      if (nextFramesFromStart.length > 0) {
        return nextFramesFromStart;
      }
      const lastFrame = previewFrameTimes.at(-1);
      return lastFrame != undefined ? [lastFrame] : [];
    },
    [previewFrameTimes],
  );
  // Callback ref: expose the node via contentRef and capture it in state.
  const setContentNode = useCallback(
    (node: HTMLDivElement | null) => {
      contentRef.current = node;
      setScrollContainer(node);
    },
    [contentRef],
  );
  // (Re)create the IntersectionObserver tracking which clips are on screen.
  useEffect(() => {
    if (!scrollContainer) {
      return;
    }
    const visibleClipIds = new Set<string>();
    clipObserver.current = new IntersectionObserver(
      (entries) => {
        setHasVisibilityData(true);
        entries.forEach((entry) => {
          const clipId = (entry.target as HTMLElement).dataset.clipId;
          if (!clipId) {
            return;
          }
          if (entry.isIntersecting) {
            visibleClipIds.add(clipId);
          } else {
            visibleClipIds.delete(clipId);
          }
        });
        // Re-check each tracked clip against the container's current rect:
        // stale entries can linger in the set after fast scrolls.
        const rootRect = scrollContainer.getBoundingClientRect();
        const prunedVisibleClipIds = [...visibleClipIds].filter((clipId) => {
          const clipElement = scrollContainer.querySelector<HTMLElement>(
            `[data-clip-id="${clipId}"]`,
          );
          if (!clipElement) {
            return false;
          }
          const clipRect = clipElement.getBoundingClientRect();
          return (
            clipRect.bottom > rootRect.top && clipRect.top < rootRect.bottom
          );
        });
        setVisibleClips(prunedVisibleClipIds);
      },
      {
        root: scrollContainer,
        threshold: 0,
      },
    );
    // Observe clips already mounted when the observer is created.
    scrollContainer
      .querySelectorAll<HTMLElement>("[data-clip-id]")
      .forEach((node) => {
        clipObserver.current?.observe(node);
      });
    return () => {
      clipObserver.current?.disconnect();
    };
  }, [scrollContainer]);
  // Callback ref attached to each clip wrapper so late-mounted clips are
  // observed too.
  const clipRef = useCallback((node: HTMLElement | null) => {
    if (!clipObserver.current) {
      return;
    }
    try {
      if (node) {
        clipObserver.current.observe(node);
      }
    } catch {
      // no op
    }
  }, []);
  // Per-range render data, newest first: best-matching preview, fallback
  // frames (only when no preview), and the merged motion heatmap.
  const clipData = useMemo(
    () =>
      motionRanges
        .filter((range) => range.end_time > range.start_time)
        .sort((left, right) => right.start_time - left.start_time)
        .map((range) => {
          const preview = getPreviewForMotionRange(
            cameraPreviews,
            camera.name,
            range,
          );
          return {
            range,
            preview,
            fallbackFrameTimes: !preview
              ? getFallbackFrameTimesForRange(range)
              : undefined,
            motionHeatmap: getMotionHeatmapForRange(
              cameraRecordings ?? [],
              range,
            ),
          };
        }),
    [
      cameraPreviews,
      camera.name,
      cameraRecordings,
      getFallbackFrameTimesForRange,
      motionRanges,
    ],
  );
  const hasCurrentHourRanges = useMemo(
    () => motionRanges.some((range) => isCurrentHour(range.end_time)),
    [motionRanges],
  );
  // Hold the whole pane on a spinner until every fetch the clips depend on
  // has resolved.
  const isLoadingPane =
    isLoadingMotionRanges ||
    (motionRanges.length > 0 && cameraRecordings == undefined) ||
    (hasCurrentHourRanges && previewFrames == undefined);
  if (isLoadingPane) {
    return (
      <ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
    );
  }
  return (
    <div className="flex min-h-0 flex-1 flex-col gap-3 overflow-hidden px-1 md:mx-2 md:gap-4">
      <div
        ref={setContentNode}
        className="no-scrollbar min-h-0 flex-1 overflow-y-auto"
      >
        {clipData.length === 0 ? (
          <div className="flex h-full items-center justify-center text-lg text-primary">
            {t("motionPreviews.empty")}
          </div>
        ) : (
          <div className="grid grid-cols-1 gap-2 pb-2 sm:grid-cols-2 md:gap-4 xl:grid-cols-4">
            {clipData.map(
              ({ range, preview, fallbackFrameTimes, motionHeatmap }, idx) => (
                <div
                  key={`${camera.name}-${range.start_time}-${range.end_time}-${preview?.src ?? "none"}-${idx}`}
                  data-clip-id={`${camera.name}-${range.start_time}-${range.end_time}-${idx}`}
                  ref={clipRef}
                >
                  <MotionPreviewClip
                    cameraName={camera.name}
                    range={range}
                    playbackRate={playbackRate}
                    preview={preview}
                    fallbackFrameTimes={fallbackFrameTimes}
                    motionHeatmap={motionHeatmap}
                    nonMotionAlpha={nonMotionAlpha}
                    isVisible={
                      windowVisible &&
                      (visibleClips.includes(
                        `${camera.name}-${range.start_time}-${range.end_time}-${idx}`,
                      ) ||
                        (!hasVisibilityData && idx < 8))
                    }
                    onSeek={onSeek}
                  />
                </div>
              ),
            )}
          </div>
        )}
      </div>
    </div>
  );
}

View File

@ -0,0 +1,708 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import { isDesktop, isIOS, isMobile } from "react-device-detect";
import { FaArrowRight, FaCalendarAlt, FaCheckCircle } from "react-icons/fa";
import { MdOutlineRestartAlt, MdUndo } from "react-icons/md";
import { FrigateConfig } from "@/types/frigateConfig";
import { TimeRange } from "@/types/timeline";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { Drawer, DrawerContent } from "@/components/ui/drawer";
import { Label } from "@/components/ui/label";
import { Slider } from "@/components/ui/slider";
import { Switch } from "@/components/ui/switch";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { SelectSeparator } from "@/components/ui/select";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import { CameraNameLabel } from "@/components/camera/FriendlyNameLabel";
import { TimezoneAwareCalendar } from "@/components/overlay/ReviewActivityCalendar";
import { useApiHost } from "@/api";
import { useResizeObserver } from "@/hooks/resize-observer";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import { getUTCOffset } from "@/utils/dateUtil";
import { cn } from "@/lib/utils";
import MotionSearchROICanvas from "./MotionSearchROICanvas";
import { TransformComponent, TransformWrapper } from "react-zoom-pan-pinch";
// Props for the motion search configuration dialog (desktop) / drawer
// (mobile). All search state is lifted to the parent; this component only
// renders it and invokes the setters.
type MotionSearchDialogProps = {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  config: FrigateConfig;
  // Camera names offered in the camera selector.
  cameras: string[];
  selectedCamera: string | null;
  onCameraSelect: (camera: string) => void;
  // When true (and a camera is selected) the camera selector is not shown.
  cameraLocked?: boolean;
  // Vertices of the region-of-interest polygon being drawn.
  polygonPoints: number[][];
  setPolygonPoints: React.Dispatch<React.SetStateAction<number[][]>>;
  // True while the user is still placing polygon points (polygon not closed).
  isDrawingROI: boolean;
  setIsDrawingROI: React.Dispatch<React.SetStateAction<boolean>>;
  // Search tuning parameters — semantics defined by the search backend;
  // state lifted to the parent.
  parallelMode: boolean;
  setParallelMode: React.Dispatch<React.SetStateAction<boolean>>;
  threshold: number;
  setThreshold: React.Dispatch<React.SetStateAction<number>>;
  minArea: number;
  setMinArea: React.Dispatch<React.SetStateAction<number>>;
  frameSkip: number;
  setFrameSkip: React.Dispatch<React.SetStateAction<number>>;
  maxResults: number;
  setMaxResults: React.Dispatch<React.SetStateAction<number>>;
  // Time window to search; falls back to defaultRange when unset.
  searchRange?: TimeRange;
  setSearchRange: React.Dispatch<React.SetStateAction<TimeRange | undefined>>;
  defaultRange: TimeRange;
  // True while a search job is running; disables polygon editing.
  isSearching: boolean;
  canStartSearch: boolean;
  onStartSearch: () => void;
  timezone?: string;
};
export default function MotionSearchDialog({
open,
onOpenChange,
config,
cameras,
selectedCamera,
onCameraSelect,
cameraLocked = false,
polygonPoints,
setPolygonPoints,
isDrawingROI,
setIsDrawingROI,
parallelMode,
setParallelMode,
threshold,
setThreshold,
minArea,
setMinArea,
frameSkip,
setFrameSkip,
maxResults,
setMaxResults,
searchRange,
setSearchRange,
defaultRange,
isSearching,
canStartSearch,
onStartSearch,
timezone,
}: MotionSearchDialogProps) {
const { t } = useTranslation(["views/motionSearch", "common"]);
const apiHost = useApiHost();
const containerRef = useRef<HTMLDivElement>(null);
const [{ width: containerWidth, height: containerHeight }] =
useResizeObserver(containerRef);
const [imageLoaded, setImageLoaded] = useState(false);
const cameraConfig = useMemo(() => {
if (!selectedCamera) return undefined;
return config.cameras[selectedCamera];
}, [config, selectedCamera]);
const polygonClosed = useMemo(
() => !isDrawingROI && polygonPoints.length >= 3,
[isDrawingROI, polygonPoints.length],
);
const undoPolygonPoint = useCallback(() => {
if (polygonPoints.length === 0 || isSearching) {
return;
}
setPolygonPoints((prev) => prev.slice(0, -1));
setIsDrawingROI(true);
}, [isSearching, setIsDrawingROI, setPolygonPoints, polygonPoints.length]);
const resetPolygon = useCallback(() => {
if (polygonPoints.length === 0 || isSearching) {
return;
}
setPolygonPoints([]);
setIsDrawingROI(true);
}, [isSearching, polygonPoints.length, setIsDrawingROI, setPolygonPoints]);
const imageSize = useMemo(() => {
if (!containerWidth || !containerHeight || !cameraConfig) {
return { width: 0, height: 0 };
}
const cameraAspectRatio =
cameraConfig.detect.width / cameraConfig.detect.height;
const availableAspectRatio = containerWidth / containerHeight;
if (availableAspectRatio >= cameraAspectRatio) {
return {
width: containerHeight * cameraAspectRatio,
height: containerHeight,
};
}
return {
width: containerWidth,
height: containerWidth / cameraAspectRatio,
};
}, [containerWidth, containerHeight, cameraConfig]);
useEffect(() => {
setImageLoaded(false);
}, [selectedCamera]);
const Overlay = isDesktop ? Dialog : Drawer;
const Content = isDesktop ? DialogContent : DrawerContent;
return (
<Overlay open={open} onOpenChange={onOpenChange}>
<Content
{...(isDesktop
? {
onOpenAutoFocus: (event: Event) => event.preventDefault(),
}
: {})}
className={cn(
isDesktop
? "scrollbar-container max-h-[90dvh] overflow-y-auto sm:max-w-[75%]"
: "flex max-h-[90dvh] flex-col overflow-hidden rounded-lg pb-4",
)}
>
<div
className={cn(
!isDesktop &&
"scrollbar-container flex min-h-0 w-full flex-col gap-4 overflow-y-auto overflow-x-hidden px-4",
)}
>
<DialogHeader>
<DialogTitle className="mt-4 md:mt-auto">
{t("dialog.title")}
</DialogTitle>
<p className="my-1 text-sm text-muted-foreground">
{t("description")}
</p>
</DialogHeader>
<DialogDescription className="hidden" />
<div
className={cn(
"flex gap-4",
isDesktop ? "mt-4 flex-row" : "flex-col landscape:flex-row",
)}
>
<div
className={cn("flex flex-1 flex-col", !isDesktop && "min-w-0")}
>
{(!cameraLocked || !selectedCamera) && (
<div className="flex items-end justify-between gap-2">
<div className="mt-2 md:min-w-64">
<div className="grid gap-2">
<Label htmlFor="motion-search-camera">
{t("dialog.cameraLabel")}
</Label>
<Select
value={selectedCamera ?? undefined}
onValueChange={(value) => onCameraSelect(value)}
>
<SelectTrigger id="motion-search-camera">
<SelectValue placeholder={t("selectCamera")} />
</SelectTrigger>
<SelectContent>
{cameras.map((camera) => (
<SelectItem
key={camera}
value={camera}
className="cursor-pointer hover:bg-accent hover:text-accent-foreground"
>
<CameraNameLabel camera={camera} />
</SelectItem>
))}
</SelectContent>
</Select>
</div>
</div>
</div>
)}
<TransformWrapper minScale={1.0} wheel={{ smoothStep: 0.005 }}>
<div className="flex flex-col gap-2">
<TransformComponent
wrapperStyle={{
width: "100%",
height: isDesktop ? "100%" : "auto",
}}
contentStyle={{
position: "relative",
width: "100%",
height: "100%",
}}
>
<div
ref={containerRef}
className="relative flex w-full items-center justify-center overflow-hidden rounded-lg border bg-secondary"
style={{ aspectRatio: "16 / 9" }}
>
{selectedCamera && cameraConfig && imageSize.width > 0 ? (
<div
className="relative"
style={{
width: imageSize.width,
height: imageSize.height,
}}
>
<img
alt={t("dialog.previewAlt", {
camera: selectedCamera,
})}
src={`${apiHost}api/${selectedCamera}/latest.jpg?h=500`}
className="h-full w-full object-contain"
onLoad={() => setImageLoaded(true)}
/>
{!imageLoaded && (
<div className="absolute inset-0 flex items-center justify-center">
<ActivityIndicator className="h-8 w-8" />
</div>
)}
<MotionSearchROICanvas
camera={selectedCamera}
width={cameraConfig.detect.width}
height={cameraConfig.detect.height}
polygonPoints={polygonPoints}
setPolygonPoints={setPolygonPoints}
isDrawing={isDrawingROI}
setIsDrawing={setIsDrawingROI}
isInteractive={true}
/>
</div>
) : (
<div className="flex h-full w-full items-center justify-center text-sm text-muted-foreground">
{t("selectCamera")}
</div>
)}
</div>
</TransformComponent>
</div>
</TransformWrapper>
{selectedCamera && (
<div className="my-2 flex w-full flex-row justify-between rounded-md bg-background_alt p-2 text-sm">
<div className="my-1 inline-flex items-center">
{t("polygonControls.points", {
count: polygonPoints.length,
})}
{polygonClosed && <FaCheckCircle className="ml-2 size-5" />}
</div>
<div className="flex flex-row justify-center gap-2">
<Tooltip>
<TooltipTrigger asChild>
<Button
variant="default"
className="size-6 rounded-md p-1"
aria-label={t("polygonControls.undo")}
disabled={polygonPoints.length === 0 || isSearching}
onClick={undoPolygonPoint}
>
<MdUndo className="text-secondary-foreground" />
</Button>
</TooltipTrigger>
<TooltipContent>
{t("polygonControls.undo")}
</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant="default"
className="size-6 rounded-md p-1"
aria-label={t("polygonControls.reset")}
disabled={polygonPoints.length === 0 || isSearching}
onClick={resetPolygon}
>
<MdOutlineRestartAlt className="text-secondary-foreground" />
</Button>
</TooltipTrigger>
<TooltipContent>
{t("polygonControls.reset")}
</TooltipContent>
</Tooltip>
</div>
</div>
)}
</div>
<div
className={cn(
"flex w-full flex-col gap-4 space-y-4 lg:w-[340px]",
isMobile && "landscape:w-[40%] landscape:flex-shrink-0",
)}
>
<div className="grid gap-3">
<h4 className="mb-4 font-medium leading-none">
{t("settings.title")}
</h4>
<div className="grid gap-4 space-y-2">
<div className="grid gap-2">
<Label htmlFor="threshold">{t("settings.threshold")}</Label>
<div className="flex items-center gap-2">
<Slider
id="threshold"
min={1}
max={255}
step={1}
value={[threshold]}
onValueChange={([value]) => setThreshold(value)}
/>
<span className="w-12 text-sm">{threshold}</span>
</div>
<p className="text-xs text-muted-foreground">
{t("settings.thresholdDesc")}
</p>
</div>
<div className="grid gap-2">
<Label htmlFor="minArea">{t("settings.minArea")}</Label>
<div className="flex items-center gap-2">
<Slider
id="minArea"
min={1}
max={100}
step={1}
value={[minArea]}
onValueChange={([value]) => setMinArea(value)}
/>
<span className="w-12 text-sm">{minArea}%</span>
</div>
<p className="text-xs text-muted-foreground">
{t("settings.minAreaDesc")}
</p>
</div>
<div className="grid gap-2">
<Label htmlFor="frameSkip">{t("settings.frameSkip")}</Label>
<div className="flex items-center gap-2">
<Slider
id="frameSkip"
min={1}
max={60}
step={1}
value={[frameSkip]}
onValueChange={([value]) => setFrameSkip(value)}
/>
<span className="w-12 text-sm">{frameSkip}</span>
</div>
<p className="text-xs text-muted-foreground">
{t("settings.frameSkipDesc")}
</p>
</div>
<div className="grid gap-2">
<div className="flex items-center justify-between gap-2">
<Label htmlFor="parallelMode">
{t("settings.parallelMode")}
</Label>
<Switch
id="parallelMode"
checked={parallelMode}
onCheckedChange={setParallelMode}
/>
</div>
<p className="text-xs text-muted-foreground">
{t("settings.parallelModeDesc")}
</p>
</div>
<div className="grid gap-2">
<Label htmlFor="maxResults">
{t("settings.maxResults")}
</Label>
<div className="flex items-center gap-2">
<Slider
id="maxResults"
min={1}
max={200}
step={1}
value={[maxResults]}
onValueChange={([value]) => setMaxResults(value)}
/>
<span className="w-12 text-sm">{maxResults}</span>
</div>
<p className="text-xs text-muted-foreground">
{t("settings.maxResultsDesc")}
</p>
</div>
</div>
</div>
<SearchRangeSelector
range={searchRange}
setRange={setSearchRange}
defaultRange={defaultRange}
timeFormat={config.ui?.time_format}
timezone={timezone}
/>
<Button
className="w-full"
variant="select"
onClick={onStartSearch}
disabled={!canStartSearch || isSearching}
>
{t("startSearch")}
</Button>
</div>
</div>
</div>
</Content>
</Overlay>
);
}
// Props for the start/end time-range picker used by the motion search dialog.
type SearchRangeSelectorProps = {
  // Currently selected range (unix seconds); undefined until the user edits it.
  range?: TimeRange;
  // Parent-owned state setter for the selected range.
  setRange: React.Dispatch<React.SetStateAction<TimeRange | undefined>>;
  // Fallback range used for display while `range` is undefined.
  defaultRange: TimeRange;
  // Clock display preference; anything other than "24hour" renders 12-hour here.
  timeFormat?: "browser" | "12hour" | "24hour";
  // Optional timezone name used to offset displayed times from browser-local time.
  timezone?: string;
};
// Zero-pad a single clock field to two digits.
function padClock(value: number): string {
  return value.toString().padStart(2, "0");
}

// Format a unix timestamp (seconds) as "HH:MM:SS" in the browser's local
// time, suitable as the value of an <input type="time">.
function toClockString(unixSeconds: number): string {
  const date = new Date(unixSeconds * 1000);
  return `${padClock(date.getHours())}:${padClock(date.getMinutes())}:${padClock(date.getSeconds())}`;
}

// Apply an "HH:MM[:SS]" value from a time input onto the date portion of
// `baseUnixSeconds` and return the resulting unix timestamp (seconds).
// iOS time inputs omit the seconds field, so ":00" is assumed there.
function applyClockString(clock: string, baseUnixSeconds: number): number {
  const [hour, minute, second] = isIOS
    ? [...clock.split(":"), "00"]
    : clock.split(":");
  const updated = new Date(baseUnixSeconds * 1000);
  updated.setHours(
    parseInt(hour),
    parseInt(minute),
    parseInt(second ?? "0"),
    0,
  );
  return updated.getTime() / 1000;
}

// Start/end time-range picker for motion search. Renders two popover
// calendars with time inputs; the chosen values are written to `range`
// (unix seconds) via the parent-owned setter, falling back to
// `defaultRange` until the user makes a selection.
function SearchRangeSelector({
  range,
  setRange,
  defaultRange,
  timeFormat,
  timezone,
}: SearchRangeSelectorProps) {
  const { t } = useTranslation(["views/motionSearch", "common"]);
  const [startOpen, setStartOpen] = useState(false);
  const [endOpen, setEndOpen] = useState(false);
  // Offset (minutes from UTC) of the configured timezone, if any.
  const timezoneOffset = useMemo(
    () =>
      timezone ? Math.round(getUTCOffset(new Date(), timezone)) : undefined,
    [timezone],
  );
  // Offset (minutes from UTC) of the browser's local timezone.
  const localTimeOffset = useMemo(
    () =>
      Math.round(
        getUTCOffset(
          new Date(),
          Intl.DateTimeFormat().resolvedOptions().timeZone,
        ),
      ),
    [],
  );
  // NOTE(review): the times below are shifted by the camera-vs-local offset
  // for display, and edited values are written back still carrying that
  // shift — confirm downstream consumers expect shifted timestamps.
  const startTime = useMemo(() => {
    let time = range?.after ?? defaultRange.after;
    if (timezoneOffset !== undefined) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, defaultRange, timezoneOffset, localTimeOffset]);
  const endTime = useMemo(() => {
    let time = range?.before ?? defaultRange.before;
    if (timezoneOffset !== undefined) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, defaultRange, timezoneOffset, localTimeOffset]);
  // "browser" and "12hour" both fall through to the 12-hour format string.
  const formattedStart = useFormattedTimestamp(
    startTime,
    timeFormat === "24hour"
      ? t("time.formattedTimestamp.24hour", { ns: "common" })
      : t("time.formattedTimestamp.12hour", { ns: "common" }),
  );
  const formattedEnd = useFormattedTimestamp(
    endTime,
    timeFormat === "24hour"
      ? t("time.formattedTimestamp.24hour", { ns: "common" })
      : t("time.formattedTimestamp.12hour", { ns: "common" }),
  );
  // Values for the native <input type="time"> fields.
  const startClock = useMemo(() => toClockString(startTime), [startTime]);
  const endClock = useMemo(() => toClockString(endTime), [endTime]);
  return (
    <div className="grid gap-2">
      <Label>{t("timeRange.title")}</Label>
      <div className="flex items-center rounded-lg bg-secondary px-2 py-1 text-secondary-foreground">
        <FaCalendarAlt />
        <div className="flex flex-wrap items-center">
          <Popover
            open={startOpen}
            onOpenChange={(open) => {
              if (!open) {
                setStartOpen(false);
              }
            }}
            modal={false}
          >
            <PopoverTrigger asChild>
              <Button
                className="text-primary"
                aria-label={t("timeRange.start")}
                variant={startOpen ? "select" : "default"}
                size="sm"
                onClick={() => {
                  setStartOpen(true);
                  setEndOpen(false);
                }}
              >
                {formattedStart}
              </Button>
            </PopoverTrigger>
            <PopoverContent
              disablePortal
              className="flex flex-col items-center"
            >
              <TimezoneAwareCalendar
                timezone={timezone}
                selectedDay={new Date(startTime * 1000)}
                onSelect={(day) => {
                  if (!day) {
                    return;
                  }
                  // +1s nudges past exact midnight — TODO confirm intent
                  setRange({
                    before: endTime,
                    after: day.getTime() / 1000 + 1,
                  });
                }}
              />
              <SelectSeparator className="bg-secondary" />
              <input
                className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
                id="startTime"
                type="time"
                value={startClock}
                step={isIOS ? "60" : "1"}
                onChange={(e) =>
                  setRange({
                    before: endTime,
                    after: applyClockString(e.target.value, startTime),
                  })
                }
              />
            </PopoverContent>
          </Popover>
          <FaArrowRight className="size-4 text-primary" />
          <Popover
            open={endOpen}
            onOpenChange={(open) => {
              if (!open) {
                setEndOpen(false);
              }
            }}
            modal={false}
          >
            <PopoverTrigger asChild>
              <Button
                className="text-primary"
                aria-label={t("timeRange.end")}
                variant={endOpen ? "select" : "default"}
                size="sm"
                onClick={() => {
                  setEndOpen(true);
                  setStartOpen(false);
                }}
              >
                {formattedEnd}
              </Button>
            </PopoverTrigger>
            <PopoverContent
              disablePortal
              className="flex flex-col items-center"
            >
              <TimezoneAwareCalendar
                timezone={timezone}
                selectedDay={new Date(endTime * 1000)}
                onSelect={(day) => {
                  if (!day) {
                    return;
                  }
                  setRange({
                    after: startTime,
                    before: day.getTime() / 1000,
                  });
                }}
              />
              <SelectSeparator className="bg-secondary" />
              <input
                className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
                id="endTime"
                type="time"
                value={endClock}
                step={isIOS ? "60" : "1"}
                onChange={(e) =>
                  setRange({
                    before: applyClockString(e.target.value, endTime),
                    after: startTime,
                  })
                }
              />
            </PopoverContent>
          </Popover>
        </div>
      </div>
    </div>
  );
}

View File

@ -0,0 +1,398 @@
import { useCallback, useMemo, useRef } from "react";
import { Stage, Layer, Line, Circle, Image } from "react-konva";
import Konva from "konva";
import type { KonvaEventObject } from "konva/lib/Node";
import { flattenPoints } from "@/utils/canvasUtil";
import { cn } from "@/lib/utils";
import { useResizeObserver } from "@/hooks/resize-observer";
// Props for the polygonal ROI drawing canvas overlaid on a camera frame.
type MotionSearchROICanvasProps = {
  // Camera name; part of the public props but not read by this component.
  camera: string;
  // Source frame dimensions (detect resolution) used for aspect-ratio fitting.
  width: number;
  height: number;
  // Polygon vertices as normalized [x, y] pairs in the 0-1 range.
  polygonPoints: number[][];
  setPolygonPoints: React.Dispatch<React.SetStateAction<number[][]>>;
  // True while the user is still placing vertices (polygon not yet closed).
  isDrawing: boolean;
  setIsDrawing: React.Dispatch<React.SetStateAction<boolean>>;
  // When false, all pointer interaction is disabled (read-only overlay).
  isInteractive?: boolean;
  // Per-cell motion levels keyed by grid-cell index as a string ("0".."255").
  motionHeatmap?: Record<string, number> | null;
  // Whether to render the heatmap overlay beneath the polygon.
  showMotionHeatmap?: boolean;
};
// Interactive Konva canvas for drawing a polygonal region of interest over a
// camera frame. Vertices are stored normalized (0-1) relative to the source
// frame so they are resolution-independent; the canvas letterboxes the frame
// into the available stage area and converts between the two coordinate
// spaces. Optionally renders a 16x16 motion heatmap beneath the polygon.
export default function MotionSearchROICanvas({
  width,
  height,
  polygonPoints,
  setPolygonPoints,
  isDrawing,
  setIsDrawing,
  isInteractive = true,
  motionHeatmap,
  showMotionHeatmap = false,
}: MotionSearchROICanvasProps) {
  const containerRef = useRef<HTMLDivElement>(null);
  const stageRef = useRef<Konva.Stage>(null);
  const [{ width: containerWidth, height: containerHeight }] =
    useResizeObserver(containerRef);
  // Integer stage dimensions derived from the observed container size.
  const stageSize = useMemo(
    () => ({
      width: containerWidth > 0 ? Math.ceil(containerWidth) : 0,
      height: containerHeight > 0 ? Math.ceil(containerHeight) : 0,
    }),
    [containerHeight, containerWidth],
  );
  // Letterboxed rectangle (stage coordinates) where the camera frame sits:
  // the frame is scaled to fit while preserving aspect ratio and centered
  // along whichever axis has slack.
  const videoRect = useMemo(() => {
    const stageWidth = stageSize.width;
    const stageHeight = stageSize.height;
    // Guard against zero source dimensions to avoid division by zero.
    const sourceWidth = width > 0 ? width : 1;
    const sourceHeight = height > 0 ? height : 1;
    if (stageWidth <= 0 || stageHeight <= 0) {
      return { x: 0, y: 0, width: 0, height: 0 };
    }
    const stageAspect = stageWidth / stageHeight;
    const sourceAspect = sourceWidth / sourceHeight;
    if (stageAspect > sourceAspect) {
      // Stage is wider than the frame: pillarbox (center horizontally).
      const fittedHeight = stageHeight;
      const fittedWidth = fittedHeight * sourceAspect;
      return {
        x: (stageWidth - fittedWidth) / 2,
        y: 0,
        width: fittedWidth,
        height: fittedHeight,
      };
    }
    // Stage is taller than the frame: letterbox (center vertically).
    const fittedWidth = stageWidth;
    const fittedHeight = fittedWidth / sourceAspect;
    return {
      x: 0,
      y: (stageHeight - fittedHeight) / 2,
      width: fittedWidth,
      height: fittedHeight,
    };
  }, [height, stageSize.height, stageSize.width, width]);
  // Convert normalized points to stage coordinates
  const scaledPoints = useMemo(() => {
    return polygonPoints.map((point) => [
      videoRect.x + point[0] * videoRect.width,
      videoRect.y + point[1] * videoRect.height,
    ]);
  }, [
    polygonPoints,
    videoRect.height,
    videoRect.width,
    videoRect.x,
    videoRect.y,
  ]);
  const flattenedPoints = useMemo(
    () => flattenPoints(scaledPoints),
    [scaledPoints],
  );
  // Builds a 16x16 offscreen canvas colorizing per-cell motion levels with a
  // blue->cyan->green->yellow->red gradient; levels are normalized against
  // the maximum cell. Returns null when the overlay is hidden, there is no
  // motion, the 2D context is unavailable, or the layout is not yet known.
  const heatmapOverlayCanvas = useMemo(() => {
    if (
      !showMotionHeatmap ||
      !motionHeatmap ||
      videoRect.width === 0 ||
      videoRect.height === 0
    ) {
      return null;
    }
    const gridSize = 16;
    // NOTE(review): normalization scans every key in the map, but rendering
    // reads only indices 0..gridSize^2-1 — assumes keys are cell indices.
    const heatmapLevels = Object.values(motionHeatmap)
      .map((value) => Number(value))
      .filter((value) => Number.isFinite(value) && value > 0);
    const maxHeatmapLevel =
      heatmapLevels.length > 0 ? Math.max(...heatmapLevels) : 0;
    if (maxHeatmapLevel <= 0) {
      return null;
    }
    const maskCanvas = document.createElement("canvas");
    maskCanvas.width = gridSize;
    maskCanvas.height = gridSize;
    const maskContext = maskCanvas.getContext("2d");
    if (!maskContext) {
      return null;
    }
    const imageData = maskContext.createImageData(gridSize, gridSize);
    // Gradient stops (t in 0-1) for mapping normalized level to RGB.
    const heatmapStops = [
      { t: 0, r: 0, g: 0, b: 255 },
      { t: 0.25, r: 0, g: 255, b: 255 },
      { t: 0.5, r: 0, g: 255, b: 0 },
      { t: 0.75, r: 255, g: 255, b: 0 },
      { t: 1, r: 255, g: 0, b: 0 },
    ];
    // Linearly interpolate between the two gradient stops bracketing `value`.
    const getHeatmapColor = (value: number) => {
      const clampedValue = Math.min(1, Math.max(0, value));
      const upperIndex = heatmapStops.findIndex(
        (stop) => stop.t >= clampedValue,
      );
      if (upperIndex <= 0) {
        return heatmapStops[0];
      }
      const lower = heatmapStops[upperIndex - 1];
      const upper = heatmapStops[upperIndex];
      const range = upper.t - lower.t;
      const blend = range > 0 ? (clampedValue - lower.t) / range : 0;
      return {
        r: Math.round(lower.r + (upper.r - lower.r) * blend),
        g: Math.round(lower.g + (upper.g - lower.g) * blend),
        b: Math.round(lower.b + (upper.b - lower.b) * blend),
      };
    };
    // One RGBA pixel per grid cell; cells with no motion stay transparent.
    for (let index = 0; index < gridSize ** 2; index++) {
      const level = Number(motionHeatmap[index.toString()] ?? 0);
      const normalizedLevel =
        level > 0 ? Math.min(1, Math.max(0, level / maxHeatmapLevel)) : 0;
      // Alpha scales with motion level, clamped to a visible 0.1-0.95 band.
      const alpha =
        level > 0
          ? Math.min(0.95, Math.max(0.1, 0.15 + normalizedLevel * 0.5))
          : 0;
      const color = getHeatmapColor(normalizedLevel);
      const pixelOffset = index * 4;
      imageData.data[pixelOffset] = color.r;
      imageData.data[pixelOffset + 1] = color.g;
      imageData.data[pixelOffset + 2] = color.b;
      imageData.data[pixelOffset + 3] = Math.round(alpha * 255);
    }
    maskContext.putImageData(imageData, 0, 0);
    return maskCanvas;
  }, [motionHeatmap, showMotionHeatmap, videoRect.height, videoRect.width]);
  // Handle mouse click to add point
  const handleMouseDown = useCallback(
    (e: KonvaEventObject<MouseEvent | TouchEvent>) => {
      if (!isInteractive || !isDrawing) return;
      if (videoRect.width <= 0 || videoRect.height <= 0) return;
      const stage = e.target.getStage();
      if (!stage) return;
      const mousePos = stage.getPointerPosition();
      if (!mousePos) return;
      const intersection = stage.getIntersection(mousePos);
      // If clicking on first point and we have at least 3 points, close the polygon
      if (polygonPoints.length >= 3 && intersection?.name() === "point-0") {
        setIsDrawing(false);
        return;
      }
      // Only add point if not clicking on an existing point
      if (intersection?.getClassName() !== "Circle") {
        // Clamp the click to the fitted video area before normalizing.
        const clampedX = Math.min(
          Math.max(mousePos.x, videoRect.x),
          videoRect.x + videoRect.width,
        );
        const clampedY = Math.min(
          Math.max(mousePos.y, videoRect.y),
          videoRect.y + videoRect.height,
        );
        // Convert to normalized coordinates (0-1)
        const normalizedX = (clampedX - videoRect.x) / videoRect.width;
        const normalizedY = (clampedY - videoRect.y) / videoRect.height;
        setPolygonPoints([...polygonPoints, [normalizedX, normalizedY]]);
      }
    },
    [
      isDrawing,
      polygonPoints,
      setPolygonPoints,
      setIsDrawing,
      isInteractive,
      videoRect.height,
      videoRect.width,
      videoRect.x,
      videoRect.y,
    ],
  );
  // Handle point drag
  const handlePointDragMove = useCallback(
    (e: KonvaEventObject<MouseEvent | TouchEvent>, index: number) => {
      if (!isInteractive) return;
      const stage = e.target.getStage();
      if (!stage) return;
      const pos = { x: e.target.x(), y: e.target.y() };
      // Constrain to fitted video boundaries
      pos.x = Math.max(
        videoRect.x,
        Math.min(pos.x, videoRect.x + videoRect.width),
      );
      pos.y = Math.max(
        videoRect.y,
        Math.min(pos.y, videoRect.y + videoRect.height),
      );
      // Convert to normalized coordinates
      const normalizedX = (pos.x - videoRect.x) / videoRect.width;
      const normalizedY = (pos.y - videoRect.y) / videoRect.height;
      const newPoints = [...polygonPoints];
      newPoints[index] = [normalizedX, normalizedY];
      setPolygonPoints(newPoints);
    },
    [
      polygonPoints,
      setPolygonPoints,
      isInteractive,
      videoRect.height,
      videoRect.width,
      videoRect.x,
      videoRect.y,
    ],
  );
  // Handle right-click to delete point
  const handleContextMenu = useCallback(
    (e: KonvaEventObject<MouseEvent>, index: number) => {
      if (!isInteractive) return;
      e.evt.preventDefault();
      if (polygonPoints.length <= 3 && !isDrawing) {
        // Don't delete if we have a closed polygon with minimum points
        return;
      }
      const newPoints = polygonPoints.filter((_, i) => i !== index);
      setPolygonPoints(newPoints);
      // If we deleted enough points, go back to drawing mode
      if (newPoints.length < 3) {
        setIsDrawing(true);
      }
    },
    [polygonPoints, isDrawing, setPolygonPoints, setIsDrawing, isInteractive],
  );
  // Handle mouse hover on first point
  // Enlarging the first vertex signals that clicking it will close the polygon.
  const handleMouseOverPoint = useCallback(
    (e: KonvaEventObject<MouseEvent | TouchEvent>, index: number) => {
      if (!isInteractive) return;
      if (!isDrawing || polygonPoints.length < 3 || index !== 0) return;
      e.target.scale({ x: 2, y: 2 });
    },
    [isDrawing, isInteractive, polygonPoints.length],
  );
  // Restore the first vertex to its normal size when the pointer leaves it.
  const handleMouseOutPoint = useCallback(
    (e: KonvaEventObject<MouseEvent | TouchEvent>, index: number) => {
      if (!isInteractive) return;
      if (index === 0) {
        e.target.scale({ x: 1, y: 1 });
      }
    },
    [isInteractive],
  );
  const vertexRadius = 6;
  const polygonColorString = "rgba(66, 135, 245, 0.8)";
  const polygonFillColor = "rgba(66, 135, 245, 0.2)";
  return (
    <div
      ref={containerRef}
      className={cn(
        "absolute inset-0 z-10",
        isInteractive ? "pointer-events-auto" : "pointer-events-none",
      )}
      style={{ cursor: isDrawing ? "crosshair" : "default" }}
    >
      {stageSize.width > 0 && stageSize.height > 0 && (
        <Stage
          ref={stageRef}
          width={stageSize.width}
          height={stageSize.height}
          onMouseDown={handleMouseDown}
          onTouchStart={handleMouseDown}
          onContextMenu={(e) => e.evt.preventDefault()}
          className="absolute inset-0"
        >
          <Layer>
            {/* Segment heatmap overlay */}
            {heatmapOverlayCanvas && (
              <Image
                image={heatmapOverlayCanvas}
                x={videoRect.x}
                y={videoRect.y}
                width={videoRect.width}
                height={videoRect.height}
                listening={false}
              />
            )}
            {/* Polygon outline */}
            {scaledPoints.length > 0 && (
              <Line
                points={flattenedPoints}
                stroke={polygonColorString}
                strokeWidth={2}
                closed={!isDrawing && scaledPoints.length >= 3}
                fill={
                  !isDrawing && scaledPoints.length >= 3
                    ? polygonFillColor
                    : undefined
                }
              />
            )}
            {/* Draw line from last point to cursor when drawing */}
            {isDrawing && scaledPoints.length > 0 && (
              <Line
                points={flattenedPoints}
                stroke={polygonColorString}
                strokeWidth={2}
                dash={[5, 5]}
              />
            )}
            {/* Vertex points */}
            {scaledPoints.map((point, index) => (
              <Circle
                key={index}
                name={`point-${index}`}
                x={point[0]}
                y={point[1]}
                radius={vertexRadius}
                fill={polygonColorString}
                stroke="white"
                strokeWidth={2}
                draggable={!isDrawing && isInteractive}
                onDragMove={(e) => handlePointDragMove(e, index)}
                onMouseOver={(e) => handleMouseOverPoint(e, index)}
                onMouseOut={(e) => handleMouseOutPoint(e, index)}
                onContextMenu={(e) => handleContextMenu(e, index)}
              />
            ))}
          </Layer>
        </Stage>
      )}
    </div>
  );
}

File diff suppressed because it is too large Load Diff