From 2babfd2ec9081825d051cf1f3dc34382c40189ba Mon Sep 17 00:00:00 2001 From: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com> Date: Thu, 5 Mar 2026 17:53:48 -0600 Subject: [PATCH] Improve motion review and add motion search (#22253) * implement motion search and motion previews * tweaks * fix merge issue * fix copilot instructions --- frigate/api/defs/tags.py | 1 + frigate/api/fastapi_app.py | 2 + frigate/api/motion_search.py | 292 ++++ frigate/api/record.py | 1 + frigate/app.py | 4 + frigate/jobs/motion_search.py | 864 ++++++++++ frigate/models.py | 1 + frigate/record/maintainer.py | 68 +- migrations/035_add_motion_heatmap.py | 34 + web/package-lock.json | 63 + web/package.json | 1 + web/public/locales/en/views/events.json | 22 +- web/public/locales/en/views/motionSearch.json | 75 + web/src/components/player/HlsVideoPlayer.tsx | 286 ++-- .../player/dynamic/DynamicVideoPlayer.tsx | 12 +- .../timeline/MotionReviewTimeline.tsx | 11 + .../components/timeline/ReviewTimeline.tsx | 7 +- web/src/components/ui/progress.tsx | 26 + web/src/hooks/use-camera-activity.ts | 129 +- web/src/pages/Events.tsx | 95 +- web/src/pages/MotionSearch.tsx | 112 ++ web/src/types/motionSearch.ts | 46 + web/src/types/record.ts | 1 + web/src/views/events/EventView.tsx | 825 ++++++--- web/src/views/events/MotionPreviewsPane.tsx | 898 ++++++++++ .../motion-search/MotionSearchDialog.tsx | 708 ++++++++ .../motion-search/MotionSearchROICanvas.tsx | 398 +++++ .../views/motion-search/MotionSearchView.tsx | 1491 +++++++++++++++++ 28 files changed, 6089 insertions(+), 384 deletions(-) create mode 100644 frigate/api/motion_search.py create mode 100644 frigate/jobs/motion_search.py create mode 100644 migrations/035_add_motion_heatmap.py create mode 100644 web/public/locales/en/views/motionSearch.json create mode 100644 web/src/components/ui/progress.tsx create mode 100644 web/src/pages/MotionSearch.tsx create mode 100644 web/src/types/motionSearch.ts create mode 100644 
web/src/views/events/MotionPreviewsPane.tsx create mode 100644 web/src/views/motion-search/MotionSearchDialog.tsx create mode 100644 web/src/views/motion-search/MotionSearchROICanvas.tsx create mode 100644 web/src/views/motion-search/MotionSearchView.tsx diff --git a/frigate/api/defs/tags.py b/frigate/api/defs/tags.py index 3aaaa59ef..c6f37b67f 100644 --- a/frigate/api/defs/tags.py +++ b/frigate/api/defs/tags.py @@ -11,6 +11,7 @@ class Tags(Enum): classification = "Classification" logs = "Logs" media = "Media" + motion_search = "Motion Search" notifications = "Notifications" preview = "Preview" recordings = "Recordings" diff --git a/frigate/api/fastapi_app.py b/frigate/api/fastapi_app.py index 1e8c408e6..0a731bcee 100644 --- a/frigate/api/fastapi_app.py +++ b/frigate/api/fastapi_app.py @@ -22,6 +22,7 @@ from frigate.api import ( event, export, media, + motion_search, notification, preview, record, @@ -135,6 +136,7 @@ def create_fastapi_app( app.include_router(export.router) app.include_router(event.router) app.include_router(media.router) + app.include_router(motion_search.router) app.include_router(record.router) app.include_router(debug_replay.router) # App Properties diff --git a/frigate/api/motion_search.py b/frigate/api/motion_search.py new file mode 100644 index 000000000..09bf8026d --- /dev/null +++ b/frigate/api/motion_search.py @@ -0,0 +1,292 @@ +"""Motion search API for detecting changes within a region of interest.""" + +import logging +from typing import Any, List, Optional + +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse +from pydantic import BaseModel, Field + +from frigate.api.auth import require_camera_access +from frigate.api.defs.tags import Tags +from frigate.jobs.motion_search import ( + cancel_motion_search_job, + get_motion_search_job, + start_motion_search_job, +) +from frigate.types import JobStatusTypesEnum + +logger = logging.getLogger(__name__) + +router = APIRouter(tags=[Tags.motion_search]) 
+ + +class MotionSearchRequest(BaseModel): + """Request body for motion search.""" + + start_time: float = Field(description="Start timestamp for the search range") + end_time: float = Field(description="End timestamp for the search range") + polygon_points: List[List[float]] = Field( + description="List of [x, y] normalized coordinates (0-1) defining the ROI polygon" + ) + threshold: int = Field( + default=30, + ge=1, + le=255, + description="Pixel difference threshold (1-255)", + ) + min_area: float = Field( + default=5.0, + ge=0.1, + le=100.0, + description="Minimum change area as a percentage of the ROI", + ) + frame_skip: int = Field( + default=5, + ge=1, + le=30, + description="Process every Nth frame (1=all frames, 5=every 5th frame)", + ) + parallel: bool = Field( + default=False, + description="Enable parallel scanning across segments", + ) + max_results: int = Field( + default=25, + ge=1, + le=200, + description="Maximum number of search results to return", + ) + + +class MotionSearchResult(BaseModel): + """A single search result with timestamp and change info.""" + + timestamp: float = Field(description="Timestamp where change was detected") + change_percentage: float = Field(description="Percentage of ROI area that changed") + + +class MotionSearchMetricsResponse(BaseModel): + """Metrics collected during motion search execution.""" + + segments_scanned: int = 0 + segments_processed: int = 0 + metadata_inactive_segments: int = 0 + heatmap_roi_skip_segments: int = 0 + fallback_full_range_segments: int = 0 + frames_decoded: int = 0 + wall_time_seconds: float = 0.0 + segments_with_errors: int = 0 + + +class MotionSearchStartResponse(BaseModel): + """Response when motion search job starts.""" + + success: bool + message: str + job_id: str + + +class MotionSearchStatusResponse(BaseModel): + """Response containing job status and results.""" + + success: bool + message: str + status: str # "queued", "running", "success", "failed", or "cancelled" + results: 
@router.post(
    "/{camera_name}/search/motion",
    response_model=MotionSearchStartResponse,
    dependencies=[Depends(require_camera_access)],
    summary="Start motion search job",
    description="""Starts an asynchronous search for significant motion changes within
    a user-defined Region of Interest (ROI) over a specified time range. Returns a job_id
    that can be used to poll for results.""",
)
async def start_motion_search(
    request: Request,
    camera_name: str,
    body: MotionSearchRequest,
):
    """Start an async motion search job.

    Validates the camera name, the ROI polygon (at least 3 vertices), and
    the time range, then hands the work to the background job runner.

    Returns:
        200 with a job_id on success, 404 for an unknown camera, 400 for
        an invalid polygon or time range.
    """
    config = request.app.frigate_config

    if camera_name not in config.cameras:
        return JSONResponse(
            content={"success": False, "message": f"Camera {camera_name} not found"},
            status_code=404,
        )

    # Validate polygon has at least 3 points (anything less is not an area)
    if len(body.polygon_points) < 3:
        return JSONResponse(
            content={
                "success": False,
                "message": "Polygon must have at least 3 points",
            },
            status_code=400,
        )

    # Validate time range
    if body.start_time >= body.end_time:
        return JSONResponse(
            content={
                "success": False,
                "message": "Start time must be before end time",
            },
            status_code=400,
        )

    # Start the job using the jobs module; the runner thread broadcasts
    # progress over IPC while this endpoint returns immediately.
    job_id = start_motion_search_job(
        config=config,
        camera_name=camera_name,
        start_time=body.start_time,
        end_time=body.end_time,
        polygon_points=body.polygon_points,
        threshold=body.threshold,
        min_area=body.min_area,
        frame_skip=body.frame_skip,
        parallel=body.parallel,
        max_results=body.max_results,
    )

    return JSONResponse(
        content={
            "success": True,
            "message": "Search job started",
            "job_id": job_id,
        }
    )


@router.get(
    "/{camera_name}/search/motion/{job_id}",
    response_model=MotionSearchStatusResponse,
    dependencies=[Depends(require_camera_access)],
    summary="Get motion search job status",
    description="Returns the status and results (if complete) of a motion search job.",
)
async def get_motion_search_status_endpoint(
    request: Request,
    camera_name: str,
    job_id: str,
):
    """Get the status of a motion search job.

    While the job is running (or after cancellation) any partial results
    streamed by the runner are included so the UI can render incrementally.
    """
    config = request.app.frigate_config

    if camera_name not in config.cameras:
        return JSONResponse(
            content={"success": False, "message": f"Camera {camera_name} not found"},
            status_code=404,
        )

    job = get_motion_search_job(job_id)
    if not job:
        return JSONResponse(
            content={"success": False, "message": "Job not found"},
            status_code=404,
        )

    api_status = job.status

    # Build response content
    response_content: dict[str, Any] = {
        "success": api_status != JobStatusTypesEnum.failed,
        "status": api_status,
    }

    if api_status == JobStatusTypesEnum.failed:
        response_content["message"] = job.error_message or "Search failed"
        response_content["error_message"] = job.error_message
    elif api_status == JobStatusTypesEnum.cancelled:
        response_content["message"] = "Search cancelled"
        response_content["total_frames_processed"] = job.total_frames_processed
        # Consistency fix: a cancelled job may still have streamed partial
        # results before the cancel landed; return them like the running
        # branch does instead of silently dropping them.
        if job.results:
            response_content["results"] = job.results.get("results", [])
            response_content["total_frames_processed"] = job.results.get(
                "total_frames_processed", job.total_frames_processed
            )
    elif api_status == JobStatusTypesEnum.success:
        response_content["message"] = "Search complete"
        if job.results:
            response_content["results"] = job.results.get("results", [])
            response_content["total_frames_processed"] = job.results.get(
                "total_frames_processed", job.total_frames_processed
            )
        else:
            response_content["results"] = []
            response_content["total_frames_processed"] = job.total_frames_processed
    else:
        response_content["message"] = "Job processing"
        response_content["total_frames_processed"] = job.total_frames_processed
        # Include partial results if available (streaming)
        if job.results:
            response_content["results"] = job.results.get("results", [])
            response_content["total_frames_processed"] = job.results.get(
                "total_frames_processed", job.total_frames_processed
            )

    # Include metrics if available
    if job.metrics:
        response_content["metrics"] = job.metrics.to_dict()

    return JSONResponse(content=response_content)


@router.post(
    "/{camera_name}/search/motion/{job_id}/cancel",
    dependencies=[Depends(require_camera_access)],
    summary="Cancel motion search job",
    description="Cancels an active motion search job if it is still processing.",
)
async def cancel_motion_search_endpoint(
    request: Request,
    camera_name: str,
    job_id: str,
):
    """Cancel an active motion search job.

    Cancellation is cooperative: it only signals the runner's cancel
    event, so already-finished jobs report "already finished" instead.
    """
    config = request.app.frigate_config

    if camera_name not in config.cameras:
        return JSONResponse(
            content={"success": False, "message": f"Camera {camera_name} not found"},
            status_code=404,
        )

    job = get_motion_search_job(job_id)
    if not job:
        return JSONResponse(
            content={"success": False, "message": "Job not found"},
            status_code=404,
        )

    # Check if already finished
    api_status = job.status
    if api_status not in (JobStatusTypesEnum.queued, JobStatusTypesEnum.running):
        return JSONResponse(
            content={
                "success": True,
                "message": "Job already finished",
                "status": api_status,
            }
        )

    # Request cancellation
    cancelled = cancel_motion_search_job(job_id)
    if cancelled:
        return JSONResponse(
            content={
                "success": True,
                "message": "Search cancelled",
                "status": "cancelled",
            }
        )

    return JSONResponse(
        content={
            "success": False,
            "message": "Failed to cancel job",
        },
        status_code=500,
    )
frigate.events.audio import AudioProcessor from frigate.events.cleanup import EventCleanup from frigate.events.maintainer import EventProcessor +from frigate.jobs.motion_search import stop_all_motion_search_jobs from frigate.log import _stop_logging from frigate.models import ( Event, @@ -599,6 +600,9 @@ class FrigateApp: # used by the docker healthcheck Path("/dev/shm/.frigate-is-stopping").touch() + # Cancel any running motion search jobs before setting stop_event + stop_all_motion_search_jobs() + self.stop_event.set() # set an end_time on entries without an end_time before exiting diff --git a/frigate/jobs/motion_search.py b/frigate/jobs/motion_search.py new file mode 100644 index 000000000..d7c8f8fbc --- /dev/null +++ b/frigate/jobs/motion_search.py @@ -0,0 +1,864 @@ +"""Motion search job management with background execution and parallel verification.""" + +import logging +import os +import threading +from concurrent.futures import Future, ThreadPoolExecutor, as_completed +from dataclasses import asdict, dataclass, field +from datetime import datetime +from typing import Any, Optional + +import cv2 +import numpy as np + +from frigate.comms.inter_process import InterProcessRequestor +from frigate.config import FrigateConfig +from frigate.const import UPDATE_JOB_STATE +from frigate.jobs.job import Job +from frigate.jobs.manager import ( + get_job_by_id, + set_current_job, +) +from frigate.models import Recordings +from frigate.types import JobStatusTypesEnum + +logger = logging.getLogger(__name__) + +# Constants +HEATMAP_GRID_SIZE = 16 + + +@dataclass +class MotionSearchMetrics: + """Metrics collected during motion search execution.""" + + segments_scanned: int = 0 + segments_processed: int = 0 + metadata_inactive_segments: int = 0 + heatmap_roi_skip_segments: int = 0 + fallback_full_range_segments: int = 0 + frames_decoded: int = 0 + wall_time_seconds: float = 0.0 + segments_with_errors: int = 0 + + def to_dict(self) -> dict[str, Any]: + """Convert to 
def create_polygon_mask(
    polygon_points: list[list[float]], frame_width: int, frame_height: int
) -> np.ndarray:
    """Rasterize a normalized polygon into a binary uint8 mask.

    Args:
        polygon_points: [x, y] vertices with coordinates normalized to 0-1.
        frame_width: Target mask width in pixels.
        frame_height: Target mask height in pixels.

    Returns:
        A (frame_height, frame_width) uint8 array with 255 inside the
        polygon and 0 elsewhere. An empty vertex list yields an all-zero
        mask instead of handing cv2.fillPoly an empty point array.
    """
    mask = np.zeros((frame_height, frame_width), dtype=np.uint8)
    if not polygon_points:
        # Robustness: nothing to rasterize. Callers already treat an
        # all-zero mask as "empty ROI" (count_nonzero guard in the runner).
        return mask

    motion_points = np.array(
        [[int(p[0] * frame_width), int(p[1] * frame_height)] for p in polygon_points],
        dtype=np.int32,
    )
    cv2.fillPoly(mask, [motion_points], 255)
    return mask


def compute_roi_bbox_normalized(
    polygon_points: list[list[float]],
) -> tuple[float, float, float, float]:
    """Compute the axis-aligned bounding box of the ROI polygon.

    Args:
        polygon_points: [x, y] vertices normalized to 0-1.

    Returns:
        (x_min, y_min, x_max, y_max) in normalized coordinates; the full
        frame (0.0, 0.0, 1.0, 1.0) when no vertices are given.
    """
    if not polygon_points:
        return (0.0, 0.0, 1.0, 1.0)

    x_coords = [p[0] for p in polygon_points]
    y_coords = [p[1] for p in polygon_points]
    return (min(x_coords), min(y_coords), max(x_coords), max(y_coords))


def heatmap_overlaps_roi(
    heatmap: dict[str, int],
    roi_bbox: tuple[float, float, float, float],
    grid_size: Optional[int] = None,
) -> bool:
    """Check if a sparse motion heatmap has any overlap with the ROI bbox.

    Args:
        heatmap: Sparse dict mapping cell index (str) to intensity (1-255).
        roi_bbox: (x_min, y_min, x_max, y_max) in normalized coordinates (0-1).
        grid_size: Cells per heatmap axis. Defaults to the module-level
            HEATMAP_GRID_SIZE; parameterized so alternative grid layouts
            (and isolated tests) do not depend on the module constant.

    Returns:
        True if any active cell falls inside the ROI region. Non-dict
        heatmaps are treated as overlapping so segments are never skipped
        on invalid data.
    """
    if not isinstance(heatmap, dict):
        # Invalid heatmap, assume overlap to be safe
        return True

    if grid_size is None:
        grid_size = HEATMAP_GRID_SIZE

    x_min, y_min, x_max, y_max = roi_bbox

    # Convert normalized coordinates to inclusive grid-cell bounds.
    grid_x_min = max(0, int(x_min * grid_size))
    grid_y_min = max(0, int(y_min * grid_size))
    grid_x_max = min(grid_size - 1, int(x_max * grid_size))
    grid_y_max = min(grid_size - 1, int(y_max * grid_size))

    # Sparse membership test over the (at most grid_size^2) cells in the bbox.
    return any(
        str(y * grid_size + x) in heatmap
        for y in range(grid_y_min, grid_y_max + 1)
        for x in range(grid_x_min, grid_x_max + 1)
    )
+ """ + motion = recording.motion + objects = recording.objects + regions = recording.regions + + # Old segments without metadata - pass through (conservative) + if motion is None and objects is None and regions is None: + return True + + # Pass if any activity indicator is positive + return bool(motion) or bool(objects) or bool(regions) + + +def segment_passes_heatmap_gate( + recording: Recordings, roi_bbox: tuple[float, float, float, float] +) -> bool: + """Check if a segment passes the heatmap overlap gate. + + Returns True if: + - No heatmap is stored (old segments). + - The heatmap overlaps with the ROI bbox. + """ + heatmap = getattr(recording, "motion_heatmap", None) + if heatmap is None: + # No heatmap stored, fall back to activity gate + return True + + return heatmap_overlaps_roi(heatmap, roi_bbox) + + +class MotionSearchRunner(threading.Thread): + """Thread-based runner for motion search jobs with parallel verification.""" + + def __init__( + self, + job: MotionSearchJob, + config: FrigateConfig, + cancel_event: threading.Event, + ) -> None: + super().__init__(daemon=True, name=f"motion_search_{job.id}") + self.job = job + self.config = config + self.cancel_event = cancel_event + self.internal_stop_event = threading.Event() + self.requestor = InterProcessRequestor() + self.metrics = MotionSearchMetrics() + self.job.metrics = self.metrics + + # Worker cap: min(4, cpu_count) + cpu_count = os.cpu_count() or 1 + self.max_workers = min(4, cpu_count) + + def run(self) -> None: + """Execute the motion search job.""" + try: + self.job.status = JobStatusTypesEnum.running + self.job.start_time = datetime.now().timestamp() + self._broadcast_status() + + results = self._execute_search() + + if self.cancel_event.is_set(): + self.job.status = JobStatusTypesEnum.cancelled + else: + self.job.status = JobStatusTypesEnum.success + self.job.results = { + "results": [r.to_dict() for r in results], + "total_frames_processed": self.job.total_frames_processed, + } + + 
self.job.end_time = datetime.now().timestamp() + self.metrics.wall_time_seconds = self.job.end_time - self.job.start_time + self.job.metrics = self.metrics + + logger.debug( + "Motion search job %s completed: status=%s, results=%d, frames=%d", + self.job.id, + self.job.status, + len(results), + self.job.total_frames_processed, + ) + self._broadcast_status() + + except Exception as e: + logger.exception("Motion search job %s failed: %s", self.job.id, e) + self.job.status = JobStatusTypesEnum.failed + self.job.error_message = str(e) + self.job.end_time = datetime.now().timestamp() + self.metrics.wall_time_seconds = self.job.end_time - ( + self.job.start_time or 0 + ) + self.job.metrics = self.metrics + self._broadcast_status() + + finally: + if self.requestor: + self.requestor.stop() + + def _broadcast_status(self) -> None: + """Broadcast job status update via IPC to WebSocket subscribers.""" + if self.job.status == JobStatusTypesEnum.running and self.job.start_time: + self.metrics.wall_time_seconds = ( + datetime.now().timestamp() - self.job.start_time + ) + + try: + self.requestor.send_data(UPDATE_JOB_STATE, self.job.to_dict()) + except Exception as e: + logger.warning("Failed to broadcast motion search status: %s", e) + + def _should_stop(self) -> bool: + """Check if processing should stop due to cancellation or internal limits.""" + return self.cancel_event.is_set() or self.internal_stop_event.is_set() + + def _execute_search(self) -> list[MotionSearchResult]: + """Main search execution logic.""" + camera_name = self.job.camera + camera_config = self.config.cameras.get(camera_name) + if not camera_config: + raise ValueError(f"Camera {camera_name} not found") + + frame_width = camera_config.detect.width + frame_height = camera_config.detect.height + + # Create polygon mask + polygon_mask = create_polygon_mask( + self.job.polygon_points, frame_width, frame_height + ) + + if np.count_nonzero(polygon_mask) == 0: + logger.warning("Polygon mask is empty for job %s", 
self.job.id) + return [] + + # Compute ROI bbox in normalized coordinates for heatmap gate + roi_bbox = compute_roi_bbox_normalized(self.job.polygon_points) + + # Query recordings + recordings = list( + Recordings.select() + .where( + ( + Recordings.start_time.between( + self.job.start_time_range, self.job.end_time_range + ) + ) + | ( + Recordings.end_time.between( + self.job.start_time_range, self.job.end_time_range + ) + ) + | ( + (self.job.start_time_range > Recordings.start_time) + & (self.job.end_time_range < Recordings.end_time) + ) + ) + .where(Recordings.camera == camera_name) + .order_by(Recordings.start_time.asc()) + ) + + if not recordings: + logger.debug("No recordings found for motion search job %s", self.job.id) + return [] + + logger.debug( + "Motion search job %s: queried %d recording segments for camera %s " + "(range %.1f - %.1f)", + self.job.id, + len(recordings), + camera_name, + self.job.start_time_range, + self.job.end_time_range, + ) + + self.metrics.segments_scanned = len(recordings) + + # Apply activity and heatmap gates + filtered_recordings = [] + for recording in recordings: + if not segment_passes_activity_gate(recording): + self.metrics.metadata_inactive_segments += 1 + self.metrics.segments_processed += 1 + logger.debug( + "Motion search job %s: segment %s skipped by activity gate " + "(motion=%s, objects=%s, regions=%s)", + self.job.id, + recording.id, + recording.motion, + recording.objects, + recording.regions, + ) + continue + if not segment_passes_heatmap_gate(recording, roi_bbox): + self.metrics.heatmap_roi_skip_segments += 1 + self.metrics.segments_processed += 1 + logger.debug( + "Motion search job %s: segment %s skipped by heatmap gate " + "(heatmap present=%s, roi_bbox=%s)", + self.job.id, + recording.id, + recording.motion_heatmap is not None, + roi_bbox, + ) + continue + filtered_recordings.append(recording) + + self._broadcast_status() + + # Fallback: if all segments were filtered out, scan all segments + # This allows 
motion search to find things the detector missed + if not filtered_recordings and recordings: + logger.info( + "All %d segments filtered by gates, falling back to full scan", + len(recordings), + ) + self.metrics.fallback_full_range_segments = len(recordings) + filtered_recordings = recordings + + logger.debug( + "Motion search job %s: %d/%d segments passed gates " + "(activity_skipped=%d, heatmap_skipped=%d)", + self.job.id, + len(filtered_recordings), + len(recordings), + self.metrics.metadata_inactive_segments, + self.metrics.heatmap_roi_skip_segments, + ) + + if self.job.parallel: + return self._search_motion_parallel(filtered_recordings, polygon_mask) + + return self._search_motion_sequential(filtered_recordings, polygon_mask) + + def _search_motion_parallel( + self, + recordings: list[Recordings], + polygon_mask: np.ndarray, + ) -> list[MotionSearchResult]: + """Search for motion in parallel across segments, streaming results.""" + all_results: list[MotionSearchResult] = [] + total_frames = 0 + next_recording_idx_to_merge = 0 + + logger.debug( + "Motion search job %s: starting motion search with %d workers " + "across %d segments", + self.job.id, + self.max_workers, + len(recordings), + ) + + # Initialize partial results on the job so they stream to the frontend + self.job.results = {"results": [], "total_frames_processed": 0} + + with ThreadPoolExecutor(max_workers=self.max_workers) as executor: + futures: dict[Future, int] = {} + completed_segments: dict[int, tuple[list[MotionSearchResult], int]] = {} + + for idx, recording in enumerate(recordings): + if self._should_stop(): + break + + future = executor.submit( + self._process_recording_for_motion, + recording.path, + recording.start_time, + recording.end_time, + self.job.start_time_range, + self.job.end_time_range, + polygon_mask, + self.job.threshold, + self.job.min_area, + self.job.frame_skip, + ) + futures[future] = idx + + for future in as_completed(futures): + if self._should_stop(): + # Cancel 
remaining futures + for f in futures: + f.cancel() + break + + recording_idx = futures[future] + recording = recordings[recording_idx] + + try: + results, frames = future.result() + self.metrics.segments_processed += 1 + completed_segments[recording_idx] = (results, frames) + + while next_recording_idx_to_merge in completed_segments: + segment_results, segment_frames = completed_segments.pop( + next_recording_idx_to_merge + ) + + all_results.extend(segment_results) + total_frames += segment_frames + self.job.total_frames_processed = total_frames + self.metrics.frames_decoded = total_frames + + if segment_results: + deduped = self._deduplicate_results(all_results) + self.job.results = { + "results": [ + r.to_dict() for r in deduped[: self.job.max_results] + ], + "total_frames_processed": total_frames, + } + + self._broadcast_status() + + if segment_results and len(deduped) >= self.job.max_results: + self.internal_stop_event.set() + for pending_future in futures: + pending_future.cancel() + break + + next_recording_idx_to_merge += 1 + + if self.internal_stop_event.is_set(): + break + + except Exception as e: + self.metrics.segments_processed += 1 + self.metrics.segments_with_errors += 1 + self._broadcast_status() + logger.warning( + "Error processing segment %s: %s", + recording.path, + e, + ) + + self.job.total_frames_processed = total_frames + self.metrics.frames_decoded = total_frames + + logger.debug( + "Motion search job %s: motion search complete, " + "found %d raw results, decoded %d frames, %d segment errors", + self.job.id, + len(all_results), + total_frames, + self.metrics.segments_with_errors, + ) + + # Sort and deduplicate results + all_results.sort(key=lambda x: x.timestamp) + return self._deduplicate_results(all_results)[: self.job.max_results] + + def _search_motion_sequential( + self, + recordings: list[Recordings], + polygon_mask: np.ndarray, + ) -> list[MotionSearchResult]: + """Search for motion sequentially across segments, streaming results.""" + 
all_results: list[MotionSearchResult] = [] + total_frames = 0 + + logger.debug( + "Motion search job %s: starting sequential motion search across %d segments", + self.job.id, + len(recordings), + ) + + self.job.results = {"results": [], "total_frames_processed": 0} + + for recording in recordings: + if self.cancel_event.is_set(): + break + + try: + results, frames = self._process_recording_for_motion( + recording.path, + recording.start_time, + recording.end_time, + self.job.start_time_range, + self.job.end_time_range, + polygon_mask, + self.job.threshold, + self.job.min_area, + self.job.frame_skip, + ) + all_results.extend(results) + total_frames += frames + + self.job.total_frames_processed = total_frames + self.metrics.frames_decoded = total_frames + self.metrics.segments_processed += 1 + + if results: + all_results.sort(key=lambda x: x.timestamp) + deduped = self._deduplicate_results(all_results)[ + : self.job.max_results + ] + self.job.results = { + "results": [r.to_dict() for r in deduped], + "total_frames_processed": total_frames, + } + + self._broadcast_status() + + if results and len(deduped) >= self.job.max_results: + break + + except Exception as e: + self.metrics.segments_processed += 1 + self.metrics.segments_with_errors += 1 + self._broadcast_status() + logger.warning("Error processing segment %s: %s", recording.path, e) + + self.job.total_frames_processed = total_frames + self.metrics.frames_decoded = total_frames + + logger.debug( + "Motion search job %s: sequential motion search complete, " + "found %d raw results, decoded %d frames, %d segment errors", + self.job.id, + len(all_results), + total_frames, + self.metrics.segments_with_errors, + ) + + all_results.sort(key=lambda x: x.timestamp) + return self._deduplicate_results(all_results)[: self.job.max_results] + + def _deduplicate_results( + self, results: list[MotionSearchResult], min_gap: float = 1.0 + ) -> list[MotionSearchResult]: + """Deduplicate results that are too close together.""" + if 
not results: + return results + + deduplicated: list[MotionSearchResult] = [] + last_timestamp = 0.0 + + for result in results: + if result.timestamp - last_timestamp >= min_gap: + deduplicated.append(result) + last_timestamp = result.timestamp + + return deduplicated + + def _process_recording_for_motion( + self, + recording_path: str, + recording_start: float, + recording_end: float, + search_start: float, + search_end: float, + polygon_mask: np.ndarray, + threshold: int, + min_area: float, + frame_skip: int, + ) -> tuple[list[MotionSearchResult], int]: + """Process a single recording file for motion detection. + + This method is designed to be called from a thread pool. + + Args: + min_area: Minimum change area as a percentage of the ROI (0-100). + """ + results: list[MotionSearchResult] = [] + frames_processed = 0 + + if not os.path.exists(recording_path): + logger.warning("Recording file not found: %s", recording_path) + return results, frames_processed + + cap = cv2.VideoCapture(recording_path) + if not cap.isOpened(): + logger.error("Could not open recording: %s", recording_path) + return results, frames_processed + + try: + fps = cap.get(cv2.CAP_PROP_FPS) or 30.0 + total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) + recording_duration = recording_end - recording_start + + # Calculate frame range + start_offset = max(0, search_start - recording_start) + end_offset = min(recording_duration, search_end - recording_start) + start_frame = int(start_offset * fps) + end_frame = int(end_offset * fps) + start_frame = max(0, min(start_frame, total_frames - 1)) + end_frame = max(0, min(end_frame, total_frames)) + + if start_frame >= end_frame: + return results, frames_processed + + cap.set(cv2.CAP_PROP_POS_FRAMES, start_frame) + + # Get ROI bounding box + roi_bbox = cv2.boundingRect(polygon_mask) + roi_x, roi_y, roi_w, roi_h = roi_bbox + + prev_frame_gray = None + frame_step = max(frame_skip, 1) + frame_idx = start_frame + + while frame_idx < end_frame: + if 
self._should_stop(): + break + + ret, frame = cap.read() + if not ret: + frame_idx += 1 + continue + + if (frame_idx - start_frame) % frame_step != 0: + frame_idx += 1 + continue + + frames_processed += 1 + + gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) + + # Handle frame dimension changes + if gray.shape != polygon_mask.shape: + resized_mask = cv2.resize( + polygon_mask, (gray.shape[1], gray.shape[0]), cv2.INTER_NEAREST + ) + current_bbox = cv2.boundingRect(resized_mask) + else: + resized_mask = polygon_mask + current_bbox = roi_bbox + + roi_x, roi_y, roi_w, roi_h = current_bbox + cropped_gray = gray[roi_y : roi_y + roi_h, roi_x : roi_x + roi_w] + cropped_mask = resized_mask[ + roi_y : roi_y + roi_h, roi_x : roi_x + roi_w + ] + + cropped_mask_area = np.count_nonzero(cropped_mask) + if cropped_mask_area == 0: + frame_idx += 1 + continue + + # Convert percentage to pixel count for this ROI + min_area_pixels = int((min_area / 100.0) * cropped_mask_area) + + masked_gray = cv2.bitwise_and( + cropped_gray, cropped_gray, mask=cropped_mask + ) + + if prev_frame_gray is not None: + diff = cv2.absdiff(prev_frame_gray, masked_gray) + diff_blurred = cv2.GaussianBlur(diff, (3, 3), 0) + _, thresh = cv2.threshold( + diff_blurred, threshold, 255, cv2.THRESH_BINARY + ) + thresh_dilated = cv2.dilate(thresh, None, iterations=1) + thresh_masked = cv2.bitwise_and( + thresh_dilated, thresh_dilated, mask=cropped_mask + ) + + change_pixels = cv2.countNonZero(thresh_masked) + if change_pixels > min_area_pixels: + contours, _ = cv2.findContours( + thresh_masked, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE + ) + total_change_area = sum( + cv2.contourArea(c) + for c in contours + if cv2.contourArea(c) >= min_area_pixels + ) + if total_change_area > 0: + frame_time_offset = (frame_idx - start_frame) / fps + timestamp = ( + recording_start + start_offset + frame_time_offset + ) + change_percentage = ( + total_change_area / cropped_mask_area + ) * 100 + results.append( + MotionSearchResult( + 
timestamp=timestamp, + change_percentage=round(change_percentage, 2), + ) + ) + + prev_frame_gray = masked_gray + frame_idx += 1 + + finally: + cap.release() + + logger.debug( + "Motion search segment complete: %s, %d frames processed, %d results found", + recording_path, + frames_processed, + len(results), + ) + return results, frames_processed + + +# Module-level state for managing per-camera jobs +_motion_search_jobs: dict[str, tuple[MotionSearchJob, threading.Event]] = {} +_jobs_lock = threading.Lock() + + +def stop_all_motion_search_jobs() -> None: + """Cancel all running motion search jobs for clean shutdown.""" + with _jobs_lock: + for job_id, (job, cancel_event) in _motion_search_jobs.items(): + if job.status in (JobStatusTypesEnum.queued, JobStatusTypesEnum.running): + cancel_event.set() + logger.debug("Signalling motion search job %s to stop", job_id) + + +def start_motion_search_job( + config: FrigateConfig, + camera_name: str, + start_time: float, + end_time: float, + polygon_points: list[list[float]], + threshold: int = 30, + min_area: float = 5.0, + frame_skip: int = 5, + parallel: bool = False, + max_results: int = 25, +) -> str: + """Start a new motion search job. + + Returns the job ID. 
+ """ + job = MotionSearchJob( + camera=camera_name, + start_time_range=start_time, + end_time_range=end_time, + polygon_points=polygon_points, + threshold=threshold, + min_area=min_area, + frame_skip=frame_skip, + parallel=parallel, + max_results=max_results, + ) + + cancel_event = threading.Event() + + with _jobs_lock: + _motion_search_jobs[job.id] = (job, cancel_event) + + set_current_job(job) + + runner = MotionSearchRunner(job, config, cancel_event) + runner.start() + + logger.debug( + "Started motion search job %s for camera %s: " + "time_range=%.1f-%.1f, threshold=%d, min_area=%.1f%%, " + "frame_skip=%d, parallel=%s, max_results=%d, polygon_points=%d vertices", + job.id, + camera_name, + start_time, + end_time, + threshold, + min_area, + frame_skip, + parallel, + max_results, + len(polygon_points), + ) + return job.id + + +def get_motion_search_job(job_id: str) -> Optional[MotionSearchJob]: + """Get a motion search job by ID.""" + with _jobs_lock: + job_entry = _motion_search_jobs.get(job_id) + if job_entry: + return job_entry[0] + # Check completed jobs via manager + return get_job_by_id("motion_search", job_id) + + +def cancel_motion_search_job(job_id: str) -> bool: + """Cancel a motion search job. + + Returns True if cancellation was initiated, False if job not found. 
+ """ + with _jobs_lock: + job_entry = _motion_search_jobs.get(job_id) + if not job_entry: + return False + + job, cancel_event = job_entry + + if job.status not in (JobStatusTypesEnum.queued, JobStatusTypesEnum.running): + # Already finished + return True + + cancel_event.set() + job.status = JobStatusTypesEnum.cancelled + job_payload = job.to_dict() + logger.info("Cancelled motion search job %s", job_id) + + requestor: Optional[InterProcessRequestor] = None + try: + requestor = InterProcessRequestor() + requestor.send_data(UPDATE_JOB_STATE, job_payload) + except Exception as e: + logger.warning( + "Failed to broadcast cancelled motion search job %s: %s", job_id, e + ) + finally: + if requestor: + requestor.stop() + + return True diff --git a/frigate/models.py b/frigate/models.py index fd5061613..d927a12c8 100644 --- a/frigate/models.py +++ b/frigate/models.py @@ -78,6 +78,7 @@ class Recordings(Model): dBFS = IntegerField(null=True) segment_size = FloatField(default=0) # this should be stored as MB regions = IntegerField(null=True) + motion_heatmap = JSONField(null=True) # 16x16 grid, 256 values (0-255) class ExportCase(Model): diff --git a/frigate/record/maintainer.py b/frigate/record/maintainer.py index 7b54d6bd1..68040476a 100644 --- a/frigate/record/maintainer.py +++ b/frigate/record/maintainer.py @@ -50,11 +50,13 @@ class SegmentInfo: active_object_count: int, region_count: int, average_dBFS: int, + motion_heatmap: dict[str, int] | None = None, ) -> None: self.motion_count = motion_count self.active_object_count = active_object_count self.region_count = region_count self.average_dBFS = average_dBFS + self.motion_heatmap = motion_heatmap def should_discard_segment(self, retain_mode: RetainModeEnum) -> bool: keep = False @@ -454,6 +456,59 @@ class RecordingMaintainer(threading.Thread): if end_time < retain_cutoff: self.drop_segment(cache_path) + def _compute_motion_heatmap( + self, camera: str, motion_boxes: list[tuple[int, int, int, int]] + ) -> dict[str, int] 
| None: + """Compute a 16x16 motion intensity heatmap from motion boxes. + + Returns a sparse dict mapping cell index (as string) to intensity (1-255). + Only cells with motion are included. + + Args: + camera: Camera name to get detect dimensions from. + motion_boxes: List of (x1, y1, x2, y2) pixel coordinates. + + Returns: + Sparse dict like {"45": 3, "46": 5}, or None if no boxes. + """ + if not motion_boxes: + return None + + camera_config = self.config.cameras.get(camera) + if not camera_config: + return None + + frame_width = camera_config.detect.width + frame_height = camera_config.detect.height + + if frame_width <= 0 or frame_height <= 0: + return None + + GRID_SIZE = 16 + counts: dict[int, int] = {} + + for box in motion_boxes: + if len(box) < 4: + continue + x1, y1, x2, y2 = box + + # Convert pixel coordinates to grid cells + grid_x1 = max(0, int((x1 / frame_width) * GRID_SIZE)) + grid_y1 = max(0, int((y1 / frame_height) * GRID_SIZE)) + grid_x2 = min(GRID_SIZE - 1, int((x2 / frame_width) * GRID_SIZE)) + grid_y2 = min(GRID_SIZE - 1, int((y2 / frame_height) * GRID_SIZE)) + + for y in range(grid_y1, grid_y2 + 1): + for x in range(grid_x1, grid_x2 + 1): + idx = y * GRID_SIZE + x + counts[idx] = min(255, counts.get(idx, 0) + 1) + + if not counts: + return None + + # Convert to string keys for JSON storage + return {str(k): v for k, v in counts.items()} + def segment_stats( self, camera: str, start_time: datetime.datetime, end_time: datetime.datetime ) -> SegmentInfo: @@ -461,6 +516,8 @@ class RecordingMaintainer(threading.Thread): active_count = 0 region_count = 0 motion_count = 0 + all_motion_boxes: list[tuple[int, int, int, int]] = [] + for frame in self.object_recordings_info[camera]: # frame is after end time of segment if frame[0] > end_time.timestamp(): @@ -479,6 +536,8 @@ class RecordingMaintainer(threading.Thread): ) motion_count += len(frame[2]) region_count += len(frame[3]) + # Collect motion boxes for heatmap computation + 
all_motion_boxes.extend(frame[2]) audio_values = [] for frame in self.audio_recordings_info[camera]: @@ -498,8 +557,14 @@ class RecordingMaintainer(threading.Thread): average_dBFS = 0 if not audio_values else np.average(audio_values) + motion_heatmap = self._compute_motion_heatmap(camera, all_motion_boxes) + return SegmentInfo( - motion_count, active_count, region_count, round(average_dBFS) + motion_count, + active_count, + region_count, + round(average_dBFS), + motion_heatmap, ) async def move_segment( @@ -590,6 +655,7 @@ class RecordingMaintainer(threading.Thread): Recordings.regions.name: segment_info.region_count, Recordings.dBFS.name: segment_info.average_dBFS, Recordings.segment_size.name: segment_size, + Recordings.motion_heatmap.name: segment_info.motion_heatmap, } except Exception as e: logger.error(f"Unable to store recording segment {cache_path}") diff --git a/migrations/035_add_motion_heatmap.py b/migrations/035_add_motion_heatmap.py new file mode 100644 index 000000000..b6962083e --- /dev/null +++ b/migrations/035_add_motion_heatmap.py @@ -0,0 +1,34 @@ +"""Peewee migrations -- 035_add_motion_heatmap.py. 
+ +Some examples (model - class or model name):: + + > Model = migrator.orm['model_name'] # Return model in current state by name + + > migrator.sql(sql) # Run custom SQL + > migrator.python(func, *args, **kwargs) # Run python code + > migrator.create_model(Model) # Create a model (could be used as decorator) + > migrator.remove_model(model, cascade=True) # Remove a model + > migrator.add_fields(model, **fields) # Add fields to a model + > migrator.change_fields(model, **fields) # Change fields + > migrator.remove_fields(model, *field_names, cascade=True) + > migrator.rename_field(model, old_field_name, new_field_name) + > migrator.rename_table(model, new_table_name) + > migrator.add_index(model, *col_names, unique=False) + > migrator.drop_index(model, *col_names) + > migrator.add_not_null(model, *field_names) + > migrator.drop_not_null(model, *field_names) + > migrator.add_default(model, field_name, default) + +""" + +import peewee as pw + +SQL = pw.SQL + + +def migrate(migrator, database, fake=False, **kwargs): + migrator.sql('ALTER TABLE "recordings" ADD COLUMN "motion_heatmap" TEXT NULL') + + +def rollback(migrator, database, fake=False, **kwargs): + pass diff --git a/web/package-lock.json b/web/package-lock.json index a767b3eff..a3135a345 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -22,6 +22,7 @@ "@radix-ui/react-hover-card": "^1.1.6", "@radix-ui/react-label": "^2.1.2", "@radix-ui/react-popover": "^1.1.6", + "@radix-ui/react-progress": "^1.1.8", "@radix-ui/react-radio-group": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-select": "^2.1.6", @@ -2922,6 +2923,68 @@ } } }, + "node_modules/@radix-ui/react-progress": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.8.tgz", + "integrity": "sha512-+gISHcSPUJ7ktBy9RnTqbdKW78bcGke3t6taawyZ71pio1JewwGSJizycs7rLhGTvMJYCQB1DBK4KQsxs7U8dA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-context": "1.1.3", 
+ "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-context": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.3.tgz", + "integrity": "sha512-ieIFACdMpYfMEjF0rEf5KLvfVyIkOz6PDGyNnP+u+4xQ6jny3VCgA4OgXOwNx2aUkxn8zx9fiVcM8CfFYv9Lxw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, "node_modules/@radix-ui/react-radio-group": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz", diff --git a/web/package.json b/web/package.json index 7762a7a56..acbbd8d88 100644 --- a/web/package.json +++ b/web/package.json @@ -28,6 +28,7 @@ "@radix-ui/react-hover-card": "^1.1.6", 
"@radix-ui/react-label": "^2.1.2", "@radix-ui/react-popover": "^1.1.6", + "@radix-ui/react-progress": "^1.1.8", "@radix-ui/react-radio-group": "^1.2.3", "@radix-ui/react-scroll-area": "^1.2.3", "@radix-ui/react-select": "^2.1.6", diff --git a/web/public/locales/en/views/events.json b/web/public/locales/en/views/events.json index ea3ee853d..ec0b29116 100644 --- a/web/public/locales/en/views/events.json +++ b/web/public/locales/en/views/events.json @@ -61,5 +61,25 @@ "detected": "detected", "normalActivity": "Normal", "needsReview": "Needs review", - "securityConcern": "Security concern" + "securityConcern": "Security concern", + "motionSearch": { + "menuItem": "Motion search", + "openMenu": "Camera options" + }, + "motionPreviews": { + "menuItem": "View motion previews", + "title": "Motion previews: {{camera}}", + "mobileSettingsTitle": "Motion Preview Settings", + "mobileSettingsDesc": "Adjust playback speed and dimming, and choose a date to review motion-only clips.", + "dim": "Dim", + "dimAria": "Adjust dimming intensity", + "dimDesc": "Increase dimming to increase motion area visibility.", + "speed": "Speed", + "speedAria": "Select preview playback speed", + "speedDesc": "Choose how quickly preview clips play.", + "back": "Back", + "empty": "No previews available", + "noPreview": "Preview unavailable", + "seekAria": "Seek {{camera}} player to {{time}}" + } } diff --git a/web/public/locales/en/views/motionSearch.json b/web/public/locales/en/views/motionSearch.json new file mode 100644 index 000000000..6e22c3203 --- /dev/null +++ b/web/public/locales/en/views/motionSearch.json @@ -0,0 +1,75 @@ +{ + "documentTitle": "Motion Search - Frigate", + "title": "Motion Search", + "description": "Draw a polygon to define the region of interest, and specify a time range to search for motion changes within that region.", + "selectCamera": "Motion Search is loading", + "startSearch": "Start Search", + "searchStarted": "Search started", + "searchCancelled": "Search cancelled", 
+ "cancelSearch": "Cancel", + "searching": "Search in progress.", + "searchComplete": "Search complete", + "noResultsYet": "Run a search to find motion changes in the selected region", + "noChangesFound": "No pixel changes detected in the selected region", + "changesFound_one": "Found {{count}} motion change", + "changesFound_other": "Found {{count}} motion changes", + "framesProcessed": "{{count}} frames processed", + "jumpToTime": "Jump to this time", + "results": "Results", + "showSegmentHeatmap": "Heatmap", + "newSearch": "New Search", + "clearResults": "Clear Results", + "clearROI": "Clear polygon", + "polygonControls": { + "points_one": "{{count}} point", + "points_other": "{{count}} points", + "undo": "Undo last point", + "reset": "Reset polygon" + }, + "motionHeatmapLabel": "Motion Heatmap", + "dialog": { + "title": "Motion Search", + "cameraLabel": "Camera", + "previewAlt": "Camera preview for {{camera}}" + }, + "timeRange": { + "title": "Search Range", + "start": "Start time", + "end": "End time" + }, + "settings": { + "title": "Search Settings", + "parallelMode": "Parallel mode", + "parallelModeDesc": "Scan multiple recording segments at the same time (faster, but significantly more CPU intensive)", + "threshold": "Sensitivity Threshold", + "thresholdDesc": "Lower values detect smaller changes (1-255)", + "minArea": "Minimum Change Area", + "minAreaDesc": "Minimum percentage of the region of interest that must change to be considered significant", + "frameSkip": "Frame Skip", + "frameSkipDesc": "Process every Nth frame. Set this to your camera's frame rate to process one frame per second (e.g. 5 for a 5 FPS camera, 30 for a 30 FPS camera). 
Higher values will be faster, but may miss short motion events.", + "maxResults": "Maximum Results", + "maxResultsDesc": "Stop after this many matching timestamps" + }, + "errors": { + "noCamera": "Please select a camera", + "noROI": "Please draw a region of interest", + "noTimeRange": "Please select a time range", + "invalidTimeRange": "End time must be after start time", + "searchFailed": "Search failed: {{message}}", + "polygonTooSmall": "Polygon must have at least 3 points", + "unknown": "Unknown error" + }, + "changePercentage": "{{percentage}}% changed", + "metrics": { + "title": "Search Metrics", + "segmentsScanned": "Segments scanned", + "segmentsProcessed": "Processed", + "segmentsSkippedInactive": "Skipped (no activity)", + "segmentsSkippedHeatmap": "Skipped (no ROI overlap)", + "fallbackFullRange": "Fallback full-range scan", + "framesDecoded": "Frames decoded", + "wallTime": "Search time", + "segmentErrors": "Segment errors", + "seconds": "{{seconds}}s" + } +} diff --git a/web/src/components/player/HlsVideoPlayer.tsx b/web/src/components/player/HlsVideoPlayer.tsx index bef53519e..b91efd84b 100644 --- a/web/src/components/player/HlsVideoPlayer.tsx +++ b/web/src/components/player/HlsVideoPlayer.tsx @@ -1,5 +1,6 @@ import { MutableRefObject, + ReactNode, useCallback, useEffect, useRef, @@ -57,6 +58,7 @@ type HlsVideoPlayerProps = { isDetailMode?: boolean; camera?: string; currentTimeOverride?: number; + transformedOverlay?: ReactNode; }; export default function HlsVideoPlayer({ @@ -81,6 +83,7 @@ export default function HlsVideoPlayer({ isDetailMode = false, camera, currentTimeOverride, + transformedOverlay, }: HlsVideoPlayerProps) { const { t } = useTranslation("components/player"); const { data: config } = useSWR("config"); @@ -350,157 +353,162 @@ export default function HlsVideoPlayer({ height: isMobile ? 
"100%" : undefined, }} > - {isDetailMode && - camera && - currentTime && - loadedMetadata && - videoDimensions.width > 0 && - videoDimensions.height > 0 && ( -
- { - if (onSeekToTime) { - onSeekToTime(timestamp, play); - } +
+ {transformedOverlay} + {isDetailMode && + camera && + currentTime && + loadedMetadata && + videoDimensions.width > 0 && + videoDimensions.height > 0 && ( +
-
- )} -
+ )} +
); diff --git a/web/src/components/player/dynamic/DynamicVideoPlayer.tsx b/web/src/components/player/dynamic/DynamicVideoPlayer.tsx index 7fe5bd50b..c8d95090d 100644 --- a/web/src/components/player/dynamic/DynamicVideoPlayer.tsx +++ b/web/src/components/player/dynamic/DynamicVideoPlayer.tsx @@ -1,4 +1,11 @@ -import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { + ReactNode, + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from "react"; import { useApiHost } from "@/api"; import useSWR from "swr"; import { FrigateConfig } from "@/types/frigateConfig"; @@ -40,6 +47,7 @@ type DynamicVideoPlayerProps = { setFullResolution: React.Dispatch>; toggleFullscreen: () => void; containerRef?: React.MutableRefObject; + transformedOverlay?: ReactNode; }; export default function DynamicVideoPlayer({ className, @@ -58,6 +66,7 @@ export default function DynamicVideoPlayer({ setFullResolution, toggleFullscreen, containerRef, + transformedOverlay, }: DynamicVideoPlayerProps) { const { t } = useTranslation(["components/player"]); const apiHost = useApiHost(); @@ -312,6 +321,7 @@ export default function DynamicVideoPlayer({ isDetailMode={isDetailMode} camera={contextCamera || camera} currentTimeOverride={currentTime} + transformedOverlay={transformedOverlay} /> )} >; @@ -58,6 +59,7 @@ export function MotionReviewTimeline({ timestampSpread, timelineStart, timelineEnd, + scrollToTime, showHandlebar = false, handlebarTime, setHandlebarTime, @@ -176,6 +178,15 @@ export function MotionReviewTimeline({ [], ); + // allow callers to request the timeline center on a specific time + useEffect(() => { + if (scrollToTime == undefined) return; + + setTimeout(() => { + scrollToSegment(alignStartDateToTimeline(scrollToTime), true, "auto"); + }, 0); + }, [scrollToTime, scrollToSegment, alignStartDateToTimeline]); + // keep handlebar centered when zooming useEffect(() => { setTimeout(() => { diff --git a/web/src/components/timeline/ReviewTimeline.tsx 
b/web/src/components/timeline/ReviewTimeline.tsx index b96758493..21925fa00 100644 --- a/web/src/components/timeline/ReviewTimeline.tsx +++ b/web/src/components/timeline/ReviewTimeline.tsx @@ -343,9 +343,12 @@ export function ReviewTimeline({ useEffect(() => { if (onHandlebarDraggingChange) { - onHandlebarDraggingChange(isDraggingHandlebar); + // Keep existing callback name but treat it as a generic dragging signal. + // This allows consumers (e.g. export-handle timelines) to correctly + // enable preview scrubbing while dragging export handles. + onHandlebarDraggingChange(isDragging); } - }, [isDraggingHandlebar, onHandlebarDraggingChange]); + }, [isDragging, onHandlebarDraggingChange]); const isHandlebarInNoRecordingPeriod = useMemo(() => { if (!getRecordingAvailability || handlebarTime === undefined) return false; diff --git a/web/src/components/ui/progress.tsx b/web/src/components/ui/progress.tsx new file mode 100644 index 000000000..105fb6500 --- /dev/null +++ b/web/src/components/ui/progress.tsx @@ -0,0 +1,26 @@ +import * as React from "react" +import * as ProgressPrimitive from "@radix-ui/react-progress" + +import { cn } from "@/lib/utils" + +const Progress = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, value, ...props }, ref) => ( + + + +)) +Progress.displayName = ProgressPrimitive.Root.displayName + +export { Progress } diff --git a/web/src/hooks/use-camera-activity.ts b/web/src/hooks/use-camera-activity.ts index cf5bf4653..71507c2af 100644 --- a/web/src/hooks/use-camera-activity.ts +++ b/web/src/hooks/use-camera-activity.ts @@ -8,14 +8,19 @@ import { import { CameraConfig, FrigateConfig } from "@/types/frigateConfig"; import { MotionData, ReviewSegment } from "@/types/review"; import { useCallback, useEffect, useMemo, useState } from "react"; -import { useTimelineUtils } from "./use-timeline-utils"; import { AudioDetection, ObjectType } from "@/types/ws"; +import { useTimelineUtils } from "./use-timeline-utils"; 
import useDeepMemo from "./use-deep-memo"; import { isEqual } from "lodash"; import { useAutoFrigateStats } from "./use-stats"; import useSWR from "swr"; import { getAttributeLabels } from "@/utils/iconUtil"; +export type MotionOnlyRange = { + start_time: number; + end_time: number; +}; + type useCameraActivityReturn = { enabled?: boolean; activeTracking: boolean; @@ -204,9 +209,9 @@ export function useCameraMotionNextTimestamp( return []; } - const ranges = []; - let currentSegmentStart = null; - let currentSegmentEnd = null; + const ranges: [number, number][] = []; + let currentSegmentStart: number | null = null; + let currentSegmentEnd: number | null = null; // align motion start to timeline start const offset = @@ -215,13 +220,19 @@ export function useCameraMotionNextTimestamp( segmentDuration; const startIndex = Math.abs(Math.floor(offset / 15)); + const now = Date.now() / 1000; for ( let i = startIndex; i < motionData.length; i = i + segmentDuration / 15 ) { - const motionStart = motionData[i].start_time; + const motionStart = motionData[i]?.start_time; + + if (motionStart == undefined) { + continue; + } + const motionEnd = motionStart + segmentDuration; const segmentMotion = motionData @@ -230,10 +241,10 @@ export function useCameraMotionNextTimestamp( const overlappingReviewItems = reviewItems.some( (item) => (item.start_time >= motionStart && item.start_time < motionEnd) || - ((item.end_time ?? Date.now() / 1000) > motionStart && - (item.end_time ?? Date.now() / 1000) <= motionEnd) || + ((item.end_time ?? now) > motionStart && + (item.end_time ?? now) <= motionEnd) || (item.start_time <= motionStart && - (item.end_time ?? Date.now() / 1000) >= motionEnd), + (item.end_time ?? 
now) >= motionEnd), ); if (!segmentMotion || overlappingReviewItems) { @@ -241,16 +252,14 @@ export function useCameraMotionNextTimestamp( currentSegmentStart = motionStart; } currentSegmentEnd = motionEnd; - } else { - if (currentSegmentStart !== null) { - ranges.push([currentSegmentStart, currentSegmentEnd]); - currentSegmentStart = null; - currentSegmentEnd = null; - } + } else if (currentSegmentStart !== null && currentSegmentEnd !== null) { + ranges.push([currentSegmentStart, currentSegmentEnd]); + currentSegmentStart = null; + currentSegmentEnd = null; } } - if (currentSegmentStart !== null) { + if (currentSegmentStart !== null && currentSegmentEnd !== null) { ranges.push([currentSegmentStart, currentSegmentEnd]); } @@ -304,3 +313,93 @@ export function useCameraMotionNextTimestamp( return nextTimestamp; } + +export function useCameraMotionOnlyRanges( + segmentDuration: number, + reviewItems: ReviewSegment[], + motionData: MotionData[], +) { + const motionOnlyRanges = useMemo(() => { + if (!motionData?.length || !reviewItems) { + return []; + } + + const fallbackBucketDuration = Math.max(1, segmentDuration / 2); + const normalizedMotionData = Array.from( + motionData + .reduce((accumulator, item) => { + const currentMotion = accumulator.get(item.start_time) ?? 0; + accumulator.set( + item.start_time, + Math.max(currentMotion, item.motion ?? 0), + ); + return accumulator; + }, new Map()) + .entries(), + ) + .map(([start_time, motion]) => ({ start_time, motion })) + .sort((left, right) => left.start_time - right.start_time); + + const bucketRanges: MotionOnlyRange[] = []; + const now = Date.now() / 1000; + + for (let i = 0; i < normalizedMotionData.length; i++) { + const motionStart = normalizedMotionData[i].start_time; + const motionEnd = motionStart + fallbackBucketDuration; + + const overlappingReviewItems = reviewItems.some( + (item) => + (item.start_time >= motionStart && item.start_time < motionEnd) || + ((item.end_time ?? 
now) > motionStart && + (item.end_time ?? now) <= motionEnd) || + (item.start_time <= motionStart && + (item.end_time ?? now) >= motionEnd), + ); + + const isMotionOnlySegment = + (normalizedMotionData[i].motion ?? 0) > 0 && !overlappingReviewItems; + + if (!isMotionOnlySegment) { + continue; + } + + bucketRanges.push({ + start_time: motionStart, + end_time: motionEnd, + }); + } + + if (!bucketRanges.length) { + return []; + } + + const mergedRanges = bucketRanges.reduce( + (ranges, range) => { + if (!ranges.length) { + return [range]; + } + + const previousRange = ranges[ranges.length - 1]; + const isContiguous = + range.start_time <= previousRange.end_time + 0.001 && + range.start_time >= previousRange.end_time - 0.001; + + if (isContiguous) { + previousRange.end_time = Math.max( + previousRange.end_time, + range.end_time, + ); + return ranges; + } + + ranges.push(range); + return ranges; + }, + [], + ); + + return mergedRanges; + }, [motionData, reviewItems, segmentDuration]); + + return motionOnlyRanges; +} diff --git a/web/src/pages/Events.tsx b/web/src/pages/Events.tsx index 540a50777..e3f6e4fae 100644 --- a/web/src/pages/Events.tsx +++ b/web/src/pages/Events.tsx @@ -1,5 +1,6 @@ import ActivityIndicator from "@/components/indicators/activity-indicator"; import useApiFilter from "@/hooks/use-api-filter"; +import { useAllowedCameras } from "@/hooks/use-allowed-cameras"; import { useCameraPreviews } from "@/hooks/use-camera-previews"; import { useTimezone } from "@/hooks/use-date-utils"; import { useOverlayState, useSearchEffect } from "@/hooks/use-overlay-state"; @@ -21,6 +22,7 @@ import { getEndOfDayTimestamp, } from "@/utils/dateUtil"; import EventView from "@/views/events/EventView"; +import MotionSearchView from "@/views/motion-search/MotionSearchView"; import { RecordingView } from "@/views/recording/RecordingView"; import axios from "axios"; import { useCallback, useEffect, useMemo, useState } from "react"; @@ -34,6 +36,7 @@ export default function 
Events() { revalidateOnFocus: false, }); const timezone = useTimezone(config); + const allowedCameras = useAllowedCameras(); // recordings viewer @@ -52,6 +55,74 @@ export default function Events() { undefined, false, ); + const [motionPreviewsCamera, setMotionPreviewsCamera] = useOverlayState< + string | undefined + >("motionPreviewsCamera", undefined); + + const [motionSearchCamera, setMotionSearchCamera] = useState( + null, + ); + const [motionSearchDay, setMotionSearchDay] = useState( + undefined, + ); + + const motionSearchCameras = useMemo(() => { + if (!config?.cameras) { + return [] as string[]; + } + + return Object.keys(config.cameras).filter((cam) => + allowedCameras.includes(cam), + ); + }, [allowedCameras, config?.cameras]); + + const selectedMotionSearchCamera = useMemo(() => { + if (!motionSearchCamera) { + return null; + } + + if (motionSearchCameras.includes(motionSearchCamera)) { + return motionSearchCamera; + } + + return motionSearchCameras[0] ?? null; + }, [motionSearchCamera, motionSearchCameras]); + + const motionSearchTimeRange = useMemo(() => { + if (motionSearchDay) { + return { + after: getBeginningOfDayTimestamp(new Date(motionSearchDay)), + before: getEndOfDayTimestamp(new Date(motionSearchDay)), + }; + } + + const now = Date.now() / 1000; + return { + after: now - 86400, + before: now, + }; + }, [motionSearchDay]); + + const closeMotionSearch = useCallback(() => { + setMotionSearchCamera(null); + setMotionSearchDay(undefined); + setBeforeTs(Date.now() / 1000); + }, []); + + const handleMotionSearchCameraSelect = useCallback((camera: string) => { + setMotionSearchCamera(camera); + }, []); + + const handleMotionSearchDaySelect = useCallback((day: Date | undefined) => { + if (day == undefined) { + setMotionSearchDay(undefined); + return; + } + + const normalizedDay = new Date(day); + normalizedDay.setHours(0, 0, 0, 0); + setMotionSearchDay(normalizedDay); + }, []); const [notificationTab, setNotificationTab] = useState("timeline"); @@ 
-508,7 +579,24 @@ export default function Events() { ); } } else { - return ( + return motionSearchCamera ? ( + !config || !selectedMotionSearchCamera ? ( + + ) : ( + + ) + ) : ( + setMotionPreviewsCamera(camera ?? undefined) + } + setMotionSearchCamera={setMotionSearchCamera} pullLatestData={reloadData} updateFilter={onUpdateFilter} /> diff --git a/web/src/pages/MotionSearch.tsx b/web/src/pages/MotionSearch.tsx new file mode 100644 index 000000000..c1651b72e --- /dev/null +++ b/web/src/pages/MotionSearch.tsx @@ -0,0 +1,112 @@ +import { useEffect, useMemo, useState, useCallback } from "react"; +import { useTranslation } from "react-i18next"; +import useSWR from "swr"; +import { FrigateConfig } from "@/types/frigateConfig"; +import { useTimezone } from "@/hooks/use-date-utils"; +import MotionSearchView from "@/views/motion-search/MotionSearchView"; +import { + getBeginningOfDayTimestamp, + getEndOfDayTimestamp, +} from "@/utils/dateUtil"; +import { useAllowedCameras } from "@/hooks/use-allowed-cameras"; +import { useSearchEffect } from "@/hooks/use-overlay-state"; +import ActivityIndicator from "@/components/indicators/activity-indicator"; + +export default function MotionSearch() { + const { t } = useTranslation(["views/motionSearch"]); + + const { data: config } = useSWR("config", { + revalidateOnFocus: false, + }); + + const timezone = useTimezone(config); + + useEffect(() => { + document.title = t("documentTitle"); + }, [t]); + + // Get allowed cameras + const allowedCameras = useAllowedCameras(); + + const cameras = useMemo(() => { + if (!config?.cameras) return []; + return Object.keys(config.cameras).filter((cam) => + allowedCameras.includes(cam), + ); + }, [config?.cameras, allowedCameras]); + + // Selected camera state + const [selectedCamera, setSelectedCamera] = useState(null); + const [cameraLocked, setCameraLocked] = useState(false); + + useSearchEffect("camera", (camera: string) => { + if (cameras.length > 0 && cameras.includes(camera)) { + 
setSelectedCamera(camera); + setCameraLocked(true); + } + return false; + }); + + // Initialize with first camera when available (only if not set by camera param) + useEffect(() => { + if (cameras.length === 0) return; + if (!selectedCamera) { + setSelectedCamera(cameras[0]); + } + }, [cameras, selectedCamera]); + + // Time range state - default to last 24 hours + const [selectedDay, setSelectedDay] = useState(undefined); + + const timeRange = useMemo(() => { + if (selectedDay) { + return { + after: getBeginningOfDayTimestamp(new Date(selectedDay)), + before: getEndOfDayTimestamp(new Date(selectedDay)), + }; + } + // Default to last 24 hours + const now = Date.now() / 1000; + return { + after: now - 86400, + before: now, + }; + }, [selectedDay]); + + const handleCameraSelect = useCallback((camera: string) => { + setSelectedCamera(camera); + }, []); + + const handleDaySelect = useCallback((day: Date | undefined) => { + if (day == undefined) { + setSelectedDay(undefined); + return; + } + + const normalizedDay = new Date(day); + normalizedDay.setHours(0, 0, 0, 0); + setSelectedDay(normalizedDay); + }, []); + + if (!config || cameras.length === 0) { + return ( +
+ +
+ ); + } + + return ( + + ); +} diff --git a/web/src/types/motionSearch.ts b/web/src/types/motionSearch.ts new file mode 100644 index 000000000..6e919c22f --- /dev/null +++ b/web/src/types/motionSearch.ts @@ -0,0 +1,46 @@ +/** + * Types for the Motion Search feature + */ + +export interface MotionSearchResult { + timestamp: number; + change_percentage: number; +} + +export interface MotionSearchRequest { + start_time: number; + end_time: number; + polygon_points: number[][]; + parallel?: boolean; + threshold?: number; + min_area?: number; + frame_skip?: number; + max_results?: number; +} + +export interface MotionSearchStartResponse { + success: boolean; + message: string; + job_id: string; +} + +export interface MotionSearchMetrics { + segments_scanned: number; + segments_processed: number; + metadata_inactive_segments: number; + heatmap_roi_skip_segments: number; + fallback_full_range_segments: number; + frames_decoded: number; + wall_time_seconds: number; + segments_with_errors: number; +} + +export interface MotionSearchStatusResponse { + success: boolean; + message: string; + status: "queued" | "running" | "success" | "failed" | "cancelled"; + results?: MotionSearchResult[]; + total_frames_processed?: number; + error_message?: string; + metrics?: MotionSearchMetrics; +} diff --git a/web/src/types/record.ts b/web/src/types/record.ts index dbe43653a..107a8d86e 100644 --- a/web/src/types/record.ts +++ b/web/src/types/record.ts @@ -11,6 +11,7 @@ export type Recording = { duration: number; motion: number; objects: number; + motion_heatmap?: Record | null; dBFS: number; }; diff --git a/web/src/views/events/EventView.tsx b/web/src/views/events/EventView.tsx index 70067ff5c..b8167b7dd 100644 --- a/web/src/views/events/EventView.tsx +++ b/web/src/views/events/EventView.tsx @@ -1,11 +1,21 @@ import Logo from "@/components/Logo"; import NewReviewData from "@/components/dynamic/NewReviewData"; +import CalendarFilterButton from "@/components/filter/CalendarFilterButton"; 
import ReviewActionGroup from "@/components/filter/ReviewActionGroup"; import ReviewFilterGroup from "@/components/filter/ReviewFilterGroup"; import PreviewThumbnailPlayer from "@/components/player/PreviewThumbnailPlayer"; import EventReviewTimeline from "@/components/timeline/EventReviewTimeline"; import ActivityIndicator from "@/components/indicators/activity-indicator"; import { ToggleGroup, ToggleGroupItem } from "@/components/ui/toggle-group"; +import { VolumeSlider } from "@/components/ui/slider"; +import { + Select, + SelectContent, + SelectItem, + SelectSeparator, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; import { useTimelineUtils } from "@/hooks/use-timeline-utils"; import { useScrollLockout } from "@/hooks/use-mouse-listener"; import { FrigateConfig } from "@/types/frigateConfig"; @@ -22,6 +32,7 @@ import { ZoomLevel, } from "@/types/review"; import { getChunkedTimeRange } from "@/utils/timelineUtil"; +import { getEndOfDayTimestamp } from "@/utils/dateUtil"; import axios from "axios"; import { MutableRefObject, @@ -34,9 +45,18 @@ import { import { isDesktop, isMobile, isMobileOnly } from "react-device-detect"; import { LuFolderCheck, LuFolderX } from "react-icons/lu"; import { MdCircle } from "react-icons/md"; +import { FiMoreVertical } from "react-icons/fi"; +import { IoMdArrowRoundBack } from "react-icons/io"; import useSWR from "swr"; import MotionReviewTimeline from "@/components/timeline/MotionReviewTimeline"; import { Button } from "@/components/ui/button"; +import BlurredIconButton from "@/components/button/BlurredIconButton"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; import PreviewPlayer, { PreviewController, } from "@/components/player/PreviewPlayer"; @@ -44,7 +64,10 @@ import SummaryTimeline from "@/components/timeline/SummaryTimeline"; import { RecordingStartingPoint } from "@/types/record"; import VideoControls from 
"@/components/player/VideoControls"; import { TimeRange } from "@/types/timeline"; -import { useCameraMotionNextTimestamp } from "@/hooks/use-camera-activity"; +import { + useCameraMotionNextTimestamp, + useCameraMotionOnlyRanges, +} from "@/hooks/use-camera-activity"; import useOptimisticState from "@/hooks/use-optimistic-state"; import { Skeleton } from "@/components/ui/skeleton"; import scrollIntoView from "scroll-into-view-if-needed"; @@ -56,6 +79,10 @@ import { GiSoundWaves } from "react-icons/gi"; import useKeyboardListener from "@/hooks/use-keyboard-listener"; import { useTimelineZoom } from "@/hooks/use-timeline-zoom"; import { useTranslation } from "react-i18next"; +import { FaCog } from "react-icons/fa"; +import ReviewActivityCalendar from "@/components/overlay/ReviewActivityCalendar"; +import PlatformAwareDialog from "@/components/overlay/dialog/PlatformAwareDialog"; +import MotionPreviewsPane from "./MotionPreviewsPane"; import { EmptyCard } from "@/components/card/EmptyCard"; import { EmptyCardData } from "@/types/card"; @@ -75,6 +102,9 @@ type EventViewProps = { markItemAsReviewed: (review: ReviewSegment) => void; markAllItemsAsReviewed: (currentItems: ReviewSegment[]) => void; onOpenRecording: (recordingInfo: RecordingStartingPoint) => void; + motionPreviewsCamera: string | null; + setMotionPreviewsCamera: (camera: string | null) => void; + setMotionSearchCamera: (camera: string) => void; pullLatestData: () => void; updateFilter: (filter: ReviewFilter) => void; }; @@ -94,6 +124,9 @@ export default function EventView({ markItemAsReviewed, markAllItemsAsReviewed, onOpenRecording, + motionPreviewsCamera, + setMotionPreviewsCamera, + setMotionSearchCamera, pullLatestData, updateFilter, }: EventViewProps) { @@ -274,6 +307,15 @@ export default function EventView({ 100, ); + const motionPreviewsOpen = + severity === "significant_motion" && motionPreviewsCamera != null; + + useEffect(() => { + if (severity !== "significant_motion") { + 
setMotionPreviewsCamera(null); + } + }, [setMotionPreviewsCamera, severity]); + // review filter info const reviewFilterList = useMemo(() => { @@ -301,124 +343,136 @@ export default function EventView({ return (
-
- {isMobile && ( - - )} - - value ? setSeverityToggle(value) : null - } // don't allow the severity to be unselected - > - + {isMobile && ( + + )} + + value ? setSeverityToggle(value) : null + } // don't allow the severity to be unselected > -
- {reviewCounts.alert > -1 ? ( - reviewCounts.alert - ) : ( - - )} -
-
- -
- {t("alerts")} +
{reviewCounts.alert > -1 ? ( - ` ∙ ${reviewCounts.alert}` + reviewCounts.alert ) : ( - + )}
-
- - -
+ +
+ {t("alerts")} + {reviewCounts.alert > -1 ? ( + ` ∙ ${reviewCounts.alert}` + ) : ( + + )} +
+
+
+ - {reviewCounts.detection > -1 ? ( - reviewCounts.detection - ) : ( - - )} -
-
- -
- {t("detections")} +
{reviewCounts.detection > -1 ? ( - ` ∙ ${reviewCounts.detection}` + reviewCounts.detection ) : ( - + )}
-
- - - -
- -
{t("motion.label")}
-
-
- +
+ +
+ {t("detections")} + {reviewCounts.detection > -1 ? ( + ` ∙ ${reviewCounts.detection}` + ) : ( + + )} +
+
+ + + +
+ +
{t("motion.label")}
+
+
+ - {selectedReviews.length <= 0 ? ( - - ) : ( - + {selectedReviews.length <= 0 ? ( + + ) : ( + + )} +
+ )} + +
- -
+ > {severity != "significant_motion" && ( @@ -898,10 +958,16 @@ type MotionReviewProps = { significant_motion: ReviewSegment[]; }; relevantPreviews?: Preview[]; + reviewSummary?: ReviewSummary; + recordingsSummary?: RecordingsSummary; timeRange: TimeRange; startTime?: number; filter?: ReviewFilter; motionOnly?: boolean; + updateFilter: (filter: ReviewFilter) => void; + motionPreviewsCamera: string | null; + setMotionPreviewsCamera: (camera: string | null) => void; + setMotionSearchCamera: (camera: string) => void; emptyCardData: EmptyCardData; onOpenRecording: (data: RecordingStartingPoint) => void; }; @@ -909,13 +975,20 @@ function MotionReview({ contentRef, reviewItems, relevantPreviews, + reviewSummary, + recordingsSummary, timeRange, startTime, filter, motionOnly = false, + updateFilter, + motionPreviewsCamera, + setMotionPreviewsCamera, + setMotionSearchCamera, emptyCardData, onOpenRecording, }: MotionReviewProps) { + const { t } = useTranslation(["views/events", "common"]); const segmentDuration = 30; const { data: config } = useSWR("config"); @@ -961,6 +1034,15 @@ function MotionReview({ }, ]); + const { data: overlapReviewSegments } = useSWR([ + "review", + { + before: alignedBefore, + after: alignedAfter, + cameras: filter?.cameras?.join(",") ?? null, + }, + ]); + // timeline time const timeRangeSegments = useMemo( @@ -973,19 +1055,29 @@ function MotionReview({ return timeRangeSegments.ranges.length - 1; } - return timeRangeSegments.ranges.findIndex( + const index = timeRangeSegments.ranges.findIndex( (seg) => seg.after <= startTime && seg.before >= startTime, ); + + if (index === -1) { + return timeRangeSegments.ranges.length - 1; + } + + return index; // only render once // eslint-disable-next-line react-hooks/exhaustive-deps }, []); const [selectedRangeIdx, setSelectedRangeIdx] = useState(initialIndex); const [currentTime, setCurrentTime] = useState( - startTime ?? timeRangeSegments.ranges[selectedRangeIdx]?.before, + startTime ?? 
+ timeRangeSegments.ranges[selectedRangeIdx]?.before ?? + timeRangeSegments.end, ); const currentTimeRange = useMemo( - () => timeRangeSegments.ranges[selectedRangeIdx], + () => + timeRangeSegments.ranges[selectedRangeIdx] ?? + timeRangeSegments.ranges[timeRangeSegments.ranges.length - 1], [selectedRangeIdx, timeRangeSegments], ); @@ -1023,18 +1115,86 @@ function MotionReview({ const [playbackRate, setPlaybackRate] = useState(8); const [controlsOpen, setControlsOpen] = useState(false); + const [dimStrength, setDimStrength] = useState(82); + const [isPreviewSettingsOpen, setIsPreviewSettingsOpen] = useState(false); + + const objectReviewItems = useMemo( + () => + (overlapReviewSegments ?? []).filter( + (item) => + item.severity === "alert" || + item.severity === "detection" || + (item.data.detections?.length ?? 0) > 0 || + (item.data.objects?.length ?? 0) > 0, + ), + [overlapReviewSegments], + ); const nextTimestamp = useCameraMotionNextTimestamp( timeRangeSegments.end, segmentDuration, motionOnly, - reviewItems?.all ?? [], + objectReviewItems, motionData ?? [], currentTime, ); const timeoutIdRef = useRef(null); + const selectedMotionPreviewCamera = useMemo( + () => + reviewCameras.find((camera) => camera.name === motionPreviewsCamera) ?? + null, + [motionPreviewsCamera, reviewCameras], + ); + + const onUpdateSelectedDay = useCallback( + (day?: Date) => { + updateFilter({ + ...filter, + after: day == undefined ? undefined : day.getTime() / 1000, + before: day == undefined ? undefined : getEndOfDayTimestamp(day), + }); + }, + [filter, updateFilter], + ); + + const selectedCameraMotionData = useMemo(() => { + if (!motionPreviewsCamera) { + return []; + } + + return (motionData ?? 
[]).filter((item) => { + const cameras = item.camera.split(",").map((camera) => camera.trim()); + return cameras.includes(motionPreviewsCamera); + }); + }, [motionData, motionPreviewsCamera]); + + const selectedCameraReviewItems = useMemo(() => { + if (!motionPreviewsCamera) { + return []; + } + + return objectReviewItems.filter( + (item) => item.camera === motionPreviewsCamera, + ); + }, [motionPreviewsCamera, objectReviewItems]); + + const motionPreviewRanges = useCameraMotionOnlyRanges( + segmentDuration, + selectedCameraReviewItems, + selectedCameraMotionData, + ); + + useEffect(() => { + if ( + motionPreviewsCamera && + !reviewCameras.some((camera) => camera.name === motionPreviewsCamera) + ) { + setMotionPreviewsCamera(null); + } + }, [motionPreviewsCamera, reviewCameras, setMotionPreviewsCamera]); + useEffect(() => { if (nextTimestamp) { if (!playing && timeoutIdRef.current != null) { @@ -1124,132 +1284,349 @@ function MotionReview({ return ( <> -
-
3 && - isMobile && - "portrait:md:grid-cols-2 landscape:md:grid-cols-3", - isDesktop && "grid-cols-2 lg:grid-cols-3", - "gap-2 overflow-auto px-1 md:mx-2 md:gap-4 xl:grid-cols-3 3xl:grid-cols-4", - )} - > - {reviewCameras.map((camera) => { - let grow; - let spans; - const aspectRatio = camera.detect.width / camera.detect.height; - if (aspectRatio > 2) { - grow = "aspect-wide"; - spans = "sm:col-span-2"; - } else if (aspectRatio < 1) { - grow = "h-full aspect-tall"; - spans = "md:row-span-2"; - } else { - grow = "aspect-video"; - } - const detectionType = getDetectionType(camera.name); - return ( -
- {motionData ? ( - <> - { - videoPlayersRef.current[camera.name] = controller; - }} - onClick={() => - onOpenRecording({ - camera: camera.name, - startTime: Math.min( - currentTime, - Date.now() / 1000 - 30, - ), - severity: "significant_motion", - }) - } - /> -
- - ) : ( - + {motionPreviewsCamera && selectedMotionPreviewCamera ? ( + <> +
+ + +
+ {isDesktop && ( + + )} + + + {isDesktop && t("motionPreviews.mobileSettingsTitle")} + + } + content={ +
+ {!isDesktop && ( +
+
+ {t("motionPreviews.mobileSettingsTitle")} +
+
+ {t("motionPreviews.mobileSettingsDesc")} +
+
+ )} + +
+
+
+ {t("motionPreviews.speed")} +
+
+ {t("motionPreviews.speedDesc")} +
+
+ +
+ +
+
+
{t("motionPreviews.dim")}
+
+ {t("motionPreviews.dimDesc")} +
+
+
+ { + const nextValue = values[0]; + if (nextValue == undefined) { + return; + } + + setDimStrength(nextValue); + }} + /> +
+
+ + {!isDesktop && ( + <> + + +
+ { + onUpdateSelectedDay(day); + setIsPreviewSettingsOpen(false); + }} + /> +
+
+ +
+ + )} +
+ } + contentClassName={cn( + isDesktop + ? "w-80" + : "scrollbar-container max-h-[75dvh] overflow-y-auto overflow-x-hidden px-4", )} -
- ); - })} -
-
-
- {motionData ? ( - +
+
+ + { - if (playing && scrubbing) { - setPlaying(false); - } - - setScrubbing(scrubbing); + cameraPreviews={relevantPreviews} + motionRanges={motionPreviewRanges} + isLoadingMotionRanges={ + motionData == undefined || overlapReviewSegments == undefined + } + playbackRate={playbackRate} + nonMotionAlpha={dimStrength / 100} + onSeek={(timestamp) => { + onOpenRecording({ + camera: selectedMotionPreviewCamera.name, + startTime: timestamp, + severity: "significant_motion", + }); }} - dense={isMobileOnly} - isZooming={false} - zoomDirection={null} - alwaysShowMotionLine={true} /> - ) : ( - - )} -
+ + ) : ( +
+
3 && + isMobile && + "portrait:md:grid-cols-2 landscape:md:grid-cols-3", + isDesktop && "grid-cols-2 lg:grid-cols-3", + "gap-2 overflow-auto px-1 md:mx-2 md:gap-4 xl:grid-cols-3 3xl:grid-cols-4", + )} + > + {reviewCameras.map((camera) => { + let grow; + let spans; + const aspectRatio = camera.detect.width / camera.detect.height; + if (aspectRatio > 2) { + grow = "aspect-wide"; + spans = "sm:col-span-2"; + } else if (aspectRatio < 1) { + grow = "h-full aspect-tall"; + spans = "md:row-span-2"; + } else { + grow = "aspect-video"; + } + const detectionType = getDetectionType(camera.name); + return ( +
+ {motionData ? ( + <> + { + videoPlayersRef.current[camera.name] = controller; + }} + onClick={() => + onOpenRecording({ + camera: camera.name, + startTime: Math.min( + currentTime, + Date.now() / 1000 - 30, + ), + severity: "significant_motion", + }) + } + /> +
+
+ + + e.stopPropagation()} + > + + + + + { + e.stopPropagation(); + setMotionPreviewsCamera(camera.name); + }} + > + {t("motionPreviews.menuItem")} + + { + e.stopPropagation(); + setMotionSearchCamera(camera.name); + }} + > + {t("motionSearch.menuItem")} + + + +
+ + ) : ( + + )} +
+ ); + })} +
+
+ )} + {!selectedMotionPreviewCamera && ( +
+ {motionData ? ( + { + if (playing && scrubbing) { + setPlaying(false); + } - { - const wasPlaying = playing; + setScrubbing(scrubbing); + }} + dense={isMobileOnly} + isZooming={false} + zoomDirection={null} + alwaysShowMotionLine={true} + /> + ) : ( + + )} +
+ )} - if (wasPlaying) { - setPlaying(false); - } + {!selectedMotionPreviewCamera && ( + { + const wasPlaying = playing; - setCurrentTime(currentTime + diff); + if (wasPlaying) { + setPlaying(false); + } - if (wasPlaying) { - setTimeout(() => setPlaying(true), 100); - } - }} - onSetPlaybackRate={setPlaybackRate} - /> + setCurrentTime(currentTime + diff); + + if (wasPlaying) { + setTimeout(() => setPlaying(true), 100); + } + }} + onSetPlaybackRate={setPlaybackRate} + /> + )} ); } diff --git a/web/src/views/events/MotionPreviewsPane.tsx b/web/src/views/events/MotionPreviewsPane.tsx new file mode 100644 index 000000000..331a9af33 --- /dev/null +++ b/web/src/views/events/MotionPreviewsPane.tsx @@ -0,0 +1,898 @@ +import { MotionOnlyRange } from "@/hooks/use-camera-activity"; +import { Preview } from "@/types/preview"; +import { + MutableRefObject, + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from "react"; +import { isCurrentHour } from "@/utils/dateUtil"; +import { useTranslation } from "react-i18next"; +import { CameraConfig } from "@/types/frigateConfig"; +import useSWR from "swr"; +import { baseUrl } from "@/api/baseUrl"; +import { Recording } from "@/types/record"; +import { useResizeObserver } from "@/hooks/resize-observer"; +import { Skeleton } from "@/components/ui/skeleton"; +import ActivityIndicator from "@/components/indicators/activity-indicator"; +import TimeAgo from "@/components/dynamic/TimeAgo"; +import { useFormattedTimestamp } from "@/hooks/use-date-utils"; +import { FrigateConfig } from "@/types/frigateConfig"; + +const MOTION_HEATMAP_GRID_SIZE = 16; +const MIN_MOTION_CELL_ALPHA = 0.06; + +function getPreviewForMotionRange( + cameraPreviews: Preview[], + cameraName: string, + range: MotionOnlyRange, +) { + const matchingPreviews = cameraPreviews.filter( + (preview) => + preview.camera === cameraName && + preview.end > range.start_time && + preview.start < range.end_time, + ); + + if (!matchingPreviews.length) { + return; + } + + 
const getOverlap = (preview: Preview) => + Math.max( + 0, + Math.min(preview.end, range.end_time) - + Math.max(preview.start, range.start_time), + ); + + return matchingPreviews.reduce((best, current) => { + return getOverlap(current) > getOverlap(best) ? current : best; + }); +} + +function getRangeOverlapSeconds( + rangeStart: number, + rangeEnd: number, + recordingStart: number, + recordingEnd: number, +) { + return Math.max( + 0, + Math.min(rangeEnd, recordingEnd) - Math.max(rangeStart, recordingStart), + ); +} + +function getMotionHeatmapForRange( + recordings: Recording[], + range: MotionOnlyRange, +) { + const weightedHeatmap = new Map(); + let totalWeight = 0; + + recordings.forEach((recording) => { + const overlapSeconds = getRangeOverlapSeconds( + range.start_time, + range.end_time, + recording.start_time, + recording.end_time, + ); + + if (overlapSeconds <= 0) { + return; + } + + totalWeight += overlapSeconds; + + if (!recording.motion_heatmap) { + return; + } + + Object.entries(recording.motion_heatmap).forEach( + ([cellIndex, intensity]) => { + const index = Number(cellIndex); + const level = Number(intensity); + + if (Number.isNaN(index) || Number.isNaN(level) || level <= 0) { + return; + } + + const existingWeight = weightedHeatmap.get(index) ?? 0; + weightedHeatmap.set(index, existingWeight + level * overlapSeconds); + }, + ); + }); + + if (!totalWeight || weightedHeatmap.size === 0) { + return null; + } + + const mergedHeatmap: Record = {}; + weightedHeatmap.forEach((weightedLevel, index) => { + const normalizedLevel = Math.max( + 0, + Math.min(255, Math.round(weightedLevel / totalWeight)), + ); + + if (normalizedLevel > 0) { + mergedHeatmap[index.toString()] = normalizedLevel; + } + }); + + return Object.keys(mergedHeatmap).length > 0 ? 
mergedHeatmap : null; +} + +type MotionPreviewClipProps = { + cameraName: string; + range: MotionOnlyRange; + playbackRate: number; + preview?: Preview; + fallbackFrameTimes?: number[]; + motionHeatmap?: Record | null; + nonMotionAlpha: number; + isVisible: boolean; + onSeek: (timestamp: number) => void; +}; + +function MotionPreviewClip({ + cameraName, + range, + playbackRate, + preview, + fallbackFrameTimes, + motionHeatmap, + nonMotionAlpha, + isVisible, + onSeek, +}: MotionPreviewClipProps) { + const { t } = useTranslation(["views/events", "common"]); + const { data: config } = useSWR("config"); + const videoRef = useRef(null); + const dimOverlayCanvasRef = useRef(null); + const overlayContainerRef = useRef(null); + const [{ width: overlayWidth, height: overlayHeight }] = + useResizeObserver(overlayContainerRef); + const [videoLoaded, setVideoLoaded] = useState(false); + const [videoPlaying, setVideoPlaying] = useState(false); + const [fallbackImageLoaded, setFallbackImageLoaded] = useState(false); + const [mediaDimensions, setMediaDimensions] = useState<{ + width: number; + height: number; + } | null>(null); + + const [fallbackFrameIndex, setFallbackFrameIndex] = useState(0); + const [fallbackFramesReady, setFallbackFramesReady] = useState(false); + + const formattedDate = useFormattedTimestamp( + range.start_time, + config?.ui.time_format == "24hour" + ? 
t("time.formattedTimestampMonthDayHourMinute.24hour", { + ns: "common", + }) + : t("time.formattedTimestampMonthDayHourMinute.12hour", { + ns: "common", + }), + config?.ui.timezone, + ); + const fallbackFrameSrcs = useMemo(() => { + if (!fallbackFrameTimes || fallbackFrameTimes.length === 0) { + return [] as string[]; + } + + return fallbackFrameTimes.map( + (frameTime) => + `${baseUrl}api/preview/preview_${cameraName}-${frameTime}.webp/thumbnail.webp`, + ); + }, [cameraName, fallbackFrameTimes]); + + useEffect(() => { + setFallbackFrameIndex(0); + setFallbackFramesReady(false); + }, [range.start_time, range.end_time, fallbackFrameTimes]); + + useEffect(() => { + if (fallbackFrameSrcs.length === 0) { + setFallbackFramesReady(false); + return; + } + + let cancelled = false; + + const preloadFrames = async () => { + await Promise.allSettled( + fallbackFrameSrcs.map( + (src) => + new Promise((resolve) => { + const image = new Image(); + image.onload = () => resolve(); + image.onerror = () => resolve(); + image.src = src; + }), + ), + ); + + if (!cancelled) { + setFallbackFramesReady(true); + } + }; + + void preloadFrames(); + + return () => { + cancelled = true; + }; + }, [fallbackFrameSrcs]); + + useEffect(() => { + if (!fallbackFramesReady || fallbackFrameSrcs.length <= 1 || !isVisible) { + return; + } + + const intervalMs = Math.max( + 50, + Math.round(1000 / Math.max(1, playbackRate)), + ); + const intervalId = window.setInterval(() => { + setFallbackFrameIndex((previous) => { + return (previous + 1) % fallbackFrameSrcs.length; + }); + }, intervalMs); + + return () => { + window.clearInterval(intervalId); + }; + }, [fallbackFrameSrcs.length, fallbackFramesReady, isVisible, playbackRate]); + + const fallbackFrameSrc = useMemo(() => { + if (fallbackFrameSrcs.length === 0) { + return undefined; + } + + return fallbackFrameSrcs[fallbackFrameIndex] ?? 
fallbackFrameSrcs[0]; + }, [fallbackFrameIndex, fallbackFrameSrcs]); + + useEffect(() => { + setVideoLoaded(false); + setVideoPlaying(false); + setMediaDimensions(null); + }, [preview?.src]); + + useEffect(() => { + if (!preview || !isVisible || videoLoaded || !videoRef.current) { + return; + } + + if (videoRef.current.currentSrc || videoRef.current.error) { + setVideoLoaded(true); + } + }, [isVisible, preview, videoLoaded]); + + useEffect(() => { + setFallbackImageLoaded(false); + setMediaDimensions(null); + }, [fallbackFrameSrcs]); + + useEffect(() => { + if (!fallbackFrameSrc || !isVisible || !fallbackFramesReady) { + return; + } + + setFallbackImageLoaded(true); + }, [fallbackFrameSrc, fallbackFramesReady, isVisible]); + + const showLoadingIndicator = + (preview != undefined && isVisible && !videoPlaying) || + (fallbackFrameSrc != undefined && isVisible && !fallbackImageLoaded); + + const clipStart = useMemo(() => { + if (!preview) { + return 0; + } + + return Math.max(0, range.start_time - preview.start); + }, [preview, range.start_time]); + + const clipEnd = useMemo(() => { + if (!preview) { + return 0; + } + + const previewDuration = preview.end - preview.start; + return Math.min( + previewDuration, + Math.max(clipStart + 0.1, range.end_time - preview.start), + ); + }, [clipStart, preview, range.end_time]); + + const resetPlayback = useCallback(() => { + if (!videoRef.current || !preview) { + return; + } + + videoRef.current.currentTime = clipStart; + videoRef.current.playbackRate = playbackRate; + }, [clipStart, playbackRate, preview]); + + useEffect(() => { + if (!videoRef.current || !preview) { + return; + } + + if (!isVisible) { + videoRef.current.pause(); + videoRef.current.currentTime = clipStart; + return; + } + + if (videoRef.current.readyState >= 2) { + resetPlayback(); + void videoRef.current.play().catch(() => undefined); + } + }, [clipStart, isVisible, preview, resetPlayback]); + + const drawDimOverlay = useCallback(() => { + if 
(!dimOverlayCanvasRef.current) { + return; + } + + const canvas = dimOverlayCanvasRef.current; + const context = canvas.getContext("2d"); + + if (!context) { + return; + } + + if (overlayWidth <= 0 || overlayHeight <= 0) { + return; + } + + const width = Math.max(1, overlayWidth); + const height = Math.max(1, overlayHeight); + const dpr = window.devicePixelRatio || 1; + const pixelWidth = Math.max(1, Math.round(width * dpr)); + const pixelHeight = Math.max(1, Math.round(height * dpr)); + + if (canvas.width !== pixelWidth || canvas.height !== pixelHeight) { + canvas.width = pixelWidth; + canvas.height = pixelHeight; + } + + canvas.style.width = `${width}px`; + canvas.style.height = `${height}px`; + + context.setTransform(dpr, 0, 0, dpr, 0, 0); + context.clearRect(0, 0, width, height); + + if (!motionHeatmap) { + return; + } + + // Calculate the actual rendered media area (object-contain letterboxing) + let drawX = 0; + let drawY = 0; + let drawWidth = width; + let drawHeight = height; + + if ( + mediaDimensions && + mediaDimensions.width > 0 && + mediaDimensions.height > 0 + ) { + const containerAspect = width / height; + const mediaAspect = mediaDimensions.width / mediaDimensions.height; + + if (mediaAspect < containerAspect) { + // Portrait / tall: constrained by height, bars on left and right + drawHeight = height; + drawWidth = height * mediaAspect; + drawX = (width - drawWidth) / 2; + drawY = 0; + } else { + // Wide / landscape: constrained by width, bars on top and bottom + drawWidth = width; + drawHeight = width / mediaAspect; + drawX = 0; + drawY = (height - drawHeight) / 2; + } + } + + const heatmapLevels = Object.values(motionHeatmap) + .map((value) => Number(value)) + .filter((value) => Number.isFinite(value) && value > 0); + + const maxHeatmapLevel = + heatmapLevels.length > 0 ? 
Math.max(...heatmapLevels) : 0; + + const maskCanvas = document.createElement("canvas"); + maskCanvas.width = MOTION_HEATMAP_GRID_SIZE; + maskCanvas.height = MOTION_HEATMAP_GRID_SIZE; + + const maskContext = maskCanvas.getContext("2d"); + if (!maskContext) { + return; + } + + const imageData = maskContext.createImageData( + MOTION_HEATMAP_GRID_SIZE, + MOTION_HEATMAP_GRID_SIZE, + ); + + for (let index = 0; index < MOTION_HEATMAP_GRID_SIZE ** 2; index++) { + const level = Number(motionHeatmap[index.toString()] ?? 0); + const normalizedLevel = + maxHeatmapLevel > 0 + ? Math.min(1, Math.max(0, level / maxHeatmapLevel)) + : 0; + const boostedLevel = Math.sqrt(normalizedLevel); + const alpha = + nonMotionAlpha - + boostedLevel * (nonMotionAlpha - MIN_MOTION_CELL_ALPHA); + + const pixelOffset = index * 4; + imageData.data[pixelOffset] = 0; + imageData.data[pixelOffset + 1] = 0; + imageData.data[pixelOffset + 2] = 0; + imageData.data[pixelOffset + 3] = Math.round( + Math.max(0, Math.min(1, alpha)) * 255, + ); + } + + maskContext.putImageData(imageData, 0, 0); + context.imageSmoothingEnabled = true; + context.imageSmoothingQuality = "high"; + context.drawImage(maskCanvas, drawX, drawY, drawWidth, drawHeight); + }, [ + motionHeatmap, + nonMotionAlpha, + overlayHeight, + overlayWidth, + mediaDimensions, + ]); + + useEffect(() => { + drawDimOverlay(); + }, [drawDimOverlay]); + + return ( +
onSeek(range.start_time)} + > + {showLoadingIndicator && ( + + )} + {preview ? ( + <> + + {motionHeatmap && ( +
+ ); +} + +type MotionPreviewsPaneProps = { + camera: CameraConfig; + contentRef: MutableRefObject; + cameraPreviews: Preview[]; + motionRanges: MotionOnlyRange[]; + isLoadingMotionRanges?: boolean; + playbackRate: number; + nonMotionAlpha: number; + onSeek: (timestamp: number) => void; +}; + +export default function MotionPreviewsPane({ + camera, + contentRef, + cameraPreviews, + motionRanges, + isLoadingMotionRanges = false, + playbackRate, + nonMotionAlpha, + onSeek, +}: MotionPreviewsPaneProps) { + const { t } = useTranslation(["views/events"]); + const [scrollContainer, setScrollContainer] = useState( + null, + ); + + const [windowVisible, setWindowVisible] = useState(true); + useEffect(() => { + const visibilityListener = () => { + setWindowVisible(document.visibilityState == "visible"); + }; + + addEventListener("visibilitychange", visibilityListener); + + return () => { + removeEventListener("visibilitychange", visibilityListener); + }; + }, []); + + const [visibleClips, setVisibleClips] = useState([]); + const [hasVisibilityData, setHasVisibilityData] = useState(false); + const clipObserver = useRef(null); + + const recordingTimeRange = useMemo(() => { + if (!motionRanges.length) { + return null; + } + + return motionRanges.reduce( + (bounds, range) => ({ + after: Math.min(bounds.after, range.start_time), + before: Math.max(bounds.before, range.end_time), + }), + { + after: motionRanges[0].start_time, + before: motionRanges[0].end_time, + }, + ); + }, [motionRanges]); + + const { data: cameraRecordings } = useSWR( + recordingTimeRange + ? [ + `${camera.name}/recordings`, + { + after: Math.floor(recordingTimeRange.after), + before: Math.ceil(recordingTimeRange.before), + }, + ] + : null, + { + revalidateOnFocus: false, + revalidateOnReconnect: false, + }, + ); + const { data: previewFrames } = useSWR( + recordingTimeRange + ? 
`preview/${camera.name}/start/${Math.floor(recordingTimeRange.after)}/end/${Math.ceil(recordingTimeRange.before)}/frames` + : null, + { + revalidateOnFocus: false, + revalidateOnReconnect: false, + }, + ); + + const previewFrameTimes = useMemo(() => { + if (!previewFrames) { + return [] as number[]; + } + + return previewFrames + .map((frame) => { + const timestampPart = frame.split("-").at(-1)?.replace(".webp", ""); + return timestampPart ? Number(timestampPart) : NaN; + }) + .filter((value) => Number.isFinite(value)) + .sort((a, b) => a - b); + }, [previewFrames]); + + const getFallbackFrameTimesForRange = useCallback( + (range: MotionOnlyRange) => { + if (!isCurrentHour(range.end_time) || previewFrameTimes.length === 0) { + return [] as number[]; + } + + const inRangeFrames = previewFrameTimes.filter( + (frameTime) => + frameTime >= range.start_time && frameTime <= range.end_time, + ); + + // Use all in-range frames when enough data exists for natural animation + if (inRangeFrames.length > 1) { + return inRangeFrames; + } + + // If sparse, keep the single in-range frame and add only the next 2 frames + if (inRangeFrames.length === 1) { + const inRangeFrame = inRangeFrames[0]; + const nextFrames = previewFrameTimes + .filter((frameTime) => frameTime > inRangeFrame) + .slice(0, 2); + + return [inRangeFrame, ...nextFrames]; + } + + const nextFramesFromStart = previewFrameTimes + .filter((frameTime) => frameTime >= range.start_time) + .slice(0, 3); + // If no in-range frame exists, take up to 3 frames starting at clip start + if (nextFramesFromStart.length > 0) { + return nextFramesFromStart; + } + + const lastFrame = previewFrameTimes.at(-1); + return lastFrame != undefined ? 
[lastFrame] : []; + }, + [previewFrameTimes], + ); + + const setContentNode = useCallback( + (node: HTMLDivElement | null) => { + contentRef.current = node; + setScrollContainer(node); + }, + [contentRef], + ); + + useEffect(() => { + if (!scrollContainer) { + return; + } + + const visibleClipIds = new Set(); + clipObserver.current = new IntersectionObserver( + (entries) => { + setHasVisibilityData(true); + + entries.forEach((entry) => { + const clipId = (entry.target as HTMLElement).dataset.clipId; + + if (!clipId) { + return; + } + + if (entry.isIntersecting) { + visibleClipIds.add(clipId); + } else { + visibleClipIds.delete(clipId); + } + }); + + const rootRect = scrollContainer.getBoundingClientRect(); + const prunedVisibleClipIds = [...visibleClipIds].filter((clipId) => { + const clipElement = scrollContainer.querySelector( + `[data-clip-id="${clipId}"]`, + ); + + if (!clipElement) { + return false; + } + + const clipRect = clipElement.getBoundingClientRect(); + + return ( + clipRect.bottom > rootRect.top && clipRect.top < rootRect.bottom + ); + }); + + setVisibleClips(prunedVisibleClipIds); + }, + { + root: scrollContainer, + threshold: 0, + }, + ); + + scrollContainer + .querySelectorAll("[data-clip-id]") + .forEach((node) => { + clipObserver.current?.observe(node); + }); + + return () => { + clipObserver.current?.disconnect(); + }; + }, [scrollContainer]); + + const clipRef = useCallback((node: HTMLElement | null) => { + if (!clipObserver.current) { + return; + } + + try { + if (node) { + clipObserver.current.observe(node); + } + } catch { + // no op + } + }, []); + + const clipData = useMemo( + () => + motionRanges + .filter((range) => range.end_time > range.start_time) + .sort((left, right) => right.start_time - left.start_time) + .map((range) => { + const preview = getPreviewForMotionRange( + cameraPreviews, + camera.name, + range, + ); + + return { + range, + preview, + fallbackFrameTimes: !preview + ? 
getFallbackFrameTimesForRange(range) + : undefined, + motionHeatmap: getMotionHeatmapForRange( + cameraRecordings ?? [], + range, + ), + }; + }), + [ + cameraPreviews, + camera.name, + cameraRecordings, + getFallbackFrameTimesForRange, + motionRanges, + ], + ); + + const hasCurrentHourRanges = useMemo( + () => motionRanges.some((range) => isCurrentHour(range.end_time)), + [motionRanges], + ); + + const isLoadingPane = + isLoadingMotionRanges || + (motionRanges.length > 0 && cameraRecordings == undefined) || + (hasCurrentHourRanges && previewFrames == undefined); + + if (isLoadingPane) { + return ( + + ); + } + + return ( +
+
+ {clipData.length === 0 ? ( +
+ {t("motionPreviews.empty")} +
+ ) : ( +
+ {clipData.map( + ({ range, preview, fallbackFrameTimes, motionHeatmap }, idx) => ( +
+ +
+ ), + )} +
+ )} +
+
+ ); +} diff --git a/web/src/views/motion-search/MotionSearchDialog.tsx b/web/src/views/motion-search/MotionSearchDialog.tsx new file mode 100644 index 000000000..5f31f1d07 --- /dev/null +++ b/web/src/views/motion-search/MotionSearchDialog.tsx @@ -0,0 +1,708 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { useTranslation } from "react-i18next"; +import { isDesktop, isIOS, isMobile } from "react-device-detect"; +import { FaArrowRight, FaCalendarAlt, FaCheckCircle } from "react-icons/fa"; +import { MdOutlineRestartAlt, MdUndo } from "react-icons/md"; + +import { FrigateConfig } from "@/types/frigateConfig"; +import { TimeRange } from "@/types/timeline"; + +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { Drawer, DrawerContent } from "@/components/ui/drawer"; +import { Label } from "@/components/ui/label"; +import { Slider } from "@/components/ui/slider"; +import { Switch } from "@/components/ui/switch"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "@/components/ui/popover"; +import { SelectSeparator } from "@/components/ui/select"; +import { + Tooltip, + TooltipContent, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import ActivityIndicator from "@/components/indicators/activity-indicator"; +import { CameraNameLabel } from "@/components/camera/FriendlyNameLabel"; +import { TimezoneAwareCalendar } from "@/components/overlay/ReviewActivityCalendar"; + +import { useApiHost } from "@/api"; +import { useResizeObserver } from "@/hooks/resize-observer"; +import { useFormattedTimestamp } from "@/hooks/use-date-utils"; +import { getUTCOffset } from "@/utils/dateUtil"; +import { cn } from "@/lib/utils"; +import MotionSearchROICanvas from "./MotionSearchROICanvas"; 
+import { TransformComponent, TransformWrapper } from "react-zoom-pan-pinch"; + +type MotionSearchDialogProps = { + open: boolean; + onOpenChange: (open: boolean) => void; + config: FrigateConfig; + cameras: string[]; + selectedCamera: string | null; + onCameraSelect: (camera: string) => void; + cameraLocked?: boolean; + polygonPoints: number[][]; + setPolygonPoints: React.Dispatch>; + isDrawingROI: boolean; + setIsDrawingROI: React.Dispatch>; + parallelMode: boolean; + setParallelMode: React.Dispatch>; + threshold: number; + setThreshold: React.Dispatch>; + minArea: number; + setMinArea: React.Dispatch>; + frameSkip: number; + setFrameSkip: React.Dispatch>; + maxResults: number; + setMaxResults: React.Dispatch>; + searchRange?: TimeRange; + setSearchRange: React.Dispatch>; + defaultRange: TimeRange; + isSearching: boolean; + canStartSearch: boolean; + onStartSearch: () => void; + timezone?: string; +}; + +export default function MotionSearchDialog({ + open, + onOpenChange, + config, + cameras, + selectedCamera, + onCameraSelect, + cameraLocked = false, + polygonPoints, + setPolygonPoints, + isDrawingROI, + setIsDrawingROI, + parallelMode, + setParallelMode, + threshold, + setThreshold, + minArea, + setMinArea, + frameSkip, + setFrameSkip, + maxResults, + setMaxResults, + searchRange, + setSearchRange, + defaultRange, + isSearching, + canStartSearch, + onStartSearch, + timezone, +}: MotionSearchDialogProps) { + const { t } = useTranslation(["views/motionSearch", "common"]); + const apiHost = useApiHost(); + const containerRef = useRef(null); + const [{ width: containerWidth, height: containerHeight }] = + useResizeObserver(containerRef); + const [imageLoaded, setImageLoaded] = useState(false); + + const cameraConfig = useMemo(() => { + if (!selectedCamera) return undefined; + return config.cameras[selectedCamera]; + }, [config, selectedCamera]); + + const polygonClosed = useMemo( + () => !isDrawingROI && polygonPoints.length >= 3, + [isDrawingROI, 
polygonPoints.length], + ); + + const undoPolygonPoint = useCallback(() => { + if (polygonPoints.length === 0 || isSearching) { + return; + } + + setPolygonPoints((prev) => prev.slice(0, -1)); + setIsDrawingROI(true); + }, [isSearching, setIsDrawingROI, setPolygonPoints, polygonPoints.length]); + + const resetPolygon = useCallback(() => { + if (polygonPoints.length === 0 || isSearching) { + return; + } + + setPolygonPoints([]); + setIsDrawingROI(true); + }, [isSearching, polygonPoints.length, setIsDrawingROI, setPolygonPoints]); + + const imageSize = useMemo(() => { + if (!containerWidth || !containerHeight || !cameraConfig) { + return { width: 0, height: 0 }; + } + + const cameraAspectRatio = + cameraConfig.detect.width / cameraConfig.detect.height; + const availableAspectRatio = containerWidth / containerHeight; + + if (availableAspectRatio >= cameraAspectRatio) { + return { + width: containerHeight * cameraAspectRatio, + height: containerHeight, + }; + } + + return { + width: containerWidth, + height: containerWidth / cameraAspectRatio, + }; + }, [containerWidth, containerHeight, cameraConfig]); + + useEffect(() => { + setImageLoaded(false); + }, [selectedCamera]); + + const Overlay = isDesktop ? Dialog : Drawer; + const Content = isDesktop ? DialogContent : DrawerContent; + + return ( + + event.preventDefault(), + } + : {})} + className={cn( + isDesktop + ? "scrollbar-container max-h-[90dvh] overflow-y-auto sm:max-w-[75%]" + : "flex max-h-[90dvh] flex-col overflow-hidden rounded-lg pb-4", + )} + > +
+ + + {t("dialog.title")} + +

+ {t("description")} +

+
+ +
+
+ {(!cameraLocked || !selectedCamera) && ( +
+
+
+ + +
+
+
+ )} + + +
+ +
+ {selectedCamera && cameraConfig && imageSize.width > 0 ? ( +
+ {t("dialog.previewAlt", setImageLoaded(true)} + /> + {!imageLoaded && ( +
+ +
+ )} + +
+ ) : ( +
+ {t("selectCamera")} +
+ )} +
+
+
+
+ + {selectedCamera && ( +
+
+ {t("polygonControls.points", { + count: polygonPoints.length, + })} + {polygonClosed && } +
+
+ + + + + + {t("polygonControls.undo")} + + + + + + + + {t("polygonControls.reset")} + + +
+
+ )} +
+ +
+
+

+ {t("settings.title")} +

+
+
+ +
+ setThreshold(value)} + /> + {threshold} +
+

+ {t("settings.thresholdDesc")} +

+
+
+ +
+ setMinArea(value)} + /> + {minArea}% +
+

+ {t("settings.minAreaDesc")} +

+
+
+ +
+ setFrameSkip(value)} + /> + {frameSkip} +
+

+ {t("settings.frameSkipDesc")} +

+
+
+
+ + +
+

+ {t("settings.parallelModeDesc")} +

+
+
+ +
+ setMaxResults(value)} + /> + {maxResults} +
+

+ {t("settings.maxResultsDesc")} +

+
+
+
+ + + + +
+
+
+
+
+ ); +} + +type SearchRangeSelectorProps = { + range?: TimeRange; + setRange: React.Dispatch>; + defaultRange: TimeRange; + timeFormat?: "browser" | "12hour" | "24hour"; + timezone?: string; +}; + +function SearchRangeSelector({ + range, + setRange, + defaultRange, + timeFormat, + timezone, +}: SearchRangeSelectorProps) { + const { t } = useTranslation(["views/motionSearch", "common"]); + const [startOpen, setStartOpen] = useState(false); + const [endOpen, setEndOpen] = useState(false); + + const timezoneOffset = useMemo( + () => + timezone ? Math.round(getUTCOffset(new Date(), timezone)) : undefined, + [timezone], + ); + const localTimeOffset = useMemo( + () => + Math.round( + getUTCOffset( + new Date(), + Intl.DateTimeFormat().resolvedOptions().timeZone, + ), + ), + [], + ); + + const startTime = useMemo(() => { + let time = range?.after ?? defaultRange.after; + + if (timezoneOffset !== undefined) { + time = time + (timezoneOffset - localTimeOffset) * 60; + } + + return time; + }, [range, defaultRange, timezoneOffset, localTimeOffset]); + + const endTime = useMemo(() => { + let time = range?.before ?? defaultRange.before; + + if (timezoneOffset !== undefined) { + time = time + (timezoneOffset - localTimeOffset) * 60; + } + + return time; + }, [range, defaultRange, timezoneOffset, localTimeOffset]); + + const formattedStart = useFormattedTimestamp( + startTime, + timeFormat === "24hour" + ? t("time.formattedTimestamp.24hour", { ns: "common" }) + : t("time.formattedTimestamp.12hour", { ns: "common" }), + ); + const formattedEnd = useFormattedTimestamp( + endTime, + timeFormat === "24hour" + ? 
t("time.formattedTimestamp.24hour", { ns: "common" }) + : t("time.formattedTimestamp.12hour", { ns: "common" }), + ); + + const startClock = useMemo(() => { + const date = new Date(startTime * 1000); + return `${date.getHours().toString().padStart(2, "0")}:${date + .getMinutes() + .toString() + .padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`; + }, [startTime]); + + const endClock = useMemo(() => { + const date = new Date(endTime * 1000); + return `${date.getHours().toString().padStart(2, "0")}:${date + .getMinutes() + .toString() + .padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`; + }, [endTime]); + + return ( +
+ +
+ +
+ { + if (!open) { + setStartOpen(false); + } + }} + modal={false} + > + + + + + { + if (!day) { + return; + } + + setRange({ + before: endTime, + after: day.getTime() / 1000 + 1, + }); + }} + /> + + { + const clock = e.target.value; + const [hour, minute, second] = isIOS + ? [...clock.split(":"), "00"] + : clock.split(":"); + + const start = new Date(startTime * 1000); + start.setHours( + parseInt(hour), + parseInt(minute), + parseInt(second ?? 0), + 0, + ); + setRange({ + before: endTime, + after: start.getTime() / 1000, + }); + }} + /> + + + + { + if (!open) { + setEndOpen(false); + } + }} + modal={false} + > + + + + + { + if (!day) { + return; + } + + setRange({ + after: startTime, + before: day.getTime() / 1000, + }); + }} + /> + + { + const clock = e.target.value; + const [hour, minute, second] = isIOS + ? [...clock.split(":"), "00"] + : clock.split(":"); + + const end = new Date(endTime * 1000); + end.setHours( + parseInt(hour), + parseInt(minute), + parseInt(second ?? 0), + 0, + ); + setRange({ + before: end.getTime() / 1000, + after: startTime, + }); + }} + /> + + +
+
+
+ ); +} diff --git a/web/src/views/motion-search/MotionSearchROICanvas.tsx b/web/src/views/motion-search/MotionSearchROICanvas.tsx new file mode 100644 index 000000000..f393a9cfb --- /dev/null +++ b/web/src/views/motion-search/MotionSearchROICanvas.tsx @@ -0,0 +1,398 @@ +import { useCallback, useMemo, useRef } from "react"; +import { Stage, Layer, Line, Circle, Image } from "react-konva"; +import Konva from "konva"; +import type { KonvaEventObject } from "konva/lib/Node"; +import { flattenPoints } from "@/utils/canvasUtil"; +import { cn } from "@/lib/utils"; +import { useResizeObserver } from "@/hooks/resize-observer"; + +type MotionSearchROICanvasProps = { + camera: string; + width: number; + height: number; + polygonPoints: number[][]; + setPolygonPoints: React.Dispatch>; + isDrawing: boolean; + setIsDrawing: React.Dispatch>; + isInteractive?: boolean; + motionHeatmap?: Record | null; + showMotionHeatmap?: boolean; +}; + +export default function MotionSearchROICanvas({ + width, + height, + polygonPoints, + setPolygonPoints, + isDrawing, + setIsDrawing, + isInteractive = true, + motionHeatmap, + showMotionHeatmap = false, +}: MotionSearchROICanvasProps) { + const containerRef = useRef(null); + const stageRef = useRef(null); + const [{ width: containerWidth, height: containerHeight }] = + useResizeObserver(containerRef); + + const stageSize = useMemo( + () => ({ + width: containerWidth > 0 ? Math.ceil(containerWidth) : 0, + height: containerHeight > 0 ? Math.ceil(containerHeight) : 0, + }), + [containerHeight, containerWidth], + ); + + const videoRect = useMemo(() => { + const stageWidth = stageSize.width; + const stageHeight = stageSize.height; + const sourceWidth = width > 0 ? width : 1; + const sourceHeight = height > 0 ? 
height : 1; + + if (stageWidth <= 0 || stageHeight <= 0) { + return { x: 0, y: 0, width: 0, height: 0 }; + } + + const stageAspect = stageWidth / stageHeight; + const sourceAspect = sourceWidth / sourceHeight; + + if (stageAspect > sourceAspect) { + const fittedHeight = stageHeight; + const fittedWidth = fittedHeight * sourceAspect; + return { + x: (stageWidth - fittedWidth) / 2, + y: 0, + width: fittedWidth, + height: fittedHeight, + }; + } + + const fittedWidth = stageWidth; + const fittedHeight = fittedWidth / sourceAspect; + return { + x: 0, + y: (stageHeight - fittedHeight) / 2, + width: fittedWidth, + height: fittedHeight, + }; + }, [height, stageSize.height, stageSize.width, width]); + + // Convert normalized points to stage coordinates + const scaledPoints = useMemo(() => { + return polygonPoints.map((point) => [ + videoRect.x + point[0] * videoRect.width, + videoRect.y + point[1] * videoRect.height, + ]); + }, [ + polygonPoints, + videoRect.height, + videoRect.width, + videoRect.x, + videoRect.y, + ]); + + const flattenedPoints = useMemo( + () => flattenPoints(scaledPoints), + [scaledPoints], + ); + + const heatmapOverlayCanvas = useMemo(() => { + if ( + !showMotionHeatmap || + !motionHeatmap || + videoRect.width === 0 || + videoRect.height === 0 + ) { + return null; + } + + const gridSize = 16; + const heatmapLevels = Object.values(motionHeatmap) + .map((value) => Number(value)) + .filter((value) => Number.isFinite(value) && value > 0); + + const maxHeatmapLevel = + heatmapLevels.length > 0 ? 
Math.max(...heatmapLevels) : 0; + + if (maxHeatmapLevel <= 0) { + return null; + } + + const maskCanvas = document.createElement("canvas"); + maskCanvas.width = gridSize; + maskCanvas.height = gridSize; + + const maskContext = maskCanvas.getContext("2d"); + if (!maskContext) { + return null; + } + + const imageData = maskContext.createImageData(gridSize, gridSize); + const heatmapStops = [ + { t: 0, r: 0, g: 0, b: 255 }, + { t: 0.25, r: 0, g: 255, b: 255 }, + { t: 0.5, r: 0, g: 255, b: 0 }, + { t: 0.75, r: 255, g: 255, b: 0 }, + { t: 1, r: 255, g: 0, b: 0 }, + ]; + + const getHeatmapColor = (value: number) => { + const clampedValue = Math.min(1, Math.max(0, value)); + + const upperIndex = heatmapStops.findIndex( + (stop) => stop.t >= clampedValue, + ); + if (upperIndex <= 0) { + return heatmapStops[0]; + } + + const lower = heatmapStops[upperIndex - 1]; + const upper = heatmapStops[upperIndex]; + const range = upper.t - lower.t; + const blend = range > 0 ? (clampedValue - lower.t) / range : 0; + + return { + r: Math.round(lower.r + (upper.r - lower.r) * blend), + g: Math.round(lower.g + (upper.g - lower.g) * blend), + b: Math.round(lower.b + (upper.b - lower.b) * blend), + }; + }; + + for (let index = 0; index < gridSize ** 2; index++) { + const level = Number(motionHeatmap[index.toString()] ?? 0); + const normalizedLevel = + level > 0 ? Math.min(1, Math.max(0, level / maxHeatmapLevel)) : 0; + const alpha = + level > 0 + ? 
Math.min(0.95, Math.max(0.1, 0.15 + normalizedLevel * 0.5)) + : 0; + const color = getHeatmapColor(normalizedLevel); + + const pixelOffset = index * 4; + imageData.data[pixelOffset] = color.r; + imageData.data[pixelOffset + 1] = color.g; + imageData.data[pixelOffset + 2] = color.b; + imageData.data[pixelOffset + 3] = Math.round(alpha * 255); + } + + maskContext.putImageData(imageData, 0, 0); + + return maskCanvas; + }, [motionHeatmap, showMotionHeatmap, videoRect.height, videoRect.width]); + + // Handle mouse click to add point + const handleMouseDown = useCallback( + (e: KonvaEventObject) => { + if (!isInteractive || !isDrawing) return; + if (videoRect.width <= 0 || videoRect.height <= 0) return; + + const stage = e.target.getStage(); + if (!stage) return; + + const mousePos = stage.getPointerPosition(); + if (!mousePos) return; + + const intersection = stage.getIntersection(mousePos); + + // If clicking on first point and we have at least 3 points, close the polygon + if (polygonPoints.length >= 3 && intersection?.name() === "point-0") { + setIsDrawing(false); + return; + } + + // Only add point if not clicking on an existing point + if (intersection?.getClassName() !== "Circle") { + const clampedX = Math.min( + Math.max(mousePos.x, videoRect.x), + videoRect.x + videoRect.width, + ); + const clampedY = Math.min( + Math.max(mousePos.y, videoRect.y), + videoRect.y + videoRect.height, + ); + + // Convert to normalized coordinates (0-1) + const normalizedX = (clampedX - videoRect.x) / videoRect.width; + const normalizedY = (clampedY - videoRect.y) / videoRect.height; + + setPolygonPoints([...polygonPoints, [normalizedX, normalizedY]]); + } + }, + [ + isDrawing, + polygonPoints, + setPolygonPoints, + setIsDrawing, + isInteractive, + videoRect.height, + videoRect.width, + videoRect.x, + videoRect.y, + ], + ); + + // Handle point drag + const handlePointDragMove = useCallback( + (e: KonvaEventObject, index: number) => { + if (!isInteractive) return; + const stage = 
e.target.getStage(); + if (!stage) return; + + const pos = { x: e.target.x(), y: e.target.y() }; + + // Constrain to fitted video boundaries + pos.x = Math.max( + videoRect.x, + Math.min(pos.x, videoRect.x + videoRect.width), + ); + pos.y = Math.max( + videoRect.y, + Math.min(pos.y, videoRect.y + videoRect.height), + ); + + // Convert to normalized coordinates + const normalizedX = (pos.x - videoRect.x) / videoRect.width; + const normalizedY = (pos.y - videoRect.y) / videoRect.height; + + const newPoints = [...polygonPoints]; + newPoints[index] = [normalizedX, normalizedY]; + setPolygonPoints(newPoints); + }, + [ + polygonPoints, + setPolygonPoints, + isInteractive, + videoRect.height, + videoRect.width, + videoRect.x, + videoRect.y, + ], + ); + + // Handle right-click to delete point + const handleContextMenu = useCallback( + (e: KonvaEventObject, index: number) => { + if (!isInteractive) return; + e.evt.preventDefault(); + + if (polygonPoints.length <= 3 && !isDrawing) { + // Don't delete if we have a closed polygon with minimum points + return; + } + + const newPoints = polygonPoints.filter((_, i) => i !== index); + setPolygonPoints(newPoints); + + // If we deleted enough points, go back to drawing mode + if (newPoints.length < 3) { + setIsDrawing(true); + } + }, + [polygonPoints, isDrawing, setPolygonPoints, setIsDrawing, isInteractive], + ); + + // Handle mouse hover on first point + const handleMouseOverPoint = useCallback( + (e: KonvaEventObject, index: number) => { + if (!isInteractive) return; + if (!isDrawing || polygonPoints.length < 3 || index !== 0) return; + e.target.scale({ x: 2, y: 2 }); + }, + [isDrawing, isInteractive, polygonPoints.length], + ); + + const handleMouseOutPoint = useCallback( + (e: KonvaEventObject, index: number) => { + if (!isInteractive) return; + if (index === 0) { + e.target.scale({ x: 1, y: 1 }); + } + }, + [isInteractive], + ); + + const vertexRadius = 6; + const polygonColorString = "rgba(66, 135, 245, 0.8)"; + const 
polygonFillColor = "rgba(66, 135, 245, 0.2)"; + + return ( +
+ {stageSize.width > 0 && stageSize.height > 0 && ( + e.evt.preventDefault()} + className="absolute inset-0" + > + + {/* Segment heatmap overlay */} + {heatmapOverlayCanvas && ( + + )} + + {/* Polygon outline */} + {scaledPoints.length > 0 && ( + = 3} + fill={ + !isDrawing && scaledPoints.length >= 3 + ? polygonFillColor + : undefined + } + /> + )} + + {/* Draw line from last point to cursor when drawing */} + {isDrawing && scaledPoints.length > 0 && ( + + )} + + {/* Vertex points */} + {scaledPoints.map((point, index) => ( + handlePointDragMove(e, index)} + onMouseOver={(e) => handleMouseOverPoint(e, index)} + onMouseOut={(e) => handleMouseOutPoint(e, index)} + onContextMenu={(e) => handleContextMenu(e, index)} + /> + ))} + + + )} +
+ ); +} diff --git a/web/src/views/motion-search/MotionSearchView.tsx b/web/src/views/motion-search/MotionSearchView.tsx new file mode 100644 index 000000000..6789dad89 --- /dev/null +++ b/web/src/views/motion-search/MotionSearchView.tsx @@ -0,0 +1,1491 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { useTranslation } from "react-i18next"; +import useSWR from "swr"; +import axios from "axios"; +import { isDesktop, isMobile } from "react-device-detect"; +import Logo from "@/components/Logo"; +import { FrigateConfig } from "@/types/frigateConfig"; +import { TimeRange } from "@/types/timeline"; +import { RecordingsSummary } from "@/types/review"; +import { ExportMode } from "@/types/filter"; +import { + MotionSearchRequest, + MotionSearchStartResponse, + MotionSearchStatusResponse, + MotionSearchResult, + MotionSearchMetrics, +} from "@/types/motionSearch"; + +import { Button } from "@/components/ui/button"; +import { Switch } from "@/components/ui/switch"; +import { Toaster } from "@/components/ui/sonner"; +import { toast } from "sonner"; +import { cn } from "@/lib/utils"; +import { Skeleton } from "@/components/ui/skeleton"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Progress } from "@/components/ui/progress"; + +import DynamicVideoPlayer from "@/components/player/dynamic/DynamicVideoPlayer"; +import { DynamicVideoController } from "@/components/player/dynamic/DynamicVideoController"; +import { DetailStreamProvider } from "@/context/detail-stream-context"; +import MotionReviewTimeline from "@/components/timeline/MotionReviewTimeline"; +import CalendarFilterButton from "@/components/filter/CalendarFilterButton"; +import ExportDialog from "@/components/overlay/ExportDialog"; +import SaveExportOverlay from "@/components/overlay/SaveExportOverlay"; +import ReviewActivityCalendar from "@/components/overlay/ReviewActivityCalendar"; +import { Drawer, DrawerContent, DrawerTrigger } from 
"@/components/ui/drawer"; +import { SelectSeparator } from "@/components/ui/select"; + +import { useResizeObserver } from "@/hooks/resize-observer"; +import { useFullscreen } from "@/hooks/use-fullscreen"; +import { useTimelineZoom } from "@/hooks/use-timeline-zoom"; +import { useTimelineUtils } from "@/hooks/use-timeline-utils"; +import { useCameraPreviews } from "@/hooks/use-camera-previews"; +import { getChunkedTimeDay } from "@/utils/timelineUtil"; + +import { MotionData, ZoomLevel } from "@/types/review"; +import { + ASPECT_VERTICAL_LAYOUT, + ASPECT_WIDE_LAYOUT, + Recording, + RecordingSegment, +} from "@/types/record"; +import { VideoResolutionType } from "@/types/live"; +import { useFormattedTimestamp } from "@/hooks/use-date-utils"; +import MotionSearchROICanvas from "./MotionSearchROICanvas"; +import MotionSearchDialog from "./MotionSearchDialog"; +import { IoMdArrowRoundBack } from "react-icons/io"; +import { FaArrowDown, FaCalendarAlt, FaCog, FaFire } from "react-icons/fa"; +import { useNavigate } from "react-router-dom"; +import { LuSearch } from "react-icons/lu"; +import ActivityIndicator from "@/components/indicators/activity-indicator"; + +type MotionSearchViewProps = { + config: FrigateConfig; + cameras: string[]; + selectedCamera: string | null; + onCameraSelect: (camera: string) => void; + cameraLocked?: boolean; + selectedDay: Date | undefined; + onDaySelect: (day: Date | undefined) => void; + timeRange: TimeRange; + timezone: string | undefined; + onBack?: () => void; +}; + +const DEFAULT_EXPORT_WINDOW_SECONDS = 60; + +export default function MotionSearchView({ + config, + cameras, + selectedCamera, + onCameraSelect, + cameraLocked = false, + selectedDay, + onDaySelect, + timeRange, + timezone, + onBack, +}: MotionSearchViewProps) { + const { t } = useTranslation([ + "views/motionSearch", + "common", + "views/recording", + ]); + const navigate = useNavigate(); + + const resultTimestampFormat = useMemo( + () => + config.ui?.time_format === 
"24hour" + ? t("time.formattedTimestamp.24hour", { ns: "common" }) + : t("time.formattedTimestamp.12hour", { ns: "common" }), + [config.ui?.time_format, t], + ); + + // Refs + const contentRef = useRef(null); + const mainLayoutRef = useRef(null); + const timelineRef = useRef(null); + const mainControllerRef = useRef(null); + const jobIdRef = useRef(null); + const jobCameraRef = useRef(null); + + const [isSearchDialogOpen, setIsSearchDialogOpen] = useState(true); + const [isMobileSettingsOpen, setIsMobileSettingsOpen] = useState(false); + const [mobileSettingsMode, setMobileSettingsMode] = useState< + "actions" | "calendar" + >("actions"); + + // Recordings summary for calendar – defer until dialog is closed + // so the preview image in the dialog loads without competing requests + const { data: recordingsSummary } = useSWR( + selectedCamera && !isSearchDialogOpen + ? [ + "recordings/summary", + { + timezone: timezone, + cameras: selectedCamera, + }, + ] + : null, + ); + + // Camera previews – defer until dialog is closed + const allPreviews = useCameraPreviews( + isSearchDialogOpen ? { after: 0, before: 0 } : timeRange, + { + camera: selectedCamera ?? undefined, + }, + ); + + // ROI state + const [polygonPoints, setPolygonPoints] = useState([]); + const [isDrawingROI, setIsDrawingROI] = useState(true); + + // Search settings + const [parallelMode, setParallelMode] = useState(false); + const [threshold, setThreshold] = useState(30); + const [minArea, setMinArea] = useState(20); + const [frameSkip, setFrameSkip] = useState(10); + const [maxResults, setMaxResults] = useState(25); + + // Job state + const [jobId, setJobId] = useState(null); + const [jobCamera, setJobCamera] = useState(null); + + // Job polling with SWR + const { data: jobStatus } = useSWR( + jobId && jobCamera ? 
[`${jobCamera}/search/motion/${jobId}`] : null, + { refreshInterval: 1000 }, + ); + + // Search state + const [isSearching, setIsSearching] = useState(false); + const [searchResults, setSearchResults] = useState([]); + const [showSegmentHeatmap, setShowSegmentHeatmap] = useState(false); + const [searchMetrics, setSearchMetrics] = + useState(null); + const [hasSearched, setHasSearched] = useState(false); + const [searchRange, setSearchRange] = useState( + undefined, + ); + const [pendingSeekTime, setPendingSeekTime] = useState(null); + + // Export state + const [exportMode, setExportMode] = useState("none"); + const [exportRange, setExportRange] = useState(); + const [showExportPreview, setShowExportPreview] = useState(false); + + // Timeline state + const initialStartTime = timeRange.before - 60; + const [scrubbing, setScrubbing] = useState(false); + const [currentTime, setCurrentTime] = useState(initialStartTime); + const [playerTime, setPlayerTime] = useState(initialStartTime); + const [playbackStart, setPlaybackStart] = useState(initialStartTime); + + const chunkedTimeRange = useMemo( + () => getChunkedTimeDay(timeRange), + [timeRange], + ); + + const [selectedRangeIdx, setSelectedRangeIdx] = useState(() => { + const ranges = getChunkedTimeDay(timeRange); + const index = ranges.findIndex( + (chunk) => + chunk.after <= initialStartTime && chunk.before >= initialStartTime, + ); + return index === -1 ? ranges.length - 1 : index; + }); + + const currentTimeRange = useMemo( + () => + chunkedTimeRange[selectedRangeIdx] ?? 
+ chunkedTimeRange[chunkedTimeRange.length - 1], + [selectedRangeIdx, chunkedTimeRange], + ); + + const clampExportTime = useCallback( + (value: number) => + Math.min(timeRange.before, Math.max(timeRange.after, value)), + [timeRange.after, timeRange.before], + ); + + const buildDefaultExportRange = useCallback( + (anchorTime: number): TimeRange => { + const halfWindow = DEFAULT_EXPORT_WINDOW_SECONDS / 2; + let after = clampExportTime(anchorTime - halfWindow); + let before = clampExportTime(anchorTime + halfWindow); + + if (before <= after) { + before = clampExportTime(timeRange.before); + after = clampExportTime(before - DEFAULT_EXPORT_WINDOW_SECONDS); + } + + return { after, before }; + }, + [clampExportTime, timeRange.before], + ); + + const setExportStartTime = useCallback< + React.Dispatch> + >( + (value) => { + setExportRange((prev) => { + const resolvedValue = + typeof value === "function" + ? value(prev?.after ?? currentTime) + : value; + const after = clampExportTime(resolvedValue); + const before = Math.max( + after, + clampExportTime( + prev?.before ?? after + DEFAULT_EXPORT_WINDOW_SECONDS, + ), + ); + return { after, before }; + }); + }, + [clampExportTime, currentTime], + ); + + const setExportEndTime = useCallback< + React.Dispatch> + >( + (value) => { + setExportRange((prev) => { + const resolvedValue = + typeof value === "function" + ? value(prev?.before ?? currentTime) + : value; + const before = clampExportTime(resolvedValue); + const after = Math.min( + before, + clampExportTime( + prev?.after ?? 
before - DEFAULT_EXPORT_WINDOW_SECONDS, + ), + ); + return { after, before }; + }); + }, + [clampExportTime, currentTime], + ); + + useEffect(() => { + if (exportMode !== "timeline" || exportRange) { + return; + } + + setExportRange(buildDefaultExportRange(currentTime)); + }, [exportMode, exportRange, buildDefaultExportRange, currentTime]); + + const handleExportPreview = useCallback(() => { + if (!exportRange) { + toast.error( + t("export.toast.error.noVaildTimeSelected", { + ns: "components/dialog", + }), + { + position: "top-center", + }, + ); + return; + } + + setShowExportPreview(true); + }, [exportRange, setShowExportPreview, t]); + + const handleExportCancel = useCallback(() => { + setShowExportPreview(false); + setExportRange(undefined); + setExportMode("none"); + }, [setExportMode, setExportRange, setShowExportPreview]); + + const setExportRangeWithPause = useCallback( + (range: TimeRange | undefined) => { + setExportRange(range); + + if (range != undefined) { + mainControllerRef.current?.pause(); + } + }, + [setExportRange], + ); + + const openMobileExport = useCallback(() => { + const now = new Date(timeRange.before * 1000); + now.setHours(now.getHours() - 1); + + setExportRangeWithPause({ + before: timeRange.before, + after: now.getTime() / 1000, + }); + setExportMode("select"); + setIsMobileSettingsOpen(false); + setMobileSettingsMode("actions"); + }, [setExportRangeWithPause, timeRange.before]); + + const handleExportSave = useCallback(() => { + if (!exportRange || !selectedCamera) { + toast.error( + t("export.toast.error.noVaildTimeSelected", { + ns: "components/dialog", + }), + { + position: "top-center", + }, + ); + return; + } + + if (exportRange.before < exportRange.after) { + toast.error( + t("export.toast.error.endTimeMustAfterStartTime", { + ns: "components/dialog", + }), + { position: "top-center" }, + ); + return; + } + + axios + .post( + `export/${selectedCamera}/start/${Math.round(exportRange.after)}/end/${Math.round(exportRange.before)}`, 
+ { + playback: "realtime", + }, + ) + .then((response) => { + if (response.status == 200) { + toast.success( + t("export.toast.success", { ns: "components/dialog" }), + { + position: "top-center", + action: ( + + + + ), + }, + ); + setShowExportPreview(false); + setExportRange(undefined); + setExportMode("none"); + } + }) + .catch((error) => { + const errorMessage = + error.response?.data?.message || + error.response?.data?.detail || + "Unknown error"; + toast.error( + t("export.toast.error.failed", { + ns: "components/dialog", + error: errorMessage, + }), + { position: "top-center" }, + ); + }); + }, [ + exportRange, + selectedCamera, + setExportMode, + setExportRange, + setShowExportPreview, + t, + ]); + + useEffect(() => { + if (!searchRange) { + setSearchRange(timeRange); + } + }, [searchRange, timeRange]); + + // Video player state + const [fullResolution, setFullResolution] = useState({ + width: 0, + height: 0, + }); + + // Fullscreen + const { fullscreen, toggleFullscreen, supportsFullScreen } = + useFullscreen(mainLayoutRef); + + // Timeline zoom settings + const [zoomSettings, setZoomSettings] = useState({ + segmentDuration: 30, + timestampSpread: 15, + }); + + const possibleZoomLevels: ZoomLevel[] = useMemo( + () => [ + { segmentDuration: 30, timestampSpread: 15 }, + { segmentDuration: 15, timestampSpread: 5 }, + { segmentDuration: 5, timestampSpread: 1 }, + ], + [], + ); + + const handleZoomChange = useCallback( + (newZoomLevel: number) => { + setZoomSettings(possibleZoomLevels[newZoomLevel]); + }, + [possibleZoomLevels], + ); + + const currentZoomLevel = useMemo( + () => + possibleZoomLevels.findIndex( + (level) => level.segmentDuration === zoomSettings.segmentDuration, + ), + [possibleZoomLevels, zoomSettings.segmentDuration], + ); + + const { isZooming, zoomDirection } = useTimelineZoom({ + zoomSettings, + zoomLevels: possibleZoomLevels, + onZoomChange: handleZoomChange, + timelineRef: timelineRef, + timelineDuration: timeRange.after - 
timeRange.before, + }); + + // Motion data for timeline + const { alignStartDateToTimeline, alignEndDateToTimeline } = useTimelineUtils( + { segmentDuration: zoomSettings.segmentDuration }, + ); + + const alignedAfter = alignStartDateToTimeline(timeRange.after); + const alignedBefore = alignEndDateToTimeline(timeRange.before); + + const { data: motionData, isLoading: isMotionLoading } = useSWR( + selectedCamera && !isSearchDialogOpen + ? [ + "review/activity/motion", + { + before: alignedBefore, + after: alignedAfter, + scale: Math.round(zoomSettings.segmentDuration / 2), + cameras: selectedCamera, + }, + ] + : null, + ); + + const { data: noRecordings } = useSWR( + selectedCamera && !isSearchDialogOpen + ? [ + "recordings/unavailable", + { + before: alignedBefore, + after: alignedAfter, + scale: Math.round(zoomSettings.segmentDuration), + cameras: selectedCamera, + }, + ] + : null, + ); + + const recordingParams = useMemo( + () => ({ + before: currentTimeRange.before, + after: currentTimeRange.after, + }), + [currentTimeRange], + ); + + const { data: playbackRecordings } = useSWR( + selectedCamera && !isSearchDialogOpen + ? [`${selectedCamera}/recordings`, recordingParams] + : null, + { revalidateOnFocus: false }, + ); + + const activeSegmentHeatmap = useMemo(() => { + if (!showSegmentHeatmap || !playbackRecordings?.length) { + return null; + } + + const activeSegment = playbackRecordings.find( + (recording) => + recording.start_time <= currentTime && + recording.end_time >= currentTime, + ); + + return activeSegment?.motion_heatmap ?? 
null; + }, [currentTime, playbackRecordings, showSegmentHeatmap]); + + // Camera aspect ratio + const getCameraAspect = useCallback( + (cam: string) => { + if (!config) return undefined; + if ( + cam === selectedCamera && + fullResolution.width && + fullResolution.height + ) { + return fullResolution.width / fullResolution.height; + } + const camera = config.cameras[cam]; + if (!camera) return undefined; + return camera.detect.width / camera.detect.height; + }, + [config, fullResolution, selectedCamera], + ); + + const mainCameraAspect = useMemo(() => { + if (!selectedCamera) return "normal"; + const aspectRatio = getCameraAspect(selectedCamera); + if (!aspectRatio) return "normal"; + if (aspectRatio > ASPECT_WIDE_LAYOUT) return "wide"; + if (aspectRatio < ASPECT_VERTICAL_LAYOUT) return "tall"; + return "normal"; + }, [getCameraAspect, selectedCamera]); + + const grow = useMemo(() => { + if (mainCameraAspect === "wide") return "w-full aspect-wide"; + if (mainCameraAspect === "tall") { + return isDesktop + ? 
"size-full aspect-tall flex flex-col justify-center" + : "size-full"; + } + return "w-full aspect-video"; + }, [mainCameraAspect]); + + // Container resize observer + const [{ width: containerWidth, height: containerHeight }] = + useResizeObserver(mainLayoutRef); + + const useHeightBased = useMemo(() => { + if (!containerWidth || !containerHeight || !selectedCamera) return false; + const cameraAspectRatio = getCameraAspect(selectedCamera); + if (!cameraAspectRatio) return false; + const availableAspectRatio = containerWidth / containerHeight; + return availableAspectRatio >= cameraAspectRatio; + }, [containerWidth, containerHeight, getCameraAspect, selectedCamera]); + + const onClipEnded = useCallback(() => { + if (!mainControllerRef.current) { + return; + } + + if (selectedRangeIdx < chunkedTimeRange.length - 1) { + setSelectedRangeIdx(selectedRangeIdx + 1); + } + }, [selectedRangeIdx, chunkedTimeRange]); + + const updateSelectedSegment = useCallback( + (nextTime: number, updateStartTime: boolean) => { + const index = chunkedTimeRange.findIndex( + (segment) => segment.after <= nextTime && segment.before >= nextTime, + ); + + if (index != -1) { + if (updateStartTime) { + setPlaybackStart(nextTime); + } + + setSelectedRangeIdx(index); + } + }, + [chunkedTimeRange], + ); + + // Handle scrubbing + useEffect(() => { + if (scrubbing || exportRange) { + if ( + currentTime > currentTimeRange.before + 60 || + currentTime < currentTimeRange.after - 60 + ) { + updateSelectedSegment(currentTime, false); + return; + } + + mainControllerRef.current?.scrubToTimestamp(currentTime); + } + // we only want to seek when current time updates + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ + currentTime, + scrubbing, + timeRange, + currentTimeRange, + updateSelectedSegment, + ]); + + useEffect(() => { + if (pendingSeekTime != null) { + return; + } + + const nextTime = timeRange.before - 60; + const index = chunkedTimeRange.findIndex( + (segment) => segment.after <= 
nextTime && segment.before >= nextTime, + ); + + setCurrentTime(nextTime); + setPlayerTime(nextTime); + setPlaybackStart(nextTime); + setSelectedRangeIdx(index === -1 ? chunkedTimeRange.length - 1 : index); + mainControllerRef.current?.seekToTimestamp(nextTime, true); + }, [pendingSeekTime, timeRange, chunkedTimeRange]); + + useEffect(() => { + if (!scrubbing) { + if (Math.abs(currentTime - playerTime) > 10) { + if ( + currentTimeRange.after <= currentTime && + currentTimeRange.before >= currentTime + ) { + mainControllerRef.current?.seekToTimestamp(currentTime, true); + } else { + updateSelectedSegment(currentTime, true); + } + } else if (playerTime != currentTime) { + mainControllerRef.current?.play(); + } + } + // we only want to seek when current time doesn't match the player update time + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [currentTime, scrubbing, playerTime]); + + // Manually seek to timestamp + const manuallySetCurrentTime = useCallback( + (time: number, play: boolean = false) => { + if (!currentTimeRange) { + return; + } + + setCurrentTime(time); + + if (currentTimeRange.after <= time && currentTimeRange.before >= time) { + mainControllerRef.current?.seekToTimestamp(time, play); + } else { + updateSelectedSegment(time, true); + } + }, + [currentTimeRange, updateSelectedSegment], + ); + + const canStartSearch = Boolean( + selectedCamera && + searchRange && + searchRange.before >= searchRange.after && + polygonPoints.length >= 3 && + !isDrawingROI, + ); + + const cancelMotionSearchJob = useCallback( + async (jobIdToCancel: string | null, cameraToCancel: string | null) => { + if (!jobIdToCancel || !cameraToCancel) { + return; + } + + try { + await axios.post( + `${cameraToCancel}/search/motion/${jobIdToCancel}/cancel`, + ); + } catch { + // Best effort cancellation. 
+ } + }, + [], + ); + + const cancelMotionSearchJobViaBeacon = useCallback( + (jobIdToCancel: string | null, cameraToCancel: string | null) => { + if (!jobIdToCancel || !cameraToCancel) { + return; + } + + const url = `${window.location.origin}/api/${cameraToCancel}/search/motion/${jobIdToCancel}/cancel`; + + const xhr = new XMLHttpRequest(); + try { + xhr.open("POST", url, false); + xhr.setRequestHeader("Content-Type", "application/json"); + xhr.setRequestHeader("X-CSRF-TOKEN", "1"); + xhr.setRequestHeader("X-CACHE-BYPASS", "1"); + xhr.withCredentials = true; + xhr.send("{}"); + } catch { + // Best effort cancellation during unload. + } + }, + [], + ); + + useEffect(() => { + jobIdRef.current = jobId; + }, [jobId]); + + useEffect(() => { + jobCameraRef.current = jobCamera; + }, [jobCamera]); + + useEffect(() => { + return () => { + cancelMotionSearchJobViaBeacon(jobIdRef.current, jobCameraRef.current); + void cancelMotionSearchJob(jobIdRef.current, jobCameraRef.current); + }; + }, [cancelMotionSearchJob, cancelMotionSearchJobViaBeacon]); + + useEffect(() => { + const handleBeforeUnload = () => { + cancelMotionSearchJobViaBeacon(jobIdRef.current, jobCameraRef.current); + }; + + window.addEventListener("beforeunload", handleBeforeUnload); + + return () => { + window.removeEventListener("beforeunload", handleBeforeUnload); + }; + }, [cancelMotionSearchJobViaBeacon]); + + const handleNewSearch = useCallback(() => { + if (jobId && jobCamera) { + void cancelMotionSearchJob(jobId, jobCamera); + if (isSearching) { + toast.message(t("searchCancelled")); + } + } + setSearchResults([]); + setSearchMetrics(null); + setIsSearching(false); + setJobId(null); + setJobCamera(null); + setHasSearched(false); + setPendingSeekTime(null); + setSearchRange(timeRange); + setIsSearchDialogOpen(true); + }, [cancelMotionSearchJob, isSearching, jobCamera, jobId, t, timeRange]); + + // Perform motion search + const performSearch = useCallback(async () => { + if (!selectedCamera) { + 
toast.error(t("errors.noCamera")); + return; + } + + if (polygonPoints.length < 3) { + toast.error(t("errors.polygonTooSmall")); + return; + } + + if (!searchRange) { + toast.error(t("errors.noTimeRange")); + return; + } + + if (searchRange.before < searchRange.after) { + toast.error(t("errors.invalidTimeRange")); + return; + } + + setIsSearching(true); + setSearchResults([]); + setHasSearched(true); + + try { + const request: MotionSearchRequest = { + start_time: searchRange.after, + end_time: searchRange.before, + polygon_points: polygonPoints, + parallel: parallelMode, + threshold, + min_area: minArea, + frame_skip: frameSkip, + max_results: maxResults, + }; + + const response = await axios.post( + `${selectedCamera}/search/motion`, + request, + ); + + if (response.data.success) { + setJobId(response.data.job_id); + setJobCamera(selectedCamera); + setIsSearchDialogOpen(false); + toast.success(t("searchStarted")); + } else { + toast.error( + t("errors.searchFailed", { message: response.data.message }), + ); + setIsSearching(false); + } + } catch (error) { + let errorMessage = t("errors.unknown"); + + if (axios.isAxiosError<{ message?: string; detail?: string }>(error)) { + const responseData = error.response?.data as + | { + message?: unknown; + detail?: unknown; + error?: unknown; + errors?: unknown; + } + | string + | undefined; + + if (typeof responseData === "string") { + errorMessage = responseData; + } else if (responseData) { + const apiMessage = + responseData.message ?? + responseData.detail ?? + responseData.error ?? 
+ responseData.errors; + + if (Array.isArray(apiMessage)) { + errorMessage = apiMessage.join(", "); + } else if (typeof apiMessage === "string") { + errorMessage = apiMessage; + } else if (apiMessage) { + errorMessage = JSON.stringify(apiMessage); + } else { + errorMessage = error.message || errorMessage; + } + } else { + errorMessage = error.message || errorMessage; + } + } else if (error instanceof Error) { + errorMessage = error.message; + } + + toast.error(t("errors.searchFailed", { message: errorMessage })); + setIsSearching(false); + } + }, [ + selectedCamera, + polygonPoints, + searchRange, + parallelMode, + threshold, + minArea, + frameSkip, + maxResults, + t, + ]); + + // Monitor job status and update UI when complete + useEffect(() => { + if (!jobStatus) { + return; + } + + if (jobStatus.status === "success") { + setSearchResults(jobStatus.results ?? []); + setSearchMetrics(jobStatus.metrics ?? null); + setIsSearching(false); + setJobId(null); + setJobCamera(null); + toast.success( + t("changesFound", { count: jobStatus.results?.length ?? 0 }), + ); + } else if ( + jobStatus.status === "queued" || + jobStatus.status === "running" + ) { + setSearchMetrics(jobStatus.metrics ?? 
null); + // Stream partial results as they arrive + if (jobStatus.results && jobStatus.results.length > 0) { + setSearchResults(jobStatus.results); + } + } else if (jobStatus.status === "failed") { + setIsSearching(false); + setJobId(null); + setJobCamera(null); + toast.error( + t("errors.searchFailed", { + message: jobStatus.error_message || jobStatus.message, + }), + ); + } else if (jobStatus.status === "cancelled") { + setIsSearching(false); + setJobId(null); + setJobCamera(null); + toast.message(t("searchCancelled")); + } + }, [jobStatus, t]); + + // Handle result click + const handleResultClick = useCallback( + (result: MotionSearchResult) => { + if ( + result.timestamp < timeRange.after || + result.timestamp > timeRange.before + ) { + setPendingSeekTime(result.timestamp); + onDaySelect(new Date(result.timestamp * 1000)); + return; + } + + manuallySetCurrentTime(result.timestamp, true); + }, + [manuallySetCurrentTime, onDaySelect, timeRange], + ); + + useEffect(() => { + if (pendingSeekTime == null) { + return; + } + + if ( + pendingSeekTime >= timeRange.after && + pendingSeekTime <= timeRange.before + ) { + manuallySetCurrentTime(pendingSeekTime, true); + setPendingSeekTime(null); + } + }, [pendingSeekTime, timeRange, manuallySetCurrentTime]); + + if (!selectedCamera) { + return ( +
+

{t("selectCamera")}

+
+ ); + } + + const timelinePanel = ( + <> +
+
+ + + + {!isMotionLoading ? ( + setScrubbing(dragging)} + showExportHandles={exportMode === "timeline" && Boolean(exportRange)} + exportStartTime={exportRange?.after} + exportEndTime={exportRange?.before} + setExportStartTime={setExportStartTime} + setExportEndTime={setExportEndTime} + isZooming={isZooming} + zoomDirection={zoomDirection} + onZoomChange={handleZoomChange} + possibleZoomLevels={possibleZoomLevels} + currentZoomLevel={currentZoomLevel} + /> + ) : ( + + )} + + ); + + const progressMetrics = jobStatus?.metrics ?? searchMetrics; + const progressValue = + progressMetrics && progressMetrics.segments_scanned > 0 + ? Math.min( + 100, + (progressMetrics.segments_processed / + progressMetrics.segments_scanned) * + 100, + ) + : 0; + + const resultsPanel = ( + <> +
+

{t("results")}

+
+ + + {isSearching && ( +
+
+
+ +
{t("searching")}
+
+ +
+ +
+ )} + {searchMetrics && searchResults.length > 0 && ( +
+
+
+ {t("metrics.segmentsScanned")} + + {searchMetrics.segments_scanned} + +
+ {searchMetrics.segments_processed > 0 && ( +
+ {t("metrics.segmentsProcessed")} + + {searchMetrics.segments_processed} + +
+ )} + {searchMetrics.metadata_inactive_segments > 0 && ( +
+ {t("metrics.segmentsSkippedInactive")} + + {searchMetrics.metadata_inactive_segments} + +
+ )} + {searchMetrics.heatmap_roi_skip_segments > 0 && ( +
+ {t("metrics.segmentsSkippedHeatmap")} + + {searchMetrics.heatmap_roi_skip_segments} + +
+ )} + {searchMetrics.fallback_full_range_segments > 0 && ( +
+ {t("metrics.fallbackFullRange")} + + {searchMetrics.fallback_full_range_segments} + +
+ )} +
+ {t("metrics.framesDecoded")} + + {searchMetrics.frames_decoded} + +
+
+ {t("metrics.wallTime")} + + {t("metrics.seconds", { + seconds: searchMetrics.wall_time_seconds.toFixed(1), + })} + +
+ {searchMetrics.segments_with_errors > 0 && ( +
+ {t("metrics.segmentErrors")} + + {searchMetrics.segments_with_errors} + +
+ )} +
+
+ )} + + {searchResults.length === 0 && !isSearching ? ( +
+ {hasSearched ? t("noChangesFound") : t("noResultsYet")} +
+ ) : searchResults.length > 0 ? ( +
+ {searchResults.map((result, index) => ( + handleResultClick(result)} + /> + ))} +
+ ) : null} +
+ + ); + + return ( + +
+ + + + {/* Header */} +
+ {isMobile && ( + + )} + {(cameraLocked || onBack) && ( +
+ +
+ )} +
+
+ + +
+ + {isDesktop ? ( + <> + + + + ) : ( + { + setIsMobileSettingsOpen(open); + + if (!open) { + setMobileSettingsMode("actions"); + } + }} + > + + + + + {mobileSettingsMode == "actions" ? ( +
+ + +
+ ) : ( +
+
+
setMobileSettingsMode("actions")} + > + {t("button.back", { ns: "common" })} +
+
+ {t("calendar", { ns: "views/recording" })} +
+
+
+ { + onDaySelect(day); + setIsMobileSettingsOpen(false); + setMobileSettingsMode("actions"); + }} + /> +
+ +
+ +
+
+ )} +
+
+ )} + +
+
+ + {!isDesktop && ( +
+ +
+ )} + + {/* Main Content */} +
+ {/* Video Player with ROI Canvas */} +
+
+ {/* Video Player */} + { + setPlayerTime(timestamp); + setCurrentTime(timestamp); + }} + onClipEnded={onClipEnded} + onSeekToTime={manuallySetCurrentTime} + onControllerReady={(controller) => { + mainControllerRef.current = controller; + }} + isScrubbing={scrubbing || exportMode == "timeline"} + supportsFullscreen={supportsFullScreen} + setFullResolution={setFullResolution} + toggleFullscreen={toggleFullscreen} + containerRef={mainLayoutRef} + transformedOverlay={ + + } + /> +
+
+ + {isDesktop ? ( + <> +
+ {timelinePanel} +
+ +
{resultsPanel}
+ + ) : ( +
+
+ {timelinePanel} +
+ +
+ {resultsPanel} +
+
+ )} +
+
+
+ ); +} + +type SearchResultItemProps = { + result: MotionSearchResult; + timezone: string | undefined; + timestampFormat: string; + onClick: () => void; +}; + +function SearchResultItem({ + result, + timezone, + timestampFormat, + onClick, +}: SearchResultItemProps) { + const { t } = useTranslation(["views/motionSearch"]); + const formattedTime = useFormattedTimestamp( + result.timestamp, + timestampFormat, + timezone, + ); + + return ( + + ); +}