debug replay implementation

This commit is contained in:
Josh Hawkins 2026-02-28 15:57:32 -06:00
parent 4232cc483d
commit 153d4f2e27
52 changed files with 3530 additions and 455 deletions

View File

@ -105,6 +105,7 @@ genai:
keep_alive: -1
options:
      num_ctx: 8192 # make sure the context matches other services that are using ollama
```
### OpenAI-Compatible

View File

@ -11,6 +11,7 @@ By default, descriptions will be generated for all tracked objects and all zones
Optionally, you can generate the description using a snapshot (if enabled) by setting `use_snapshot` to `True`. By default, this is set to `False`, which sends the uncompressed images from the `detect` stream collected over the object's lifetime to the model. Once the object lifecycle ends, only a single compressed and cropped thumbnail is saved with the tracked object. Using a snapshot might be useful when you want to _regenerate_ a tracked object's description as it will provide the AI with a higher-quality image (typically downscaled by the AI itself) than the cropped/compressed thumbnail. Using a snapshot otherwise has a trade-off in that only a single image is sent to your provider, which will limit the model's ability to determine object movement or direction.
Generative AI object descriptions can also be toggled dynamically for a camera via MQTT with the topic `frigate/<camera_name>/object_descriptions/set`. See the [MQTT documentation](/integrations/mqtt#frigatecamera_nameobject_descriptionsset).
## Usage and Best Practices
@ -75,4 +76,4 @@ Many providers also have a public facing chat interface for their models. Downlo
- OpenAI - [ChatGPT](https://chatgpt.com)
- Gemini - [Google AI Studio](https://aistudio.google.com)
- Ollama - [Open WebUI](https://docs.openwebui.com/)

165
frigate/api/debug_replay.py Normal file
View File

@ -0,0 +1,165 @@
"""Debug replay API endpoints."""
import asyncio
import logging
from datetime import datetime
from fastapi import APIRouter, Depends, Request
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
from frigate.api.auth import require_role
from frigate.api.defs.tags import Tags
logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.app])
class DebugReplayStartBody(BaseModel):
    """Request body for starting a debug replay session."""

    # Name of the camera whose recordings will be replayed.
    camera: str = Field(title="Source camera name")
    # Unix timestamp (seconds) marking the start of the replay window.
    start_time: float = Field(title="Start timestamp")
    # Unix timestamp (seconds) marking the end of the replay window.
    end_time: float = Field(title="End timestamp")
class DebugReplayStartResponse(BaseModel):
    """Response for starting a debug replay session."""

    # True when the replay session was started.
    success: bool
    # Name of the dynamically created replay camera.
    replay_camera: str
class DebugReplayStatusResponse(BaseModel):
    """Response for debug replay status."""

    # True while a replay session exists.
    active: bool
    # Name of the dynamically created replay camera, if a session is active.
    replay_camera: str | None = None
    # Camera the replayed recordings were taken from.
    source_camera: str | None = None
    # Replay window bounds (unix timestamps).
    start_time: float | None = None
    end_time: float | None = None
    # True once the replay camera has produced a sufficiently recent frame
    # (computed by the status endpoint from the current frame time).
    live_ready: bool = False
class DebugReplayStopResponse(BaseModel):
    """Response for stopping a debug replay session."""

    # True when the replay session was stopped and cleaned up.
    success: bool
@router.post(
    "/debug_replay/start",
    response_model=DebugReplayStartResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Start debug replay",
    description="Start a debug replay session from camera recordings.",
)
async def start_debug_replay(request: Request, body: DebugReplayStartBody):
    """Start a debug replay session.

    Returns 409 if a session is already running, 400 for invalid
    parameters, and 500 when clip generation fails.
    """
    app = request.app
    manager = app.replay_manager

    # Only a single replay session may exist at a time.
    if manager.active:
        return JSONResponse(
            content={
                "success": False,
                "message": "A replay session is already active",
            },
            status_code=409,
        )

    try:
        # start() performs blocking ffmpeg/DB work, so run it off the event loop.
        replay_camera = await asyncio.to_thread(
            manager.start,
            source_camera=body.camera,
            start_ts=body.start_time,
            end_ts=body.end_time,
            frigate_config=app.frigate_config,
            config_publisher=app.config_publisher,
        )
    except (ValueError, RuntimeError) as e:
        # ValueError -> bad request from the caller; RuntimeError -> server-side failure.
        status = 400 if isinstance(e, ValueError) else 500
        return JSONResponse(
            content={"success": False, "message": str(e)},
            status_code=status,
        )

    return DebugReplayStartResponse(success=True, replay_camera=replay_camera)
@router.get(
    "/debug_replay/status",
    response_model=DebugReplayStatusResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Get debug replay status",
    description="Get the status of the current debug replay session.",
)
def get_debug_replay_status(request: Request):
    """Report the current replay session and whether live view is ready."""
    manager = request.app.replay_manager
    replay_camera = manager.replay_camera_name
    live_ready = False

    if manager.active and replay_camera:
        processor = request.app.detected_frames_processor
        frame = processor.get_current_frame(replay_camera)
        if frame is not None:
            frame_time = processor.get_current_frame_time(replay_camera)
            cfg = request.app.frigate_config.cameras.get(replay_camera)
            # Fall back to a 10s window when the camera config is missing.
            retry_interval = (
                float(cfg.ffmpeg.retry_interval or 10) if cfg is not None else 10
            )
            # The stream is "live" if the latest frame is recent enough.
            live_ready = datetime.now().timestamp() <= frame_time + retry_interval

    return DebugReplayStatusResponse(
        active=manager.active,
        replay_camera=replay_camera,
        source_camera=manager.source_camera,
        start_time=manager.start_ts,
        end_time=manager.end_ts,
        live_ready=live_ready,
    )
@router.post(
    "/debug_replay/stop",
    response_model=DebugReplayStopResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Stop debug replay",
    description="Stop the active debug replay session and clean up all artifacts.",
)
async def stop_debug_replay(request: Request):
    """Stop the active replay session.

    Returns 400 when no session is active and 500 when cleanup fails.
    """
    app = request.app
    manager = app.replay_manager

    if not manager.active:
        return JSONResponse(
            content={"success": False, "message": "No active replay session"},
            status_code=400,
        )

    try:
        # stop() blocks on file/DB cleanup, so keep it off the event loop.
        await asyncio.to_thread(
            manager.stop,
            frigate_config=app.frigate_config,
            config_publisher=app.config_publisher,
        )
    except (ValueError, RuntimeError, OSError) as e:
        logger.error("Error stopping replay: %s", e)
        return JSONResponse(
            content={"success": False, "message": str(e)},
            status_code=500,
        )

    return DebugReplayStopResponse(success=True)

View File

@ -18,6 +18,7 @@ from frigate.api import (
camera,
chat,
classification,
debug_replay,
event,
export,
media,
@ -32,6 +33,7 @@ from frigate.comms.event_metadata_updater import (
)
from frigate.config import FrigateConfig
from frigate.config.camera.updater import CameraConfigUpdatePublisher
from frigate.debug_replay import DebugReplayManager
from frigate.embeddings import EmbeddingsContext
from frigate.genai import GenAIClientManager
from frigate.ptz.onvif import OnvifController
@ -65,6 +67,7 @@ def create_fastapi_app(
stats_emitter: StatsEmitter,
event_metadata_updater: EventMetadataPublisher,
config_publisher: CameraConfigUpdatePublisher,
replay_manager: DebugReplayManager,
enforce_default_admin: bool = True,
):
logger.info("Starting FastAPI app")
@ -133,6 +136,7 @@ def create_fastapi_app(
app.include_router(event.router)
app.include_router(media.router)
app.include_router(record.router)
app.include_router(debug_replay.router)
# App Properties
app.frigate_config = frigate_config
app.genai_manager = GenAIClientManager(frigate_config)
@ -144,6 +148,7 @@ def create_fastapi_app(
app.stats_emitter = stats_emitter
app.event_metadata_updater = event_metadata_updater
app.config_publisher = config_publisher
app.replay_manager = replay_manager
if frigate_config.auth.enabled:
secret = get_jwt_secret()

View File

@ -43,6 +43,11 @@ from frigate.const import (
)
from frigate.data_processing.types import DataProcessorMetrics
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
from frigate.debug_replay import (
DebugReplayManager,
cleanup_replay_cameras,
cleanup_replay_cameras_db,
)
from frigate.embeddings import EmbeddingProcess, EmbeddingsContext
from frigate.events.audio import AudioProcessor
from frigate.events.cleanup import EventCleanup
@ -531,6 +536,10 @@ class FrigateApp:
set_file_limit()
# Start frigate services.
# Clean up any stale replay cameras before services iterate the cameras dict
stale_replay_cameras = cleanup_replay_cameras(self.config)
self.replay_manager = DebugReplayManager()
self.init_camera_metrics()
self.init_queues()
self.init_database()
@ -541,6 +550,9 @@ class FrigateApp:
self.init_embeddings_manager()
self.bind_database()
self.check_db_data_migrations()
# Deferred DB cleanup for replay cameras (database is now bound)
cleanup_replay_cameras_db(stale_replay_cameras)
self.init_inter_process_communicator()
self.start_detectors()
self.init_dispatcher()
@ -572,6 +584,7 @@ class FrigateApp:
self.stats_emitter,
self.event_metadata_updater,
self.inter_config_updater,
self.replay_manager,
),
host="127.0.0.1",
port=5001,
@ -637,6 +650,7 @@ class FrigateApp:
self.record_cleanup.join()
self.stats_emitter.join()
self.frigate_watchdog.join()
self.camera_maintainer.join()
self.db.stop()
# Save embeddings stats to disk

View File

@ -57,6 +57,9 @@ class CameraActivityManager:
all_objects: list[dict[str, Any]] = []
for camera in new_activity.keys():
if camera not in self.config.cameras:
continue
# handle cameras that were added dynamically
if camera not in self.camera_all_object_counts:
self.__init_camera(self.config.cameras[camera])
@ -124,7 +127,11 @@ class CameraActivityManager:
any_changed = False
# run through each object and check what topics need to be updated
for label in self.config.cameras[camera].objects.track:
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
for label in camera_config.objects.track:
if label in self.config.model.non_logo_attributes:
continue
@ -174,6 +181,9 @@ class AudioActivityManager:
now = datetime.datetime.now().timestamp()
for camera in new_activity.keys():
if camera not in self.config.cameras:
continue
# handle cameras that were added dynamically
if camera not in self.current_audio_detections:
self.__init_camera(self.config.cameras[camera])
@ -193,7 +203,11 @@ class AudioActivityManager:
def compare_audio_activity(
self, camera: str, new_detections: list[tuple[str, float]], now: float
) -> None:
max_not_heard = self.config.cameras[camera].audio.max_not_heard
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False
max_not_heard = camera_config.audio.max_not_heard
current = self.current_audio_detections[camera]
any_changed = False

View File

@ -55,8 +55,20 @@ class CameraMaintainer(threading.Thread):
self.shm_count = self.__calculate_shm_frame_count()
self.camera_processes: dict[str, mp.Process] = {}
self.capture_processes: dict[str, mp.Process] = {}
self.camera_stop_events: dict[str, MpEvent] = {}
self.metrics_manager = metrics_manager
def __ensure_camera_stop_event(self, camera: str) -> MpEvent:
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is None:
camera_stop_event = mp.Event()
self.camera_stop_events[camera] = camera_stop_event
else:
camera_stop_event.clear()
return camera_stop_event
def __init_historical_regions(self) -> None:
# delete region grids for removed or renamed cameras
cameras = list(self.config.cameras.keys())
@ -99,6 +111,8 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Camera processor not started for disabled camera {name}")
return
camera_stop_event = self.__ensure_camera_stop_event(name)
if runtime:
self.camera_metrics[name] = CameraMetrics(self.metrics_manager)
self.ptz_metrics[name] = PTZMetrics(autotracker_enabled=False)
@ -135,7 +149,7 @@ class CameraMaintainer(threading.Thread):
self.camera_metrics[name],
self.ptz_metrics[name],
self.region_grids[name],
self.stop_event,
camera_stop_event,
self.config.logger,
)
self.camera_processes[config.name] = camera_process
@ -150,6 +164,8 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Capture process not started for disabled camera {name}")
return
camera_stop_event = self.__ensure_camera_stop_event(name)
# pre-create shms
count = 10 if runtime else self.shm_count
for i in range(count):
@ -160,7 +176,7 @@ class CameraMaintainer(threading.Thread):
config,
count,
self.camera_metrics[name],
self.stop_event,
camera_stop_event,
self.config.logger,
)
capture_process.daemon = True
@ -170,18 +186,36 @@ class CameraMaintainer(threading.Thread):
logger.info(f"Capture process started for {name}: {capture_process.pid}")
def __stop_camera_capture_process(self, camera: str) -> None:
capture_process = self.capture_processes[camera]
capture_process = self.capture_processes.get(camera)
if capture_process is not None:
logger.info(f"Waiting for capture process for {camera} to stop")
capture_process.terminate()
capture_process.join()
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is not None:
camera_stop_event.set()
capture_process.join(timeout=10)
if capture_process.is_alive():
logger.warning(
f"Capture process for {camera} didn't exit, forcing termination"
)
capture_process.terminate()
capture_process.join()
def __stop_camera_process(self, camera: str) -> None:
camera_process = self.camera_processes[camera]
camera_process = self.camera_processes.get(camera)
if camera_process is not None:
logger.info(f"Waiting for process for {camera} to stop")
camera_process.terminate()
camera_process.join()
camera_stop_event = self.camera_stop_events.get(camera)
if camera_stop_event is not None:
camera_stop_event.set()
camera_process.join(timeout=10)
if camera_process.is_alive():
logger.warning(f"Process for {camera} didn't exit, forcing termination")
camera_process.terminate()
camera_process.join()
logger.info(f"Closing frame queue for {camera}")
empty_and_close_queue(self.camera_metrics[camera].frame_queue)
@ -199,6 +233,12 @@ class CameraMaintainer(threading.Thread):
for update_type, updated_cameras in updates.items():
if update_type == CameraConfigUpdateEnum.add.name:
for camera in updated_cameras:
if (
camera in self.camera_processes
or camera in self.capture_processes
):
continue
self.__start_camera_processor(
camera,
self.update_subscriber.camera_configs[camera],
@ -210,15 +250,22 @@ class CameraMaintainer(threading.Thread):
runtime=True,
)
elif update_type == CameraConfigUpdateEnum.remove.name:
self.__stop_camera_capture_process(camera)
self.__stop_camera_process(camera)
for camera in updated_cameras:
self.__stop_camera_capture_process(camera)
self.__stop_camera_process(camera)
self.capture_processes.pop(camera, None)
self.camera_processes.pop(camera, None)
self.camera_stop_events.pop(camera, None)
self.region_grids.pop(camera, None)
self.camera_metrics.pop(camera, None)
self.ptz_metrics.pop(camera, None)
# ensure the capture processes are done
for camera in self.camera_processes.keys():
for camera in self.capture_processes.keys():
self.__stop_camera_capture_process(camera)
# ensure the camera processors are done
for camera in self.capture_processes.keys():
for camera in self.camera_processes.keys():
self.__stop_camera_process(camera)
self.update_subscriber.stop()

View File

@ -26,8 +26,8 @@ class ConfigPublisher:
def stop(self) -> None:
self.stop_event.set()
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)
class ConfigSubscriber:
@ -55,5 +55,5 @@ class ConfigSubscriber:
return (None, None)
def stop(self) -> None:
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)

View File

@ -110,6 +110,9 @@ class Dispatcher:
payload: str,
sub_command: str | None = None,
) -> None:
if camera_name not in self.config.cameras:
return
try:
if command_type == "set":
if sub_command:
@ -131,6 +134,9 @@ class Dispatcher:
def handle_request_region_grid() -> Any:
camera = payload
if camera not in self.config.cameras:
return None
grid = get_camera_regions_grid(
camera,
self.config.cameras[camera].detect,
@ -243,7 +249,11 @@ class Dispatcher:
self.publish("birdseye_layout", json.dumps(self.birdseye_layout.copy()))
def handle_on_connect() -> None:
camera_status = self.camera_activity.last_camera_activity.copy()
camera_status = {
camera: status
for camera, status in self.camera_activity.last_camera_activity.copy().items()
if camera in self.config.cameras
}
audio_detections = self.audio_activity.current_audio_detections.copy()
cameras_with_status = camera_status.keys()
@ -346,7 +356,8 @@ class Dispatcher:
# example /cam_name/notifications/suspend payload=duration
camera_name = parts[-3]
command = parts[-2]
self._on_camera_notification_suspend(camera_name, payload)
if camera_name in self.config.cameras:
self._on_camera_notification_suspend(camera_name, payload)
except IndexError:
logger.error(
f"Received invalid {topic.split('/')[-1]} command: {topic}"

View File

@ -61,8 +61,8 @@ class InterProcessCommunicator(Communicator):
def stop(self) -> None:
self.stop_event.set()
self.reader_thread.join()
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)
class InterProcessRequestor:
@ -82,5 +82,5 @@ class InterProcessRequestor:
return ""
def stop(self) -> None:
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)

View File

@ -43,7 +43,7 @@ class ZmqProxy:
def stop(self) -> None:
# destroying the context will tell the proxy to stop
self.context.destroy()
self.context.destroy(linger=0)
self.runner.join()
@ -66,8 +66,8 @@ class Publisher(Generic[T]):
self.socket.send_string(f"{self.topic}{sub_topic} {json.dumps(payload)}")
def stop(self) -> None:
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)
class Subscriber(Generic[T]):
@ -96,8 +96,8 @@ class Subscriber(Generic[T]):
return self._return_object("", None)
def stop(self) -> None:
self.socket.close()
self.context.destroy()
self.socket.close(linger=0)
self.context.destroy(linger=0)
def _return_object(self, topic: str, payload: T | None) -> T | None:
return payload

View File

@ -80,8 +80,8 @@ class CameraConfigUpdateSubscriber:
self.camera_configs[camera] = updated_config
return
elif update_type == CameraConfigUpdateEnum.remove:
self.config.cameras.pop(camera)
self.camera_configs.pop(camera)
self.config.cameras.pop(camera, None)
self.camera_configs.pop(camera, None)
return
config = self.camera_configs.get(camera)

View File

@ -14,6 +14,8 @@ RECORD_DIR = f"{BASE_DIR}/recordings"
TRIGGER_DIR = f"{CLIPS_DIR}/triggers"
BIRDSEYE_PIPE = "/tmp/cache/birdseye"
CACHE_DIR = "/tmp/cache"
REPLAY_CAMERA_PREFIX = "_replay_"
REPLAY_DIR = os.path.join(CACHE_DIR, "replay")
PLUS_ENV_VAR = "PLUS_API_KEY"
PLUS_API_HOST = "https://api.frigate.video"

468
frigate/debug_replay.py Normal file
View File

@ -0,0 +1,468 @@
"""Debug replay camera management for replaying recordings with detection overlays."""
import logging
import os
import shutil
import subprocess as sp
import threading
from frigate.config import FrigateConfig
from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdatePublisher,
CameraConfigUpdateTopic,
)
from frigate.const import (
CLIPS_DIR,
RECORD_DIR,
REPLAY_CAMERA_PREFIX,
REPLAY_DIR,
THUMB_DIR,
)
from frigate.models import Event, Recordings, ReviewSegment, Timeline
from frigate.util.builtin import update_yaml_file_bulk
from frigate.util.config import find_config_file
logger = logging.getLogger(__name__)
class DebugReplayManager:
    """Manages a single debug replay session.

    A session concatenates a source camera's recordings into a looping clip,
    registers a temporary "replay" camera that reads the clip, and tears the
    camera and all of its artifacts down again on stop.
    """

    def __init__(self) -> None:
        # Serializes start()/stop() so concurrent API calls cannot interleave.
        self._lock = threading.Lock()
        # Name of the dynamically added replay camera (None when no session).
        self.replay_camera_name: str | None = None
        # Source camera the recordings were pulled from.
        self.source_camera: str | None = None
        # Path to the concatenated clip used as the replay camera's input.
        self.clip_path: str | None = None
        # Replay window bounds (unix timestamps).
        self.start_ts: float | None = None
        self.end_ts: float | None = None

    @property
    def active(self) -> bool:
        """Whether a replay session is currently active."""
        return self.replay_camera_name is not None

    def start(
        self,
        source_camera: str,
        start_ts: float,
        end_ts: float,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> str:
        """Start a debug replay session.

        Args:
            source_camera: Name of the source camera to replay
            start_ts: Start timestamp
            end_ts: End timestamp
            frigate_config: Current Frigate configuration
            config_publisher: Publisher for camera config updates

        Returns:
            The replay camera name

        Raises:
            ValueError: If a session is already active or parameters are invalid
            RuntimeError: If clip generation fails
        """
        with self._lock:
            return self._start_locked(
                source_camera, start_ts, end_ts, frigate_config, config_publisher
            )

    def _start_locked(
        self,
        source_camera: str,
        start_ts: float,
        end_ts: float,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> str:
        # Validate session state and parameters before doing any work.
        if self.active:
            raise ValueError("A replay session is already active")

        if source_camera not in frigate_config.cameras:
            raise ValueError(f"Camera '{source_camera}' not found")

        if end_ts <= start_ts:
            raise ValueError("End time must be after start time")

        # Query recordings for the source camera in the time range.
        # Matches segments starting or ending inside the window, plus
        # segments that fully contain the window.
        recordings = (
            Recordings.select(
                Recordings.path,
                Recordings.start_time,
                Recordings.end_time,
            )
            .where(
                Recordings.start_time.between(start_ts, end_ts)
                | Recordings.end_time.between(start_ts, end_ts)
                | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
            )
            .where(Recordings.camera == source_camera)
            .order_by(Recordings.start_time.asc())
        )

        if not recordings.count():
            raise ValueError(
                f"No recordings found for camera '{source_camera}' in the specified time range"
            )

        # Create replay directory
        os.makedirs(REPLAY_DIR, exist_ok=True)

        # Generate replay camera name
        replay_name = f"{REPLAY_CAMERA_PREFIX}{source_camera}"

        # Build concat file for ffmpeg
        concat_file = os.path.join(REPLAY_DIR, f"{replay_name}_concat.txt")
        clip_path = os.path.join(REPLAY_DIR, f"{replay_name}.mp4")

        with open(concat_file, "w") as f:
            for recording in recordings:
                f.write(f"file '{recording.path}'\n")

        # Concatenate recordings into a single clip with -c copy (fast)
        ffmpeg_cmd = [
            frigate_config.ffmpeg.ffmpeg_path,
            "-hide_banner",
            "-y",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            concat_file,
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            clip_path,
        ]

        logger.info(
            "Generating replay clip for %s (%.1f - %.1f)",
            source_camera,
            start_ts,
            end_ts,
        )

        try:
            result = sp.run(
                ffmpeg_cmd,
                capture_output=True,
                text=True,
                timeout=120,
            )

            if result.returncode != 0:
                logger.error("FFmpeg error: %s", result.stderr)
                raise RuntimeError(
                    f"Failed to generate replay clip: {result.stderr[-500:]}"
                )
        except sp.TimeoutExpired:
            raise RuntimeError("Clip generation timed out")
        finally:
            # Clean up concat file
            if os.path.exists(concat_file):
                os.remove(concat_file)

        if not os.path.exists(clip_path):
            raise RuntimeError("Clip file was not created")

        # Build camera config dict for the replay camera
        source_config = frigate_config.cameras[source_camera]
        camera_dict = self._build_camera_config_dict(
            source_config, replay_name, clip_path
        )

        # Write to YAML config
        config_file = find_config_file()
        update_yaml_file_bulk(config_file, {f"cameras.{replay_name}": camera_dict})

        # Re-parse the full config to get a fully initialized CameraConfig
        with open(config_file, "r") as f:
            new_raw_config = f.read()

        try:
            new_config = FrigateConfig.parse(new_raw_config)
        except Exception as e:
            # Rollback YAML change
            try:
                update_yaml_file_bulk(config_file, {f"cameras.{replay_name}": ""})
            except Exception:
                logger.warning("Failed to rollback replay camera YAML entry")

            raise RuntimeError(f"Failed to validate replay camera config: {e}")

        # Update the running config
        frigate_config.cameras[replay_name] = new_config.cameras[replay_name]

        # Publish the add event
        config_publisher.publish_update(
            CameraConfigUpdateTopic(CameraConfigUpdateEnum.add, replay_name),
            new_config.cameras[replay_name],
        )

        # Store session state
        self.replay_camera_name = replay_name
        self.source_camera = source_camera
        self.clip_path = clip_path
        self.start_ts = start_ts
        self.end_ts = end_ts

        logger.info("Debug replay started: %s -> %s", source_camera, replay_name)
        return replay_name

    def stop(
        self,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> None:
        """Stop the active replay session and clean up all artifacts.

        Args:
            frigate_config: Current Frigate configuration
            config_publisher: Publisher for camera config updates
        """
        with self._lock:
            self._stop_locked(frigate_config, config_publisher)

    def _stop_locked(
        self,
        frigate_config: FrigateConfig,
        config_publisher: CameraConfigUpdatePublisher,
    ) -> None:
        if not self.active:
            logger.warning("No active replay session to stop")
            return

        replay_name = self.replay_camera_name

        # Publish remove event so subscribers stop and remove from their config
        if replay_name in frigate_config.cameras:
            config_publisher.publish_update(
                CameraConfigUpdateTopic(CameraConfigUpdateEnum.remove, replay_name),
                frigate_config.cameras[replay_name],
            )
            # Do NOT pop here — let subscribers handle removal from the shared
            # config dict when they process the ZMQ message to avoid race conditions

        # Defensive DB cleanup
        self._cleanup_db(replay_name)

        # Remove filesystem artifacts
        self._cleanup_files(replay_name)

        # Remove from YAML config
        config_file = find_config_file()
        try:
            update_yaml_file_bulk(config_file, {f"cameras.{replay_name}": ""})
        except Exception as e:
            logger.error("Failed to remove replay camera from YAML: %s", e)

        # Reset state
        self.replay_camera_name = None
        self.source_camera = None
        self.clip_path = None
        self.start_ts = None
        self.end_ts = None

        logger.info("Debug replay stopped and cleaned up: %s", replay_name)

    def _build_camera_config_dict(
        self,
        source_config,
        replay_name: str,
        clip_path: str,
    ) -> dict:
        """Build a camera config dictionary for the replay camera.

        Args:
            source_config: Source camera's CameraConfig
            replay_name: Name for the replay camera
            clip_path: Path to the replay clip file

        Returns:
            Camera config as a dictionary
        """
        # Extract detect config (exclude computed fields)
        detect_dict = source_config.detect.model_dump(
            exclude={"min_initialized", "max_disappeared", "enabled_in_config"}
        )

        # Extract objects config, using .dict() on filters to convert
        # RuntimeFilterConfig ndarray masks back to string coordinates
        objects_dict = {
            "track": source_config.objects.track,
            "mask": source_config.objects.mask,
            "filters": {
                name: filt.dict() if hasattr(filt, "dict") else filt.model_dump()
                for name, filt in source_config.objects.filters.items()
            },
        }

        # Extract zones (exclude_defaults avoids serializing empty defaults
        # like distances=[] that fail validation on re-parse)
        zones_dict = {}
        for zone_name, zone_config in source_config.zones.items():
            zone_dump = zone_config.model_dump(
                exclude={"contour", "color"}, exclude_defaults=True
            )
            # Always include required fields
            zone_dump.setdefault("coordinates", zone_config.coordinates)
            zones_dict[zone_name] = zone_dump

        # Extract motion config (exclude runtime fields)
        motion_dict = {}
        if source_config.motion is not None:
            motion_dict = source_config.motion.model_dump(
                exclude={
                    "frame_shape",
                    "raw_mask",
                    "mask",
                    "improved_contrast_enabled",
                }
            )

        return {
            "enabled": True,
            "ffmpeg": {
                "inputs": [
                    {
                        "path": clip_path,
                        "roles": ["detect"],
                        # -re paces reads at native speed, -stream_loop -1 loops
                        # the clip forever, +genpts regenerates timestamps
                        "input_args": "-re -stream_loop -1 -fflags +genpts",
                    }
                ],
                "hwaccel_args": [],
            },
            "detect": detect_dict,
            "objects": objects_dict,
            "zones": zones_dict,
            "motion": motion_dict,
            # disable persistence/review features so the replay leaves no artifacts
            "record": {"enabled": False},
            "snapshots": {"enabled": False},
            "review": {
                "alerts": {"enabled": False},
                "detections": {"enabled": False},
            },
            "birdseye": {"enabled": False},
            "audio": {"enabled": False},
            "lpr": {"enabled": False},
            "face_recognition": {"enabled": False},
        }

    def _cleanup_db(self, camera_name: str) -> None:
        """Defensively remove any database rows for the replay camera."""
        # Each table is cleaned independently so one failure does not
        # prevent the others from being attempted.
        try:
            Event.delete().where(Event.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay events: %s", e)

        try:
            Timeline.delete().where(Timeline.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay timeline: %s", e)

        try:
            Recordings.delete().where(Recordings.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay recordings: %s", e)

        try:
            ReviewSegment.delete().where(ReviewSegment.camera == camera_name).execute()
        except Exception as e:
            logger.error("Failed to delete replay review segments: %s", e)

    def _cleanup_files(self, camera_name: str) -> None:
        """Remove filesystem artifacts for the replay camera."""
        # Per-camera media directories that may have been created while
        # the replay camera was running.
        dirs_to_clean = [
            os.path.join(RECORD_DIR, camera_name),
            os.path.join(CLIPS_DIR, camera_name),
            os.path.join(THUMB_DIR, camera_name),
        ]

        for dir_path in dirs_to_clean:
            if os.path.exists(dir_path):
                try:
                    shutil.rmtree(dir_path)
                    logger.debug("Removed replay directory: %s", dir_path)
                except Exception as e:
                    logger.error("Failed to remove %s: %s", dir_path, e)

        # Remove replay clip and any related files
        if os.path.exists(REPLAY_DIR):
            try:
                shutil.rmtree(REPLAY_DIR)
                logger.debug("Removed replay cache directory")
            except Exception as e:
                logger.error("Failed to remove replay cache: %s", e)
def cleanup_replay_cameras(frigate_config: FrigateConfig) -> list[str]:
    """Remove any stale replay cameras from config and YAML on startup.

    This must be called BEFORE services start iterating the cameras dict.
    DB cleanup is deferred to cleanup_replay_cameras_db() after the database
    is bound.

    Args:
        frigate_config: The current Frigate configuration

    Returns:
        List of removed replay camera names (for deferred DB cleanup)
    """
    stale = [
        cam
        for cam in list(frigate_config.cameras.keys())
        if cam.startswith(REPLAY_CAMERA_PREFIX)
    ]
    if not stale:
        return []

    logger.info("Cleaning up stale replay cameras: %s", stale)

    # Drop each camera from the running config and queue its YAML removal.
    yaml_updates: dict[str, str] = {}
    for cam in stale:
        frigate_config.cameras.pop(cam, None)
        yaml_updates[f"cameras.{cam}"] = ""

    try:
        update_yaml_file_bulk(find_config_file(), yaml_updates)
    except Exception as e:
        logger.error("Failed to clean up replay cameras from YAML: %s", e)

    # Clean replay directory and files (no DB needed)
    cleaner = DebugReplayManager()
    for cam in stale:
        cleaner._cleanup_files(cam)

    if os.path.exists(REPLAY_DIR):
        try:
            shutil.rmtree(REPLAY_DIR)
        except Exception as e:
            logger.error("Failed to remove replay cache directory: %s", e)

    return stale
def cleanup_replay_cameras_db(replay_cameras: list[str]) -> None:
    """Clean up database rows for stale replay cameras.

    Must be called AFTER the database is bound.

    Args:
        replay_cameras: List of replay camera names from cleanup_replay_cameras()
    """
    if not replay_cameras:
        return

    # Reuse the manager's defensive per-table cleanup for each stale camera.
    cleaner = DebugReplayManager()
    for name in replay_cameras:
        cleaner._cleanup_db(name)

View File

@ -421,7 +421,9 @@ class EmbeddingMaintainer(threading.Thread):
if self.config.semantic_search.enabled:
self.embeddings.update_stats()
camera_config = self.config.cameras[camera]
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
# no need to process updated objects if no processors are active
if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
@ -639,7 +641,10 @@ class EmbeddingMaintainer(threading.Thread):
if not camera or camera not in self.config.cameras:
return
camera_config = self.config.cameras[camera]
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
dedicated_lpr_enabled = (
camera_config.type == CameraTypeEnum.lpr
and "license_plate" not in camera_config.objects.track

View File

@ -7,6 +7,7 @@ from typing import Dict
from frigate.comms.events_updater import EventEndPublisher, EventUpdateSubscriber
from frigate.config import FrigateConfig
from frigate.config.classification import ObjectClassificationType
from frigate.const import REPLAY_CAMERA_PREFIX
from frigate.events.types import EventStateEnum, EventTypeEnum
from frigate.models import Event
from frigate.util.builtin import to_relative_box
@ -146,7 +147,9 @@ class EventProcessor(threading.Thread):
if should_update_db(self.events_in_process[event_data["id"]], event_data):
updated_db = True
camera_config = self.config.cameras[camera]
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return
width = camera_config.detect.width
height = camera_config.detect.height
first_detector = list(self.config.detectors.values())[0]
@ -283,6 +286,10 @@ class EventProcessor(threading.Thread):
def handle_external_detection(
self, event_type: EventStateEnum, event_data: Event
) -> None:
# Skip replay cameras
if event_data.get("camera", "").startswith(REPLAY_CAMERA_PREFIX):
return
if event_type == EventStateEnum.start:
event = {
Event.id: event_data["id"],

View File

@ -420,7 +420,8 @@ class BirdsEyeFrameManager:
[
cam
for cam, cam_data in self.cameras.items()
if self.config.cameras[cam].birdseye.enabled
if cam in self.config.cameras
and self.config.cameras[cam].birdseye.enabled
and self.config.cameras[cam].enabled_in_config
and self.config.cameras[cam].enabled
and cam_data["last_active_frame"] > 0
@ -723,8 +724,11 @@ class BirdsEyeFrameManager:
Update birdseye for a specific camera with new frame data.
Returns (frame_changed, layout_changed) to indicate if the frame or layout changed.
"""
# don't process if birdseye is disabled for this camera
camera_config = self.config.cameras[camera]
# don't process if camera was removed or birdseye is disabled
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False, False
force_update = False
# disabling birdseye is a little tricky

View File

@ -8,6 +8,7 @@ import threading
from multiprocessing.synchronize import Event as MpEvent
from wsgiref.simple_server import make_server
import cv2
from ws4py.server.wsgirefserver import (
WebSocketWSGIHandler,
WebSocketWSGIRequestHandler,
@ -22,7 +23,12 @@ from frigate.config.camera.updater import (
CameraConfigUpdateEnum,
CameraConfigUpdateSubscriber,
)
from frigate.const import CACHE_DIR, CLIPS_DIR, PROCESS_PRIORITY_MED
from frigate.const import (
CACHE_DIR,
CLIPS_DIR,
PROCESS_PRIORITY_MED,
REPLAY_CAMERA_PREFIX,
)
from frigate.output.birdseye import Birdseye
from frigate.output.camera import JsmpegCamera
from frigate.output.preview import PreviewRecorder
@ -124,8 +130,9 @@ class OutputProcess(FrigateProcess):
jsmpeg_cameras[camera] = JsmpegCamera(
cam_config, self.stop_event, websocket_server
)
preview_recorders[camera] = PreviewRecorder(cam_config)
preview_write_times[camera] = 0
if not camera.startswith(REPLAY_CAMERA_PREFIX):
preview_recorders[camera] = PreviewRecorder(cam_config)
preview_write_times[camera] = 0
if self.config.birdseye.enabled:
birdseye = Birdseye(self.config, self.stop_event, websocket_server)
@ -138,13 +145,13 @@ class OutputProcess(FrigateProcess):
if CameraConfigUpdateEnum.add in updates:
for camera in updates["add"]:
camera_config = self.config.cameras[camera]
jsmpeg_cameras[camera] = JsmpegCamera(
self.config.cameras[camera], self.stop_event, websocket_server
camera_config, self.stop_event, websocket_server
)
preview_recorders[camera] = PreviewRecorder(
self.config.cameras[camera]
)
preview_write_times[camera] = 0
if not camera.startswith(REPLAY_CAMERA_PREFIX):
preview_recorders[camera] = PreviewRecorder(camera_config)
preview_write_times[camera] = 0
if (
self.config.birdseye.enabled
@ -174,7 +181,10 @@ class OutputProcess(FrigateProcess):
_,
) = data
if not self.config.cameras[camera].enabled:
if (
camera not in self.config.cameras
or not self.config.cameras[camera].enabled
):
continue
frame = frame_manager.get(
@ -198,10 +208,22 @@ class OutputProcess(FrigateProcess):
failed_frame_requests[camera] = 0
# send frames for low fps recording
preview_recorders[camera].write_data(
current_tracked_objects, motion_boxes, frame_time, frame
)
preview_write_times[camera] = frame_time
preview_recorder = preview_recorders.get(camera)
if preview_recorder is not None:
try:
preview_recorder.write_data(
current_tracked_objects, motion_boxes, frame_time, frame
)
preview_write_times[camera] = frame_time
except cv2.error:
if camera.startswith(REPLAY_CAMERA_PREFIX):
logger.debug(
"Skipping preview frame write for replay camera %s",
camera,
)
else:
raise
# send camera frame to ffmpeg process if websockets are connected
if any(
@ -219,6 +241,10 @@ class OutputProcess(FrigateProcess):
for ws in websocket_server.manager
)
):
if camera.startswith(REPLAY_CAMERA_PREFIX):
frame_manager.close(frame_name)
continue
birdseye.write_data(
camera,
current_tracked_objects,

View File

@ -287,11 +287,12 @@ class RecordingMaintainer(threading.Thread):
)
# publish most recently available recording time and None if disabled
camera_cfg = self.config.cameras.get(camera)
self.recordings_publisher.publish(
(
camera,
recordings[0]["start_time"].timestamp()
if self.config.cameras[camera].record.enabled
if camera_cfg and camera_cfg.record.enabled
else None,
None,
),
@ -315,9 +316,8 @@ class RecordingMaintainer(threading.Thread):
) -> Optional[Recordings]:
cache_path: str = recording["cache_path"]
start_time: datetime.datetime = recording["start_time"]
record_config = self.config.cameras[camera].record
# Just delete files if recordings are turned off
# Just delete files if camera removed or recordings are turned off
if (
camera not in self.config.cameras
or not self.config.cameras[camera].record.enabled

View File

@ -652,6 +652,9 @@ class ReviewSegmentMaintainer(threading.Thread):
if camera not in self.indefinite_events:
self.indefinite_events[camera] = {}
if camera not in self.config.cameras:
continue
if (
not self.config.cameras[camera].enabled
or not self.config.cameras[camera].record.enabled

View File

@ -340,6 +340,9 @@ def stats_snapshot(
stats["cameras"] = {}
for name, camera_stats in camera_metrics.items():
if name not in config.cameras:
continue
total_camera_fps += camera_stats.camera_fps.value
total_process_fps += camera_stats.process_fps.value
total_skipped_fps += camera_stats.skipped_fps.value

View File

@ -8,7 +8,7 @@ from pathlib import Path
from peewee import SQL, fn
from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR
from frigate.const import RECORD_DIR, REPLAY_CAMERA_PREFIX
from frigate.models import Event, Recordings
from frigate.util.builtin import clear_and_unlink
@ -32,6 +32,10 @@ class StorageMaintainer(threading.Thread):
def calculate_camera_bandwidth(self) -> None:
"""Calculate an average MB/hr for each camera."""
for camera in self.config.cameras.keys():
# Skip replay cameras
if camera.startswith(REPLAY_CAMERA_PREFIX):
continue
# cameras with < 50 segments should be refreshed to keep size accurate
# when few segments are available
if self.camera_storage_stats.get(camera, {}).get("needs_refresh", True):
@ -77,6 +81,10 @@ class StorageMaintainer(threading.Thread):
usages: dict[str, dict] = {}
for camera in self.config.cameras.keys():
# Skip replay cameras
if camera.startswith(REPLAY_CAMERA_PREFIX):
continue
camera_storage = (
Recordings.select(fn.SUM(Recordings.segment_size))
.where(Recordings.camera == camera, Recordings.segment_size != 0)

View File

@ -13,6 +13,7 @@ from pydantic import Json
from frigate.api.fastapi_app import create_fastapi_app
from frigate.config import FrigateConfig
from frigate.const import BASE_DIR, CACHE_DIR
from frigate.debug_replay import DebugReplayManager
from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
@ -141,6 +142,7 @@ class BaseTestHttp(unittest.TestCase):
stats,
event_metadata_publisher,
None,
DebugReplayManager(),
enforce_default_admin=False,
)

View File

@ -86,7 +86,10 @@ class TimelineProcessor(threading.Thread):
event_data: dict[Any, Any],
) -> bool:
"""Handle object detection."""
camera_config = self.config.cameras[camera]
save = False
camera_config = self.config.cameras.get(camera)
if camera_config is None:
return False
event_id = event_data["id"]
# Base timeline entry data that all entries will share

View File

@ -693,6 +693,10 @@ class TrackedObjectProcessor(threading.Thread):
camera_state = self.camera_states[camera]
camera_state.shutdown()
self.camera_states.pop(camera)
self.camera_activity.pop(camera, None)
self.last_motion_detected.pop(camera, None)
self.requestor.send_data(UPDATE_CAMERA_ACTIVITY, self.camera_activity)
# manage camera disabled state
for camera, config in self.config.cameras.items():
@ -752,7 +756,11 @@ class TrackedObjectProcessor(threading.Thread):
except queue.Empty:
continue
if not self.config.cameras[camera].enabled:
camera_config = self.config.cameras.get(camera)
if camera_config is None:
continue
if not camera_config.enabled:
logger.debug(f"Camera {camera} disabled, skipping update")
continue

View File

@ -16,7 +16,7 @@ from frigate.config import (
SnapshotsConfig,
UIConfig,
)
from frigate.const import CLIPS_DIR, THUMB_DIR
from frigate.const import CLIPS_DIR, REPLAY_CAMERA_PREFIX, THUMB_DIR
from frigate.detectors.detector_config import ModelConfig
from frigate.review.types import SeverityEnum
from frigate.util.builtin import sanitize_float
@ -621,6 +621,9 @@ class TrackedObject:
if not self.camera_config.name:
return
if self.camera_config.name.startswith(REPLAY_CAMERA_PREFIX):
return
directory = os.path.join(THUMB_DIR, self.camera_config.name)
if not os.path.exists(directory):

View File

@ -244,6 +244,35 @@ class CameraWatchdog(threading.Thread):
self._last_detect_status = status
self._last_status_update_time = now
def _send_record_status(self, status: str, now: float) -> None:
"""Send record status only if changed or retry_interval has elapsed."""
if (
status != self._last_record_status
or (now - self._last_status_update_time) >= self.sleeptime
):
self.requestor.send_data(f"{self.config.name}/status/record", status)
self._last_record_status = status
self._last_status_update_time = now
# Stall tracking (based on last processed frame)
self._stall_timestamps: deque[float] = deque()
self._stall_active: bool = False
# Status caching to reduce message volume
self._last_detect_status: str | None = None
self._last_record_status: str | None = None
self._last_status_update_time: float = 0.0
def _send_detect_status(self, status: str, now: float) -> None:
"""Send detect status only if changed or retry_interval has elapsed."""
if (
status != self._last_detect_status
or (now - self._last_status_update_time) >= self.sleeptime
):
self.requestor.send_data(f"{self.config.name}/status/detect", status)
self._last_detect_status = status
self._last_status_update_time = now
def _send_record_status(self, status: str, now: float) -> None:
"""Send record status only if changed or retry_interval has elapsed."""
if (

View File

@ -252,6 +252,7 @@
"review": "Review",
"explore": "Explore",
"export": "Export",
"actions": "Actions",
"uiPlayground": "UI Playground",
"faceLibrary": "Face Library",
"classification": "Classification",

View File

@ -0,0 +1,53 @@
{
"title": "Debug Replay",
"description": "Replay camera recordings for debugging. The object list shows a time-delayed summary of detected objects and the Messages tab shows a stream of Frigate's internal messages from the replay footage.",
"websocket_messages": "Messages",
"dialog": {
"title": "Start Debug Replay",
"description": "Create a temporary replay camera that loops historical footage for debugging object detection and tracking issues. The replay camera will have the same detection configuration as the source camera. Choose a time range to begin.",
"camera": "Source Camera",
"timeRange": "Time Range",
"preset": {
"1m": "Last 1 Minute",
"5m": "Last 5 Minutes",
"timeline": "From Timeline",
"custom": "Custom"
},
"startButton": "Start Replay",
"selectFromTimeline": "Select",
"starting": "Starting replay...",
"startLabel": "Start",
"endLabel": "End",
"toast": {
"success": "Debug replay started successfully",
"error": "Failed to start debug replay: {{error}}",
"alreadyActive": "A replay session is already active",
"stopped": "Debug replay stopped",
"stopError": "Failed to stop debug replay: {{error}}",
"goToReplay": "Go to Replay"
}
},
"page": {
"noSession": "No Active Replay Session",
"noSessionDesc": "Start a debug replay from the History view by clicking the Debug Replay button in the toolbar.",
"goToRecordings": "Go to History",
"sourceCamera": "Source Camera",
"replayCamera": "Replay Camera",
"initializingReplay": "Initializing replay...",
"stoppingReplay": "Stopping replay...",
"stopReplay": "Stop Replay",
"confirmStop": {
"title": "Stop Debug Replay?",
"description": "This will stop the replay session and clean up all temporary data. Are you sure?",
"confirm": "Stop Replay",
"cancel": "Cancel"
},
"activity": "Activity",
"objects": "Object List",
"audioDetections": "Audio Detections",
"noActivity": "No activity detected",
"activeTracking": "Active tracking",
"noActiveTracking": "No active tracking",
"configuration": "Configuration"
}
}

View File

@ -7,12 +7,32 @@
"logs": {
"frigate": "Frigate Logs - Frigate",
"go2rtc": "Go2RTC Logs - Frigate",
"nginx": "Nginx Logs - Frigate"
"nginx": "Nginx Logs - Frigate",
"websocket": "Messages Logs - Frigate"
}
},
"title": "System",
"metrics": "System metrics",
"logs": {
"websocket": {
"label": "Messages",
"pause": "Pause",
"resume": "Resume",
"clear": "Clear",
"filter": {
"all": "All topics",
"events": "Events",
"camera_activity": "Camera activity",
"system": "System",
"camera": "Camera",
"all_cameras": "All cameras"
},
"empty": "No messages captured yet",
"count": "{{count}} messages",
"expanded": {
"payload": "Payload"
}
},
"download": {
"label": "Download Logs"
},
@ -189,7 +209,8 @@
"cameraIsOffline": "{{camera}} is offline",
"detectIsSlow": "{{detect}} is slow ({{speed}} ms)",
"detectIsVerySlow": "{{detect}} is very slow ({{speed}} ms)",
"shmTooLow": "/dev/shm allocation ({{total}} MB) should be increased to at least {{min}} MB."
"shmTooLow": "/dev/shm allocation ({{total}} MB) should be increased to at least {{min}} MB.",
"debugReplayActive": "Debug replay session is active"
},
"enrichments": {
"title": "Enrichments",

View File

@ -30,6 +30,7 @@ const Classification = lazy(() => import("@/pages/ClassificationModel"));
const Chat = lazy(() => import("@/pages/Chat"));
const Logs = lazy(() => import("@/pages/Logs"));
const AccessDenied = lazy(() => import("@/pages/AccessDenied"));
const Replay = lazy(() => import("@/pages/Replay"));
function App() {
const { data: config } = useSWR<FrigateConfig>("config", {
@ -108,7 +109,8 @@ function DefaultAppView() {
<Route path="/faces" element={<FaceLibrary />} />
<Route path="/classification" element={<Classification />} />
<Route path="/chat" element={<Chat />} />
<Route path="/playground" element={<UIPlayground />} />
<Route path="/playground" element={<UIPlayground />} />{" "}
<Route path="/replay" element={<Replay />} />{" "}
</Route>
<Route path="/unauthorized" element={<AccessDenied />} />
<Route path="*" element={<Redirect to="/" />} />

View File

@ -1,5 +1,5 @@
import { baseUrl } from "./baseUrl";
import { useCallback, useEffect, useState } from "react";
import { useCallback, useEffect, useRef, useState } from "react";
import useWebSocket, { ReadyState } from "react-use-websocket";
import {
EmbeddingsReindexProgressType,
@ -17,6 +17,13 @@ import { FrigateStats } from "@/types/stats";
import { createContainer } from "react-tracked";
import useDeepMemo from "@/hooks/use-deep-memo";
export type WsFeedMessage = {
topic: string;
payload: unknown;
timestamp: number;
id: string;
};
type Update = {
topic: string;
payload: unknown;
@ -29,6 +36,9 @@ type WsState = {
type useValueReturn = [WsState, (update: Update) => void];
const wsMessageSubscribers = new Set<(msg: WsFeedMessage) => void>();
let wsMessageIdCounter = 0;
function useValue(): useValueReturn {
const wsUrl = `${baseUrl.replace(/^http/, "ws")}ws`;
@ -43,8 +53,13 @@ function useValue(): useValueReturn {
return;
}
const cameraActivity: { [key: string]: FrigateCameraState } =
JSON.parse(activityValue);
let cameraActivity: { [key: string]: Partial<FrigateCameraState> };
try {
cameraActivity = JSON.parse(activityValue);
} catch {
return;
}
if (Object.keys(cameraActivity).length === 0) {
return;
@ -53,6 +68,12 @@ function useValue(): useValueReturn {
const cameraStates: WsState = {};
Object.entries(cameraActivity).forEach(([name, state]) => {
const cameraConfig = state?.config;
if (!cameraConfig) {
return;
}
const {
record,
detect,
@ -67,7 +88,7 @@ function useValue(): useValueReturn {
detections,
object_descriptions,
review_descriptions,
} = state["config"];
} = cameraConfig;
cameraStates[`${name}/recordings/state`] = record ? "ON" : "OFF";
cameraStates[`${name}/enabled/state`] = enabled ? "ON" : "OFF";
cameraStates[`${name}/detect/state`] = detect ? "ON" : "OFF";
@ -115,6 +136,17 @@ function useValue(): useValueReturn {
...prevState,
[data.topic]: data.payload,
}));
// Notify feed subscribers
if (wsMessageSubscribers.size > 0) {
const feedMsg: WsFeedMessage = {
topic: data.topic,
payload: data.payload,
timestamp: Date.now(),
id: String(wsMessageIdCounter++),
};
wsMessageSubscribers.forEach((cb) => cb(feedMsg));
}
}
},
onOpen: () => {
@ -740,3 +772,16 @@ export function useJobStatus(
return { payload: currentJob as Job | null };
}
export function useWsMessageSubscribe(callback: (msg: WsFeedMessage) => void) {
const callbackRef = useRef(callback);
callbackRef.current = callback;
useEffect(() => {
const handler = (msg: WsFeedMessage) => callbackRef.current(msg);
wsMessageSubscribers.add(handler);
return () => {
wsMessageSubscribers.delete(handler);
};
}, []);
}

View File

@ -26,7 +26,8 @@ export default function CameraImage({
const containerRef = useRef<HTMLDivElement | null>(null);
const imgRef = useRef<HTMLImageElement | null>(null);
const { name } = config ? config.cameras[camera] : "";
const cameraConfig = config?.cameras?.[camera];
const { name } = cameraConfig ?? { name: camera };
const { payload: enabledState } = useEnabledState(camera);
const enabled = enabledState ? enabledState === "ON" : true;
@ -34,15 +35,15 @@ export default function CameraImage({
useResizeObserver(containerRef);
const requestHeight = useMemo(() => {
if (!config || containerHeight == 0) {
if (!cameraConfig || containerHeight == 0) {
return 360;
}
return Math.min(
config.cameras[camera].detect.height,
cameraConfig.detect.height,
Math.round(containerHeight * (isDesktop ? 1.1 : 1.25)),
);
}, [config, camera, containerHeight]);
}, [cameraConfig, containerHeight]);
const [isPortraitImage, setIsPortraitImage] = useState(false);

View File

@ -0,0 +1,46 @@
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "../ui/dropdown-menu";
import { Button } from "../ui/button";
import { useTranslation } from "react-i18next";
import { FaEllipsisVertical } from "react-icons/fa6";
type ActionsDropdownProps = {
onDebugReplayClick: () => void;
onExportClick: () => void;
};
/**
 * Toolbar "Actions" dropdown for the History view: exposes the Export and
 * Debug Replay actions via the supplied click handlers.
 */
export default function ActionsDropdown({
  onDebugReplayClick,
  onExportClick,
}: ActionsDropdownProps) {
  const { t } = useTranslation(["components/dialog", "views/replay", "common"]);

  // Resolve the trigger label once; it is used for both a11y and display.
  const actionsLabel = t("menu.actions", { ns: "common" });

  return (
    <DropdownMenu>
      <DropdownMenuTrigger asChild>
        <Button
          className="flex items-center gap-2"
          aria-label={actionsLabel}
          size="sm"
        >
          <FaEllipsisVertical className="size-5 text-secondary-foreground" />
          <div className="text-primary">{actionsLabel}</div>
        </Button>
      </DropdownMenuTrigger>
      <DropdownMenuContent align="end">
        <DropdownMenuItem onClick={onExportClick}>
          {t("menu.export", { ns: "common" })}
        </DropdownMenuItem>
        <DropdownMenuItem onClick={onDebugReplayClick}>
          {t("title", { ns: "views/replay" })}
        </DropdownMenuItem>
      </DropdownMenuContent>
    </DropdownMenu>
  );
}

View File

@ -0,0 +1,240 @@
import { useMemo, useState } from "react";
import { Button } from "../ui/button";
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
import { SelectSeparator } from "../ui/select";
import { TimeRange } from "@/types/timeline";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import { getUTCOffset } from "@/utils/dateUtil";
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
import { FaArrowRight, FaCalendarAlt } from "react-icons/fa";
import { isDesktop, isIOS } from "react-device-detect";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { useTranslation } from "react-i18next";
type CustomTimeSelectorProps = {
latestTime: number;
range?: TimeRange;
setRange: (range: TimeRange | undefined) => void;
startLabel: string;
endLabel: string;
};
/**
 * Paired start/end pickers (calendar date + native time input) for choosing
 * a custom replay window. The chosen bounds are pushed up via `setRange` as
 * epoch seconds; when no range is set yet, the defaults are the hour ending
 * at `latestTime`. Displayed clocks are shifted so they read in the timezone
 * configured in the Frigate UI rather than the browser's local timezone.
 */
export function CustomTimeSelector({
  latestTime,
  range,
  setRange,
  startLabel,
  endLabel,
}: CustomTimeSelectorProps) {
  const { t } = useTranslation(["common"]);
  const { data: config } = useSWR<FrigateConfig>("config");

  // times
  // UTC offset (minutes) of the timezone configured in the Frigate UI;
  // undefined when no explicit timezone is configured.
  const timezoneOffset = useMemo(
    () =>
      config?.ui.timezone
        ? Math.round(getUTCOffset(new Date(), config.ui.timezone))
        : undefined,
    [config?.ui.timezone],
  );
  // UTC offset (minutes) of the browser's own timezone.
  const localTimeOffset = useMemo(
    () =>
      Math.round(
        getUTCOffset(
          new Date(),
          Intl.DateTimeFormat().resolvedOptions().timeZone,
        ),
      ),
    [],
  );
  // Display-shifted start time: falls back to one hour before latestTime.
  // NOTE(review): a zero UTC offset is falsy and skips the shift — confirm
  // that is intended for configured-UTC deployments.
  const startTime = useMemo(() => {
    let time = range?.after || latestTime - 3600;
    if (timezoneOffset) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);
  // Display-shifted end time: falls back to latestTime.
  const endTime = useMemo(() => {
    let time = range?.before || latestTime;
    if (timezoneOffset) {
      time = time + (timezoneOffset - localTimeOffset) * 60;
    }
    return time;
  }, [range, latestTime, timezoneOffset, localTimeOffset]);
  // Human-readable labels for the two popover trigger buttons, honoring the
  // configured 12h/24h preference.
  const formattedStart = useFormattedTimestamp(
    startTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour")
      : t("time.formattedTimestamp.12hour"),
  );
  const formattedEnd = useFormattedTimestamp(
    endTime,
    config?.ui.time_format == "24hour"
      ? t("time.formattedTimestamp.24hour")
      : t("time.formattedTimestamp.12hour"),
  );
  // HH:MM:SS strings feeding the native <input type="time"> controls.
  const startClock = useMemo(() => {
    const date = new Date(startTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [startTime]);
  const endClock = useMemo(() => {
    const date = new Date(endTime * 1000);
    return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
  }, [endTime]);
  // calendars
  // Only one calendar popover is open at a time; opening one closes the other.
  const [startOpen, setStartOpen] = useState(false);
  const [endOpen, setEndOpen] = useState(false);
  return (
    <div
      className={`mt-3 flex items-center rounded-lg bg-secondary text-secondary-foreground ${isDesktop ? "mx-8 gap-2 px-2" : "pl-2"}`}
    >
      <FaCalendarAlt />
      <div className="flex flex-wrap items-center">
        {/* start date/time picker */}
        <Popover
          open={startOpen}
          onOpenChange={(open) => {
            if (!open) {
              setStartOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={startLabel}
              variant={startOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                setStartOpen(true);
                setEndOpen(false);
              }}
            >
              {formattedStart}
            </Button>
          </PopoverTrigger>
          <PopoverContent className="flex flex-col items-center" disablePortal>
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(startTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                // NOTE(review): the +1s offset presumably mirrors the export
                // dialog's day-boundary handling — confirm it is intended here.
                setRange({
                  before: endTime,
                  after: day.getTime() / 1000 + 1,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="startTime"
              type="time"
              value={startClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const clock = e.target.value;
                // iOS time inputs omit seconds, so pad with "00".
                const [hour, minute, second] = isIOS
                  ? [...clock.split(":"), "00"]
                  : clock.split(":");
                const start = new Date(startTime * 1000);
                start.setHours(
                  parseInt(hour),
                  parseInt(minute),
                  parseInt(second ?? 0),
                  0,
                );
                setRange({
                  before: endTime,
                  after: start.getTime() / 1000,
                });
              }}
            />
          </PopoverContent>
        </Popover>
        <FaArrowRight className="size-4 text-primary" />
        {/* end date/time picker */}
        <Popover
          open={endOpen}
          onOpenChange={(open) => {
            if (!open) {
              setEndOpen(false);
            }
          }}
        >
          <PopoverTrigger asChild>
            <Button
              className={`text-primary ${isDesktop ? "" : "text-xs"}`}
              aria-label={endLabel}
              variant={endOpen ? "select" : "default"}
              size="sm"
              onClick={() => {
                setEndOpen(true);
                setStartOpen(false);
              }}
            >
              {formattedEnd}
            </Button>
          </PopoverTrigger>
          <PopoverContent className="flex flex-col items-center" disablePortal>
            <TimezoneAwareCalendar
              timezone={config?.ui.timezone}
              selectedDay={new Date(endTime * 1000)}
              onSelect={(day) => {
                if (!day) {
                  return;
                }
                setRange({
                  after: startTime,
                  before: day.getTime() / 1000,
                });
              }}
            />
            <SelectSeparator className="bg-secondary" />
            <input
              className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
              id="endTime"
              type="time"
              value={endClock}
              step={isIOS ? "60" : "1"}
              onChange={(e) => {
                const clock = e.target.value;
                // iOS time inputs omit seconds, so pad with "00".
                const [hour, minute, second] = isIOS
                  ? [...clock.split(":"), "00"]
                  : clock.split(":");
                const end = new Date(endTime * 1000);
                end.setHours(
                  parseInt(hour),
                  parseInt(minute),
                  parseInt(second ?? 0),
                  0,
                );
                setRange({
                  before: end.getTime() / 1000,
                  after: startTime,
                });
              }}
            />
          </PopoverContent>
        </Popover>
      </div>
    </div>
  );
}

View File

@ -0,0 +1,367 @@
import { useCallback, useState } from "react";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "../ui/dialog";
import { Label } from "../ui/label";
import { RadioGroup, RadioGroupItem } from "../ui/radio-group";
import { Button } from "../ui/button";
import axios from "axios";
import { toast } from "sonner";
import { isDesktop } from "react-device-detect";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import { useNavigate } from "react-router-dom";
import { useTranslation } from "react-i18next";
import { SelectSeparator } from "../ui/select";
import ActivityIndicator from "../indicators/activity-indicator";
import { LuBug, LuPlay, LuX } from "react-icons/lu";
import { ExportMode } from "@/types/filter";
import { TimeRange } from "@/types/timeline";
import { cn } from "@/lib/utils";
import { CustomTimeSelector } from "./CustomTimeSelector";
const REPLAY_TIME_OPTIONS = ["1", "5", "timeline", "custom"] as const;
type ReplayTimeOption = (typeof REPLAY_TIME_OPTIONS)[number];
type DebugReplayContentProps = {
currentTime: number;
latestTime: number;
range?: TimeRange;
selectedOption: ReplayTimeOption;
isStarting: boolean;
onSelectedOptionChange: (option: ReplayTimeOption) => void;
onStart: () => void;
onCancel: () => void;
setRange: (range: TimeRange | undefined) => void;
setMode: (mode: ExportMode) => void;
};
/**
 * Inner body of the debug-replay dialog/drawer: a radio group of time-range
 * presets (1m / 5m / from-timeline / custom), an optional custom range
 * picker, and the footer actions. "timeline" defers the actual start: it
 * seeds a ±30s range around `currentTime` and switches the parent into
 * timeline-selection mode; every other option calls `onStart` directly.
 */
export function DebugReplayContent({
  currentTime,
  latestTime,
  range,
  selectedOption,
  isStarting,
  onSelectedOptionChange,
  onStart,
  onCancel,
  setRange,
  setMode,
}: DebugReplayContentProps) {
  const { t } = useTranslation(["views/replay"]);
  return (
    <div className="w-full">
      {/* desktop dialog supplies its own header; the mobile drawer does not */}
      {isDesktop && (
        <>
          <DialogHeader>
            <DialogTitle>{t("dialog.title")}</DialogTitle>
            <DialogDescription>{t("dialog.description")}</DialogDescription>
          </DialogHeader>
          <SelectSeparator className="my-4 bg-secondary" />
        </>
      )}
      {/* Time range */}
      <div className="mt-4 flex flex-col gap-2">
        <RadioGroup
          className="mt-2 flex flex-col gap-4"
          value={selectedOption}
          onValueChange={(value) =>
            onSelectedOptionChange(value as ReplayTimeOption)
          }
        >
          {REPLAY_TIME_OPTIONS.map((opt) => (
            <div key={opt} className="flex items-center gap-2">
              <RadioGroupItem
                className={
                  opt === selectedOption
                    ? "bg-selected from-selected/50 to-selected/90 text-selected"
                    : "bg-secondary from-secondary/50 to-secondary/90 text-secondary"
                }
                id={`replay-${opt}`}
                value={opt}
              />
              <Label className="cursor-pointer" htmlFor={`replay-${opt}`}>
                {/* numeric presets map to the "<n>m" translation keys */}
                {opt === "custom"
                  ? t("dialog.preset.custom")
                  : opt === "timeline"
                    ? t("dialog.preset.timeline")
                    : t(`dialog.preset.${opt}m`)}
              </Label>
            </div>
          ))}
        </RadioGroup>
      </div>
      {/* Custom time inputs */}
      {selectedOption === "custom" && (
        <CustomTimeSelector
          latestTime={latestTime}
          range={range}
          setRange={setRange}
          startLabel={t("dialog.startLabel")}
          endLabel={t("dialog.endLabel")}
        />
      )}
      {isDesktop && <SelectSeparator className="my-4 bg-secondary" />}
      <DialogFooter
        className={isDesktop ? "" : "mt-3 flex flex-col-reverse gap-4"}
      >
        <div
          className={`cursor-pointer p-2 text-center ${isDesktop ? "" : "w-full"}`}
          onClick={onCancel}
        >
          {t("button.cancel", { ns: "common" })}
        </div>
        <Button
          className={isDesktop ? "" : "w-full"}
          variant="select"
          size="sm"
          disabled={isStarting}
          onClick={() => {
            if (selectedOption === "timeline") {
              // hand off to timeline selection instead of starting now
              setRange({
                after: currentTime - 30,
                before: currentTime + 30,
              });
              setMode("timeline");
            } else {
              onStart();
            }
          }}
        >
          {isStarting ? <ActivityIndicator className="mr-2" /> : null}
          {isStarting
            ? t("dialog.starting")
            : selectedOption === "timeline"
              ? t("dialog.selectFromTimeline")
              : t("dialog.startButton")}
        </Button>
      </DialogFooter>
    </div>
  );
}
type DebugReplayDialogProps = {
camera: string;
currentTime: number;
latestTime: number;
range?: TimeRange;
mode: ExportMode;
setRange: (range: TimeRange | undefined) => void;
setMode: (mode: ExportMode) => void;
};
/**
 * Debug-replay entry point: renders the start dialog (desktop) or drawer
 * (mobile), the floating save/cancel overlay used during timeline selection,
 * and owns the POST to `debug_replay/start`. On success it clears the
 * selection state and navigates to /replay; a 409 response means a replay
 * session is already active and is surfaced with a "Go to Replay" action.
 */
export default function DebugReplayDialog({
  camera,
  currentTime,
  latestTime,
  range,
  mode,
  setRange,
  setMode,
}: DebugReplayDialogProps) {
  const { t } = useTranslation(["views/replay"]);
  const navigate = useNavigate();
  const [selectedOption, setSelectedOption] = useState<ReplayTimeOption>("1");
  const [isStarting, setIsStarting] = useState(false);
  // Numeric presets immediately seed a range ending at latestTime;
  // "custom"/"timeline" leave the range for their own flows.
  const handleTimeOptionChange = useCallback(
    (option: ReplayTimeOption) => {
      setSelectedOption(option);
      if (option === "custom" || option === "timeline") {
        return;
      }
      const minutes = parseInt(option, 10);
      const end = latestTime;
      setRange({ after: end - minutes * 60, before: end });
    },
    [latestTime, setRange],
  );
  // Validates the range, then starts the replay session on the backend.
  const handleStart = useCallback(() => {
    if (!range || range.before <= range.after) {
      toast.error(
        t("dialog.toast.error", { error: "End time must be after start time" }),
        { position: "top-center" },
      );
      return;
    }
    setIsStarting(true);
    axios
      .post("debug_replay/start", {
        camera: camera,
        start_time: range.after,
        end_time: range.before,
      })
      .then((response) => {
        if (response.status === 200) {
          toast.success(t("dialog.toast.success"), {
            position: "top-center",
          });
          // reset the selection flow before leaving the page
          setMode("none");
          setRange(undefined);
          navigate("/replay");
        }
      })
      .catch((error) => {
        const errorMessage =
          error.response?.data?.message ||
          error.response?.data?.detail ||
          "Unknown error";
        if (error.response?.status === 409) {
          // a session is already running; offer a shortcut to it
          toast.error(t("dialog.toast.alreadyActive"), {
            position: "top-center",
            closeButton: true,
            dismissible: false,
            action: (
              <a href="/replay" target="_blank" rel="noopener noreferrer">
                <Button>{t("dialog.toast.goToReplay")}</Button>
              </a>
            ),
          });
        } else {
          toast.error(t("dialog.toast.error", { error: errorMessage }), {
            position: "top-center",
          });
        }
      })
      .finally(() => {
        setIsStarting(false);
      });
  }, [camera, range, navigate, setMode, setRange, t]);
  const handleCancel = useCallback(() => {
    setMode("none");
    setRange(undefined);
  }, [setMode, setRange]);
  // Desktop uses a centered dialog; mobile uses a bottom drawer.
  const Overlay = isDesktop ? Dialog : Drawer;
  const Trigger = isDesktop ? DialogTrigger : DrawerTrigger;
  const Content = isDesktop ? DialogContent : DrawerContent;
  return (
    <>
      <SaveDebugReplayOverlay
        className="pointer-events-none absolute left-1/2 top-8 z-50 -translate-x-1/2"
        show={mode == "timeline"}
        isStarting={isStarting}
        onSave={handleStart}
        onCancel={handleCancel}
      />
      <Overlay
        open={mode == "select"}
        onOpenChange={(open) => {
          if (!open) {
            setMode("none");
          }
        }}
      >
        {/* inline trigger is mobile-only; desktop opens via mode == "select" */}
        {!isDesktop && (
          <Trigger asChild>
            <Button
              className="flex items-center gap-2"
              aria-label={t("title")}
              size="sm"
              onClick={() => {
                // default to the last minute on open
                const end = latestTime;
                setRange({ after: end - 60, before: end });
                setSelectedOption("1");
                setMode("select");
              }}
            >
              <LuBug className="size-5 rounded-md bg-secondary-foreground fill-secondary stroke-secondary p-1" />
              {/* NOTE(review): this branch is inside `!isDesktop`, so the
                  `isDesktop && ...` label below can never render — dead code? */}
              {isDesktop && <div className="text-primary">{t("title")}</div>}
            </Button>
          </Trigger>
        )}
        <Content
          className={
            isDesktop
              ? "max-h-[90dvh] w-auto max-w-2xl overflow-visible sm:rounded-lg md:rounded-2xl"
              : "max-h-[75dvh] overflow-y-auto rounded-lg px-4 pb-4 md:rounded-2xl"
          }
        >
          <DebugReplayContent
            currentTime={currentTime}
            latestTime={latestTime}
            range={range}
            selectedOption={selectedOption}
            isStarting={isStarting}
            onSelectedOptionChange={handleTimeOptionChange}
            onStart={handleStart}
            onCancel={handleCancel}
            setRange={setRange}
            setMode={setMode}
          />
        </Content>
      </Overlay>
    </>
  );
}
// Props for the floating start/cancel bar used during timeline range selection.
type SaveDebugReplayOverlayProps = {
  className: string;
  // when false the bar is rendered but invisible (preserves animation state)
  show: boolean;
  // disables both buttons and swaps the play icon for a spinner
  isStarting: boolean;
  onSave: () => void;
  onCancel: () => void;
};
// Floating cancel/start button bar shown while the user picks a debug replay
// range directly on the recording timeline.
export function SaveDebugReplayOverlay({
  className,
  show,
  isStarting,
  onSave,
  onCancel,
}: SaveDebugReplayOverlayProps) {
  const { t } = useTranslation(["views/replay"]);
  return (
    <div className={className}>
      <div
        className={cn(
          "pointer-events-auto flex items-center justify-center gap-2 rounded-lg px-2",
          show ? "duration-500 animate-in slide-in-from-top" : "invisible",
          "mx-auto mt-5 text-center",
        )}
      >
        <Button
          className="flex items-center gap-1 text-primary"
          aria-label={t("button.cancel", { ns: "common" })}
          size="sm"
          disabled={isStarting}
          onClick={onCancel}
        >
          <LuX />
          {t("button.cancel", { ns: "common" })}
        </Button>
        <Button
          className="flex items-center gap-1"
          aria-label={t("dialog.startButton")}
          variant="select"
          size="sm"
          disabled={isStarting}
          onClick={onSave}
        >
          {isStarting ? <ActivityIndicator className="size-4" /> : <LuPlay />}
          {isStarting ? t("dialog.starting") : t("dialog.startButton")}
        </Button>
      </div>
    </div>
  );
}

View File

@ -1,4 +1,4 @@
import { useCallback, useMemo, useState } from "react";
import { useCallback, useState } from "react";
import {
Dialog,
DialogContent,
@ -12,16 +12,12 @@ import { Label } from "../ui/label";
import { RadioGroup, RadioGroupItem } from "../ui/radio-group";
import { Button } from "../ui/button";
import { ExportMode } from "@/types/filter";
import { FaArrowDown, FaArrowRight, FaCalendarAlt } from "react-icons/fa";
import { FaArrowDown } from "react-icons/fa";
import axios from "axios";
import { toast } from "sonner";
import { Input } from "../ui/input";
import { TimeRange } from "@/types/timeline";
import { useFormattedTimestamp } from "@/hooks/use-date-utils";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
import {
Select,
SelectContent,
@ -30,15 +26,15 @@ import {
SelectTrigger,
SelectValue,
} from "../ui/select";
import { isDesktop, isIOS, isMobile } from "react-device-detect";
import { isDesktop, isMobile } from "react-device-detect";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import SaveExportOverlay from "./SaveExportOverlay";
import { getUTCOffset } from "@/utils/dateUtil";
import { baseUrl } from "@/api/baseUrl";
import { cn } from "@/lib/utils";
import { GenericVideoPlayer } from "../player/GenericVideoPlayer";
import { useTranslation } from "react-i18next";
import { ExportCase } from "@/types/export";
import { CustomTimeSelector } from "./CustomTimeSelector";
const EXPORT_OPTIONS = [
"1",
@ -167,31 +163,33 @@ export default function ExportDialog({
}
}}
>
<Trigger asChild>
<Button
className="flex items-center gap-2"
aria-label={t("menu.export", { ns: "common" })}
size="sm"
onClick={() => {
const now = new Date(latestTime * 1000);
let start = 0;
now.setHours(now.getHours() - 1);
start = now.getTime() / 1000;
setRange({
before: latestTime,
after: start,
});
setMode("select");
}}
>
<FaArrowDown className="rounded-md bg-secondary-foreground fill-secondary p-1" />
{isDesktop && (
<div className="text-primary">
{t("menu.export", { ns: "common" })}
</div>
)}
</Button>
</Trigger>
{!isDesktop && (
<Trigger asChild>
<Button
className="flex items-center gap-2"
aria-label={t("menu.export", { ns: "common" })}
size="sm"
onClick={() => {
const now = new Date(latestTime * 1000);
let start = 0;
now.setHours(now.getHours() - 1);
start = now.getTime() / 1000;
setRange({
before: latestTime,
after: start,
});
setMode("select");
}}
>
<FaArrowDown className="rounded-md bg-secondary-foreground fill-secondary p-1" />
{isDesktop && (
<div className="text-primary">
{t("menu.export", { ns: "common" })}
</div>
)}
</Button>
</Trigger>
)}
<Content
className={
isDesktop
@ -332,6 +330,8 @@ export function ExportContent({
latestTime={latestTime}
range={range}
setRange={setRange}
startLabel={t("export.time.start.title")}
endLabel={t("export.time.end.title")}
/>
)}
<Input
@ -414,234 +414,6 @@ export function ExportContent({
);
}
type CustomTimeSelectorProps = {
latestTime: number;
range?: TimeRange;
setRange: (range: TimeRange | undefined) => void;
};
function CustomTimeSelector({
latestTime,
range,
setRange,
}: CustomTimeSelectorProps) {
const { t } = useTranslation(["components/dialog"]);
const { data: config } = useSWR<FrigateConfig>("config");
// times
const timezoneOffset = useMemo(
() =>
config?.ui.timezone
? Math.round(getUTCOffset(new Date(), config.ui.timezone))
: undefined,
[config?.ui.timezone],
);
const localTimeOffset = useMemo(
() =>
Math.round(
getUTCOffset(
new Date(),
Intl.DateTimeFormat().resolvedOptions().timeZone,
),
),
[],
);
const startTime = useMemo(() => {
let time = range?.after || latestTime - 3600;
if (timezoneOffset) {
time = time + (timezoneOffset - localTimeOffset) * 60;
}
return time;
}, [range, latestTime, timezoneOffset, localTimeOffset]);
const endTime = useMemo(() => {
let time = range?.before || latestTime;
if (timezoneOffset) {
time = time + (timezoneOffset - localTimeOffset) * 60;
}
return time;
}, [range, latestTime, timezoneOffset, localTimeOffset]);
const formattedStart = useFormattedTimestamp(
startTime,
config?.ui.time_format == "24hour"
? t("time.formattedTimestamp.24hour", { ns: "common" })
: t("time.formattedTimestamp.12hour", { ns: "common" }),
);
const formattedEnd = useFormattedTimestamp(
endTime,
config?.ui.time_format == "24hour"
? t("time.formattedTimestamp.24hour", { ns: "common" })
: t("time.formattedTimestamp.12hour", { ns: "common" }),
);
const startClock = useMemo(() => {
const date = new Date(startTime * 1000);
return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
}, [startTime]);
const endClock = useMemo(() => {
const date = new Date(endTime * 1000);
return `${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
}, [endTime]);
// calendars
const [startOpen, setStartOpen] = useState(false);
const [endOpen, setEndOpen] = useState(false);
return (
<div
className={`mt-3 flex items-center rounded-lg bg-secondary text-secondary-foreground ${isDesktop ? "mx-8 gap-2 px-2" : "pl-2"}`}
>
<FaCalendarAlt />
<div className="flex flex-wrap items-center">
<Popover
modal={false}
open={startOpen}
onOpenChange={(open) => {
if (!open) {
setStartOpen(false);
}
}}
>
<PopoverTrigger asChild>
<Button
className={`text-primary ${isDesktop ? "" : "text-xs"}`}
aria-label={t("export.time.start.title")}
variant={startOpen ? "select" : "default"}
size="sm"
onClick={() => {
setStartOpen(true);
setEndOpen(false);
}}
>
{formattedStart}
</Button>
</PopoverTrigger>
<PopoverContent
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<TimezoneAwareCalendar
timezone={config?.ui.timezone}
selectedDay={new Date(startTime * 1000)}
onSelect={(day) => {
if (!day) {
return;
}
setRange({
before: endTime,
after: day.getTime() / 1000 + 1,
});
}}
/>
<SelectSeparator className="bg-secondary" />
<input
className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
id="startTime"
type="time"
value={startClock}
step={isIOS ? "60" : "1"}
onChange={(e) => {
const clock = e.target.value;
const [hour, minute, second] = isIOS
? [...clock.split(":"), "00"]
: clock.split(":");
const start = new Date(startTime * 1000);
start.setHours(
parseInt(hour),
parseInt(minute),
parseInt(second ?? 0),
0,
);
setRange({
before: endTime,
after: start.getTime() / 1000,
});
}}
/>
</PopoverContent>
</Popover>
<FaArrowRight className="size-4 text-primary" />
<Popover
modal={false}
open={endOpen}
onOpenChange={(open) => {
if (!open) {
setEndOpen(false);
}
}}
>
<PopoverTrigger asChild>
<Button
className={`text-primary ${isDesktop ? "" : "text-xs"}`}
aria-label={t("export.time.end.title")}
variant={endOpen ? "select" : "default"}
size="sm"
onClick={() => {
setEndOpen(true);
setStartOpen(false);
}}
>
{formattedEnd}
</Button>
</PopoverTrigger>
<PopoverContent
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<TimezoneAwareCalendar
timezone={config?.ui.timezone}
selectedDay={new Date(endTime * 1000)}
onSelect={(day) => {
if (!day) {
return;
}
setRange({
after: startTime,
before: day.getTime() / 1000,
});
}}
/>
<SelectSeparator className="bg-secondary" />
<input
className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
id="endTime"
type="time"
value={endClock}
step={isIOS ? "60" : "1"}
onChange={(e) => {
const clock = e.target.value;
const [hour, minute, second] = isIOS
? [...clock.split(":"), "00"]
: clock.split(":");
const end = new Date(endTime * 1000);
end.setHours(
parseInt(hour),
parseInt(minute),
parseInt(second ?? 0),
0,
);
setRange({
before: end.getTime() / 1000,
after: startTime,
});
}}
/>
</PopoverContent>
</Popover>
</div>
</div>
);
}
type ExportPreviewDialogProps = {
camera: string;
range?: TimeRange;

View File

@ -2,8 +2,13 @@ import { useCallback, useState } from "react";
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
import { Button } from "../ui/button";
import { FaArrowDown, FaCalendarAlt, FaCog, FaFilter } from "react-icons/fa";
import { LuBug } from "react-icons/lu";
import { TimeRange } from "@/types/timeline";
import { ExportContent, ExportPreviewDialog } from "./ExportDialog";
import {
DebugReplayContent,
SaveDebugReplayOverlay,
} from "./DebugReplayDialog";
import { ExportMode, GeneralFilter } from "@/types/filter";
import ReviewActivityCalendar from "./ReviewActivityCalendar";
import { SelectSeparator } from "../ui/select";
@ -16,19 +21,32 @@ import {
import { getEndOfDayTimestamp } from "@/utils/dateUtil";
import { GeneralFilterContent } from "../filter/ReviewFilterGroup";
import { toast } from "sonner";
import axios from "axios";
import axios, { AxiosError } from "axios";
import SaveExportOverlay from "./SaveExportOverlay";
import { isIOS, isMobile } from "react-device-detect";
import { useTranslation } from "react-i18next";
import { useNavigate } from "react-router-dom";
type DrawerMode = "none" | "select" | "export" | "calendar" | "filter";
type DrawerMode =
| "none"
| "select"
| "export"
| "calendar"
| "filter"
| "debug-replay";
const DRAWER_FEATURES = ["export", "calendar", "filter"] as const;
const DRAWER_FEATURES = [
"export",
"calendar",
"filter",
"debug-replay",
] as const;
export type DrawerFeatures = (typeof DRAWER_FEATURES)[number];
const DEFAULT_DRAWER_FEATURES: DrawerFeatures[] = [
"export",
"calendar",
"filter",
"debug-replay",
];
type MobileReviewSettingsDrawerProps = {
@ -45,6 +63,10 @@ type MobileReviewSettingsDrawerProps = {
recordingsSummary?: RecordingsSummary;
allLabels: string[];
allZones: string[];
debugReplayMode?: ExportMode;
debugReplayRange?: TimeRange;
setDebugReplayMode?: (mode: ExportMode) => void;
setDebugReplayRange?: (range: TimeRange | undefined) => void;
onUpdateFilter: (filter: ReviewFilter) => void;
setRange: (range: TimeRange | undefined) => void;
setMode: (mode: ExportMode) => void;
@ -64,13 +86,26 @@ export default function MobileReviewSettingsDrawer({
recordingsSummary,
allLabels,
allZones,
debugReplayMode = "none",
debugReplayRange,
setDebugReplayMode = () => {},
setDebugReplayRange = () => {},
onUpdateFilter,
setRange,
setMode,
setShowExportPreview,
}: MobileReviewSettingsDrawerProps) {
const { t } = useTranslation(["views/recording", "components/dialog"]);
const { t } = useTranslation([
"views/recording",
"components/dialog",
"views/replay",
]);
const navigate = useNavigate();
const [drawerMode, setDrawerMode] = useState<DrawerMode>("none");
const [selectedReplayOption, setSelectedReplayOption] = useState<
"1" | "4" | "8" | "timeline" | "custom"
>("1");
const [isDebugReplayStarting, setIsDebugReplayStarting] = useState(false);
// exports
@ -140,6 +175,76 @@ export default function MobileReviewSettingsDrawer({
});
}, [camera, name, range, selectedCaseId, setRange, setName, setMode, t]);
const onStartDebugReplay = useCallback(async () => {
if (
!debugReplayRange ||
debugReplayRange.before <= debugReplayRange.after
) {
toast.error(
t("dialog.toast.error", {
error: "End time must be after start time",
ns: "views/replay",
}),
{ position: "top-center" },
);
return;
}
setIsDebugReplayStarting(true);
try {
const response = await axios.post("debug_replay/start", {
camera: camera,
start_time: debugReplayRange.after,
end_time: debugReplayRange.before,
});
if (response.status === 200) {
toast.success(t("dialog.toast.success", { ns: "views/replay" }), {
position: "top-center",
});
setDebugReplayMode("none");
setDebugReplayRange(undefined);
setDrawerMode("none");
navigate("/replay");
}
} catch (error) {
const axiosError = error as AxiosError<{
message?: string;
detail?: string;
}>;
const errorMessage =
axiosError.response?.data?.message ||
axiosError.response?.data?.detail ||
"Unknown error";
if (axiosError.response?.status === 409) {
toast.error(t("dialog.toast.alreadyActive", { ns: "views/replay" }), {
position: "top-center",
});
} else {
toast.error(
t("dialog.toast.error", {
error: errorMessage,
ns: "views/replay",
}),
{
position: "top-center",
},
);
}
} finally {
setIsDebugReplayStarting(false);
}
}, [
camera,
debugReplayRange,
navigate,
setDebugReplayMode,
setDebugReplayRange,
t,
]);
// filters
const [currentFilter, setCurrentFilter] = useState<GeneralFilter>({
@ -196,6 +301,26 @@ export default function MobileReviewSettingsDrawer({
{t("filter")}
</Button>
)}
{features.includes("debug-replay") && (
<Button
className="flex w-full items-center justify-center gap-2"
aria-label={t("title", { ns: "views/replay" })}
onClick={() => {
const now = new Date(latestTime * 1000);
now.setHours(now.getHours() - 1);
setDebugReplayRange({
after: now.getTime() / 1000,
before: latestTime,
});
setSelectedReplayOption("1");
setDrawerMode("debug-replay");
setDebugReplayMode("select");
}}
>
<LuBug className="size-5 rounded-md bg-secondary-foreground fill-secondary stroke-secondary p-1" />
{t("title", { ns: "views/replay" })}
</Button>
)}
</div>
);
} else if (drawerMode == "export") {
@ -311,6 +436,47 @@ export default function MobileReviewSettingsDrawer({
/>
</div>
);
} else if (drawerMode == "debug-replay") {
const handleTimeOptionChange = (
option: "1" | "4" | "8" | "timeline" | "custom",
) => {
setSelectedReplayOption(option);
if (option === "custom" || option === "timeline") {
return;
}
const hours = parseInt(option);
const end = latestTime;
const now = new Date(end * 1000);
now.setHours(now.getHours() - hours);
setDebugReplayRange({ after: now.getTime() / 1000, before: end });
};
content = (
<DebugReplayContent
currentTime={currentTime}
latestTime={latestTime}
range={debugReplayRange}
selectedOption={selectedReplayOption}
isStarting={isDebugReplayStarting}
onSelectedOptionChange={handleTimeOptionChange}
onStart={onStartDebugReplay}
onCancel={() => {
setDebugReplayMode("none");
setDebugReplayRange(undefined);
setDrawerMode("select");
}}
setRange={setDebugReplayRange}
setMode={(mode) => {
setDebugReplayMode(mode);
if (mode == "timeline") {
setDrawerMode("none");
}
}}
/>
);
}
return (
@ -322,6 +488,16 @@ export default function MobileReviewSettingsDrawer({
onCancel={() => setMode("none")}
onPreview={() => setShowExportPreview(true)}
/>
<SaveDebugReplayOverlay
className="pointer-events-none absolute left-1/2 top-8 z-50 -translate-x-1/2"
show={debugReplayRange != undefined && debugReplayMode == "timeline"}
isStarting={isDebugReplayStarting}
onSave={onStartDebugReplay}
onCancel={() => {
setDebugReplayMode("none");
setDebugReplayRange(undefined);
}}
/>
<ExportPreviewDialog
camera={camera}
range={range}
@ -354,7 +530,9 @@ export default function MobileReviewSettingsDrawer({
/>
</Button>
</DrawerTrigger>
<DrawerContent className="mx-1 flex max-h-[80dvh] flex-col items-center gap-2 overflow-hidden rounded-t-2xl px-4 pb-4">
<DrawerContent
className={`mx-1 flex max-h-[80dvh] flex-col items-center gap-2 rounded-t-2xl px-4 pb-4 ${drawerMode == "export" || drawerMode == "debug-replay" ? "overflow-visible" : "overflow-hidden"}`}
>
{content}
</DrawerContent>
</Drawer>

View File

@ -0,0 +1,244 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useTranslation } from "react-i18next";
import useSWR from "swr";
import { WsFeedMessage } from "@/api/ws";
import { useWsMessageBuffer } from "@/hooks/use-ws-message-buffer";
import WsMessageRow from "./WsMessageRow";
import { Button } from "@/components/ui/button";
import { ToggleGroup, ToggleGroupItem } from "@/components/ui/toggle-group";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Badge } from "@/components/ui/badge";
import { FaEraser, FaPause, FaPlay } from "react-icons/fa";
import { FrigateConfig } from "@/types/frigateConfig";
// Named topic filters for the feed; "all" disables topic filtering entirely.
type TopicPreset = "all" | "events" | "camera_activity" | "system";
// Exact topic names matched by each preset ("all" is a sentinel, not a Set).
const PRESET_TOPICS: Record<TopicPreset, Set<string> | "all"> = {
  all: "all",
  events: new Set(["events", "reviews", "tracked_object_update", "triggers"]),
  camera_activity: new Set(["camera_activity", "audio_detections"]),
  system: new Set([
    "stats",
    "model_state",
    "job_state",
    "embeddings_reindex_progress",
    "audio_transcription_state",
    "birdseye_layout",
  ]),
};
// camera_activity preset also matches topics with camera prefix patterns
// (e.g. "front_door/motion"); matched by substring in matchesPreset below.
const CAMERA_ACTIVITY_TOPIC_PATTERNS = [
  "/motion",
  "/audio",
  "/detect",
  "/recordings",
  "/enabled",
  "/snapshots",
  "/ptz",
];
// Return true when a websocket topic should be shown under the given preset.
// Exact names come from PRESET_TOPICS; the camera_activity preset additionally
// matches per-camera topics by suffix substring.
function matchesPreset(topic: string, preset: TopicPreset): boolean {
  const allowed = PRESET_TOPICS[preset];
  if (allowed === "all" || allowed.has(topic)) {
    return true;
  }
  return (
    preset === "camera_activity" &&
    CAMERA_ACTIVITY_TOPIC_PATTERNS.some((pattern) => topic.includes(pattern))
  );
}
type WsMessageFeedProps = {
  // maximum number of messages retained by the buffer hook
  maxSize?: number;
  // initial camera filter selection (user may change it)
  defaultCamera?: string;
  // when set, camera filtering is fixed and the selector is hidden
  lockedCamera?: string;
  showCameraBadge?: boolean;
};
// Live-updating list of websocket messages with topic-preset and camera
// filtering, pause/resume, clear, and stick-to-bottom auto-scroll.
export default function WsMessageFeed({
  maxSize = 500,
  defaultCamera,
  lockedCamera,
  showCameraBadge = true,
}: WsMessageFeedProps) {
  const { t } = useTranslation(["views/system"]);
  const [paused, setPaused] = useState(false);
  const [topicPreset, setTopicPreset] = useState<TopicPreset>("all");
  const [cameraFilter, setCameraFilter] = useState<string>(
    lockedCamera ?? defaultCamera ?? "all",
  );
  // camera filtering happens inside the buffer hook; topic filtering below
  const { messages, clear } = useWsMessageBuffer(maxSize, paused, {
    cameraFilter,
  });
  const { data: config } = useSWR<FrigateConfig>("config", {
    revalidateOnFocus: false,
  });
  const cameraNames = useMemo(() => {
    if (!config?.cameras) return [];
    return Object.keys(config.cameras).sort();
  }, [config]);
  const filteredMessages = useMemo(() => {
    return messages.filter((msg: WsFeedMessage) => {
      if (!matchesPreset(msg.topic, topicPreset)) return false;
      return true;
    });
  }, [messages, topicPreset]);
  // Auto-scroll logic: stick to the bottom only while the user is already
  // within 40px of it; refs avoid re-renders on every scroll event.
  const scrollContainerRef = useRef<HTMLDivElement>(null);
  const autoScrollRef = useRef(true);
  const handleScroll = useCallback(() => {
    const el = scrollContainerRef.current;
    if (!el) return;
    const atBottom = el.scrollHeight - el.scrollTop - el.clientHeight < 40;
    autoScrollRef.current = atBottom;
  }, []);
  useEffect(() => {
    const el = scrollContainerRef.current;
    if (!el || !autoScrollRef.current) return;
    el.scrollTop = el.scrollHeight;
  }, [filteredMessages.length]);
  return (
    <div className="flex size-full flex-col">
      {/* Toolbar */}
      <div className="flex flex-row items-start justify-between gap-2 border-b border-secondary p-2">
        <div className="flex flex-col flex-wrap items-start gap-2">
          <ToggleGroup
            type="single"
            size="sm"
            value={topicPreset}
            onValueChange={(val: string) => {
              if (val) setTopicPreset(val as TopicPreset);
            }}
            className="flex-wrap"
          >
            <ToggleGroupItem
              value="all"
              className={topicPreset === "all" ? "" : "text-muted-foreground"}
            >
              {t("logs.websocket.filter.all")}
            </ToggleGroupItem>
            <ToggleGroupItem
              value="events"
              className={
                topicPreset === "events" ? "" : "text-muted-foreground"
              }
            >
              {t("logs.websocket.filter.events")}
            </ToggleGroupItem>
            <ToggleGroupItem
              value="camera_activity"
              className={
                topicPreset === "camera_activity" ? "" : "text-muted-foreground"
              }
            >
              {t("logs.websocket.filter.camera_activity")}
            </ToggleGroupItem>
            <ToggleGroupItem
              value="system"
              className={
                topicPreset === "system" ? "" : "text-muted-foreground"
              }
            >
              {t("logs.websocket.filter.system")}
            </ToggleGroupItem>
          </ToggleGroup>
          {/* camera selector is hidden when the camera is locked by the caller */}
          {!lockedCamera && (
            <Select value={cameraFilter} onValueChange={setCameraFilter}>
              <SelectTrigger className="h-8 w-[140px] text-xs">
                <SelectValue placeholder={t("logs.websocket.filter.camera")} />
              </SelectTrigger>
              <SelectContent>
                <SelectItem value="all">
                  {t("logs.websocket.filter.all_cameras")}
                </SelectItem>
                {cameraNames.map((cam) => (
                  <SelectItem key={cam} value={cam}>
                    {config?.cameras[cam]?.friendly_name || cam}
                  </SelectItem>
                ))}
              </SelectContent>
            </Select>
          )}
        </div>
        <div className="flex flex-col items-end gap-3">
          <Badge variant="secondary" className="text-xs text-primary-variant">
            {t("logs.websocket.count", {
              count: filteredMessages.length,
            })}
          </Badge>
          <div className="flex items-center gap-1">
            <Button
              variant="outline"
              size="sm"
              className="h-7 gap-1 px-2 text-xs"
              onClick={() => setPaused(!paused)}
              aria-label={
                paused ? t("logs.websocket.resume") : t("logs.websocket.pause")
              }
            >
              {paused ? (
                <FaPlay className="size-2.5" />
              ) : (
                <FaPause className="size-2.5" />
              )}
              {paused ? t("logs.websocket.resume") : t("logs.websocket.pause")}
            </Button>
            <Button
              variant="outline"
              size="sm"
              className="h-7 gap-1 px-2 text-xs"
              onClick={clear}
              aria-label={t("logs.websocket.clear")}
            >
              <FaEraser className="size-2.5" />
              {t("logs.websocket.clear")}
            </Button>
          </div>
        </div>
      </div>
      {/* Feed area */}
      <div
        ref={scrollContainerRef}
        onScroll={handleScroll}
        className="scrollbar-container flex-1 overflow-y-auto"
      >
        {filteredMessages.length === 0 ? (
          <div className="flex size-full items-center justify-center p-8 text-sm text-muted-foreground">
            {t("logs.websocket.empty")}
          </div>
        ) : (
          filteredMessages.map((msg: WsFeedMessage) => (
            <WsMessageRow
              key={msg.id}
              message={msg}
              showCameraBadge={showCameraBadge}
            />
          ))
        )}
      </div>
    </div>
  );
}

View File

@ -0,0 +1,429 @@
import { memo, useCallback, useState } from "react";
import { WsFeedMessage } from "@/api/ws";
import { cn } from "@/lib/utils";
import { ChevronRight } from "lucide-react";
import { useTranslation } from "react-i18next";
import { extractCameraName } from "@/utils/wsUtil";
import { getIconForLabel } from "@/utils/iconUtil";
import { LuCheck, LuCopy } from "react-icons/lu";
// Coarse grouping used to color the topic badge in the feed.
type TopicCategory = "events" | "camera_activity" | "system" | "other";
// Tailwind class strings (light + dark variants) per topic category.
const TOPIC_CATEGORY_COLORS: Record<TopicCategory, string> = {
  events: "bg-blue-500/20 text-blue-700 dark:text-blue-300 border-blue-500/30",
  camera_activity:
    "bg-green-500/20 text-green-700 dark:text-green-300 border-green-500/30",
  system:
    "bg-purple-500/20 text-purple-700 dark:text-purple-300 border-purple-500/30",
  other: "bg-gray-500/20 text-gray-700 dark:text-gray-300 border-gray-500/30",
};
// Badge colors for the event lifecycle "type" field (start/update/end).
const EVENT_TYPE_COLORS: Record<string, string> = {
  start:
    "bg-green-500/20 text-green-700 dark:text-green-300 border-green-500/30",
  update: "bg-cyan-500/20 text-cyan-700 dark:text-cyan-300 border-cyan-500/30",
  end: "bg-red-500/20 text-red-700 dark:text-red-300 border-red-500/30",
};
// Badge colors for tracked_object_update sub-types.
const TRACKED_OBJECT_UPDATE_COLORS: Record<string, string> = {
  description:
    "bg-amber-500/20 text-amber-700 dark:text-amber-300 border-amber-500/30",
  face: "bg-pink-500/20 text-pink-700 dark:text-pink-300 border-pink-500/30",
  lpr: "bg-yellow-500/20 text-yellow-700 dark:text-yellow-300 border-yellow-500/30",
  classification:
    "bg-violet-500/20 text-violet-700 dark:text-violet-300 border-violet-500/30",
};
// Badge color classes for an event lifecycle type; orange for unknown types.
function getEventTypeColor(eventType: string): string {
  const fallback =
    "bg-orange-500/20 text-orange-700 dark:text-orange-300 border-orange-500/30";
  return EVENT_TYPE_COLORS[eventType] ?? fallback;
}
// Badge color classes for a tracked_object_update sub-type; orange fallback.
function getTrackedObjectTypeColor(objectType: string): string {
  const fallback =
    "bg-orange-500/20 text-orange-700 dark:text-orange-300 border-orange-500/30";
  return TRACKED_OBJECT_UPDATE_COLORS[objectType] ?? fallback;
}
// Topics whose payloads describe tracked-object / review events.
const EVENT_TOPICS = new Set([
  "events",
  "reviews",
  "tracked_object_update",
  "triggers",
]);
// Backend/system status topics.
const SYSTEM_TOPICS = new Set([
  "stats",
  "model_state",
  "job_state",
  "embeddings_reindex_progress",
  "audio_transcription_state",
  "birdseye_layout",
]);
// Classify a websocket topic into one of the four badge-color categories.
// Per-camera topics look like "<camera>/<suffix>", so they are matched by
// suffix substring rather than exact name.
function getTopicCategory(topic: string): TopicCategory {
  if (EVENT_TOPICS.has(topic)) {
    return "events";
  }
  if (SYSTEM_TOPICS.has(topic)) {
    return "system";
  }
  const cameraSuffixes = [
    "/motion",
    "/audio",
    "/detect",
    "/recordings",
    "/enabled",
    "/snapshots",
    "/ptz",
  ];
  const isCameraTopic =
    topic === "camera_activity" ||
    topic === "audio_detections" ||
    cameraSuffixes.some((suffix) => topic.includes(suffix));
  return isCameraTopic ? "camera_activity" : "other";
}
// Render a millisecond epoch timestamp as local wall-clock "HH:MM:SS.mmm".
function formatTimestamp(ts: number): string {
  const date = new Date(ts);
  const pad = (value: number, width: number) =>
    String(value).padStart(width, "0");
  return (
    `${pad(date.getHours(), 2)}:${pad(date.getMinutes(), 2)}:` +
    `${pad(date.getSeconds(), 2)}.${pad(date.getMilliseconds(), 3)}`
  );
}
// Produce a short one-line summary of a message payload for the collapsed
// feed row. Tries topic-specific shapes first, then falls back to listing up
// to three scalar keys, a key count, or a truncated string.
function getPayloadSummary(
  topic: string,
  payload: unknown,
  hideType: boolean = false,
): string {
  if (payload === null || payload === undefined) return "";
  try {
    // payloads may arrive as JSON strings or as already-parsed objects
    const data = typeof payload === "string" ? JSON.parse(payload) : payload;
    if (typeof data === "object" && data !== null) {
      // Topic-specific summary handlers
      if (topic === "tracked_object_update") {
        return getTrackedObjectUpdateSummary(data);
      }
      // event-style payloads: { type, after: { label, sub_label, ... } }
      // NOTE(review): `"label" in (data.after || data)` throws if data.after
      // is a truthy non-object (e.g. a string); the outer catch masks it as a
      // string fallback — confirm the payload shape upstream.
      if ("type" in data && "label" in (data.after || data)) {
        const after = data.after || data;
        const parts: string[] = [];
        if (!hideType) {
          parts.push(`type: ${data.type}`);
        }
        parts.push(`label: ${after.label || "?"}`);
        // Add sub_label for events topic if present
        if (topic === "events" && after.sub_label) {
          parts.push(`sub_label: ${after.sub_label}`);
        }
        return parts.join(", ");
      }
      if ("type" in data && "camera" in data) {
        if (hideType) {
          return `camera: ${data.camera}`;
        }
        return `type: ${data.type}, camera: ${data.camera}`;
      }
      // generic object: show scalar key/value pairs, or just the key count
      const keys = Object.keys(data);
      if (keys.length <= 3) {
        return keys
          .map((k) => {
            const v = data[k];
            if (typeof v === "string" || typeof v === "number") {
              return `${k}: ${v}`;
            }
            return k;
          })
          .join(", ");
      }
      return `{${keys.length} keys}`;
    }
    const str = String(data);
    return str.length > 80 ? str.slice(0, 80) + "…" : str;
  } catch {
    // unparseable JSON: show the raw payload, truncated
    const str = String(payload);
    return str.length > 80 ? str.slice(0, 80) + "…" : str;
  }
}
// One-line summary for tracked_object_update payloads, keyed on the
// payload's "type" field (description / face / lpr / classification).
function getTrackedObjectUpdateSummary(data: unknown): string {
  if (typeof data !== "object" || data === null) return "";
  const record = data as Record<string, unknown>;
  const updateType = record.type as string;
  if (updateType === "description") {
    return record.description ? `${record.description}` : "no description";
  }
  if (updateType === "face") {
    return (record.name as string | undefined) || "unknown";
  }
  if (updateType === "lpr") {
    const name = record.name as string | undefined;
    const plate = record.plate as string | undefined;
    return name || plate || "unknown";
  }
  if (updateType === "classification") {
    const fields: string[] = [];
    const model = record.model as string | undefined;
    const subLabel = record.sub_label as string | undefined;
    const attribute = record.attribute as string | undefined;
    if (model) fields.push(`model: ${model}`);
    if (subLabel) fields.push(`sub_label: ${subLabel}`);
    if (attribute) fields.push(`attribute: ${attribute}`);
    return fields.length > 0 ? fields.join(", ") : "classification";
  }
  // unrecognized sub-types fall back to the raw type name
  return updateType || "unknown";
}
// Pull a top-level "type" field out of the payload for the type badge;
// returns null when absent or the payload is not an object.
function extractTypeForBadge(payload: unknown): string | null {
  if (payload == null) {
    return null;
  }
  try {
    const parsed = typeof payload === "string" ? JSON.parse(payload) : payload;
    if (parsed !== null && typeof parsed === "object" && "type" in parsed) {
      return parsed.type as string;
    }
  } catch {
    // malformed JSON — no badge
  }
  return null;
}
// Show the type badge whenever a non-empty type was extracted. The topic
// parameter is currently unused but kept to preserve the call signature.
function shouldShowTypeBadge(topic: string, type: string | null): boolean {
  return Boolean(type);
}
// The reviews topic has no useful one-line summary, so suppress it there.
function shouldShowSummary(topic: string): boolean {
  const hiddenTopics = ["reviews"];
  return !hiddenTopics.includes(topic);
}
// Escape the characters that are unsafe in HTML element content, in a
// single pass over the string.
function escapeHtml(s: string): string {
  const entities: Record<string, string> = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
  };
  return s.replace(/[&<>]/g, (ch) => entities[ch]);
}
// Render a value as pretty-printed JSON with inline <span> color markup for
// keys, strings, keywords and numbers. String tokens (the only place user
// data can carry markup characters) are HTML-escaped before wrapping; the
// result is injected via dangerouslySetInnerHTML by the caller.
function highlightJson(value: unknown): string {
  // Try to auto-parse JSON strings
  if (typeof value === "string") {
    try {
      const parsed = JSON.parse(value);
      if (typeof parsed === "object" && parsed !== null) {
        value = parsed;
      }
    } catch {
      // not JSON
    }
  }
  // JSON.stringify returns undefined for e.g. bare functions — fall back
  const raw = JSON.stringify(value, null, 2) ?? String(value);
  // Single regex pass to colorize JSON tokens
  // alternatives, in order: "key": | "string" | true/false/null | number
  return raw.replace(
    /("(?:[^"\\]|\\.)*")\s*:|("(?:[^"\\]|\\.)*")|(true|false|null)|(-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?)/g,
    (match, key: string, str: string, keyword: string, num: string) => {
      if (key) {
        return `<span class="text-indigo-400">${escapeHtml(key)}</span>:`;
      }
      if (str) {
        const content = escapeHtml(str);
        return `<span class="text-green-500">${content}</span>`;
      }
      if (keyword) {
        return `<span class="text-orange-500">${keyword}</span>`;
      }
      if (num) {
        return `<span class="text-cyan-500">${num}</span>`;
      }
      return match;
    },
  );
}
// Copy-to-clipboard button for a message payload; shows a checkmark for two
// seconds after a successful copy.
function CopyJsonButton({ payload }: { payload: unknown }) {
  const [copied, setCopied] = useState(false);
  const handleCopy = useCallback(
    (e: React.MouseEvent) => {
      // don't toggle the surrounding expandable row
      e.stopPropagation();
      const text =
        typeof payload === "string"
          ? payload
          : JSON.stringify(payload, null, 2);
      // navigator.clipboard is undefined in insecure (non-HTTPS) contexts and
      // writeText can reject (permission denied); guard and swallow so a
      // failed copy doesn't throw or surface as an unhandled rejection.
      if (!navigator.clipboard) {
        return;
      }
      navigator.clipboard
        .writeText(text)
        .then(() => {
          setCopied(true);
          setTimeout(() => setCopied(false), 2000);
        })
        .catch(() => {
          // copy failed; leave the icon unchanged
        });
    },
    [payload],
  );
  return (
    <button
      onClick={handleCopy}
      className="rounded p-1 text-muted-foreground transition-colors hover:bg-secondary hover:text-foreground"
      aria-label="Copy JSON"
    >
      {copied ? (
        <LuCheck className="size-3.5 text-green-500" />
      ) : (
        <LuCopy className="size-3.5" />
      )}
    </button>
  );
}
type WsMessageRowProps = {
  message: WsFeedMessage;
  // hide the camera badge when the feed is already scoped to one camera
  showCameraBadge?: boolean;
};
// Single expandable row in the websocket feed: collapsed it shows timestamp,
// topic/type/camera badges, an object-label icon and a payload summary;
// expanded it shows the syntax-highlighted JSON payload with a copy button.
// Memoized because the parent feed re-renders on every incoming message.
const WsMessageRow = memo(function WsMessageRow({
  message,
  showCameraBadge = true,
}: WsMessageRowProps) {
  const { t } = useTranslation(["views/system"]);
  const [expanded, setExpanded] = useState(false);
  const category = getTopicCategory(message.topic);
  const cameraName = extractCameraName(message);
  const messageType = extractTypeForBadge(message.payload);
  const showTypeBadge = shouldShowTypeBadge(message.topic, messageType);
  const summary = getPayloadSummary(message.topic, message.payload);
  // best-effort extraction of the object label (e.g. "person") for an icon
  const eventLabel = (() => {
    try {
      const data =
        typeof message.payload === "string"
          ? JSON.parse(message.payload)
          : message.payload;
      if (typeof data === "object" && data !== null) {
        return (data.after?.label as string) || (data.label as string) || null;
      }
    } catch {
      // ignore
    }
    return null;
  })();
  // parsed once here so the expanded view and copy button share the result
  const parsedPayload = (() => {
    try {
      return typeof message.payload === "string"
        ? JSON.parse(message.payload)
        : message.payload;
    } catch {
      return message.payload;
    }
  })();
  const handleToggle = useCallback(() => {
    setExpanded((prev) => !prev);
  }, []);
  // Determine which color function to use based on topic
  const getTypeBadgeColor = (type: string | null) => {
    if (!type) return "";
    if (message.topic === "tracked_object_update") {
      return getTrackedObjectTypeColor(type);
    }
    return getEventTypeColor(type);
  };
  return (
    <div className="border-b border-secondary/50">
      <div
        className={cn(
          "flex cursor-pointer items-center gap-2 px-2 py-1.5 transition-colors hover:bg-muted/50",
          expanded && "bg-muted/30",
        )}
        onClick={handleToggle}
      >
        <ChevronRight
          className={cn(
            "size-3.5 shrink-0 text-muted-foreground transition-transform",
            expanded && "rotate-90",
          )}
        />
        <span className="font-mono shrink-0 text-xs text-muted-foreground">
          {formatTimestamp(message.timestamp)}
        </span>
        <span
          className={cn(
            "font-mono shrink-0 rounded border px-1.5 py-0.5 text-xs",
            TOPIC_CATEGORY_COLORS[category],
          )}
        >
          {message.topic}
        </span>
        {showTypeBadge && messageType && (
          <span
            className={cn(
              "shrink-0 rounded border px-1.5 py-0.5 text-xs",
              getTypeBadgeColor(messageType),
            )}
          >
            {messageType}
          </span>
        )}
        {showCameraBadge && cameraName && (
          <span className="shrink-0 rounded bg-secondary px-1.5 py-0.5 text-xs text-secondary-foreground">
            {cameraName}
          </span>
        )}
        {eventLabel && (
          <span className="shrink-0">
            {getIconForLabel(eventLabel, "size-3.5 text-primary-variant")}
          </span>
        )}
        {shouldShowSummary(message.topic) && (
          <span className="min-w-0 truncate text-xs text-muted-foreground">
            {summary}
          </span>
        )}
      </div>
      {expanded && (
        <div className="border-t border-secondary/30 bg-background_alt/50 px-4 py-2">
          <div className="mb-1 flex items-center justify-between">
            <span className="text-xs font-medium uppercase tracking-wider text-muted-foreground">
              {t("logs.websocket.expanded.payload")}
            </span>
            <CopyJsonButton payload={parsedPayload} />
          </div>
          {/* highlightJson HTML-escapes string tokens before injection */}
          <pre
            className="font-mono scrollbar-container max-h-[60vh] overflow-auto rounded bg-background p-2 text-[11px] leading-relaxed"
            dangerouslySetInnerHTML={{ __html: highlightJson(parsedPayload) }}
          />
        </div>
      )}
    </div>
  );
});
export default WsMessageRow;

View File

@ -2,6 +2,7 @@ import { useContext } from "react";
import { AuthContext } from "@/context/auth-context";
import useSWR from "swr";
import { FrigateConfig } from "@/types/frigateConfig";
import { isReplayCamera } from "@/utils/cameraUtil";
export function useAllowedCameras() {
const { auth } = useContext(AuthContext);
@ -14,9 +15,11 @@ export function useAllowedCameras() {
auth.user?.role === "admin" ||
!auth.isAuthenticated // anonymous internal port
) {
// return all cameras
return config?.cameras ? Object.keys(config.cameras) : [];
// return all cameras, excluding replay cameras
return config?.cameras
? Object.keys(config.cameras).filter((name) => !isReplayCamera(name))
: [];
}
return auth.allowedCameras || [];
return (auth.allowedCameras || []).filter((name) => !isReplayCamera(name));
}

View File

@ -26,7 +26,7 @@ type useCameraActivityReturn = {
};
export function useCameraActivity(
camera: CameraConfig,
camera: CameraConfig | undefined,
revalidateOnFocus: boolean = true,
): useCameraActivityReturn {
const { data: config } = useSWR<FrigateConfig>("config", {
@ -47,7 +47,7 @@ export function useCameraActivity(
// init camera activity
const { payload: updatedCameraState } = useInitialCameraState(
camera.name,
camera?.name ?? "",
revalidateOnFocus,
);
useEffect(() => {
@ -60,7 +60,7 @@ export function useCameraActivity(
const memoizedAudioState = useDeepMemo(updatedAudioState);
useEffect(() => {
if (memoizedAudioState) {
if (memoizedAudioState && camera?.name) {
setAudioDetections(memoizedAudioState[camera.name]);
}
}, [memoizedAudioState, camera]);
@ -72,8 +72,8 @@ export function useCameraActivity(
[objects],
);
const { payload: cameraEnabled } = useEnabledState(camera.name);
const { payload: detectingMotion } = useMotionActivity(camera.name);
const { payload: cameraEnabled } = useEnabledState(camera?.name ?? "");
const { payload: detectingMotion } = useMotionActivity(camera?.name ?? "");
const { payload: event } = useFrigateEvents();
const updatedEvent = useDeepMemo(event);
@ -91,7 +91,7 @@ export function useCameraActivity(
return;
}
if (updatedEvent.after.camera !== camera.name) {
if (!camera?.name || updatedEvent.after.camera !== camera.name) {
return;
}
@ -158,6 +158,10 @@ export function useCameraActivity(
return false;
}
if (!camera?.name) {
return false;
}
return (
cameras[camera.name]?.camera_fps == 0 && stats["service"].uptime > 60
);

View File

@ -9,6 +9,7 @@ import { useMemo } from "react";
import useSWR from "swr";
import useDeepMemo from "./use-deep-memo";
import { capitalizeAll, capitalizeFirstLetter } from "@/utils/stringUtil";
import { isReplayCamera } from "@/utils/cameraUtil";
import { useFrigateStats } from "@/api/ws";
import { useTranslation } from "react-i18next";
@ -16,6 +17,9 @@ import { useTranslation } from "react-i18next";
export default function useStats(stats: FrigateStats | undefined) {
const { t } = useTranslation(["views/system"]);
const { data: config } = useSWR<FrigateConfig>("config");
const { data: debugReplayStatus } = useSWR("debug_replay/status", {
revalidateOnFocus: false,
});
const memoizedStats = useDeepMemo(stats);
@ -74,6 +78,11 @@ export default function useStats(stats: FrigateStats | undefined) {
return;
}
// Skip replay cameras
if (isReplayCamera(name)) {
return;
}
const cameraName = config.cameras?.[name]?.friendly_name ?? name;
if (config.cameras[name].enabled && cam["camera_fps"] == 0) {
problems.push({
@ -96,7 +105,15 @@ export default function useStats(stats: FrigateStats | undefined) {
);
const cameraName = config?.cameras?.[name]?.friendly_name ?? name;
if (!isNaN(ffmpegAvg) && ffmpegAvg >= CameraFfmpegThreshold.error) {
// Skip ffmpeg warnings for replay cameras when debug replay is active
if (
!isNaN(ffmpegAvg) &&
ffmpegAvg >= CameraFfmpegThreshold.error &&
!(
debugReplayStatus?.active && debugReplayStatus?.replay_camera === name
)
) {
problems.push({
text: t("stats.ffmpegHighCpuUsage", {
camera: capitalizeFirstLetter(capitalizeAll(cameraName)),
@ -119,8 +136,19 @@ export default function useStats(stats: FrigateStats | undefined) {
}
});
// Add message if debug replay is active
if (debugReplayStatus?.active) {
problems.push({
text: t("stats.debugReplayActive", {
defaultValue: "Debug replay session is active",
}),
color: "text-selected",
relevantLink: "/replay",
});
}
return problems;
}, [config, memoizedStats, t]);
}, [config, memoizedStats, t, debugReplayStatus]);
return { potentialProblems };
}

View File

@ -0,0 +1,89 @@
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useWsMessageSubscribe, WsFeedMessage } from "@/api/ws";
import { extractCameraName } from "@/utils/wsUtil";
// Return shape of useWsMessageBuffer: the bounded message snapshot and a
// function that empties the buffer.
type UseWsMessageBufferReturn = {
  messages: WsFeedMessage[];
  clear: () => void;
};
// Optional per-message filtering applied before buffering.
type MessageFilter = {
  cameraFilter?: string; // "all" or specific camera name
};
// Buffers live websocket messages in a ref so high-frequency traffic does
// not trigger a render per message. Renders are batched via a 200ms timer,
// and the buffer is capped at maxSize (oldest messages dropped first).
// When `paused` is true incoming messages are discarded entirely.
export function useWsMessageBuffer(
  maxSize: number = 2000,
  paused: boolean = false,
  filter?: MessageFilter,
): UseWsMessageBufferReturn {
  const bufferRef = useRef<WsFeedMessage[]>([]);
  // Bumped whenever the buffer content should be re-exposed to consumers.
  const [version, setVersion] = useState(0);
  // Mirror paused/filter into refs so the subscription callback always
  // reads the latest values without having to re-subscribe.
  const pausedRef = useRef(paused);
  const filterRef = useRef(filter);
  pausedRef.current = paused;
  filterRef.current = filter;
  const batchTimerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  // Set when new messages arrived since the last flush.
  const dirtyRef = useRef(false);
  // Flush at most once every 200ms instead of rendering per message.
  useEffect(() => {
    batchTimerRef.current = setInterval(() => {
      if (dirtyRef.current) {
        dirtyRef.current = false;
        setVersion((v) => v + 1);
      }
    }, 200);
    return () => {
      if (batchTimerRef.current) {
        clearInterval(batchTimerRef.current);
      }
    };
  }, []);
  // Applies the (current) camera filter; messages with no extractable
  // camera name are excluded when a specific camera is selected.
  const shouldIncludeMessage = useCallback((msg: WsFeedMessage): boolean => {
    const currentFilter = filterRef.current;
    if (!currentFilter) return true;
    // Check camera filter
    if (currentFilter.cameraFilter && currentFilter.cameraFilter !== "all") {
      const msgCamera = extractCameraName(msg);
      if (msgCamera !== currentFilter.cameraFilter) {
        return false;
      }
    }
    return true;
  }, []);
  useWsMessageSubscribe(
    useCallback(
      (msg: WsFeedMessage) => {
        if (pausedRef.current) return;
        if (!shouldIncludeMessage(msg)) return;
        const buf = bufferRef.current;
        buf.push(msg);
        if (buf.length > maxSize) {
          // Drop the oldest entries to keep the buffer bounded.
          buf.splice(0, buf.length - maxSize);
        }
        dirtyRef.current = true;
      },
      [shouldIncludeMessage, maxSize],
    ),
  );
  // Empties the buffer and forces an immediate snapshot refresh.
  const clear = useCallback(() => {
    bufferRef.current = [];
    setVersion((v) => v + 1);
  }, []);
  // version is used to trigger re-renders; we spread the buffer
  // into a new array so that downstream useMemo dependencies
  // see a new reference and recompute.
  // eslint-disable-next-line react-hooks/exhaustive-deps
  const messages = useMemo(() => [...bufferRef.current], [version]);
  return { messages, clear };
}

View File

@ -642,4 +642,4 @@ function CaseAssignmentDialog({
);
}
export default Exports;
export default Exports;

View File

@ -35,10 +35,12 @@ import { isIOS, isMobile } from "react-device-detect";
import { isPWA } from "@/utils/isPWA";
import { isInIframe } from "@/utils/isIFrame";
import { useTranslation } from "react-i18next";
import WsMessageFeed from "@/components/ws/WsMessageFeed";
function Logs() {
const { t } = useTranslation(["views/system"]);
const [logService, setLogService] = useState<LogType>("frigate");
const isWebsocket = logService === "websocket";
const tabsRef = useRef<HTMLDivElement | null>(null);
const lazyLogWrapperRef = useRef<HTMLDivElement>(null);
const [logs, setLogs] = useState<string[]>([]);
@ -216,6 +218,12 @@ function Logs() {
}, [logService, filterSeverity, t]);
useEffect(() => {
if (isWebsocket) {
setIsLoading(false);
setLogs([]);
return;
}
setIsLoading(true);
setLogs([]);
lastFetchedIndexRef.current = -1;
@ -494,116 +502,128 @@ function Logs() {
data-nav-item={item}
aria-label={`Select ${item}`}
>
<div className="smart-capitalize">{item}</div>
<div
className={item !== "websocket" ? "smart-capitalize" : ""}
>
{item === "websocket" ? t("logs.websocket.label") : item}
</div>
</ToggleGroupItem>
))}
</ToggleGroup>
<ScrollBar orientation="horizontal" className="h-0" />
</div>
</ScrollArea>
<div className="flex items-center gap-2">
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.copy.label")}
size="sm"
onClick={handleCopyLogs}
>
<FaCopy className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("logs.copy.label")}
</div>
</Button>
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.download.label")}
size="sm"
onClick={handleDownloadLogs}
>
<FaDownload className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("button.download", { ns: "common" })}
</div>
</Button>
<LogSettingsButton
selectedLabels={filterSeverity}
updateLabelFilter={setFilterSeverity}
logSettings={logSettings}
setLogSettings={setLogSettings}
/>
</div>
</div>
<div className="relative my-2 flex size-full flex-col overflow-hidden whitespace-pre-wrap rounded-md border border-secondary bg-background_alt font-mono text-xs sm:p-1">
<div className="grid grid-cols-5 *:px-0 *:py-3 *:text-sm *:text-primary/40 md:grid-cols-12">
<div className="col-span-3 lg:col-span-2">
<div className="flex w-full flex-row items-center">
<div className="ml-1 min-w-16 smart-capitalize lg:min-w-20">
{t("logs.type.label")}
{!isWebsocket && (
<div className="flex items-center gap-2">
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.copy.label")}
size="sm"
onClick={handleCopyLogs}
>
<FaCopy className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("logs.copy.label")}
</div>
<div className="mr-3">{t("logs.type.timestamp")}</div>
</Button>
<Button
className="flex items-center justify-between gap-2"
aria-label={t("logs.download.label")}
size="sm"
onClick={handleDownloadLogs}
>
<FaDownload className="text-secondary-foreground" />
<div className="hidden text-primary md:block">
{t("button.download", { ns: "common" })}
</div>
</Button>
<LogSettingsButton
selectedLabels={filterSeverity}
updateLabelFilter={setFilterSeverity}
logSettings={logSettings}
setLogSettings={setLogSettings}
/>
</div>
)}
</div>
{isWebsocket ? (
<div className="my-2 flex size-full flex-col overflow-hidden rounded-md border border-secondary bg-background_alt">
<WsMessageFeed />
</div>
) : (
<div className="relative my-2 flex size-full flex-col overflow-hidden whitespace-pre-wrap rounded-md border border-secondary bg-background_alt font-mono text-xs sm:p-1">
<div className="grid grid-cols-5 *:px-0 *:py-3 *:text-sm *:text-primary/40 md:grid-cols-12">
<div className="col-span-3 lg:col-span-2">
<div className="flex w-full flex-row items-center">
<div className="ml-1 min-w-16 smart-capitalize lg:min-w-20">
{t("logs.type.label")}
</div>
<div className="mr-3">{t("logs.type.timestamp")}</div>
</div>
</div>
<div
className={cn(
"flex items-center",
logService == "frigate" ? "col-span-2" : "col-span-1",
)}
>
{t("logs.type.tag")}
</div>
<div
className={cn(
"col-span-5 flex items-center",
logService == "frigate"
? "md:col-span-7 lg:col-span-8"
: "md:col-span-8 lg:col-span-9",
)}
>
<div className="flex flex-1">{t("logs.type.message")}</div>
</div>
</div>
<div
className={cn(
"flex items-center",
logService == "frigate" ? "col-span-2" : "col-span-1",
)}
>
{t("logs.type.tag")}
</div>
<div
className={cn(
"col-span-5 flex items-center",
logService == "frigate"
? "md:col-span-7 lg:col-span-8"
: "md:col-span-8 lg:col-span-9",
)}
>
<div className="flex flex-1">{t("logs.type.message")}</div>
</div>
</div>
<div ref={lazyLogWrapperRef} className="size-full">
{isLoading ? (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
) : (
<EnhancedScrollFollow
startFollowing={!isLoading}
onCustomScroll={handleScroll}
render={({ follow, onScroll }) => (
<>
{follow && !logSettings.disableStreaming && (
<div className="absolute right-1 top-3">
<Tooltip>
<TooltipTrigger>
<MdCircle className="mr-2 size-2 animate-pulse cursor-default text-selected shadow-selected drop-shadow-md" />
</TooltipTrigger>
<TooltipContent>{t("logs.tips")}</TooltipContent>
</Tooltip>
</div>
)}
<LazyLog
ref={lazyLogRef}
enableLineNumbers={false}
selectableLines
lineClassName="text-primary bg-background"
highlightLineClassName="bg-primary/20"
onRowClick={handleRowClick}
formatPart={formatPart}
text={logs.join("\n")}
follow={follow}
onScroll={onScroll}
loadingComponent={
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
}
loading={isLoading}
/>
</>
)}
/>
)}
<div ref={lazyLogWrapperRef} className="size-full">
{isLoading ? (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
) : (
<EnhancedScrollFollow
startFollowing={!isLoading}
onCustomScroll={handleScroll}
render={({ follow, onScroll }) => (
<>
{follow && !logSettings.disableStreaming && (
<div className="absolute right-1 top-3">
<Tooltip>
<TooltipTrigger>
<MdCircle className="mr-2 size-2 animate-pulse cursor-default text-selected shadow-selected drop-shadow-md" />
</TooltipTrigger>
<TooltipContent>{t("logs.tips")}</TooltipContent>
</Tooltip>
</div>
)}
<LazyLog
ref={lazyLogRef}
enableLineNumbers={false}
selectableLines
lineClassName="text-primary bg-background"
highlightLineClassName="bg-primary/20"
onRowClick={handleRowClick}
formatPart={formatPart}
text={logs.join("\n")}
follow={follow}
onScroll={onScroll}
loadingComponent={
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
}
loading={isLoading}
/>
</>
)}
/>
)}
</div>
</div>
</div>
)}
</div>
);
}

565
web/src/pages/Replay.tsx Normal file
View File

@ -0,0 +1,565 @@
import { useCallback, useEffect, useMemo, useState } from "react";
import { useNavigate } from "react-router-dom";
import { Trans, useTranslation } from "react-i18next";
import useSWR from "swr";
import axios from "axios";
import { toast } from "sonner";
import AutoUpdatingCameraImage from "@/components/camera/AutoUpdatingCameraImage";
import { Button, buttonVariants } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { useCameraActivity } from "@/hooks/use-camera-activity";
import { cn } from "@/lib/utils";
import Heading from "@/components/ui/heading";
import { Toaster } from "@/components/ui/sonner";
import { CameraConfig, FrigateConfig } from "@/types/frigateConfig";
import { getIconForLabel } from "@/utils/iconUtil";
import { getTranslatedLabel } from "@/utils/i18n";
import { ObjectType } from "@/types/ws";
import WsMessageFeed from "@/components/ws/WsMessageFeed";
import { LuInfo } from "react-icons/lu";
import { MdReplay } from "react-icons/md";
// Shape of the backend's `debug_replay/status` response.
type DebugReplayStatus = {
  active: boolean;
  replay_camera: string | null;
  source_camera: string | null;
  start_time: number | null;
  end_time: number | null;
  live_ready: boolean;
};
// Debug overlay toggles for the replay camera feed.
type DebugOptions = {
  bbox: boolean;
  timestamp: boolean;
  zones: boolean;
  mask: boolean;
  motion: boolean;
  regions: boolean;
  paths: boolean;
};
// Initial overlay state: bounding boxes and motion on, everything else off.
const DEFAULT_OPTIONS: DebugOptions = {
  bbox: true,
  timestamp: false,
  zones: false,
  mask: false,
  motion: true,
  regions: false,
  paths: false,
};
// Stable iteration order for rendering the toggles and building the
// debug-feed query string.
const DEBUG_OPTION_KEYS: (keyof DebugOptions)[] = [
  "bbox",
  "timestamp",
  "zones",
  "mask",
  "motion",
  "regions",
  "paths",
];
// Maps each option to its i18n key under the views/settings namespace.
const DEBUG_OPTION_I18N_KEY: Record<keyof DebugOptions, string> = {
  bbox: "boundingBoxes",
  timestamp: "timestamp",
  zones: "zones",
  mask: "mask",
  motion: "motion",
  regions: "regions",
  paths: "paths",
};
// How long to show the initializing skeleton before revealing the feed
// area regardless of live_ready.
const REPLAY_INIT_SKELETON_TIMEOUT_MS = 8000;
export default function Replay() {
const { t } = useTranslation(["views/replay", "views/settings", "common"]);
const navigate = useNavigate();
const {
data: status,
mutate: refreshStatus,
isLoading,
} = useSWR<DebugReplayStatus>("debug_replay/status", {
refreshInterval: 1000,
});
const [isInitializing, setIsInitializing] = useState(true);
// Refresh status immediately on mount to avoid showing "no session" briefly
useEffect(() => {
const initializeStatus = async () => {
await refreshStatus();
setIsInitializing(false);
};
initializeStatus();
}, [refreshStatus]);
useEffect(() => {
if (status?.live_ready) {
setShowReplayInitSkeleton(false);
}
}, [status?.live_ready]);
const [options, setOptions] = useState<DebugOptions>(DEFAULT_OPTIONS);
const [isStopping, setIsStopping] = useState(false);
const searchParams = useMemo(() => {
const params = new URLSearchParams();
for (const key of DEBUG_OPTION_KEYS) {
params.set(key, options[key] ? "1" : "0");
}
return params;
}, [options]);
const handleSetOption = useCallback(
(key: keyof DebugOptions, value: boolean) => {
setOptions((prev) => ({ ...prev, [key]: value }));
},
[],
);
const handleStop = useCallback(() => {
setIsStopping(true);
axios
.post("debug_replay/stop")
.then(() => {
toast.success(t("dialog.toast.stopped"), {
position: "top-center",
});
refreshStatus();
navigate("/review");
})
.catch((error) => {
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
toast.error(t("dialog.toast.stopError", { error: errorMessage }), {
position: "top-center",
});
})
.finally(() => {
setIsStopping(false);
});
}, [navigate, refreshStatus, t]);
// Camera activity for the replay camera
const { data: config } = useSWR<FrigateConfig>("config", {
revalidateOnFocus: false,
});
const replayCameraName = status?.replay_camera ?? "";
const replayCameraConfig = replayCameraName
? config?.cameras?.[replayCameraName]
: undefined;
const { objects } = useCameraActivity(replayCameraConfig);
const [showReplayInitSkeleton, setShowReplayInitSkeleton] = useState(false);
useEffect(() => {
if (!status?.active || !status.replay_camera) {
setShowReplayInitSkeleton(false);
return;
}
setShowReplayInitSkeleton(true);
const timeout = window.setTimeout(() => {
setShowReplayInitSkeleton(false);
}, REPLAY_INIT_SKELETON_TIMEOUT_MS);
return () => {
window.clearTimeout(timeout);
};
}, [status?.active, status?.replay_camera]);
useEffect(() => {
if (status?.live_ready) {
setShowReplayInitSkeleton(false);
}
}, [status?.live_ready]);
// Format time range for display
const timeRangeDisplay = useMemo(() => {
if (!status?.start_time || !status?.end_time) return "";
const start = new Date(status.start_time * 1000).toLocaleString();
const end = new Date(status.end_time * 1000).toLocaleString();
return `${start}${end}`;
}, [status]);
// Show loading state
if (isInitializing || (isLoading && !status?.active)) {
return (
<div className="flex size-full items-center justify-center">
<ActivityIndicator />
</div>
);
}
// No active session
if (!status?.active) {
return (
<div className="flex size-full flex-col items-center justify-center gap-4 p-8">
<MdReplay className="size-12" />
<Heading as="h2" className="text-center">
{t("page.noSession")}
</Heading>
<p className="max-w-md text-center text-muted-foreground">
{t("page.noSessionDesc")}
</p>
<Button variant="default" onClick={() => navigate("/review")}>
{t("page.goToRecordings")}
</Button>
</div>
);
}
return (
<div className="flex size-full flex-col overflow-hidden">
<Toaster position="top-center" closeButton={true} />
{/* Top bar */}
<div className="flex items-center justify-between p-2 md:p-4">
<div className="flex flex-col gap-1">
<div className="flex items-center gap-2">
<Heading as="h3">{t("title")}</Heading>
</div>
<div className="flex flex-wrap items-center gap-2 text-sm text-muted-foreground">
<span>
{t("page.sourceCamera")}: <strong>{status.source_camera}</strong>
</span>
{timeRangeDisplay && (
<>
<span className="hidden md:inline"></span>
<span className="hidden md:inline">{timeRangeDisplay}</span>
</>
)}
</div>
</div>
<AlertDialog>
<AlertDialogTrigger asChild>
<Button
variant="destructive"
size="sm"
className="flex items-center gap-2 text-white"
disabled={isStopping}
>
{isStopping && <ActivityIndicator className="size-4" />}
{t("page.stopReplay")}
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>{t("page.confirmStop.title")}</AlertDialogTitle>
<AlertDialogDescription>
{t("page.confirmStop.description")}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>
{t("page.confirmStop.cancel")}
</AlertDialogCancel>
<AlertDialogAction
onClick={handleStop}
className={cn(
buttonVariants({ variant: "destructive" }),
"text-white",
)}
>
{t("page.confirmStop.confirm")}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
{/* Main content */}
<div className="mt-1 flex flex-1 flex-col overflow-hidden pb-2 md:flex-row">
{/* Camera feed */}
<div className="flex max-h-[40%] px-2 md:h-dvh md:max-h-full md:w-7/12 md:grow md:px-4">
{isStopping ? (
<div className="flex size-full items-center justify-center rounded-lg bg-background_alt">
<div className="flex flex-col items-center justify-center gap-2">
<ActivityIndicator className="size-8" />
<div className="text-secondary-foreground">
{t("page.stoppingReplay")}
</div>
</div>
</div>
) : (
status.replay_camera && (
<div className="relative size-full min-h-10">
<AutoUpdatingCameraImage
className="size-full"
cameraClasses="relative w-full h-full flex flex-col justify-start"
searchParams={searchParams}
camera={status.replay_camera}
showFps={false}
/>
{showReplayInitSkeleton && (
<div className="pointer-events-none absolute inset-0 z-10 size-full rounded-lg bg-background">
<Skeleton className="size-full rounded-lg" />
<div className="absolute left-1/2 top-1/2 flex -translate-x-1/2 -translate-y-1/2 flex-col items-center justify-center gap-2">
<ActivityIndicator className="size-8" />
<div className="text-secondary-foreground">
{t("page.initializingReplay")}
</div>
</div>
</div>
)}
</div>
)
)}
</div>
{/* Side panel */}
<div className="scrollbar-container order-last mb-2 mt-2 flex h-full w-full flex-col overflow-y-auto rounded-lg border-[1px] border-secondary-foreground bg-background_alt p-2 md:order-none md:mb-0 md:mr-2 md:mt-0 md:w-4/12">
<Heading as="h4" className="mb-2">
{t("title")}
</Heading>
<div className="mb-5 space-y-3 text-sm text-muted-foreground">
<p>{t("description")}</p>
</div>
<Tabs defaultValue="debug" className="flex h-full w-full flex-col">
<TabsList className="grid w-full grid-cols-3">
<TabsTrigger value="debug">
{t("debug.debugging", { ns: "views/settings" })}
</TabsTrigger>
<TabsTrigger value="objects">{t("page.objects")}</TabsTrigger>
<TabsTrigger value="messages">
{t("websocket_messages")}
</TabsTrigger>
</TabsList>
<TabsContent value="debug" className="mt-2">
<div className="mt-2 space-y-6">
<div className="my-2.5 flex flex-col gap-2.5">
{DEBUG_OPTION_KEYS.map((key) => {
const i18nKey = DEBUG_OPTION_I18N_KEY[key];
return (
<div
key={key}
className="flex w-full flex-row items-center justify-between"
>
<div className="mb-1 flex flex-col">
<div className="flex items-center gap-2">
<Label
className="mb-0 cursor-pointer text-primary smart-capitalize"
htmlFor={`debug-${key}`}
>
{t(`debug.${i18nKey}.title`, {
ns: "views/settings",
})}
</Label>
{(key === "bbox" ||
key === "motion" ||
key === "regions" ||
key === "paths") && (
<Popover>
<PopoverTrigger asChild>
<div className="cursor-pointer p-0">
<LuInfo className="size-4" />
<span className="sr-only">
{t("button.info", { ns: "common" })}
</span>
</div>
</PopoverTrigger>
<PopoverContent className="w-80 text-sm">
{key === "bbox" ? (
<>
<p className="mb-2">
<strong>
{t(
"debug.boundingBoxes.colors.label",
{
ns: "views/settings",
},
)}
</strong>
</p>
<ul className="list-disc space-y-1 pl-5">
<Trans ns="views/settings">
debug.boundingBoxes.colors.info
</Trans>
</ul>
</>
) : (
<Trans ns="views/settings">
{`debug.${i18nKey}.tips`}
</Trans>
)}
</PopoverContent>
</Popover>
)}
</div>
<div className="mt-1 text-xs text-muted-foreground">
{t(`debug.${i18nKey}.desc`, {
ns: "views/settings",
})}
</div>
</div>
<Switch
id={`debug-${key}`}
className="ml-1"
checked={options[key]}
onCheckedChange={(checked) =>
handleSetOption(key, checked)
}
/>
</div>
);
})}
</div>
</div>
</TabsContent>
<TabsContent value="objects" className="mt-2">
<ObjectList
cameraConfig={replayCameraConfig}
objects={objects}
config={config}
/>
</TabsContent>
<TabsContent
value="messages"
className="mt-2 flex min-h-0 flex-1 flex-col"
>
<div className="flex h-full flex-col overflow-hidden rounded-md border border-secondary">
<WsMessageFeed
maxSize={200}
lockedCamera={status.replay_camera ?? undefined}
showCameraBadge={false}
/>
</div>
</TabsContent>
</Tabs>
</div>
</div>
</div>
);
}
// Props for the live tracked-object list shown in the replay side panel.
type ObjectListProps = {
  cameraConfig?: CameraConfig;
  objects?: ObjectType[];
  config?: FrigateConfig;
};
// Renders the currently tracked objects for the replay camera: label icon
// (tinted by the model colormap, gray when stationary), score, ratio, and
// area as pixels plus percentage of the detect resolution.
function ObjectList({ cameraConfig, objects, config }: ObjectListProps) {
  const { t } = useTranslation(["views/settings"]);
  const colormap = useMemo(() => {
    if (!config) {
      return;
    }
    return config.model?.colormap;
  }, [config]);
  // Colormap entries appear to be stored BGR and are reversed into an RGB
  // css color here — confirm against the model colormap producer.
  const getColorForObjectName = useCallback(
    (objectName: string) => {
      return colormap && colormap[objectName]
        ? `rgb(${colormap[objectName][2]}, ${colormap[objectName][1]}, ${colormap[objectName][0]})`
        : "rgb(128, 128, 128)";
    },
    [colormap],
  );
  if (!objects || objects.length === 0) {
    return (
      <div className="p-3 text-center text-sm text-muted-foreground">
        {t("debug.noObjects", { ns: "views/settings" })}
      </div>
    );
  }
  return (
    <div className="flex w-full flex-col gap-2">
      {objects.map((obj: ObjectType) => {
        return (
          <div
            key={obj.id}
            className="flex flex-col rounded-lg bg-secondary/30 p-2"
          >
            <div className="flex flex-row items-center gap-3 pb-1">
              <div
                className="rounded-lg p-2"
                style={{
                  backgroundColor: obj.stationary
                    ? "rgb(110,110,110)"
                    : getColorForObjectName(obj.label),
                }}
              >
                {getIconForLabel(obj.label, "size-4 text-white")}
              </div>
              <div className="text-sm font-medium">
                {getTranslatedLabel(obj.label)}
              </div>
            </div>
            <div className="flex flex-col gap-1 pl-1 text-xs text-primary-variant">
              <div className="flex items-center justify-between">
                <span>
                  {t("debug.objectShapeFilterDrawing.score", {
                    ns: "views/settings",
                  })}
                  :
                </span>
                <span className="text-primary">
                  {obj.score ? (obj.score * 100).toFixed(1) : "-"}%
                </span>
              </div>
              {obj.ratio && (
                <div className="flex items-center justify-between">
                  <span>
                    {t("debug.objectShapeFilterDrawing.ratio", {
                      ns: "views/settings",
                    })}
                    :
                  </span>
                  <span className="text-primary">{obj.ratio.toFixed(2)}</span>
                </div>
              )}
              {obj.area && cameraConfig && (
                <div className="flex items-center justify-between">
                  <span>
                    {t("debug.objectShapeFilterDrawing.area", {
                      ns: "views/settings",
                    })}
                    :
                  </span>
                  <span className="text-primary">
                    {obj.area} px (
                    {(
                      (obj.area /
                        (cameraConfig.detect.width *
                          cameraConfig.detect.height)) *
                      100
                    ).toFixed(2)}
                    %)
                  </span>
                </div>
              )}
            </div>
          </div>
        );
      })}
    </div>
  );
}

View File

@ -12,7 +12,7 @@ export type LogLine = {
content: string;
};
export const logTypes = ["frigate", "go2rtc", "nginx"] as const;
export const logTypes = ["frigate", "go2rtc", "nginx", "websocket"] as const;
export type LogType = (typeof logTypes)[number];
export type LogSettingsType = {

View File

@ -148,3 +148,15 @@ export function detectCameraAudioFeatures(
audioOutput: !!audioOutput,
};
}
// Reserved name prefix used for internally-created debug replay cameras.
const REPLAY_CAMERA_PREFIX = "_replay_";
/**
 * Determine whether a camera name belongs to a debug replay session.
 *
 * Replay cameras carry a reserved prefix, so a prefix test suffices.
 *
 * @param name - The camera name to check
 * @returns true if the camera is a replay camera
 */
export function isReplayCamera(name: string): boolean {
  return name.indexOf(REPLAY_CAMERA_PREFIX) === 0;
}

53
web/src/utils/wsUtil.ts Normal file
View File

@ -0,0 +1,53 @@
import { WsFeedMessage } from "@/api/ws";
// Topics describing object/review events; they never encode a camera name
// in the topic string itself.
const EVENT_TOPICS = new Set([
  "events",
  "reviews",
  "tracked_object_update",
  "triggers",
]);
// System-wide status topics with no camera association.
const SYSTEM_TOPICS = new Set([
  "stats",
  "model_state",
  "job_state",
  "embeddings_reindex_progress",
  "audio_transcription_state",
  "birdseye_layout",
]);
// Best-effort extraction of the camera a websocket message relates to.
// First checks for camera-scoped topics of the form "{camera}/motion",
// "{camera}/audio/rms", etc.; otherwise inspects the payload for a camera
// field. Returns null when no camera can be determined.
export function extractCameraName(message: WsFeedMessage): string | null {
  const { topic } = message;
  const segments = topic.split("/");
  // Known non-camera topics are excluded explicitly; everything else with
  // at least one slash is treated as "{camera}/...".
  const isGlobalTopic =
    EVENT_TOPICS.has(topic) ||
    SYSTEM_TOPICS.has(topic) ||
    topic === "camera_activity" ||
    topic === "audio_detections" ||
    topic === "restart" ||
    topic === "notification_test";
  if (segments.length >= 2 && !isGlobalTopic) {
    return segments[0];
  }
  // Fall back to inspecting the payload for a camera field.
  try {
    const parsed =
      typeof message.payload === "string"
        ? JSON.parse(message.payload)
        : message.payload;
    if (parsed !== null && typeof parsed === "object") {
      if ("camera" in parsed) {
        return parsed.camera as string;
      }
      if ("after" in parsed && parsed.after?.camera) {
        return parsed.after.camera as string;
      }
      if ("before" in parsed && parsed.before?.camera) {
        return parsed.before.camera as string;
      }
    }
  } catch {
    // Payload was not valid JSON; no camera information available.
  }
  return null;
}

View File

@ -1,6 +1,8 @@
import ReviewCard from "@/components/card/ReviewCard";
import ReviewFilterGroup from "@/components/filter/ReviewFilterGroup";
import DebugReplayDialog from "@/components/overlay/DebugReplayDialog";
import ExportDialog from "@/components/overlay/ExportDialog";
import ActionsDropdown from "@/components/overlay/ActionsDropdown";
import PreviewPlayer, {
PreviewController,
} from "@/components/player/PreviewPlayer";
@ -199,6 +201,11 @@ export function RecordingView({
const [exportRange, setExportRange] = useState<TimeRange>();
const [showExportPreview, setShowExportPreview] = useState(false);
// debug replay
const [debugReplayMode, setDebugReplayMode] = useState<ExportMode>("none");
const [debugReplayRange, setDebugReplayRange] = useState<TimeRange>();
// move to next clip
const onClipEnded = useCallback(() => {
@ -269,7 +276,7 @@ export function RecordingView({
);
useEffect(() => {
if (scrubbing || exportRange) {
if (scrubbing || exportRange || debugReplayRange) {
if (
currentTime > currentTimeRange.before + 60 ||
currentTime < currentTimeRange.after - 60
@ -591,6 +598,23 @@ export function RecordingView({
selected={mainCamera}
onSelectCamera={onSelectCamera}
/>
{isDesktop && (
<DebugReplayDialog
camera={mainCamera}
currentTime={currentTime}
latestTime={timeRange.before}
mode={debugReplayMode}
range={debugReplayRange}
setRange={(range: TimeRange | undefined) => {
setDebugReplayRange(range);
if (range != undefined) {
mainControllerRef.current?.pause();
}
}}
setMode={setDebugReplayMode}
/>
)}
{isDesktop && (
<ExportDialog
camera={mainCamera}
@ -639,6 +663,28 @@ export function RecordingView({
setMotionOnly={() => {}}
/>
)}
{isDesktop && (
<ActionsDropdown
onDebugReplayClick={() => {
const now = new Date(timeRange.before * 1000);
now.setHours(now.getHours() - 1);
setDebugReplayRange({
after: now.getTime() / 1000,
before: timeRange.before,
});
setDebugReplayMode("select");
}}
onExportClick={() => {
const now = new Date(timeRange.before * 1000);
now.setHours(now.getHours() - 1);
setExportRange({
before: timeRange.before,
after: now.getTime() / 1000,
});
setExportMode("select");
}}
/>
)}
{isDesktop ? (
<ToggleGroup
className="*:rounded-md *:px-3 *:py-4"
@ -688,6 +734,16 @@ export function RecordingView({
showExportPreview={showExportPreview}
allLabels={reviewFilterList.labels}
allZones={reviewFilterList.zones}
debugReplayMode={debugReplayMode}
debugReplayRange={debugReplayRange}
setDebugReplayMode={setDebugReplayMode}
setDebugReplayRange={(range: TimeRange | undefined) => {
setDebugReplayRange(range);
if (range != undefined) {
mainControllerRef.current?.pause();
}
}}
onUpdateFilter={updateFilter}
setRange={setExportRange}
setMode={setExportMode}
@ -758,7 +814,9 @@ export function RecordingView({
timeRange={currentTimeRange}
cameraPreviews={allPreviews ?? []}
startTimestamp={playbackStart}
hotKeys={exportMode != "select"}
hotKeys={
exportMode != "select" && debugReplayMode != "select"
}
fullscreen={fullscreen}
onTimestampUpdate={(timestamp) => {
setPlayerTime(timestamp);
@ -772,7 +830,11 @@ export function RecordingView({
onControllerReady={(controller) => {
mainControllerRef.current = controller;
}}
isScrubbing={scrubbing || exportMode == "timeline"}
isScrubbing={
scrubbing ||
exportMode == "timeline" ||
debugReplayMode == "timeline"
}
supportsFullscreen={supportsFullScreen}
setFullResolution={setFullResolution}
toggleFullscreen={toggleFullscreen}
@ -840,18 +902,29 @@ export function RecordingView({
contentRef={contentRef}
mainCamera={mainCamera}
timelineType={
(exportRange == undefined ? timelineType : "timeline") ??
"timeline"
(exportRange == undefined && debugReplayRange == undefined
? timelineType
: "timeline") ?? "timeline"
}
timeRange={timeRange}
mainCameraReviewItems={mainCameraReviewItems}
activeReviewItem={activeReviewItem}
currentTime={currentTime}
exportRange={exportMode == "timeline" ? exportRange : undefined}
exportRange={
exportMode == "timeline"
? exportRange
: debugReplayMode == "timeline"
? debugReplayRange
: undefined
}
setCurrentTime={setCurrentTime}
manuallySetCurrentTime={manuallySetCurrentTime}
setScrubbing={setScrubbing}
setExportRange={setExportRange}
setExportRange={
debugReplayMode == "timeline"
? setDebugReplayRange
: setExportRange
}
onAnalysisOpen={onAnalysisOpen}
isPlaying={mainControllerRef?.current?.isPlaying() ?? false}
/>

View File

@ -4,7 +4,7 @@ import { defineConfig } from "vite";
import react from "@vitejs/plugin-react-swc";
import monacoEditorPlugin from "vite-plugin-monaco-editor";
const proxyHost = process.env.PROXY_HOST || "localhost:5000";
const proxyHost = process.env.PROXY_HOST || "localhost:5000";
// https://vitejs.dev/config/
export default defineConfig({