mirror of
https://github.com/blakeblackshear/frigate.git
synced 2026-05-09 15:05:26 +03:00
Compare commits
2 Commits
aa0b082184
...
1c95eb2c39
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1c95eb2c39 | ||
|
|
26744efb1e |
@ -1,8 +1,7 @@
|
|||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import Optional, Union
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from pydantic.json_schema import SkipJsonSchema
|
|
||||||
|
|
||||||
|
|
||||||
class Extension(str, Enum):
|
class Extension(str, Enum):
|
||||||
@ -48,15 +47,3 @@ class MediaMjpegFeedQueryParams(BaseModel):
|
|||||||
mask: Optional[int] = None
|
mask: Optional[int] = None
|
||||||
motion: Optional[int] = None
|
motion: Optional[int] = None
|
||||||
regions: Optional[int] = None
|
regions: Optional[int] = None
|
||||||
|
|
||||||
|
|
||||||
class MediaRecordingsSummaryQueryParams(BaseModel):
|
|
||||||
timezone: str = "utc"
|
|
||||||
cameras: Optional[str] = "all"
|
|
||||||
|
|
||||||
|
|
||||||
class MediaRecordingsAvailabilityQueryParams(BaseModel):
|
|
||||||
cameras: str = "all"
|
|
||||||
before: Union[float, SkipJsonSchema[None]] = None
|
|
||||||
after: Union[float, SkipJsonSchema[None]] = None
|
|
||||||
scale: int = 30
|
|
||||||
|
|||||||
21
frigate/api/defs/query/recordings_query_parameters.py
Normal file
21
frigate/api/defs/query/recordings_query_parameters.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from typing import Optional, Union
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from pydantic.json_schema import SkipJsonSchema
|
||||||
|
|
||||||
|
|
||||||
|
class MediaRecordingsSummaryQueryParams(BaseModel):
|
||||||
|
timezone: str = "utc"
|
||||||
|
cameras: Optional[str] = "all"
|
||||||
|
|
||||||
|
|
||||||
|
class MediaRecordingsAvailabilityQueryParams(BaseModel):
|
||||||
|
cameras: str = "all"
|
||||||
|
before: Union[float, SkipJsonSchema[None]] = None
|
||||||
|
after: Union[float, SkipJsonSchema[None]] = None
|
||||||
|
scale: int = 30
|
||||||
|
|
||||||
|
|
||||||
|
class RecordingsDeleteQueryParams(BaseModel):
|
||||||
|
keep: Optional[str] = None
|
||||||
|
cameras: Optional[str] = "all"
|
||||||
@ -3,13 +3,14 @@ from enum import Enum
|
|||||||
|
|
||||||
class Tags(Enum):
|
class Tags(Enum):
|
||||||
app = "App"
|
app = "App"
|
||||||
|
auth = "Auth"
|
||||||
camera = "Camera"
|
camera = "Camera"
|
||||||
preview = "Preview"
|
events = "Events"
|
||||||
|
export = "Export"
|
||||||
|
classification = "Classification"
|
||||||
logs = "Logs"
|
logs = "Logs"
|
||||||
media = "Media"
|
media = "Media"
|
||||||
notifications = "Notifications"
|
notifications = "Notifications"
|
||||||
|
preview = "Preview"
|
||||||
|
recordings = "Recordings"
|
||||||
review = "Review"
|
review = "Review"
|
||||||
export = "Export"
|
|
||||||
events = "Events"
|
|
||||||
classification = "Classification"
|
|
||||||
auth = "Auth"
|
|
||||||
|
|||||||
@ -22,6 +22,7 @@ from frigate.api import (
|
|||||||
media,
|
media,
|
||||||
notification,
|
notification,
|
||||||
preview,
|
preview,
|
||||||
|
record,
|
||||||
review,
|
review,
|
||||||
)
|
)
|
||||||
from frigate.api.auth import get_jwt_secret, limiter, require_admin_by_default
|
from frigate.api.auth import get_jwt_secret, limiter, require_admin_by_default
|
||||||
@ -128,6 +129,7 @@ def create_fastapi_app(
|
|||||||
app.include_router(export.router)
|
app.include_router(export.router)
|
||||||
app.include_router(event.router)
|
app.include_router(event.router)
|
||||||
app.include_router(media.router)
|
app.include_router(media.router)
|
||||||
|
app.include_router(record.router)
|
||||||
# App Properties
|
# App Properties
|
||||||
app.frigate_config = frigate_config
|
app.frigate_config = frigate_config
|
||||||
app.embeddings = embeddings
|
app.embeddings = embeddings
|
||||||
|
|||||||
@ -8,9 +8,8 @@ import os
|
|||||||
import subprocess as sp
|
import subprocess as sp
|
||||||
import time
|
import time
|
||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
from functools import reduce
|
|
||||||
from pathlib import Path as FilePath
|
from pathlib import Path as FilePath
|
||||||
from typing import Any, List
|
from typing import Any
|
||||||
from urllib.parse import unquote
|
from urllib.parse import unquote
|
||||||
|
|
||||||
import cv2
|
import cv2
|
||||||
@ -19,12 +18,11 @@ import pytz
|
|||||||
from fastapi import APIRouter, Depends, Path, Query, Request, Response
|
from fastapi import APIRouter, Depends, Path, Query, Request, Response
|
||||||
from fastapi.responses import FileResponse, JSONResponse, StreamingResponse
|
from fastapi.responses import FileResponse, JSONResponse, StreamingResponse
|
||||||
from pathvalidate import sanitize_filename
|
from pathvalidate import sanitize_filename
|
||||||
from peewee import DoesNotExist, fn, operator
|
from peewee import DoesNotExist, fn
|
||||||
from tzlocal import get_localzone_name
|
from tzlocal import get_localzone_name
|
||||||
|
|
||||||
from frigate.api.auth import (
|
from frigate.api.auth import (
|
||||||
allow_any_authenticated,
|
allow_any_authenticated,
|
||||||
get_allowed_cameras_for_filter,
|
|
||||||
require_camera_access,
|
require_camera_access,
|
||||||
)
|
)
|
||||||
from frigate.api.defs.query.media_query_parameters import (
|
from frigate.api.defs.query.media_query_parameters import (
|
||||||
@ -32,8 +30,6 @@ from frigate.api.defs.query.media_query_parameters import (
|
|||||||
MediaEventsSnapshotQueryParams,
|
MediaEventsSnapshotQueryParams,
|
||||||
MediaLatestFrameQueryParams,
|
MediaLatestFrameQueryParams,
|
||||||
MediaMjpegFeedQueryParams,
|
MediaMjpegFeedQueryParams,
|
||||||
MediaRecordingsAvailabilityQueryParams,
|
|
||||||
MediaRecordingsSummaryQueryParams,
|
|
||||||
)
|
)
|
||||||
from frigate.api.defs.tags import Tags
|
from frigate.api.defs.tags import Tags
|
||||||
from frigate.camera.state import CameraState
|
from frigate.camera.state import CameraState
|
||||||
@ -44,13 +40,11 @@ from frigate.const import (
|
|||||||
INSTALL_DIR,
|
INSTALL_DIR,
|
||||||
MAX_SEGMENT_DURATION,
|
MAX_SEGMENT_DURATION,
|
||||||
PREVIEW_FRAME_TYPE,
|
PREVIEW_FRAME_TYPE,
|
||||||
RECORD_DIR,
|
|
||||||
)
|
)
|
||||||
from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
|
from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
|
||||||
from frigate.track.object_processing import TrackedObjectProcessor
|
from frigate.track.object_processing import TrackedObjectProcessor
|
||||||
from frigate.util.file import get_event_thumbnail_bytes
|
from frigate.util.file import get_event_thumbnail_bytes
|
||||||
from frigate.util.image import get_image_from_recording
|
from frigate.util.image import get_image_from_recording
|
||||||
from frigate.util.time import get_dst_transitions
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -397,333 +391,6 @@ async def submit_recording_snapshot_to_plus(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@router.get("/recordings/storage", dependencies=[Depends(allow_any_authenticated())])
|
|
||||||
def get_recordings_storage_usage(request: Request):
|
|
||||||
recording_stats = request.app.stats_emitter.get_latest_stats()["service"][
|
|
||||||
"storage"
|
|
||||||
][RECORD_DIR]
|
|
||||||
|
|
||||||
if not recording_stats:
|
|
||||||
return JSONResponse({})
|
|
||||||
|
|
||||||
total_mb = recording_stats["total"]
|
|
||||||
|
|
||||||
camera_usages: dict[str, dict] = (
|
|
||||||
request.app.storage_maintainer.calculate_camera_usages()
|
|
||||||
)
|
|
||||||
|
|
||||||
for camera_name in camera_usages.keys():
|
|
||||||
if camera_usages.get(camera_name, {}).get("usage"):
|
|
||||||
camera_usages[camera_name]["usage_percent"] = (
|
|
||||||
camera_usages.get(camera_name, {}).get("usage", 0) / total_mb
|
|
||||||
) * 100
|
|
||||||
|
|
||||||
return JSONResponse(content=camera_usages)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/recordings/summary", dependencies=[Depends(allow_any_authenticated())])
|
|
||||||
def all_recordings_summary(
|
|
||||||
request: Request,
|
|
||||||
params: MediaRecordingsSummaryQueryParams = Depends(),
|
|
||||||
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
|
||||||
):
|
|
||||||
"""Returns true/false by day indicating if recordings exist"""
|
|
||||||
|
|
||||||
cameras = params.cameras
|
|
||||||
if cameras != "all":
|
|
||||||
requested = set(unquote(cameras).split(","))
|
|
||||||
filtered = requested.intersection(allowed_cameras)
|
|
||||||
if not filtered:
|
|
||||||
return JSONResponse(content={})
|
|
||||||
camera_list = list(filtered)
|
|
||||||
else:
|
|
||||||
camera_list = allowed_cameras
|
|
||||||
|
|
||||||
time_range_query = (
|
|
||||||
Recordings.select(
|
|
||||||
fn.MIN(Recordings.start_time).alias("min_time"),
|
|
||||||
fn.MAX(Recordings.start_time).alias("max_time"),
|
|
||||||
)
|
|
||||||
.where(Recordings.camera << camera_list)
|
|
||||||
.dicts()
|
|
||||||
.get()
|
|
||||||
)
|
|
||||||
|
|
||||||
min_time = time_range_query.get("min_time")
|
|
||||||
max_time = time_range_query.get("max_time")
|
|
||||||
|
|
||||||
if min_time is None or max_time is None:
|
|
||||||
return JSONResponse(content={})
|
|
||||||
|
|
||||||
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
|
|
||||||
|
|
||||||
days: dict[str, bool] = {}
|
|
||||||
|
|
||||||
for period_start, period_end, period_offset in dst_periods:
|
|
||||||
hours_offset = int(period_offset / 60 / 60)
|
|
||||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
|
||||||
period_hour_modifier = f"{hours_offset} hour"
|
|
||||||
period_minute_modifier = f"{minutes_offset} minute"
|
|
||||||
|
|
||||||
period_query = (
|
|
||||||
Recordings.select(
|
|
||||||
fn.strftime(
|
|
||||||
"%Y-%m-%d",
|
|
||||||
fn.datetime(
|
|
||||||
Recordings.start_time,
|
|
||||||
"unixepoch",
|
|
||||||
period_hour_modifier,
|
|
||||||
period_minute_modifier,
|
|
||||||
),
|
|
||||||
).alias("day")
|
|
||||||
)
|
|
||||||
.where(
|
|
||||||
(Recordings.camera << camera_list)
|
|
||||||
& (Recordings.end_time >= period_start)
|
|
||||||
& (Recordings.start_time <= period_end)
|
|
||||||
)
|
|
||||||
.group_by(
|
|
||||||
fn.strftime(
|
|
||||||
"%Y-%m-%d",
|
|
||||||
fn.datetime(
|
|
||||||
Recordings.start_time,
|
|
||||||
"unixepoch",
|
|
||||||
period_hour_modifier,
|
|
||||||
period_minute_modifier,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.order_by(Recordings.start_time.desc())
|
|
||||||
.namedtuples()
|
|
||||||
)
|
|
||||||
|
|
||||||
for g in period_query:
|
|
||||||
days[g.day] = True
|
|
||||||
|
|
||||||
return JSONResponse(content=dict(sorted(days.items())))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get(
|
|
||||||
"/{camera_name}/recordings/summary", dependencies=[Depends(require_camera_access)]
|
|
||||||
)
|
|
||||||
async def recordings_summary(camera_name: str, timezone: str = "utc"):
|
|
||||||
"""Returns hourly summary for recordings of given camera"""
|
|
||||||
|
|
||||||
time_range_query = (
|
|
||||||
Recordings.select(
|
|
||||||
fn.MIN(Recordings.start_time).alias("min_time"),
|
|
||||||
fn.MAX(Recordings.start_time).alias("max_time"),
|
|
||||||
)
|
|
||||||
.where(Recordings.camera == camera_name)
|
|
||||||
.dicts()
|
|
||||||
.get()
|
|
||||||
)
|
|
||||||
|
|
||||||
min_time = time_range_query.get("min_time")
|
|
||||||
max_time = time_range_query.get("max_time")
|
|
||||||
|
|
||||||
days: dict[str, dict] = {}
|
|
||||||
|
|
||||||
if min_time is None or max_time is None:
|
|
||||||
return JSONResponse(content=list(days.values()))
|
|
||||||
|
|
||||||
dst_periods = get_dst_transitions(timezone, min_time, max_time)
|
|
||||||
|
|
||||||
for period_start, period_end, period_offset in dst_periods:
|
|
||||||
hours_offset = int(period_offset / 60 / 60)
|
|
||||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
|
||||||
period_hour_modifier = f"{hours_offset} hour"
|
|
||||||
period_minute_modifier = f"{minutes_offset} minute"
|
|
||||||
|
|
||||||
recording_groups = (
|
|
||||||
Recordings.select(
|
|
||||||
fn.strftime(
|
|
||||||
"%Y-%m-%d %H",
|
|
||||||
fn.datetime(
|
|
||||||
Recordings.start_time,
|
|
||||||
"unixepoch",
|
|
||||||
period_hour_modifier,
|
|
||||||
period_minute_modifier,
|
|
||||||
),
|
|
||||||
).alias("hour"),
|
|
||||||
fn.SUM(Recordings.duration).alias("duration"),
|
|
||||||
fn.SUM(Recordings.motion).alias("motion"),
|
|
||||||
fn.SUM(Recordings.objects).alias("objects"),
|
|
||||||
)
|
|
||||||
.where(
|
|
||||||
(Recordings.camera == camera_name)
|
|
||||||
& (Recordings.end_time >= period_start)
|
|
||||||
& (Recordings.start_time <= period_end)
|
|
||||||
)
|
|
||||||
.group_by((Recordings.start_time + period_offset).cast("int") / 3600)
|
|
||||||
.order_by(Recordings.start_time.desc())
|
|
||||||
.namedtuples()
|
|
||||||
)
|
|
||||||
|
|
||||||
event_groups = (
|
|
||||||
Event.select(
|
|
||||||
fn.strftime(
|
|
||||||
"%Y-%m-%d %H",
|
|
||||||
fn.datetime(
|
|
||||||
Event.start_time,
|
|
||||||
"unixepoch",
|
|
||||||
period_hour_modifier,
|
|
||||||
period_minute_modifier,
|
|
||||||
),
|
|
||||||
).alias("hour"),
|
|
||||||
fn.COUNT(Event.id).alias("count"),
|
|
||||||
)
|
|
||||||
.where(Event.camera == camera_name, Event.has_clip)
|
|
||||||
.where(
|
|
||||||
(Event.start_time >= period_start) & (Event.start_time <= period_end)
|
|
||||||
)
|
|
||||||
.group_by((Event.start_time + period_offset).cast("int") / 3600)
|
|
||||||
.namedtuples()
|
|
||||||
)
|
|
||||||
|
|
||||||
event_map = {g.hour: g.count for g in event_groups}
|
|
||||||
|
|
||||||
for recording_group in recording_groups:
|
|
||||||
parts = recording_group.hour.split()
|
|
||||||
hour = parts[1]
|
|
||||||
day = parts[0]
|
|
||||||
events_count = event_map.get(recording_group.hour, 0)
|
|
||||||
hour_data = {
|
|
||||||
"hour": hour,
|
|
||||||
"events": events_count,
|
|
||||||
"motion": recording_group.motion,
|
|
||||||
"objects": recording_group.objects,
|
|
||||||
"duration": round(recording_group.duration),
|
|
||||||
}
|
|
||||||
if day in days:
|
|
||||||
# merge counts if already present (edge-case at DST boundary)
|
|
||||||
days[day]["events"] += events_count or 0
|
|
||||||
days[day]["hours"].append(hour_data)
|
|
||||||
else:
|
|
||||||
days[day] = {
|
|
||||||
"events": events_count or 0,
|
|
||||||
"hours": [hour_data],
|
|
||||||
"day": day,
|
|
||||||
}
|
|
||||||
|
|
||||||
return JSONResponse(content=list(days.values()))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get("/{camera_name}/recordings", dependencies=[Depends(require_camera_access)])
|
|
||||||
async def recordings(
|
|
||||||
camera_name: str,
|
|
||||||
after: float = (datetime.now() - timedelta(hours=1)).timestamp(),
|
|
||||||
before: float = datetime.now().timestamp(),
|
|
||||||
):
|
|
||||||
"""Return specific camera recordings between the given 'after'/'end' times. If not provided the last hour will be used"""
|
|
||||||
recordings = (
|
|
||||||
Recordings.select(
|
|
||||||
Recordings.id,
|
|
||||||
Recordings.start_time,
|
|
||||||
Recordings.end_time,
|
|
||||||
Recordings.segment_size,
|
|
||||||
Recordings.motion,
|
|
||||||
Recordings.objects,
|
|
||||||
Recordings.duration,
|
|
||||||
)
|
|
||||||
.where(
|
|
||||||
Recordings.camera == camera_name,
|
|
||||||
Recordings.end_time >= after,
|
|
||||||
Recordings.start_time <= before,
|
|
||||||
)
|
|
||||||
.order_by(Recordings.start_time)
|
|
||||||
.dicts()
|
|
||||||
.iterator()
|
|
||||||
)
|
|
||||||
|
|
||||||
return JSONResponse(content=list(recordings))
|
|
||||||
|
|
||||||
|
|
||||||
@router.get(
|
|
||||||
"/recordings/unavailable",
|
|
||||||
response_model=list[dict],
|
|
||||||
dependencies=[Depends(allow_any_authenticated())],
|
|
||||||
)
|
|
||||||
async def no_recordings(
|
|
||||||
request: Request,
|
|
||||||
params: MediaRecordingsAvailabilityQueryParams = Depends(),
|
|
||||||
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
|
||||||
):
|
|
||||||
"""Get time ranges with no recordings."""
|
|
||||||
cameras = params.cameras
|
|
||||||
if cameras != "all":
|
|
||||||
requested = set(unquote(cameras).split(","))
|
|
||||||
filtered = requested.intersection(allowed_cameras)
|
|
||||||
if not filtered:
|
|
||||||
return JSONResponse(content=[])
|
|
||||||
cameras = ",".join(filtered)
|
|
||||||
else:
|
|
||||||
cameras = allowed_cameras
|
|
||||||
|
|
||||||
before = params.before or datetime.datetime.now().timestamp()
|
|
||||||
after = (
|
|
||||||
params.after
|
|
||||||
or (datetime.datetime.now() - datetime.timedelta(hours=1)).timestamp()
|
|
||||||
)
|
|
||||||
scale = params.scale
|
|
||||||
|
|
||||||
clauses = [(Recordings.end_time >= after) & (Recordings.start_time <= before)]
|
|
||||||
if cameras != "all":
|
|
||||||
camera_list = cameras.split(",")
|
|
||||||
clauses.append((Recordings.camera << camera_list))
|
|
||||||
else:
|
|
||||||
camera_list = allowed_cameras
|
|
||||||
|
|
||||||
# Get recording start times
|
|
||||||
data: list[Recordings] = (
|
|
||||||
Recordings.select(Recordings.start_time, Recordings.end_time)
|
|
||||||
.where(reduce(operator.and_, clauses))
|
|
||||||
.order_by(Recordings.start_time.asc())
|
|
||||||
.dicts()
|
|
||||||
.iterator()
|
|
||||||
)
|
|
||||||
|
|
||||||
# Convert recordings to list of (start, end) tuples
|
|
||||||
recordings = [(r["start_time"], r["end_time"]) for r in data]
|
|
||||||
|
|
||||||
# Iterate through time segments and check if each has any recording
|
|
||||||
no_recording_segments = []
|
|
||||||
current = after
|
|
||||||
current_gap_start = None
|
|
||||||
|
|
||||||
while current < before:
|
|
||||||
segment_end = min(current + scale, before)
|
|
||||||
|
|
||||||
# Check if this segment overlaps with any recording
|
|
||||||
has_recording = any(
|
|
||||||
rec_start < segment_end and rec_end > current
|
|
||||||
for rec_start, rec_end in recordings
|
|
||||||
)
|
|
||||||
|
|
||||||
if not has_recording:
|
|
||||||
# This segment has no recordings
|
|
||||||
if current_gap_start is None:
|
|
||||||
current_gap_start = current # Start a new gap
|
|
||||||
else:
|
|
||||||
# This segment has recordings
|
|
||||||
if current_gap_start is not None:
|
|
||||||
# End the current gap and append it
|
|
||||||
no_recording_segments.append(
|
|
||||||
{"start_time": int(current_gap_start), "end_time": int(current)}
|
|
||||||
)
|
|
||||||
current_gap_start = None
|
|
||||||
|
|
||||||
current = segment_end
|
|
||||||
|
|
||||||
# Append the last gap if it exists
|
|
||||||
if current_gap_start is not None:
|
|
||||||
no_recording_segments.append(
|
|
||||||
{"start_time": int(current_gap_start), "end_time": int(before)}
|
|
||||||
)
|
|
||||||
|
|
||||||
return JSONResponse(content=no_recording_segments)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get(
|
@router.get(
|
||||||
"/{camera_name}/start/{start_ts}/end/{end_ts}/clip.mp4",
|
"/{camera_name}/start/{start_ts}/end/{end_ts}/clip.mp4",
|
||||||
dependencies=[Depends(require_camera_access)],
|
dependencies=[Depends(require_camera_access)],
|
||||||
|
|||||||
479
frigate/api/record.py
Normal file
479
frigate/api/record.py
Normal file
@ -0,0 +1,479 @@
|
|||||||
|
"""Recording APIs."""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from functools import reduce
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List
|
||||||
|
from urllib.parse import unquote
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
from fastapi import Path as PathParam
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
from peewee import fn, operator
|
||||||
|
|
||||||
|
from frigate.api.auth import (
|
||||||
|
allow_any_authenticated,
|
||||||
|
get_allowed_cameras_for_filter,
|
||||||
|
require_camera_access,
|
||||||
|
require_role,
|
||||||
|
)
|
||||||
|
from frigate.api.defs.query.recordings_query_parameters import (
|
||||||
|
MediaRecordingsAvailabilityQueryParams,
|
||||||
|
MediaRecordingsSummaryQueryParams,
|
||||||
|
RecordingsDeleteQueryParams,
|
||||||
|
)
|
||||||
|
from frigate.api.defs.response.generic_response import GenericResponse
|
||||||
|
from frigate.api.defs.tags import Tags
|
||||||
|
from frigate.const import RECORD_DIR
|
||||||
|
from frigate.models import Event, Recordings
|
||||||
|
from frigate.util.time import get_dst_transitions
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
router = APIRouter(tags=[Tags.recordings])
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/recordings/storage", dependencies=[Depends(allow_any_authenticated())])
|
||||||
|
def get_recordings_storage_usage(request: Request):
|
||||||
|
recording_stats = request.app.stats_emitter.get_latest_stats()["service"][
|
||||||
|
"storage"
|
||||||
|
][RECORD_DIR]
|
||||||
|
|
||||||
|
if not recording_stats:
|
||||||
|
return JSONResponse({})
|
||||||
|
|
||||||
|
total_mb = recording_stats["total"]
|
||||||
|
|
||||||
|
camera_usages: dict[str, dict] = (
|
||||||
|
request.app.storage_maintainer.calculate_camera_usages()
|
||||||
|
)
|
||||||
|
|
||||||
|
for camera_name in camera_usages.keys():
|
||||||
|
if camera_usages.get(camera_name, {}).get("usage"):
|
||||||
|
camera_usages[camera_name]["usage_percent"] = (
|
||||||
|
camera_usages.get(camera_name, {}).get("usage", 0) / total_mb
|
||||||
|
) * 100
|
||||||
|
|
||||||
|
return JSONResponse(content=camera_usages)
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/recordings/summary", dependencies=[Depends(allow_any_authenticated())])
|
||||||
|
def all_recordings_summary(
|
||||||
|
request: Request,
|
||||||
|
params: MediaRecordingsSummaryQueryParams = Depends(),
|
||||||
|
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
||||||
|
):
|
||||||
|
"""Returns true/false by day indicating if recordings exist"""
|
||||||
|
|
||||||
|
cameras = params.cameras
|
||||||
|
if cameras != "all":
|
||||||
|
requested = set(unquote(cameras).split(","))
|
||||||
|
filtered = requested.intersection(allowed_cameras)
|
||||||
|
if not filtered:
|
||||||
|
return JSONResponse(content={})
|
||||||
|
camera_list = list(filtered)
|
||||||
|
else:
|
||||||
|
camera_list = allowed_cameras
|
||||||
|
|
||||||
|
time_range_query = (
|
||||||
|
Recordings.select(
|
||||||
|
fn.MIN(Recordings.start_time).alias("min_time"),
|
||||||
|
fn.MAX(Recordings.start_time).alias("max_time"),
|
||||||
|
)
|
||||||
|
.where(Recordings.camera << camera_list)
|
||||||
|
.dicts()
|
||||||
|
.get()
|
||||||
|
)
|
||||||
|
|
||||||
|
min_time = time_range_query.get("min_time")
|
||||||
|
max_time = time_range_query.get("max_time")
|
||||||
|
|
||||||
|
if min_time is None or max_time is None:
|
||||||
|
return JSONResponse(content={})
|
||||||
|
|
||||||
|
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
|
||||||
|
|
||||||
|
days: dict[str, bool] = {}
|
||||||
|
|
||||||
|
for period_start, period_end, period_offset in dst_periods:
|
||||||
|
hours_offset = int(period_offset / 60 / 60)
|
||||||
|
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||||
|
period_hour_modifier = f"{hours_offset} hour"
|
||||||
|
period_minute_modifier = f"{minutes_offset} minute"
|
||||||
|
|
||||||
|
period_query = (
|
||||||
|
Recordings.select(
|
||||||
|
fn.strftime(
|
||||||
|
"%Y-%m-%d",
|
||||||
|
fn.datetime(
|
||||||
|
Recordings.start_time,
|
||||||
|
"unixepoch",
|
||||||
|
period_hour_modifier,
|
||||||
|
period_minute_modifier,
|
||||||
|
),
|
||||||
|
).alias("day")
|
||||||
|
)
|
||||||
|
.where(
|
||||||
|
(Recordings.camera << camera_list)
|
||||||
|
& (Recordings.end_time >= period_start)
|
||||||
|
& (Recordings.start_time <= period_end)
|
||||||
|
)
|
||||||
|
.group_by(
|
||||||
|
fn.strftime(
|
||||||
|
"%Y-%m-%d",
|
||||||
|
fn.datetime(
|
||||||
|
Recordings.start_time,
|
||||||
|
"unixepoch",
|
||||||
|
period_hour_modifier,
|
||||||
|
period_minute_modifier,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.order_by(Recordings.start_time.desc())
|
||||||
|
.namedtuples()
|
||||||
|
)
|
||||||
|
|
||||||
|
for g in period_query:
|
||||||
|
days[g.day] = True
|
||||||
|
|
||||||
|
return JSONResponse(content=dict(sorted(days.items())))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/{camera_name}/recordings/summary", dependencies=[Depends(require_camera_access)]
|
||||||
|
)
|
||||||
|
async def recordings_summary(camera_name: str, timezone: str = "utc"):
|
||||||
|
"""Returns hourly summary for recordings of given camera"""
|
||||||
|
|
||||||
|
time_range_query = (
|
||||||
|
Recordings.select(
|
||||||
|
fn.MIN(Recordings.start_time).alias("min_time"),
|
||||||
|
fn.MAX(Recordings.start_time).alias("max_time"),
|
||||||
|
)
|
||||||
|
.where(Recordings.camera == camera_name)
|
||||||
|
.dicts()
|
||||||
|
.get()
|
||||||
|
)
|
||||||
|
|
||||||
|
min_time = time_range_query.get("min_time")
|
||||||
|
max_time = time_range_query.get("max_time")
|
||||||
|
|
||||||
|
days: dict[str, dict] = {}
|
||||||
|
|
||||||
|
if min_time is None or max_time is None:
|
||||||
|
return JSONResponse(content=list(days.values()))
|
||||||
|
|
||||||
|
dst_periods = get_dst_transitions(timezone, min_time, max_time)
|
||||||
|
|
||||||
|
for period_start, period_end, period_offset in dst_periods:
|
||||||
|
hours_offset = int(period_offset / 60 / 60)
|
||||||
|
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||||
|
period_hour_modifier = f"{hours_offset} hour"
|
||||||
|
period_minute_modifier = f"{minutes_offset} minute"
|
||||||
|
|
||||||
|
recording_groups = (
|
||||||
|
Recordings.select(
|
||||||
|
fn.strftime(
|
||||||
|
"%Y-%m-%d %H",
|
||||||
|
fn.datetime(
|
||||||
|
Recordings.start_time,
|
||||||
|
"unixepoch",
|
||||||
|
period_hour_modifier,
|
||||||
|
period_minute_modifier,
|
||||||
|
),
|
||||||
|
).alias("hour"),
|
||||||
|
fn.SUM(Recordings.duration).alias("duration"),
|
||||||
|
fn.SUM(Recordings.motion).alias("motion"),
|
||||||
|
fn.SUM(Recordings.objects).alias("objects"),
|
||||||
|
)
|
||||||
|
.where(
|
||||||
|
(Recordings.camera == camera_name)
|
||||||
|
& (Recordings.end_time >= period_start)
|
||||||
|
& (Recordings.start_time <= period_end)
|
||||||
|
)
|
||||||
|
.group_by((Recordings.start_time + period_offset).cast("int") / 3600)
|
||||||
|
.order_by(Recordings.start_time.desc())
|
||||||
|
.namedtuples()
|
||||||
|
)
|
||||||
|
|
||||||
|
event_groups = (
|
||||||
|
Event.select(
|
||||||
|
fn.strftime(
|
||||||
|
"%Y-%m-%d %H",
|
||||||
|
fn.datetime(
|
||||||
|
Event.start_time,
|
||||||
|
"unixepoch",
|
||||||
|
period_hour_modifier,
|
||||||
|
period_minute_modifier,
|
||||||
|
),
|
||||||
|
).alias("hour"),
|
||||||
|
fn.COUNT(Event.id).alias("count"),
|
||||||
|
)
|
||||||
|
.where(Event.camera == camera_name, Event.has_clip)
|
||||||
|
.where(
|
||||||
|
(Event.start_time >= period_start) & (Event.start_time <= period_end)
|
||||||
|
)
|
||||||
|
.group_by((Event.start_time + period_offset).cast("int") / 3600)
|
||||||
|
.namedtuples()
|
||||||
|
)
|
||||||
|
|
||||||
|
event_map = {g.hour: g.count for g in event_groups}
|
||||||
|
|
||||||
|
for recording_group in recording_groups:
|
||||||
|
parts = recording_group.hour.split()
|
||||||
|
hour = parts[1]
|
||||||
|
day = parts[0]
|
||||||
|
events_count = event_map.get(recording_group.hour, 0)
|
||||||
|
hour_data = {
|
||||||
|
"hour": hour,
|
||||||
|
"events": events_count,
|
||||||
|
"motion": recording_group.motion,
|
||||||
|
"objects": recording_group.objects,
|
||||||
|
"duration": round(recording_group.duration),
|
||||||
|
}
|
||||||
|
if day in days:
|
||||||
|
# merge counts if already present (edge-case at DST boundary)
|
||||||
|
days[day]["events"] += events_count or 0
|
||||||
|
days[day]["hours"].append(hour_data)
|
||||||
|
else:
|
||||||
|
days[day] = {
|
||||||
|
"events": events_count or 0,
|
||||||
|
"hours": [hour_data],
|
||||||
|
"day": day,
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSONResponse(content=list(days.values()))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/{camera_name}/recordings", dependencies=[Depends(require_camera_access)])
|
||||||
|
async def recordings(
|
||||||
|
camera_name: str,
|
||||||
|
after: float = (datetime.now() - timedelta(hours=1)).timestamp(),
|
||||||
|
before: float = datetime.now().timestamp(),
|
||||||
|
):
|
||||||
|
"""Return specific camera recordings between the given 'after'/'end' times. If not provided the last hour will be used"""
|
||||||
|
recordings = (
|
||||||
|
Recordings.select(
|
||||||
|
Recordings.id,
|
||||||
|
Recordings.start_time,
|
||||||
|
Recordings.end_time,
|
||||||
|
Recordings.segment_size,
|
||||||
|
Recordings.motion,
|
||||||
|
Recordings.objects,
|
||||||
|
Recordings.duration,
|
||||||
|
)
|
||||||
|
.where(
|
||||||
|
Recordings.camera == camera_name,
|
||||||
|
Recordings.end_time >= after,
|
||||||
|
Recordings.start_time <= before,
|
||||||
|
)
|
||||||
|
.order_by(Recordings.start_time)
|
||||||
|
.dicts()
|
||||||
|
.iterator()
|
||||||
|
)
|
||||||
|
|
||||||
|
return JSONResponse(content=list(recordings))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get(
|
||||||
|
"/recordings/unavailable",
|
||||||
|
response_model=list[dict],
|
||||||
|
dependencies=[Depends(allow_any_authenticated())],
|
||||||
|
)
|
||||||
|
async def no_recordings(
|
||||||
|
request: Request,
|
||||||
|
params: MediaRecordingsAvailabilityQueryParams = Depends(),
|
||||||
|
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
||||||
|
):
|
||||||
|
"""Get time ranges with no recordings."""
|
||||||
|
cameras = params.cameras
|
||||||
|
if cameras != "all":
|
||||||
|
requested = set(unquote(cameras).split(","))
|
||||||
|
filtered = requested.intersection(allowed_cameras)
|
||||||
|
if not filtered:
|
||||||
|
return JSONResponse(content=[])
|
||||||
|
cameras = ",".join(filtered)
|
||||||
|
else:
|
||||||
|
cameras = allowed_cameras
|
||||||
|
|
||||||
|
before = params.before or datetime.datetime.now().timestamp()
|
||||||
|
after = (
|
||||||
|
params.after
|
||||||
|
or (datetime.datetime.now() - datetime.timedelta(hours=1)).timestamp()
|
||||||
|
)
|
||||||
|
scale = params.scale
|
||||||
|
|
||||||
|
clauses = [(Recordings.end_time >= after) & (Recordings.start_time <= before)]
|
||||||
|
if cameras != "all":
|
||||||
|
camera_list = cameras.split(",")
|
||||||
|
clauses.append((Recordings.camera << camera_list))
|
||||||
|
else:
|
||||||
|
camera_list = allowed_cameras
|
||||||
|
|
||||||
|
# Get recording start times
|
||||||
|
data: list[Recordings] = (
|
||||||
|
Recordings.select(Recordings.start_time, Recordings.end_time)
|
||||||
|
.where(reduce(operator.and_, clauses))
|
||||||
|
.order_by(Recordings.start_time.asc())
|
||||||
|
.dicts()
|
||||||
|
.iterator()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert recordings to list of (start, end) tuples
|
||||||
|
recordings = [(r["start_time"], r["end_time"]) for r in data]
|
||||||
|
|
||||||
|
# Iterate through time segments and check if each has any recording
|
||||||
|
no_recording_segments = []
|
||||||
|
current = after
|
||||||
|
current_gap_start = None
|
||||||
|
|
||||||
|
while current < before:
|
||||||
|
segment_end = min(current + scale, before)
|
||||||
|
|
||||||
|
# Check if this segment overlaps with any recording
|
||||||
|
has_recording = any(
|
||||||
|
rec_start < segment_end and rec_end > current
|
||||||
|
for rec_start, rec_end in recordings
|
||||||
|
)
|
||||||
|
|
||||||
|
if not has_recording:
|
||||||
|
# This segment has no recordings
|
||||||
|
if current_gap_start is None:
|
||||||
|
current_gap_start = current # Start a new gap
|
||||||
|
else:
|
||||||
|
# This segment has recordings
|
||||||
|
if current_gap_start is not None:
|
||||||
|
# End the current gap and append it
|
||||||
|
no_recording_segments.append(
|
||||||
|
{"start_time": int(current_gap_start), "end_time": int(current)}
|
||||||
|
)
|
||||||
|
current_gap_start = None
|
||||||
|
|
||||||
|
current = segment_end
|
||||||
|
|
||||||
|
# Append the last gap if it exists
|
||||||
|
if current_gap_start is not None:
|
||||||
|
no_recording_segments.append(
|
||||||
|
{"start_time": int(current_gap_start), "end_time": int(before)}
|
||||||
|
)
|
||||||
|
|
||||||
|
return JSONResponse(content=no_recording_segments)
|
||||||
|
|
||||||
|
|
||||||
|
@router.delete(
    "/recordings/start/{start}/end/{end}",
    response_model=GenericResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Delete recordings",
    description="""Deletes recordings within the specified time range.
Recordings can be filtered by cameras and kept based on motion, objects, or audio attributes.
""",
)
async def delete_recordings(
    start: float = PathParam(..., description="Start timestamp (unix)"),
    end: float = PathParam(..., description="End timestamp (unix)"),
    params: RecordingsDeleteQueryParams = Depends(),
    allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
    """Delete recordings in the specified time range.

    Restricts deletion to cameras the caller is allowed to access, optionally
    excludes (keeps) segments with motion/object/audio activity, removes the
    media files from disk, then deletes the matching DB rows in batches.

    Returns a JSONResponse: 400 on an invalid range or no permitted cameras,
    otherwise 200 with a summary message including any file-deletion errors.
    """
    if start >= end:
        return JSONResponse(
            content={
                "success": False,
                "message": "Start time must be less than end time.",
            },
            status_code=400,
        )

    cameras = params.cameras

    # Resolve the requested camera list against the caller's permissions.
    if cameras != "all":
        requested = set(cameras.split(","))
        filtered = requested.intersection(allowed_cameras)

        if not filtered:
            return JSONResponse(
                content={
                    "success": False,
                    "message": "No valid cameras found in the request.",
                },
                status_code=400,
            )

        camera_list = list(filtered)
    else:
        camera_list = allowed_cameras

    # Parse the comma-separated keep parameter ("motion", "object", "audio").
    keep_set = set(params.keep.split(",")) if params.keep else set()

    # Find recordings overlapping [start, end]: either endpoint inside the
    # range, or the requested range fully contained within a recording.
    clauses = [
        (
            Recordings.start_time.between(start, end)
            | Recordings.end_time.between(start, end)
            | ((start > Recordings.start_time) & (end < Recordings.end_time))
        ),
        (Recordings.camera << camera_list),
    ]

    # Recordings matching ANY requested keep attribute are excluded from
    # deletion (hence the negation below).
    keep_clauses = []

    if "motion" in keep_set:
        keep_clauses.append(Recordings.motion.is_null(False) & (Recordings.motion > 0))

    if "object" in keep_set:
        keep_clauses.append(
            Recordings.objects.is_null(False) & (Recordings.objects > 0)
        )

    if "audio" in keep_set:
        keep_clauses.append(Recordings.dBFS.is_null(False))

    if keep_clauses:
        keep_condition = reduce(operator.or_, keep_clauses)
        clauses.append(~keep_condition)

    recordings_to_delete = (
        Recordings.select(Recordings.id, Recordings.path)
        .where(reduce(operator.and_, clauses))
        .dicts()
        .iterator()
    )

    recording_ids = []
    deleted_count = 0
    error_count = 0

    # Remove media files first; collect ids for the DB cleanup below.
    for recording in recordings_to_delete:
        recording_ids.append(recording["id"])

        try:
            # missing_ok=True: a file that is already gone still counts as
            # deleted rather than raising FileNotFoundError.
            Path(recording["path"]).unlink(missing_ok=True)
            deleted_count += 1
        except Exception as e:
            logger.error(f"Failed to delete recording file {recording['path']}: {e}")
            error_count += 1

    if recording_ids:
        # Chunk the DELETE to bound per-query size — presumably to stay under
        # the DB backend's parameter limit; confirm against the backend in use.
        max_deletes = 100000

        # NOTE: recording_ids is already a list; the original made a redundant
        # list(...) copy here before slicing.
        for i in range(0, len(recording_ids), max_deletes):
            Recordings.delete().where(
                Recordings.id << recording_ids[i : i + max_deletes]
            ).execute()

    message = f"Successfully deleted {deleted_count} recording(s)."

    if error_count > 0:
        message += f" {error_count} file deletion error(s) occurred."

    return JSONResponse(
        content={"success": True, "message": message},
        status_code=200,
    )
|
||||||
@ -48,6 +48,10 @@
|
|||||||
"name": {
|
"name": {
|
||||||
"placeholder": "Name the Export"
|
"placeholder": "Name the Export"
|
||||||
},
|
},
|
||||||
|
"case": {
|
||||||
|
"label": "Case",
|
||||||
|
"placeholder": "Select a case"
|
||||||
|
},
|
||||||
"select": "Select",
|
"select": "Select",
|
||||||
"export": "Export",
|
"export": "Export",
|
||||||
"selectOrExport": "Select or Export",
|
"selectOrExport": "Select or Export",
|
||||||
|
|||||||
@ -1,6 +1,6 @@
|
|||||||
import ActivityIndicator from "../indicators/activity-indicator";
|
import ActivityIndicator from "../indicators/activity-indicator";
|
||||||
import { Button } from "../ui/button";
|
import { Button } from "../ui/button";
|
||||||
import { useCallback, useState } from "react";
|
import { useCallback, useMemo, useState } from "react";
|
||||||
import { isMobile } from "react-device-detect";
|
import { isMobile } from "react-device-detect";
|
||||||
import { FiMoreVertical } from "react-icons/fi";
|
import { FiMoreVertical } from "react-icons/fi";
|
||||||
import { Skeleton } from "../ui/skeleton";
|
import { Skeleton } from "../ui/skeleton";
|
||||||
@ -32,18 +32,37 @@ import { FaFolder } from "react-icons/fa";
|
|||||||
type CaseCardProps = {
|
type CaseCardProps = {
|
||||||
className: string;
|
className: string;
|
||||||
exportCase: ExportCase;
|
exportCase: ExportCase;
|
||||||
|
exports: Export[];
|
||||||
onSelect: () => void;
|
onSelect: () => void;
|
||||||
};
|
};
|
||||||
export function CaseCard({ className, exportCase, onSelect }: CaseCardProps) {
|
export function CaseCard({
|
||||||
|
className,
|
||||||
|
exportCase,
|
||||||
|
exports,
|
||||||
|
onSelect,
|
||||||
|
}: CaseCardProps) {
|
||||||
|
const firstExport = useMemo(
|
||||||
|
() => exports.find((exp) => exp.thumb_path && exp.thumb_path.length > 0),
|
||||||
|
[exports],
|
||||||
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={cn(
|
className={cn(
|
||||||
"relative flex aspect-video size-full cursor-pointer items-center justify-center rounded-lg bg-secondary md:rounded-2xl",
|
"relative flex aspect-video size-full cursor-pointer items-center justify-center overflow-hidden rounded-lg bg-secondary md:rounded-2xl",
|
||||||
className,
|
className,
|
||||||
)}
|
)}
|
||||||
onClick={() => onSelect()}
|
onClick={() => onSelect()}
|
||||||
>
|
>
|
||||||
<div className="absolute bottom-2 left-2 flex items-center justify-start gap-2">
|
{firstExport && (
|
||||||
|
<img
|
||||||
|
className="absolute inset-0 size-full object-cover"
|
||||||
|
src={`${baseUrl}${firstExport.thumb_path.replace("/media/frigate/", "")}`}
|
||||||
|
alt=""
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
<div className="pointer-events-none absolute inset-x-0 bottom-0 z-10 h-16 bg-gradient-to-t from-black/60 to-transparent" />
|
||||||
|
<div className="absolute bottom-2 left-2 z-20 flex items-center justify-start gap-2 text-white">
|
||||||
<FaFolder />
|
<FaFolder />
|
||||||
<div className="capitalize">{exportCase.name}</div>
|
<div className="capitalize">{exportCase.name}</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@ -22,7 +22,14 @@ import useSWR from "swr";
|
|||||||
import { FrigateConfig } from "@/types/frigateConfig";
|
import { FrigateConfig } from "@/types/frigateConfig";
|
||||||
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
|
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
|
||||||
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
|
import { TimezoneAwareCalendar } from "./ReviewActivityCalendar";
|
||||||
import { SelectSeparator } from "../ui/select";
|
import {
|
||||||
|
Select,
|
||||||
|
SelectContent,
|
||||||
|
SelectItem,
|
||||||
|
SelectSeparator,
|
||||||
|
SelectTrigger,
|
||||||
|
SelectValue,
|
||||||
|
} from "../ui/select";
|
||||||
import { isDesktop, isIOS, isMobile } from "react-device-detect";
|
import { isDesktop, isIOS, isMobile } from "react-device-detect";
|
||||||
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
|
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
|
||||||
import SaveExportOverlay from "./SaveExportOverlay";
|
import SaveExportOverlay from "./SaveExportOverlay";
|
||||||
@ -31,6 +38,7 @@ import { baseUrl } from "@/api/baseUrl";
|
|||||||
import { cn } from "@/lib/utils";
|
import { cn } from "@/lib/utils";
|
||||||
import { GenericVideoPlayer } from "../player/GenericVideoPlayer";
|
import { GenericVideoPlayer } from "../player/GenericVideoPlayer";
|
||||||
import { useTranslation } from "react-i18next";
|
import { useTranslation } from "react-i18next";
|
||||||
|
import { ExportCase } from "@/types/export";
|
||||||
|
|
||||||
const EXPORT_OPTIONS = [
|
const EXPORT_OPTIONS = [
|
||||||
"1",
|
"1",
|
||||||
@ -67,6 +75,9 @@ export default function ExportDialog({
|
|||||||
}: ExportDialogProps) {
|
}: ExportDialogProps) {
|
||||||
const { t } = useTranslation(["components/dialog"]);
|
const { t } = useTranslation(["components/dialog"]);
|
||||||
const [name, setName] = useState("");
|
const [name, setName] = useState("");
|
||||||
|
const [selectedCaseId, setSelectedCaseId] = useState<string | undefined>(
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
|
|
||||||
const onStartExport = useCallback(() => {
|
const onStartExport = useCallback(() => {
|
||||||
if (!range) {
|
if (!range) {
|
||||||
@ -89,6 +100,7 @@ export default function ExportDialog({
|
|||||||
{
|
{
|
||||||
playback: "realtime",
|
playback: "realtime",
|
||||||
name,
|
name,
|
||||||
|
export_case_id: selectedCaseId || undefined,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.then((response) => {
|
.then((response) => {
|
||||||
@ -102,6 +114,7 @@ export default function ExportDialog({
|
|||||||
),
|
),
|
||||||
});
|
});
|
||||||
setName("");
|
setName("");
|
||||||
|
setSelectedCaseId(undefined);
|
||||||
setRange(undefined);
|
setRange(undefined);
|
||||||
setMode("none");
|
setMode("none");
|
||||||
}
|
}
|
||||||
@ -118,10 +131,11 @@ export default function ExportDialog({
|
|||||||
{ position: "top-center" },
|
{ position: "top-center" },
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
}, [camera, name, range, setRange, setName, setMode, t]);
|
}, [camera, name, range, selectedCaseId, setRange, setName, setMode, t]);
|
||||||
|
|
||||||
const handleCancel = useCallback(() => {
|
const handleCancel = useCallback(() => {
|
||||||
setName("");
|
setName("");
|
||||||
|
setSelectedCaseId(undefined);
|
||||||
setMode("none");
|
setMode("none");
|
||||||
setRange(undefined);
|
setRange(undefined);
|
||||||
}, [setMode, setRange]);
|
}, [setMode, setRange]);
|
||||||
@ -190,8 +204,10 @@ export default function ExportDialog({
|
|||||||
currentTime={currentTime}
|
currentTime={currentTime}
|
||||||
range={range}
|
range={range}
|
||||||
name={name}
|
name={name}
|
||||||
|
selectedCaseId={selectedCaseId}
|
||||||
onStartExport={onStartExport}
|
onStartExport={onStartExport}
|
||||||
setName={setName}
|
setName={setName}
|
||||||
|
setSelectedCaseId={setSelectedCaseId}
|
||||||
setRange={setRange}
|
setRange={setRange}
|
||||||
setMode={setMode}
|
setMode={setMode}
|
||||||
onCancel={handleCancel}
|
onCancel={handleCancel}
|
||||||
@ -207,8 +223,10 @@ type ExportContentProps = {
|
|||||||
currentTime: number;
|
currentTime: number;
|
||||||
range?: TimeRange;
|
range?: TimeRange;
|
||||||
name: string;
|
name: string;
|
||||||
|
selectedCaseId?: string;
|
||||||
onStartExport: () => void;
|
onStartExport: () => void;
|
||||||
setName: (name: string) => void;
|
setName: (name: string) => void;
|
||||||
|
setSelectedCaseId: (caseId: string | undefined) => void;
|
||||||
setRange: (range: TimeRange | undefined) => void;
|
setRange: (range: TimeRange | undefined) => void;
|
||||||
setMode: (mode: ExportMode) => void;
|
setMode: (mode: ExportMode) => void;
|
||||||
onCancel: () => void;
|
onCancel: () => void;
|
||||||
@ -218,14 +236,17 @@ export function ExportContent({
|
|||||||
currentTime,
|
currentTime,
|
||||||
range,
|
range,
|
||||||
name,
|
name,
|
||||||
|
selectedCaseId,
|
||||||
onStartExport,
|
onStartExport,
|
||||||
setName,
|
setName,
|
||||||
|
setSelectedCaseId,
|
||||||
setRange,
|
setRange,
|
||||||
setMode,
|
setMode,
|
||||||
onCancel,
|
onCancel,
|
||||||
}: ExportContentProps) {
|
}: ExportContentProps) {
|
||||||
const { t } = useTranslation(["components/dialog"]);
|
const { t } = useTranslation(["components/dialog"]);
|
||||||
const [selectedOption, setSelectedOption] = useState<ExportOption>("1");
|
const [selectedOption, setSelectedOption] = useState<ExportOption>("1");
|
||||||
|
const { data: cases } = useSWR<ExportCase[]>("cases");
|
||||||
|
|
||||||
const onSelectTime = useCallback(
|
const onSelectTime = useCallback(
|
||||||
(option: ExportOption) => {
|
(option: ExportOption) => {
|
||||||
@ -320,6 +341,44 @@ export function ExportContent({
|
|||||||
value={name}
|
value={name}
|
||||||
onChange={(e) => setName(e.target.value)}
|
onChange={(e) => setName(e.target.value)}
|
||||||
/>
|
/>
|
||||||
|
<div className="my-4">
|
||||||
|
<Label className="text-sm text-secondary-foreground">
|
||||||
|
{t("export.case.label", { defaultValue: "Case (optional)" })}
|
||||||
|
</Label>
|
||||||
|
<Select
|
||||||
|
value={selectedCaseId || "none"}
|
||||||
|
onValueChange={(value) =>
|
||||||
|
setSelectedCaseId(value === "none" ? undefined : value)
|
||||||
|
}
|
||||||
|
>
|
||||||
|
<SelectTrigger className="mt-2">
|
||||||
|
<SelectValue
|
||||||
|
placeholder={t("export.case.placeholder", {
|
||||||
|
defaultValue: "Select a case (optional)",
|
||||||
|
})}
|
||||||
|
/>
|
||||||
|
</SelectTrigger>
|
||||||
|
<SelectContent>
|
||||||
|
<SelectItem
|
||||||
|
value="none"
|
||||||
|
className="cursor-pointer hover:bg-accent hover:text-accent-foreground"
|
||||||
|
>
|
||||||
|
{t("label.none", { ns: "common" })}
|
||||||
|
</SelectItem>
|
||||||
|
{cases
|
||||||
|
?.sort((a, b) => a.name.localeCompare(b.name))
|
||||||
|
.map((caseItem) => (
|
||||||
|
<SelectItem
|
||||||
|
key={caseItem.id}
|
||||||
|
value={caseItem.id}
|
||||||
|
className="cursor-pointer hover:bg-accent hover:text-accent-foreground"
|
||||||
|
>
|
||||||
|
{caseItem.name}
|
||||||
|
</SelectItem>
|
||||||
|
))}
|
||||||
|
</SelectContent>
|
||||||
|
</Select>
|
||||||
|
</div>
|
||||||
{isDesktop && <SelectSeparator className="my-4 bg-secondary" />}
|
{isDesktop && <SelectSeparator className="my-4 bg-secondary" />}
|
||||||
<DialogFooter
|
<DialogFooter
|
||||||
className={isDesktop ? "" : "mt-3 flex flex-col-reverse gap-4"}
|
className={isDesktop ? "" : "mt-3 flex flex-col-reverse gap-4"}
|
||||||
|
|||||||
@ -75,6 +75,9 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
// exports
|
// exports
|
||||||
|
|
||||||
const [name, setName] = useState("");
|
const [name, setName] = useState("");
|
||||||
|
const [selectedCaseId, setSelectedCaseId] = useState<string | undefined>(
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
const onStartExport = useCallback(() => {
|
const onStartExport = useCallback(() => {
|
||||||
if (!range) {
|
if (!range) {
|
||||||
toast.error(t("toast.error.noValidTimeSelected"), {
|
toast.error(t("toast.error.noValidTimeSelected"), {
|
||||||
@ -96,6 +99,7 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
{
|
{
|
||||||
playback: "realtime",
|
playback: "realtime",
|
||||||
name,
|
name,
|
||||||
|
export_case_id: selectedCaseId || undefined,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.then((response) => {
|
.then((response) => {
|
||||||
@ -114,6 +118,7 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
setName("");
|
setName("");
|
||||||
|
setSelectedCaseId(undefined);
|
||||||
setRange(undefined);
|
setRange(undefined);
|
||||||
setMode("none");
|
setMode("none");
|
||||||
}
|
}
|
||||||
@ -133,7 +138,7 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
},
|
},
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
}, [camera, name, range, setRange, setName, setMode, t]);
|
}, [camera, name, range, selectedCaseId, setRange, setName, setMode, t]);
|
||||||
|
|
||||||
// filters
|
// filters
|
||||||
|
|
||||||
@ -200,8 +205,10 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
currentTime={currentTime}
|
currentTime={currentTime}
|
||||||
range={range}
|
range={range}
|
||||||
name={name}
|
name={name}
|
||||||
|
selectedCaseId={selectedCaseId}
|
||||||
onStartExport={onStartExport}
|
onStartExport={onStartExport}
|
||||||
setName={setName}
|
setName={setName}
|
||||||
|
setSelectedCaseId={setSelectedCaseId}
|
||||||
setRange={setRange}
|
setRange={setRange}
|
||||||
setMode={(mode) => {
|
setMode={(mode) => {
|
||||||
setMode(mode);
|
setMode(mode);
|
||||||
@ -213,6 +220,7 @@ export default function MobileReviewSettingsDrawer({
|
|||||||
onCancel={() => {
|
onCancel={() => {
|
||||||
setMode("none");
|
setMode("none");
|
||||||
setRange(undefined);
|
setRange(undefined);
|
||||||
|
setSelectedCaseId(undefined);
|
||||||
setDrawerMode("select");
|
setDrawerMode("select");
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
|
|||||||
@ -321,6 +321,7 @@ function Exports() {
|
|||||||
search={search}
|
search={search}
|
||||||
cases={filteredCases}
|
cases={filteredCases}
|
||||||
exports={exports}
|
exports={exports}
|
||||||
|
exportsByCase={exportsByCase}
|
||||||
setSelectedCaseId={setSelectedCaseId}
|
setSelectedCaseId={setSelectedCaseId}
|
||||||
setSelected={setSelected}
|
setSelected={setSelected}
|
||||||
renameClip={onHandleRename}
|
renameClip={onHandleRename}
|
||||||
@ -337,6 +338,7 @@ type AllExportsViewProps = {
|
|||||||
search: string;
|
search: string;
|
||||||
cases?: ExportCase[];
|
cases?: ExportCase[];
|
||||||
exports: Export[];
|
exports: Export[];
|
||||||
|
exportsByCase: { [caseId: string]: Export[] };
|
||||||
setSelectedCaseId: (id: string) => void;
|
setSelectedCaseId: (id: string) => void;
|
||||||
setSelected: (e: Export) => void;
|
setSelected: (e: Export) => void;
|
||||||
renameClip: (id: string, update: string) => void;
|
renameClip: (id: string, update: string) => void;
|
||||||
@ -348,6 +350,7 @@ function AllExportsView({
|
|||||||
search,
|
search,
|
||||||
cases,
|
cases,
|
||||||
exports,
|
exports,
|
||||||
|
exportsByCase,
|
||||||
setSelectedCaseId,
|
setSelectedCaseId,
|
||||||
setSelected,
|
setSelected,
|
||||||
renameClip,
|
renameClip,
|
||||||
@ -404,6 +407,7 @@ function AllExportsView({
|
|||||||
: "hidden"
|
: "hidden"
|
||||||
}
|
}
|
||||||
exportCase={item}
|
exportCase={item}
|
||||||
|
exports={exportsByCase[item.id] || []}
|
||||||
onSelect={() => {
|
onSelect={() => {
|
||||||
setSelectedCaseId(item.id);
|
setSelectedCaseId(item.id);
|
||||||
}}
|
}}
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user