Compare commits

...

3 Commits

Author SHA1 Message Date
Nicolas Mowen
6c441e02bf
Merge 5486dc581c into 1cc50f68a0
2026-01-18 00:01:35 +01:00
John Shaw
1cc50f68a0
Optimize empty directory cleanup for recordings (#21695)
The previous empty directory cleanup did a full recursive directory
walk, which can be extremely slow. This new implementation only removes
directories which have a chance of being empty due to a recent file
deletion.
2026-01-17 15:47:21 -07:00
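The approach described in this commit amounts to: remember the parent directory of every file that was just deleted, then try to remove only those candidates rather than walking the whole recordings tree. A minimal standalone sketch of that pattern (the helper names here are illustrative, not Frigate's actual functions):

```python
import errno
from pathlib import Path
from typing import Iterable


def delete_files(files: Iterable[Path]) -> set[Path]:
    """Delete files and return the directories that might now be empty."""
    maybe_empty_dirs: set[Path] = set()
    for f in files:
        f.unlink(missing_ok=True)
        maybe_empty_dirs.add(f.parent)
    return maybe_empty_dirs


def remove_if_empty(dirs: Iterable[Path]) -> None:
    """Attempt to remove each candidate, ignoring missing or non-empty ones."""
    for d in dirs:
        try:
            d.rmdir()  # succeeds only when the directory is empty
        except FileNotFoundError:
            continue
        except OSError as e:
            if e.errno != errno.ENOTEMPTY:
                raise
```

Because rmdir() simply fails with ENOTEMPTY on a directory that still holds files, no explicit emptiness check or recursive walk is needed; the cost scales with the number of deletions, not the size of the recordings tree.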
Nicolas Mowen
38a630af57
Refactor Time-Lapse Export (#21668)
* refactor time lapse creation to be a separate API call with ability to pass arbitrary ffmpeg args

* Add CPU fallback
2026-01-15 10:30:55 -07:00
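The new API surface from this commit appears further down in the diff (the /export/custom/... route and ExportRecordingsCustomBody). A hedged client-side example of calling it; the route and JSON fields come from the diff, while the host/port, camera name, and timestamps are invented for illustration:

```python
import requests  # illustrative client; any HTTP client works

camera = "front_door"                    # example camera name
start, end = 1760000000, 1760003600      # example unix timestamps

resp = requests.post(
    f"http://127.0.0.1:5000/export/custom/{camera}/start/{start}/end/{end}",
    json={
        "source": "recordings",
        "name": "Front door time-lapse",
        # Optional: if both args are omitted, the server falls back to the
        # timelapse defaults (DEFAULT_TIME_LAPSE_FFMPEG_ARGS).
        "ffmpeg_input_args": "",
        "ffmpeg_output_args": "-vf setpts=0.04*PTS -r 30",
        "cpu_fallback": True,  # retry without hwaccel if the first attempt fails
    },
    timeout=10,
)
print(resp.json())  # {"success": true, "message": "Starting export of recording.", "export_id": "..."}
```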
7 changed files with 306 additions and 70 deletions

View File

@ -3,16 +3,10 @@ from typing import Optional, Union
from pydantic import BaseModel, Field
from pydantic.json_schema import SkipJsonSchema
from frigate.record.export import (
PlaybackFactorEnum,
PlaybackSourceEnum,
)
from frigate.record.export import PlaybackSourceEnum
class ExportRecordingsBody(BaseModel):
playback: PlaybackFactorEnum = Field(
default=PlaybackFactorEnum.realtime, title="Playback factor"
)
source: PlaybackSourceEnum = Field(
default=PlaybackSourceEnum.recordings, title="Playback source"
)
@ -24,3 +18,32 @@ class ExportRecordingsBody(BaseModel):
max_length=30,
description="ID of the export case to assign this export to",
)
class ExportRecordingsCustomBody(BaseModel):
source: PlaybackSourceEnum = Field(
default=PlaybackSourceEnum.recordings, title="Playback source"
)
name: str = Field(title="Friendly name", default=None, max_length=256)
image_path: Union[str, SkipJsonSchema[None]] = None
export_case_id: Optional[str] = Field(
default=None,
title="Export case ID",
max_length=30,
description="ID of the export case to assign this export to",
)
ffmpeg_input_args: Optional[str] = Field(
default=None,
title="FFmpeg input arguments",
description="Custom FFmpeg input arguments. If not provided, defaults to timelapse input args.",
)
ffmpeg_output_args: Optional[str] = Field(
default=None,
title="FFmpeg output arguments",
description="Custom FFmpeg output arguments. If not provided, defaults to timelapse output args.",
)
cpu_fallback: bool = Field(
default=False,
title="CPU Fallback",
description="If true, retry export without hardware acceleration if the initial export fails.",
)

View File

@ -24,7 +24,10 @@ from frigate.api.defs.request.export_case_body import (
ExportCaseCreateBody,
ExportCaseUpdateBody,
)
from frigate.api.defs.request.export_recordings_body import ExportRecordingsBody
from frigate.api.defs.request.export_recordings_body import (
ExportRecordingsBody,
ExportRecordingsCustomBody,
)
from frigate.api.defs.request.export_rename_body import ExportRenameBody
from frigate.api.defs.response.export_case_response import (
ExportCaseModel,
@ -40,7 +43,7 @@ from frigate.api.defs.tags import Tags
from frigate.const import CLIPS_DIR, EXPORT_DIR
from frigate.models import Export, ExportCase, Previews, Recordings
from frigate.record.export import (
PlaybackFactorEnum,
DEFAULT_TIME_LAPSE_FFMPEG_ARGS,
PlaybackSourceEnum,
RecordingExporter,
)
@ -262,7 +265,6 @@ def export_recording(
status_code=404,
)
playback_factor = body.playback
playback_source = body.source
friendly_name = body.name
existing_image = sanitize_filepath(body.image_path) if body.image_path else None
@ -335,11 +337,6 @@ def export_recording(
existing_image,
int(start_time),
int(end_time),
(
PlaybackFactorEnum[playback_factor]
if playback_factor in PlaybackFactorEnum.__members__.values()
else PlaybackFactorEnum.realtime
),
(
PlaybackSourceEnum[playback_source]
if playback_source in PlaybackSourceEnum.__members__.values()
@ -456,6 +453,138 @@ async def export_delete(event_id: str, request: Request):
)
@router.post(
"/export/custom/{camera_name}/start/{start_time}/end/{end_time}",
response_model=StartExportResponse,
dependencies=[Depends(require_camera_access)],
summary="Start custom recording export",
description="""Starts an export of a recording for the specified time range using custom FFmpeg arguments.
The export can be from recordings or preview footage. Returns the export ID if
successful, or an error message if the camera is invalid or no recordings/previews
are found for the time range. If ffmpeg_input_args and ffmpeg_output_args are not provided,
the export defaults to timelapse settings.""",
)
def export_recording_custom(
request: Request,
camera_name: str,
start_time: float,
end_time: float,
body: ExportRecordingsCustomBody,
):
if not camera_name or not request.app.frigate_config.cameras.get(camera_name):
return JSONResponse(
content=(
{"success": False, "message": f"{camera_name} is not a valid camera."}
),
status_code=404,
)
playback_source = body.source
friendly_name = body.name
existing_image = sanitize_filepath(body.image_path) if body.image_path else None
ffmpeg_input_args = body.ffmpeg_input_args
ffmpeg_output_args = body.ffmpeg_output_args
cpu_fallback = body.cpu_fallback
export_case_id = body.export_case_id
if export_case_id is not None:
try:
ExportCase.get(ExportCase.id == export_case_id)
except DoesNotExist:
return JSONResponse(
content={"success": False, "message": "Export case not found"},
status_code=404,
)
# Ensure that existing_image is a valid path
if existing_image and not existing_image.startswith(CLIPS_DIR):
return JSONResponse(
content=({"success": False, "message": "Invalid image path"}),
status_code=400,
)
if playback_source == "recordings":
recordings_count = (
Recordings.select()
.where(
Recordings.start_time.between(start_time, end_time)
| Recordings.end_time.between(start_time, end_time)
| (
(start_time > Recordings.start_time)
& (end_time < Recordings.end_time)
)
)
.where(Recordings.camera == camera_name)
.count()
)
if recordings_count <= 0:
return JSONResponse(
content=(
{"success": False, "message": "No recordings found for time range"}
),
status_code=400,
)
else:
previews_count = (
Previews.select()
.where(
Previews.start_time.between(start_time, end_time)
| Previews.end_time.between(start_time, end_time)
| ((start_time > Previews.start_time) & (end_time < Previews.end_time))
)
.where(Previews.camera == camera_name)
.count()
)
if not is_current_hour(start_time) and previews_count <= 0:
return JSONResponse(
content=(
{"success": False, "message": "No previews found for time range"}
),
status_code=400,
)
export_id = f"{camera_name}_{''.join(random.choices(string.ascii_lowercase + string.digits, k=6))}"
# Set default values if not provided (timelapse defaults)
if ffmpeg_input_args is None:
ffmpeg_input_args = ""
if ffmpeg_output_args is None:
ffmpeg_output_args = DEFAULT_TIME_LAPSE_FFMPEG_ARGS
exporter = RecordingExporter(
request.app.frigate_config,
export_id,
camera_name,
friendly_name,
existing_image,
int(start_time),
int(end_time),
(
PlaybackSourceEnum[playback_source]
if playback_source in PlaybackSourceEnum.__members__.values()
else PlaybackSourceEnum.recordings
),
export_case_id,
ffmpeg_input_args,
ffmpeg_output_args,
cpu_fallback,
)
exporter.start()
return JSONResponse(
content=(
{
"success": True,
"message": "Starting export of recording.",
"export_id": export_id,
}
),
status_code=200,
)
@router.get(
"/exports/{export_id}",
response_model=ExportModel,

View File

@ -19,8 +19,6 @@ __all__ = [
"RetainModeEnum",
]
DEFAULT_TIME_LAPSE_FFMPEG_ARGS = "-vf setpts=0.04*PTS -r 30"
class RecordRetainConfig(FrigateBaseModel):
days: float = Field(default=0, ge=0, title="Default retention period.")
@ -67,9 +65,6 @@ class RecordPreviewConfig(FrigateBaseModel):
class RecordExportConfig(FrigateBaseModel):
timelapse_args: str = Field(
default=DEFAULT_TIME_LAPSE_FFMPEG_ARGS, title="Timelapse Args"
)
hwaccel_args: Union[str, list[str]] = Field(
default="auto", title="Export-specific FFmpeg hardware acceleration arguments."
)

View File

@ -11,7 +11,7 @@ from pathlib import Path
from playhouse.sqlite_ext import SqliteExtDatabase
from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE
from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus
from frigate.util.builtin import clear_and_unlink
from frigate.util.media import remove_empty_directories
@ -60,7 +60,7 @@ class RecordingCleanup(threading.Thread):
db.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);")
db.close()
def expire_review_segments(self, config: CameraConfig, now: datetime) -> None:
def expire_review_segments(self, config: CameraConfig, now: datetime) -> set[Path]:
"""Delete review segments that are expired"""
alert_expire_date = (
now - datetime.timedelta(days=config.record.alerts.retain.days)
@ -84,9 +84,12 @@ class RecordingCleanup(threading.Thread):
.namedtuples()
)
maybe_empty_dirs = set()
thumbs_to_delete = list(map(lambda x: x[1], expired_reviews))
for thumb_path in thumbs_to_delete:
Path(thumb_path).unlink(missing_ok=True)
thumb_path = Path(thumb_path)
thumb_path.unlink(missing_ok=True)
maybe_empty_dirs.add(thumb_path.parent)
max_deletes = 100000
deleted_reviews_list = list(map(lambda x: x[0], expired_reviews))
@ -99,13 +102,15 @@ class RecordingCleanup(threading.Thread):
<< deleted_reviews_list[i : i + max_deletes]
).execute()
return maybe_empty_dirs
def expire_existing_camera_recordings(
self,
continuous_expire_date: float,
motion_expire_date: float,
config: CameraConfig,
reviews: ReviewSegment,
) -> None:
) -> set[Path]:
"""Delete recordings for existing camera based on retention config."""
# Get the timestamp for cutoff of retained days
@ -134,6 +139,8 @@ class RecordingCleanup(threading.Thread):
.iterator()
)
maybe_empty_dirs = set()
# loop over recordings and see if they overlap with any non-expired reviews
# TODO: expire segments based on segment stats according to config
review_start = 0
@ -187,8 +194,10 @@ class RecordingCleanup(threading.Thread):
)
or (mode == RetainModeEnum.active_objects and recording.objects == 0)
):
Path(recording.path).unlink(missing_ok=True)
recording_path = Path(recording.path)
recording_path.unlink(missing_ok=True)
deleted_recordings.add(recording.id)
maybe_empty_dirs.add(recording_path.parent)
else:
kept_recordings.append((recording.start_time, recording.end_time))
@ -249,8 +258,10 @@ class RecordingCleanup(threading.Thread):
# Delete previews without any relevant recordings
if not keep:
Path(preview.path).unlink(missing_ok=True)
preview_path = Path(preview.path)
preview_path.unlink(missing_ok=True)
deleted_previews.add(preview.id)
maybe_empty_dirs.add(preview_path.parent)
# expire previews
logger.debug(f"Expiring {len(deleted_previews)} previews")
@ -262,7 +273,9 @@ class RecordingCleanup(threading.Thread):
Previews.id << deleted_previews_list[i : i + max_deletes]
).execute()
def expire_recordings(self) -> None:
return maybe_empty_dirs
def expire_recordings(self) -> set[Path]:
"""Delete recordings based on retention config."""
logger.debug("Start expire recordings.")
logger.debug("Start deleted cameras.")
@ -287,10 +300,14 @@ class RecordingCleanup(threading.Thread):
.iterator()
)
maybe_empty_dirs = set()
deleted_recordings = set()
for recording in no_camera_recordings:
Path(recording.path).unlink(missing_ok=True)
recording_path = Path(recording.path)
recording_path.unlink(missing_ok=True)
deleted_recordings.add(recording.id)
maybe_empty_dirs.add(recording_path.parent)
logger.debug(f"Expiring {len(deleted_recordings)} recordings")
# delete up to 100,000 at a time
@ -307,7 +324,7 @@ class RecordingCleanup(threading.Thread):
logger.debug(f"Start camera: {camera}.")
now = datetime.datetime.now()
self.expire_review_segments(config, now)
maybe_empty_dirs |= self.expire_review_segments(config, now)
continuous_expire_date = (
now - datetime.timedelta(days=config.record.continuous.days)
).timestamp()
@ -337,7 +354,7 @@ class RecordingCleanup(threading.Thread):
.namedtuples()
)
self.expire_existing_camera_recordings(
maybe_empty_dirs |= self.expire_existing_camera_recordings(
continuous_expire_date, motion_expire_date, config, reviews
)
logger.debug(f"End camera: {camera}.")
@ -345,6 +362,8 @@ class RecordingCleanup(threading.Thread):
logger.debug("End all cameras.")
logger.debug("End expire recordings.")
return maybe_empty_dirs
def run(self) -> None:
# Expire tmp clips every minute, recordings and clean directories every hour.
for counter in itertools.cycle(range(self.config.record.expire_interval)):
@ -356,6 +375,6 @@ class RecordingCleanup(threading.Thread):
if counter == 0:
self.clean_tmp_clips()
self.expire_recordings()
remove_empty_directories(RECORD_DIR)
maybe_empty_dirs = self.expire_recordings()
remove_empty_directories(maybe_empty_dirs)
self.truncate_wal()

View File

@ -33,6 +33,7 @@ from frigate.util.time import is_current_hour
logger = logging.getLogger(__name__)
DEFAULT_TIME_LAPSE_FFMPEG_ARGS = "-vf setpts=0.04*PTS -r 30"
TIMELAPSE_DATA_INPUT_ARGS = "-an -skip_frame nokey"
@ -40,11 +41,6 @@ def lower_priority():
os.nice(PROCESS_PRIORITY_LOW)
class PlaybackFactorEnum(str, Enum):
realtime = "realtime"
timelapse_25x = "timelapse_25x"
class PlaybackSourceEnum(str, Enum):
recordings = "recordings"
preview = "preview"
@ -62,9 +58,11 @@ class RecordingExporter(threading.Thread):
image: Optional[str],
start_time: int,
end_time: int,
playback_factor: PlaybackFactorEnum,
playback_source: PlaybackSourceEnum,
export_case_id: Optional[str] = None,
ffmpeg_input_args: Optional[str] = None,
ffmpeg_output_args: Optional[str] = None,
cpu_fallback: bool = False,
) -> None:
super().__init__()
self.config = config
@ -74,9 +72,11 @@ class RecordingExporter(threading.Thread):
self.user_provided_image = image
self.start_time = start_time
self.end_time = end_time
self.playback_factor = playback_factor
self.playback_source = playback_source
self.export_case_id = export_case_id
self.ffmpeg_input_args = ffmpeg_input_args
self.ffmpeg_output_args = ffmpeg_output_args
self.cpu_fallback = cpu_fallback
# ensure export thumb dir
Path(os.path.join(CLIPS_DIR, "export")).mkdir(exist_ok=True)
@ -181,7 +181,9 @@ class RecordingExporter(threading.Thread):
return thumb_path
def get_record_export_command(self, video_path: str) -> list[str]:
def get_record_export_command(
self, video_path: str, use_hwaccel: bool = True
) -> list[str]:
if (self.end_time - self.start_time) <= MAX_PLAYLIST_SECONDS:
playlist_lines = f"http://127.0.0.1:5000/vod/{self.camera}/start/{self.start_time}/end/{self.end_time}/index.m3u8"
ffmpeg_input = (
@ -220,20 +222,25 @@ class RecordingExporter(threading.Thread):
ffmpeg_input = "-y -protocol_whitelist pipe,file,http,tcp -f concat -safe 0 -i /dev/stdin"
if self.playback_factor == PlaybackFactorEnum.realtime:
ffmpeg_cmd = (
f"{self.config.ffmpeg.ffmpeg_path} -hide_banner {ffmpeg_input} -c copy -movflags +faststart"
).split(" ")
elif self.playback_factor == PlaybackFactorEnum.timelapse_25x:
if self.ffmpeg_input_args is not None and self.ffmpeg_output_args is not None:
hwaccel_args = (
self.config.cameras[self.camera].record.export.hwaccel_args
if use_hwaccel
else None
)
ffmpeg_cmd = (
parse_preset_hardware_acceleration_encode(
self.config.ffmpeg.ffmpeg_path,
self.config.cameras[self.camera].record.export.hwaccel_args,
f"-an {ffmpeg_input}",
f"{self.config.cameras[self.camera].record.export.timelapse_args} -movflags +faststart",
hwaccel_args,
f"{self.ffmpeg_input_args} -an {ffmpeg_input}".strip(),
f"{self.ffmpeg_output_args} -movflags +faststart".strip(),
EncodeTypeEnum.timelapse,
)
).split(" ")
else:
ffmpeg_cmd = (
f"{self.config.ffmpeg.ffmpeg_path} -hide_banner {ffmpeg_input} -c copy -movflags +faststart"
).split(" ")
# add metadata
title = f"Frigate Recording for {self.camera}, {self.get_datetime_from_timestamp(self.start_time)} - {self.get_datetime_from_timestamp(self.end_time)}"
@ -243,7 +250,9 @@ class RecordingExporter(threading.Thread):
return ffmpeg_cmd, playlist_lines
def get_preview_export_command(self, video_path: str) -> list[str]:
def get_preview_export_command(
self, video_path: str, use_hwaccel: bool = True
) -> list[str]:
playlist_lines = []
codec = "-c copy"
@ -311,20 +320,25 @@ class RecordingExporter(threading.Thread):
"-y -protocol_whitelist pipe,file,tcp -f concat -safe 0 -i /dev/stdin"
)
if self.playback_factor == PlaybackFactorEnum.realtime:
ffmpeg_cmd = (
f"{self.config.ffmpeg.ffmpeg_path} -hide_banner {ffmpeg_input} {codec} -movflags +faststart {video_path}"
).split(" ")
elif self.playback_factor == PlaybackFactorEnum.timelapse_25x:
if self.ffmpeg_input_args is not None and self.ffmpeg_output_args is not None:
hwaccel_args = (
self.config.cameras[self.camera].record.export.hwaccel_args
if use_hwaccel
else None
)
ffmpeg_cmd = (
parse_preset_hardware_acceleration_encode(
self.config.ffmpeg.ffmpeg_path,
self.config.cameras[self.camera].record.export.hwaccel_args,
f"{TIMELAPSE_DATA_INPUT_ARGS} {ffmpeg_input}",
f"{self.config.cameras[self.camera].record.export.timelapse_args} -movflags +faststart {video_path}",
hwaccel_args,
f"{self.ffmpeg_input_args} {TIMELAPSE_DATA_INPUT_ARGS} {ffmpeg_input}".strip(),
f"{self.ffmpeg_output_args} -movflags +faststart {video_path}".strip(),
EncodeTypeEnum.timelapse,
)
).split(" ")
else:
ffmpeg_cmd = (
f"{self.config.ffmpeg.ffmpeg_path} -hide_banner {ffmpeg_input} {codec} -movflags +faststart {video_path}"
).split(" ")
# add metadata
title = f"Frigate Preview for {self.camera}, {self.get_datetime_from_timestamp(self.start_time)} - {self.get_datetime_from_timestamp(self.end_time)}"
@ -381,6 +395,34 @@ class RecordingExporter(threading.Thread):
capture_output=True,
)
# If export failed and cpu_fallback is enabled, retry without hwaccel
if (
p.returncode != 0
and self.cpu_fallback
and self.ffmpeg_input_args is not None
and self.ffmpeg_output_args is not None
):
logger.warning(
f"Export with hardware acceleration failed, retrying without hwaccel for {self.export_id}"
)
if self.playback_source == PlaybackSourceEnum.recordings:
ffmpeg_cmd, playlist_lines = self.get_record_export_command(
video_path, use_hwaccel=False
)
else:
ffmpeg_cmd, playlist_lines = self.get_preview_export_command(
video_path, use_hwaccel=False
)
p = sp.run(
ffmpeg_cmd,
input="\n".join(playlist_lines),
encoding="ascii",
preexec_fn=lower_priority,
capture_output=True,
)
if p.returncode != 0:
logger.error(
f"Failed to export {self.playback_source.value} for command {' '.join(ffmpeg_cmd)}"

View File

@ -442,13 +442,35 @@ def migrate_018_0(config: dict[str, dict[str, Any]]) -> dict[str, dict[str, Any]
if new_config.get("record", {}).get("sync_recordings") is not None:
del new_config["record"]["sync_recordings"]
# Remove deprecated sync_recordings from camera-specific record configs
# Remove deprecated timelapse_args from global record export config
if new_config.get("record", {}).get("export", {}).get("timelapse_args") is not None:
del new_config["record"]["export"]["timelapse_args"]
# Remove export section if empty
if not new_config.get("record", {}).get("export"):
del new_config["record"]["export"]
# Remove record section if empty
if not new_config.get("record"):
del new_config["record"]
# Remove deprecated sync_recordings and timelapse_args from camera-specific record configs
for name, camera in config.get("cameras", {}).items():
camera_config: dict[str, dict[str, Any]] = camera.copy()
if camera_config.get("record", {}).get("sync_recordings") is not None:
del camera_config["record"]["sync_recordings"]
if (
camera_config.get("record", {}).get("export", {}).get("timelapse_args")
is not None
):
del camera_config["record"]["export"]["timelapse_args"]
# Remove export section if empty
if not camera_config.get("record", {}).get("export"):
del camera_config["record"]["export"]
# Remove record section if empty
if not camera_config.get("record"):
del camera_config["record"]
new_config["cameras"][name] = camera_config
new_config["version"] = "0.18-0"
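To make the migration concrete, a before/after sketch of the kind of config dict migrate_018_0 rewrites; the camera name and argument values are invented, but the keys being stripped match the code above:

```python
# Invented example input/output for migrate_018_0 (not taken from a real config).
before = {
    "version": "0.17-1",
    "record": {
        "sync_recordings": True,
        "export": {"timelapse_args": "-vf setpts=0.04*PTS -r 30"},
    },
    "cameras": {
        "front_door": {
            "record": {"export": {"timelapse_args": "-r 60"}},
        },
    },
}

# After migration: deprecated keys removed, now-empty export/record sections
# dropped, and the config version bumped.
after = {
    "version": "0.18-0",
    "cameras": {
        "front_door": {},
    },
}
```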

View File

@ -1,9 +1,12 @@
"""Recordings Utilities."""
import datetime
import errno
import logging
import os
from dataclasses import dataclass, field
from pathlib import Path
from typing import Iterable
from peewee import DatabaseError, chunked
@ -47,20 +50,23 @@ class SyncResult:
}
def remove_empty_directories(directory: str) -> None:
# list all directories recursively and sort them by path,
# longest first
paths = sorted(
[x[0] for x in os.walk(directory)],
key=lambda p: len(str(p)),
reverse=True,
)
def remove_empty_directories(paths: Iterable[Path]) -> None:
"""
Remove directories if they exist and are empty.
Silently ignores non-existent and non-empty directories.
"""
count = 0
for path in paths:
# don't delete the parent
if path == directory:
try:
path.rmdir()
except FileNotFoundError:
continue
if len(os.listdir(path)) == 0:
os.rmdir(path)
except OSError as e:
if e.errno == errno.ENOTEMPTY:
continue
raise
count += 1
logger.debug(f"Removed {count} empty directories")
def sync_recordings(