mirror of https://github.com/blakeblackshear/frigate.git
Optimize empty directory cleanup for recordings
The previous empty directory cleanup did a full recursive directory walk, which can be extremely slow. This new implementation only removes directories that may have become empty because of a recent file deletion.
commit d05241cd02
parent d9f8e603c9
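In outline, the change works like this (a minimal sketch; the helper names below are hypothetical and are not the code in this diff): each expiry pass records the parent directory of every file it unlinks, and only those candidate directories are handed to the cleanup step, so no recursive walk of the recordings root is needed.

    from pathlib import Path

    def unlink_and_collect(files: list[str]) -> set[Path]:
        # hypothetical helper: delete files and remember their parent directories
        maybe_empty_dirs: set[Path] = set()
        for name in files:
            path = Path(name)
            path.unlink(missing_ok=True)
            maybe_empty_dirs.add(path.parent)
        return maybe_empty_dirs

    def remove_if_empty(dirs: set[Path]) -> None:
        # rmdir() only succeeds on empty directories; anything else is skipped
        for d in dirs:
            try:
                d.rmdir()
            except OSError:
                pass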
@@ -11,7 +11,7 @@ from pathlib import Path
 from playhouse.sqlite_ext import SqliteExtDatabase
 
 from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
-from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR
+from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE
 from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus
 from frigate.util.builtin import clear_and_unlink
 from frigate.util.media import remove_empty_directories
@@ -60,7 +60,7 @@ class RecordingCleanup(threading.Thread):
         db.execute_sql("PRAGMA wal_checkpoint(TRUNCATE);")
         db.close()
 
-    def expire_review_segments(self, config: CameraConfig, now: datetime) -> None:
+    def expire_review_segments(self, config: CameraConfig, now: datetime) -> set[Path]:
         """Delete review segments that are expired"""
         alert_expire_date = (
             now - datetime.timedelta(days=config.record.alerts.retain.days)
@@ -84,9 +84,12 @@ class RecordingCleanup(threading.Thread):
             .namedtuples()
         )
 
+        maybe_empty_dirs = set()
         thumbs_to_delete = list(map(lambda x: x[1], expired_reviews))
         for thumb_path in thumbs_to_delete:
-            Path(thumb_path).unlink(missing_ok=True)
+            thumb_path = Path(thumb_path)
+            thumb_path.unlink(missing_ok=True)
+            maybe_empty_dirs.add(thumb_path.parent)
 
         max_deletes = 100000
         deleted_reviews_list = list(map(lambda x: x[0], expired_reviews))
@@ -99,13 +102,15 @@ class RecordingCleanup(threading.Thread):
                 << deleted_reviews_list[i : i + max_deletes]
             ).execute()
 
+        return maybe_empty_dirs
+
     def expire_existing_camera_recordings(
         self,
         continuous_expire_date: float,
         motion_expire_date: float,
         config: CameraConfig,
         reviews: ReviewSegment,
-    ) -> None:
+    ) -> set[Path]:
         """Delete recordings for existing camera based on retention config."""
         # Get the timestamp for cutoff of retained days
 
@@ -134,6 +139,8 @@ class RecordingCleanup(threading.Thread):
             .iterator()
         )
 
+        maybe_empty_dirs = set()
+
         # loop over recordings and see if they overlap with any non-expired reviews
         # TODO: expire segments based on segment stats according to config
         review_start = 0
@@ -187,8 +194,10 @@ class RecordingCleanup(threading.Thread):
                 )
                 or (mode == RetainModeEnum.active_objects and recording.objects == 0)
             ):
-                Path(recording.path).unlink(missing_ok=True)
+                recording_path = Path(recording.path)
+                recording_path.unlink(missing_ok=True)
                 deleted_recordings.add(recording.id)
+                maybe_empty_dirs.add(recording_path.parent)
             else:
                 kept_recordings.append((recording.start_time, recording.end_time))
 
@@ -249,8 +258,10 @@ class RecordingCleanup(threading.Thread):
 
             # Delete previews without any relevant recordings
             if not keep:
-                Path(preview.path).unlink(missing_ok=True)
+                preview_path = Path(preview.path)
+                preview_path.unlink(missing_ok=True)
                 deleted_previews.add(preview.id)
+                maybe_empty_dirs.add(preview_path.parent)
 
         # expire previews
         logger.debug(f"Expiring {len(deleted_previews)} previews")
@@ -262,7 +273,9 @@ class RecordingCleanup(threading.Thread):
                 Previews.id << deleted_previews_list[i : i + max_deletes]
             ).execute()
 
-    def expire_recordings(self) -> None:
+        return maybe_empty_dirs
+
+    def expire_recordings(self) -> set[Path]:
         """Delete recordings based on retention config."""
         logger.debug("Start expire recordings.")
         logger.debug("Start deleted cameras.")
@@ -287,10 +300,14 @@ class RecordingCleanup(threading.Thread):
             .iterator()
         )
 
+        maybe_empty_dirs = set()
+
         deleted_recordings = set()
         for recording in no_camera_recordings:
-            Path(recording.path).unlink(missing_ok=True)
+            recording_path = Path(recording.path)
+            recording_path.unlink(missing_ok=True)
             deleted_recordings.add(recording.id)
+            maybe_empty_dirs.add(recording_path.parent)
 
         logger.debug(f"Expiring {len(deleted_recordings)} recordings")
         # delete up to 100,000 at a time
@@ -307,7 +324,7 @@ class RecordingCleanup(threading.Thread):
             logger.debug(f"Start camera: {camera}.")
             now = datetime.datetime.now()
 
-            self.expire_review_segments(config, now)
+            maybe_empty_dirs |= self.expire_review_segments(config, now)
             continuous_expire_date = (
                 now - datetime.timedelta(days=config.record.continuous.days)
             ).timestamp()
@@ -337,7 +354,7 @@ class RecordingCleanup(threading.Thread):
                 .namedtuples()
             )
 
-            self.expire_existing_camera_recordings(
+            maybe_empty_dirs |= self.expire_existing_camera_recordings(
                 continuous_expire_date, motion_expire_date, config, reviews
             )
             logger.debug(f"End camera: {camera}.")
@@ -345,6 +362,8 @@ class RecordingCleanup(threading.Thread):
         logger.debug("End all cameras.")
         logger.debug("End expire recordings.")
 
+        return maybe_empty_dirs
+
     def run(self) -> None:
         # Expire tmp clips every minute, recordings and clean directories every hour.
         for counter in itertools.cycle(range(self.config.record.expire_interval)):
@@ -356,6 +375,6 @@ class RecordingCleanup(threading.Thread):
 
             if counter == 0:
                 self.clean_tmp_clips()
-                self.expire_recordings()
-                remove_empty_directories(RECORD_DIR)
+                maybe_empty_dirs = self.expire_recordings()
+                remove_empty_directories(maybe_empty_dirs)
                 self.truncate_wal()
@@ -1,9 +1,12 @@
 """Recordings Utilities."""
 
 import datetime
+import errno
 import logging
 import os
 from dataclasses import dataclass, field
 from pathlib import Path
+from typing import Iterable
 
 from peewee import DatabaseError, chunked
@@ -47,20 +50,23 @@ class SyncResult:
         }
 
 
-def remove_empty_directories(directory: str) -> None:
-    # list all directories recursively and sort them by path,
-    # longest first
-    paths = sorted(
-        [x[0] for x in os.walk(directory)],
-        key=lambda p: len(str(p)),
-        reverse=True,
-    )
+def remove_empty_directories(paths: Iterable[Path]) -> None:
+    """
+    Remove directories if they exist and are empty.
+    Silently ignores non-existent and non-empty directories.
+    """
+    count = 0
     for path in paths:
-        # don't delete the parent
-        if path == directory:
+        try:
+            path.rmdir()
+        except FileNotFoundError:
             continue
-        if len(os.listdir(path)) == 0:
-            os.rmdir(path)
+        except OSError as e:
+            if e.errno == errno.ENOTEMPTY:
+                continue
+            raise
+        count += 1
+    logger.debug(f"Removed {count} empty directories")
 
 
 def sync_recordings(
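For reference, a small usage sketch of the reworked helper (the directory path below is illustrative); the import matches the one already used by the cleanup thread above. Missing and non-empty directories are skipped without raising, so callers can pass every candidate parent unconditionally.

    from pathlib import Path
    from frigate.util.media import remove_empty_directories

    candidates = {Path("/media/frigate/recordings/2024-05-01/10/garage")}
    remove_empty_directories(candidates)  # removes the directory only if it is now empty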