Handle case where segments are not deleted on initial run or only retained segments remain
commit fce03ec90e
parent 4895e69a28
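In outline, the commit turns reduce_storage_consumption into a multi-pass cleanup: it first tries to free roughly two hours of recordings from the oldest ~24 hours of segments while skipping segments that overlap indefinitely retained events, widens the search to every segment for the camera if that falls short, and only as a last resort deletes retained segments as well. Below is a minimal, self-contained sketch of that control flow; Segment, delete_segments, and reduce_storage are illustrative stand-ins for the peewee Recordings/Event queries and StorageMaintainer methods shown in the diff, not names from the commit.

# Simplified model of the three-pass cleanup this commit introduces.
# Segment, delete_segments, and reduce_storage are illustrative stand-ins
# for the Recordings rows and StorageMaintainer methods in the diff below.
from dataclasses import dataclass


@dataclass
class Segment:
    id: str
    start_time: float
    retained: bool  # True if the segment overlaps an event retained indefinitely


def delete_segments(segments, honor_retention: bool, needed: int) -> set[str]:
    """Pick oldest segments for deletion until `needed` are collected."""
    deleted: set[str] = set()
    for seg in sorted(segments, key=lambda s: s.start_time):
        if len(deleted) >= needed:
            break
        if honor_retention and seg.retained:
            continue
        deleted.add(seg.id)
    return deleted


def reduce_storage(segments, segment_duration: float) -> set[str]:
    # roughly two hours of footage, expressed as a segment count
    needed = int(7200 / segment_duration)
    # pass 1: the oldest ~24 hours of segments, honoring retention
    oldest = sorted(segments, key=lambda s: s.start_time)[: needed * 12]
    deleted = delete_segments(oldest, honor_retention=True, needed=needed)
    # pass 2: too little was freed, so widen to every segment for the camera
    if len(deleted) < needed:
        deleted |= delete_segments(segments, honor_retention=True, needed=needed)
    # pass 3: quota still not met, so retained segments are deleted as well
    if len(deleted) < needed:
        deleted |= delete_segments(segments, honor_retention=False, needed=needed)
    return deleted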
@@ -66,34 +66,10 @@ class StorageMaintainer(threading.Thread):
        remaining_storage = round(shutil.disk_usage(RECORD_DIR).free / 1000000, 1)
        return remaining_storage < self.avg_segment_sizes["total"]["hour"]

    def reduce_storage_consumption(self) -> None:
        """Cleanup the last 2 hours of recordings."""
        logger.debug("Start all cameras.")
        for camera in self.config.cameras.keys():
            logger.debug(f"Start camera: {camera}.")
            # Get last 24 hours of recordings segments
            segment_count = int(
                7200 / self.avg_segment_sizes[camera]["segment_duration"]
            )
            recordings: Recordings = (
                Recordings.select()
                .where(Recordings.camera == camera)
                .order_by(Recordings.start_time.asc())
                .limit(segment_count * 12)
            )

            # Get retained events to check against
            retained_events: Event = (
                Event.select()
                .where(
                    Event.camera == camera,
                    Event.retain_indefinitely == True,
                    Event.has_clip,
                )
                .order_by(Event.start_time)
                .objects()
            )

    def delete_recording_segments(
        self, recordings, retained_events, segment_count: int
    ) -> set[str]:
        """Delete Recording Segments"""
        # loop over recordings and see if they overlap with any retained events
        # TODO: expire segments based on segment stats according to config
        event_start = 0
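In both the removed and the re-added reduce_storage_consumption, 7200 seconds (two hours) divided by the camera's average segment duration gives the number of segments to free, and the query limit of segment_count * 12 covers roughly 24 hours of footage. With a hypothetical 10-second segment duration, for example:

# Hypothetical numbers purely to illustrate the arithmetic in the hunks above and below.
segment_duration = 10  # seconds per recording segment (assumed value)
segment_count = int(7200 / segment_duration)  # 720 segments, about 2 hours
query_limit = segment_count * 12              # 8640 segments, about 24 hours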
@@ -133,6 +109,66 @@ class StorageMaintainer(threading.Thread):
                Path(recording.path).unlink(missing_ok=True)
                deleted_recordings.add(recording.id)

        return deleted_recordings

    def reduce_storage_consumption(self) -> None:
        """Cleanup the last 2 hours of recordings."""
        logger.debug("Start all cameras.")
        for camera in self.config.cameras.keys():
            logger.debug(f"Start camera: {camera}.")
            # Get last 24 hours of recordings segments
            segment_count = int(
                7200 / self.avg_segment_sizes[camera]["segment_duration"]
            )
            recordings: Recordings = (
                Recordings.select()
                .where(Recordings.camera == camera)
                .order_by(Recordings.start_time.asc())
                .limit(segment_count * 12)
            )

            # Get retained events to check against
            retained_events: Event = (
                Event.select()
                .where(
                    Event.camera == camera,
                    Event.retain_indefinitely == True,
                    Event.has_clip,
                )
                .order_by(Event.start_time)
                .objects()
            )

            deleted_recordings: set[str] = self.delete_recording_segments(
                recordings, retained_events, segment_count
            )

            # check if 2 hours of segments were deleted from the 24 retrieved
            if len(deleted_recordings) < segment_count:
                # get the rest of the recording segments to look through
                recordings: Recordings = (
                    Recordings.select()
                    .where(Recordings.camera == camera)
                    .order_by(Recordings.start_time.asc())
                )
                second_run: set[str] = self.delete_recording_segments(
                    recordings, retained_events, segment_count
                )
                deleted_recordings = deleted_recordings.union(second_run)

            # check if the 2 hour quota is still not met
            if len(deleted_recordings) < segment_count:
                recordings: Recordings = (
                    Recordings.select()
                    .where(Recordings.camera == camera)
                    .order_by(Recordings.start_time.asc())
                )
                # delete segments including retained events
                last_run: set[str] = self.delete_recording_segments(
                    recordings, [], segment_count
                )
                deleted_recordings = deleted_recordings.union(last_run)

            logger.debug(f"Expiring {len(deleted_recordings)} recordings")
            # delete up to 100,000 at a time
            max_deletes = 100000
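The hunk ends at max_deletes, but the preceding comment indicates the collected ids are then removed from the database in chunks of up to 100,000 rows. A sketch of how such a chunked delete is commonly written with peewee, reusing deleted_recordings, max_deletes, and Recordings from the hunk above; this is an assumption about the continuation, not code taken from the commit:

            # Sketch (assumed continuation): remove the matching rows in chunks,
            # since the files were already unlinked in delete_recording_segments.
            deleted_list = list(deleted_recordings)
            for i in range(0, len(deleted_list), max_deletes):
                chunk = deleted_list[i : i + max_deletes]
                # Recordings.id << chunk is peewee's IN-list operator
                Recordings.delete().where(Recordings.id << chunk).execute()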