Compare commits

...

3 Commits

Author SHA1 Message Date
Patrick Decat
cbb582c2d2
Merge 7ec1d5d2c6 into cfeb86646f 2026-01-17 21:57:49 -03:00
Kirill Kulakov
cfeb86646f
fix(recording): handle unexpected filenames in cache maintainer to prevent crash (#21676)
* fix(recording): handle unexpected filenames in cache maintainer to prevent crash

* test(recording): add test for maintainer cache file parsing

* Prevent log spam from unexpected cache files

Addresses PR review feedback: Add deduplication to prevent warning
messages from being logged repeatedly for the same unexpected file
in the cache directory. Each unexpected filename is only logged once
per RecordingMaintainer instance lifecycle.

Also adds test to verify warning is only emitted once per filename.

* Fix code formatting for test_maintainer.py

* fixes + ruff
2026-01-16 19:23:23 -07:00
Patrick Decat
7ec1d5d2c6
feat: add max_size to recording settings
Resolves #9275
2025-12-31 19:34:12 +01:00
8 changed files with 223 additions and 4 deletions


@ -510,6 +510,12 @@ record:
# Optional: Number of minutes to wait between cleanup runs (default: shown below)
# This can be used to reduce the frequency of deleting recording segments from disk if you want to minimize i/o
expire_interval: 60
# Optional: Maximum size of recordings in MB or string format (e.g. 10GB). (default: shown below)
# This serves as a hard limit for the size of the recordings for this camera.
# If the total size of recordings exceeds this limit, the oldest recordings will be deleted
# until the total size is below the limit, regardless of retention settings.
# 0 means no limit.
max_size: 0
# Optional: Two-way sync recordings database with disk on startup and once a day (default: shown below).
sync_recordings: False
# Optional: Continuous retention settings
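
For reference, a minimal sketch of how the size strings documented above are interpreted. It relies only on the parse_size_to_mb helper this PR adds in frigate/util/size.py (binary multipliers, result expressed in MB); the values shown follow from that implementation.

from frigate.util.size import parse_size_to_mb

parse_size_to_mb("10GB")    # 10240.0 -> 10 * 1024 MB
parse_size_to_mb("500MB")   # 500.0
parse_size_to_mb("2048KB")  # 2.0
parse_size_to_mb("10")      # 10.0 -> bare numbers are treated as MB

So a max_size: 10GB entry in the record block above is stored internally as 10240 MB once the field validator below has run (see the config test further down).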


@ -1,10 +1,11 @@
from enum import Enum
from typing import Optional
from typing import Optional, Union
from pydantic import Field
from pydantic import Field, field_validator
from frigate.const import MAX_PRE_CAPTURE
from frigate.review.types import SeverityEnum
from frigate.util.size import parse_size_to_mb
from ..base import FrigateBaseModel
@ -81,6 +82,10 @@ class RecordConfig(FrigateBaseModel):
default=60,
title="Number of minutes to wait between cleanup runs.",
)
max_size: Union[float, str] = Field(
default=0,
title="Maximum size of recordings in MB or string format (e.g. 10GB).",
)
continuous: RecordRetainConfig = Field(
default_factory=RecordRetainConfig,
title="Continuous recording retention settings.",
@ -104,6 +109,16 @@ class RecordConfig(FrigateBaseModel):
default=None, title="Keep track of original state of recording."
)
@field_validator("max_size", mode="before")
@classmethod
def parse_max_size(cls, v: Union[float, str], info: object) -> float:
if isinstance(v, str):
try:
return parse_size_to_mb(v)
except ValueError:
raise ValueError(f"Invalid size string: {v}")
return v
@property
def event_pre_capture(self) -> int:
return max(


@ -20,6 +20,17 @@ from frigate.util.time import get_tomorrow_at_time
logger = logging.getLogger(__name__)
def get_directory_size(directory: str) -> float:
"""Get the size of a directory in MB."""
total_size = 0
for dirpath, dirnames, filenames in os.walk(directory):
for f in filenames:
fp = os.path.join(dirpath, f)
if not os.path.islink(fp):
total_size += os.path.getsize(fp)
return total_size / 1000000
class RecordingCleanup(threading.Thread):
"""Cleanup existing recordings based on retention config."""
@ -120,6 +131,7 @@ class RecordingCleanup(threading.Thread):
Recordings.objects,
Recordings.motion,
Recordings.dBFS,
Recordings.segment_size,
)
.where(
(Recordings.camera == config.name)
@ -206,6 +218,10 @@ class RecordingCleanup(threading.Thread):
Recordings.id << deleted_recordings_list[i : i + max_deletes]
).execute()
# Check if we need to enforce max_size
if config.record.max_size > 0:
self.enforce_max_size(config, deleted_recordings)
previews: list[Previews] = (
Previews.select(
Previews.id,
@ -266,6 +282,52 @@ class RecordingCleanup(threading.Thread):
Previews.id << deleted_previews_list[i : i + max_deletes]
).execute()
def enforce_max_size(
self, config: CameraConfig, deleted_recordings: set[str]
) -> None:
"""Ensure that the camera recordings do not exceed the max size."""
# Get all recordings for this camera
recordings: Recordings = (
Recordings.select(
Recordings.id,
Recordings.path,
Recordings.segment_size,
)
.where(
(Recordings.camera == config.name)
& (Recordings.id.not_in(list(deleted_recordings)))
)
.order_by(Recordings.start_time)
.namedtuples()
.iterator()
)
total_size = 0
recordings_list = []
for recording in recordings:
recordings_list.append(recording)
total_size += recording.segment_size
# If the total size is less than the max size, we are good
if total_size <= config.record.max_size:
return
# Delete recordings until we are under the max size
recordings_to_delete = []
for recording in recordings_list:
total_size -= recording.segment_size
recordings_to_delete.append(recording.id)
Path(recording.path).unlink(missing_ok=True)
if total_size <= config.record.max_size:
break
# Delete from database
max_deletes = 100000
for i in range(0, len(recordings_to_delete), max_deletes):
Recordings.delete().where(
Recordings.id << recordings_to_delete[i : i + max_deletes]
).execute()
def expire_recordings(self) -> None:
"""Delete recordings based on retention config."""
logger.debug("Start expire recordings.")
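
To make the enforce_max_size eviction added above concrete, here is a small worked example with hypothetical numbers (three 500 MB segments against an 800 MB limit); the loop mirrors the subtract-then-check logic of the new method, which is only entered when the total already exceeds max_size.

segments = [("a", 500.0), ("b", 500.0), ("c", 500.0)]  # (id, segment_size in MB), oldest first
max_size = 800.0
total_size = sum(size for _, size in segments)  # 1500.0 MB, over the limit
recordings_to_delete = []
for segment_id, size in segments:
    total_size -= size
    recordings_to_delete.append(segment_id)
    if total_size <= max_size:
        break
# recordings_to_delete == ["a", "b"]; 500.0 MB remains, under the 800 MB limit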


@ -97,6 +97,7 @@ class RecordingMaintainer(threading.Thread):
self.object_recordings_info: dict[str, list] = defaultdict(list)
self.audio_recordings_info: dict[str, list] = defaultdict(list)
self.end_time_cache: dict[str, Tuple[datetime.datetime, float]] = {}
self.unexpected_cache_files_logged: bool = False
async def move_files(self) -> None:
cache_files = [
@ -112,7 +113,14 @@ class RecordingMaintainer(threading.Thread):
for cache in cache_files:
cache_path = os.path.join(CACHE_DIR, cache)
basename = os.path.splitext(cache)[0]
camera, date = basename.rsplit("@", maxsplit=1)
try:
camera, date = basename.rsplit("@", maxsplit=1)
except ValueError:
if not self.unexpected_cache_files_logged:
logger.warning("Skipping unexpected files in cache")
self.unexpected_cache_files_logged = True
continue
start_time = datetime.datetime.strptime(
date, CACHE_SEGMENT_FORMAT
).astimezone(datetime.timezone.utc)
@ -164,7 +172,13 @@ class RecordingMaintainer(threading.Thread):
cache_path = os.path.join(CACHE_DIR, cache)
basename = os.path.splitext(cache)[0]
camera, date = basename.rsplit("@", maxsplit=1)
try:
camera, date = basename.rsplit("@", maxsplit=1)
except ValueError:
if not self.unexpected_cache_files_logged:
logger.warning("Skipping unexpected files in cache")
self.unexpected_cache_files_logged = True
continue
# important that start_time is utc because recordings are stored and compared in utc
start_time = datetime.datetime.strptime(


@ -429,6 +429,29 @@ class TestConfig(unittest.TestCase):
frigate_config = FrigateConfig(**config)
assert "-rtsp_transport" in frigate_config.cameras["back"].ffmpeg_cmds[0]["cmd"]
def test_record_max_size_validation(self):
config = {
"mqtt": {"host": "mqtt"},
"record": {"max_size": "10GB"},
"cameras": {
"back": {
"ffmpeg": {
"inputs": [
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
frigate_config = FrigateConfig(**config)
assert frigate_config.record.max_size == 10240
def test_ffmpeg_params_global(self):
config = {
"ffmpeg": {"input_args": "-re"},


@ -0,0 +1,66 @@
import sys
import unittest
from unittest.mock import MagicMock, patch
# Mock complex imports before importing maintainer
sys.modules["frigate.comms.inter_process"] = MagicMock()
sys.modules["frigate.comms.detections_updater"] = MagicMock()
sys.modules["frigate.comms.recordings_updater"] = MagicMock()
sys.modules["frigate.config.camera.updater"] = MagicMock()
# Now import the class under test
from frigate.config import FrigateConfig # noqa: E402
from frigate.record.maintainer import RecordingMaintainer # noqa: E402
class TestMaintainer(unittest.IsolatedAsyncioTestCase):
async def test_move_files_survives_bad_filename(self):
config = MagicMock(spec=FrigateConfig)
config.cameras = {}
stop_event = MagicMock()
maintainer = RecordingMaintainer(config, stop_event)
# We need to mock end_time_cache to avoid key errors if logic proceeds
maintainer.end_time_cache = {}
# Mock filesystem
# One bad file, one good file
files = ["bad_filename.mp4", "camera@20210101000000+0000.mp4"]
with patch("os.listdir", return_value=files):
with patch("os.path.isfile", return_value=True):
with patch(
"frigate.record.maintainer.psutil.process_iter", return_value=[]
):
with patch("frigate.record.maintainer.logger.warning") as warn:
# Mock validate_and_move_segment to avoid further logic
maintainer.validate_and_move_segment = MagicMock()
try:
await maintainer.move_files()
except ValueError as e:
if "not enough values to unpack" in str(e):
self.fail("move_files() crashed on bad filename!")
raise e
except Exception:
# Ignore other errors (like DB connection) as we only care about the unpack crash
pass
# The bad filename is encountered in multiple loops, but should only warn once.
matching = [
c
for c in warn.call_args_list
if c.args
and isinstance(c.args[0], str)
and "Skipping unexpected files in cache" in c.args[0]
]
self.assertEqual(
1,
len(matching),
f"Expected a single warning for unexpected files, got {len(matching)}",
)
if __name__ == "__main__":
unittest.main()


@ -31,3 +31,15 @@ class TestRecordRetention(unittest.TestCase):
)
assert not segment_info.should_discard_segment(RetainModeEnum.motion)
assert segment_info.should_discard_segment(RetainModeEnum.active_objects)
def test_size_utility(self):
from frigate.util.size import parse_size_to_mb
assert parse_size_to_mb("10GB") == 10240
assert parse_size_to_mb("10MB") == 10
assert parse_size_to_mb("1024KB") == 1
assert parse_size_to_mb("1048576B") == 1
assert parse_size_to_mb("10") == 10
with self.assertRaises(ValueError):
parse_size_to_mb("invalid")

frigate/util/size.py (new file, 21 lines)

@ -0,0 +1,21 @@
"""Utility for parsing size strings."""
def parse_size_to_mb(size_str: str) -> float:
"""Parse a size string to megabytes."""
size_str = size_str.strip().upper()
if size_str.endswith("TB"):
return float(size_str[:-2]) * 1024 * 1024
elif size_str.endswith("GB"):
return float(size_str[:-2]) * 1024
elif size_str.endswith("MB"):
return float(size_str[:-2])
elif size_str.endswith("KB"):
return float(size_str[:-2]) / 1024
elif size_str.endswith("B"):
return float(size_str[:-1]) / (1024 * 1024)
else:
try:
return float(size_str)
except ValueError:
raise ValueError(f"Invalid size string: {size_str}")