Mirror of https://github.com/blakeblackshear/frigate.git
commit 7ec1d5d2c6 (parent b5d2f86a9b)
@@ -510,6 +510,12 @@ record:
   # Optional: Number of minutes to wait between cleanup runs (default: shown below)
   # This can be used to reduce the frequency of deleting recording segments from disk if you want to minimize i/o
   expire_interval: 60
+  # Optional: Maximum size of recordings in MB or string format (e.g. 10GB). (default: shown below)
+  # This serves as a hard limit for the size of the recordings for this camera.
+  # If the total size of recordings exceeds this limit, the oldest recordings will be deleted
+  # until the total size is below the limit, regardless of retention settings.
+  # 0 means no limit.
+  max_size: 0
   # Optional: Two-way sync recordings database with disk on startup and once a day (default: shown below).
   sync_recordings: False
   # Optional: Continuous retention settings
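For illustration only (not part of the diff): a config that caps recordings at 10 GB using the string form handled by the validator later in this diff could look like the sketch below; the default of 0 leaves recordings unlimited.

    record:
      enabled: true
      max_size: 10GB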
@@ -1,10 +1,11 @@
 from enum import Enum
-from typing import Optional
+from typing import Optional, Union
 
-from pydantic import Field
+from pydantic import Field, field_validator
 
 from frigate.const import MAX_PRE_CAPTURE
 from frigate.review.types import SeverityEnum
+from frigate.util.size import parse_size_to_mb
 
 from ..base import FrigateBaseModel
 
@@ -81,6 +82,10 @@ class RecordConfig(FrigateBaseModel):
         default=60,
         title="Number of minutes to wait between cleanup runs.",
     )
+    max_size: Union[float, str] = Field(
+        default=0,
+        title="Maximum size of recordings in MB or string format (e.g. 10GB).",
+    )
     continuous: RecordRetainConfig = Field(
         default_factory=RecordRetainConfig,
         title="Continuous recording retention settings.",
@@ -104,6 +109,16 @@ class RecordConfig(FrigateBaseModel):
         default=None, title="Keep track of original state of recording."
     )
 
+    @field_validator("max_size", mode="before")
+    @classmethod
+    def parse_max_size(cls, v: Union[float, str], info: object) -> float:
+        if isinstance(v, str):
+            try:
+                return parse_size_to_mb(v)
+            except ValueError:
+                raise ValueError(f"Invalid size string: {v}")
+        return v
+
     @property
     def event_pre_capture(self) -> int:
         return max(
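For illustration only (not part of the diff): a stripped-down pydantic model showing what the validator above does when the config is parsed. `DemoRecordConfig` and the simplified `parse_size_to_mb` stand-in are hypothetical; the real code lives in `RecordConfig` and `frigate.util.size`.

    from typing import Union

    from pydantic import BaseModel, Field, field_validator


    def parse_size_to_mb(size_str: str) -> float:
        # Simplified stand-in for frigate.util.size.parse_size_to_mb (GB suffix only).
        size_str = size_str.strip().upper()
        if size_str.endswith("GB"):
            return float(size_str[:-2]) * 1024
        return float(size_str)


    class DemoRecordConfig(BaseModel):
        max_size: Union[float, str] = Field(default=0)

        @field_validator("max_size", mode="before")
        @classmethod
        def parse_max_size(cls, v: Union[float, str]) -> float:
            # Strings like "10GB" are converted to a float number of MB before validation.
            return parse_size_to_mb(v) if isinstance(v, str) else v


    print(DemoRecordConfig(max_size="10GB").max_size)  # 10240.0
    print(DemoRecordConfig().max_size)                 # 0 (no limit)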
@@ -20,6 +20,17 @@ from frigate.util.time import get_tomorrow_at_time
 logger = logging.getLogger(__name__)
 
 
+def get_directory_size(directory: str) -> float:
+    """Get the size of a directory in MB."""
+    total_size = 0
+    for dirpath, dirnames, filenames in os.walk(directory):
+        for f in filenames:
+            fp = os.path.join(dirpath, f)
+            if not os.path.islink(fp):
+                total_size += os.path.getsize(fp)
+    return total_size / 1000000
+
+
 class RecordingCleanup(threading.Thread):
     """Cleanup existing recordings based on retention config."""
 
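A minimal usage sketch of the helper above (the path is just an example); note that it divides by 1,000,000, so the result is in decimal megabytes.

    # Recursive size of a directory in MB, skipping symlinks.
    print(get_directory_size("/media/frigate/recordings"))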
@@ -120,6 +131,7 @@ class RecordingCleanup(threading.Thread):
                 Recordings.objects,
                 Recordings.motion,
                 Recordings.dBFS,
+                Recordings.segment_size,
             )
             .where(
                 (Recordings.camera == config.name)
@@ -206,6 +218,10 @@ class RecordingCleanup(threading.Thread):
                 Recordings.id << deleted_recordings_list[i : i + max_deletes]
             ).execute()
 
+        # Check if we need to enforce max_size
+        if config.record.max_size > 0:
+            self.enforce_max_size(config, deleted_recordings)
+
         previews: list[Previews] = (
             Previews.select(
                 Previews.id,
@@ -266,6 +282,52 @@ class RecordingCleanup(threading.Thread):
                 Previews.id << deleted_previews_list[i : i + max_deletes]
             ).execute()
 
+    def enforce_max_size(
+        self, config: CameraConfig, deleted_recordings: set[str]
+    ) -> None:
+        """Ensure that the camera recordings do not exceed the max size."""
+        # Get all recordings for this camera
+        recordings: Recordings = (
+            Recordings.select(
+                Recordings.id,
+                Recordings.path,
+                Recordings.segment_size,
+            )
+            .where(
+                (Recordings.camera == config.name)
+                & (Recordings.id.not_in(list(deleted_recordings)))
+            )
+            .order_by(Recordings.start_time)
+            .namedtuples()
+            .iterator()
+        )
+
+        total_size = 0
+        recordings_list = []
+        for recording in recordings:
+            recordings_list.append(recording)
+            total_size += recording.segment_size
+
+        # If the total size is less than the max size, we are good
+        if total_size <= config.record.max_size:
+            return
+
+        # Delete recordings until we are under the max size
+        recordings_to_delete = []
+        for recording in recordings_list:
+            total_size -= recording.segment_size
+            recordings_to_delete.append(recording.id)
+            Path(recording.path).unlink(missing_ok=True)
+            if total_size <= config.record.max_size:
+                break
+
+        # Delete from database
+        max_deletes = 100000
+        for i in range(0, len(recordings_to_delete), max_deletes):
+            Recordings.delete().where(
+                Recordings.id << recordings_to_delete[i : i + max_deletes]
+            ).execute()
+
     def expire_recordings(self) -> None:
         """Delete recordings based on retention config."""
         logger.debug("Start expire recordings.")
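For illustration only (not part of the diff): the heart of `enforce_max_size` is an oldest-first pruning pass. The standalone sketch below uses made-up segment data in place of the `Recordings` query, but follows the same logic: walk segments ordered by start time and drop them until the running total fits under the cap.

    # (recording_id, segment_size in MB), ordered oldest -> newest,
    # mirroring ORDER BY Recordings.start_time in the query above.
    segments = [("a", 60.0), ("b", 55.0), ("c", 62.0), ("d", 58.0)]
    max_size_mb = 120.0

    total = sum(size for _, size in segments)
    to_delete = []
    if total > max_size_mb:
        for rec_id, size in segments:
            total -= size
            to_delete.append(rec_id)  # the real code also unlinks the file on disk
            if total <= max_size_mb:
                break

    print(to_delete, total)  # ['a', 'b'] 120.0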
@@ -429,6 +429,29 @@ class TestConfig(unittest.TestCase):
         frigate_config = FrigateConfig(**config)
         assert "-rtsp_transport" in frigate_config.cameras["back"].ffmpeg_cmds[0]["cmd"]
 
+    def test_record_max_size_validation(self):
+        config = {
+            "mqtt": {"host": "mqtt"},
+            "record": {"max_size": "10GB"},
+            "cameras": {
+                "back": {
+                    "ffmpeg": {
+                        "inputs": [
+                            {"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
+                        ]
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
+                }
+            },
+        }
+
+        frigate_config = FrigateConfig(**config)
+        assert frigate_config.record.max_size == 10240
+
     def test_ffmpeg_params_global(self):
         config = {
             "ffmpeg": {"input_args": "-re"},
@@ -31,3 +31,15 @@ class TestRecordRetention(unittest.TestCase):
         )
         assert not segment_info.should_discard_segment(RetainModeEnum.motion)
         assert segment_info.should_discard_segment(RetainModeEnum.active_objects)
+
+    def test_size_utility(self):
+        from frigate.util.size import parse_size_to_mb
+
+        assert parse_size_to_mb("10GB") == 10240
+        assert parse_size_to_mb("10MB") == 10
+        assert parse_size_to_mb("1024KB") == 1
+        assert parse_size_to_mb("1048576B") == 1
+        assert parse_size_to_mb("10") == 10
+
+        with self.assertRaises(ValueError):
+            parse_size_to_mb("invalid")
frigate/util/size.py (new file, 21 lines)
@@ -0,0 +1,21 @@
+"""Utility for parsing size strings."""
+
+
+def parse_size_to_mb(size_str: str) -> float:
+    """Parse a size string to megabytes."""
+    size_str = size_str.strip().upper()
+    if size_str.endswith("TB"):
+        return float(size_str[:-2]) * 1024 * 1024
+    elif size_str.endswith("GB"):
+        return float(size_str[:-2]) * 1024
+    elif size_str.endswith("MB"):
+        return float(size_str[:-2])
+    elif size_str.endswith("KB"):
+        return float(size_str[:-2]) / 1024
+    elif size_str.endswith("B"):
+        return float(size_str[:-1]) / (1024 * 1024)
+    else:
+        try:
+            return float(size_str)
+        except ValueError:
+            raise ValueError(f"Invalid size string: {size_str}")
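A quick usage sketch of the new utility; the expected values match the unit test above.

    from frigate.util.size import parse_size_to_mb

    print(parse_size_to_mb("10GB"))    # 10240.0 (1 GB = 1024 MB)
    print(parse_size_to_mb("1024KB"))  # 1.0
    print(parse_size_to_mb("10"))      # 10.0 (bare numbers are treated as MB)
    parse_size_to_mb("invalid")        # raises ValueError("Invalid size string: INVALID")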