Merge pull request #2 from ibs0d/codex/add-missing-features-to-frigate-repository-r828wp

Fix multi-root cleanup/sync and honor configured recording roots
This commit is contained in:
ibs0d 2026-03-07 10:40:02 +11:00 committed by GitHub
commit ba045ab3cf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 306 additions and 56 deletions

View File

@ -788,7 +788,7 @@ def restart():
description="""Start an asynchronous media sync job to find and (optionally) remove orphaned media files. description="""Start an asynchronous media sync job to find and (optionally) remove orphaned media files.
Returns 202 with job details when queued, or 409 if a job is already running.""", Returns 202 with job details when queued, or 409 if a job is already running.""",
) )
def sync_media(body: MediaSyncBody = Body(...)): def sync_media(request: Request, body: MediaSyncBody = Body(...)):
"""Start async media sync job - remove orphaned files. """Start async media sync job - remove orphaned files.
Syncs specified media types: event snapshots, event thumbnails, review thumbnails, Syncs specified media types: event snapshots, event thumbnails, review thumbnails,
@ -804,7 +804,10 @@ def sync_media(body: MediaSyncBody = Body(...)):
202 Accepted with job_id, or 409 Conflict if job already running. 202 Accepted with job_id, or 409 Conflict if job already running.
""" """
job_id = start_media_sync_job( job_id = start_media_sync_job(
dry_run=body.dry_run, media_types=body.media_types, force=body.force dry_run=body.dry_run,
media_types=body.media_types,
force=body.force,
recordings_roots=request.app.frigate_config.get_recordings_paths(),
) )
if job_id is None: if job_id is None:

View File

@ -25,7 +25,6 @@ from frigate.api.defs.query.recordings_query_parameters import (
) )
from frigate.api.defs.response.generic_response import GenericResponse from frigate.api.defs.response.generic_response import GenericResponse
from frigate.api.defs.tags import Tags from frigate.api.defs.tags import Tags
from frigate.const import RECORD_DIR
from frigate.models import Event, Recordings from frigate.models import Event, Recordings
from frigate.util.time import get_dst_transitions from frigate.util.time import get_dst_transitions
@ -36,15 +35,15 @@ router = APIRouter(tags=[Tags.recordings])
@router.get("/recordings/storage", dependencies=[Depends(allow_any_authenticated())]) @router.get("/recordings/storage", dependencies=[Depends(allow_any_authenticated())])
def get_recordings_storage_usage(request: Request): def get_recordings_storage_usage(request: Request):
recording_stats = request.app.stats_emitter.get_latest_stats()["service"][ storage_stats = request.app.stats_emitter.get_latest_stats()["service"]["storage"]
"storage"
][RECORD_DIR]
if not recording_stats: recording_paths = request.app.frigate_config.get_recordings_paths()
recording_stats = [storage_stats.get(path, {}) for path in recording_paths]
total_mb = sum(stat.get("total", 0) for stat in recording_stats)
if total_mb == 0:
return JSONResponse({}) return JSONResponse({})
total_mb = recording_stats["total"]
camera_usages: dict[str, dict] = ( camera_usages: dict[str, dict] = (
request.app.storage_maintainer.calculate_camera_usages() request.app.storage_maintainer.calculate_camera_usages()
) )

View File

@ -131,6 +131,8 @@ class FrigateApp:
EXPORT_DIR, EXPORT_DIR,
] ]
dirs.extend(self.config.get_recordings_paths())
if self.config.face_recognition.enabled: if self.config.face_recognition.enabled:
dirs.append(FACE_DIR) dirs.append(FACE_DIR)

View File

@ -2,7 +2,7 @@ import os
from enum import Enum from enum import Enum
from typing import Optional from typing import Optional
from pydantic import Field, PrivateAttr, model_validator from pydantic import Field, PrivateAttr, field_validator, model_validator
from frigate.const import CACHE_DIR, CACHE_SEGMENT_FORMAT, REGEX_CAMERA_NAME from frigate.const import CACHE_DIR, CACHE_SEGMENT_FORMAT, REGEX_CAMERA_NAME
from frigate.ffmpeg_presets import ( from frigate.ffmpeg_presets import (
@ -72,6 +72,20 @@ class CameraConfig(FrigateBaseModel):
enabled: bool = Field(default=True, title="Enabled", description="Enabled") enabled: bool = Field(default=True, title="Enabled", description="Enabled")
path: str = Field(
default="/media/frigate/recordings",
title="Recordings path",
description="Absolute base path for this camera's recordings. Recordings are stored in camera/date subdirectories under this path.",
)
@field_validator("path")
@classmethod
def validate_path(cls, value: str) -> str:
    """Reject relative recordings paths and normalize trailing slashes.

    Returns the path with any trailing "/" stripped; the filesystem root
    itself ("/") is preserved as-is.
    """
    if not os.path.isabs(value):
        raise ValueError("Camera path must be an absolute path.")
    normalized = value.rstrip("/")
    return normalized if normalized else "/"
# Options with global fallback # Options with global fallback
audio: AudioConfig = Field( audio: AudioConfig = Field(
default_factory=AudioConfig, default_factory=AudioConfig,

View File

@ -19,7 +19,7 @@ from pydantic import (
from ruamel.yaml import YAML from ruamel.yaml import YAML
from typing_extensions import Self from typing_extensions import Self
from frigate.const import REGEX_JSON from frigate.const import RECORD_DIR, REGEX_JSON
from frigate.detectors import DetectorConfig, ModelConfig from frigate.detectors import DetectorConfig, ModelConfig
from frigate.detectors.detector_config import BaseDetectorConfig from frigate.detectors.detector_config import BaseDetectorConfig
from frigate.plus import PlusApi from frigate.plus import PlusApi
@ -942,6 +942,12 @@ class FrigateConfig(FrigateBaseModel):
raise ValueError("Zones cannot share names with cameras") raise ValueError("Zones cannot share names with cameras")
return v return v
def get_camera_recordings_path(self, camera: str) -> str:
    """Return the configured recordings base path for the named camera."""
    camera_config = self.cameras[camera]
    return camera_config.path
def get_recordings_paths(self) -> list[str]:
    """Return the sorted, de-duplicated recordings roots across all cameras."""
    unique_roots = {camera_config.path for camera_config in self.cameras.values()}
    return sorted(unique_roots)
@classmethod @classmethod
def load(cls, **kwargs): def load(cls, **kwargs):
"""Loads the Frigate config file, runs migrations, and creates the config object.""" """Loads the Frigate config file, runs migrations, and creates the config object."""

View File

@ -29,6 +29,7 @@ class MediaSyncJob(Job):
dry_run: bool = False dry_run: bool = False
media_types: list[str] = field(default_factory=lambda: ["all"]) media_types: list[str] = field(default_factory=lambda: ["all"])
force: bool = False force: bool = False
recordings_roots: list[str] = field(default_factory=list)
class MediaSyncRunner(threading.Thread): class MediaSyncRunner(threading.Thread):
@ -59,6 +60,7 @@ class MediaSyncRunner(threading.Thread):
dry_run=self.job.dry_run, dry_run=self.job.dry_run,
media_types=self.job.media_types, media_types=self.job.media_types,
force=self.job.force, force=self.job.force,
recordings_roots=self.job.recordings_roots,
) )
# Store results and mark as complete # Store results and mark as complete
@ -95,6 +97,7 @@ def start_media_sync_job(
dry_run: bool = False, dry_run: bool = False,
media_types: Optional[list[str]] = None, media_types: Optional[list[str]] = None,
force: bool = False, force: bool = False,
recordings_roots: Optional[list[str]] = None,
) -> Optional[str]: ) -> Optional[str]:
"""Start a new media sync job if none is currently running. """Start a new media sync job if none is currently running.
@ -113,6 +116,7 @@ def start_media_sync_job(
dry_run=dry_run, dry_run=dry_run,
media_types=media_types or ["all"], media_types=media_types or ["all"],
force=force, force=force,
recordings_roots=recordings_roots or [],
) )
logger.debug(f"Creating new media sync job: {job.id}") logger.debug(f"Creating new media sync job: {job.id}")

View File

@ -11,7 +11,7 @@ from pathlib import Path
from playhouse.sqlite_ext import SqliteExtDatabase from playhouse.sqlite_ext import SqliteExtDatabase
from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE
from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus
from frigate.util.builtin import clear_and_unlink from frigate.util.builtin import clear_and_unlink
from frigate.util.media import remove_empty_directories from frigate.util.media import remove_empty_directories
@ -379,5 +379,13 @@ class RecordingCleanup(threading.Thread):
if counter == 0: if counter == 0:
self.clean_tmp_clips() self.clean_tmp_clips()
maybe_empty_dirs = self.expire_recordings() maybe_empty_dirs = self.expire_recordings()
remove_empty_directories(Path(RECORD_DIR), maybe_empty_dirs) for recordings_root in self.config.get_recordings_paths():
remove_empty_directories(
Path(recordings_root),
[
path
for path in maybe_empty_dirs
if Path(recordings_root) in path.parents
],
)
self.truncate_wal() self.truncate_wal()

View File

@ -34,7 +34,6 @@ from frigate.const import (
INSERT_MANY_RECORDINGS, INSERT_MANY_RECORDINGS,
MAX_SEGMENT_DURATION, MAX_SEGMENT_DURATION,
MAX_SEGMENTS_IN_CACHE, MAX_SEGMENTS_IN_CACHE,
RECORD_DIR,
) )
from frigate.models import Recordings, ReviewSegment from frigate.models import Recordings, ReviewSegment
from frigate.review.types import SeverityEnum from frigate.review.types import SeverityEnum
@ -585,7 +584,7 @@ class RecordingMaintainer(threading.Thread):
# directory will be in utc due to start_time being in utc # directory will be in utc due to start_time being in utc
directory = os.path.join( directory = os.path.join(
RECORD_DIR, self.config.get_camera_recordings_path(camera),
start_time.strftime("%Y-%m-%d/%H"), start_time.strftime("%Y-%m-%d/%H"),
camera, camera,
) )

View File

@ -12,7 +12,7 @@ import requests
from requests.exceptions import RequestException from requests.exceptions import RequestException
from frigate.config import FrigateConfig from frigate.config import FrigateConfig
from frigate.const import CACHE_DIR, CLIPS_DIR, RECORD_DIR from frigate.const import CACHE_DIR, CLIPS_DIR
from frigate.data_processing.types import DataProcessorMetrics from frigate.data_processing.types import DataProcessorMetrics
from frigate.object_detection.base import ObjectDetectProcess from frigate.object_detection.base import ObjectDetectProcess
from frigate.types import StatsTrackingTypes from frigate.types import StatsTrackingTypes
@ -483,7 +483,7 @@ def stats_snapshot(
"last_updated": int(time.time()), "last_updated": int(time.time()),
} }
for path in [RECORD_DIR, CLIPS_DIR, CACHE_DIR]: for path in [*config.get_recordings_paths(), CLIPS_DIR, CACHE_DIR]:
try: try:
storage_stats = shutil.disk_usage(path) storage_stats = shutil.disk_usage(path)
except (FileNotFoundError, OSError): except (FileNotFoundError, OSError):

View File

@ -8,7 +8,7 @@ from pathlib import Path
from peewee import SQL, fn from peewee import SQL, fn
from frigate.config import FrigateConfig from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, REPLAY_CAMERA_PREFIX from frigate.const import REPLAY_CAMERA_PREFIX
from frigate.models import Event, Recordings from frigate.models import Event, Recordings
from frigate.util.builtin import clear_and_unlink from frigate.util.builtin import clear_and_unlink
@ -103,26 +103,41 @@ class StorageMaintainer(threading.Thread):
return usages return usages
def check_storage_needs_cleanup(self) -> bool: def _get_path_bandwidths(self) -> dict[str, float]:
"""Return if storage needs cleanup.""" bandwidth_per_path: dict[str, float] = {}
for camera, stats in self.camera_storage_stats.items():
path = self.config.get_camera_recordings_path(camera)
bandwidth_per_path[path] = bandwidth_per_path.get(path, 0) + stats.get(
"bandwidth", 0
)
return bandwidth_per_path
def check_storage_needs_cleanup(self) -> str | None:
"""Return recordings root path that needs cleanup, if any."""
# currently runs cleanup if less than 1 hour of space is left # currently runs cleanup if less than 1 hour of space is left
# disk_usage should not spin up disks # disk_usage should not spin up disks
hourly_bandwidth = sum( for path, hourly_bandwidth in self._get_path_bandwidths().items():
[b["bandwidth"] for b in self.camera_storage_stats.values()] try:
) remaining_storage = round(shutil.disk_usage(path).free / pow(2, 20), 1)
remaining_storage = round(shutil.disk_usage(RECORD_DIR).free / pow(2, 20), 1) except (FileNotFoundError, OSError):
logger.debug( continue
f"Storage cleanup check: {hourly_bandwidth} hourly with remaining storage: {remaining_storage}."
)
return remaining_storage < hourly_bandwidth
def reduce_storage_consumption(self) -> None: logger.debug(
f"Storage cleanup check: {hourly_bandwidth} hourly with remaining storage: {remaining_storage} for path {path}."
)
if remaining_storage < hourly_bandwidth:
return path
return None
def reduce_storage_consumption(self, recordings_root: str) -> None:
"""Remove oldest hour of recordings.""" """Remove oldest hour of recordings."""
logger.debug("Starting storage cleanup.") logger.debug("Starting storage cleanup.")
deleted_segments_size = 0 deleted_segments_size = 0
hourly_bandwidth = sum( hourly_bandwidth = self._get_path_bandwidths().get(recordings_root, 0)
[b["bandwidth"] for b in self.camera_storage_stats.values()]
)
recordings: Recordings = ( recordings: Recordings = (
Recordings.select( Recordings.select(
@ -133,6 +148,7 @@ class StorageMaintainer(threading.Thread):
Recordings.segment_size, Recordings.segment_size,
Recordings.path, Recordings.path,
) )
.where(Recordings.path.startswith(f"{recordings_root}/"))
.order_by(Recordings.start_time.asc()) .order_by(Recordings.start_time.asc())
.namedtuples() .namedtuples()
.iterator() .iterator()
@ -207,6 +223,7 @@ class StorageMaintainer(threading.Thread):
Recordings.path, Recordings.path,
Recordings.segment_size, Recordings.segment_size,
) )
.where(Recordings.path.startswith(f"{recordings_root}/"))
.order_by(Recordings.start_time.asc()) .order_by(Recordings.start_time.asc())
.namedtuples() .namedtuples()
.iterator() .iterator()
@ -288,10 +305,11 @@ class StorageMaintainer(threading.Thread):
self.calculate_camera_bandwidth() self.calculate_camera_bandwidth()
logger.debug(f"Default camera bandwidths: {self.camera_storage_stats}.") logger.debug(f"Default camera bandwidths: {self.camera_storage_stats}.")
if self.check_storage_needs_cleanup(): cleanup_root = self.check_storage_needs_cleanup()
if cleanup_root:
logger.info( logger.info(
"Less than 1 hour of recording space left, running storage maintenance..." f"Less than 1 hour of recording space left for {cleanup_root}, running storage maintenance..."
) )
self.reduce_storage_consumption() self.reduce_storage_consumption(cleanup_root)
logger.info("Exiting storage maintainer...") logger.info("Exiting storage maintainer...")

View File

@ -68,6 +68,44 @@ class TestConfig(unittest.TestCase):
assert frigate_config.detectors["cpu"].type == DetectorTypeEnum.cpu assert frigate_config.detectors["cpu"].type == DetectorTypeEnum.cpu
assert frigate_config.detectors["cpu"].model.width == 320 assert frigate_config.detectors["cpu"].model.width == 320
def test_default_camera_recordings_path(self):
    """A camera with no explicit path falls back to the default root."""
    frigate_config = FrigateConfig(**self.minimal)
    back_camera = frigate_config.cameras["back"]
    assert back_camera.path == "/media/frigate/recordings"
def test_camera_recordings_path_must_be_absolute(self):
    """A relative camera recordings path is rejected at validation time."""
    config = deep_merge(
        self.minimal,
        {"cameras": {"back": {"path": "recordings"}}},
        override=True,
    )
    with self.assertRaises(ValidationError):
        FrigateConfig(**config)
def test_get_recordings_paths_uses_only_configured_paths(self):
    """get_recordings_paths returns exactly the configured camera roots."""
    side_camera = {
        "path": "/video3",
        "ffmpeg": {
            "inputs": [
                {"path": "rtsp://10.0.0.2:554/video", "roles": ["detect"]}
            ]
        },
        "detect": {"height": 1080, "width": 1920, "fps": 5},
    }
    config = deep_merge(
        self.minimal,
        {"cameras": {"back": {"path": "/video2"}, "side": side_camera}},
        override=True,
    )
    frigate_config = FrigateConfig(**config)
    assert frigate_config.get_recordings_paths() == ["/video2", "/video3"]
@patch("frigate.detectors.detector_config.load_labels") @patch("frigate.detectors.detector_config.load_labels")
def test_detector_custom_model_path(self, mock_labels): def test_detector_custom_model_path(self, mock_labels):
mock_labels.return_value = {} mock_labels.return_value = {}

View File

@ -0,0 +1,86 @@
import os
import tempfile
import unittest
from peewee_migrate import Router
from playhouse.sqlite_ext import SqliteExtDatabase
from playhouse.sqliteq import SqliteQueueDatabase
from frigate.models import Recordings, RecordingsToDelete
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
from frigate.util.media import sync_recordings
class TestMediaSync(unittest.TestCase):
    """Exercise sync_recordings against multiple configured recordings roots."""

    def setUp(self):
        # Run the schema migrations against a throwaway sqlite file, then
        # bind the models to a queue database as the application does.
        migrate_db = SqliteExtDatabase(TEST_DB)
        router = Router(migrate_db)
        router.run()
        migrate_db.close()

        self.db = SqliteQueueDatabase(TEST_DB)
        models = [Recordings, RecordingsToDelete]
        self.db.bind(models)

        # Two independent recordings roots. Use TemporaryDirectory with
        # addCleanup so the directories are removed even when a test fails
        # (plain mkdtemp() would leak a temp dir on every run).
        tmp_a = tempfile.TemporaryDirectory()
        self.addCleanup(tmp_a.cleanup)
        self.root_a = tmp_a.name
        tmp_b = tempfile.TemporaryDirectory()
        self.addCleanup(tmp_b.cleanup)
        self.root_b = tmp_b.name

    def tearDown(self):
        if not self.db.is_closed():
            self.db.close()
        # Best-effort removal of sqlite artifacts. The try sits inside the
        # loop so one missing/locked file does not abort the remaining
        # removals.
        for file in TEST_DB_CLEANUPS:
            try:
                os.remove(file)
            except OSError:
                pass

    def test_sync_recordings_scans_all_configured_roots(self):
        """Files under every configured root are scanned; untracked files are orphans."""
        recording_path = os.path.join(self.root_a, "recording_a.mp4")
        orphan_path = os.path.join(self.root_b, "orphan_b.mp4")

        # Create one tracked file and one orphan on disk.
        with open(recording_path, "w"):
            pass
        with open(orphan_path, "w"):
            pass

        # Only recording_a is registered in the database.
        Recordings.insert(
            id="rec_a",
            camera="front_door",
            path=recording_path,
            start_time=1,
            end_time=2,
            duration=1,
            motion=True,
            objects=True,
            segment_size=1,
        ).execute()

        result = sync_recordings(
            dry_run=True,
            force=True,
            recordings_roots=[self.root_a, self.root_b],
        )

        assert result.files_checked == 2
        assert result.orphans_found == 1
        assert orphan_path in result.orphan_paths

    def test_sync_recordings_does_not_scan_unconfigured_roots(self):
        """Files outside the configured roots are never counted or flagged."""
        orphan_path = os.path.join(self.root_b, "orphan_b.mp4")
        with open(orphan_path, "w"):
            pass

        # root_b is deliberately omitted from recordings_roots.
        result = sync_recordings(
            dry_run=True,
            force=True,
            recordings_roots=[self.root_a],
        )

        assert result.files_checked == 0
        assert result.orphans_found == 0
# Allow running this test module directly (outside the pytest/CI harness).
if __name__ == "__main__":
    unittest.main()

View File

@ -200,7 +200,7 @@ class TestHttp(unittest.TestCase):
) )
storage.calculate_camera_bandwidth() storage.calculate_camera_bandwidth()
storage.reduce_storage_consumption() storage.reduce_storage_consumption(config.get_camera_recordings_path("front_door"))
with self.assertRaises(DoesNotExist): with self.assertRaises(DoesNotExist):
assert Recordings.get(Recordings.id == rec_k_id) assert Recordings.get(Recordings.id == rec_k_id)
assert Recordings.get(Recordings.id == rec_k2_id) assert Recordings.get(Recordings.id == rec_k2_id)
@ -255,12 +255,79 @@ class TestHttp(unittest.TestCase):
) )
storage.calculate_camera_bandwidth() storage.calculate_camera_bandwidth()
storage.reduce_storage_consumption() storage.reduce_storage_consumption(config.get_camera_recordings_path("front_door"))
assert Recordings.get(Recordings.id == rec_k_id) assert Recordings.get(Recordings.id == rec_k_id)
assert Recordings.get(Recordings.id == rec_k2_id) assert Recordings.get(Recordings.id == rec_k2_id)
assert Recordings.get(Recordings.id == rec_k3_id) assert Recordings.get(Recordings.id == rec_k3_id)
def test_storage_cleanup_only_for_overflowed_root(self):
    """Ensure cleanup removes recordings only from the targeted recordings root."""
    # TemporaryDirectory + addCleanup so the roots are removed even when the
    # test fails (plain mkdtemp() leaked a temp dir on every run).
    tmp_a = tempfile.TemporaryDirectory()
    self.addCleanup(tmp_a.cleanup)
    root_a = tmp_a.name
    tmp_b = tempfile.TemporaryDirectory()
    self.addCleanup(tmp_b.cleanup)
    root_b = tmp_b.name

    # Two cameras, each recording into its own root.
    config = FrigateConfig(
        **{
            "mqtt": {"host": "mqtt"},
            "cameras": {
                "front_door": {
                    "path": root_a,
                    "ffmpeg": {
                        "inputs": [
                            {"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
                        ]
                    },
                    "detect": {"height": 1080, "width": 1920, "fps": 5},
                },
                "back_door": {
                    "path": root_b,
                    "ffmpeg": {
                        "inputs": [
                            {"path": "rtsp://10.0.0.2:554/video", "roles": ["detect"]}
                        ]
                    },
                    "detect": {"height": 1080, "width": 1920, "fps": 5},
                },
            },
        }
    )
    storage = StorageMaintainer(config, MagicMock())

    t0 = datetime.datetime.now().timestamp()
    delete_a = "a.delete"
    keep_b = "b.keep"

    # One recording per root: the one under root_a should be reclaimed,
    # the one under root_b must survive.
    _insert_mock_recording(
        delete_a,
        os.path.join(root_a, f"{delete_a}.tmp"),
        t0 - 100,
        t0 - 90,
        camera="front_door",
        seg_size=8,
        seg_dur=10,
    )
    _insert_mock_recording(
        keep_b,
        os.path.join(root_b, f"{keep_b}.tmp"),
        t0 - 80,
        t0 - 70,
        camera="back_door",
        seg_size=8,
        seg_dur=10,
    )

    # Only front_door reports bandwidth, so only root_a is over budget.
    storage.camera_storage_stats = {
        "front_door": {"bandwidth": 4, "needs_refresh": False},
        "back_door": {"bandwidth": 0, "needs_refresh": False},
    }

    storage.reduce_storage_consumption(root_a)

    with self.assertRaises(DoesNotExist):
        Recordings.get(Recordings.id == delete_a)
    assert Recordings.get(Recordings.id == keep_b)
def _insert_mock_event( def _insert_mock_event(
id: str, id: str,
start: int, start: int,

View File

@ -84,7 +84,10 @@ def remove_empty_directories(root: Path, paths: Iterable[Path]) -> None:
def sync_recordings( def sync_recordings(
limited: bool = False, dry_run: bool = False, force: bool = False limited: bool = False,
dry_run: bool = False,
force: bool = False,
recordings_roots: list[str] | None = None,
) -> SyncResult: ) -> SyncResult:
"""Sync recordings between the database and disk using the SyncResult format.""" """Sync recordings between the database and disk using the SyncResult format."""
@ -110,6 +113,7 @@ def sync_recordings(
page_size = 1000 page_size = 1000
num_pages = (recordings_count + page_size - 1) // page_size num_pages = (recordings_count + page_size - 1) // page_size
recordings_to_delete: list[dict] = [] recordings_to_delete: list[dict] = []
configured_roots = set(recordings_roots or [RECORD_DIR])
for page in range(num_pages): for page in range(num_pages):
for recording in recordings_query.paginate(page, page_size): for recording in recordings_query.paginate(page, page_size):
@ -175,22 +179,19 @@ def sync_recordings(
return result return result
# Only try to cleanup files if db cleanup was successful or dry_run # Only try to cleanup files if db cleanup was successful or dry_run
if limited: # get recording files on disk and put them in a set
# get recording files from last 36 hours files_on_disk = set()
hour_check = f"{RECORD_DIR}/{check_point.strftime('%Y-%m-%d/%H')}" for recordings_root in configured_roots:
files_on_disk = { for root, _, files in os.walk(recordings_root):
os.path.join(root, file) for file in files:
for root, _, files in os.walk(RECORD_DIR) file_path = os.path.join(root, file)
for file in files
if root > hour_check if limited:
} file_mtime = os.path.getmtime(file_path)
else: if file_mtime < check_point.timestamp():
# get all recordings files on disk and put them in a set continue
files_on_disk = {
os.path.join(root, file) files_on_disk.add(file_path)
for root, _, files in os.walk(RECORD_DIR)
for file in files
}
result.files_checked = len(files_on_disk) result.files_checked = len(files_on_disk)
@ -759,7 +760,10 @@ class MediaSyncResults:
def sync_all_media( def sync_all_media(
dry_run: bool = False, media_types: list[str] = ["all"], force: bool = False dry_run: bool = False,
media_types: list[str] = ["all"],
force: bool = False,
recordings_roots: list[str] | None = None,
) -> MediaSyncResults: ) -> MediaSyncResults:
"""Sync specified media types with the database. """Sync specified media types with the database.
@ -797,7 +801,9 @@ def sync_all_media(
results.exports = sync_exports(dry_run=dry_run, force=force) results.exports = sync_exports(dry_run=dry_run, force=force)
if sync_all or "recordings" in media_types: if sync_all or "recordings" in media_types:
results.recordings = sync_recordings(dry_run=dry_run, force=force) results.recordings = sync_recordings(
dry_run=dry_run, force=force, recordings_roots=recordings_roots
)
logger.info( logger.info(
f"Media sync complete: checked {results.total_files_checked} files, " f"Media sync complete: checked {results.total_files_checked} files, "