Prevent log spam from unexpected cache files

Addresses PR review feedback: add deduplication so the warning for
the same unexpected file in the cache directory is not logged
repeatedly. Each unexpected filename is logged only once per
RecordingMaintainer instance lifecycle.

Also adds a test to verify the warning is emitted only once per filename.
kirill kulakov 2026-01-15 21:51:10 -06:00
parent cff9c32db7
commit 0e27fe6530
2 changed files with 35 additions and 14 deletions
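
The change in the diffs below is a small dedup pattern: the maintainer keeps a per-instance set of cache paths that have already triggered the warning and skips logging for any path already in the set. A minimal, self-contained sketch of that pattern, assuming an illustrative stand-in class (CacheScanner and parse_cache_file are made-up names, not the real RecordingMaintainer API):

import logging
import os
from typing import Optional, Tuple

logger = logging.getLogger(__name__)


class CacheScanner:
    """Illustrative stand-in for the maintainer's cache-file parsing (not the real class)."""

    def __init__(self) -> None:
        # cache paths that have already produced a warning for this instance
        self.unexpected_cache_files_seen: set[str] = set()

    def parse_cache_file(self, cache: str) -> Optional[Tuple[str, str]]:
        basename = os.path.basename(cache)  # extension handling omitted for brevity
        try:
            # the real code splits the basename on "@" into camera and date;
            # anything without an "@" raises ValueError and is skipped
            camera, date = basename.rsplit("@", maxsplit=1)
        except ValueError:
            # warn only the first time this particular path is seen
            if cache not in self.unexpected_cache_files_seen:
                logger.warning(f"Skipping unexpected file in cache: {cache}")
                self.unexpected_cache_files_seen.add(cache)
            return None
        return camera, date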


@@ -97,6 +97,7 @@ class RecordingMaintainer(threading.Thread):
         self.object_recordings_info: dict[str, list] = defaultdict(list)
         self.audio_recordings_info: dict[str, list] = defaultdict(list)
         self.end_time_cache: dict[str, Tuple[datetime.datetime, float]] = {}
+        self.unexpected_cache_files_seen: set[str] = set()
 
     async def move_files(self) -> None:
         cache_files = [
@@ -115,7 +116,9 @@ class RecordingMaintainer(threading.Thread):
             try:
                 camera, date = basename.rsplit("@", maxsplit=1)
             except ValueError:
-                logger.warning(f"Skipping unexpected file in cache: {cache}")
+                if cache not in self.unexpected_cache_files_seen:
+                    logger.warning(f"Skipping unexpected file in cache: {cache}")
+                    self.unexpected_cache_files_seen.add(cache)
                 continue
 
             start_time = datetime.datetime.strptime(
@@ -172,7 +175,9 @@ class RecordingMaintainer(threading.Thread):
             try:
                 camera, date = basename.rsplit("@", maxsplit=1)
             except ValueError:
-                logger.warning(f"Skipping unexpected file in cache: {cache}")
+                if cache not in self.unexpected_cache_files_seen:
+                    logger.warning(f"Skipping unexpected file in cache: {cache}")
+                    self.unexpected_cache_files_seen.add(cache)
                 continue
 
             # important that start_time is utc because recordings are stored and compared in utc
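
Because the set lives on the instance, the same bad path stays silent when it is hit again later in the same run, while a fresh RecordingMaintainer would warn once more. A usage sketch against the illustrative CacheScanner above (paths and filenames are made up for the example):

scanner = CacheScanner()

scanner.parse_cache_file("/tmp/cache/bad_filename.mp4")      # logs one warning
scanner.parse_cache_file("/tmp/cache/bad_filename.mp4")      # already seen: stays silent
scanner.parse_cache_file("/tmp/cache/front@1700000000.mp4")  # parses cleanly, no warning

CacheScanner().parse_cache_file("/tmp/cache/bad_filename.mp4")  # a fresh instance warns again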


@@ -32,18 +32,34 @@ class TestMaintainer(unittest.IsolatedAsyncioTestCase):
         with patch('os.listdir', return_value=files):
             with patch('os.path.isfile', return_value=True):
                 with patch('frigate.record.maintainer.psutil.process_iter', return_value=[]):
-                    # Mock validate_and_move_segment to avoid further logic
-                    maintainer.validate_and_move_segment = MagicMock()
-
-                    try:
-                        await maintainer.move_files()
-                    except ValueError as e:
-                        if "not enough values to unpack" in str(e):
-                            self.fail("move_files() crashed on bad filename!")
-                        raise e
-                    except Exception:
-                        # Ignore other errors (like DB connection) as we only care about the unpack crash
-                        pass
+                    with patch('frigate.record.maintainer.logger.warning') as warn:
+                        # Mock validate_and_move_segment to avoid further logic
+                        maintainer.validate_and_move_segment = MagicMock()
+
+                        try:
+                            await maintainer.move_files()
+                        except ValueError as e:
+                            if "not enough values to unpack" in str(e):
+                                self.fail("move_files() crashed on bad filename!")
+                            raise e
+                        except Exception:
+                            # Ignore other errors (like DB connection) as we only care about the unpack crash
+                            pass
+
+                        # The bad filename is encountered in multiple loops, but should only warn once.
+                        matching = [
+                            c
+                            for c in warn.call_args_list
+                            if c.args
+                            and isinstance(c.args[0], str)
+                            and "Skipping unexpected file in cache: bad_filename.mp4"
+                            in c.args[0]
+                        ]
+                        self.assertEqual(
+                            1,
+                            len(matching),
+                            f"Expected a single warning for bad filename, got {len(matching)}",
+                        )
 
 if __name__ == '__main__':
     unittest.main()
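
For readers unfamiliar with the assertion style: patching frigate.record.maintainer.logger.warning swaps the logger method for a MagicMock, so every warning call is recorded in call_args_list and can be filtered by message text, which is what the list comprehension in the test does. A minimal standalone illustration of that mechanism, independent of Frigate (the second message is a made-up example of an unrelated warning, not a real Frigate log line):

from unittest.mock import MagicMock

warn = MagicMock()

# simulate one deduplicated warning plus an unrelated warning from the same logger
warn("Skipping unexpected file in cache: bad_filename.mp4")
warn("Copying cache segment failed")  # hypothetical unrelated message

matching = [
    c
    for c in warn.call_args_list
    if c.args and "Skipping unexpected file in cache: bad_filename.mp4" in c.args[0]
]
assert len(matching) == 1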