"""Maintain recording segments in cache."""

import asyncio
import datetime
import logging
import os
import random
import string
import threading
import time
from collections import defaultdict
from multiprocessing.synchronize import Event as MpEvent
from pathlib import Path
from typing import Any, Optional, Tuple

import numpy as np
import psutil

from frigate.comms.config_updater import ConfigSubscriber
from frigate.comms.detections_updater import DetectionSubscriber, DetectionTypeEnum
from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig, RetainModeEnum
from frigate.const import (
    CACHE_DIR,
    CACHE_SEGMENT_FORMAT,
    INSERT_MANY_RECORDINGS,
    MAX_SEGMENT_DURATION,
    MAX_SEGMENTS_IN_CACHE,
    RECORD_DIR,
)
from frigate.models import Recordings, ReviewSegment
from frigate.util.services import get_video_properties

logger = logging.getLogger(__name__)

QUEUE_READ_TIMEOUT = 0.00001  # seconds

class SegmentInfo:
    """Activity statistics gathered for a single cached recording segment."""

    def __init__(
        self,
        motion_count: int,
        active_object_count: int,
        region_count: int,
        average_dBFS: int,
    ) -> None:
        # counts of motion boxes, active objects, and regions seen during
        # the segment, plus the average audio level over the segment
        self.motion_count = motion_count
        self.active_object_count = active_object_count
        self.region_count = region_count
        self.average_dBFS = average_dBFS

    def should_discard_segment(self, retain_mode: RetainModeEnum) -> bool:
        """Return True if this segment has no activity relevant to retain_mode."""
        if retain_mode == RetainModeEnum.motion:
            # motion mode: discard only when there was neither motion nor sound
            return self.motion_count == 0 and self.average_dBFS == 0

        if retain_mode == RetainModeEnum.active_objects:
            # active_objects mode: discard when no active objects were seen
            return self.active_object_count == 0

        # any other retain mode always keeps the segment
        return False

class RecordingMaintainer(threading.Thread):
    """Move finished recording segments from the cache into long term storage.

    Watches the ffmpeg segment cache, validates each completed segment
    against the camera's record configuration and recent review activity,
    then either moves it into RECORD_DIR (remuxed with +faststart) or
    discards it.
    """

    def __init__(self, config: FrigateConfig, stop_event: MpEvent):
        threading.Thread.__init__(self)
        self.name = "recording_maintainer"
        self.config = config

        # create communication for retained recordings
        self.requestor = InterProcessRequestor()
        self.config_subscriber = ConfigSubscriber("config/record/")
        self.detection_subscriber = DetectionSubscriber(DetectionTypeEnum.all)

        self.stop_event = stop_event
        # per-camera lists of detection tuples, ordered by frame time:
        # object entries are (frame_time, tracked_objects, motion_boxes, regions)
        # audio entries are (frame_time, dBFS, audio_detections)
        self.object_recordings_info: dict[str, list] = defaultdict(list)
        self.audio_recordings_info: dict[str, list] = defaultdict(list)
        # probed (end_time, duration) per cache file so each file is probed once
        self.end_time_cache: dict[str, Tuple[datetime.datetime, float]] = {}

    async def move_files(self) -> None:
        """Validate every idle cached segment and move or discard it.

        Skips files still being written by ffmpeg, trims the cache to
        MAX_SEGMENTS_IN_CACHE per camera, then validates segments
        concurrently and batches the resulting DB rows to the requestor.
        """
        cache_files = [
            d
            for d in os.listdir(CACHE_DIR)
            if os.path.isfile(os.path.join(CACHE_DIR, d))
            and d.endswith(".mp4")
            and not d.startswith("preview_")
        ]

        # collect cache files that ffmpeg still has open so they are skipped
        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except psutil.Error:
                continue

        # group recordings by camera
        grouped_recordings: defaultdict[str, list[dict[str, Any]]] = defaultdict(list)
        for cache in cache_files:
            # Skip files currently in use
            if cache in files_in_use:
                continue

            cache_path = os.path.join(CACHE_DIR, cache)
            basename = os.path.splitext(cache)[0]
            # cache file names are "<camera>@<timestamp>.mp4"
            camera, date = basename.rsplit("@", maxsplit=1)
            # important that start_time is utc because recordings are stored and compared in utc
            start_time = datetime.datetime.strptime(
                date, CACHE_SEGMENT_FORMAT
            ).astimezone(datetime.timezone.utc)

            grouped_recordings[camera].append(
                {
                    "cache_path": cache_path,
                    "start_time": start_time,
                }
            )

        # delete all cached files past the most recent MAX_SEGMENTS_IN_CACHE
        keep_count = MAX_SEGMENTS_IN_CACHE
        for camera in grouped_recordings.keys():
            # sort based on start time
            grouped_recordings[camera] = sorted(
                grouped_recordings[camera], key=lambda s: s["start_time"]
            )

            segment_count = len(grouped_recordings[camera])
            if segment_count > keep_count:
                logger.warning(
                    f"Unable to keep up with recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {segment_count} and discarding the rest..."
                )
                to_remove = grouped_recordings[camera][:-keep_count]
                for rec in to_remove:
                    cache_path = rec["cache_path"]
                    Path(cache_path).unlink(missing_ok=True)
                    self.end_time_cache.pop(cache_path, None)
                grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]

        tasks = []
        for camera, recordings in grouped_recordings.items():
            # clear out all the object recording info for old frames
            while (
                len(self.object_recordings_info[camera]) > 0
                and self.object_recordings_info[camera][0][0]
                < recordings[0]["start_time"].timestamp()
            ):
                self.object_recordings_info[camera].pop(0)

            # clear out all the audio recording info for old frames
            while (
                len(self.audio_recordings_info[camera]) > 0
                and self.audio_recordings_info[camera][0][0]
                < recordings[0]["start_time"].timestamp()
            ):
                self.audio_recordings_info[camera].pop(0)

            # get all reviews with the end time after the start of the oldest cache file
            # or with end_time None
            # severity must be part of the select: validate_and_move_segment
            # reads review.severity, and peewee leaves unselected fields
            # unpopulated, which would silently disable the alert retain mode
            reviews: ReviewSegment = (
                ReviewSegment.select(
                    ReviewSegment.start_time,
                    ReviewSegment.end_time,
                    ReviewSegment.severity,
                    ReviewSegment.data,
                )
                .where(
                    ReviewSegment.camera == camera,
                    (ReviewSegment.end_time == None)
                    | (
                        ReviewSegment.end_time
                        >= recordings[0]["start_time"].timestamp()
                    ),
                )
                .order_by(ReviewSegment.start_time)
            )

            tasks.extend(
                [self.validate_and_move_segment(camera, reviews, r) for r in recordings]
            )

        recordings_to_insert: list[Optional[Recordings]] = await asyncio.gather(*tasks)

        # fire and forget recordings entries
        self.requestor.send_data(
            INSERT_MANY_RECORDINGS,
            [r for r in recordings_to_insert if r is not None],
        )

    async def validate_and_move_segment(
        self, camera: str, reviews: list[ReviewSegment], recording: dict[str, Any]
    ) -> None:
        """Decide the fate of one cached segment.

        Returns the dict of Recordings column values when the segment is
        kept (via move_segment), or None when it is discarded or deferred
        until the detection info for its time span has arrived.
        """
        cache_path = recording["cache_path"]
        start_time = recording["start_time"]

        # Just delete files if recordings are turned off.
        # This guard must run before reading the camera's record config so a
        # segment from a camera removed from the config does not raise KeyError.
        if (
            camera not in self.config.cameras
            or not self.config.cameras[camera].record.enabled
        ):
            Path(cache_path).unlink(missing_ok=True)
            self.end_time_cache.pop(cache_path, None)
            return

        record_config = self.config.cameras[camera].record

        if cache_path in self.end_time_cache:
            end_time, duration = self.end_time_cache[cache_path]
        else:
            segment_info = await get_video_properties(cache_path, get_duration=True)

            if segment_info["duration"]:
                duration = float(segment_info["duration"])
            else:
                duration = -1

            # ensure duration is within expected length
            if 0 < duration < MAX_SEGMENT_DURATION:
                end_time = start_time + datetime.timedelta(seconds=duration)
                self.end_time_cache[cache_path] = (end_time, duration)
            else:
                if duration == -1:
                    logger.warning(f"Failed to probe corrupt segment {cache_path}")

                logger.warning(f"Discarding a corrupt recording segment: {cache_path}")
                Path(cache_path).unlink(missing_ok=True)
                return

        # if cached file's start_time is earlier than the retain days for the camera
        if start_time <= (
            datetime.datetime.now().astimezone(datetime.timezone.utc)
            - datetime.timedelta(days=self.config.cameras[camera].record.retain.days)
        ):
            # if the cached segment overlaps with the events:
            overlaps = False
            for review in reviews:
                # if the event starts in the future, stop checking events
                # and remove this segment
                if review.start_time > end_time.timestamp():
                    overlaps = False
                    Path(cache_path).unlink(missing_ok=True)
                    self.end_time_cache.pop(cache_path, None)
                    break

                # if the event is in progress or ends after the recording starts, keep it
                # and stop looking at events
                if review.end_time is None or review.end_time >= start_time.timestamp():
                    overlaps = True
                    break

            if overlaps:
                # pick retain mode from the severity of the overlapping review
                record_mode = (
                    record_config.alerts.retain.mode
                    if review.severity == "alert"
                    else record_config.detections.retain.mode
                )
                # move from cache to recordings immediately
                return await self.move_segment(
                    camera,
                    start_time,
                    end_time,
                    duration,
                    cache_path,
                    record_mode,
                )
            # if it doesn't overlap with an event, go ahead and drop the segment
            # if it ends more than the configured pre_capture for the camera
            else:
                pre_capture = max(
                    record_config.alerts.pre_capture,
                    record_config.detections.pre_capture,
                )
                camera_info = self.object_recordings_info[camera]
                most_recently_processed_frame_time = (
                    camera_info[-1][0] if len(camera_info) > 0 else 0
                )
                retain_cutoff = datetime.datetime.fromtimestamp(
                    most_recently_processed_frame_time - pre_capture
                ).astimezone(datetime.timezone.utc)
                if end_time < retain_cutoff:
                    Path(cache_path).unlink(missing_ok=True)
                    self.end_time_cache.pop(cache_path, None)
        # else retain days includes this segment
        else:
            # assume that empty means the relevant recording info has not been received yet
            camera_info = self.object_recordings_info[camera]
            most_recently_processed_frame_time = (
                camera_info[-1][0] if len(camera_info) > 0 else 0
            )

            # ensure delayed segment info does not lead to lost segments
            if (
                datetime.datetime.fromtimestamp(
                    most_recently_processed_frame_time
                ).astimezone(datetime.timezone.utc)
                >= end_time
            ):
                record_mode = self.config.cameras[camera].record.retain.mode
                return await self.move_segment(
                    camera, start_time, end_time, duration, cache_path, record_mode
                )

    def segment_stats(
        self, camera: str, start_time: datetime.datetime, end_time: datetime.datetime
    ) -> SegmentInfo:
        """Aggregate detection info for the [start_time, end_time] window.

        Relies on the per-camera info lists being ordered by frame time:
        iteration stops at the first frame past end_time.
        """
        active_count = 0
        region_count = 0
        motion_count = 0
        for frame in self.object_recordings_info[camera]:
            # frame is after end time of segment
            if frame[0] > end_time.timestamp():
                break
            # frame is before start time of segment
            if frame[0] < start_time.timestamp():
                continue

            # frame is (frame_time, tracked_objects, motion_boxes, regions)
            active_count += len(
                [
                    o
                    for o in frame[1]
                    if not o["false_positive"] and o["motionless_count"] == 0
                ]
            )
            motion_count += len(frame[2])
            region_count += len(frame[3])

        audio_values = []
        for frame in self.audio_recordings_info[camera]:
            # frame is after end time of segment
            if frame[0] > end_time.timestamp():
                break
            # frame is before start time of segment
            if frame[0] < start_time.timestamp():
                continue

            # frame is (frame_time, dBFS, audio_detections)
            # add active audio label count to count of active objects
            active_count += len(frame[2])

            # add sound level to audio values
            audio_values.append(frame[1])

        average_dBFS = 0 if not audio_values else np.average(audio_values)

        return SegmentInfo(
            motion_count, active_count, region_count, round(average_dBFS)
        )

    async def move_segment(
        self,
        camera: str,
        start_time: datetime.datetime,
        end_time: datetime.datetime,
        duration: float,
        cache_path: str,
        store_mode: RetainModeEnum,
    ) -> Optional[Recordings]:
        """Remux the cached segment into RECORD_DIR and build its DB row.

        Returns the dict of Recordings column values on success, or None
        when the segment is discarded (per store_mode), already exists,
        or the ffmpeg copy fails.
        """
        segment_info = self.segment_stats(camera, start_time, end_time)

        # check if the segment shouldn't be stored
        if segment_info.should_discard_segment(store_mode):
            Path(cache_path).unlink(missing_ok=True)
            self.end_time_cache.pop(cache_path, None)
            return

        # directory will be in utc due to start_time being in utc
        directory = os.path.join(
            RECORD_DIR,
            start_time.strftime("%Y-%m-%d/%H"),
            camera,
        )

        # exist_ok avoids a race when concurrent moves create the same hour dir
        os.makedirs(directory, exist_ok=True)

        # file will be in utc due to start_time being in utc
        file_name = f"{start_time.strftime('%M.%S.mp4')}"
        file_path = os.path.join(directory, file_name)

        try:
            if not os.path.exists(file_path):
                start_frame = datetime.datetime.now().timestamp()

                # add faststart to kept segments to improve metadata reading
                p = await asyncio.create_subprocess_exec(
                    "ffmpeg",
                    "-hide_banner",
                    "-y",
                    "-i",
                    cache_path,
                    "-c",
                    "copy",
                    "-movflags",
                    "+faststart",
                    file_path,
                    stderr=asyncio.subprocess.PIPE,
                    stdout=asyncio.subprocess.DEVNULL,
                )
                await p.wait()

                if p.returncode != 0:
                    logger.error(f"Unable to convert {cache_path} to {file_path}")
                    logger.error((await p.stderr.read()).decode("ascii"))
                    return None
                else:
                    logger.debug(
                        f"Copied {file_path} in {datetime.datetime.now().timestamp() - start_frame} seconds."
                    )

                try:
                    # get the segment size of the cache file
                    # file without faststart is same size
                    segment_size = round(
                        float(os.path.getsize(cache_path)) / pow(2, 20), 1
                    )
                except OSError:
                    segment_size = 0

                os.remove(cache_path)

                rand_id = "".join(
                    random.choices(string.ascii_lowercase + string.digits, k=6)
                )

                return {
                    Recordings.id.name: f"{start_time.timestamp()}-{rand_id}",
                    Recordings.camera.name: camera,
                    Recordings.path.name: file_path,
                    Recordings.start_time.name: start_time.timestamp(),
                    Recordings.end_time.name: end_time.timestamp(),
                    Recordings.duration.name: duration,
                    Recordings.motion.name: segment_info.motion_count,
                    # TODO: update this to store list of active objects at some point
                    Recordings.objects.name: segment_info.active_object_count,
                    Recordings.regions.name: segment_info.region_count,
                    Recordings.dBFS.name: segment_info.average_dBFS,
                    Recordings.segment_size.name: segment_size,
                }
        except Exception as e:
            logger.error(f"Unable to store recording segment {cache_path}")
            Path(cache_path).unlink(missing_ok=True)
            logger.error(e)

        # clear end_time cache
        self.end_time_cache.pop(cache_path, None)
        return None

    def run(self) -> None:
        """Main loop: drain IPC queues and maintain the cache every ~5s."""
        # Check for new files every 5 seconds
        wait_time = 0.0
        while not self.stop_event.is_set():
            time.sleep(wait_time)

            if self.stop_event.is_set():
                break

            run_start = datetime.datetime.now().timestamp()

            # check if there is an updated config
            while True:
                (
                    updated_topic,
                    updated_record_config,
                ) = self.config_subscriber.check_for_update()

                if not updated_topic:
                    break

                camera_name = updated_topic.rpartition("/")[-1]
                self.config.cameras[camera_name].record = updated_record_config

            stale_frame_count = 0
            stale_frame_count_threshold = 10

            # empty the object recordings info queue
            while True:
                (topic, data) = self.detection_subscriber.check_for_update(
                    timeout=QUEUE_READ_TIMEOUT
                )

                if not topic:
                    break

                if topic == DetectionTypeEnum.video:
                    (
                        camera,
                        frame_time,
                        current_tracked_objects,
                        motion_boxes,
                        regions,
                    ) = data

                    if self.config.cameras[camera].record.enabled:
                        self.object_recordings_info[camera].append(
                            (
                                frame_time,
                                current_tracked_objects,
                                motion_boxes,
                                regions,
                            )
                        )
                elif topic == DetectionTypeEnum.audio:
                    (
                        camera,
                        frame_time,
                        dBFS,
                        audio_detections,
                    ) = data

                    if self.config.cameras[camera].record.enabled:
                        self.audio_recordings_info[camera].append(
                            (
                                frame_time,
                                dBFS,
                                audio_detections,
                            )
                        )
                elif topic == DetectionTypeEnum.api:
                    # api messages carry no frame info; skip the stale check
                    continue

                if frame_time < run_start - stale_frame_count_threshold:
                    stale_frame_count += 1

            if stale_frame_count > 0:
                logger.debug(f"Found {stale_frame_count} old frames.")

            try:
                asyncio.run(self.move_files())
            except Exception as e:
                logger.error(
                    "Error occurred when attempting to maintain recording cache"
                )
                logger.error(e)
            duration = datetime.datetime.now().timestamp() - run_start
            wait_time = max(0, 5 - duration)

        self.requestor.stop()
        self.config_subscriber.stop()
        self.detection_subscriber.stop()
        logger.info("Exiting recording maintenance...")