adjust SharedMemory size to largest detector model shape

Dennis George 2022-12-09 15:21:17 -06:00
parent bbadb70365
commit f5ff975530
2 changed files with 10 additions and 27 deletions
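Before this change, every per-camera input SharedMemory block was sized from the single global model config. With multiple detectors configured, their model shapes can differ, so the block is now sized to the largest detector model, ensuring any detector can read a full frame out of it. A minimal standalone sketch of that sizing logic, using hypothetical detector shapes rather than Frigate's real config objects:

from multiprocessing import shared_memory

# Hypothetical detector model shapes; in Frigate these come from
# config.detectors[name].model.
detectors = {
    "coral": {"height": 320, "width": 320},
    "ov": {"height": 640, "width": 640},
}

# Largest model frame in bytes: height * width * 3 channels of uint8.
size = max(d["height"] * d["width"] * 3 for d in detectors.values())

try:
    shm_in = shared_memory.SharedMemory(name="front_door", create=True, size=size)
except FileExistsError:
    shm_in = shared_memory.SharedMemory(name="front_door")

shm_in.close()
shm_in.unlink()  # cleanup here; Frigate keeps these alive for the detector processes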


@@ -186,10 +186,11 @@ class FrigateApp:
             self.detection_out_events[name] = mp.Event()
 
             try:
+                size = max([det.model.height * det.model.width * 3 for (name, det) in self.config.detectors.items()])
                 shm_in = mp.shared_memory.SharedMemory(
                     name=name,
                     create=True,
-                    size=self.config.model.height * self.config.model.width * 3,
+                    size=size,
                 )
             except FileExistsError:
                 shm_in = mp.shared_memory.SharedMemory(name=name)
@@ -204,15 +205,12 @@ class FrigateApp:
             self.detection_shms.append(shm_in)
             self.detection_shms.append(shm_out)
 
-        for name, detector in self.config.detectors.items():
+        for name, detector_config in self.config.detectors.items():
             self.detectors[name] = ObjectDetectProcess(
                 name,
                 self.detection_queue,
                 self.detection_out_events,
-                self.config.model,
-                detector.type,
-                detector.device,
-                detector.num_threads,
+                detector_config,
             )
 
     def start_detected_frames_processor(self) -> None:
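The second hunk is the caller-side half of the same refactor: instead of the global model config plus loose type/device/num_threads arguments, each ObjectDetectProcess now receives one per-detector config object. Roughly the shape being passed around (field names assumed for illustration; Frigate's actual DetectorConfig/ModelConfig are pydantic models):

from dataclasses import dataclass, field

@dataclass
class ModelConfig:
    # assumed fields, mirroring the height/width used for sizing above
    height: int = 320
    width: int = 320

@dataclass
class DetectorConfig:
    # assumed consolidation of the former type/device/num_threads arguments
    type: str = "cpu"
    device: str = "usb"
    num_threads: int = 3
    model: ModelConfig = field(default_factory=ModelConfig)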


@@ -79,10 +79,7 @@ def run_detector(
     out_events: dict[str, mp.Event],
     avg_speed,
     start,
-    model_config,
-    det_type,
-    det_device,
-    num_threads,
+    detector_config,
 ):
     threading.current_thread().name = f"detector:{name}"
     logger = logging.getLogger(f"detector.{name}")
@@ -100,10 +97,7 @@ def run_detector(
     frame_manager = SharedMemoryFrameManager()
     object_detector = LocalObjectDetector(
-        det_type=det_type,
-        det_device=det_device,
-        model_config=model_config,
-        num_threads=num_threads,
+        detector_config=detector_config
     )
 
     outputs = {}
@@ -118,7 +112,7 @@ def run_detector(
         except queue.Empty:
             continue
         input_frame = frame_manager.get(
-            connection_id, (1, model_config.height, model_config.width, 3)
+            connection_id, (1, detector_config.model.height, detector_config.model.width, 3)
         )
 
         if input_frame is None:
@@ -141,10 +135,7 @@ class ObjectDetectProcess:
         name,
         detection_queue,
         out_events,
-        model_config,
-        det_type=None,
-        det_device=None,
-        num_threads=3,
+        detector_config,
     ):
         self.name = name
         self.out_events = out_events
@@ -152,10 +143,7 @@ class ObjectDetectProcess:
         self.avg_inference_speed = mp.Value("d", 0.01)
         self.detection_start = mp.Value("d", 0.0)
         self.detect_process = None
-        self.model_config = model_config
-        self.det_type = det_type
-        self.det_device = det_device
-        self.num_threads = num_threads
+        self.detector_config = detector_config
         self.start_or_restart()
 
     def stop(self):
@@ -180,10 +168,7 @@ class ObjectDetectProcess:
                 self.out_events,
                 self.avg_inference_speed,
                 self.detection_start,
-                self.model_config,
-                self.det_type,
-                self.det_device,
-                self.num_threads,
+                self.detector_config,
             ),
         )
         self.detect_process.daemon = True
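With the per-detector config in hand, run_detector reads each frame as a (1, height, width, 3) view over the camera's shared buffer, which is why that buffer must be at least as large as the biggest configured model. A self-contained sketch of that zero-copy read under assumed names (Frigate does this through SharedMemoryFrameManager):

import numpy as np
from multiprocessing import shared_memory

h, w = 320, 320  # detector_config.model.height / .width in the real code
shm = shared_memory.SharedMemory(name="front_door", create=True, size=h * w * 3)

# Zero-copy ndarray view over the shared buffer; this fails if the buffer is
# smaller than h * w * 3 bytes, hence sizing it to the largest model.
input_frame = np.ndarray((1, h, w, 3), dtype=np.uint8, buffer=shm.buf)

# ... hand input_frame to the object detector here ...

del input_frame  # release the view before closing the mapping
shm.close()
shm.unlink()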