"""Model Utils"""

import os
from typing import Any

import onnxruntime as ort

# On-disk cache locations shared across restarts so compiled TensorRT
# engines / OpenVINO blobs do not have to be rebuilt on every start.
TRT_CACHE_DIR = "/config/model_cache/tensorrt/ort"
TRT_ENGINE_CACHE_DIR = "/config/model_cache/tensorrt/ort/trt-engines"
OPENVINO_CACHE_DIR = "/config/model_cache/openvino/ort"


def get_ort_providers(
    force_cpu: bool = False, openvino_device: str = "AUTO"
) -> tuple[list[str], list[dict[str, Any]]]:
    """Build the (providers, provider_options) pair for an ONNX Runtime session.

    Args:
        force_cpu: When True, restrict inference to the CPU execution
            provider only, ignoring any accelerators ORT detects.
        openvino_device: Value passed through as the OpenVINO provider's
            ``device_type`` option (e.g. "AUTO", "GPU", "CPU").

    Returns:
        A tuple of (provider names, per-provider option dicts), index-aligned
        as required by ``ort.InferenceSession(providers=..., provider_options=...)``.
    """
    if force_cpu:
        return (["CPUExecutionProvider"], [{}])

    providers = ort.get_available_providers()
    options: list[dict[str, Any]] = []

    for provider in providers:
        if provider == "TensorrtExecutionProvider":
            # Enable persistent timing + engine caches; building a TensorRT
            # engine from scratch is slow, so cache it under /config.
            os.makedirs(TRT_ENGINE_CACHE_DIR, exist_ok=True)
            options.append(
                {
                    "trt_timing_cache_enable": True,
                    "trt_engine_cache_enable": True,
                    "trt_timing_cache_path": TRT_CACHE_DIR,
                    "trt_engine_cache_path": TRT_ENGINE_CACHE_DIR,
                }
            )
        elif provider == "OpenVINOExecutionProvider":
            os.makedirs(OPENVINO_CACHE_DIR, exist_ok=True)
            options.append(
                {
                    "cache_dir": OPENVINO_CACHE_DIR,
                    "device_type": openvino_device,
                }
            )
        else:
            # No special configuration needed for other providers
            # (e.g. CUDAExecutionProvider, CPUExecutionProvider).
            options.append({})

    return (providers, options)