Initial work for adding OpenVino detector. Not functional

parent 4523c9b06d
commit 4b675692ec
frigate/config.py
@@ -54,6 +54,7 @@ class FrigateBaseModel(BaseModel):


 class DetectorTypeEnum(str, Enum):
     edgetpu = "edgetpu"
+    openvino = "openvino"
     cpu = "cpu"

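Because DetectorTypeEnum mixes in str, the plain string "openvino" from a user's config maps straight onto the new member. A minimal standalone sketch of that behavior (the enum is redeclared here only for illustration):

from enum import Enum


class DetectorTypeEnum(str, Enum):
    edgetpu = "edgetpu"
    openvino = "openvino"
    cpu = "cpu"


# A config value of "openvino" resolves to the new member, and the member
# compares equal to its string value thanks to the str mixin.
assert DetectorTypeEnum("openvino") is DetectorTypeEnum.openvino
assert DetectorTypeEnum.openvino == "openvino"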
frigate/detectors/openvino.py (new file, 88 lines)
@@ -0,0 +1,88 @@
import logging
import numpy as np
import openvino.runtime as ov

from frigate.detectors.detection_api import DetectionApi


logger = logging.getLogger(__name__)


class OvDetector(DetectionApi):
    def __init__(self, det_device=None, model_path=None, num_threads=1):
        self.ovCore = ov.Core()
        self.ovModel = self.ovCore.read_model(model_path)
        self.interpreter = self.ovCore.compile_model(
            model=self.ovModel, device_name=det_device
        )

        self.interpreter.allocate_tensors()

        self.tensor_input_details = self.interpreter.get_input_details()
        self.tensor_output_details = self.interpreter.get_output_details()

    def detect_raw(self, tensor_input):
        self.interpreter.set_tensor(self.tensor_input_details[0]["index"], tensor_input)
        self.interpreter.invoke()

        boxes = self.interpreter.tensor(self.tensor_output_details[0]["index"])()[0]
        class_ids = self.interpreter.tensor(self.tensor_output_details[1]["index"])()[0]
        scores = self.interpreter.tensor(self.tensor_output_details[2]["index"])()[0]
        count = int(
            self.interpreter.tensor(self.tensor_output_details[3]["index"])()[0]
        )

        detections = np.zeros((20, 6), np.float32)

        for i in range(count):
            if scores[i] < 0.4 or i == 20:
                break
            detections[i] = [
                class_ids[i],
                float(scores[i]),
                boxes[i][0],
                boxes[i][1],
                boxes[i][2],
                boxes[i][3],
            ]

        return detections


class GpuOpenVino(DetectionApi):
    def __init__(self, det_device=None, model_path=None, num_threads=1):
        self.interpreter = tflite.Interpreter(
            model_path=model_path or "/cpu_model.tflite", num_threads=3
        )

        self.interpreter.allocate_tensors()

        self.tensor_input_details = self.interpreter.get_input_details()
        self.tensor_output_details = self.interpreter.get_output_details()

    def detect_raw(self, tensor_input):
        self.interpreter.set_tensor(self.tensor_input_details[0]["index"], tensor_input)
        self.interpreter.invoke()

        boxes = self.interpreter.tensor(self.tensor_output_details[0]["index"])()[0]
        class_ids = self.interpreter.tensor(self.tensor_output_details[1]["index"])()[0]
        scores = self.interpreter.tensor(self.tensor_output_details[2]["index"])()[0]
        count = int(
            self.interpreter.tensor(self.tensor_output_details[3]["index"])()[0]
        )

        detections = np.zeros((20, 6), np.float32)

        for i in range(count):
            if scores[i] < 0.4 or i == 20:
                break
            detections[i] = [
                class_ids[i],
                float(scores[i]),
                boxes[i][0],
                boxes[i][1],
                boxes[i][2],
                boxes[i][3],
            ]

        return detections
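As committed, OvDetector compiles the model with the OpenVINO Runtime but then drives it through TFLite Interpreter calls (allocate_tensors, get_input_details, set_tensor, invoke, tensor) that a compiled OpenVINO model does not expose, and GpuOpenVino references tflite.Interpreter without importing it in this file, which matches the commit being marked not functional. The following is a minimal sketch, not part of the commit, of how detect_raw could be expressed with the OpenVINO Runtime API instead; the output layout is an assumption (a single SSD-style tensor of shape [1, 1, N, 7] with rows [image_id, label, conf, x_min, y_min, x_max, y_max]), and the class name is hypothetical.

import numpy as np
import openvino.runtime as ov


class OvDetectorSketch:
    def __init__(self, det_device=None, model_path=None):
        self.ov_core = ov.Core()
        self.compiled_model = self.ov_core.compile_model(
            model=self.ov_core.read_model(model_path), device_name=det_device
        )

    def detect_raw(self, tensor_input):
        # Synchronous inference through an explicit infer request.
        infer_request = self.compiled_model.create_infer_request()
        infer_request.infer([tensor_input])
        # Assumes a single SSD-style output tensor of shape [1, 1, N, 7].
        results = infer_request.get_output_tensor().data

        detections = np.zeros((20, 6), np.float32)
        for i, (_, label, conf, x_min, y_min, x_max, y_max) in enumerate(results[0, 0]):
            if i == 20 or conf < 0.4:
                break
            # Keep the [class_id, score, y_min, x_min, y_max, x_max] row layout
            # used by the other Frigate detectors above.
            detections[i] = [label, float(conf), y_min, x_min, y_max, x_max]
        return detections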
@@ -12,6 +12,7 @@ from setproctitle import setproctitle

 from frigate.config import DetectorTypeEnum, InputTensorEnum
 from frigate.detectors.edgetpu_tfl import EdgeTpuTfl
+from frigate.detectors.openvino import OvDetector
 from frigate.detectors.cpu_tfl import CpuTfl

 from frigate.util import EventsPerSecond, SharedMemoryFrameManager, listen, load_labels
@@ -57,6 +58,10 @@ class LocalObjectDetector(ObjectDetector):
             self.detect_api = EdgeTpuTfl(
                 det_device=det_device, model_config=model_config
             )
+        elif det_type == DetectorTypeEnum.openvino:
+            self.detect_api = OvDetector(
+                det_device=det_device, model_config=model_config
+            )
         else:
             logger.warning(
                 "CPU detectors are not recommended and should only be used for testing or for trial purposes."
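The new elif branch passes model_config=model_config, but OvDetector.__init__ as committed only accepts model_path, so the wiring does not line up yet; together with the TFLite-style calls inside OvDetector, that matches the "Not functional" note in the commit message. A hypothetical driver sketch follows, assuming the constructor keeps its model_path signature, that an OpenVINO IR model exists at the illustrative path below, and that detect_raw has been reworked along the lines of the sketch after the new file:

import numpy as np

# "CPU" as the OpenVINO device name and the model path are assumptions for illustration.
detector = OvDetector(det_device="CPU", model_path="/openvino-model/ssdlite_mobilenet_v2.xml")

# Frigate hands detectors a uint8 tensor shaped like the model input
# (320x320x3 for the default detection model); a dummy frame stands in here.
frame = np.zeros((1, 320, 320, 3), dtype=np.uint8)
detections = detector.detect_raw(frame)  # 20x6 array: [class_id, score, y_min, x_min, y_max, x_max]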