diff --git a/docs/docs/configuration/object_detectors.md b/docs/docs/configuration/object_detectors.md
index 015f4a50f..0feb30936 100644
--- a/docs/docs/configuration/object_detectors.md
+++ b/docs/docs/configuration/object_detectors.md
@@ -131,7 +131,7 @@ model:
   labelmap_path: /openvino-model/coco_91cl_bkgr.txt
 ```
 
-This detector also supports some YOLO variants: YOLOX and YOLOv5 specifically. Other YOLO variants are not officially supported/tested. Frigate does not come with any yolo models preloaded, so you will need to supply your own models. This detector has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. You can follow [these instructions](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny#download-a-model-and-convert-it-into-openvino-ir-format) to retrieve the OpenVINO-compatible `yolox_tiny` model. Make sure that the model input dimensions match the `width` and `height` parameters, and `model_type` is set accordingly. See [Full Configuration Reference](/configuration/reference.md) for a list of possible `model_type` options. Below is an example of how `yolox_tiny` can be used in Frigate:
+This detector also supports YOLOX; other YOLO variants are not officially supported or tested. Frigate does not come with any YOLO models preloaded, so you will need to supply your own. This detector has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. You can follow [these instructions](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny#download-a-model-and-convert-it-into-openvino-ir-format) to retrieve the OpenVINO-compatible `yolox_tiny` model. Make sure that the model input dimensions match the `width` and `height` parameters and that `model_type` is set accordingly. See the [Full Configuration Reference](/configuration/reference.md) for a list of possible `model_type` options. Below is an example of how `yolox_tiny` can be used in Frigate:
 
 ```yaml
 detectors:
diff --git a/docs/docs/configuration/reference.md b/docs/docs/configuration/reference.md
index c4a02491b..0591f9757 100644
--- a/docs/docs/configuration/reference.md
+++ b/docs/docs/configuration/reference.md
@@ -80,7 +80,7 @@ model:
   # Valid values are nhwc or nchw (default: shown below)
   input_tensor: nhwc
   # Optional: Object detection model type, currently only used with the OpenVINO detector
-  # Valid values are ssd, yolox or yolov5 (default: shown below)
+  # Valid values are ssd or yolox (default: shown below)
   model_type: ssd
   # Optional: Label name modifications. These are merged into the standard labelmap.
   labelmap:
diff --git a/frigate/detectors/detector_config.py b/frigate/detectors/detector_config.py
index 5e35e2e29..a517e5a36 100644
--- a/frigate/detectors/detector_config.py
+++ b/frigate/detectors/detector_config.py
@@ -30,7 +30,6 @@ class InputTensorEnum(str, Enum):
 class ModelTypeEnum(str, Enum):
     ssd = "ssd"
     yolox = "yolox"
-    yolov5 = "yolov5"
 
 
 class ModelConfig(BaseModel):
diff --git a/frigate/detectors/plugins/onnx.py b/frigate/detectors/plugins/onnx.py
index 0564fbc2c..f7f4637ab 100644
--- a/frigate/detectors/plugins/onnx.py
+++ b/frigate/detectors/plugins/onnx.py
@@ -31,7 +31,6 @@ class ONNXDetector(DetectionApi):
             )
             raise
 
-        path = detector_config.model.path
         logger.info(f"ONNX: loading {detector_config.model.path}")
-        self.model = onnxruntime.InferenceSession(path)
+        self.model = onnxruntime.InferenceSession(detector_config.model.path)
 
@@ -45,4 +44,6 @@ class ONNXDetector(DetectionApi):
 
         tensor_output = self.model.run(None, {model_input_name: tensor_input})[0]
 
-        raise Exception("No models are currently supported via onnx. See the docs for more info.")
+        raise Exception(
+            "No models are currently supported via onnx. See the docs for more info."
+        )
diff --git a/frigate/detectors/plugins/openvino.py b/frigate/detectors/plugins/openvino.py
index 1b6c16de6..c259bda64 100644
--- a/frigate/detectors/plugins/openvino.py
+++ b/frigate/detectors/plugins/openvino.py
@@ -131,21 +131,3 @@ class OvDetector(DetectionApi):
                     object_detected[6], object_detected[5], object_detected[:4]
                 )
             return detections
-        elif self.ov_model_type == ModelTypeEnum.yolov5:
-            out_tensor = infer_request.get_output_tensor()
-            output_data = out_tensor.data[0]
-            # filter out lines with scores below threshold
-            conf_mask = (output_data[:, 4] >= 0.5).squeeze()
-            output_data = output_data[conf_mask]
-            # limit to top 20 scores, descending order
-            ordered = output_data[output_data[:, 4].argsort()[::-1]][:20]
-
-            detections = np.zeros((20, 6), np.float32)
-
-            for i, object_detected in enumerate(ordered):
-                detections[i] = self.process_yolo(
-                    np.argmax(object_detected[5:]),
-                    object_detected[4],
-                    object_detected[:4],
-                )
-            return detections
diff --git a/frigate/detectors/plugins/rknn.py b/frigate/detectors/plugins/rknn.py
index bb22eaae0..399152624 100644
--- a/frigate/detectors/plugins/rknn.py
+++ b/frigate/detectors/plugins/rknn.py
@@ -98,7 +98,9 @@ class Rknn(DetectionApi):
                 "Error initializing rknn runtime. Do you run docker in privileged mode?"
            )
 
-        raise Exception("RKNN does not currently support any models. Please see the docs for more info.")
+        raise Exception(
+            "RKNN does not currently support any models. Please see the docs for more info."
+        )
 
     def __del__(self):
         self.rknn.release()
diff --git a/frigate/detectors/plugins/rocm.py b/frigate/detectors/plugins/rocm.py
index 13d31f5fc..76b8542e8 100644
--- a/frigate/detectors/plugins/rocm.py
+++ b/frigate/detectors/plugins/rocm.py
@@ -118,4 +118,6 @@ class ROCmDetector(DetectionApi):
                 addr, shape=detector_result.get_shape().lens()
             )
 
-        raise Exception("No models are currently supported for rocm. See the docs for more info.")
+        raise Exception(
+            "No models are currently supported for rocm. See the docs for more info."
+        )
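
Note for reviewers: the `yolox_tiny` example that the updated `object_detectors.md` text points to is not visible in the hunk above (the diff only shows the opening of that fenced block). For context, below is a minimal sketch of such a configuration. The detector name `ov`, `device: AUTO`, and the model/labelmap paths are illustrative assumptions, not taken from the docs; all keys shown are standard Frigate model/detector options.

```yaml
# Sketch only: running a converted yolox_tiny model with the OpenVINO detector.
# Paths, detector name, and device are placeholders; adjust to your setup.
detectors:
  ov:
    type: openvino
    device: AUTO              # let OpenVINO choose CPU/GPU

model:
  model_type: yolox           # must match the supplied model architecture
  width: 416                  # yolox_tiny default input width
  height: 416                 # yolox_tiny default input height
  input_tensor: nchw
  input_pixel_format: bgr
  path: /path/to/yolox_tiny.xml          # converted OpenVINO IR model
  labelmap_path: /path/to/coco_80cl.txt  # labelmap matching the model's classes
```

The 416x416 dimensions follow the default `yolox_tiny` input size; verify them against the converted model, since the detector requires `width` and `height` to match the model's input shape.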