Mirror of https://github.com/blakeblackshear/frigate.git, synced 2026-02-03 17:55:21 +03:00
Fixed lint formatting issues
parent a3bd13b7ea
commit cf70808c77
@@ -22,10 +22,12 @@ class InputTensorEnum(str, Enum):
     nchw = "nchw"
     nhwc = "nhwc"
 
+
 class ModelTypeEnum(str, Enum):
     ssd = "ssd"
     yolox = "yolox"
 
+
 class ModelConfig(BaseModel):
     path: Optional[str] = Field(title="Custom Object detection model path.")
     labelmap_path: Optional[str] = Field(title="Label map for custom object detector.")
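Both enums mix in str, which is what lets raw config strings be compared and coerced directly. A minimal self-contained sketch of that behavior (the enum is redefined locally just for illustration):

    from enum import Enum

    class ModelTypeEnum(str, Enum):  # mirrors the definition in the hunk above
        ssd = "ssd"
        yolox = "yolox"

    assert ModelTypeEnum("yolox") is ModelTypeEnum.yolox  # lookup from a config string
    assert ModelTypeEnum.ssd == "ssd"  # str mixin: members compare equal to plain strings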
@@ -17,6 +17,7 @@ class OvDetectorConfig(BaseDetectorConfig):
     type: Literal[DETECTOR_KEY]
     device: str = Field(default=None, title="Device Type")
 
+
 class OvDetector(DetectionApi):
     type_key = DETECTOR_KEY
 
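For context, a hedged sketch of how a detector config with the device field might be built; "openvino" is assumed here to be the plugin's DETECTOR_KEY, "AUTO" is just an example OpenVINO device string, and the class is a local stand-in rather than the real OvDetectorConfig:

    from typing import Literal
    from pydantic import BaseModel, Field

    class OvDetectorConfigSketch(BaseModel):  # stand-in mirroring the fields shown above
        type: Literal["openvino"]  # assumed DETECTOR_KEY value
        device: str = Field(default=None, title="Device Type")

    cfg = OvDetectorConfigSketch(type="openvino", device="AUTO")  # "CPU" / "GPU" also common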
@@ -43,9 +44,8 @@ class OvDetector(DetectionApi):
             except:
                 logger.info(f"Model has {self.output_indexes} Output Tensors")
                 break
-
-        if(self.ov_model_type == ModelTypeEnum.yolox):
-            self.num_classes = tensor_shape[2]-5
+        if self.ov_model_type == ModelTypeEnum.yolox:
+            self.num_classes = tensor_shape[2] - 5
             logger.info(f"YOLOX model has {self.num_classes} classes")
             self.set_strides_grids()
 
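The tensor_shape[2] - 5 arithmetic follows from the YOLOX output layout noted further down ([x, y, h, w, box_score, per-class scores]): the last dimension is 5 box/objectness values plus one score per class. A small illustrative check with made-up shape values, not from a real model:

    # e.g. a 640x640 COCO-style model: 8400 predictions, 80 classes
    tensor_shape = (1, 8400, 85)
    num_classes = tensor_shape[2] - 5
    assert num_classes == 80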
@@ -64,7 +64,6 @@ class OvDetector(DetectionApi):
             grids.append(grid)
             shape = grid.shape[:2]
             expanded_strides.append(np.full((*shape, 1), stride))
-
         self.grids = np.concatenate(grids, 1)
         self.expanded_strides = np.concatenate(expanded_strides, 1)
 
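set_strides_grids builds the usual YOLOX decode tables; the full loop is not visible in this hunk, so the sketch below assumes the standard strides of 8/16/32 and a 640x640 input:

    import numpy as np

    h, w, strides = 640, 640, [8, 16, 32]
    grids, expanded_strides = [], []
    for stride in strides:
        hsize, wsize = h // stride, w // stride
        xv, yv = np.meshgrid(np.arange(wsize), np.arange(hsize))
        grid = np.stack((xv, yv), 2).reshape(1, -1, 2)  # per-cell (x, y) indices
        grids.append(grid)
        shape = grid.shape[:2]
        expanded_strides.append(np.full((*shape, 1), stride))

    all_grids = np.concatenate(grids, 1)                # shape (1, 8400, 2)
    all_strides = np.concatenate(expanded_strides, 1)   # shape (1, 8400, 1)
    # Decoding then maps raw offsets to pixels: xy = (raw_xy + grid) * stride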
@@ -72,7 +71,7 @@ class OvDetector(DetectionApi):
         infer_request = self.interpreter.create_infer_request()
         infer_request.infer([tensor_input])
 
-        if(self.ov_model_type == ModelTypeEnum.ssd):
+        if self.ov_model_type == ModelTypeEnum.ssd:
             results = infer_request.get_output_tensor()
 
             detections = np.zeros((20, 6), np.float32)
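The SSD branch reads a single DetectionOutput-style tensor. A hedged sketch of how such rows could map into the 20x6 detections array, assuming the standard OpenVINO layout [image_id, label, conf, x_min, y_min, x_max, y_max], a fabricated sample row, and an illustrative 0.1 score cutoff:

    import numpy as np

    ssd_output = np.array([[[[0, 17, 0.92, 0.10, 0.20, 0.45, 0.60]]]], np.float32)  # (1, 1, N, 7), fake data
    detections = np.zeros((20, 6), np.float32)
    for i, row in enumerate(ssd_output[0, 0, :]):
        if row[2] < 0.1 or i == 20:
            break
        # Frigate-style row: [label, score, y_min, x_min, y_max, x_max]
        detections[i] = [row[1], float(row[2]), row[4], row[3], row[6], row[5]]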
@@ -92,7 +91,7 @@ class OvDetector(DetectionApi):
                 ]
                 i += 1
             return detections
-        elif(self.ov_model_type == ModelTypeEnum.yolox):
+        elif self.ov_model_type == ModelTypeEnum.yolox:
             out_tensor = infer_request.get_output_tensor()
             # [x, y, h, w, box_score, class_no_1, ..., class_no_80],
             results = out_tensor.data
@@ -100,8 +99,10 @@ class OvDetector(DetectionApi):
             results[..., 2:4] = np.exp(results[..., 2:4]) * self.expanded_strides
             image_pred = results[0, ...]
 
-            class_conf = np.max(image_pred[:, 5:5+self.num_classes], axis=1, keepdims=True)
-            class_pred = np.argmax(image_pred[: , 5:5+self.num_classes], axis=1)
+            class_conf = np.max(
+                image_pred[:, 5 : 5 + self.num_classes], axis=1, keepdims=True
+            )
+            class_pred = np.argmax(image_pred[:, 5 : 5 + self.num_classes], axis=1)
             class_pred = np.expand_dims(class_pred, axis=1)
 
             conf_mask = (image_pred[:, 4] * class_conf.squeeze() >= 0.3).squeeze()
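The reformatted scoring lines keep the same semantics: take the best class score per prediction, its index, and then gate on objectness times class score. A tiny worked example with fabricated numbers (3 classes instead of the real class count):

    import numpy as np

    num_classes = 3
    image_pred = np.array(
        [
            # [box values..., objectness, class scores...]
            [0, 0, 0, 0, 0.9, 0.1, 0.7, 0.2],
            [0, 0, 0, 0, 0.2, 0.5, 0.3, 0.2],
        ],
        np.float32,
    )
    class_conf = np.max(image_pred[:, 5 : 5 + num_classes], axis=1, keepdims=True)  # [[0.7], [0.5]]
    class_pred = np.argmax(image_pred[:, 5 : 5 + num_classes], axis=1)              # [1, 0]
    conf_mask = (image_pred[:, 4] * class_conf.squeeze() >= 0.3).squeeze()
    # Row 0: 0.9 * 0.7 = 0.63 -> kept; row 1: 0.2 * 0.5 = 0.10 -> dropped.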
@@ -119,13 +120,16 @@ class OvDetector(DetectionApi):
                     detections[i] = [
                         object_detected[6],  # Label ID
                         object_detected[5],  # Confidence
-                        (object_detected[1]-(object_detected[3]/2))/self.h,  # y_min
-                        (object_detected[0]-(object_detected[2]/2))/self.w,  # x_min
-                        (object_detected[1]+(object_detected[3]/2))/self.h,  # y_max
-                        (object_detected[0]+(object_detected[2]/2))/self.w,  # x_max
+                        (object_detected[1] - (object_detected[3] / 2))
+                        / self.h,  # y_min
+                        (object_detected[0] - (object_detected[2] / 2))
+                        / self.w,  # x_min
+                        (object_detected[1] + (object_detected[3] / 2))
+                        / self.h,  # y_max
+                        (object_detected[0] + (object_detected[2] / 2))
+                        / self.w,  # x_max
                     ]
                     i += 1
                 else:
                     break
-
             return detections
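The reflowed box math is unchanged: YOLOX centre/size outputs are converted to corner coordinates and normalized by the model input height/width. A worked example with made-up numbers on an assumed 640x640 input:

    # a 100x80 (w x h) box centred at (320, 240)
    w_in, h_in = 640, 640
    x, y, bw, bh = 320.0, 240.0, 100.0, 80.0
    y_min = (y - bh / 2) / h_in  # 200 / 640 = 0.3125
    x_min = (x - bw / 2) / w_in  # 270 / 640 = 0.421875
    y_max = (y + bh / 2) / h_in  # 280 / 640 = 0.4375
    x_max = (x + bw / 2) / w_in  # 370 / 640 = 0.578125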