mirror of
https://github.com/blakeblackshear/frigate.git
synced 2026-05-05 04:57:42 +03:00
auto select LPR model backend for inference
This commit is contained in:
parent
7bfcf2040d
commit
6da3b0fe86
@ -789,9 +789,7 @@ class LicensePlateProcessingMixin:
|
||||
input_w = int(input_h * max_wh_ratio)
|
||||
|
||||
# check for model-specific input width
|
||||
model_input_w = self.model_runner.recognition_model.runner.ort.get_inputs()[
|
||||
0
|
||||
].shape[3]
|
||||
model_input_w = self.model_runner.recognition_model.runner.get_input_width()
|
||||
if isinstance(model_input_w, int) and model_input_w > 0:
|
||||
input_w = model_input_w
|
||||
|
||||
|
||||
@ -108,7 +108,7 @@ class EmbeddingMaintainer(threading.Thread):
|
||||
|
||||
# model runners to share between realtime and post processors
|
||||
if self.config.lpr.enabled:
|
||||
lpr_model_runner = LicensePlateModelRunner(self.requestor)
|
||||
lpr_model_runner = LicensePlateModelRunner(self.requestor, device="AUTO")
|
||||
|
||||
# realtime processors
|
||||
self.realtime_processors: list[RealTimeProcessorApi] = []
|
||||
|
||||
@ -65,6 +65,31 @@ class ONNXModelRunner:
|
||||
elif self.type == "ort":
|
||||
return [input.name for input in self.ort.get_inputs()]
|
||||
|
||||
def get_input_width(self):
    """Return the model's static input width, regardless of backend.

    Reads the last dimension of the first input's (N, C, H, W) shape.

    Returns:
        int: the input width in pixels, or -1 when the width is dynamic
        or cannot be determined for the active backend.
    """
    if self.type == "ort":
        # ONNX Runtime reports dynamic dimensions as strings (or None),
        # so normalize anything non-int to the -1 sentinel used by the
        # OpenVINO branch below; also guard against rank < 4.
        shape = self.ort.get_inputs()[0].shape
        if len(shape) >= 4 and isinstance(shape[3], int):
            return shape[3]
        return -1
    elif self.type == "ov":
        first_input = self.interpreter.inputs[0]

        try:
            partial_shape = first_input.get_partial_shape()
            # width dimension (index 3 of NCHW)
            if len(partial_shape) >= 4 and partial_shape[3].is_static:
                return partial_shape[3].get_length()

            # If width is dynamic or we can't determine it
            return -1
        except Exception:
            try:
                # Some OpenVINO versions expose a plain .shape instead of
                # get_partial_shape(); fall back to it when available.
                input_shape = first_input.shape
                return input_shape[3] if len(input_shape) >= 4 else -1
            except Exception:
                return -1
    # Unknown backend type.
    return -1
|
||||
|
||||
def run(self, input: dict[str, Any]) -> Any:
|
||||
if self.type == "ov":
|
||||
infer_request = self.interpreter.create_infer_request()
|
||||
|
||||
Loading…
Reference in New Issue
Block a user