diff --git a/frigate/embeddings/embeddings.py b/frigate/embeddings/embeddings.py
index f33bb823d..5fe0566f2 100644
--- a/frigate/embeddings/embeddings.py
+++ b/frigate/embeddings/embeddings.py
@@ -97,6 +97,7 @@ class Embeddings:
                 "text_model_fp16.onnx": "https://huggingface.co/jinaai/jina-clip-v1/resolve/main/onnx/text_model_fp16.onnx",
             },
             embedding_function=jina_text_embedding_function,
+            model_size=config.model_size,
             model_type="text",
             requestor=self.requestor,
             device="CPU",
@@ -118,6 +119,7 @@ class Embeddings:
             model_file=model_file,
             download_urls=download_urls,
             embedding_function=jina_vision_embedding_function,
+            model_size=config.model_size,
             model_type="vision",
             requestor=self.requestor,
             device=self.config.device,
diff --git a/frigate/embeddings/functions/onnx.py b/frigate/embeddings/functions/onnx.py
index f0757cfa2..ae9fe33bc 100644
--- a/frigate/embeddings/functions/onnx.py
+++ b/frigate/embeddings/functions/onnx.py
@@ -41,6 +41,7 @@ class GenericONNXEmbedding:
         model_file: str,
         download_urls: Dict[str, str],
         embedding_function: Callable[[List[np.ndarray]], np.ndarray],
+        model_size: str,
         model_type: str,
         requestor: InterProcessRequestor,
         tokenizer_file: Optional[str] = None,
@@ -55,8 +56,7 @@ class GenericONNXEmbedding:
         self.model_type = model_type  # 'text' or 'vision'
         self.providers, self.provider_options = get_ort_providers(
             force_cpu=device == "CPU",
-            requires_fp16=self.config.model_size == "large"
-            or self.model_type == "text",
+            requires_fp16=model_size == "large" or self.model_type == "text",
             openvino_device=device,
         )