diff --git a/docker/main/requirements-wheels.txt b/docker/main/requirements-wheels.txt
index 30e32ba6a..f81fefea4 100644
--- a/docker/main/requirements-wheels.txt
+++ b/docker/main/requirements-wheels.txt
@@ -47,7 +47,7 @@ onnxruntime == 1.22.*
 # Embeddings
 transformers == 4.45.*
 # Generative AI
-google-generativeai == 0.8.*
+google-genai == 1.58.*
 ollama == 0.6.*
 openai == 1.65.*
 # push notifications
diff --git a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/go2rtc/run b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/go2rtc/run
index 349027bd1..7df29f8f5 100755
--- a/docker/main/rootfs/etc/s6-overlay/s6-rc.d/go2rtc/run
+++ b/docker/main/rootfs/etc/s6-overlay/s6-rc.d/go2rtc/run
@@ -69,15 +69,15 @@ function setup_homekit_config() {
     local cleaned_json="/tmp/cache/homekit_cleaned.json"
     jq '
         # Keep only the homekit section if it exists, otherwise empty object
-        if has("homekit") then {homekit: .homekit} else {homekit: {}} end
+        if has("homekit") then {homekit: .homekit} else {} end
     ' "${temp_json}" > "${cleaned_json}" 2>/dev/null || {
-        echo '{"homekit": {}}' > "${cleaned_json}"
+        echo '{}' > "${cleaned_json}"
     }

     # Convert back to YAML and write to the config file
     yq eval -P "${cleaned_json}" > "${config_path}" 2>/dev/null || {
         echo "[WARNING] Failed to convert cleaned config to YAML, creating minimal config"
-        echo 'homekit: {}' > "${config_path}"
+        echo '{}' > "${config_path}"
     }

     # Clean up temp files
diff --git a/frigate/api/camera.py b/frigate/api/camera.py
index 936a0bb09..488ec1e1f 100644
--- a/frigate/api/camera.py
+++ b/frigate/api/camera.py
@@ -848,9 +848,10 @@ async def onvif_probe(
     try:
         if isinstance(uri, str) and uri.startswith("rtsp://"):
             if username and password and "@" not in uri:
-                # Inject URL-encoded credentials and add only the
-                # authenticated version.
-                cred = f"{quote_plus(username)}:{quote_plus(password)}@"
+                # Inject raw credentials and add only the
+                # authenticated version. The credentials will be encoded
+                # later by ffprobe_stream or the config system.
+ cred = f"{username}:{password}@" injected = uri.replace( "rtsp://", f"rtsp://{cred}", 1 ) @@ -903,12 +904,8 @@ async def onvif_probe( "/cam/realmonitor?channel=1&subtype=0", "/11", ] - # Use URL-encoded credentials for pattern fallback URIs when provided - auth_str = ( - f"{quote_plus(username)}:{quote_plus(password)}@" - if username and password - else "" - ) + # Use raw credentials for pattern fallback URIs when provided + auth_str = f"{username}:{password}@" if username and password else "" rtsp_port = 554 for path in common_paths: uri = f"rtsp://{auth_str}{host}:{rtsp_port}{path}" @@ -930,7 +927,7 @@ async def onvif_probe( and uri.startswith("rtsp://") and "@" not in uri ): - cred = f"{quote_plus(username)}:{quote_plus(password)}@" + cred = f"{username}:{password}@" cred_uri = uri.replace("rtsp://", f"rtsp://{cred}", 1) if cred_uri not in to_test: to_test.append(cred_uri) diff --git a/frigate/embeddings/onnx/jina_v1_embedding.py b/frigate/embeddings/onnx/jina_v1_embedding.py index 519247f3c..5e3ee7f3b 100644 --- a/frigate/embeddings/onnx/jina_v1_embedding.py +++ b/frigate/embeddings/onnx/jina_v1_embedding.py @@ -2,6 +2,7 @@ import logging import os +import threading import warnings from transformers import AutoFeatureExtractor, AutoTokenizer @@ -54,6 +55,7 @@ class JinaV1TextEmbedding(BaseEmbedding): self.tokenizer = None self.feature_extractor = None self.runner = None + self._lock = threading.Lock() files_names = list(self.download_urls.keys()) + [self.tokenizer_file] if not all( @@ -134,17 +136,18 @@ class JinaV1TextEmbedding(BaseEmbedding): ) def _preprocess_inputs(self, raw_inputs): - max_length = max(len(self.tokenizer.encode(text)) for text in raw_inputs) - return [ - self.tokenizer( - text, - padding="max_length", - truncation=True, - max_length=max_length, - return_tensors="np", - ) - for text in raw_inputs - ] + with self._lock: + max_length = max(len(self.tokenizer.encode(text)) for text in raw_inputs) + return [ + self.tokenizer( + text, + padding="max_length", + truncation=True, + max_length=max_length, + return_tensors="np", + ) + for text in raw_inputs + ] class JinaV1ImageEmbedding(BaseEmbedding): @@ -174,6 +177,7 @@ class JinaV1ImageEmbedding(BaseEmbedding): self.download_path = os.path.join(MODEL_CACHE_DIR, self.model_name) self.feature_extractor = None self.runner: BaseModelRunner | None = None + self._lock = threading.Lock() files_names = list(self.download_urls.keys()) if not all( os.path.exists(os.path.join(self.download_path, n)) for n in files_names @@ -216,8 +220,9 @@ class JinaV1ImageEmbedding(BaseEmbedding): ) def _preprocess_inputs(self, raw_inputs): - processed_images = [self._process_image(img) for img in raw_inputs] - return [ - self.feature_extractor(images=image, return_tensors="np") - for image in processed_images - ] + with self._lock: + processed_images = [self._process_image(img) for img in raw_inputs] + return [ + self.feature_extractor(images=image, return_tensors="np") + for image in processed_images + ] diff --git a/frigate/genai/gemini.py b/frigate/genai/gemini.py index 01e8ef758..36e708594 100644 --- a/frigate/genai/gemini.py +++ b/frigate/genai/gemini.py @@ -3,8 +3,8 @@ import logging from typing import Optional -import google.generativeai as genai -from google.api_core.exceptions import GoogleAPICallError +from google import genai +from google.genai import errors, types from frigate.config import GenAIProviderEnum from frigate.genai import GenAIClient, register_genai_provider @@ -16,44 +16,51 @@ logger = logging.getLogger(__name__) class 
GeminiClient(GenAIClient): """Generative AI client for Frigate using Gemini.""" - provider: genai.GenerativeModel + provider: genai.Client def _init_provider(self): """Initialize the client.""" - genai.configure(api_key=self.genai_config.api_key) - return genai.GenerativeModel( - self.genai_config.model, **self.genai_config.provider_options + # Merge provider_options into HttpOptions + http_options_dict = { + "api_version": "v1", + "timeout": int(self.timeout * 1000), # requires milliseconds + } + + if isinstance(self.genai_config.provider_options, dict): + http_options_dict.update(self.genai_config.provider_options) + + return genai.Client( + api_key=self.genai_config.api_key, + http_options=types.HttpOptions(**http_options_dict), ) def _send(self, prompt: str, images: list[bytes]) -> Optional[str]: """Submit a request to Gemini.""" - data = [ - { - "mime_type": "image/jpeg", - "data": img, - } - for img in images + contents = [ + types.Part.from_bytes(data=img, mime_type="image/jpeg") for img in images ] + [prompt] try: # Merge runtime_options into generation_config if provided generation_config_dict = {"candidate_count": 1} generation_config_dict.update(self.genai_config.runtime_options) - response = self.provider.generate_content( - data, - generation_config=genai.types.GenerationConfig( - **generation_config_dict - ), - request_options=genai.types.RequestOptions( - timeout=self.timeout, + response = self.provider.models.generate_content( + model=self.genai_config.model, + contents=contents, + config=types.GenerateContentConfig( + **generation_config_dict, ), ) - except GoogleAPICallError as e: + except errors.APIError as e: logger.warning("Gemini returned an error: %s", str(e)) return None + except Exception as e: + logger.warning("An unexpected error occurred with Gemini: %s", str(e)) + return None + try: description = response.text.strip() - except ValueError: + except (ValueError, AttributeError): # No description was generated return None return description diff --git a/frigate/log.py b/frigate/log.py index 717cce19f..5cec0e0d8 100644 --- a/frigate/log.py +++ b/frigate/log.py @@ -89,6 +89,7 @@ def apply_log_levels(default: str, log_levels: dict[str, LogLevel]) -> None: "ws4py": LogLevel.error, "PIL": LogLevel.warning, "numba": LogLevel.warning, + "google_genai.models": LogLevel.warning, **log_levels, } diff --git a/frigate/util/services.py b/frigate/util/services.py index c51fe923a..64d83833d 100644 --- a/frigate/util/services.py +++ b/frigate/util/services.py @@ -540,9 +540,16 @@ def get_jetson_stats() -> Optional[dict[int, dict]]: try: results["mem"] = "-" # no discrete gpu memory - with open("/sys/devices/gpu.0/load", "r") as f: - gpuload = float(f.readline()) / 10 - results["gpu"] = f"{gpuload}%" + if os.path.exists("/sys/devices/gpu.0/load"): + with open("/sys/devices/gpu.0/load", "r") as f: + gpuload = float(f.readline()) / 10 + results["gpu"] = f"{gpuload}%" + elif os.path.exists("/sys/devices/platform/gpu.0/load"): + with open("/sys/devices/platform/gpu.0/load", "r") as f: + gpuload = float(f.readline()) / 10 + results["gpu"] = f"{gpuload}%" + else: + results["gpu"] = "-" except Exception: return None diff --git a/web/src/components/timeline/DetailStream.tsx b/web/src/components/timeline/DetailStream.tsx index ac560a4df..ef9cd6364 100644 --- a/web/src/components/timeline/DetailStream.tsx +++ b/web/src/components/timeline/DetailStream.tsx @@ -887,7 +887,10 @@ function LifecycleItem({ {attributeAreaPx}{" "} - {t("information.pixels", { ns: "common" })}{" "} + 
{t("information.pixels", { + ns: "common", + area: attributeAreaPx, + })}{" "} ยท{" "} {attributeAreaPct}% diff --git a/web/src/utils/cameraUtil.ts b/web/src/utils/cameraUtil.ts index 4802c5e5f..543605ad0 100644 --- a/web/src/utils/cameraUtil.ts +++ b/web/src/utils/cameraUtil.ts @@ -81,7 +81,8 @@ export async function detectReolinkCamera( export function maskUri(uri: string): string { try { // Handle RTSP URLs with user:pass@host format - const rtspMatch = uri.match(/rtsp:\/\/([^:]+):([^@]+)@(.+)/); + // Use greedy match for password to handle passwords with @ + const rtspMatch = uri.match(/rtsp:\/\/([^:]+):(.+)@(.+)/); if (rtspMatch) { return `rtsp://${rtspMatch[1]}:${"*".repeat(4)}@${rtspMatch[3]}`; } diff --git a/web/src/views/classification/ModelSelectionView.tsx b/web/src/views/classification/ModelSelectionView.tsx index 0c1cfa1e6..3cd450bba 100644 --- a/web/src/views/classification/ModelSelectionView.tsx +++ b/web/src/views/classification/ModelSelectionView.tsx @@ -266,7 +266,10 @@ function ModelCard({ config, onClick, onUpdate, onDelete }: ModelCardProps) { return undefined; } - const keys = Object.keys(dataset.categories).filter((key) => key != "none"); + const keys = Object.keys(dataset.categories).filter( + (key) => key != "none" && key.toLowerCase() != "unknown", + ); + if (keys.length === 0) { return undefined; } diff --git a/web/src/views/classification/ModelTrainingView.tsx b/web/src/views/classification/ModelTrainingView.tsx index 10d52075d..81a8d613f 100644 --- a/web/src/views/classification/ModelTrainingView.tsx +++ b/web/src/views/classification/ModelTrainingView.tsx @@ -75,6 +75,7 @@ import SearchDetailDialog, { } from "@/components/overlay/detail/SearchDetailDialog"; import { SearchResult } from "@/types/search"; import { HiSparkles } from "react-icons/hi"; +import { capitalizeFirstLetter } from "@/utils/stringUtil"; type ModelTrainingViewProps = { model: CustomClassificationModelConfig; @@ -88,7 +89,7 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) { // title useEffect(() => { - document.title = `${model.name.toUpperCase()} - ${t("documentTitle")}`; + document.title = `${capitalizeFirstLetter(model.name)} - ${t("documentTitle")}`; }, [model.name, t]); // model state