Miscellaneous fixes (0.17 beta) (#21655)
Some checks failed
CI / AMD64 Build (push) Has been cancelled
CI / ARM Build (push) Has been cancelled
CI / Jetson Jetpack 6 (push) Has been cancelled
CI / AMD64 Extra Build (push) Has been cancelled
CI / ARM Extra Build (push) Has been cancelled
CI / Synaptics Build (push) Has been cancelled
CI / Assemble and push default build (push) Has been cancelled

* Fix jetson stats reading

* Return result

* Avoid unknown class for cover image

* fix double encoding of passwords in camera wizard

* formatting

* empty homekit config fixes

* add locks to jina v1 embeddings

Protect the tokenizer and feature extractor in jina_v1_embedding with a per-instance thread lock to avoid the "Already borrowed" RuntimeError during concurrent tokenization.

* Capitalize correctly

* replace deprecated google-generativeai with google-genai

Update the Gemini GenAI provider to use the new SDK calls.
provider_options now specifies any HTTP options.
Suppress unneeded info logging.

* fix attribute area on detail stream hover

---------

Co-authored-by: Josh Hawkins <32435876+hawkeye217@users.noreply.github.com>
This commit is contained in:
Nicolas Mowen 2026-01-15 07:08:49 -07:00 committed by GitHub
parent 2e1706baa0
commit bf099c3edd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 83 additions and 58 deletions

View File

@ -47,7 +47,7 @@ onnxruntime == 1.22.*
# Embeddings # Embeddings
transformers == 4.45.* transformers == 4.45.*
# Generative AI # Generative AI
google-generativeai == 0.8.* google-genai == 1.58.*
ollama == 0.6.* ollama == 0.6.*
openai == 1.65.* openai == 1.65.*
# push notifications # push notifications

View File

@ -69,15 +69,15 @@ function setup_homekit_config() {
local cleaned_json="/tmp/cache/homekit_cleaned.json" local cleaned_json="/tmp/cache/homekit_cleaned.json"
jq ' jq '
# Keep only the homekit section if it exists, otherwise empty object # Keep only the homekit section if it exists, otherwise empty object
if has("homekit") then {homekit: .homekit} else {homekit: {}} end if has("homekit") then {homekit: .homekit} else {} end
' "${temp_json}" > "${cleaned_json}" 2>/dev/null || { ' "${temp_json}" > "${cleaned_json}" 2>/dev/null || {
echo '{"homekit": {}}' > "${cleaned_json}" echo '{}' > "${cleaned_json}"
} }
# Convert back to YAML and write to the config file # Convert back to YAML and write to the config file
yq eval -P "${cleaned_json}" > "${config_path}" 2>/dev/null || { yq eval -P "${cleaned_json}" > "${config_path}" 2>/dev/null || {
echo "[WARNING] Failed to convert cleaned config to YAML, creating minimal config" echo "[WARNING] Failed to convert cleaned config to YAML, creating minimal config"
echo 'homekit: {}' > "${config_path}" echo '{}' > "${config_path}"
} }
# Clean up temp files # Clean up temp files

View File

@ -848,9 +848,10 @@ async def onvif_probe(
try: try:
if isinstance(uri, str) and uri.startswith("rtsp://"): if isinstance(uri, str) and uri.startswith("rtsp://"):
if username and password and "@" not in uri: if username and password and "@" not in uri:
# Inject URL-encoded credentials and add only the # Inject raw credentials and add only the
# authenticated version. # authenticated version. The credentials will be encoded
cred = f"{quote_plus(username)}:{quote_plus(password)}@" # later by ffprobe_stream or the config system.
cred = f"{username}:{password}@"
injected = uri.replace( injected = uri.replace(
"rtsp://", f"rtsp://{cred}", 1 "rtsp://", f"rtsp://{cred}", 1
) )
@ -903,12 +904,8 @@ async def onvif_probe(
"/cam/realmonitor?channel=1&subtype=0", "/cam/realmonitor?channel=1&subtype=0",
"/11", "/11",
] ]
# Use URL-encoded credentials for pattern fallback URIs when provided # Use raw credentials for pattern fallback URIs when provided
auth_str = ( auth_str = f"{username}:{password}@" if username and password else ""
f"{quote_plus(username)}:{quote_plus(password)}@"
if username and password
else ""
)
rtsp_port = 554 rtsp_port = 554
for path in common_paths: for path in common_paths:
uri = f"rtsp://{auth_str}{host}:{rtsp_port}{path}" uri = f"rtsp://{auth_str}{host}:{rtsp_port}{path}"
@ -930,7 +927,7 @@ async def onvif_probe(
and uri.startswith("rtsp://") and uri.startswith("rtsp://")
and "@" not in uri and "@" not in uri
): ):
cred = f"{quote_plus(username)}:{quote_plus(password)}@" cred = f"{username}:{password}@"
cred_uri = uri.replace("rtsp://", f"rtsp://{cred}", 1) cred_uri = uri.replace("rtsp://", f"rtsp://{cred}", 1)
if cred_uri not in to_test: if cred_uri not in to_test:
to_test.append(cred_uri) to_test.append(cred_uri)

View File

@ -2,6 +2,7 @@
import logging import logging
import os import os
import threading
import warnings import warnings
from transformers import AutoFeatureExtractor, AutoTokenizer from transformers import AutoFeatureExtractor, AutoTokenizer
@ -54,6 +55,7 @@ class JinaV1TextEmbedding(BaseEmbedding):
self.tokenizer = None self.tokenizer = None
self.feature_extractor = None self.feature_extractor = None
self.runner = None self.runner = None
self._lock = threading.Lock()
files_names = list(self.download_urls.keys()) + [self.tokenizer_file] files_names = list(self.download_urls.keys()) + [self.tokenizer_file]
if not all( if not all(
@ -134,17 +136,18 @@ class JinaV1TextEmbedding(BaseEmbedding):
) )
def _preprocess_inputs(self, raw_inputs): def _preprocess_inputs(self, raw_inputs):
max_length = max(len(self.tokenizer.encode(text)) for text in raw_inputs) with self._lock:
return [ max_length = max(len(self.tokenizer.encode(text)) for text in raw_inputs)
self.tokenizer( return [
text, self.tokenizer(
padding="max_length", text,
truncation=True, padding="max_length",
max_length=max_length, truncation=True,
return_tensors="np", max_length=max_length,
) return_tensors="np",
for text in raw_inputs )
] for text in raw_inputs
]
class JinaV1ImageEmbedding(BaseEmbedding): class JinaV1ImageEmbedding(BaseEmbedding):
@ -174,6 +177,7 @@ class JinaV1ImageEmbedding(BaseEmbedding):
self.download_path = os.path.join(MODEL_CACHE_DIR, self.model_name) self.download_path = os.path.join(MODEL_CACHE_DIR, self.model_name)
self.feature_extractor = None self.feature_extractor = None
self.runner: BaseModelRunner | None = None self.runner: BaseModelRunner | None = None
self._lock = threading.Lock()
files_names = list(self.download_urls.keys()) files_names = list(self.download_urls.keys())
if not all( if not all(
os.path.exists(os.path.join(self.download_path, n)) for n in files_names os.path.exists(os.path.join(self.download_path, n)) for n in files_names
@ -216,8 +220,9 @@ class JinaV1ImageEmbedding(BaseEmbedding):
) )
def _preprocess_inputs(self, raw_inputs): def _preprocess_inputs(self, raw_inputs):
processed_images = [self._process_image(img) for img in raw_inputs] with self._lock:
return [ processed_images = [self._process_image(img) for img in raw_inputs]
self.feature_extractor(images=image, return_tensors="np") return [
for image in processed_images self.feature_extractor(images=image, return_tensors="np")
] for image in processed_images
]

View File

@ -3,8 +3,8 @@
import logging import logging
from typing import Optional from typing import Optional
import google.generativeai as genai from google import genai
from google.api_core.exceptions import GoogleAPICallError from google.genai import errors, types
from frigate.config import GenAIProviderEnum from frigate.config import GenAIProviderEnum
from frigate.genai import GenAIClient, register_genai_provider from frigate.genai import GenAIClient, register_genai_provider
@ -16,44 +16,51 @@ logger = logging.getLogger(__name__)
class GeminiClient(GenAIClient): class GeminiClient(GenAIClient):
"""Generative AI client for Frigate using Gemini.""" """Generative AI client for Frigate using Gemini."""
provider: genai.GenerativeModel provider: genai.Client
def _init_provider(self): def _init_provider(self):
"""Initialize the client.""" """Initialize the client."""
genai.configure(api_key=self.genai_config.api_key) # Merge provider_options into HttpOptions
return genai.GenerativeModel( http_options_dict = {
self.genai_config.model, **self.genai_config.provider_options "api_version": "v1",
"timeout": int(self.timeout * 1000), # requires milliseconds
}
if isinstance(self.genai_config.provider_options, dict):
http_options_dict.update(self.genai_config.provider_options)
return genai.Client(
api_key=self.genai_config.api_key,
http_options=types.HttpOptions(**http_options_dict),
) )
def _send(self, prompt: str, images: list[bytes]) -> Optional[str]: def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
"""Submit a request to Gemini.""" """Submit a request to Gemini."""
data = [ contents = [
{ types.Part.from_bytes(data=img, mime_type="image/jpeg") for img in images
"mime_type": "image/jpeg",
"data": img,
}
for img in images
] + [prompt] ] + [prompt]
try: try:
# Merge runtime_options into generation_config if provided # Merge runtime_options into generation_config if provided
generation_config_dict = {"candidate_count": 1} generation_config_dict = {"candidate_count": 1}
generation_config_dict.update(self.genai_config.runtime_options) generation_config_dict.update(self.genai_config.runtime_options)
response = self.provider.generate_content( response = self.provider.models.generate_content(
data, model=self.genai_config.model,
generation_config=genai.types.GenerationConfig( contents=contents,
**generation_config_dict config=types.GenerateContentConfig(
), **generation_config_dict,
request_options=genai.types.RequestOptions(
timeout=self.timeout,
), ),
) )
except GoogleAPICallError as e: except errors.APIError as e:
logger.warning("Gemini returned an error: %s", str(e)) logger.warning("Gemini returned an error: %s", str(e))
return None return None
except Exception as e:
logger.warning("An unexpected error occurred with Gemini: %s", str(e))
return None
try: try:
description = response.text.strip() description = response.text.strip()
except ValueError: except (ValueError, AttributeError):
# No description was generated # No description was generated
return None return None
return description return description

View File

@ -89,6 +89,7 @@ def apply_log_levels(default: str, log_levels: dict[str, LogLevel]) -> None:
"ws4py": LogLevel.error, "ws4py": LogLevel.error,
"PIL": LogLevel.warning, "PIL": LogLevel.warning,
"numba": LogLevel.warning, "numba": LogLevel.warning,
"google_genai.models": LogLevel.warning,
**log_levels, **log_levels,
} }

View File

@ -540,9 +540,16 @@ def get_jetson_stats() -> Optional[dict[int, dict]]:
try: try:
results["mem"] = "-" # no discrete gpu memory results["mem"] = "-" # no discrete gpu memory
with open("/sys/devices/gpu.0/load", "r") as f: if os.path.exists("/sys/devices/gpu.0/load"):
gpuload = float(f.readline()) / 10 with open("/sys/devices/gpu.0/load", "r") as f:
results["gpu"] = f"{gpuload}%" gpuload = float(f.readline()) / 10
results["gpu"] = f"{gpuload}%"
elif os.path.exists("/sys/devices/platform/gpu.0/load"):
with open("/sys/devices/platform/gpu.0/load", "r") as f:
gpuload = float(f.readline()) / 10
results["gpu"] = f"{gpuload}%"
else:
results["gpu"] = "-"
except Exception: except Exception:
return None return None

View File

@ -887,7 +887,10 @@ function LifecycleItem({
</span> </span>
<span className="font-medium text-foreground"> <span className="font-medium text-foreground">
{attributeAreaPx}{" "} {attributeAreaPx}{" "}
{t("information.pixels", { ns: "common" })}{" "} {t("information.pixels", {
ns: "common",
area: attributeAreaPx,
})}{" "}
<span className="text-secondary-foreground">·</span>{" "} <span className="text-secondary-foreground">·</span>{" "}
{attributeAreaPct}% {attributeAreaPct}%
</span> </span>

View File

@ -81,7 +81,8 @@ export async function detectReolinkCamera(
export function maskUri(uri: string): string { export function maskUri(uri: string): string {
try { try {
// Handle RTSP URLs with user:pass@host format // Handle RTSP URLs with user:pass@host format
const rtspMatch = uri.match(/rtsp:\/\/([^:]+):([^@]+)@(.+)/); // Use greedy match for password to handle passwords with @
const rtspMatch = uri.match(/rtsp:\/\/([^:]+):(.+)@(.+)/);
if (rtspMatch) { if (rtspMatch) {
return `rtsp://${rtspMatch[1]}:${"*".repeat(4)}@${rtspMatch[3]}`; return `rtsp://${rtspMatch[1]}:${"*".repeat(4)}@${rtspMatch[3]}`;
} }

View File

@ -266,7 +266,10 @@ function ModelCard({ config, onClick, onUpdate, onDelete }: ModelCardProps) {
return undefined; return undefined;
} }
const keys = Object.keys(dataset.categories).filter((key) => key != "none"); const keys = Object.keys(dataset.categories).filter(
(key) => key != "none" && key.toLowerCase() != "unknown",
);
if (keys.length === 0) { if (keys.length === 0) {
return undefined; return undefined;
} }

View File

@ -75,6 +75,7 @@ import SearchDetailDialog, {
} from "@/components/overlay/detail/SearchDetailDialog"; } from "@/components/overlay/detail/SearchDetailDialog";
import { SearchResult } from "@/types/search"; import { SearchResult } from "@/types/search";
import { HiSparkles } from "react-icons/hi"; import { HiSparkles } from "react-icons/hi";
import { capitalizeFirstLetter } from "@/utils/stringUtil";
type ModelTrainingViewProps = { type ModelTrainingViewProps = {
model: CustomClassificationModelConfig; model: CustomClassificationModelConfig;
@ -88,7 +89,7 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) {
// title // title
useEffect(() => { useEffect(() => {
document.title = `${model.name.toUpperCase()} - ${t("documentTitle")}`; document.title = `${capitalizeFirstLetter(model.name)} - ${t("documentTitle")}`;
}, [model.name, t]); }, [model.name, t]);
// model state // model state