diff --git a/frigate/genai/__init__.py b/frigate/genai/__init__.py
index e26b50757..3bc98100c 100644
--- a/frigate/genai/__init__.py
+++ b/frigate/genai/__init__.py
@@ -153,9 +153,6 @@ Each line represents a detection state, not necessarily unique individuals. The
         if "other_concerns" in schema.get("required", []):
             schema["required"].remove("other_concerns")
 
-        # OpenAI strict mode requires additionalProperties: false on all objects
-        schema["additionalProperties"] = False
-
         response_format = {
             "type": "json_schema",
             "json_schema": {
diff --git a/frigate/genai/gemini.py b/frigate/genai/gemini.py
index c4befbe90..eec22a991 100644
--- a/frigate/genai/gemini.py
+++ b/frigate/genai/gemini.py
@@ -136,11 +136,29 @@ class GeminiClient(GenAIClient):
                     )
                 )
             elif role == "assistant":
-                gemini_messages.append(
-                    types.Content(
-                        role="model", parts=[types.Part.from_text(text=content)]
-                    )
-                )
+                parts: list[types.Part] = []
+                if content:
+                    parts.append(types.Part.from_text(text=content))
+                for tc in msg.get("tool_calls") or []:
+                    func = tc.get("function") or {}
+                    tc_name = func.get("name") or ""
+                    tc_args: Any = func.get("arguments")
+                    if isinstance(tc_args, str):
+                        try:
+                            tc_args = json.loads(tc_args)
+                        except (json.JSONDecodeError, TypeError):
+                            tc_args = {}
+                    if not isinstance(tc_args, dict):
+                        tc_args = {}
+                    if tc_name:
+                        parts.append(
+                            types.Part.from_function_call(
+                                name=tc_name, args=tc_args
+                            )
+                        )
+                if not parts:
+                    parts.append(types.Part.from_text(text=" "))
+                gemini_messages.append(types.Content(role="model", parts=parts))
             elif role == "tool":
                 # Handle tool response
                 response_payload = (
@@ -151,7 +169,9 @@ class GeminiClient(GenAIClient):
                     role="function",
                     parts=[
                         types.Part.from_function_response(
-                            name=msg.get("name", ""),
+                            name=msg.get("name")
+                            or msg.get("tool_call_id")
+                            or "",
                             response=response_payload,
                         )
                     ],
@@ -345,11 +365,29 @@
                     )
                 )
             elif role == "assistant":
-                gemini_messages.append(
-                    types.Content(
-                        role="model", parts=[types.Part.from_text(text=content)]
-                    )
-                )
+                parts: list[types.Part] = []
+                if content:
+                    parts.append(types.Part.from_text(text=content))
+                for tc in msg.get("tool_calls") or []:
+                    func = tc.get("function") or {}
+                    tc_name = func.get("name") or ""
+                    tc_args: Any = func.get("arguments")
+                    if isinstance(tc_args, str):
+                        try:
+                            tc_args = json.loads(tc_args)
+                        except (json.JSONDecodeError, TypeError):
+                            tc_args = {}
+                    if not isinstance(tc_args, dict):
+                        tc_args = {}
+                    if tc_name:
+                        parts.append(
+                            types.Part.from_function_call(
+                                name=tc_name, args=tc_args
+                            )
+                        )
+                if not parts:
+                    parts.append(types.Part.from_text(text=" "))
+                gemini_messages.append(types.Content(role="model", parts=parts))
             elif role == "tool":
                 # Handle tool response
                 response_payload = (
@@ -360,7 +398,9 @@
                     role="function",
                     parts=[
                         types.Part.from_function_response(
-                            name=msg.get("name", ""),
+                            name=msg.get("name")
+                            or msg.get("tool_call_id")
+                            or "",
                             response=response_payload,
                         )
                     ],
diff --git a/frigate/genai/openai.py b/frigate/genai/openai.py
index af94859de..432641332 100644
--- a/frigate/genai/openai.py
+++ b/frigate/genai/openai.py
@@ -73,8 +73,17 @@ class OpenAIClient(GenAIClient):
             **self.genai_config.runtime_options,
         }
         if response_format:
+            # OpenAI strict mode requires additionalProperties: false on the schema
+            if response_format.get("type") == "json_schema" and response_format.get(
+                "json_schema", {}
+            ).get("strict"):
+                schema = response_format.get("json_schema", {}).get("schema")
+                if isinstance(schema, dict):
+                    schema["additionalProperties"] = False
             request_params["response_format"] = response_format
+
         result = self.provider.chat.completions.create(**request_params)
+
         if (
             result is not None
             and hasattr(result, "choices")