frigate/frigate/genai/utils.py
Josh Hawkins 65db9b0aec
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
Fixes (#22280)
* fix ollama chat tool calling

handle dict arguments, streaming fallback, and message format

* pin setuptools<81 to ensure pkg_resources remains available

When ensure_torch_dependencies() installs torch/torchvision via pip, it can upgrade setuptools to >=81.0.0, which removed the pkg_resources module. rknn-toolkit2 depends on pkg_resources internally, so subsequent RKNN conversion fails with No module named 'pkg_resources'.
2026-03-05 14:11:32 -06:00

76 lines
2.4 KiB
Python

"""Shared helpers for GenAI providers and chat (OpenAI-style messages, tool call parsing)."""
import json
import logging
from typing import Any, List, Optional
logger = logging.getLogger(__name__)
def parse_tool_calls_from_message(
    message: dict[str, Any],
) -> Optional[list[dict[str, Any]]]:
    """
    Parse tool_calls from an OpenAI-style message dict.

    Message may have "tool_calls" as a list of:
    {"id": str, "function": {"name": str, "arguments": str}, ...}

    Returns a list of {"id", "name", "arguments"} with arguments parsed as dict,
    or None if no tool_calls. Used by Ollama and LlamaCpp (non-stream) responses.
    """
    raw = message.get("tool_calls")
    if not raw or not isinstance(raw, list):
        return None
    result: list[dict[str, Any]] = []
    for idx, tool_call in enumerate(raw):
        function_data = tool_call.get("function") or {}
        raw_arguments = function_data.get("arguments") or {}
        if isinstance(raw_arguments, dict):
            arguments = raw_arguments
        elif isinstance(raw_arguments, str):
            # json.loads on a str can only raise JSONDecodeError; catching
            # KeyError/TypeError here would be dead code.
            try:
                parsed = json.loads(raw_arguments)
            except json.JSONDecodeError as e:
                logger.warning(
                    "Failed to parse tool call arguments: %s, tool: %s",
                    e,
                    function_data.get("name", "unknown"),
                )
                parsed = {}
            # Valid JSON may still be a scalar/list (e.g. "[1, 2]"); the
            # contract promises a dict, so fall back to empty arguments.
            arguments = parsed if isinstance(parsed, dict) else {}
        else:
            # Unexpected type (int, list, ...): treat as no arguments.
            arguments = {}
        result.append(
            {
                # Some providers omit or blank the id; synthesize a stable one.
                "id": tool_call.get("id", "") or f"call_{idx}",
                "name": function_data.get("name", ""),
                "arguments": arguments,
            }
        )
    return result if result else None
def build_assistant_message_for_conversation(
    content: Any,
    tool_calls_raw: Optional[List[dict[str, Any]]],
) -> dict[str, Any]:
    """
    Build the assistant message dict in OpenAI format for appending to a conversation.

    tool_calls_raw: list of {"id", "name", "arguments"} (arguments as dict), or None.
    When tool calls are present, each one is re-serialized into the OpenAI
    wire shape with "arguments" JSON-encoded as a string.
    """
    message: dict[str, Any] = {"role": "assistant", "content": content}
    if not tool_calls_raw:
        return message

    formatted: list[dict[str, Any]] = []
    for call in tool_calls_raw:
        formatted.append(
            {
                "id": call["id"],
                "type": "function",
                "function": {
                    "name": call["name"],
                    # Missing/falsy arguments serialize as an empty object.
                    "arguments": json.dumps(call.get("arguments") or {}),
                },
            }
        )
    message["tool_calls"] = formatted
    return message