This commit is contained in:
Nicolas Mowen 2026-04-30 00:05:18 +00:00 committed by GitHub
commit ab6392d8f4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 97 additions and 31 deletions

View File

@@ -1368,12 +1368,17 @@ def preview_gif(
file_start = f"preview_{camera_name}-"
start_file = f"{file_start}{start_ts}.{PREVIEW_FRAME_TYPE}"
end_file = f"{file_start}{end_ts}.{PREVIEW_FRAME_TYPE}"
camera_files = [
entry.name
for entry in os.scandir(preview_dir)
if entry.name.startswith(file_start)
]
camera_files.sort()
selected_previews = []
for file in sorted(os.listdir(preview_dir)):
if not file.startswith(file_start):
continue
for file in camera_files:
if file < start_file:
continue
@@ -1550,12 +1555,17 @@ def preview_mp4(
file_start = f"preview_{camera_name}-"
start_file = f"{file_start}{start_ts}.{PREVIEW_FRAME_TYPE}"
end_file = f"{file_start}{end_ts}.{PREVIEW_FRAME_TYPE}"
camera_files = [
entry.name
for entry in os.scandir(preview_dir)
if entry.name.startswith(file_start)
]
camera_files.sort()
selected_previews = []
for file in sorted(os.listdir(preview_dir)):
if not file.startswith(file_start):
continue
for file in camera_files:
if file < start_file:
continue

View File

@@ -148,12 +148,17 @@ def get_preview_frames_from_cache(camera_name: str, start_ts: float, end_ts: flo
file_start = f"preview_{camera_name}-"
start_file = f"{file_start}{start_ts}.{PREVIEW_FRAME_TYPE}"
end_file = f"{file_start}{end_ts}.{PREVIEW_FRAME_TYPE}"
camera_files = [
entry.name
for entry in os.scandir(preview_dir)
if entry.name.startswith(file_start)
]
camera_files.sort()
selected_previews = []
for file in sorted(os.listdir(preview_dir)):
if not file.startswith(file_start):
continue
for file in camera_files:
if file < start_file:
continue

View File

@@ -366,12 +366,17 @@ class ReviewDescriptionProcessor(PostProcessorApi):
file_start = f"preview_{camera}-"
start_file = f"{file_start}{start_time}.webp"
end_file = f"{file_start}{end_time}.webp"
camera_files = [
entry.name
for entry in os.scandir(preview_dir)
if entry.name.startswith(file_start)
]
camera_files.sort()
all_frames: list[str] = []
for file in sorted(os.listdir(preview_dir)):
if not file.startswith(file_start):
continue
for file in camera_files:
if file < start_file:
if len(all_frames):
all_frames[0] = os.path.join(preview_dir, file)

View File

@@ -153,9 +153,6 @@ Each line represents a detection state, not necessarily unique individuals. The
if "other_concerns" in schema.get("required", []):
schema["required"].remove("other_concerns")
# OpenAI strict mode requires additionalProperties: false on all objects
schema["additionalProperties"] = False
response_format = {
"type": "json_schema",
"json_schema": {

View File

@@ -136,11 +136,29 @@ class GeminiClient(GenAIClient):
)
)
elif role == "assistant":
gemini_messages.append(
types.Content(
role="model", parts=[types.Part.from_text(text=content)]
)
)
parts: list[types.Part] = []
if content:
parts.append(types.Part.from_text(text=content))
for tc in msg.get("tool_calls") or []:
func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool":
# Handle tool response
response_payload = (
@@ -151,7 +169,9 @@ class GeminiClient(GenAIClient):
role="function",
parts=[
types.Part.from_function_response(
name=msg.get("name", ""),
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
],
@@ -345,11 +365,29 @@ class GeminiClient(GenAIClient):
)
)
elif role == "assistant":
gemini_messages.append(
types.Content(
role="model", parts=[types.Part.from_text(text=content)]
)
)
parts: list[types.Part] = []
if content:
parts.append(types.Part.from_text(text=content))
for tc in msg.get("tool_calls") or []:
func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool":
# Handle tool response
response_payload = (
@@ -360,7 +398,9 @@ class GeminiClient(GenAIClient):
role="function",
parts=[
types.Part.from_function_response(
name=msg.get("name", ""),
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
],

View File

@@ -73,8 +73,17 @@ class OpenAIClient(GenAIClient):
**self.genai_config.runtime_options,
}
if response_format:
# OpenAI strict mode requires additionalProperties: false on the schema
if response_format.get("type") == "json_schema" and response_format.get(
"json_schema", {}
).get("strict"):
schema = response_format.get("json_schema", {}).get("schema")
if isinstance(schema, dict):
schema["additionalProperties"] = False
request_params["response_format"] = response_format
result = self.provider.chat.completions.create(**request_params)
if (
result is not None
and hasattr(result, "choices")