Compare commits

..

5 Commits

Author SHA1 Message Date
Nicolas Mowen
b47a47c44a Fix gemini tool calling 2026-04-29 17:03:11 -06:00
Nicolas Mowen
33abaaa9f8 Move openai specific workaround so it doesn't apply to other providers 2026-04-29 16:51:59 -06:00
Josh Hawkins
95b5b89ed9
Miscellaneous fixes (#23032)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
* ensure embeddings process restarts after maintainer thread crash

* add docs link to media sync settings

* fix color

Co-authored-by: Copilot <copilot@github.com>

* match link color with other sections

* ensure recording staleness threshold scales with segment_time

* docs tweak

* Fix llama.cpp media marker

* Fix gemini tools call

---------

Co-authored-by: Copilot <copilot@github.com>
Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>
2026-04-29 16:20:19 -06:00
Nicolas Mowen
a182385618
Fix ROCm build (#23040)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-29 09:30:16 -05:00
Nicolas Mowen
088e1ad7ef
Add ability to download case as zip (#23034)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2026-04-28 19:11:41 -05:00
6 changed files with 280 additions and 72 deletions

View File

@ -32,11 +32,14 @@ RUN echo /opt/rocm/lib|tee /opt/rocm-dist/etc/ld.so.conf.d/rocm.conf
FROM deps AS deps-prelim FROM deps AS deps-prelim
COPY docker/rocm/debian-backports.sources /etc/apt/sources.list.d/debian-backports.sources COPY docker/rocm/debian-backports.sources /etc/apt/sources.list.d/debian-backports.sources
RUN apt-get update && \ # install_deps.sh upgraded libstdc++6 from trixie for Battlemage; the matching
# -dev package must also come from trixie or apt refuses to satisfy it.
RUN echo "deb http://deb.debian.org/debian trixie main" > /etc/apt/sources.list.d/trixie.list && \
apt-get update && \
apt-get install -y libnuma1 && \ apt-get install -y libnuma1 && \
apt-get install -qq -y -t bookworm-backports mesa-va-drivers mesa-vulkan-drivers && \ apt-get install -qq -y -t bookworm-backports mesa-va-drivers mesa-vulkan-drivers && \
# Install C++ standard library headers for HIPRTC kernel compilation fallback apt-get install -qq -y -t trixie libstdc++-14-dev && \
apt-get install -qq -y libstdc++-12-dev && \ rm -f /etc/apt/sources.list.d/trixie.list && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
WORKDIR /opt/frigate WORKDIR /opt/frigate

View File

@ -5,13 +5,15 @@ import logging
import random import random
import string import string
import time import time
import zipfile
from collections import deque
from pathlib import Path from pathlib import Path
from typing import List, Optional from typing import Iterator, List, Optional
import psutil import psutil
from fastapi import APIRouter, Depends, Query, Request from fastapi import APIRouter, Depends, Query, Request
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse, StreamingResponse
from pathvalidate import sanitize_filepath from pathvalidate import sanitize_filename, sanitize_filepath
from peewee import DoesNotExist from peewee import DoesNotExist
from playhouse.shortcuts import model_to_dict from playhouse.shortcuts import model_to_dict
@ -361,6 +363,136 @@ def get_export_case(case_id: str):
) )
_ZIP_STREAM_CHUNK_SIZE = 1024 * 1024 # 1 MiB
class _StreamingZipBuffer:
"""File-like sink for ZipFile that exposes written bytes via drain().
ZipFile writes synchronously into this buffer; the generator drains the
queue between writes so StreamingResponse can yield bytes without
materializing the whole archive in memory.
"""
def __init__(self) -> None:
self._queue: deque[bytes] = deque()
self._offset = 0
def write(self, data: bytes) -> int:
if data:
self._queue.append(bytes(data))
self._offset += len(data)
return len(data)
def tell(self) -> int:
return self._offset
def flush(self) -> None:
pass
def drain(self) -> Iterator[bytes]:
while self._queue:
yield self._queue.popleft()
def _unique_archive_name(export: Export, used: set[str]) -> str:
base = sanitize_filename(export.name) if export.name else None
if not base:
base = f"{export.camera}_{int(datetime.datetime.timestamp(export.date))}"
candidate = f"{base}.mp4"
counter = 1
while candidate in used:
candidate = f"{base}_{counter}.mp4"
counter += 1
used.add(candidate)
return candidate
def _stream_case_archive(exports: List[Export]) -> Iterator[bytes]:
    """Yield bytes of a zip archive built from the given exports' mp4 files."""
    sink = _StreamingZipBuffer()
    taken: set[str] = set()
    # ZIP_STORED: mp4 is already compressed, recompressing wastes CPU for ~0% size win.
    with zipfile.ZipFile(
        sink,
        mode="w",
        compression=zipfile.ZIP_STORED,
        allowZip64=True,
    ) as archive:
        for export in exports:
            video = Path(export.video_path)
            if not video.exists():
                # Skip exports whose file has vanished instead of failing
                # the whole archive mid-stream.
                continue
            arcname = _unique_archive_name(export, taken)
            with (
                archive.open(arcname, mode="w", force_zip64=True) as entry,
                video.open("rb") as src,
            ):
                while chunk := src.read(_ZIP_STREAM_CHUNK_SIZE):
                    entry.write(chunk)
                    # Hand back whatever ZipFile has produced so far.
                    yield from sink.drain()
            # Closing the entry emits its data descriptor; flush that too.
            yield from sink.drain()
    # Closing the archive emits the central directory.
    yield from sink.drain()
@router.get(
    "/cases/{case_id}/download",
    dependencies=[Depends(allow_any_authenticated())],
    summary="Download export case as zip",
    description="Streams a zip archive containing every completed export's mp4 for the given case.",
)
def download_export_case(
    case_id: str,
    allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
    """Stream a zip of all completed exports for a case, limited to cameras
    the requesting user is allowed to view."""
    try:
        case = ExportCase.get(ExportCase.id == case_id)
    except DoesNotExist:
        return JSONResponse(
            content={"success": False, "message": "Export case not found"},
            status_code=404,
        )

    # Completed exports only, oldest first, filtered by camera permission.
    exports = list(
        Export.select()
        .where(
            Export.export_case == case_id,
            ~Export.in_progress,
            Export.camera << allowed_cameras,
        )
        .order_by(Export.date.asc())
    )
    if not exports:
        return JSONResponse(
            content={"success": False, "message": "No exports available to download."},
            status_code=404,
        )

    # Fall back to the case id when the case name sanitizes to nothing.
    archive_base = (sanitize_filename(case.name) if case.name else "") or case_id
    return StreamingResponse(
        _stream_case_archive(exports),
        media_type="application/zip",
        headers={
            "Content-Disposition": f'attachment; filename="{archive_base}.zip"',
        },
    )
@router.patch( @router.patch(
"/cases/{case_id}", "/cases/{case_id}",
response_model=GenericResponse, response_model=GenericResponse,

View File

@ -153,9 +153,6 @@ Each line represents a detection state, not necessarily unique individuals. The
if "other_concerns" in schema.get("required", []): if "other_concerns" in schema.get("required", []):
schema["required"].remove("other_concerns") schema["required"].remove("other_concerns")
# OpenAI strict mode requires additionalProperties: false on all objects
schema["additionalProperties"] = False
response_format = { response_format = {
"type": "json_schema", "type": "json_schema",
"json_schema": { "json_schema": {

View File

@ -136,22 +136,44 @@ class GeminiClient(GenAIClient):
) )
) )
elif role == "assistant": elif role == "assistant":
gemini_messages.append( parts: list[types.Part] = []
types.Content( if content:
role="model", parts=[types.Part.from_text(text=content)] parts.append(types.Part.from_text(text=content))
) for tc in msg.get("tool_calls") or []:
) func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool": elif role == "tool":
# Handle tool response # Handle tool response
function_response = { response_payload = (
"name": msg.get("name", ""), content if isinstance(content, dict) else {"result": content}
"response": content, )
}
gemini_messages.append( gemini_messages.append(
types.Content( types.Content(
role="function", role="function",
parts=[ parts=[
types.Part.from_function_response(function_response) # type: ignore[misc,call-arg,arg-type] types.Part.from_function_response(
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
], ],
) )
) )
@ -343,22 +365,44 @@ class GeminiClient(GenAIClient):
) )
) )
elif role == "assistant": elif role == "assistant":
gemini_messages.append( parts: list[types.Part] = []
types.Content( if content:
role="model", parts=[types.Part.from_text(text=content)] parts.append(types.Part.from_text(text=content))
) for tc in msg.get("tool_calls") or []:
) func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool": elif role == "tool":
# Handle tool response # Handle tool response
function_response = { response_payload = (
"name": msg.get("name", ""), content if isinstance(content, dict) else {"result": content}
"response": content, )
}
gemini_messages.append( gemini_messages.append(
types.Content( types.Content(
role="function", role="function",
parts=[ parts=[
types.Part.from_function_response(function_response) # type: ignore[misc,call-arg,arg-type] types.Part.from_function_response(
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
], ],
) )
) )

View File

@ -73,8 +73,17 @@ class OpenAIClient(GenAIClient):
**self.genai_config.runtime_options, **self.genai_config.runtime_options,
} }
if response_format: if response_format:
# OpenAI strict mode requires additionalProperties: false on the schema
if response_format.get("type") == "json_schema" and response_format.get(
"json_schema", {}
).get("strict"):
schema = response_format.get("json_schema", {}).get("schema")
if isinstance(schema, dict):
schema["additionalProperties"] = False
request_params["response_format"] = response_format request_params["response_format"] = response_format
result = self.provider.chat.completions.create(**request_params) result = self.provider.chat.completions.create(**request_params)
if ( if (
result is not None result is not None
and hasattr(result, "choices") and hasattr(result, "choices")

View File

@ -57,6 +57,7 @@ import { useTranslation } from "react-i18next";
import { IoMdArrowRoundBack } from "react-icons/io"; import { IoMdArrowRoundBack } from "react-icons/io";
import { import {
LuDownload,
LuFolderPlus, LuFolderPlus,
LuFolderX, LuFolderX,
LuPencil, LuPencil,
@ -777,54 +778,76 @@ function Exports() {
filters={["cameras"]} filters={["cameras"]}
onUpdateFilter={setExportFilter} onUpdateFilter={setExportFilter}
/> />
{isAdmin && ( <div className="flex items-center gap-1 md:gap-2">
<div className="flex items-center gap-1 md:gap-2"> {(exportsByCase[selectedCase.id]?.length ?? 0) > 0 && (
<Button <Button
asChild
className="flex items-center gap-2 p-2" className="flex items-center gap-2 p-2"
size="sm" size="sm"
aria-label={t("toolbar.addExport")} aria-label={t("button.download", { ns: "common" })}
onClick={() => setCaseForAddExport(selectedCase)}
> >
<LuPlus className="text-secondary-foreground" /> <a
{!isMobile && ( download
<div className="text-primary"> href={`${baseUrl}api/cases/${selectedCase.id}/download`}
{t("toolbar.addExport")} >
</div> <LuDownload className="text-secondary-foreground" />
)} {!isMobile && (
<div className="text-primary">
{t("button.download", { ns: "common" })}
</div>
)}
</a>
</Button> </Button>
<Button )}
className="flex items-center gap-2 p-2" {isAdmin && (
size="sm" <>
aria-label={t("toolbar.editCase")} <Button
onClick={() => className="flex items-center gap-2 p-2"
setCaseDialog({ size="sm"
mode: "edit", aria-label={t("toolbar.addExport")}
exportCase: selectedCase, onClick={() => setCaseForAddExport(selectedCase)}
}) >
} <LuPlus className="text-secondary-foreground" />
> {!isMobile && (
<LuPencil className="text-secondary-foreground" /> <div className="text-primary">
{!isMobile && ( {t("toolbar.addExport")}
<div className="text-primary"> </div>
{t("toolbar.editCase")} )}
</div> </Button>
)} <Button
</Button> className="flex items-center gap-2 p-2"
<Button size="sm"
className="flex items-center gap-2 p-2" aria-label={t("toolbar.editCase")}
size="sm" onClick={() =>
aria-label={t("toolbar.deleteCase")} setCaseDialog({
onClick={() => setCaseToDelete(selectedCase)} mode: "edit",
> exportCase: selectedCase,
<LuTrash2 className="text-secondary-foreground" /> })
{!isMobile && ( }
<div className="text-primary"> >
{t("toolbar.deleteCase")} <LuPencil className="text-secondary-foreground" />
</div> {!isMobile && (
)} <div className="text-primary">
</Button> {t("toolbar.editCase")}
</div> </div>
)} )}
</Button>
<Button
className="flex items-center gap-2 p-2"
size="sm"
aria-label={t("toolbar.deleteCase")}
onClick={() => setCaseToDelete(selectedCase)}
>
<LuTrash2 className="text-secondary-foreground" />
{!isMobile && (
<div className="text-primary">
{t("toolbar.deleteCase")}
</div>
)}
</Button>
</>
)}
</div>
</div> </div>
)} )}
</> </>