Compare commits

..

7 Commits

Author SHA1 Message Date
Nicolas Mowen
11bb9fed4c Fix llama.cpp media marker 2026-04-29 08:43:32 -06:00
Josh Hawkins
3201985359 docs tweak 2026-04-29 09:26:51 -05:00
Josh Hawkins
914941090b ensure recording staleness threshold scales with segment_time 2026-04-29 08:42:57 -05:00
Josh Hawkins
d315e0874a match link color with other sections 2026-04-28 22:42:10 -05:00
Josh Hawkins
7cb29420c1 fix color
Co-authored-by: Copilot <copilot@github.com>
2026-04-28 22:39:14 -05:00
Josh Hawkins
a92215480e add docs link to media sync settings 2026-04-28 22:36:44 -05:00
Josh Hawkins
24b7653ea8 ensure embeddings process restarts after maintainer thread crash 2026-04-28 14:29:54 -05:00
6 changed files with 72 additions and 280 deletions

View File

@ -32,14 +32,11 @@ RUN echo /opt/rocm/lib|tee /opt/rocm-dist/etc/ld.so.conf.d/rocm.conf
FROM deps AS deps-prelim
COPY docker/rocm/debian-backports.sources /etc/apt/sources.list.d/debian-backports.sources
# install_deps.sh upgraded libstdc++6 from trixie for Battlemage; the matching
# -dev package must also come from trixie or apt refuses to satisfy it.
RUN echo "deb http://deb.debian.org/debian trixie main" > /etc/apt/sources.list.d/trixie.list && \
apt-get update && \
RUN apt-get update && \
apt-get install -y libnuma1 && \
apt-get install -qq -y -t bookworm-backports mesa-va-drivers mesa-vulkan-drivers && \
apt-get install -qq -y -t trixie libstdc++-14-dev && \
rm -f /etc/apt/sources.list.d/trixie.list && \
# Install C++ standard library headers for HIPRTC kernel compilation fallback
apt-get install -qq -y libstdc++-12-dev && \
rm -rf /var/lib/apt/lists/*
WORKDIR /opt/frigate

View File

@ -5,15 +5,13 @@ import logging
import random
import string
import time
import zipfile
from collections import deque
from pathlib import Path
from typing import Iterator, List, Optional
from typing import List, Optional
import psutil
from fastapi import APIRouter, Depends, Query, Request
from fastapi.responses import JSONResponse, StreamingResponse
from pathvalidate import sanitize_filename, sanitize_filepath
from fastapi.responses import JSONResponse
from pathvalidate import sanitize_filepath
from peewee import DoesNotExist
from playhouse.shortcuts import model_to_dict
@ -363,136 +361,6 @@ def get_export_case(case_id: str):
)
_ZIP_STREAM_CHUNK_SIZE = 1024 * 1024 # 1 MiB
class _StreamingZipBuffer:
"""File-like sink for ZipFile that exposes written bytes via drain().
ZipFile writes synchronously into this buffer; the generator drains the
queue between writes so StreamingResponse can yield bytes without
materializing the whole archive in memory.
"""
def __init__(self) -> None:
self._queue: deque[bytes] = deque()
self._offset = 0
def write(self, data: bytes) -> int:
if data:
self._queue.append(bytes(data))
self._offset += len(data)
return len(data)
def tell(self) -> int:
return self._offset
def flush(self) -> None:
pass
def drain(self) -> Iterator[bytes]:
while self._queue:
yield self._queue.popleft()
def _unique_archive_name(export: Export, used: set[str]) -> str:
    """Pick an mp4 arcname unique within *used*, preferring the export's name.

    Falls back to "<camera>_<unix_ts>" when the export has no usable name,
    and appends "_1", "_2", ... until the candidate is not already taken.
    The chosen name is recorded in *used* before being returned.
    """
    stem = sanitize_filename(export.name) if export.name else None
    if not stem:
        stem = f"{export.camera}_{int(datetime.datetime.timestamp(export.date))}"
    name = f"{stem}.mp4"
    suffix = 1
    while name in used:
        name = f"{stem}_{suffix}.mp4"
        suffix += 1
    used.add(name)
    return name
def _stream_case_archive(exports: List[Export]) -> Iterator[bytes]:
    """Yield bytes of a zip archive built from the given exports' mp4 files."""
    # Sink that captures ZipFile's writes so we can stream them out as we go.
    buffer = _StreamingZipBuffer()
    # Arcnames already assigned, so duplicate export names get numeric suffixes.
    used_names: set[str] = set()
    # ZIP_STORED: mp4 is already compressed, recompressing wastes CPU for ~0% size win.
    with zipfile.ZipFile(
        buffer,
        mode="w",
        compression=zipfile.ZIP_STORED,
        allowZip64=True,
    ) as archive:
        for export in exports:
            source = Path(export.video_path)
            if not source.exists():
                # Skip exports whose media file is gone instead of failing the whole download.
                continue
            arcname = _unique_archive_name(export, used_names)
            with (
                archive.open(arcname, mode="w", force_zip64=True) as entry,
                source.open("rb") as src,
            ):
                while True:
                    chunk = src.read(_ZIP_STREAM_CHUNK_SIZE)
                    if not chunk:
                        break
                    entry.write(chunk)
                    # Drain after every chunk so peak memory stays ~one chunk.
                    # NOTE(review): drain placement reconstructed from a
                    # whitespace-stripped paste — confirm against the original.
                    yield from buffer.drain()
            # Drain bytes ZipFile emitted while closing this entry.
            yield from buffer.drain()
    # Drain the central directory written when the archive context closed.
    yield from buffer.drain()
@router.get(
    "/cases/{case_id}/download",
    dependencies=[Depends(allow_any_authenticated())],
    summary="Download export case as zip",
    description="Streams a zip archive containing every completed export's mp4 for the given case.",
)
def download_export_case(
    case_id: str,
    allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
    """Stream a zip of every finished export in *case_id* the caller may view.

    Returns 404 when the case does not exist or when no completed exports on
    an allowed camera are attached to it.
    """
    try:
        export_case = ExportCase.get(ExportCase.id == case_id)
    except DoesNotExist:
        return JSONResponse(
            content={"success": False, "message": "Export case not found"},
            status_code=404,
        )

    # Completed exports only, restricted to cameras the caller can access.
    completed = list(
        Export.select()
        .where(
            Export.export_case == case_id,
            ~Export.in_progress,
            Export.camera << allowed_cameras,
        )
        .order_by(Export.date.asc())
    )
    if not completed:
        return JSONResponse(
            content={"success": False, "message": "No exports available to download."},
            status_code=404,
        )

    # Use the sanitized case name for the download file, falling back to the id.
    download_name = (
        sanitize_filename(export_case.name) if export_case.name else ""
    ) or case_id
    return StreamingResponse(
        _stream_case_archive(completed),
        media_type="application/zip",
        headers={
            "Content-Disposition": f'attachment; filename="{download_name}.zip"',
        },
    )
@router.patch(
"/cases/{case_id}",
response_model=GenericResponse,

View File

@ -153,6 +153,9 @@ Each line represents a detection state, not necessarily unique individuals. The
if "other_concerns" in schema.get("required", []):
schema["required"].remove("other_concerns")
# OpenAI strict mode requires additionalProperties: false on all objects
schema["additionalProperties"] = False
response_format = {
"type": "json_schema",
"json_schema": {

View File

@ -136,44 +136,22 @@ class GeminiClient(GenAIClient):
)
)
elif role == "assistant":
parts: list[types.Part] = []
if content:
parts.append(types.Part.from_text(text=content))
for tc in msg.get("tool_calls") or []:
func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
gemini_messages.append(
types.Content(
role="model", parts=[types.Part.from_text(text=content)]
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool":
# Handle tool response
response_payload = (
content if isinstance(content, dict) else {"result": content}
)
function_response = {
"name": msg.get("name", ""),
"response": content,
}
gemini_messages.append(
types.Content(
role="function",
parts=[
types.Part.from_function_response(
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
types.Part.from_function_response(function_response) # type: ignore[misc,call-arg,arg-type]
],
)
)
@ -365,44 +343,22 @@ class GeminiClient(GenAIClient):
)
)
elif role == "assistant":
parts: list[types.Part] = []
if content:
parts.append(types.Part.from_text(text=content))
for tc in msg.get("tool_calls") or []:
func = tc.get("function") or {}
tc_name = func.get("name") or ""
tc_args: Any = func.get("arguments")
if isinstance(tc_args, str):
try:
tc_args = json.loads(tc_args)
except (json.JSONDecodeError, TypeError):
tc_args = {}
if not isinstance(tc_args, dict):
tc_args = {}
if tc_name:
parts.append(
types.Part.from_function_call(
name=tc_name, args=tc_args
gemini_messages.append(
types.Content(
role="model", parts=[types.Part.from_text(text=content)]
)
)
if not parts:
parts.append(types.Part.from_text(text=" "))
gemini_messages.append(types.Content(role="model", parts=parts))
elif role == "tool":
# Handle tool response
response_payload = (
content if isinstance(content, dict) else {"result": content}
)
function_response = {
"name": msg.get("name", ""),
"response": content,
}
gemini_messages.append(
types.Content(
role="function",
parts=[
types.Part.from_function_response(
name=msg.get("name")
or msg.get("tool_call_id")
or "",
response=response_payload,
)
types.Part.from_function_response(function_response) # type: ignore[misc,call-arg,arg-type]
],
)
)

View File

@ -73,17 +73,8 @@ class OpenAIClient(GenAIClient):
**self.genai_config.runtime_options,
}
if response_format:
# OpenAI strict mode requires additionalProperties: false on the schema
if response_format.get("type") == "json_schema" and response_format.get(
"json_schema", {}
).get("strict"):
schema = response_format.get("json_schema", {}).get("schema")
if isinstance(schema, dict):
schema["additionalProperties"] = False
request_params["response_format"] = response_format
result = self.provider.chat.completions.create(**request_params)
if (
result is not None
and hasattr(result, "choices")

View File

@ -57,7 +57,6 @@ import { useTranslation } from "react-i18next";
import { IoMdArrowRoundBack } from "react-icons/io";
import {
LuDownload,
LuFolderPlus,
LuFolderX,
LuPencil,
@ -778,29 +777,8 @@ function Exports() {
filters={["cameras"]}
onUpdateFilter={setExportFilter}
/>
<div className="flex items-center gap-1 md:gap-2">
{(exportsByCase[selectedCase.id]?.length ?? 0) > 0 && (
<Button
asChild
className="flex items-center gap-2 p-2"
size="sm"
aria-label={t("button.download", { ns: "common" })}
>
<a
download
href={`${baseUrl}api/cases/${selectedCase.id}/download`}
>
<LuDownload className="text-secondary-foreground" />
{!isMobile && (
<div className="text-primary">
{t("button.download", { ns: "common" })}
</div>
)}
</a>
</Button>
)}
{isAdmin && (
<>
<div className="flex items-center gap-1 md:gap-2">
<Button
className="flex items-center gap-2 p-2"
size="sm"
@ -845,9 +823,8 @@ function Exports() {
</div>
)}
</Button>
</>
)}
</div>
)}
</div>
)}
</>