add individual detectors to schema

add detector titles and descriptions (docstrings in pydantic are used for descriptions) and add i18n keys to globals
This commit is contained in:
Josh Hawkins 2026-02-05 20:55:32 -06:00
parent 710e34b4ab
commit d55b6226d0
17 changed files with 369 additions and 33 deletions

View File

@ -54,6 +54,7 @@ from frigate.util.builtin import (
update_yaml_file_bulk,
)
from frigate.util.config import find_config_file
from frigate.util.schema import get_config_schema
from frigate.util.services import (
get_nvidia_driver_info,
process_logs,
@ -78,9 +79,7 @@ def is_healthy():
@router.get("/config/schema.json", dependencies=[Depends(allow_public())])
def config_schema(request: Request):
return Response(
content=request.app.frigate_config.schema_json(), media_type="application/json"
)
return JSONResponse(content=get_config_schema(FrigateConfig))
@router.get(

View File

@ -1,6 +1,6 @@
import logging
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -21,8 +21,18 @@ DETECTOR_KEY = "cpu"
class CpuDetectorConfig(BaseDetectorConfig):
"""CPU TFLite detector that runs TensorFlow Lite models on the host CPU without hardware acceleration. Not recommended."""
model_config = ConfigDict(
title="CPU",
)
type: Literal[DETECTOR_KEY]
num_threads: int = Field(default=3, title="Number of detection threads")
num_threads: int = Field(
default=3,
title="Number of detection threads",
description="The number of threads used for CPU-based inference.",
)
class CpuTfl(DetectionApi):

View File

@ -4,7 +4,7 @@ import logging
import numpy as np
import requests
from PIL import Image
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -16,12 +16,28 @@ DETECTOR_KEY = "deepstack"
class DeepstackDetectorConfig(BaseDetectorConfig):
"""DeepStack/CodeProject.AI detector that sends images to a remote DeepStack HTTP API for inference. Not recommended."""
model_config = ConfigDict(
title="DeepStack",
)
type: Literal[DETECTOR_KEY]
api_url: str = Field(
default="http://localhost:80/v1/vision/detection", title="DeepStack API URL"
default="http://localhost:80/v1/vision/detection",
title="DeepStack API URL",
description="The URL of the DeepStack API.",
)
api_timeout: float = Field(
default=0.1,
title="DeepStack API timeout (in seconds)",
description="Maximum time allowed for a DeepStack API request.",
)
api_key: str = Field(
default="",
title="DeepStack API key (if required)",
description="Optional API key for authenticated DeepStack services.",
)
api_timeout: float = Field(default=0.1, title="DeepStack API timeout (in seconds)")
api_key: str = Field(default="", title="DeepStack API key (if required)")
class DeepStack(DetectionApi):

View File

@ -2,7 +2,7 @@ import logging
import queue
import numpy as np
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -14,10 +14,28 @@ DETECTOR_KEY = "degirum"
### DETECTOR CONFIG ###
class DGDetectorConfig(BaseDetectorConfig):
"""DeGirum detector for running models via DeGirum cloud or local inference services."""
model_config = ConfigDict(
title="DeGirum",
)
type: Literal[DETECTOR_KEY]
location: str = Field(default=None, title="Inference Location")
zoo: str = Field(default=None, title="Model Zoo")
token: str = Field(default=None, title="DeGirum Cloud Token")
location: str = Field(
default=None,
title="Inference Location",
description="Location of the DeGirum inference engine (e.g. '@cloud', '127.0.0.1').",
)
zoo: str = Field(
default=None,
title="Model Zoo",
description="Path or URL to the DeGirum model zoo.",
)
token: str = Field(
default=None,
title="DeGirum Cloud Token",
description="Token for DeGirum Cloud access.",
)
### ACTUAL DETECTOR ###

View File

@ -4,7 +4,7 @@ import os
import cv2
import numpy as np
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -21,8 +21,18 @@ DETECTOR_KEY = "edgetpu"
class EdgeTpuDetectorConfig(BaseDetectorConfig):
"""EdgeTPU detector that runs TensorFlow Lite models compiled for Coral EdgeTPU using the EdgeTPU delegate."""
model_config = ConfigDict(
title="EdgeTPU",
)
type: Literal[DETECTOR_KEY]
device: str = Field(default=None, title="Device Type")
device: str = Field(
default=None,
title="Device Type",
description="The device to use for EdgeTPU inference (e.g. 'usb', 'pci').",
)
class EdgeTpuTfl(DetectionApi):

View File

@ -8,7 +8,7 @@ from typing import Dict, List, Optional, Tuple
import cv2
import numpy as np
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.const import MODEL_CACHE_DIR
@ -410,5 +410,15 @@ class HailoDetector(DetectionApi):
# ----------------- HailoDetectorConfig Class ----------------- #
class HailoDetectorConfig(BaseDetectorConfig):
"""Hailo-8/Hailo-8L detector using HEF models and the HailoRT SDK for inference on Hailo hardware."""
model_config = ConfigDict(
title="Hailo-8/Hailo-8L",
)
type: Literal[DETECTOR_KEY]
device: str = Field(default="PCIe", title="Device Type")
device: str = Field(
default="PCIe",
title="Device Type",
description="The device to use for Hailo inference (e.g. 'PCIe', 'M.2').",
)

View File

@ -8,7 +8,7 @@ from queue import Queue
import cv2
import numpy as np
from pydantic import BaseModel, Field
from pydantic import BaseModel, ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -30,8 +30,18 @@ class ModelConfig(BaseModel):
class MemryXDetectorConfig(BaseDetectorConfig):
"""MemryX MX3 detector that runs compiled DFP models on MemryX accelerators."""
model_config = ConfigDict(
title="MemryX",
)
type: Literal[DETECTOR_KEY]
device: str = Field(default="PCIe", title="Device Path")
device: str = Field(
default="PCIe",
title="Device Path",
description="The device to use for MemryX inference (e.g. 'PCIe').",
)
class MemryXDetector(DetectionApi):

View File

@ -1,7 +1,7 @@
import logging
import numpy as np
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -23,8 +23,18 @@ DETECTOR_KEY = "onnx"
class ONNXDetectorConfig(BaseDetectorConfig):
"""ONNX detector for running ONNX models; will use available acceleration backends (CUDA/ROCm/OpenVINO) when available."""
model_config = ConfigDict(
title="ONNX",
)
type: Literal[DETECTOR_KEY]
device: str = Field(default="AUTO", title="Device Type")
device: str = Field(
default="AUTO",
title="Device Type",
description="The device to use for ONNX inference (e.g. 'AUTO', 'CPU', 'GPU').",
)
class ONNXDetector(DetectionApi):

View File

@ -2,7 +2,7 @@ import logging
import numpy as np
import openvino as ov
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -20,8 +20,18 @@ DETECTOR_KEY = "openvino"
class OvDetectorConfig(BaseDetectorConfig):
"""OpenVINO detector for AMD and Intel CPUs, Intel GPUs and Intel VPU hardware."""
model_config = ConfigDict(
title="OpenVINO",
)
type: Literal[DETECTOR_KEY]
device: str = Field(default=None, title="Device Type")
device: str = Field(
default=None,
title="Device Type",
description="The device to use for OpenVINO inference (e.g. 'CPU', 'GPU', 'NPU').",
)
class OvDetector(DetectionApi):

View File

@ -6,7 +6,7 @@ from typing import Literal
import cv2
import numpy as np
from pydantic import Field
from pydantic import ConfigDict, Field
from frigate.const import MODEL_CACHE_DIR, SUPPORTED_RK_SOCS
from frigate.detectors.detection_api import DetectionApi
@ -29,8 +29,20 @@ model_cache_dir = os.path.join(MODEL_CACHE_DIR, "rknn_cache/")
class RknnDetectorConfig(BaseDetectorConfig):
"""RKNN detector for Rockchip NPUs; runs compiled RKNN models on Rockchip hardware."""
model_config = ConfigDict(
title="RKNN",
)
type: Literal[DETECTOR_KEY]
num_cores: int = Field(default=0, ge=0, le=3, title="Number of NPU cores to use.")
num_cores: int = Field(
default=0,
ge=0,
le=3,
title="Number of NPU cores to use.",
description="The number of NPU cores to use (0 for auto).",
)
class Rknn(DetectionApi):

View File

@ -2,6 +2,7 @@ import logging
import os
import numpy as np
from pydantic import ConfigDict
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -27,6 +28,12 @@ DETECTOR_KEY = "synaptics"
class SynapDetectorConfig(BaseDetectorConfig):
"""Synaptics NPU detector for models in .synap format using the Synap SDK on Synaptics hardware."""
model_config = ConfigDict(
title="Synaptics",
)
type: Literal[DETECTOR_KEY]

View File

@ -1,5 +1,6 @@
import logging
from pydantic import ConfigDict
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -18,6 +19,12 @@ DETECTOR_KEY = "teflon_tfl"
class TeflonDetectorConfig(BaseDetectorConfig):
"""Teflon delegate detector for TFLite using Mesa Teflon delegate library to accelerate inference on supported GPUs."""
model_config = ConfigDict(
title="Teflon",
)
type: Literal[DETECTOR_KEY]

View File

@ -14,7 +14,7 @@ try:
except ModuleNotFoundError:
TRT_SUPPORT = False
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -46,8 +46,16 @@ if TRT_SUPPORT:
class TensorRTDetectorConfig(BaseDetectorConfig):
"""TensorRT detector for Nvidia Jetson devices using serialized TensorRT engines for accelerated inference."""
model_config = ConfigDict(
title="TensorRT",
)
type: Literal[DETECTOR_KEY]
device: int = Field(default=0, title="GPU Device Index")
device: int = Field(
default=0, title="GPU Device Index", description="The GPU device index to use."
)
class HostDeviceMem(object):

View File

@ -5,7 +5,7 @@ from typing import Any, List
import numpy as np
import zmq
from pydantic import Field
from pydantic import ConfigDict, Field
from typing_extensions import Literal
from frigate.detectors.detection_api import DetectionApi
@ -17,14 +17,28 @@ DETECTOR_KEY = "zmq"
class ZmqDetectorConfig(BaseDetectorConfig):
"""ZMQ IPC detector that offloads inference to an external process via a ZeroMQ IPC endpoint."""
model_config = ConfigDict(
title="ZMQ IPC",
)
type: Literal[DETECTOR_KEY]
endpoint: str = Field(
default="ipc:///tmp/cache/zmq_detector", title="ZMQ IPC endpoint"
default="ipc:///tmp/cache/zmq_detector",
title="ZMQ IPC endpoint",
description="The ZMQ endpoint to connect to.",
)
request_timeout_ms: int = Field(
default=200, title="ZMQ request timeout in milliseconds"
default=200,
title="ZMQ request timeout in milliseconds",
description="Timeout for ZMQ requests in milliseconds.",
)
linger_ms: int = Field(
default=0,
title="ZMQ socket linger in milliseconds",
description="Socket linger period in milliseconds.",
)
linger_ms: int = Field(default=0, title="ZMQ socket linger in milliseconds")
class ZmqIpcDetector(DetectionApi):

46
frigate/util/schema.py Normal file
View File

@ -0,0 +1,46 @@
"""JSON schema utilities for Frigate."""
from typing import Any, Dict, Type
from pydantic import BaseModel, TypeAdapter
def get_config_schema(config_class: Type[BaseModel]) -> Dict[str, Any]:
    """Return the JSON schema for *config_class* with polymorphic detectors.

    Pydantic's default schema for ``Dict[str, BaseDetectorConfig]`` exposes only
    the base detector fields. This helper merges the full discriminated-union
    schema for every detector plugin into ``$defs`` and re-points the
    ``detectors`` property at that union, so the published schema describes all
    available detector subclasses.
    """
    # Imported lazily so all detector plugins are registered via the
    # frigate.detectors package before the union type is inspected.
    from frigate.detectors import DetectorConfig

    schema = config_class.model_json_schema()

    # Schema for the discriminated union of every detector plugin config.
    union_schema = TypeAdapter(DetectorConfig).json_schema()

    # Merge the per-plugin definitions (OvDetectorConfig, etc.) into $defs.
    defs = schema.setdefault("$defs", {})
    defs.update(union_schema.get("$defs", {}))

    # Register the union itself (oneOf + discriminator) as a named definition.
    defs["DetectorConfig"] = {
        key: value for key, value in union_schema.items() if key != "$defs"
    }

    # Point the 'detectors' mapping values at the polymorphic definition.
    properties = schema.get("properties", {})
    if "detectors" in properties:
        properties["detectors"]["additionalProperties"] = {
            "$ref": "#/$defs/DetectorConfig"
        }

    return schema

View File

@ -13,6 +13,7 @@ from pathlib import Path
from typing import Any, Dict, get_args, get_origin
from frigate.config.config import FrigateConfig
from frigate.util.schema import get_config_schema
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@ -187,6 +188,49 @@ def generate_section_translation(config_class: type) -> Dict[str, Any]:
return extract_translations_from_schema(schema)
def get_detector_translations(
    config_schema: Dict[str, Any],
) -> tuple[Dict[str, Any], Dict[str, Any]]:
    """Build detector field and type translations based on schema definitions.

    Walks the discriminator mapping of the ``DetectorConfig`` union definition
    and, for each detector plugin schema, collects its title/description as a
    type translation plus its per-field translations.

    Returns a ``(field_translations, type_translations)`` tuple.
    """
    defs = config_schema.get("$defs", {})
    mapping = (
        defs.get("DetectorConfig", {}).get("discriminator", {}).get("mapping", {})
    )

    field_translations: Dict[str, Any] = {}
    type_translations: Dict[str, Any] = {}

    for detector_type, ref in mapping.items():
        # Only well-formed local references into $defs are usable.
        if not (isinstance(ref, str) and ref.startswith("#/$defs/")):
            continue
        plugin_schema = defs.get(ref.rsplit("/", 1)[-1], {})
        if not plugin_schema:
            continue

        # The plugin class title/description become the type entry.
        entry: Dict[str, str] = {}
        for target, source in (("label", "title"), ("description", "description")):
            value = plugin_schema.get(source)
            if value:
                entry[target] = value
        if entry:
            type_translations[detector_type] = entry

        # Field-level translations, minus the root label/description that are
        # already captured in the type entry above.
        nested = extract_translations_from_schema(plugin_schema, defs=defs)
        field_translations.update(
            {k: v for k, v in nested.items() if k not in ("label", "description")}
        )

    return field_translations, type_translations
def main():
"""Main function to generate config translations."""
@ -207,6 +251,7 @@ def main():
)
config_fields = FrigateConfig.model_fields
config_schema = get_config_schema(FrigateConfig)
logger.info(f"Found {len(config_fields)} top-level config sections")
global_translations = {}
@ -255,6 +300,11 @@ def main():
}
section_data.update(nested_without_root)
if field_name == "detectors":
detector_fields, detector_types = get_detector_translations(config_schema)
section_data.update(detector_fields)
section_data.update(detector_types)
if not section_data:
logger.warning(f"No translations found for section: {field_name}")
continue

View File

@ -287,8 +287,7 @@
"label": "Detector hardware",
"description": "Configuration for object detectors (CPU, GPU, ONNX backends) and any detector-specific model settings.",
"type": {
"label": "Detector Type",
"description": "Type of detector to use for object detection (for example 'cpu', 'edgetpu', 'openvino')."
"label": "Type"
},
"model": {
"label": "Detector specific model configuration",
@ -337,6 +336,106 @@
"model_path": {
"label": "Detector specific model path",
"description": "File path to the detector model binary if required by the chosen detector."
},
"num_threads": {
"label": "Number of detection threads",
"description": "The number of threads used for CPU-based inference."
},
"api_url": {
"label": "DeepStack API URL",
"description": "The URL of the DeepStack API."
},
"api_timeout": {
"label": "DeepStack API timeout (in seconds)",
"description": "Maximum time allowed for a DeepStack API request."
},
"api_key": {
"label": "DeepStack API key (if required)",
"description": "Optional API key for authenticated DeepStack services."
},
"location": {
"label": "Inference Location",
"description": "Location of the DeGirum inference engine (e.g. '@cloud', '127.0.0.1')."
},
"zoo": {
"label": "Model Zoo",
"description": "Path or URL to the DeGirum model zoo."
},
"token": {
"label": "DeGirum Cloud Token",
"description": "Token for DeGirum Cloud access."
},
"device": {
"label": "GPU Device Index",
"description": "The GPU device index to use."
},
"num_cores": {
"label": "Number of NPU cores to use.",
"description": "The number of NPU cores to use (0 for auto)."
},
"endpoint": {
"label": "ZMQ IPC endpoint",
"description": "The ZMQ endpoint to connect to."
},
"request_timeout_ms": {
"label": "ZMQ request timeout in milliseconds",
"description": "Timeout for ZMQ requests in milliseconds."
},
"linger_ms": {
"label": "ZMQ socket linger in milliseconds",
"description": "Socket linger period in milliseconds."
},
"cpu": {
"label": "CPU",
"description": "CPU TFLite detector that runs TensorFlow Lite models on the host CPU without hardware acceleration. Not recommended."
},
"deepstack": {
"label": "DeepStack",
"description": "DeepStack/CodeProject.AI detector that sends images to a remote DeepStack HTTP API for inference. Not recommended."
},
"degirum": {
"label": "DeGirum",
"description": "DeGirum detector for running models via DeGirum cloud or local inference services."
},
"edgetpu": {
"label": "EdgeTPU",
"description": "EdgeTPU detector that runs TensorFlow Lite models compiled for Coral EdgeTPU using the EdgeTPU delegate."
},
"hailo8l": {
"label": "Hailo-8/Hailo-8L",
"description": "Hailo-8/Hailo-8L detector using HEF models and the HailoRT SDK for inference on Hailo hardware."
},
"memryx": {
"label": "MemryX",
"description": "MemryX MX3 detector that runs compiled DFP models on MemryX accelerators."
},
"onnx": {
"label": "ONNX",
"description": "ONNX detector for running ONNX models; will use available acceleration backends (CUDA/ROCm/OpenVINO) when available."
},
"openvino": {
"label": "OpenVINO",
"description": "OpenVINO detector for AMD and Intel CPUs, Intel GPUs and Intel VPU hardware."
},
"rknn": {
"label": "RKNN",
"description": "RKNN detector for Rockchip NPUs; runs compiled RKNN models on Rockchip hardware."
},
"synaptics": {
"label": "Synaptics",
"description": "Synaptics NPU detector for models in .synap format using the Synap SDK on Synaptics hardware."
},
"teflon_tfl": {
"label": "Teflon",
"description": "Teflon delegate detector for TFLite using Mesa Teflon delegate library to accelerate inference on supported GPUs."
},
"tensorrt": {
"label": "TensorRT",
"description": "TensorRT detector for Nvidia Jetson devices using serialized TensorRT engines for accelerated inference."
},
"zmq": {
"label": "ZMQ IPC",
"description": "ZMQ IPC detector that offloads inference to an external process via a ZeroMQ IPC endpoint."
}
},
"model": {