Merge 865bbbd5ea into fb9604fbcc
@@ -11,18 +11,24 @@ Requests for a description are sent off automatically to your AI provider at the
Generative AI can be enabled for all cameras or only for specific cameras. If GenAI is disabled for a camera, you can still manually generate descriptions for events using the HTTP API. There are currently 3 native providers available to integrate with Frigate. Other providers that support the OpenAI standard API can also be used. See the OpenAI section below.
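For the manual path mentioned above, a description request for a single event can be sent over HTTP. The sketch below is illustrative only: the endpoint path, the `source` query parameter, the host, and the event id are assumptions and should be checked against the Frigate HTTP API reference.

```python
import requests

FRIGATE_URL = "http://frigate.local:5000"  # assumption: your Frigate host and port
EVENT_ID = "1700000000.123456-abc123"      # hypothetical tracked object id

# Assumed endpoint for asking Frigate to (re)generate a GenAI description for one
# event; verify the exact path and parameters in the HTTP API documentation.
response = requests.post(
    f"{FRIGATE_URL}/api/events/{EVENT_ID}/description/regenerate",
    params={"source": "snapshot"},  # assumption: snapshot vs. thumbnails
    timeout=30,
)
response.raise_for_status()
```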

To use Generative AI, you must define a single provider at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.
To use Generative AI, you must define one or more providers at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.

```yaml
genai:
  - name: gemini
    provider: gemini
    api_key: "{FRIGATE_GEMINI_API_KEY}"
    model: gemini-2.0-flash
  - name: ollama
    provider: ollama
    base_url: http://localhost:11434
    model: qwen3-vl:4b

cameras:
  front_camera:
    genai:
      enabled: True # <- enable GenAI for your front camera
      provider: gemini # <- specify which provider to use
      use_snapshot: True
      objects:
        - person

@@ -72,7 +78,7 @@ Ollama also supports [cloud models](https://ollama.com/cloud), where your local

```yaml
genai:
  provider: ollama
  - provider: ollama
    base_url: http://localhost:11434
    model: qwen3-vl:4b
```

@@ -98,7 +104,7 @@ To start using Gemini, you must first get an API key from [Google AI Studio](htt

```yaml
genai:
  provider: gemini
  - provider: gemini
    api_key: "{FRIGATE_GEMINI_API_KEY}"
    model: gemini-2.0-flash
```

@@ -125,7 +131,7 @@ To start using OpenAI, you must first [create an API key](https://platform.opena

```yaml
genai:
  provider: openai
  - provider: openai
    api_key: "{FRIGATE_OPENAI_API_KEY}"
    model: gpt-4o
```

@@ -152,7 +158,7 @@ To start using Azure OpenAI, you must first [create a resource](https://learn.mi

```yaml
genai:
  provider: azure_openai
  - provider: azure_openai
    base_url: https://instance.cognitiveservices.azure.com/openai/responses?api-version=2025-04-01-preview
    model: gpt-5-mini
    api_key: "{FRIGATE_OPENAI_API_KEY}"

@@ -171,6 +177,7 @@ Frigate provides an [MQTT topic](/integrations/mqtt), `frigate/tracked_object_up

If you want notifications earlier than when an object ceases to be tracked, an additional send trigger, `after_significant_updates`, can be configured.
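For context before the send-trigger YAML below, a minimal consumer of the `frigate/tracked_object_update` topic mentioned in this section might look like the following sketch; the broker address and the payload fields read out are assumptions, so check them against the MQTT integration docs.

```python
import json

import paho.mqtt.client as mqtt

def on_message(client, userdata, msg):
    payload = json.loads(msg.payload)
    # Assumption: the payload carries a type field and the generated description.
    print(payload.get("type"), payload.get("description"))

client = mqtt.Client()
client.on_message = on_message
client.connect("mqtt.local", 1883)  # assumption: your broker address and port
client.subscribe("frigate/tracked_object_update")
client.loop_forever()
```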

```yaml
objects:
  genai:
    send_triggers:
      tracked_object_end: true # default

@@ -7,7 +7,7 @@ title: Configuring Generative AI

A Generative AI provider can be configured in the global config, which will make the Generative AI features available for use. There are currently 3 native providers available to integrate with Frigate. Other providers that support the OpenAI standard API can also be used. See the OpenAI section below.

To use Generative AI, you must define a single provider at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.
To use Generative AI, you must define one or more providers at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.

## Ollama

@@ -58,7 +58,7 @@ You should have at least 8 GB of RAM available (or VRAM if running on GPU) to ru

```yaml
genai:
  provider: ollama
  - provider: ollama
    base_url: http://localhost:11434
    model: minicpm-v:8b
    provider_options: # other Ollama client options can be defined

@@ -88,7 +88,7 @@ To start using Gemini, you must first get an API key from [Google AI Studio](htt

```yaml
genai:
  provider: gemini
  - provider: gemini
    api_key: "{FRIGATE_GEMINI_API_KEY}"
    model: gemini-1.5-flash
```

@@ -109,7 +109,7 @@ To start using OpenAI, you must first [create an API key](https://platform.opena

```yaml
genai:
  provider: openai
  - provider: openai
    api_key: "{FRIGATE_OPENAI_API_KEY}"
    model: gpt-4o
```

@@ -136,7 +136,7 @@ To start using Azure OpenAI, you must first [create a resource](https://learn.mi

```yaml
genai:
  provider: azure_openai
  - provider: azure_openai
    base_url: https://example-endpoint.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2023-03-15-preview
    api_key: "{FRIGATE_OPENAI_API_KEY}"
```

@@ -37,7 +37,7 @@ You are also able to define custom prompts in your configuration.

```yaml
genai:
  provider: ollama
  - provider: ollama
    base_url: http://localhost:11434
    model: llava

@@ -686,7 +686,7 @@ lpr:

# the camera level to enhance privacy for indoor cameras.
genai:
  # Required: Provider must be one of ollama, gemini, or openai
  provider: ollama
  - provider: ollama
  # Required if provider is ollama. May also be used for an OpenAI API compatible backend with the openai provider.
  base_url: http://localhost:11434
  # Required if gemini or openai

@@ -6,7 +6,7 @@ from pydantic import Field

from ..base import FrigateBaseModel
from ..env import EnvString

__all__ = ["GenAIConfig", "GenAIProviderEnum"]
__all__ = ["GenAIProviderConfig", "GenAIProviderEnum"]


class GenAIProviderEnum(str, Enum):
@@ -16,9 +16,10 @@ class GenAIProviderEnum(str, Enum):
    ollama = "ollama"


class GenAIConfig(FrigateBaseModel):
class GenAIProviderConfig(FrigateBaseModel):
    """Primary GenAI Config to define GenAI Provider."""

    name: str = Field(default="default", title="Provider Name")
    api_key: Optional[EnvString] = Field(default=None, title="Provider API key.")
    base_url: Optional[str] = Field(default=None, title="Provider base url.")
    model: str = Field(default="gpt-4o", title="GenAI model.")
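For illustration, the `api_key: Optional[EnvString]` field above is what allows a value such as `"{FRIGATE_GEMINI_API_KEY}"` to be pulled from the environment. The standalone sketch below mimics that behavior with `os.environ`; it is an assumption about the mechanism, not the actual `EnvString` implementation.

```python
import os
import re

def resolve_env_placeholder(value: str) -> str:
    """Resolve a '{FRIGATE_*}' placeholder from the environment (illustrative only)."""
    match = re.fullmatch(r"\{(FRIGATE_[A-Z0-9_]+)\}", value)
    if not match:
        return value  # plain literal value, use as-is
    return os.environ.get(match.group(1), value)

# Example: export FRIGATE_GEMINI_API_KEY=... before starting Frigate, then
# reference it in the config as "{FRIGATE_GEMINI_API_KEY}".
api_key = resolve_env_placeholder("{FRIGATE_GEMINI_API_KEY}")
```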

@@ -62,6 +62,7 @@ class GenAIObjectTriggerConfig(FrigateBaseModel):

class GenAIObjectConfig(FrigateBaseModel):
    enabled: bool = Field(default=False, title="Enable GenAI for camera.")
    provider: Optional[str] = Field(default=None, title="GenAI provider name.")
    use_snapshot: bool = Field(
        default=False, title="Use snapshots for generating descriptions."
    )

@@ -83,6 +83,7 @@ class GenAIReviewConfig(FrigateBaseModel):
        default=False,
        title="Enable GenAI descriptions for review items.",
    )
    provider: Optional[str] = Field(default=None, title="GenAI provider name.")
    alerts: bool = Field(default=True, title="Enable GenAI for alerts.")
    detections: bool = Field(default=False, title="Enable GenAI for detections.")
    image_source: ImageSourceEnum = Field(

@@ -45,7 +45,7 @@ from .camera.audio import AudioConfig
from .camera.birdseye import BirdseyeConfig
from .camera.detect import DetectConfig
from .camera.ffmpeg import FfmpegConfig
from .camera.genai import GenAIConfig
from .camera.genai import GenAIProviderConfig
from .camera.motion import MotionConfig
from .camera.notification import NotificationConfig
from .camera.objects import FilterConfig, ObjectConfig
@@ -348,10 +348,19 @@ class FrigateConfig(FrigateBaseModel):
    )

    # GenAI config
    genai: GenAIConfig = Field(
        default_factory=GenAIConfig, title="Generative AI configuration."
    genai: List[GenAIProviderConfig] = Field(
        default_factory=list, title="Generative AI configuration."
    )

    @field_validator("genai", mode="before")
    @classmethod
    def validate_genai(cls, v: Any):
        if v is None:
            return []
        if isinstance(v, dict):
            return [v]
        return v

    # Camera config
    cameras: Dict[str, CameraConfig] = Field(title="Camera configuration.")
    audio: AudioConfig = Field(
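As a quick check on the `validate_genai` pre-validator above: a legacy single-provider mapping is wrapped into a one-element list, so both the old and the new config shapes load into `List[GenAIProviderConfig]`. The sketch below only mirrors that normalization; the YAML snippets are illustrative.

```python
import yaml

def normalize_genai(v):
    # Mirrors the validate_genai pre-validator: None -> [], dict -> [dict], list passes through.
    if v is None:
        return []
    if isinstance(v, dict):
        return [v]
    return v

legacy = yaml.safe_load("genai:\n  provider: ollama\n  model: llava\n")
multi = yaml.safe_load("genai:\n  - name: gemini\n    provider: gemini\n")

print(normalize_genai(legacy["genai"]))  # [{'provider': 'ollama', 'model': 'llava'}]
print(normalize_genai(multi["genai"]))   # [{'name': 'gemini', 'provider': 'gemini'}]
```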

@@ -16,7 +16,7 @@ from frigate.config import CameraConfig, FrigateConfig
from frigate.const import CLIPS_DIR, UPDATE_EVENT_DESCRIPTION
from frigate.data_processing.post.semantic_trigger import SemanticTriggerProcessor
from frigate.data_processing.types import PostProcessDataEnum
from frigate.genai import GenAIClient
from frigate.genai import GenAIClient, GenAIManager
from frigate.models import Event
from frigate.types import TrackedObjectUpdateTypesEnum
from frigate.util.builtin import EventsPerSecond, InferenceSpeed
@@ -41,7 +41,7 @@ class ObjectDescriptionProcessor(PostProcessorApi):
        embeddings: "Embeddings",
        requestor: InterProcessRequestor,
        metrics: DataProcessorMetrics,
        client: GenAIClient,
        client: GenAIManager,
        semantic_trigger_processor: SemanticTriggerProcessor | None,
    ):
        super().__init__(config, metrics, None)

@@ -21,7 +21,7 @@ from frigate.config.camera import CameraConfig
from frigate.config.camera.review import GenAIReviewConfig, ImageSourceEnum
from frigate.const import CACHE_DIR, CLIPS_DIR, UPDATE_REVIEW_DESCRIPTION
from frigate.data_processing.types import PostProcessDataEnum
from frigate.genai import GenAIClient
from frigate.genai import GenAIClient, GenAIManager
from frigate.models import Recordings, ReviewSegment
from frigate.util.builtin import EventsPerSecond, InferenceSpeed
from frigate.util.image import get_image_from_recording
@@ -41,7 +41,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
        config: FrigateConfig,
        requestor: InterProcessRequestor,
        metrics: DataProcessorMetrics,
        client: GenAIClient,
        client: GenAIManager,
    ):
        super().__init__(config, metrics, None)
        self.requestor = requestor
@@ -63,7 +63,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
        Estimates ~1 token per 1250 pixels. Targets 98% context utilization with safety margin.
        Capped at 20 frames.
        """
        context_size = self.genai_client.get_context_size()
        context_size = self.genai_client.get_context_size(camera)
        camera_config = self.config.cameras[camera]

        detect_width = camera_config.detect.width
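For a rough feel of the budget described in the docstring above (about 1 token per 1250 pixels, roughly 98% context utilization, capped at 20 frames), the sketch below reproduces that arithmetic on its own; the exact rounding and any extra overhead handling in the real implementation may differ.

```python
def estimate_max_frames(context_size: int, width: int, height: int) -> int:
    """Illustrative only: how many frames fit the model context under the stated estimate."""
    tokens_per_frame = (width * height) / 1250  # ~1 token per 1250 pixels
    usable_tokens = context_size * 0.98         # target ~98% utilization
    return min(20, max(1, int(usable_tokens // tokens_per_frame)))

# e.g. a 4096-token context with a 1280x720 detect resolution
print(estimate_max_frames(4096, 1280, 720))  # -> 5 frames under this rough estimate
```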

@@ -484,7 +484,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
    @staticmethod
    def run_analysis(
        requestor: InterProcessRequestor,
        genai_client: GenAIClient,
        genai_client: GenAIManager,
        review_inference_speed: InferenceSpeed,
        camera_config: CameraConfig,
        final_data: dict[str, str],

@@ -1,4 +1,4 @@
"""Generative AI module for Frigate."""
"Generative AI module for Frigate."

import datetime
import importlib
@@ -9,7 +9,12 @@ from typing import Any, Optional

from playhouse.shortcuts import model_to_dict

from frigate.config import CameraConfig, FrigateConfig, GenAIConfig, GenAIProviderEnum
from frigate.config import (
    CameraConfig,
    FrigateConfig,
    GenAIProviderConfig,
    GenAIProviderEnum,
)
from frigate.const import CLIPS_DIR
from frigate.data_processing.post.types import ReviewMetadata
from frigate.models import Event
@@ -32,8 +37,8 @@ def register_genai_provider(key: GenAIProviderEnum):
class GenAIClient:
    """Generative AI client for Frigate."""

    def __init__(self, genai_config: GenAIConfig, timeout: int = 120) -> None:
        self.genai_config: GenAIConfig = genai_config
    def __init__(self, genai_config: GenAIProviderConfig, timeout: int = 120) -> None:
        self.genai_config: GenAIProviderConfig = genai_config
        self.timeout = timeout
        self.provider = self._init_provider()
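For orientation, `register_genai_provider` (named in the hunk header above) together with the `PROVIDERS.get(...)` lookups used by the manager below suggest a simple registry pattern: each provider class registers itself under its `GenAIProviderEnum` key. The sketch below is a generic illustration of that pattern under those assumptions, not Frigate's actual implementation.

```python
from enum import Enum
from typing import Callable, Dict, Type

class ProviderKey(str, Enum):  # stand-in for GenAIProviderEnum
    ollama = "ollama"
    gemini = "gemini"

PROVIDERS: Dict[ProviderKey, Type] = {}  # provider key -> client class

def register_provider(key: ProviderKey) -> Callable[[Type], Type]:
    """Class decorator that records a provider class in the registry."""
    def decorator(cls: Type) -> Type:
        PROVIDERS[key] = cls
        return cls
    return decorator

@register_provider(ProviderKey.ollama)
class OllamaClientSketch:
    def __init__(self, config):
        self.config = config

# Lookup mirrors PROVIDERS.get(genai_config.provider) in the manager code below.
client_cls = PROVIDERS.get(ProviderKey.ollama)
```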

@@ -293,18 +298,128 @@ Guidelines:
        return 4096


def get_genai_client(config: FrigateConfig) -> Optional[GenAIClient]:
    """Get the GenAI client."""
    if not config.genai.provider:
        return None
class GenAIManager:
    """Manager for multiple GenAI clients."""

    def __init__(self, config: FrigateConfig):
        self.config = config
        self.providers: dict[str, GenAIClient] = {}
        load_providers()
    provider = PROVIDERS.get(config.genai.provider)
    if provider:
        return provider(config.genai)
        self._init_providers()

    def _init_providers(self):
        for genai_config in self.config.genai:
            if not genai_config.provider:
                continue

            provider_class = PROVIDERS.get(genai_config.provider)

            if not provider_class:
                logger.error(f"Provider {genai_config.provider} not found")
                continue

            if genai_config.name in self.providers:
                logger.warning(
                    f"Duplicate GenAI provider name '{genai_config.name}' found. Overwriting previous provider."
                )

            self.providers[genai_config.name] = provider_class(genai_config)

    def _get_provider(self, name: str | None = None) -> GenAIClient | None:
        if not self.providers:
            return None

        if name and name in self.providers:
            return self.providers[name]

        if name:
            logger.error(
                f"GenAI provider '{name}' not found in configuration. Falling back to default provider."
            )

        # Default to first provider
        return next(iter(self.providers.values()))

    def generate_review_description(
        self,
        review_data: dict[str, Any],
        thumbnails: list[bytes],
        concerns: list[str],
        preferred_language: str | None,
        debug_save: bool,
        activity_context_prompt: str,
    ) -> ReviewMetadata | None:
        camera_name = review_data.get("camera")
        provider_name = None
        if camera_name:
            camera_config = self.config.cameras.get(camera_name)
            if camera_config:
                provider_name = camera_config.review.genai.provider

        client = self._get_provider(provider_name)

        if not client:
            return None

        return client.generate_review_description(
            review_data,
            thumbnails,
            concerns,
            preferred_language,
            debug_save,
            activity_context_prompt,
        )

    def generate_review_summary(
        self,
        start_ts: float,
        end_ts: float,
        events: list[dict[str, Any]],
        debug_save: bool,
    ) -> str | None:
        provider_name = self.config.review.genai.provider
        client = self._get_provider(provider_name)

        if not client:
            return None

        return client.generate_review_summary(start_ts, end_ts, events, debug_save)

    def generate_object_description(
        self,
        camera_config: CameraConfig,
        thumbnails: list[bytes],
        event: Event,
    ) -> Optional[str]:
        provider_name = camera_config.objects.genai.provider
        client = self._get_provider(provider_name)

        if not client:
            return None

        return client.generate_object_description(camera_config, thumbnails, event)

    def get_context_size(self, camera_name: str | None = None) -> int:
        provider_name = None
        if camera_name:
            camera_config = self.config.cameras.get(camera_name)
            if camera_config:
                provider_name = camera_config.review.genai.provider

        client = self._get_provider(provider_name)

        if not client:
            return 4096

        return client.get_context_size()


def get_genai_client(config: FrigateConfig) -> Optional[GenAIManager]:
    """Get the GenAI manager."""
    if not config.genai:
        return None

    return GenAIManager(config)


def load_providers():
    package_dir = os.path.dirname(__file__)
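Finally, the fallback rules in `_get_provider` above are easy to misread, so here is a standalone mirror of the selection behavior (exact name match first, otherwise log and fall back to the first configured provider); the dictionary contents and names are illustrative.

```python
providers = {"gemini": "gemini-client", "ollama": "ollama-client"}  # name -> client (illustrative)

def pick_provider(name: str | None = None):
    # Mirrors GenAIManager._get_provider: exact match wins, unknown names fall back.
    if not providers:
        return None
    if name and name in providers:
        return providers[name]
    if name:
        print(f"GenAI provider '{name}' not found in configuration. Falling back to default provider.")
    return next(iter(providers.values()))

print(pick_provider("ollama"))   # -> ollama-client
print(pick_provider("unknown"))  # warns, then -> gemini-client (first configured)
print(pick_provider())           # -> gemini-client
```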