Mirror of https://github.com/blakeblackshear/frigate.git (synced 2026-01-22 20:18:30 +03:00)

feat(genai): Support multiple GenAI providers and per-camera configuration

parent e2a1208c90
commit 865bbbd5ea
@@ -11,18 +11,24 @@ Requests for a description are sent off automatically to your AI provider at the

 Generative AI can be enabled for all cameras or only for specific cameras. If GenAI is disabled for a camera, you can still manually generate descriptions for events using the HTTP API. There are currently 3 native providers available to integrate with Frigate. Other providers that support the OpenAI standard API can also be used. See the OpenAI section below.

-To use Generative AI, you must define a single provider at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.
+To use Generative AI, you must define one or more providers at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.

 ```yaml
 genai:
-  provider: gemini
-  api_key: "{FRIGATE_GEMINI_API_KEY}"
-  model: gemini-2.0-flash
+  - name: gemini
+    provider: gemini
+    api_key: "{FRIGATE_GEMINI_API_KEY}"
+    model: gemini-2.0-flash
+  - name: ollama
+    provider: ollama
+    base_url: http://localhost:11434
+    model: qwen3-vl:4b

 cameras:
   front_camera:
     genai:
       enabled: True # <- enable GenAI for your front camera
+      provider: gemini # <- specify which provider to use
       use_snapshot: True
       objects:
         - person
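The shape change in this example is that `genai` becomes a list of named provider entries and a camera can reference one of those entries by name. As a quick illustration of that structure, here is a short check that parses the example with PyYAML directly; it is purely illustrative and not part of Frigate.

```python
# Illustrative only: parse the example above with PyYAML to show the new shape.
import yaml

cfg = yaml.safe_load("""
genai:
  - name: gemini
    provider: gemini
    model: gemini-2.0-flash
  - name: ollama
    provider: ollama
    base_url: http://localhost:11434
    model: qwen3-vl:4b
cameras:
  front_camera:
    genai:
      enabled: true
      provider: gemini
""")

# `genai` is now a list of provider entries instead of a single mapping,
# and a camera references one of those entries by its `name`.
assert isinstance(cfg["genai"], list)
provider_names = [entry["name"] for entry in cfg["genai"]]
assert cfg["cameras"]["front_camera"]["genai"]["provider"] in provider_names
```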
@@ -72,9 +78,9 @@ Ollama also supports [cloud models](https://ollama.com/cloud), where your local

 ```yaml
 genai:
-  provider: ollama
-  base_url: http://localhost:11434
-  model: qwen3-vl:4b
+  - provider: ollama
+    base_url: http://localhost:11434
+    model: qwen3-vl:4b
 ```

 ## Google Gemini
@@ -98,9 +104,9 @@ To start using Gemini, you must first get an API key from [Google AI Studio](htt

 ```yaml
 genai:
-  provider: gemini
-  api_key: "{FRIGATE_GEMINI_API_KEY}"
-  model: gemini-2.0-flash
+  - provider: gemini
+    api_key: "{FRIGATE_GEMINI_API_KEY}"
+    model: gemini-2.0-flash
 ```

 :::note
@@ -125,9 +131,9 @@ To start using OpenAI, you must first [create an API key](https://platform.opena

 ```yaml
 genai:
-  provider: openai
-  api_key: "{FRIGATE_OPENAI_API_KEY}"
-  model: gpt-4o
+  - provider: openai
+    api_key: "{FRIGATE_OPENAI_API_KEY}"
+    model: gpt-4o
 ```

 :::note
@@ -152,10 +158,10 @@ To start using Azure OpenAI, you must first [create a resource](https://learn.mi

 ```yaml
 genai:
-  provider: azure_openai
-  base_url: https://instance.cognitiveservices.azure.com/openai/responses?api-version=2025-04-01-preview
-  model: gpt-5-mini
-  api_key: "{FRIGATE_OPENAI_API_KEY}"
+  - provider: azure_openai
+    base_url: https://instance.cognitiveservices.azure.com/openai/responses?api-version=2025-04-01-preview
+    model: gpt-5-mini
+    api_key: "{FRIGATE_OPENAI_API_KEY}"
 ```

 ## Usage and Best Practices
@@ -171,10 +177,11 @@ Frigate provides an [MQTT topic](/integrations/mqtt), `frigate/tracked_object_up
 If looking to get notifications earlier than when an object ceases to be tracked, an additional send trigger can be configured of `after_significant_updates`.

 ```yaml
-genai:
-  send_triggers:
-    tracked_object_end: true # default
-    after_significant_updates: 3 # how many updates to a tracked object before we should send an image
+objects:
+  genai:
+    send_triggers:
+      tracked_object_end: true # default
+      after_significant_updates: 3 # how many updates to a tracked object before we should send an image
 ```

 ## Custom Prompts
@@ -7,7 +7,7 @@ title: Configuring Generative AI

 A Generative AI provider can be configured in the global config, which will make the Generative AI features available for use. There are currently 3 native providers available to integrate with Frigate. Other providers that support the OpenAI standard API can also be used. See the OpenAI section below.

-To use Generative AI, you must define a single provider at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.
+To use Generative AI, you must define one or more providers at the global level of your Frigate configuration. If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.

 ## Ollama

@@ -58,13 +58,13 @@ You should have at least 8 GB of RAM available (or VRAM if running on GPU) to ru

 ```yaml
 genai:
-  provider: ollama
-  base_url: http://localhost:11434
-  model: minicpm-v:8b
-  provider_options: # other Ollama client options can be defined
-    keep_alive: -1
-    options:
-      num_ctx: 8192 # make sure the context matches other services that are using ollama
+  - provider: ollama
+    base_url: http://localhost:11434
+    model: minicpm-v:8b
+    provider_options: # other Ollama client options can be defined
+      keep_alive: -1
+      options:
+        num_ctx: 8192 # make sure the context matches other services that are using ollama
 ```

 ## Google Gemini
@@ -88,9 +88,9 @@ To start using Gemini, you must first get an API key from [Google AI Studio](htt

 ```yaml
 genai:
-  provider: gemini
-  api_key: "{FRIGATE_GEMINI_API_KEY}"
-  model: gemini-1.5-flash
+  - provider: gemini
+    api_key: "{FRIGATE_GEMINI_API_KEY}"
+    model: gemini-1.5-flash
 ```

 ## OpenAI
@@ -109,9 +109,9 @@ To start using OpenAI, you must first [create an API key](https://platform.opena

 ```yaml
 genai:
-  provider: openai
-  api_key: "{FRIGATE_OPENAI_API_KEY}"
-  model: gpt-4o
+  - provider: openai
+    api_key: "{FRIGATE_OPENAI_API_KEY}"
+    model: gpt-4o
 ```

 :::note
@@ -136,7 +136,7 @@ To start using Azure OpenAI, you must first [create a resource](https://learn.mi

 ```yaml
 genai:
-  provider: azure_openai
-  base_url: https://example-endpoint.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2023-03-15-preview
-  api_key: "{FRIGATE_OPENAI_API_KEY}"
+  - provider: azure_openai
+    base_url: https://example-endpoint.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2023-03-15-preview
+    api_key: "{FRIGATE_OPENAI_API_KEY}"
 ```
@@ -37,9 +37,9 @@ You are also able to define custom prompts in your configuration.

 ```yaml
 genai:
-  provider: ollama
-  base_url: http://localhost:11434
-  model: llava
+  - provider: ollama
+    base_url: http://localhost:11434
+    model: llava

 objects:
   prompt: "Analyze the {label} in these images from the {camera} security camera. Focus on the actions, behavior, and potential intent of the {label}, rather than just describing its appearance."
@@ -686,16 +686,16 @@ lpr:
 # the camera level to enhance privacy for indoor cameras.
 genai:
   # Required: Provider must be one of ollama, gemini, or openai
-  provider: ollama
-  # Required if provider is ollama. May also be used for an OpenAI API compatible backend with the openai provider.
-  base_url: http://localhost::11434
-  # Required if gemini or openai
-  api_key: "{FRIGATE_GENAI_API_KEY}"
-  # Required: The model to use with the provider.
-  model: gemini-1.5-flash
-  # Optional additional args to pass to the GenAI Provider (default: None)
-  provider_options:
-    keep_alive: -1
+  - provider: ollama
+    # Required if provider is ollama. May also be used for an OpenAI API compatible backend with the openai provider.
+    base_url: http://localhost::11434
+    # Required if gemini or openai
+    api_key: "{FRIGATE_GENAI_API_KEY}"
+    # Required: The model to use with the provider.
+    model: gemini-1.5-flash
+    # Optional additional args to pass to the GenAI Provider (default: None)
+    provider_options:
+      keep_alive: -1

 # Optional: Configuration for audio transcription
 # NOTE: only the enabled option can be overridden at the camera level
@@ -6,7 +6,7 @@ from pydantic import Field
 from ..base import FrigateBaseModel
 from ..env import EnvString

-__all__ = ["GenAIConfig", "GenAIProviderEnum"]
+__all__ = ["GenAIProviderConfig", "GenAIProviderEnum"]


 class GenAIProviderEnum(str, Enum):
@@ -16,9 +16,10 @@ class GenAIProviderEnum(str, Enum):
     ollama = "ollama"


-class GenAIConfig(FrigateBaseModel):
+class GenAIProviderConfig(FrigateBaseModel):
     """Primary GenAI Config to define GenAI Provider."""

+    name: str = Field(default="default", title="Provider Name")
     api_key: Optional[EnvString] = Field(default=None, title="Provider API key.")
     base_url: Optional[str] = Field(default=None, title="Provider base url.")
     model: str = Field(default="gpt-4o", title="GenAI model.")
@@ -62,6 +62,7 @@ class GenAIObjectTriggerConfig(FrigateBaseModel):

 class GenAIObjectConfig(FrigateBaseModel):
     enabled: bool = Field(default=False, title="Enable GenAI for camera.")
+    provider: Optional[str] = Field(default=None, title="GenAI provider name.")
     use_snapshot: bool = Field(
         default=False, title="Use snapshots for generating descriptions."
     )
@@ -83,6 +83,7 @@ class GenAIReviewConfig(FrigateBaseModel):
         default=False,
         title="Enable GenAI descriptions for review items.",
     )
+    provider: Optional[str] = Field(default=None, title="GenAI provider name.")
     alerts: bool = Field(default=True, title="Enable GenAI for alerts.")
     detections: bool = Field(default=False, title="Enable GenAI for detections.")
     image_source: ImageSourceEnum = Field(
@@ -45,7 +45,7 @@ from .camera.audio import AudioConfig
 from .camera.birdseye import BirdseyeConfig
 from .camera.detect import DetectConfig
 from .camera.ffmpeg import FfmpegConfig
-from .camera.genai import GenAIConfig
+from .camera.genai import GenAIProviderConfig
 from .camera.motion import MotionConfig
 from .camera.notification import NotificationConfig
 from .camera.objects import FilterConfig, ObjectConfig
@@ -348,10 +348,19 @@ class FrigateConfig(FrigateBaseModel):
     )

     # GenAI config
-    genai: GenAIConfig = Field(
-        default_factory=GenAIConfig, title="Generative AI configuration."
+    genai: List[GenAIProviderConfig] = Field(
+        default_factory=list, title="Generative AI configuration."
     )

+    @field_validator("genai", mode="before")
+    @classmethod
+    def validate_genai(cls, v: Any):
+        if v is None:
+            return []
+        if isinstance(v, dict):
+            return [v]
+        return v
+
     # Camera config
     cameras: Dict[str, CameraConfig] = Field(title="Camera configuration.")
     audio: AudioConfig = Field(
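The `mode="before"` validator is what keeps older configs working: a legacy single-provider mapping is wrapped into a one-element list before normal field validation runs, and a missing value becomes an empty list. Below is a minimal, self-contained sketch of that behavior using plain pydantic v2 models; the classes are stand-ins, not Frigate's `FrigateBaseModel`.

```python
from typing import Any, List, Optional

from pydantic import BaseModel, Field, field_validator


class ProviderConfig(BaseModel):
    # Simplified stand-in for GenAIProviderConfig, using the field names shown above.
    name: str = "default"
    provider: Optional[str] = None
    model: str = "gpt-4o"


class AppConfig(BaseModel):
    genai: List[ProviderConfig] = Field(default_factory=list)

    @field_validator("genai", mode="before")
    @classmethod
    def validate_genai(cls, v: Any):
        # Same normalization as above: a legacy single-provider mapping
        # is wrapped into a one-element list; None becomes an empty list.
        if v is None:
            return []
        if isinstance(v, dict):
            return [v]
        return v


# The legacy mapping shape and the new list shape both validate:
legacy = AppConfig.model_validate({"genai": {"provider": "ollama", "model": "llava"}})
current = AppConfig.model_validate({"genai": [{"name": "gemini", "provider": "gemini"}]})
assert len(legacy.genai) == 1 and legacy.genai[0].name == "default"
assert current.genai[0].name == "gemini"
```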
@@ -16,7 +16,7 @@ from frigate.config import CameraConfig, FrigateConfig
 from frigate.const import CLIPS_DIR, UPDATE_EVENT_DESCRIPTION
 from frigate.data_processing.post.semantic_trigger import SemanticTriggerProcessor
 from frigate.data_processing.types import PostProcessDataEnum
-from frigate.genai import GenAIClient
+from frigate.genai import GenAIClient, GenAIManager
 from frigate.models import Event
 from frigate.types import TrackedObjectUpdateTypesEnum
 from frigate.util.builtin import EventsPerSecond, InferenceSpeed
@@ -41,7 +41,7 @@ class ObjectDescriptionProcessor(PostProcessorApi):
         embeddings: "Embeddings",
         requestor: InterProcessRequestor,
         metrics: DataProcessorMetrics,
-        client: GenAIClient,
+        client: GenAIManager,
         semantic_trigger_processor: SemanticTriggerProcessor | None,
     ):
         super().__init__(config, metrics, None)
@@ -21,7 +21,7 @@ from frigate.config.camera import CameraConfig
 from frigate.config.camera.review import GenAIReviewConfig, ImageSourceEnum
 from frigate.const import CACHE_DIR, CLIPS_DIR, UPDATE_REVIEW_DESCRIPTION
 from frigate.data_processing.types import PostProcessDataEnum
-from frigate.genai import GenAIClient
+from frigate.genai import GenAIClient, GenAIManager
 from frigate.models import Recordings, ReviewSegment
 from frigate.util.builtin import EventsPerSecond, InferenceSpeed
 from frigate.util.image import get_image_from_recording
@@ -41,7 +41,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
         config: FrigateConfig,
         requestor: InterProcessRequestor,
         metrics: DataProcessorMetrics,
-        client: GenAIClient,
+        client: GenAIManager,
     ):
         super().__init__(config, metrics, None)
         self.requestor = requestor
@@ -63,7 +63,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
         Estimates ~1 token per 1250 pixels. Targets 98% context utilization with safety margin.
         Capped at 20 frames.
         """
-        context_size = self.genai_client.get_context_size()
+        context_size = self.genai_client.get_context_size(camera)
         camera_config = self.config.cameras[camera]

         detect_width = camera_config.detect.width
@@ -484,7 +484,7 @@ class ReviewDescriptionProcessor(PostProcessorApi):
     @staticmethod
     def run_analysis(
         requestor: InterProcessRequestor,
-        genai_client: GenAIClient,
+        genai_client: GenAIManager,
         review_inference_speed: InferenceSpeed,
         camera_config: CameraConfig,
         final_data: dict[str, str],
@@ -1,4 +1,4 @@
-"""Generative AI module for Frigate."""
+"Generative AI module for Frigate."

 import datetime
 import importlib
@@ -9,7 +9,12 @@ from typing import Any, Optional

 from playhouse.shortcuts import model_to_dict

-from frigate.config import CameraConfig, FrigateConfig, GenAIConfig, GenAIProviderEnum
+from frigate.config import (
+    CameraConfig,
+    FrigateConfig,
+    GenAIProviderConfig,
+    GenAIProviderEnum,
+)
 from frigate.const import CLIPS_DIR
 from frigate.data_processing.post.types import ReviewMetadata
 from frigate.models import Event
@@ -32,8 +37,8 @@ def register_genai_provider(key: GenAIProviderEnum):
 class GenAIClient:
     """Generative AI client for Frigate."""

-    def __init__(self, genai_config: GenAIConfig, timeout: int = 120) -> None:
-        self.genai_config: GenAIConfig = genai_config
+    def __init__(self, genai_config: GenAIProviderConfig, timeout: int = 120) -> None:
+        self.genai_config: GenAIProviderConfig = genai_config
         self.timeout = timeout
         self.provider = self._init_provider()

@@ -293,22 +298,132 @@ Guidelines:
         return 4096


-def get_genai_client(config: FrigateConfig) -> Optional[GenAIClient]:
-    """Get the GenAI client."""
-    if not config.genai.provider:
+class GenAIManager:
+    """Manager for multiple GenAI clients."""
+
+    def __init__(self, config: FrigateConfig):
+        self.config = config
+        self.providers: dict[str, GenAIClient] = {}
+        load_providers()
+        self._init_providers()
+
+    def _init_providers(self):
+        for genai_config in self.config.genai:
+            if not genai_config.provider:
+                continue
+
+            provider_class = PROVIDERS.get(genai_config.provider)
+
+            if not provider_class:
+                logger.error(f"Provider {genai_config.provider} not found")
+                continue
+
+            if genai_config.name in self.providers:
+                logger.warning(
+                    f"Duplicate GenAI provider name '{genai_config.name}' found. Overwriting previous provider."
+                )
+
+            self.providers[genai_config.name] = provider_class(genai_config)
+
+    def _get_provider(self, name: str | None = None) -> GenAIClient | None:
+        if not self.providers:
+            return None
+
+        if name and name in self.providers:
+            return self.providers[name]
+
+        if name:
+            logger.error(
+                f"GenAI provider '{name}' not found in configuration. Falling back to default provider."
+            )
+
+        # Default to first provider
+        return next(iter(self.providers.values()))
+
+    def generate_review_description(
+        self,
+        review_data: dict[str, Any],
+        thumbnails: list[bytes],
+        concerns: list[str],
+        preferred_language: str | None,
+        debug_save: bool,
+        activity_context_prompt: str,
+    ) -> ReviewMetadata | None:
+        camera_name = review_data.get("camera")
+        provider_name = None
+        if camera_name:
+            camera_config = self.config.cameras.get(camera_name)
+            if camera_config:
+                provider_name = camera_config.review.genai.provider
+
+        client = self._get_provider(provider_name)
+
+        if not client:
+            return None
+
+        return client.generate_review_description(
+            review_data,
+            thumbnails,
+            concerns,
+            preferred_language,
+            debug_save,
+            activity_context_prompt,
+        )
+
+    def generate_review_summary(
+        self,
+        start_ts: float,
+        end_ts: float,
+        events: list[dict[str, Any]],
+        debug_save: bool,
+    ) -> str | None:
+        provider_name = self.config.review.genai.provider
+        client = self._get_provider(provider_name)
+
+        if not client:
+            return None
+
+        return client.generate_review_summary(start_ts, end_ts, events, debug_save)
+
+    def generate_object_description(
+        self,
+        camera_config: CameraConfig,
+        thumbnails: list[bytes],
+        event: Event,
+    ) -> Optional[str]:
+        provider_name = camera_config.objects.genai.provider
+        client = self._get_provider(provider_name)
+
+        if not client:
+            return None
+
+        return client.generate_object_description(camera_config, thumbnails, event)
+
+    def get_context_size(self, camera_name: str | None = None) -> int:
+        provider_name = None
+        if camera_name:
+            camera_config = self.config.cameras.get(camera_name)
+            if camera_config:
+                provider_name = camera_config.review.genai.provider
+
+        client = self._get_provider(provider_name)
+
+        if not client:
+            return 4096
+
+        return client.get_context_size()
+
+
+def get_genai_client(config: FrigateConfig) -> Optional[GenAIManager]:
+    """Get the GenAI manager."""
+    if not config.genai:
         return None

-    load_providers()
-    provider = PROVIDERS.get(config.genai.provider)
-    if provider:
-        return provider(config.genai)
-
-    return None
+    return GenAIManager(config)


 def load_providers():
     package_dir = os.path.dirname(__file__)
     for filename in os.listdir(package_dir):
         if filename.endswith(".py") and filename != "__init__.py":
             module_name = f"frigate.genai.{filename[:-3]}"
             importlib.import_module(module_name)
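Taken together, processors now hold a `GenAIManager` instead of a single `GenAIClient`, and each `generate_*` call resolves a provider per camera. The selection rule in `_get_provider` reduces to the standalone sketch below, which uses illustrative stand-in data rather than Frigate objects.

```python
# Illustrative stand-in for the selection rule in GenAIManager._get_provider.
providers = {"gemini": "gemini-client", "ollama": "ollama-client"}


def resolve(name: str | None = None) -> str | None:
    if not providers:
        return None
    if name and name in providers:
        return providers[name]
    # Unknown or unset names fall back to the first configured provider
    # (Frigate additionally logs an error when the name is unknown).
    return next(iter(providers.values()))


assert resolve("ollama") == "ollama-client"  # camera sets provider: ollama
assert resolve(None) == "gemini-client"      # no per-camera provider -> first entry
assert resolve("typo") == "gemini-client"    # unknown name -> fallback to first entry
```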