Mirror of https://github.com/blakeblackshear/frigate.git
Set default values for LLM performance (#20606)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
parent 21ff257705
commit 007371019a
@@ -1,7 +1,7 @@
 """Ollama Provider for Frigate AI."""
 
 import logging
-from typing import Optional
+from typing import Any, Optional
 
 from httpx import TimeoutException
 from ollama import Client as ApiClient
@@ -17,10 +17,24 @@ logger = logging.getLogger(__name__)
 class OllamaClient(GenAIClient):
     """Generative AI client for Frigate using Ollama."""
 
+    LOCAL_OPTIMIZED_OPTIONS = {
+        "options": {
+            "temperature": 0.5,
+            "repeat_penalty": 1.15,
+            "presence_penalty": 0.1,
+        },
+    }
+
     provider: ApiClient
+    provider_options: dict[str, Any]
 
     def _init_provider(self):
         """Initialize the client."""
+        self.provider_options = {
+            **self.LOCAL_OPTIMIZED_OPTIONS,
+            **self.genai_config.provider_options,
+        }
+
         try:
             client = ApiClient(host=self.genai_config.base_url, timeout=self.timeout)
             # ensure the model is available locally
@@ -48,7 +62,7 @@ class OllamaClient(GenAIClient):
             self.genai_config.model,
             prompt,
             images=images if images else None,
-            **self.genai_config.provider_options,
+            **self.provider_options,
         )
         return result["response"].strip()
     except (TimeoutException, ResponseError) as e:
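The key behavior in `_init_provider` is the merge order: the class-level defaults are spread first and the user's `provider_options` second, so user-supplied keys win. A minimal standalone sketch of that precedence follows; `user_provider_options` is a hypothetical stand-in for `genai_config.provider_options`, not a value from this commit.

```python
# Minimal sketch of the merge precedence introduced by this commit.
# LOCAL_OPTIMIZED_OPTIONS mirrors the diff; user_provider_options is a
# hypothetical example of a user-configured override.
LOCAL_OPTIMIZED_OPTIONS = {
    "options": {
        "temperature": 0.5,
        "repeat_penalty": 1.15,
        "presence_penalty": 0.1,
    },
}

user_provider_options = {"options": {"temperature": 0.8}}  # hypothetical user config

# Later keys win in a dict literal, so user options override the defaults.
merged = {**LOCAL_OPTIMIZED_OPTIONS, **user_provider_options}
print(merged)
# {'options': {'temperature': 0.8}}
```

Worth noting: the spread is shallow, so a user-supplied top-level "options" dict replaces the default "options" dict wholesale rather than merging per key.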
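In the last hunk, the generate call now spreads `self.provider_options` instead of the raw config. Since everything is nested under the "options" key, this is equivalent to passing `options={...}` to `ollama.Client.generate()`. A hedged usage sketch; the host URL, model name, and prompt below are assumptions for illustration only:

```python
from ollama import Client

# Assumed local Ollama endpoint for illustration.
client = Client(host="http://localhost:11434")

provider_options = {
    "options": {
        "temperature": 0.5,
        "repeat_penalty": 1.15,
        "presence_penalty": 0.1,
    },
}

result = client.generate(
    "llava",                 # assumed model name
    "Describe this scene.",  # assumed prompt
    **provider_options,      # expands to options={...}
)
print(result["response"].strip())
```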