mirror of
https://github.com/blakeblackshear/frigate.git
synced 2026-04-11 17:47:37 +03:00
Pull context size from openai models
This commit is contained in:
parent
685f2c5030
commit
09b01b0538
@ -18,6 +18,7 @@ class OpenAIClient(GenAIClient):
|
|||||||
"""Generative AI client for Frigate using OpenAI."""
|
"""Generative AI client for Frigate using OpenAI."""
|
||||||
|
|
||||||
provider: OpenAI
|
provider: OpenAI
|
||||||
|
context_size: Optional[int] = None
|
||||||
|
|
||||||
def _init_provider(self):
|
def _init_provider(self):
|
||||||
"""Initialize the client."""
|
"""Initialize the client."""
|
||||||
@ -69,5 +70,33 @@ class OpenAIClient(GenAIClient):
|
|||||||
|
|
||||||
def get_context_size(self) -> int:
    """Return the context window size (in tokens) for the configured model.

    Resolution order:
      1. A previously cached value on ``self.context_size``.
      2. The ``max_model_len`` reported by the provider's model listing
         API for the configured model (exposed by some OpenAI-compatible
         servers — TODO confirm which backends populate it).
      3. A heuristic default: 128K for gpt-4o family models, 8K otherwise.

    The resolved value is cached on ``self.context_size`` so later calls
    skip the API round trip.
    """
    # Fast path: a value was already resolved (or configured) earlier.
    if self.context_size is not None:
        return self.context_size

    try:
        # Best-effort lookup: enumerate the provider's models and read
        # the context window from the matching entry when it is exposed.
        models = self.provider.models.list()
        for model in models.data:
            if model.id == self.genai_config.model:
                if hasattr(model, "max_model_len") and model.max_model_len:
                    self.context_size = model.max_model_len
                    logger.debug(
                        f"Retrieved context size {self.context_size} for model {self.genai_config.model}"
                    )
                    return self.context_size
    except Exception as e:
        # Deliberate broad catch: a failed lookup must not break inference;
        # fall through to the heuristic defaults below.
        logger.debug(
            f"Failed to fetch model context size from API: {e}, using default"
        )

    # Default to 128K for ChatGPT models, 8K for others
    model_name = self.genai_config.model.lower()
    if "gpt-4o" in model_name:
        self.context_size = 128000
    else:
        self.context_size = 8192

    logger.debug(
        f"Using default context size {self.context_size} for model {self.genai_config.model}"
    )
    return self.context_size
|
|||||||
Loading…
Reference in New Issue
Block a user