Set model in llama.cpp config

This commit is contained in:
Nicolas Mowen 2026-02-20 14:13:28 -07:00
parent e6387dac05
commit dc39d2f0ef

View File

@ -67,6 +67,7 @@ class LlamaCppClient(GenAIClient):
# Build request payload with llama.cpp native options
payload = {
"model": self.genai_config.model,
"messages": [
{
"role": "user",
@ -134,6 +135,7 @@ class LlamaCppClient(GenAIClient):
openai_tool_choice = "required"
payload = {
"model": self.genai_config.model,
"messages": messages,
}