Fix prompt ordering for generate calls

This commit is contained in:
Nicolas Mowen 2026-03-27 07:26:59 -06:00
parent e8a63d138f
commit b826d507cf
2 changed files with 8 additions and 9 deletions

View File

@@ -50,9 +50,9 @@ class GeminiClient(GenAIClient):
response_format: Optional[dict] = None, response_format: Optional[dict] = None,
) -> Optional[str]: ) -> Optional[str]:
"""Submit a request to Gemini.""" """Submit a request to Gemini."""
contents = [ contents = [prompt] + [
types.Part.from_bytes(data=img, mime_type="image/jpeg") for img in images types.Part.from_bytes(data=img, mime_type="image/jpeg") for img in images
] + [prompt] ]
try: try:
# Merge runtime_options into generation_config if provided # Merge runtime_options into generation_config if provided
generation_config_dict: dict[str, Any] = {"candidate_count": 1} generation_config_dict: dict[str, Any] = {"candidate_count": 1}

View File

@@ -44,7 +44,12 @@ class OpenAIClient(GenAIClient):
) -> Optional[str]: ) -> Optional[str]:
"""Submit a request to OpenAI.""" """Submit a request to OpenAI."""
encoded_images = [base64.b64encode(image).decode("utf-8") for image in images] encoded_images = [base64.b64encode(image).decode("utf-8") for image in images]
messages_content = [] messages_content: list[dict] = [
{
"type": "text",
"text": prompt,
}
]
for image in encoded_images: for image in encoded_images:
messages_content.append( messages_content.append(
{ {
@@ -55,12 +60,6 @@ class OpenAIClient(GenAIClient):
}, },
} }
) )
messages_content.append(
{
"type": "text",
"text": prompt,
}
)
try: try:
request_params = { request_params = {
"model": self.genai_config.model, "model": self.genai_config.model,