Update message format for Gemini models to ensure compatibility with LiteLLM

This commit is contained in:
Steve White 2025-02-28 12:20:30 -06:00
parent 82f081f731
commit a09eb1519e
2 changed files with 44 additions and 2 deletions

View File

@@ -127,7 +127,28 @@ class LLMInterface:
"""
try:
params = self._get_completion_params()
# Special handling for Gemini models
if "gemini" in params.get('model', '').lower():
# Format messages for Gemini models
formatted_messages = []
for msg in messages:
role = msg['role']
# Gemini uses 'user' and 'model' roles (not 'assistant')
if role == 'assistant':
role = 'model'
# For system messages, convert to user messages with a prefix
if role == 'system':
formatted_messages.append({
"role": "user",
"content": f"System instruction: {msg['content']}"
})
else:
formatted_messages.append({"role": role, "content": msg['content']})
params['messages'] = formatted_messages
else:
params['messages'] = messages
params['stream'] = stream
response = completion(**params)

View File

@@ -123,7 +123,28 @@ class ReportSynthesizer:
"""
try:
params = self._get_completion_params()
# Special handling for Gemini models
if "gemini" in params.get('model', '').lower():
# Format messages for Gemini models
formatted_messages = []
for msg in messages:
role = msg['role']
# Gemini uses 'user' and 'model' roles (not 'assistant')
if role == 'assistant':
role = 'model'
# For system messages, convert to user messages with a prefix
if role == 'system':
formatted_messages.append({
"role": "user",
"content": f"System instruction: {msg['content']}"
})
else:
formatted_messages.append({"role": role, "content": msg['content']})
params['messages'] = formatted_messages
else:
params['messages'] = messages
params['stream'] = stream
logger.info(f"Generating completion with model: {params.get('model')}")