Fix Gemini model integration with correct model name and endpoint
parent a09eb1519e
commit f94bde875b
@@ -84,11 +84,11 @@ models:
   gemini-2.0-flash-lite:
     provider: "google"
-    model_name: "gemini-2.0-flash-lite-001"
+    model_name: "gemini-2.0-flash-lite"
     temperature: 0.5
     max_tokens: 2048
     top_p: 1.0
-    endpoint: "https://generativelanguage.googleapis.com/v1"
+    endpoint: "https://generativelanguage.googleapis.com/v1beta"

 # Default model to use if not specified for a module
 default_model: "llama-3.1-8b-instant"  # Using Groq's Llama 3.1 8B model for testing

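For context, here is a minimal sketch of how a model entry like the one above might be resolved at runtime, assuming the file is plain YAML with a top-level "models" mapping and a "default_model" key. The file path and helper name are hypothetical, not taken from this repository.

import yaml

def load_model_config(path="config.yaml", model=None):
    # Hypothetical helper: load the YAML config and pick a model entry,
    # falling back to the configured default_model when none is requested.
    with open(path) as f:
        cfg = yaml.safe_load(f)
    name = model or cfg["default_model"]
    return name, cfg["models"][name]

# Example: resolving the Gemini entry changed in this commit.
name, model_cfg = load_model_config(model="gemini-2.0-flash-lite")
assert model_cfg["model_name"] == "gemini-2.0-flash-lite"
assert model_cfg["endpoint"].endswith("/v1beta")
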
@@ -106,7 +106,10 @@ class LLMInterface:
             # Special handling for Google Gemini models
             params['model'] = f"gemini/{self.model_config.get('model_name', self.model_name)}"
             # Google Gemini uses a different API base
-            params['api_base'] = self.model_config.get('endpoint', 'https://generativelanguage.googleapis.com/v1')
+            params['api_base'] = self.model_config.get('endpoint', 'https://generativelanguage.googleapis.com/v1beta')
+
+            # Add additional parameters for Gemini
+            params['custom_llm_provider'] = 'gemini'
         else:
             # Standard provider (OpenAI, Anthropic, etc.)
             params['model'] = self.model_name

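The "gemini/" model prefix, the "api_base" key, and "custom_llm_provider" match LiteLLM's calling convention. Assuming that is the client library behind these params (the hunk alone does not confirm it), the dict built here would feed a completion call roughly like the sketch below; LiteLLM reads the Gemini key from the GEMINI_API_KEY environment variable.

import litellm

# Sketch only: values mirror the gemini-2.0-flash-lite config entry above.
params = {
    "model": "gemini/gemini-2.0-flash-lite",
    "api_base": "https://generativelanguage.googleapis.com/v1beta",
    "custom_llm_provider": "gemini",
    "temperature": 0.5,
    "max_tokens": 2048,
    "top_p": 1.0,
}

response = litellm.completion(
    messages=[{"role": "user", "content": "Summarize the findings in one paragraph."}],
    **params,
)
print(response.choices[0].message.content)
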
@@ -102,7 +102,10 @@ class ReportSynthesizer:
             # Special handling for Google Gemini models
             params['model'] = f"gemini/{self.model_config.get('model_name', self.model_name)}"
             # Google Gemini uses a different API base
-            params['api_base'] = self.model_config.get('endpoint', 'https://generativelanguage.googleapis.com/v1')
+            params['api_base'] = self.model_config.get('endpoint', 'https://generativelanguage.googleapis.com/v1beta')
+
+            # Add additional parameters for Gemini
+            params['custom_llm_provider'] = 'gemini'
         else:
             # Standard provider (OpenAI, Anthropic, etc.)
             params['model'] = self.model_name

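Since the fix is to both the model name and the API version, the two can also be sanity-checked independently of the client code by querying the Generative Language API's model-info endpoint directly. This assumes a Google AI Studio key in GEMINI_API_KEY; the expected response name is "models/gemini-2.0-flash-lite".

import os
import requests

# Verify that the renamed model resolves at the v1beta endpoint.
base = "https://generativelanguage.googleapis.com/v1beta"
resp = requests.get(
    f"{base}/models/gemini-2.0-flash-lite",
    params={"key": os.environ["GEMINI_API_KEY"]},
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["name"])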