add support for Google Gemini API integration

- Introduced GEMINI_API_KEY in setup and environment files.
- Updated SetupWizard to include Google Gemini as a supported LLM provider.
- Adjusted model selection logic to accommodate Gemini models.
- Modified billing and configuration files to handle Gemini model pricing and aliases.
- Updated documentation to reflect the addition of GEMINI_API_KEY.
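
In practice, enabling Gemini after this change comes down to setting the new key and, optionally, the default model (values illustrative, taken from the example files below):

    GEMINI_API_KEY=your-gemini-api-key
    MODEL_TO_USE=gemini/gemini-2.5-pro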
mykonos-ibiza 2025-07-26 20:51:09 +05:30
parent aed291e7c3
commit ad9c1dabc8
8 changed files with 26 additions and 10 deletions

View File

@@ -28,6 +28,7 @@ AWS_REGION_NAME=
GROQ_API_KEY=
OPENROUTER_API_KEY=
GEMINI_API_KEY=
MORPH_API_KEY=
# DATA APIS

View File

@@ -86,6 +86,7 @@ RABBITMQ_PORT=5672
ANTHROPIC_API_KEY=your-anthropic-key
OPENAI_API_KEY=your-openai-key
OPENROUTER_API_KEY=your-openrouter-key
GEMINI_API_KEY=your-gemini-api-key
MODEL_TO_USE=anthropic/claude-sonnet-4-20250514
# Search and Web Scraping

View File

@@ -1197,9 +1197,14 @@ async def get_available_models(
        if '/' in model:
            models_to_try.append(model.split('/', 1)[1])
        # Special handling for Google models accessed via OpenRouter
        if model.startswith('openrouter/google/'):
            google_model_name = model.replace('openrouter/', '')
        # Special handling for Google models accessed via Google API
        if model.startswith('gemini/'):
            google_model_name = model.replace('gemini/', '')
            models_to_try.append(google_model_name)
        # Special handling for Google models accessed via Google API
        if model.startswith('gemini/'):
            google_model_name = model.replace('gemini/', '')
            models_to_try.append(google_model_name)
        # Try each model name variation until we find one that works
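
A simplified sketch of the name-variant expansion the hunk above performs (the surrounding endpoint code and the initial contents of models_to_try are assumed, and the list is not deduplicated, mirroring the diff):

    def model_name_variants(model: str) -> list:
        # Start from the requested name, then add progressively shorter aliases.
        variants = [model]
        if '/' in model:
            variants.append(model.split('/', 1)[1])             # "gemini/gemini-2.5-pro" -> "gemini-2.5-pro"
        if model.startswith('openrouter/google/'):
            variants.append(model.replace('openrouter/', ''))   # -> "google/gemini-2.5-pro"
        if model.startswith('gemini/'):
            variants.append(model.replace('gemini/', ''))       # -> "gemini-2.5-pro"
        return variants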

View File

@@ -38,7 +38,7 @@ class LLMRetryError(LLMError):
def setup_api_keys() -> None:
    """Set up API keys from environment variables."""
    providers = ['OPENAI', 'ANTHROPIC', 'GROQ', 'OPENROUTER', 'XAI', 'MORPH']
    providers = ['OPENAI', 'ANTHROPIC', 'GROQ', 'OPENROUTER', 'XAI', 'MORPH', 'GEMINI']
    for provider in providers:
        key = getattr(config, f'{provider}_API_KEY')
        if key:
@@ -76,6 +76,7 @@ def get_openrouter_fallback(model_name: str) -> Optional[str]:
        "anthropic/claude-3-7-sonnet-latest": "openrouter/anthropic/claude-3.7-sonnet",
        "anthropic/claude-sonnet-4-20250514": "openrouter/anthropic/claude-sonnet-4",
        "xai/grok-4": "openrouter/x-ai/grok-4",
        "gemini/gemini-2.5-pro": "openrouter/google/gemini-2.5-pro",
    }
    # Check for exact match first
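
A minimal sketch of how the fallback table is used, assuming get_openrouter_fallback simply looks the model up (exact match first, as the comment notes; any prefix handling in the real function is not shown here):

    OPENROUTER_FALLBACKS = {
        "anthropic/claude-sonnet-4-20250514": "openrouter/anthropic/claude-sonnet-4",
        "xai/grok-4": "openrouter/x-ai/grok-4",
        "gemini/gemini-2.5-pro": "openrouter/google/gemini-2.5-pro",
    }

    def openrouter_fallback(model_name):
        # Exact match first; None means no OpenRouter equivalent is registered.
        return OPENROUTER_FALLBACKS.get(model_name)

    assert openrouter_fallback("gemini/gemini-2.5-pro") == "openrouter/google/gemini-2.5-pro"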

View File

@@ -176,6 +176,7 @@ class Configuration:
    OPENROUTER_API_KEY: Optional[str] = None
    XAI_API_KEY: Optional[str] = None
    MORPH_API_KEY: Optional[str] = None
    GEMINI_API_KEY: Optional[str] = None
    OPENROUTER_API_BASE: Optional[str] = "https://openrouter.ai/api/v1"
    OR_SITE_URL: Optional[str] = "https://kortix.ai"
    OR_APP_NAME: Optional[str] = "Kortix AI"
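
Since setup_api_keys reads keys with getattr(config, f'{provider}_API_KEY'), the new Configuration field is picked up automatically; a toy illustration (Configuration trimmed to one field, getattr given a default so the other providers in the loop do not raise here):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Configuration:
        GEMINI_API_KEY: Optional[str] = None

    config = Configuration(GEMINI_API_KEY="your-gemini-api-key")

    for provider in ['OPENROUTER', 'GEMINI']:
        key = getattr(config, f'{provider}_API_KEY', None)
        if key:
            print(f"{provider} key configured")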

View File

@@ -60,7 +60,7 @@ MODELS = {
    },
    # Paid tier only models
    "openrouter/google/gemini-2.5-pro": {
    "gemini/gemini-2.5-pro": {
        "aliases": ["google/gemini-2.5-pro"],
        "pricing": {
            "input_cost_per_million_tokens": 1.25,
@@ -145,8 +145,8 @@ def _generate_model_structures():
        elif model_name.startswith("openrouter/qwen/"):
            legacy_name = model_name.replace("openrouter/", "")
            pricing[legacy_name] = config["pricing"]
        elif model_name.startswith("openrouter/google/"):
            legacy_name = model_name.replace("openrouter/", "")
        elif model_name.startswith("gemini/"):
            legacy_name = model_name.replace("gemini/", "")
            pricing[legacy_name] = config["pricing"]
        elif model_name.startswith("anthropic/"):
            # Add anthropic/claude-sonnet-4 alias for claude-sonnet-4-20250514
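
A rough sketch of how the renamed entry and its alias resolve to the same pricing (only the input rate of 1.25 USD per million tokens is visible in this hunk; output pricing is omitted):

    PRICING = {"gemini/gemini-2.5-pro": {"input_cost_per_million_tokens": 1.25}}
    ALIASES = {"google/gemini-2.5-pro": "gemini/gemini-2.5-pro"}

    def input_cost_usd(model: str, input_tokens: int) -> float:
        # Resolve a legacy alias to the canonical model name, then apply the per-million rate.
        canonical = ALIASES.get(model, model)
        rate = PRICING[canonical]["input_cost_per_million_tokens"]
        return input_tokens / 1_000_000 * rate

    # 200,000 input tokens -> 0.25 USD, whether billed under the canonical name or the alias.
    assert input_cost_usd("google/gemini-2.5-pro", 200_000) == 0.25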

View File

@@ -172,6 +172,7 @@ RABBITMQ_PORT=5672
ANTHROPIC_API_KEY=your-anthropic-key
OPENAI_API_KEY=your-openai-key
OPENROUTER_API_KEY=your-openrouter-key
GEMINI_API_KEY=your-gemini-api-key
MORPH_API_KEY=
MODEL_TO_USE=anthropic/claude-sonnet-4-20250514

View File

@@ -130,6 +130,7 @@ def load_existing_env_vars():
            "ANTHROPIC_API_KEY": backend_env.get("ANTHROPIC_API_KEY", ""),
            "OPENROUTER_API_KEY": backend_env.get("OPENROUTER_API_KEY", ""),
            "MORPH_API_KEY": backend_env.get("MORPH_API_KEY", ""),
            "GEMINI_API_KEY": backend_env.get("GEMINI_API_KEY", ""),
            "MODEL_TO_USE": backend_env.get("MODEL_TO_USE", ""),
        },
        "search": {
@@ -685,7 +686,7 @@ class SetupWizard:
            )
        else:
            print_info(
                "Suna requires at least one LLM provider. Supported: OpenAI, Anthropic, OpenRouter."
                "Suna requires at least one LLM provider. Supported: OpenAI, Anthropic, Google Gemini, OpenRouter."
            )
        # Don't clear existing keys if we're updating
@@ -700,7 +701,8 @@ class SetupWizard:
        providers = {
            "1": ("OpenAI", "OPENAI_API_KEY"),
            "2": ("Anthropic", "ANTHROPIC_API_KEY"),
            "3": ("OpenRouter", "OPENROUTER_API_KEY"),
            "3": ("Google Gemini", "GEMINI_API_KEY"),
            "4": ("OpenRouter", "OPENROUTER_API_KEY"),
        }
        print(
            f"\n{Colors.CYAN}Select LLM providers to configure (e.g., 1,3):{Colors.ENDC}"
@@ -748,10 +750,14 @@
                self.env_vars["llm"][
                    "MODEL_TO_USE"
                ] = "anthropic/claude-sonnet-4-20250514"
            elif self.env_vars["llm"].get("GEMINI_API_KEY"):
                self.env_vars["llm"][
                    "MODEL_TO_USE"
                ] = "gemini/gemini-2.5-pro"
            elif self.env_vars["llm"].get("OPENROUTER_API_KEY"):
                self.env_vars["llm"][
                    "MODEL_TO_USE"
                ] = "openrouter/google/gemini-flash-1.5"
                ] = "openrouter/google/gemini-2.5-pro"
            print_success(
                f"LLM keys saved. Default model: {self.env_vars['llm'].get('MODEL_TO_USE', 'Not set')}"