mirror of https://github.com/kortix-ai/suna.git
kimi default free tier
commit 28b0d9fef0 (parent ab156abb84)
@@ -84,7 +84,7 @@ ANTHROPIC_API_KEY=your-anthropic-key
 OPENAI_API_KEY=your-openai-key
 OPENROUTER_API_KEY=your-openrouter-key
 GEMINI_API_KEY=your-gemini-api-key
-MODEL_TO_USE=anthropic/claude-sonnet-4-20250514
+MODEL_TO_USE=openrouter/moonshotai/kimi-k2

 # Search and Web Scraping
 TAVILY_API_KEY=your-tavily-key
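Note (not part of the diff): the example file still exposes MODEL_TO_USE as an ordinary environment variable even though several call sites below stop reading it, so a deployment that does honour the variable can still override the new default. A minimal sketch of that env-based resolution, assuming only the variable name shown above:

import os

# Sketch only: read the model from the environment, falling back to the
# new Kimi K2 default when MODEL_TO_USE is unset or empty.
DEFAULT_MODEL = "openrouter/moonshotai/kimi-k2"
model_to_use = os.getenv("MODEL_TO_USE") or DEFAULT_MODEL
print(f"Using model: {model_to_use}")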
@@ -48,7 +48,7 @@ REDIS_RESPONSE_LIST_TTL = 3600 * 24


 class AgentStartRequest(BaseModel):
-    model_name: Optional[str] = None # Will be set from config.MODEL_TO_USE in the endpoint
+    model_name: Optional[str] = None # Will be set to default model in the endpoint
     enable_thinking: Optional[bool] = False
     reasoning_effort: Optional[str] = 'low'
     stream: Optional[bool] = True
@@ -324,9 +324,9 @@ async def start_agent(
     model_name = body.model_name
     logger.info(f"Original model_name from request: {model_name}")

-    if model_name is None:
-        model_name = config.MODEL_TO_USE
-        logger.info(f"Using model from config: {model_name}")
+    # if model_name is None:
+    #     model_name = "openrouter/moonshotai/kimi-k2"
+    #     logger.info(f"Using default model: {model_name}")

     # Log the model name after alias resolution
     resolved_model = MODEL_NAME_ALIASES.get(model_name, model_name)
@@ -964,7 +964,7 @@ async def generate_and_update_project_name(project_id: str, prompt: str):
 @router.post("/agent/initiate", response_model=InitiateAgentResponse)
 async def initiate_agent_with_files(
     prompt: str = Form(...),
-    model_name: Optional[str] = Form(None), # Default to None to use config.MODEL_TO_USE
+    model_name: Optional[str] = Form(None), # Default to None to use default model
     enable_thinking: Optional[bool] = Form(False),
     reasoning_effort: Optional[str] = Form("low"),
     stream: Optional[bool] = Form(True),
@@ -988,8 +988,8 @@ async def initiate_agent_with_files(
     logger.info(f"Original model_name from request: {model_name}")

     if model_name is None:
-        model_name = config.MODEL_TO_USE
-        logger.info(f"Using model from config: {model_name}")
+        model_name = "openrouter/moonshotai/kimi-k2"
+        logger.info(f"Using default model: {model_name}")

     # Log the model name after alias resolution
     resolved_model = MODEL_NAME_ALIASES.get(model_name, model_name)
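Taken together, the two endpoint hunks above amount to: fall back to the hard-coded Kimi K2 default when the request omits model_name, then resolve aliases through MODEL_NAME_ALIASES. A standalone sketch of that flow, not taken from the repository; the alias table below is an illustrative stand-in for utils.constants.MODEL_NAME_ALIASES, not its real contents:

from typing import Optional

MODEL_NAME_ALIASES = {"kimi-k2": "openrouter/moonshotai/kimi-k2"}  # assumed shape
DEFAULT_MODEL = "openrouter/moonshotai/kimi-k2"

def resolve_request_model(model_name: Optional[str]) -> str:
    # Mirrors the initiate_agent_with_files path: default first, then alias resolution.
    if model_name is None:
        model_name = DEFAULT_MODEL
    return MODEL_NAME_ALIASES.get(model_name, model_name)

assert resolve_request_model(None) == "openrouter/moonshotai/kimi-k2"
assert resolve_request_model("kimi-k2") == "openrouter/moonshotai/kimi-k2"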
@@ -46,7 +46,7 @@ class AgentConfig:
     stream: bool
     native_max_auto_continues: int = 25
     max_iterations: int = 100
-    model_name: str = "anthropic/claude-sonnet-4-20250514"
+    model_name: str = "openrouter/moonshotai/kimi-k2"
     enable_thinking: Optional[bool] = False
     reasoning_effort: Optional[str] = 'low'
     enable_context_manager: bool = True
@@ -80,9 +80,7 @@ class ToolManager:
         # self.thread_manager.add_tool(SandboxWebDevTool, project_id=self.project_id, thread_id=self.thread_id, thread_manager=self.thread_manager)
         if config.RAPID_API_KEY:
             self.thread_manager.add_tool(DataProvidersTool)
-
-

         # Add Browser Tool
         from agent.tools.browser_tool import BrowserTool
         self.thread_manager.add_tool(BrowserTool, project_id=self.project_id, thread_id=self.thread_id, thread_manager=self.thread_manager)
@@ -659,7 +657,7 @@ async def run_agent(
     thread_manager: Optional[ThreadManager] = None,
     native_max_auto_continues: int = 25,
     max_iterations: int = 100,
-    model_name: str = "anthropic/claude-sonnet-4-20250514",
+    model_name: str = "openrouter/moonshotai/kimi-k2",
     enable_thinking: Optional[bool] = False,
     reasoning_effort: Optional[str] = 'low',
     enable_context_manager: bool = True,
@@ -669,10 +667,10 @@ async def run_agent(
     target_agent_id: Optional[str] = None
 ):
     effective_model = model_name
-    if model_name == "anthropic/claude-sonnet-4-20250514" and agent_config and agent_config.get('model'):
+    if model_name == "openrouter/moonshotai/kimi-k2" and agent_config and agent_config.get('model'):
         effective_model = agent_config['model']
         logger.info(f"Using model from agent config: {effective_model} (no user selection)")
-    elif model_name != "anthropic/claude-sonnet-4-20250514":
+    elif model_name != "openrouter/moonshotai/kimi-k2":
         logger.info(f"Using user-selected model: {effective_model}")
     else:
         logger.info(f"Using default model: {effective_model}")
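The precedence encoded here (and mirrored in run_agent_background further down) reads as: an agent-config model wins only when the caller passed the default, an explicit non-default selection always wins, and otherwise the hard-coded default is used. A hedged, standalone sketch of that rule with illustrative values:

from typing import Any, Dict, Optional

DEFAULT_MODEL = "openrouter/moonshotai/kimi-k2"

def pick_effective_model(model_name: str, agent_config: Optional[Dict[str, Any]]) -> str:
    # Sketch of the precedence used in run_agent after this commit.
    if model_name == DEFAULT_MODEL and agent_config and agent_config.get("model"):
        # No explicit user selection: prefer the model pinned in the agent config.
        return agent_config["model"]
    # Either a user-selected model or the default itself.
    return model_name

assert pick_effective_model(DEFAULT_MODEL, {"model": "anthropic/claude-sonnet-4-20250514"}) == "anthropic/claude-sonnet-4-20250514"
assert pick_effective_model("openai/gpt-5", {"model": "anthropic/claude-sonnet-4-20250514"}) == "openai/gpt-5"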
@@ -7,7 +7,7 @@ class SunaConfig:
     DESCRIPTION = "Suna is your AI assistant with access to various tools and integrations to help you with tasks across domains."
     AVATAR = "🌞"
     AVATAR_COLOR = "#F59E0B"
-    DEFAULT_MODEL = "anthropic/claude-sonnet-4-20250514"
+    DEFAULT_MODEL = "openrouter/moonshotai/kimi-k2"
     SYSTEM_PROMPT = SYSTEM_PROMPT

     DEFAULT_TOOLS = {
@@ -116,7 +116,7 @@ async def run_agent_background(
     })

     effective_model = model_name
-    if model_name == "anthropic/claude-sonnet-4-20250514" and agent_config and agent_config.get('model'):
+    if model_name == "openrouter/moonshotai/kimi-k2" and agent_config and agent_config.get('model'):
         agent_model = agent_config['model']
         from utils.constants import MODEL_NAME_ALIASES
         resolved_agent_model = MODEL_NAME_ALIASES.get(agent_model, agent_model)
@@ -125,7 +125,7 @@ async def run_agent_background(
     else:
         from utils.constants import MODEL_NAME_ALIASES
         effective_model = MODEL_NAME_ALIASES.get(model_name, model_name)
-        if model_name != "anthropic/claude-sonnet-4-20250514":
+        if model_name != "openrouter/moonshotai/kimi-k2":
             logger.info(f"Using user-selected model: {model_name} -> {effective_model}")
         else:
             logger.info(f"Using default model: {effective_model}")
@@ -786,7 +786,7 @@ async def execute_agent_workflow(
     if active_version and active_version.model:
         model_name = active_version.model
     else:
-        model_name = config.MODEL_TO_USE or "anthropic/claude-sonnet-4-20250514"
+        model_name = "openrouter/moonshotai/kimi-k2"

     can_use, model_message, allowed_models = await can_use_model(client, account_id, model_name)
     if not can_use:
@@ -360,7 +360,7 @@ class AgentExecutor:
         trigger_variables: Dict[str, Any]
     ) -> str:
         client = await self._db.client
-        model_name = agent_config.get('model') or "anthropic/claude-sonnet-4-20250514"
+        model_name = agent_config.get('model') or "openrouter/moonshotai/kimi-k2"

         account_id = agent_config.get('account_id')
         if not account_id:
@@ -591,7 +591,7 @@ class WorkflowExecutor:
         from services.billing import check_billing_status, can_use_model

         client = await self._db.client
-        model_name = config.MODEL_TO_USE or "anthropic/claude-sonnet-4-20250514"
+        model_name = "openrouter/moonshotai/kimi-k2"

         can_use, model_message, _ = await can_use_model(client, account_id, model_name)
         if not can_use:
@@ -638,7 +638,7 @@ class WorkflowExecutor:
         agent_config: Dict[str, Any]
     ) -> str:
         client = await self._db.client
-        model_name = agent_config.get('model') or config.MODEL_TO_USE or "anthropic/claude-sonnet-4-20250514"
+        model_name = agent_config.get('model') or "openrouter/moonshotai/kimi-k2"

         account_id = agent_config.get('account_id')
         if not account_id:
@@ -215,8 +215,7 @@ class Configuration:
     AWS_SECRET_ACCESS_KEY: Optional[str] = None
     AWS_REGION_NAME: Optional[str] = None

-    # Model configuration
-    MODEL_TO_USE: Optional[str] = "anthropic/claude-sonnet-4-20250514"
+

     # Supabase configuration
     SUPABASE_URL: str
@@ -8,7 +8,7 @@ MODELS = {
             "input_cost_per_million_tokens": 3.00,
             "output_cost_per_million_tokens": 15.00
         },
-        "tier_availability": ["free", "paid"]
+        "tier_availability": ["paid"]
     },
     # "openrouter/deepseek/deepseek-chat": {
    #     "aliases": ["deepseek"],
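The backend registry gates model access per subscription tier through tier_availability: Claude Sonnet 4 becomes paid-only here, and the commit title implies Kimi K2 is (or stays) listed for the free tier. A hedged sketch of how such a filter could work; the helper and the Kimi entry below are illustrative, not the repository's actual billing code:

from typing import List

MODELS = {
    "anthropic/claude-sonnet-4-20250514": {"tier_availability": ["paid"]},
    # Assumed entry: the free/paid listing for Kimi K2 is not shown in this diff.
    "openrouter/moonshotai/kimi-k2": {"tier_availability": ["free", "paid"]},
}

def models_for_tier(tier: str) -> List[str]:
    # Return the model names a given tier is allowed to use (sketch).
    return [name for name, info in MODELS.items() if tier in info.get("tier_availability", [])]

assert models_for_tier("free") == ["openrouter/moonshotai/kimi-k2"]
assert "anthropic/claude-sonnet-4-20250514" in models_for_tier("paid")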
@@ -162,7 +162,7 @@ OPENAI_API_KEY=your-openai-key
 OPENROUTER_API_KEY=your-openrouter-key
 GEMINI_API_KEY=your-gemini-api-key
 MORPH_API_KEY=
-MODEL_TO_USE=anthropic/claude-sonnet-4-20250514
+

 # WEB SEARCH
 TAVILY_API_KEY=your-tavily-key
@@ -25,7 +25,7 @@ export async function generateMetadata({ params }): Promise<Metadata> {
 }

 const isDevelopment =
-  process.env.NODE_ENV === 'development' ||
+  // process.env.NODE_ENV === 'development' ||
   process.env.NEXT_PUBLIC_ENV_MODE === 'LOCAL' ||
   process.env.NEXT_PUBLIC_ENV_MODE === 'local';

@@ -8,8 +8,7 @@ import { useAvailableModels } from '@/hooks/react-query/subscriptions/use-model';
 export const STORAGE_KEY_MODEL = 'suna-preferred-model-v3';
 export const STORAGE_KEY_CUSTOM_MODELS = 'customModels';
 export const DEFAULT_PREMIUM_MODEL_ID = 'claude-sonnet-4';
-// export const DEFAULT_FREE_MODEL_ID = 'moonshotai/kimi-k2';
-export const DEFAULT_FREE_MODEL_ID = 'claude-sonnet-4';
+export const DEFAULT_FREE_MODEL_ID = 'moonshotai/kimi-k2';

 export type SubscriptionStatus = 'no_subscription' | 'active';
@@ -30,9 +29,9 @@ export interface CustomModel {

 // SINGLE SOURCE OF TRUTH for all model data - aligned with backend constants
 export const MODELS = {
-  // Free tier models (available to all users)
+  // Premium tier models (require subscription)
   'claude-sonnet-4': {
-    tier: 'free',
+    tier: 'premium',
     priority: 100,
     recommended: true,
     lowQuality: false
@@ -51,11 +50,11 @@ export const MODELS = {
   //   lowQuality: false
   // },

-  // Premium/Paid tier models (require subscription) - except specific free models
+  // Free tier models (available to all users)
   'moonshotai/kimi-k2': {
     tier: 'free',
-    priority: 96,
-    recommended: false,
+    priority: 100,
+    recommended: true,
     lowQuality: false
   },
   'grok-4': {
@@ -208,12 +208,12 @@ function getEnvironmentMode(): EnvMode {
       return EnvMode.STAGING;
     case 'PRODUCTION':
       return EnvMode.PRODUCTION;
-    default:
-      if (process.env.NODE_ENV === 'development') {
-        return EnvMode.LOCAL;
-      } else {
-        return EnvMode.PRODUCTION;
-      }
+    // default:
+    //   if (process.env.NODE_ENV === 'development') {
+    //     return EnvMode.LOCAL;
+    //   } else {
+    //     return EnvMode.PRODUCTION;
+    //   }
   }
 }

setup.py (32 lines changed)
@@ -131,7 +131,7 @@ def load_existing_env_vars():
             "OPENROUTER_API_KEY": backend_env.get("OPENROUTER_API_KEY", ""),
             "MORPH_API_KEY": backend_env.get("MORPH_API_KEY", ""),
             "GEMINI_API_KEY": backend_env.get("GEMINI_API_KEY", ""),
-            "MODEL_TO_USE": backend_env.get("MODEL_TO_USE", ""),
         },
         "search": {
             "TAVILY_API_KEY": backend_env.get("TAVILY_API_KEY", ""),
@@ -308,7 +308,7 @@ class SetupWizard:
         llm_keys = [
             k
             for k in self.env_vars["llm"]
-            if k != "MODEL_TO_USE" and self.env_vars["llm"][k] and k != "MORPH_API_KEY"
+            if self.env_vars["llm"][k] and k != "MORPH_API_KEY"
         ]
         if llm_keys:
             providers = [k.split("_")[0].capitalize() for k in llm_keys]
@@ -682,7 +682,7 @@ class SetupWizard:

         # Check if we already have any LLM keys configured
         existing_keys = {
-            k: v for k, v in self.env_vars["llm"].items() if v and k != "MODEL_TO_USE"
+            k: v for k, v in self.env_vars["llm"].items() if v
         }
         has_existing = bool(existing_keys)

@@ -706,7 +706,7 @@ class SetupWizard:
         while not any(
             k
             for k in self.env_vars["llm"]
-            if k != "MODEL_TO_USE" and self.env_vars["llm"][k]
+            if self.env_vars["llm"][k]
         ):
             providers = {
                 "1": ("OpenAI", "OPENAI_API_KEY"),
@@ -752,26 +752,7 @@ class SetupWizard:
                 )
                 self.env_vars["llm"][key] = api_key

-        # Set a default model if not already set
-        if not self.env_vars["llm"].get("MODEL_TO_USE"):
-            if self.env_vars["llm"].get("OPENAI_API_KEY"):
-                self.env_vars["llm"]["MODEL_TO_USE"] = "openai/gpt-5"
-            elif self.env_vars["llm"].get("ANTHROPIC_API_KEY"):
-                self.env_vars["llm"][
-                    "MODEL_TO_USE"
-                ] = "anthropic/claude-sonnet-4-20250514"
-            elif self.env_vars["llm"].get("GEMINI_API_KEY"):
-                self.env_vars["llm"][
-                    "MODEL_TO_USE"
-                ] = "gemini/gemini-2.5-pro"
-            elif self.env_vars["llm"].get("OPENROUTER_API_KEY"):
-                self.env_vars["llm"][
-                    "MODEL_TO_USE"
-                ] = "openrouter/google/gemini-2.5-pro"
-
-        print_success(
-            f"LLM keys saved. Default model: {self.env_vars['llm'].get('MODEL_TO_USE', 'Not set')}"
-        )
+        print_success("LLM keys saved.")

     def collect_morph_api_key(self):
         """Collects the optional MorphLLM API key for code editing."""
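With the MODEL_TO_USE bookkeeping gone from the wizard, it only has to check whether any LLM key was entered at all, still excluding the optional Morph key when listing providers. A compact sketch of that simplified check, using made-up placeholder data rather than the wizard's real state:

# Illustrative structure mirroring self.env_vars["llm"] in setup.py.
env_vars = {
    "llm": {
        "OPENAI_API_KEY": "",
        "ANTHROPIC_API_KEY": "",
        "OPENROUTER_API_KEY": "placeholder-key",
        "MORPH_API_KEY": "",
    }
}

# After this change: any non-empty key counts; no MODEL_TO_USE special case.
llm_keys = [k for k in env_vars["llm"] if env_vars["llm"][k] and k != "MORPH_API_KEY"]

print("Configured LLM providers:", [k.split("_")[0].capitalize() for k in llm_keys])
print("LLM keys saved." if llm_keys else "No LLM keys configured yet.")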
@@ -1326,9 +1307,8 @@ class SetupWizard:
         """Shows final instructions to the user."""
         print(f"\n{Colors.GREEN}{Colors.BOLD}✨ Suna Setup Complete! ✨{Colors.ENDC}\n")

-        default_model = self.env_vars.get("llm", {}).get("MODEL_TO_USE", "N/A")
         print_info(
-            f"Suna is configured to use {Colors.GREEN}{default_model}{Colors.ENDC} as the default LLM."
+            f"Suna is configured with your LLM API keys and ready to use."
         )
         print_info(
             f"Delete the {Colors.RED}.setup_progress{Colors.ENDC} file to reset the setup."