#!/usr/bin/env python3
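"""Interactive setup wizard for Suna.

Walks through choosing a setup method (Docker Compose or manual), collecting
Supabase, Daytona, LLM, search, and integration credentials, writing
backend/.env and frontend/.env.local, pushing Supabase database migrations,
installing dependencies, and starting the services. Run it from the Suna
repository root; progress is saved to .setup_progress so the wizard can be
resumed if interrupted.
"""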

import os
import sys
import time
import platform
import subprocess
import re
import json
import secrets
import base64

# --- Constants ---
IS_WINDOWS = platform.system() == "Windows"
PROGRESS_FILE = ".setup_progress"
ENV_DATA_FILE = ".setup_env.json"


# --- ANSI Colors ---
class Colors:
    HEADER = "\033[95m"
    BLUE = "\033[94m"
    CYAN = "\033[96m"
    GREEN = "\033[92m"
    YELLOW = "\033[93m"
    RED = "\033[91m"
    ENDC = "\033[0m"
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"


# --- UI Helpers ---
def print_banner():
    """Prints the Suna setup banner."""
    print(
        f"""
{Colors.BLUE}{Colors.BOLD}
   ███████╗██╗   ██╗███╗   ██╗ █████╗
   ██╔════╝██║   ██║████╗  ██║██╔══██╗
   ███████╗██║   ██║██╔██╗ ██║███████║
   ╚════██║██║   ██║██║╚██╗██║██╔══██║
   ███████║╚██████╔╝██║ ╚████║██║  ██║
   ╚══════╝ ╚═════╝ ╚═╝  ╚═══╝╚═╝  ╚═╝

   Installation Wizard
{Colors.ENDC}
"""
    )


def print_step(step_num, total_steps, step_name):
    """Prints a formatted step header."""
    print(
        f"\n{Colors.BLUE}{Colors.BOLD}Step {step_num}/{total_steps}: {step_name}{Colors.ENDC}"
    )
    print(f"{Colors.CYAN}{'='*50}{Colors.ENDC}\n")


def print_info(message):
    """Prints an informational message."""
    print(f"{Colors.CYAN}ℹ️  {message}{Colors.ENDC}")


def print_success(message):
    """Prints a success message."""
    print(f"{Colors.GREEN}✅ {message}{Colors.ENDC}")


def print_warning(message):
    """Prints a warning message."""
    print(f"{Colors.YELLOW}⚠️ {message}{Colors.ENDC}")


def print_error(message):
    """Prints an error message."""
    print(f"{Colors.RED}❌ {message}{Colors.ENDC}")


# --- Environment File Parsing ---
def parse_env_file(filepath):
    """Parses a .env file and returns a dictionary of key-value pairs."""
    env_vars = {}
    if not os.path.exists(filepath):
        return env_vars

    try:
        with open(filepath, "r") as f:
            for line in f:
                line = line.strip()
                # Skip empty lines and comments
                if not line or line.startswith("#"):
                    continue
                # Handle key=value pairs
                if "=" in line:
                    key, value = line.split("=", 1)
                    key = key.strip()
                    value = value.strip()
                    # Remove quotes if present
                    if value.startswith('"') and value.endswith('"'):
                        value = value[1:-1]
                    elif value.startswith("'") and value.endswith("'"):
                        value = value[1:-1]
                    env_vars[key] = value
    except Exception as e:
        print_warning(f"Could not parse {filepath}: {e}")

    return env_vars


def load_existing_env_vars():
    """Loads existing environment variables from .env files."""
    backend_env = parse_env_file(os.path.join("backend", ".env"))
    frontend_env = parse_env_file(os.path.join("frontend", ".env.local"))

    # Organize the variables by category
    existing_vars = {
        "supabase": {
            "SUPABASE_URL": backend_env.get("SUPABASE_URL", ""),
            "SUPABASE_ANON_KEY": backend_env.get("SUPABASE_ANON_KEY", ""),
            "SUPABASE_SERVICE_ROLE_KEY": backend_env.get(
                "SUPABASE_SERVICE_ROLE_KEY", ""
            ),
        },
        "daytona": {
            "DAYTONA_API_KEY": backend_env.get("DAYTONA_API_KEY", ""),
            "DAYTONA_SERVER_URL": backend_env.get("DAYTONA_SERVER_URL", ""),
            "DAYTONA_TARGET": backend_env.get("DAYTONA_TARGET", ""),
        },
        "llm": {
            "OPENAI_API_KEY": backend_env.get("OPENAI_API_KEY", ""),
            "ANTHROPIC_API_KEY": backend_env.get("ANTHROPIC_API_KEY", ""),
            "OPENROUTER_API_KEY": backend_env.get("OPENROUTER_API_KEY", ""),
            "MORPH_API_KEY": backend_env.get("MORPH_API_KEY", ""),
            "GEMINI_API_KEY": backend_env.get("GEMINI_API_KEY", ""),
        },
        "search": {
            "TAVILY_API_KEY": backend_env.get("TAVILY_API_KEY", ""),
            "FIRECRAWL_API_KEY": backend_env.get("FIRECRAWL_API_KEY", ""),
            "FIRECRAWL_URL": backend_env.get("FIRECRAWL_URL", ""),
        },
        "rapidapi": {
            "RAPID_API_KEY": backend_env.get("RAPID_API_KEY", ""),
        },
        "cron": {
            # No secrets required. Make sure pg_cron and pg_net are enabled in Supabase
        },
        "webhook": {
            "WEBHOOK_BASE_URL": backend_env.get("WEBHOOK_BASE_URL", ""),
            "TRIGGER_WEBHOOK_SECRET": backend_env.get("TRIGGER_WEBHOOK_SECRET", ""),
        },
        "slack": {
            "SLACK_CLIENT_ID": backend_env.get("SLACK_CLIENT_ID", ""),
            "SLACK_CLIENT_SECRET": backend_env.get("SLACK_CLIENT_SECRET", ""),
            "SLACK_REDIRECT_URI": backend_env.get("SLACK_REDIRECT_URI", ""),
        },
        "mcp": {
            "MCP_CREDENTIAL_ENCRYPTION_KEY": backend_env.get(
                "MCP_CREDENTIAL_ENCRYPTION_KEY", ""
            ),
        },
        "pipedream": {
            "PIPEDREAM_PROJECT_ID": backend_env.get("PIPEDREAM_PROJECT_ID", ""),
            "PIPEDREAM_CLIENT_ID": backend_env.get("PIPEDREAM_CLIENT_ID", ""),
            "PIPEDREAM_CLIENT_SECRET": backend_env.get("PIPEDREAM_CLIENT_SECRET", ""),
            "PIPEDREAM_X_PD_ENVIRONMENT": backend_env.get(
                "PIPEDREAM_X_PD_ENVIRONMENT", ""
            ),
        },
        "kortix": {
            "KORTIX_ADMIN_API_KEY": backend_env.get("KORTIX_ADMIN_API_KEY", ""),
        },
        "frontend": {
            "NEXT_PUBLIC_SUPABASE_URL": frontend_env.get(
                "NEXT_PUBLIC_SUPABASE_URL", ""
            ),
            "NEXT_PUBLIC_SUPABASE_ANON_KEY": frontend_env.get(
                "NEXT_PUBLIC_SUPABASE_ANON_KEY", ""
            ),
            "NEXT_PUBLIC_BACKEND_URL": frontend_env.get("NEXT_PUBLIC_BACKEND_URL", ""),
            "NEXT_PUBLIC_URL": frontend_env.get("NEXT_PUBLIC_URL", ""),
            "NEXT_PUBLIC_ENV_MODE": frontend_env.get("NEXT_PUBLIC_ENV_MODE", ""),
        },
    }

    return existing_vars


def mask_sensitive_value(value, show_last=4):
    """Masks sensitive values for display, showing only the last few characters."""
    if not value or len(value) <= show_last:
        return value
    return "*" * (len(value) - show_last) + value[-show_last:]


# --- State Management ---
def save_progress(step, data):
    """Saves the current step and collected data."""
    with open(PROGRESS_FILE, "w") as f:
        json.dump({"step": step, "data": data}, f)


def load_progress():
    """Loads the last saved step and data."""
    if os.path.exists(PROGRESS_FILE):
        with open(PROGRESS_FILE, "r") as f:
            try:
                return json.load(f)
            except (json.JSONDecodeError, KeyError):
                return {"step": 0, "data": {}}
    return {"step": 0, "data": {}}


# --- Validators ---
def validate_url(url, allow_empty=False):
    """Validates a URL format."""
    if allow_empty and not url:
        return True
    pattern = re.compile(
        r"^(?:http|https)://"
        r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|"
        r"localhost|"
        r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})"
        r"(?::\d+)?"
        r"(?:/?|[/?]\S+)$",
        re.IGNORECASE,
    )
    return bool(pattern.match(url))


def validate_api_key(api_key, allow_empty=False):
    """Performs a basic validation for an API key."""
    if allow_empty and not api_key:
        return True
    return bool(api_key and len(api_key) >= 10)


def generate_encryption_key():
    """Generates a secure base64-encoded encryption key for MCP credentials."""
    # Generate 32 random bytes (256 bits)
    key_bytes = secrets.token_bytes(32)
    # Encode as base64
    return base64.b64encode(key_bytes).decode("utf-8")


def generate_admin_api_key():
    """Generates a secure admin API key for Kortix."""
    # Generate 32 random bytes and encode as hex for a readable API key
    key_bytes = secrets.token_bytes(32)
    return key_bytes.hex()


def generate_webhook_secret():
    """Generates a secure shared secret for trigger webhooks."""
    # 32 random bytes as hex (64 hex chars)
    return secrets.token_hex(32)


# --- Main Setup Class ---
class SetupWizard:
    def __init__(self):
        progress = load_progress()
        self.current_step = progress.get("step", 0)

        # Load existing environment variables from .env files
        existing_env_vars = load_existing_env_vars()

        # Start with existing values, then override with any saved progress
        self.env_vars = {
            "setup_method": None,
            "supabase": existing_env_vars["supabase"],
            "daytona": existing_env_vars["daytona"],
            "llm": existing_env_vars["llm"],
            "search": existing_env_vars["search"],
            "rapidapi": existing_env_vars["rapidapi"],
            "cron": existing_env_vars.get("cron", {}),
            "slack": existing_env_vars["slack"],
            "webhook": existing_env_vars["webhook"],
            "mcp": existing_env_vars["mcp"],
            "pipedream": existing_env_vars["pipedream"],
            "kortix": existing_env_vars["kortix"],
        }

        # Override with any progress data (in case user is resuming)
        saved_data = progress.get("data", {})
        for key, value in saved_data.items():
            if key in self.env_vars and isinstance(value, dict):
                self.env_vars[key].update(value)
            else:
                self.env_vars[key] = value

        self.total_steps = 17

    def show_current_config(self):
        """Shows the current configuration status."""
        config_items = []

        # Check Supabase
        if self.env_vars["supabase"]["SUPABASE_URL"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Supabase")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Supabase")

        # Check Daytona
        if self.env_vars["daytona"]["DAYTONA_API_KEY"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Daytona")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Daytona")

        # Check LLM providers
        llm_keys = [
            k
            for k in self.env_vars["llm"]
            if self.env_vars["llm"][k] and k != "MORPH_API_KEY"
        ]
        if llm_keys:
            providers = [k.split("_")[0].capitalize() for k in llm_keys]
            config_items.append(
                f"{Colors.GREEN}✓{Colors.ENDC} LLM ({', '.join(providers)})"
            )
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} LLM providers")

        # Check Search APIs
        search_configured = (
            self.env_vars["search"]["TAVILY_API_KEY"]
            and self.env_vars["search"]["FIRECRAWL_API_KEY"]
        )
        if search_configured:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Search APIs")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Search APIs")

        # Check RapidAPI (optional)
        if self.env_vars["rapidapi"]["RAPID_API_KEY"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} RapidAPI (optional)")
        else:
            config_items.append(f"{Colors.CYAN}○{Colors.ENDC} RapidAPI (optional)")

        # Check Cron/Webhook setup
        if self.env_vars["webhook"]["WEBHOOK_BASE_URL"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Supabase Cron & Webhooks")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Supabase Cron & Webhooks")

        # Check MCP encryption key
        if self.env_vars["mcp"]["MCP_CREDENTIAL_ENCRYPTION_KEY"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} MCP encryption key")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} MCP encryption key")

        # Check Pipedream configuration
        if self.env_vars["pipedream"]["PIPEDREAM_PROJECT_ID"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Pipedream (optional)")
        else:
            config_items.append(f"{Colors.CYAN}○{Colors.ENDC} Pipedream (optional)")

        # Check Slack configuration
        if self.env_vars["slack"]["SLACK_CLIENT_ID"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Slack (optional)")
        else:
            config_items.append(f"{Colors.CYAN}○{Colors.ENDC} Slack (optional)")

        # Check Webhook configuration
        if self.env_vars["webhook"]["WEBHOOK_BASE_URL"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Webhook")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Webhook")

        # Check Morph (optional but recommended)
        if self.env_vars["llm"].get("MORPH_API_KEY"):
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Morph (Code Editing)")
        elif self.env_vars["llm"].get("OPENROUTER_API_KEY"):
            config_items.append(f"{Colors.CYAN}○{Colors.ENDC} Morph (fallback to OpenRouter)")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Morph (recommended)")

        # Check Kortix configuration
        if self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Kortix Admin")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Kortix Admin")

        if any("✓" in item for item in config_items):
            print_info("Current configuration status:")
            for item in config_items:
                print(f"  {item}")
            print()

    def run(self):
        """Runs the setup wizard."""
        print_banner()
        print(
            "This wizard will guide you through setting up Suna, an open-source generalist AI Worker.\n"
        )

        # Show current configuration status
        self.show_current_config()

        try:
            self.run_step(1, self.choose_setup_method)
            self.run_step(2, self.check_requirements)
            self.run_step(3, self.collect_supabase_info)
            self.run_step(4, self.collect_daytona_info)
            self.run_step(5, self.collect_llm_api_keys)
            self.run_step(6, self.collect_morph_api_key)
            self.run_step(7, self.collect_search_api_keys)
            self.run_step(8, self.collect_rapidapi_keys)
            self.run_step(9, self.collect_kortix_keys)
            # Supabase Cron does not require keys; ensure DB migrations enable cron functions
            self.run_step(10, self.collect_webhook_keys)
            self.run_step(11, self.collect_mcp_keys)
            self.run_step(12, self.collect_pipedream_keys)
            self.run_step(13, self.collect_slack_keys)
            self.run_step(14, self.configure_env_files)
            self.run_step(15, self.setup_supabase_database)
            self.run_step(16, self.install_dependencies)
            self.run_step(17, self.start_suna)

            self.final_instructions()

        except KeyboardInterrupt:
            print("\n\nSetup interrupted. Your progress has been saved.")
            print("You can resume setup anytime by running this script again.")
            sys.exit(1)
        except Exception as e:
            print_error(f"An unexpected error occurred: {e}")
            print_error(
                "Please check the error message and try running the script again."
            )
            sys.exit(1)

    def run_step(self, step_number, step_function, *args, **kwargs):
        """Executes a setup step if it hasn't been completed."""
        if self.current_step < step_number:
            step_function(*args, **kwargs)
            self.current_step = step_number
            save_progress(self.current_step, self.env_vars)

    def choose_setup_method(self):
        """Asks the user to choose between Docker and manual setup."""
        print_step(1, self.total_steps, "Choose Setup Method")

        if self.env_vars.get("setup_method"):
            print_info(
                f"Continuing with '{self.env_vars['setup_method']}' setup method."
            )
            return

        print_info(
            "You can start Suna using either Docker Compose or by manually starting the services."
        )
        print(f"\n{Colors.CYAN}How would you like to set up Suna?{Colors.ENDC}")
        print(
            f"{Colors.CYAN}[1] {Colors.GREEN}Docker Compose{Colors.ENDC} {Colors.CYAN}(recommended, starts all services automatically){Colors.ENDC}"
        )
        print(
            f"{Colors.CYAN}[2] {Colors.GREEN}Manual{Colors.ENDC} {Colors.CYAN}(requires installing dependencies and running services manually){Colors.ENDC}\n"
        )

        while True:
            choice = input("Enter your choice (1 or 2): ").strip()
            if choice == "1":
                self.env_vars["setup_method"] = "docker"
                break
            elif choice == "2":
                self.env_vars["setup_method"] = "manual"
                break
            else:
                print_error(
                    "Invalid selection. Please enter '1' for Docker or '2' for Manual."
                )
        print_success(f"Selected '{self.env_vars['setup_method']}' setup.")

    def check_requirements(self):
        """Checks if all required tools for the chosen setup method are installed."""
        print_step(2, self.total_steps, "Checking Requirements")

        if self.env_vars["setup_method"] == "docker":
            requirements = {
                "git": "https://git-scm.com/downloads",
                "docker": "https://docs.docker.com/get-docker/",
            }
        else:  # manual
            requirements = {
                "git": "https://git-scm.com/downloads",
                "uv": "https://github.com/astral-sh/uv#installation",
                "node": "https://nodejs.org/en/download/",
                "npm": "https://docs.npmjs.com/downloading-and-installing-node-js-and-npm",
                "docker": "https://docs.docker.com/get-docker/",  # For Redis
            }

        missing = []
        for cmd, url in requirements.items():
            try:
                cmd_to_check = cmd
                # On Windows, python3 is just python
                if IS_WINDOWS and cmd in ["python3", "pip3"]:
                    cmd_to_check = cmd.replace("3", "")

                subprocess.run(
                    [cmd_to_check, "--version"],
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE,
                    check=True,
                    shell=IS_WINDOWS,
                )
                print_success(f"{cmd} is installed.")
            except (subprocess.SubprocessError, FileNotFoundError):
                missing.append((cmd, url))
                print_error(f"{cmd} is not installed.")

        if missing:
            print_error(
                "\nMissing required tools. Please install them before continuing:"
            )
            for cmd, url in missing:
                print(f"  - {cmd}: {url}")
            sys.exit(1)

        self.check_docker_running()
        self.check_suna_directory()

    def check_docker_running(self):
        """Checks if the Docker daemon is running."""
        print_info("Checking if Docker is running...")
        try:
            subprocess.run(
                ["docker", "info"],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                check=True,
                shell=IS_WINDOWS,
            )
            print_success("Docker is running.")
            return True
        except subprocess.SubprocessError:
            print_error(
                "Docker is installed but not running. Please start Docker and try again."
            )
            sys.exit(1)

    def check_suna_directory(self):
        """Checks if the script is run from the correct project root directory."""
        print_info("Verifying project structure...")
        required_dirs = ["backend", "frontend"]
        required_files = ["README.md", "docker-compose.yaml"]

        for directory in required_dirs:
            if not os.path.isdir(directory):
                print_error(
                    f"'{directory}' directory not found. Make sure you're in the Suna repository root."
                )
                sys.exit(1)

        for file in required_files:
            if not os.path.isfile(file):
                print_error(
                    f"'{file}' not found. Make sure you're in the Suna repository root."
                )
                sys.exit(1)

        print_success("Suna repository detected.")
        return True

    def _get_input(
        self, prompt, validator, error_message, allow_empty=False, default_value=""
    ):
        """Helper to get validated user input with optional default value."""
        while True:
            # Show default value in prompt if it exists
            if default_value:
                # Mask sensitive values for display
                if "key" in prompt.lower() or "token" in prompt.lower():
                    display_default = mask_sensitive_value(default_value)
                else:
                    display_default = default_value
                full_prompt = (
                    f"{prompt}[{Colors.GREEN}{display_default}{Colors.ENDC}]: "
                )
            else:
                full_prompt = prompt

            value = input(full_prompt).strip()

            # Use default value if user just pressed Enter
            if not value and default_value:
                value = default_value

            if validator(value, allow_empty=allow_empty):
                return value
            print_error(error_message)

    def collect_supabase_info(self):
        """Collects Supabase project information from the user."""
        print_step(3, self.total_steps, "Collecting Supabase Information")

        # Check if we already have values configured
        has_existing = any(self.env_vars["supabase"].values())
        if has_existing:
            print_info(
                "Found existing Supabase configuration. Press Enter to keep current values or type new ones."
            )
        else:
            print_info(
                "You'll need a Supabase project. Visit https://supabase.com/dashboard/projects to create one."
            )
            print_info(
                "In your project settings, go to 'API' to find the required information."
            )
            input("Press Enter to continue once you have your project details...")

        self.env_vars["supabase"]["SUPABASE_URL"] = self._get_input(
            "Enter your Supabase Project URL (e.g., https://xyz.supabase.co): ",
            validate_url,
            "Invalid URL format. Please enter a valid URL.",
            default_value=self.env_vars["supabase"]["SUPABASE_URL"],
        )
        self.env_vars["supabase"]["SUPABASE_ANON_KEY"] = self._get_input(
            "Enter your Supabase anon key: ",
            validate_api_key,
            "This does not look like a valid key. It should be at least 10 characters.",
            default_value=self.env_vars["supabase"]["SUPABASE_ANON_KEY"],
        )
        self.env_vars["supabase"]["SUPABASE_SERVICE_ROLE_KEY"] = self._get_input(
            "Enter your Supabase service role key: ",
            validate_api_key,
            "This does not look like a valid key. It should be at least 10 characters.",
            default_value=self.env_vars["supabase"]["SUPABASE_SERVICE_ROLE_KEY"],
        )
        print_success("Supabase information saved.")

    def collect_daytona_info(self):
        """Collects Daytona API key."""
        print_step(4, self.total_steps, "Collecting Daytona Information")

        # Check if we already have values configured
        has_existing = bool(self.env_vars["daytona"]["DAYTONA_API_KEY"])
        if has_existing:
            print_info(
                "Found existing Daytona configuration. Press Enter to keep current values or type new ones."
            )
        else:
            print_info(
                "Suna uses Daytona for sandboxing. Visit https://app.daytona.io/ to create an account."
            )
            print_info("Then, generate an API key from the 'Keys' menu.")
            input("Press Enter to continue once you have your API key...")

        self.env_vars["daytona"]["DAYTONA_API_KEY"] = self._get_input(
            "Enter your Daytona API key: ",
            validate_api_key,
            "Invalid API key format. It should be at least 10 characters long.",
            default_value=self.env_vars["daytona"]["DAYTONA_API_KEY"],
        )

        # Set defaults if not already configured
        if not self.env_vars["daytona"]["DAYTONA_SERVER_URL"]:
            self.env_vars["daytona"][
                "DAYTONA_SERVER_URL"
            ] = "https://app.daytona.io/api"
        if not self.env_vars["daytona"]["DAYTONA_TARGET"]:
            self.env_vars["daytona"]["DAYTONA_TARGET"] = "us"

        print_success("Daytona information saved.")

        print_warning(
            "IMPORTANT: You must create a Suna snapshot in Daytona for it to work properly."
        )
        print_info(
            f"Visit {Colors.GREEN}https://app.daytona.io/dashboard/snapshots{Colors.ENDC}{Colors.CYAN} to create a snapshot."
        )
        print_info("Create a snapshot with these exact settings:")
        print_info(f"  - Name:\t\t{Colors.GREEN}kortix/suna:0.1.3.10{Colors.ENDC}")
        print_info(f"  - Snapshot name:\t{Colors.GREEN}kortix/suna:0.1.3.10{Colors.ENDC}")
        print_info(
            f"  - Entrypoint:\t{Colors.GREEN}/usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf{Colors.ENDC}"
        )
        input("Press Enter to continue once you have created the snapshot...")

    def collect_llm_api_keys(self):
        """Collects LLM API keys for various providers."""
        print_step(5, self.total_steps, "Collecting LLM API Keys")

        # Check if we already have any LLM keys configured
        existing_keys = {k: v for k, v in self.env_vars["llm"].items() if v}
        has_existing = bool(existing_keys)

        if has_existing:
            print_info("Found existing LLM API keys:")
            for key, value in existing_keys.items():
                provider_name = key.split("_")[0].capitalize()
                print_info(f"  - {provider_name}: {mask_sensitive_value(value)}")
            print_info(
                "You can add more providers or press Enter to keep existing configuration."
            )
        else:
            print_info(
                "Suna requires at least one LLM provider. Supported: OpenAI, Anthropic, Google Gemini, OpenRouter."
            )

        # Don't clear existing keys if we're updating
        if not has_existing:
            self.env_vars["llm"] = {}

        while not any(k for k in self.env_vars["llm"] if self.env_vars["llm"][k]):
            providers = {
                "1": ("OpenAI", "OPENAI_API_KEY"),
                "2": ("Anthropic", "ANTHROPIC_API_KEY"),
                "3": ("Google Gemini", "GEMINI_API_KEY"),
                "4": ("OpenRouter", "OPENROUTER_API_KEY"),
            }
            print(
                f"\n{Colors.CYAN}Select LLM providers to configure (e.g., 1,3):{Colors.ENDC}"
            )
            for key, (name, env_key) in providers.items():
                current_value = self.env_vars["llm"].get(env_key, "")
                status = (
                    f" {Colors.GREEN}(configured){Colors.ENDC}" if current_value else ""
                )
                print(f"{Colors.CYAN}[{key}] {Colors.GREEN}{name}{Colors.ENDC}{status}")

            # Allow Enter to skip if we already have keys configured
            if has_existing:
                choices_input = input(
                    "Select providers (or press Enter to skip): "
                ).strip()
                if not choices_input:
                    break
            else:
                choices_input = input("Select providers: ").strip()

            choices = choices_input.replace(",", " ").split()
            selected_keys = {providers[c][1] for c in choices if c in providers}

            if not selected_keys and not has_existing:
                print_error("Invalid selection. Please choose at least one provider.")
                continue

            for key in selected_keys:
                provider_name = key.split("_")[0].capitalize()
                existing_value = self.env_vars["llm"].get(key, "")
                api_key = self._get_input(
                    f"Enter your {provider_name} API key: ",
                    validate_api_key,
                    "Invalid API key format.",
                    default_value=existing_value,
                )
                self.env_vars["llm"][key] = api_key

        print_success("LLM keys saved.")

    def collect_morph_api_key(self):
        """Collects the optional MorphLLM API key for code editing."""
        print_step(6, self.total_steps, "Configure AI-Powered Code Editing (Optional)")

        existing_key = self.env_vars["llm"].get("MORPH_API_KEY", "")
        openrouter_key = self.env_vars["llm"].get("OPENROUTER_API_KEY", "")

        if existing_key:
            print_info(f"Found existing Morph API key: {mask_sensitive_value(existing_key)}")
            print_info("AI-powered code editing is enabled using Morph.")
            return

        print_info("Suna uses Morph for fast, intelligent code editing.")
        print_info("This is optional but highly recommended for the best experience.")

        if openrouter_key:
            print_info(
                "An OpenRouter API key is already configured. It can be used as a fallback for code editing if you don't provide a Morph key."
            )

        while True:
            choice = input("Do you want to add a Morph API key now? (y/N): ").lower().strip()
            if choice in ['y', 'n', '']:
                break
            print_error("Invalid input. Please enter 'y' or 'n'.")

        if choice == 'y':
            print_info("Great! Please get your API key from: https://morphllm.com/api-keys")
            morph_api_key = self._get_input(
                "Enter your Morph API key (or press Enter to skip): ",
                validate_api_key,
                "Invalid API key format. It should be at least 10 characters long, or press Enter to skip.",
                allow_empty=True,
                default_value="",
            )
            if morph_api_key:
                self.env_vars["llm"]["MORPH_API_KEY"] = morph_api_key
                print_success("Morph API key saved. AI-powered code editing is enabled.")
            else:
                if openrouter_key:
                    print_info("Skipping Morph key. OpenRouter will be used for code editing.")
                else:
                    print_warning("Skipping Morph key. Code editing will use a less capable model.")
        else:
            if openrouter_key:
                print_info("Okay, OpenRouter will be used as a fallback for code editing.")
            else:
                print_warning("Okay, code editing will use a less capable model without a Morph or OpenRouter key.")

    def collect_search_api_keys(self):
        """Collects API keys for search and web scraping tools."""
        print_step(7, self.total_steps, "Collecting Search and Scraping API Keys")

        # Check if we already have values configured
        has_existing = any(self.env_vars["search"].values())
        if has_existing:
            print_info(
                "Found existing search API keys. Press Enter to keep current values or type new ones."
            )
        else:
            print_info("Suna uses Tavily for search and Firecrawl for web scraping.")
            print_info(
                "Get a Tavily key at https://tavily.com and a Firecrawl key at https://firecrawl.dev"
            )
            input("Press Enter to continue once you have your keys...")

        self.env_vars["search"]["TAVILY_API_KEY"] = self._get_input(
            "Enter your Tavily API key: ",
            validate_api_key,
            "Invalid API key.",
            default_value=self.env_vars["search"]["TAVILY_API_KEY"],
        )
        self.env_vars["search"]["FIRECRAWL_API_KEY"] = self._get_input(
            "Enter your Firecrawl API key: ",
            validate_api_key,
            "Invalid API key.",
            default_value=self.env_vars["search"]["FIRECRAWL_API_KEY"],
        )

        # Handle Firecrawl URL configuration
        current_url = self.env_vars["search"]["FIRECRAWL_URL"]
        is_self_hosted_default = (
            current_url and current_url != "https://api.firecrawl.dev"
        )

        if current_url:
            prompt = f"Are you self-hosting Firecrawl? (y/N) [Current: {'y' if is_self_hosted_default else 'N'}]: "
        else:
            prompt = "Are you self-hosting Firecrawl? (y/N): "

        response = input(prompt).lower().strip()
        if not response and current_url:
            # Use existing configuration
            is_self_hosted = is_self_hosted_default
        else:
            is_self_hosted = response == "y"

        if is_self_hosted:
            self.env_vars["search"]["FIRECRAWL_URL"] = self._get_input(
                "Enter your self-hosted Firecrawl URL: ",
                validate_url,
                "Invalid URL.",
                default_value=(
                    current_url if current_url != "https://api.firecrawl.dev" else ""
                ),
            )
        else:
            self.env_vars["search"]["FIRECRAWL_URL"] = "https://api.firecrawl.dev"

        print_success("Search and scraping keys saved.")

    def collect_rapidapi_keys(self):
        """Collects the optional RapidAPI key."""
        print_step(8, self.total_steps, "Collecting RapidAPI Key (Optional)")

        # Check if we already have a value configured
        existing_key = self.env_vars["rapidapi"]["RAPID_API_KEY"]
        if existing_key:
            print_info(
                f"Found existing RapidAPI key: {mask_sensitive_value(existing_key)}"
            )
            print_info("Press Enter to keep current value or type a new one.")
        else:
            print_info("A RapidAPI key enables extra tools like LinkedIn scraping.")
            print_info(
                "Get a key at https://rapidapi.com/. You can skip this and add it later."
            )

        rapid_api_key = self._get_input(
            "Enter your RapidAPI key (or press Enter to skip): ",
            validate_api_key,
            "Invalid API key format. It should be at least 10 characters long, or press Enter to skip.",
            allow_empty=True,
            default_value=existing_key,
        )
        self.env_vars["rapidapi"]["RAPID_API_KEY"] = rapid_api_key
        if rapid_api_key:
            print_success("RapidAPI key saved.")
        else:
            print_info("Skipping RapidAPI key.")

    def collect_kortix_keys(self):
        """Generates or configures the Kortix admin API key."""
        print_step(9, self.total_steps, "Configuring Kortix Admin API Key")

        # Check if we already have a value configured
        existing_key = self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"]
        if existing_key:
            print_info(
                f"Found existing Kortix admin API key: {mask_sensitive_value(existing_key)}"
            )
            print_info("Using existing admin API key.")
        else:
            print_info("Generating a secure admin API key for Kortix administrative functions...")
            self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"] = generate_admin_api_key()
            print_success("Kortix admin API key generated.")

        print_success("Kortix admin configuration saved.")

    def collect_mcp_keys(self):
        """Collects the MCP configuration."""
        print_step(11, self.total_steps, "Collecting MCP Configuration")

        # Check if we already have an encryption key configured
        existing_key = self.env_vars["mcp"]["MCP_CREDENTIAL_ENCRYPTION_KEY"]
        if existing_key:
            print_info(
                f"Found existing MCP encryption key: {mask_sensitive_value(existing_key)}"
            )
            print_info("Using existing encryption key.")
        else:
            print_info("Generating a secure encryption key for MCP credentials...")
            self.env_vars["mcp"][
                "MCP_CREDENTIAL_ENCRYPTION_KEY"
            ] = generate_encryption_key()
            print_success("MCP encryption key generated.")

        print_success("MCP configuration saved.")

    def collect_pipedream_keys(self):
        """Collects the optional Pipedream configuration."""
        print_step(12, self.total_steps, "Collecting Pipedream Configuration (Optional)")

        # Check if we already have values configured
        has_existing = any(self.env_vars["pipedream"].values())
        if has_existing:
            print_info(
                "Found existing Pipedream configuration. Press Enter to keep current values or type new ones."
            )
        else:
            print_info("Pipedream enables workflow automation and MCP integrations.")
            print_info("Create a Pipedream Connect project at https://pipedream.com/connect to get your credentials.")
            print_info("You can skip this step and configure Pipedream later.")

        # Ask if user wants to configure Pipedream
        if not has_existing:
            configure_pipedream = input("Do you want to configure Pipedream integration? (y/N): ").lower().strip()
            if configure_pipedream != 'y':
                print_info("Skipping Pipedream configuration.")
                return

        self.env_vars["pipedream"]["PIPEDREAM_PROJECT_ID"] = self._get_input(
            "Enter your Pipedream Project ID (or press Enter to skip): ",
            validate_api_key,
            "Invalid Pipedream Project ID format. It should be a valid project ID.",
            allow_empty=True,
            default_value=self.env_vars["pipedream"]["PIPEDREAM_PROJECT_ID"],
        )

        if self.env_vars["pipedream"]["PIPEDREAM_PROJECT_ID"]:
            self.env_vars["pipedream"]["PIPEDREAM_CLIENT_ID"] = self._get_input(
                "Enter your Pipedream Client ID: ",
                validate_api_key,
                "Invalid Pipedream Client ID format. It should be a valid client ID.",
                default_value=self.env_vars["pipedream"]["PIPEDREAM_CLIENT_ID"],
            )

            self.env_vars["pipedream"]["PIPEDREAM_CLIENT_SECRET"] = self._get_input(
                "Enter your Pipedream Client Secret: ",
                validate_api_key,
                "Invalid Pipedream Client Secret format. It should be a valid client secret.",
                default_value=self.env_vars["pipedream"]["PIPEDREAM_CLIENT_SECRET"],
            )

            # Set default environment if not already configured
            if not self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"]:
                self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"] = "development"

            self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"] = self._get_input(
                "Enter your Pipedream Environment (development/production): ",
                lambda x, allow_empty=False: x.lower() in ["development", "production"] or allow_empty,
                "Invalid environment. Please enter 'development' or 'production'.",
                default_value=self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"],
            )

            print_success("Pipedream configuration saved.")
        else:
            print_info("Skipping Pipedream configuration.")

    def collect_slack_keys(self):
        """Collects the optional Slack configuration."""
        print_step(13, self.total_steps, "Collecting Slack Configuration (Optional)")

        # Check if we already have values configured
        has_existing = any(self.env_vars["slack"].values())
        if has_existing:
            print_info(
                "Found existing Slack configuration. Press Enter to keep current values or type new ones."
            )
        else:
            print_info("Slack integration enables communication and notifications.")
            print_info("Create a Slack app at https://api.slack.com/apps to get your credentials.")
            print_info("You can skip this step and configure Slack later.")

        # Ask if user wants to configure Slack
        if not has_existing:
            configure_slack = input("Do you want to configure Slack integration? (y/N): ").lower().strip()
            if configure_slack != 'y':
                print_info("Skipping Slack configuration.")
                return

        self.env_vars["slack"]["SLACK_CLIENT_ID"] = self._get_input(
            "Enter your Slack Client ID (or press Enter to skip): ",
            validate_api_key,
            "Invalid Slack Client ID format. It should be a valid API key.",
            allow_empty=True,
            default_value=self.env_vars["slack"]["SLACK_CLIENT_ID"],
        )

        if self.env_vars["slack"]["SLACK_CLIENT_ID"]:
            self.env_vars["slack"]["SLACK_CLIENT_SECRET"] = self._get_input(
                "Enter your Slack Client Secret: ",
                validate_api_key,
                "Invalid Slack Client Secret format. It should be a valid API key.",
                default_value=self.env_vars["slack"]["SLACK_CLIENT_SECRET"],
            )

            # Set default redirect URI if not already configured
            if not self.env_vars["slack"]["SLACK_REDIRECT_URI"]:
                self.env_vars["slack"]["SLACK_REDIRECT_URI"] = "http://localhost:3000/api/integrations/slack/callback"

            self.env_vars["slack"]["SLACK_REDIRECT_URI"] = self._get_input(
                "Enter your Slack Redirect URI: ",
                validate_url,
                "Invalid Slack Redirect URI format. It should be a valid URL.",
                default_value=self.env_vars["slack"]["SLACK_REDIRECT_URI"],
            )

            print_success("Slack configuration saved.")
        else:
            print_info("Skipping Slack configuration.")

    def collect_webhook_keys(self):
        """Collects the webhook configuration."""
        print_step(10, self.total_steps, "Collecting Webhook Configuration")

        # Check if we already have values configured
        has_existing = bool(self.env_vars["webhook"]["WEBHOOK_BASE_URL"])
        if has_existing:
            print_info(
                f"Found existing webhook URL: {self.env_vars['webhook']['WEBHOOK_BASE_URL']}"
            )
            print_info("Press Enter to keep current value or type a new one.")
        else:
            print_info("Webhook base URL is required for workflows to receive callbacks.")
            print_info("This must be a publicly accessible URL where Suna API can receive webhooks from Supabase Cron.")
            print_info("For local development, you can use services like ngrok or localtunnel to expose http://localhost:8000 to the internet.")

        self.env_vars["webhook"]["WEBHOOK_BASE_URL"] = self._get_input(
            "Enter your webhook base URL (e.g., https://your-domain.ngrok.io): ",
            validate_url,
            "Invalid webhook base URL format. It should be a valid publicly accessible URL.",
            default_value=self.env_vars["webhook"]["WEBHOOK_BASE_URL"],
        )

        # Ensure a webhook secret exists; generate a strong default if missing
        if not self.env_vars["webhook"].get("TRIGGER_WEBHOOK_SECRET"):
            print_info("Generating a secure TRIGGER_WEBHOOK_SECRET for webhook authentication...")
            self.env_vars["webhook"]["TRIGGER_WEBHOOK_SECRET"] = generate_webhook_secret()
            print_success("Webhook secret generated.")
        else:
            print_info("Found existing TRIGGER_WEBHOOK_SECRET. Keeping existing value.")

        print_success("Webhook configuration saved.")

    def configure_env_files(self):
        """Configures and writes the .env files for frontend and backend."""
        print_step(14, self.total_steps, "Configuring Environment Files")

        # --- Backend .env ---
        is_docker = self.env_vars["setup_method"] == "docker"
        redis_host = "redis" if is_docker else "localhost"

        backend_env = {
            "ENV_MODE": "local",
            **self.env_vars["supabase"],
            "REDIS_HOST": redis_host,
            "REDIS_PORT": "6379",
            **self.env_vars["llm"],
            **self.env_vars["search"],
            **self.env_vars["rapidapi"],
            **self.env_vars.get("cron", {}),
            **self.env_vars["slack"],
            **self.env_vars["webhook"],
            **self.env_vars["mcp"],
            **self.env_vars["pipedream"],
            **self.env_vars["daytona"],
            **self.env_vars["kortix"],
            "NEXT_PUBLIC_URL": "http://localhost:3000",
        }

        backend_env_content = f"# Generated by Suna install script for '{self.env_vars['setup_method']}' setup\n\n"
        for key, value in backend_env.items():
            backend_env_content += f"{key}={value or ''}\n"

        with open(os.path.join("backend", ".env"), "w") as f:
            f.write(backend_env_content)
        print_success("Created backend/.env file.")

        # --- Frontend .env.local ---
        frontend_env = {
            "NEXT_PUBLIC_SUPABASE_URL": self.env_vars["supabase"]["SUPABASE_URL"],
            "NEXT_PUBLIC_SUPABASE_ANON_KEY": self.env_vars["supabase"][
                "SUPABASE_ANON_KEY"
            ],
            "NEXT_PUBLIC_BACKEND_URL": "http://localhost:8000/api",
            "NEXT_PUBLIC_URL": "http://localhost:3000",
            "NEXT_PUBLIC_ENV_MODE": "LOCAL",
            "KORTIX_ADMIN_API_KEY": self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"],
        }

        frontend_env_content = "# Generated by Suna install script\n\n"
        for key, value in frontend_env.items():
            frontend_env_content += f"{key}={value or ''}\n"

        with open(os.path.join("frontend", ".env.local"), "w") as f:
            f.write(frontend_env_content)
        print_success("Created frontend/.env.local file.")

    def setup_supabase_database(self):
        """Links the project to Supabase and pushes database migrations."""
        print_step(15, self.total_steps, "Setting up Supabase Database")

        print_info(
            "This step will link your project to Supabase and push database migrations."
        )
        print_info(
            "You can skip this if you've already set up your database or prefer to do it manually."
        )

        prompt = "Do you want to skip the database setup? (y/N): "
        default_skip = False

        user_input = input(prompt).lower().strip()

        # Handle default behavior based on existing configuration
        if not user_input:
            skip_db_setup = default_skip
        else:
            skip_db_setup = user_input in ["y", "yes"]

        if skip_db_setup:
            print_info("Skipping Supabase database setup.")
            print_warning(
                "Remember to manually set up your Supabase database with the required migrations."
            )
            print_info(
                "You can find the migration files in the backend/supabase/migrations directory."
            )
            return

        try:
            subprocess.run(
                ["supabase", "--version"],
                check=True,
                capture_output=True,
                shell=IS_WINDOWS,
            )
        except (subprocess.SubprocessError, FileNotFoundError):
            print_error(
                "Supabase CLI not found. Install it from: https://supabase.com/docs/guides/cli"
            )
            print_info("You can skip this step and set up the database manually later.")
            skip_due_to_cli = (
                input("Skip database setup due to missing CLI? (y/N): ").lower().strip()
            )
            if skip_due_to_cli == "y":
                print_info("Skipping Supabase database setup.")
                return
            sys.exit(1)

        supabase_url = self.env_vars["supabase"]["SUPABASE_URL"]
        match = re.search(r"https://([^.]+)\.supabase\.co", supabase_url)
        if not match:
            print_error(f"Could not extract project reference from URL: {supabase_url}")
            sys.exit(1)
        project_ref = match.group(1)
        print_info(f"Detected Supabase project reference: {project_ref}")

        try:
            print_info("Logging into Supabase CLI...")
            subprocess.run(["supabase", "login"], check=True, shell=IS_WINDOWS)

            print_info(f"Linking to Supabase project {project_ref}...")
            subprocess.run(
                ["supabase", "link", "--project-ref", project_ref],
                cwd="backend",
                check=True,
                shell=IS_WINDOWS,
            )

            print_info("Pushing database migrations...")
            subprocess.run(
                ["supabase", "db", "push"], cwd="backend", check=True, shell=IS_WINDOWS
            )
            print_success("Database migrations pushed successfully.")

            print_warning("IMPORTANT: You must manually expose the 'basejump' schema.")
            print_info(
                "In your Supabase dashboard, go to: Project Settings -> Data API -> Exposed schemas"
            )
            print_info("Ensure 'basejump' is checked, then save.")
            input("Press Enter once you've completed this step...")

        except subprocess.SubprocessError as e:
            print_error(f"Failed to set up Supabase database: {e}")
            print_error(
                "Please check the Supabase CLI output for errors and try again."
            )
            sys.exit(1)

    def install_dependencies(self):
        """Installs frontend and backend dependencies for manual setup."""
        print_step(16, self.total_steps, "Installing Dependencies")

        if self.env_vars["setup_method"] == "docker":
            print_info(
                "Skipping dependency installation for Docker setup (will be handled by Docker Compose)."
            )
            return

        try:
            print_info("Installing frontend dependencies with npm...")
            subprocess.run(
                ["npm", "install"], cwd="frontend", check=True, shell=IS_WINDOWS
            )
            print_success("Frontend dependencies installed.")

            print_info("Installing backend dependencies with uv...")

            # Check if a virtual environment already exists
            venv_exists = os.path.exists(os.path.join("backend", ".venv"))

            if not venv_exists:
                print_info("Creating virtual environment...")
                subprocess.run(
                    ["uv", "venv"], cwd="backend", check=True, shell=IS_WINDOWS
                )
                print_success("Virtual environment created.")

            # Install dependencies in the virtual environment
            subprocess.run(
                ["uv", "sync"],
                cwd="backend",
                check=True,
                shell=IS_WINDOWS,
            )
            print_success("Backend dependencies and package installed.")

        except subprocess.SubprocessError as e:
            print_error(f"Failed to install dependencies: {e}")
            print_info("Please install dependencies manually and run the script again.")
            sys.exit(1)

    def start_suna(self):
        """Starts Suna using Docker Compose or shows instructions for manual startup."""
        print_step(17, self.total_steps, "Starting Suna")

        if self.env_vars["setup_method"] == "docker":
            print_info("Starting Suna with Docker Compose...")
            try:
                subprocess.run(
                    ["docker", "compose", "up", "-d", "--build"],
                    check=True,
                    shell=IS_WINDOWS,
                )
                print_info("Waiting for services to spin up...")
                time.sleep(15)
                # A simple check to see if containers are running
                result = subprocess.run(
                    ["docker", "compose", "ps"],
                    capture_output=True,
                    text=True,
                    shell=IS_WINDOWS,
                )
                if "backend" in result.stdout and "frontend" in result.stdout:
                    print_success("Suna services are starting up!")
                else:
                    print_warning(
                        "Some services might not be running. Check 'docker compose ps' for details."
                    )
            except subprocess.SubprocessError as e:
                print_error(f"Failed to start Suna with Docker Compose: {e}")
                print_info(
                    "Try running 'docker compose up --build' manually to diagnose the issue."
                )
                sys.exit(1)
        else:
            print_info("All configurations are complete. Manual start is required.")

    def final_instructions(self):
        """Shows final instructions to the user."""
        print(f"\n{Colors.GREEN}{Colors.BOLD}✨ Suna Setup Complete! ✨{Colors.ENDC}\n")

        print_info(
            "Suna is configured with your LLM API keys and ready to use."
        )
        print_info(
            f"Delete the {Colors.RED}.setup_progress{Colors.ENDC} file to reset the setup."
        )

        if self.env_vars["setup_method"] == "docker":
            print_info("Your Suna instance is ready to use!")
            print("\nUseful Docker commands:")
            print(
                f"  {Colors.CYAN}docker compose ps{Colors.ENDC}      - Check service status"
            )
            print(
                f"  {Colors.CYAN}docker compose logs -f{Colors.ENDC} - Follow logs"
            )
            print(
                f"  {Colors.CYAN}docker compose down{Colors.ENDC}    - Stop Suna services"
            )
            print(
                f"  {Colors.CYAN}python start.py{Colors.ENDC}        - To start or stop Suna services"
            )
        else:
            print_info(
                "To start Suna, you need to run these commands in separate terminals:"
            )
            print(
                f"\n{Colors.BOLD}1. Start Infrastructure (in project root):{Colors.ENDC}"
            )
            print(f"{Colors.CYAN}   docker compose up redis -d{Colors.ENDC}")

            print(f"\n{Colors.BOLD}2. Start Frontend (in a new terminal):{Colors.ENDC}")
            print(f"{Colors.CYAN}   cd frontend && npm run dev{Colors.ENDC}")

            print(f"\n{Colors.BOLD}3. Start Backend (in a new terminal):{Colors.ENDC}")
            print(f"{Colors.CYAN}   cd backend && uv run api.py{Colors.ENDC}")

            print(
                f"\n{Colors.BOLD}4. Start Background Worker (in a new terminal):{Colors.ENDC}"
            )
            print(
                f"{Colors.CYAN}   cd backend && uv run dramatiq run_agent_background{Colors.ENDC}"
            )

        print("\nOnce all services are running, access Suna at: http://localhost:3000")


if __name__ == "__main__":
    wizard = SetupWizard()
    wizard.run()