Merge pull request #1456 from nettanvirdev/updated-docs-self-hosting

fix: updated self hosting docs, removed slack configuration, updated setup.py
This commit is contained in:
Bobbie 2025-08-26 02:12:30 +05:30 committed by GitHub
commit 33ddd9303f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 353 additions and 409 deletions

View File

@ -1,71 +1,81 @@
# Copy this file to .env and fill in your values
## Copy this file to .env and fill values.
## Required keys are marked REQUIRED. Others are optional.
# Environment Mode
# Valid values: local, staging, production
ENV_MODE=local
#DATABASE
##### DATABASE (REQUIRED)
SUPABASE_URL=
SUPABASE_ANON_KEY=
SUPABASE_SERVICE_ROLE_KEY=
##### REDIS
# Use "redis" when using docker compose, or "localhost" for fully local
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=
# Set false for local/docker compose
REDIS_SSL=false
# LLM Providers:
##### LLM PROVIDERS (At least one is functionally REQUIRED)
# Provide at least one of the following:
ANTHROPIC_API_KEY=
OPENAI_API_KEY=
MODEL_TO_USE=
GROQ_API_KEY=
OPENROUTER_API_KEY=
GEMINI_API_KEY=
XAI_API_KEY=
# AWS Bedrock (only if using Bedrock)
AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
AWS_REGION_NAME=
GROQ_API_KEY=
OPENROUTER_API_KEY=
GEMINI_API_KEY=
MORPH_API_KEY=
# OpenAI-compatible
OPENAI_COMPATIBLE_API_KEY=
OPENAI_COMPATIBLE_API_BASE=
# DATA APIS
##### DATA / SEARCH (REQUIRED)
RAPID_API_KEY=
# WEB SEARCH
TAVILY_API_KEY=
# WEB SCRAPE
##### WEB SCRAPE (REQUIRED)
FIRECRAWL_API_KEY=
# Default used if empty: https://api.firecrawl.dev
FIRECRAWL_URL=
# Sandbox container provider:
##### AGENT SANDBOX (REQUIRED to use Daytona sandbox)
DAYTONA_API_KEY=
DAYTONA_SERVER_URL=
DAYTONA_TARGET=
LANGFUSE_PUBLIC_KEY="pk-REDACTED"
LANGFUSE_SECRET_KEY="sk-REDACTED"
LANGFUSE_HOST="https://cloud.langfuse.com"
# Used for storing API keys
API_KEY_SECRET=
SMITHERY_API_KEY=
DAYTONA_SERVER_URL=https://app.daytona.io/api
DAYTONA_TARGET=us
##### SECURITY & WEBHOOKS (Recommended)
MCP_CREDENTIAL_ENCRYPTION_KEY=
WEBHOOK_BASE_URL=http://localhost:8000
TRIGGER_WEBHOOK_SECRET=
WEBHOOK_BASE_URL=""
##### OBSERVABILITY (Optional)
LANGFUSE_PUBLIC_KEY=
LANGFUSE_SECRET_KEY=
LANGFUSE_HOST=https://cloud.langfuse.com
# Optional
SLACK_CLIENT_ID=""
SLACK_CLIENT_SECRET=""
SLACK_REDIRECT_URI=""
##### BILLING (Optional)
STRIPE_SECRET_KEY=
STRIPE_WEBHOOK_SECRET=
STRIPE_DEFAULT_PLAN_ID=
STRIPE_DEFAULT_TRIAL_DAYS=14
##### ADMIN
KORTIX_ADMIN_API_KEY=
PIPEDREAM_CLIENT_ID=""
PIPEDREAM_CLIENT_SECRET=""
PIPEDREAM_ENVIRONMENT=""
PIPEDREAM_PROJECT_ID=""
##### INTEGRATIONS
COMPOSIO_API_KEY=
COMPOSIO_WEBHOOK_SECRET=
COMPOSIO_API_BASE=https://backend.composio.dev
# Pipedream (optional; for MCP)
PIPEDREAM_PROJECT_ID=
PIPEDREAM_CLIENT_ID=
PIPEDREAM_CLIENT_SECRET=
PIPEDREAM_X_PD_ENVIRONMENT=development

View File

@ -1,328 +1,274 @@
# Suna Self-Hosting Guide
This guide provides detailed instructions for setting up and hosting your own instance of Suna, an open-source generalist AI Worker.
This guide walks you through hosting your own Suna instance, including required environment variables and two deployment options: with Docker and without Docker.
## Table of Contents
- [Overview](#overview)
- [Prerequisites](#prerequisites)
- [Installation Steps](#installation-steps)
- [Manual Configuration](#manual-configuration)
- [Post-Installation Steps](#post-installation-steps)
- [Troubleshooting](#troubleshooting)
- Overview
- Prerequisites
- 1. Supabase Project
- 2. API Keys (Required vs Optional)
- 3. Required Software
- Installation Steps
- Environment Configuration
- Backend (.env)
- Frontend (.env.local)
- Hosting Options
- A. With Docker (recommended)
- B. Without Docker (manual)
- Post-Installation Checks
- Troubleshooting
## Overview
Suna consists of four main components:
Suna is composed of:
1. **Backend API** - Python/FastAPI service for REST endpoints, thread management, and LLM integration
2. **Backend Worker** - Python/Dramatiq worker service for handling agent tasks
3. **Frontend** - Next.js/React application providing the user interface
4. **Agent Docker** - Isolated execution environment for each agent
5. **Supabase Database** - Handles data persistence and authentication
1. Backend API (FastAPI) - REST endpoints, thread management, LLM orchestration
2. Backend Worker (Dramatiq) - background agent task execution
3. Frontend (Next.js) - web UI
4. Agent Sandbox (Daytona) - isolated runtime for agent actions
5. Supabase - database and auth
## Prerequisites
Before starting the installation process, you'll need to set up the following:
### 1. Supabase Project
1. Create an account at [Supabase](https://supabase.com/)
2. Create a new project
3. Note down the following information (found in Project Settings → API):
- Project URL (e.g., `https://abcdefg.supabase.co`)
- API keys (anon key and service role key)
1. Create a project at https://supabase.com/
2. From Project Settings → API, copy:
- Project URL (e.g., https://<your>.supabase.co)
- anon key
- service role key
Also expose the basejump schema: Project Settings → API → Add `basejump` to Exposed Schemas.
### 2. API Keys
Obtain the following API keys:
Below is a summary of environment variables detected from the codebase and whether they are required for the backend to boot. Some are optional in the code, but functionally you'll want at least one LLM provider.
#### Required
Backend keys (by purpose):
- **LLM Provider** (at least one of the following):
| Purpose | Key | Required to boot | Default | Notes |
| ------------- | ----------------------------- | ---------------------------------------: | -------------------------- | ------------------------------------------------------------------- |
| Environment | ENV_MODE | No | local | local, staging, production |
| Database/Auth | SUPABASE_URL | Yes | - | Supabase project URL |
| | SUPABASE_ANON_KEY | Yes | - | Supabase anon key |
| | SUPABASE_SERVICE_ROLE_KEY | Yes | - | Supabase service role key |
| Redis | REDIS_HOST | Yes | redis | Use `redis` with Docker, `localhost` without |
| | REDIS_PORT | No | 6379 | |
| | REDIS_PASSWORD | No | - | |
| | REDIS_SSL | No | true | Set false for local/Docker compose |
| LLM providers | ANTHROPIC_API_KEY | Functionally required (at least one LLM) | - | Any one of Anthropic/OpenAI/Groq/OpenRouter/Gemini/X.ai/AWS Bedrock |
| | OPENAI_API_KEY | " | - | |
| | GROQ_API_KEY | " | - | |
| | OPENROUTER_API_KEY | " | - | |
| | GEMINI_API_KEY | " | - | |
| | XAI_API_KEY | " | - | |
| | AWS_ACCESS_KEY_ID | " (if using Bedrock) | - | |
| | AWS_SECRET_ACCESS_KEY | " (if using Bedrock) | - | |
| | AWS_REGION_NAME | " (if using Bedrock) | - | |
| Web search | TAVILY_API_KEY | Yes | - | Used by search tools |
| Web scraping | FIRECRAWL_API_KEY | Yes | - | Used by scraping tools |
| Data APIs | RAPID_API_KEY | Yes | - | Enables LinkedIn scraping and other data tools |
| Agent sandbox | DAYTONA_API_KEY | Yes | - | Required by Daytona SDK |
| | DAYTONA_SERVER_URL | Yes | https://app.daytona.io/api | |
| | DAYTONA_TARGET | Yes | us | region/target |
| Observability | LANGFUSE_PUBLIC_KEY | No | - | Optional tracing |
| | LANGFUSE_SECRET_KEY | No | - | |
| | LANGFUSE_HOST | No | https://cloud.langfuse.com | |
| Credentials | MCP_CREDENTIAL_ENCRYPTION_KEY | Recommended | - | Used to encrypt stored credentials; generated if missing |
| Triggers | WEBHOOK_BASE_URL | No | http://localhost:8000 | Public base URL for inbound webhooks |
| | TRIGGER_WEBHOOK_SECRET | Recommended | - | Verifies inbound triggers |
| Billing | STRIPE\_\* | No | - | Only if you enable billing |
| Admin | KORTIX_ADMIN_API_KEY | No | - | Protects admin APIs |
| Integrations | COMPOSIO\_\* | No | - | Optional Composio integration |
- [Anthropic](https://console.anthropic.com/) - Recommended for best performance
- [OpenAI](https://platform.openai.com/)
- [Groq](https://console.groq.com/)
- [OpenRouter](https://openrouter.ai/)
- [AWS Bedrock](https://aws.amazon.com/bedrock/)
Frontend keys:
- **AI-Powered Code Editing (Optional but Recommended)**:
- [Morph](https://morphllm.com/api-keys) - For intelligent code editing capabilities
| Key | Required | Default | Notes |
| ----------------------------- | -------: | --------------------- | ----------------------------------- |
| NEXT_PUBLIC_ENV_MODE | No | local | |
| NEXT_PUBLIC_SUPABASE_URL | Yes | - | Must match backend Supabase project |
| NEXT_PUBLIC_SUPABASE_ANON_KEY | Yes | - | Supabase anon key |
| NEXT_PUBLIC_BACKEND_URL | Yes | http://localhost:8000 | Backend API base URL |
| NEXT_PUBLIC_URL | No | http://localhost:3000 | Public site URL |
- **Search and Web Scraping**:
Notes:
- [Tavily](https://tavily.com/) - For enhanced search capabilities
- [Firecrawl](https://firecrawl.dev/) - For web scraping capabilities
- **Agent Execution**:
- [Daytona](https://app.daytona.io/) - For secure agent execution
- **Background Job Processing**:
- Supabase Cron - For workflows, automated tasks, and webhook handling
#### Optional
- **RapidAPI** - For accessing additional API services (enables LinkedIn scraping and other tools)
- **Custom MCP Servers** - For extending functionality with custom tools
- At least one LLM provider key is functionally required to run agents.
- Daytona keys are required by configuration. If you don't plan to use sandboxes, you can supply placeholder values to boot, but related features won't be usable.
### 3. Required Software
Ensure the following tools are installed on your system:
- Docker
- Git
- Python 3.11+
- Node.js 18+ and npm
- **[Docker](https://docs.docker.com/get-docker/)**
- **[Supabase CLI](https://supabase.com/docs/guides/local-development/cli/getting-started)**
- **[Git](https://git-scm.com/downloads)**
- **[Python 3.11](https://www.python.org/downloads/)**
Optional (but supported):
For manual setup, you'll also need:
- **[uv](https://docs.astral.sh/uv/)**
- **[Node.js & npm](https://nodejs.org/en/download/)**
- uv (Python package manager/runner)
- Supabase CLI
## Installation Steps
### 1. Clone the Repository
1. Clone the repository
```bash
git clone https://github.com/kortix-ai/suna.git
cd suna
```
### 2. Run the Setup Wizard
2. Prepare environment files
The setup wizard will guide you through the installation process:
- Backend: copy `backend/.env.example` to `backend/.env` and fill the required keys
- Frontend: copy `frontend/.env.example` to `frontend/.env.local` and fill the required keys
```bash
python setup.py
```
## Environment Configuration
The wizard will:
### Backend (`backend/.env`)
- Check if all required tools are installed
- Collect your API keys and configuration information
- Set up the Supabase database
- Configure environment files
- Install dependencies
- Start Suna using your preferred method
Minimal example (required keys only):
The setup wizard has 14 steps and includes progress saving, so you can resume if interrupted.
### 3. Supabase Configuration
During setup, you'll need to:
1. Log in to the Supabase CLI
2. Link your local project to your Supabase project
3. Push database migrations
4. Manually expose the 'basejump' schema in Supabase:
- Go to your Supabase project
- Navigate to Project Settings → API
- Add 'basejump' to the Exposed Schema section
### 4. Daytona Configuration
As part of the setup, you'll need to:
1. Create a Daytona account
2. Generate an API key
3. Create a Snapshot:
- Name: `kortix/suna:0.1.3.11`
- Image name: `kortix/suna:0.1.3.11`
- Entrypoint: `/usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf`
## Manual Configuration
If you prefer to configure your installation manually, or if you need to modify the configuration after installation, here's what you need to know:
### Backend Configuration (.env)
The backend configuration is stored in `backend/.env`
Example configuration:
```sh
# Environment Mode
```env
ENV_MODE=local
# DATABASE
SUPABASE_URL=https://your-project.supabase.co
SUPABASE_ANON_KEY=your-anon-key
SUPABASE_SERVICE_ROLE_KEY=your-service-role-key
SUPABASE_URL=YOUR_SUPABASE_URL
SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY
SUPABASE_SERVICE_ROLE_KEY=YOUR_SUPABASE_SERVICE_ROLE_KEY
# REDIS
# Redis: use redis for Docker, localhost for manual
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_PASSWORD=
REDIS_SSL=false
# LLM Providers
ANTHROPIC_API_KEY=your-anthropic-key
OPENAI_API_KEY=your-openai-key
OPENROUTER_API_KEY=your-openrouter-key
GEMINI_API_KEY=your-gemini-api-key
MORPH_API_KEY=
OPENAI_COMPATIBLE_API_KEY=your-openai-compatible-api-key
OPENAI_COMPATIBLE_API_BASE=your-openai-compatible-api-base
# LLM provider: provide at least one
# OPENAI_API_KEY=...
# ANTHROPIC_API_KEY=...
TAVILY_API_KEY=YOUR_TAVILY_API_KEY
FIRECRAWL_API_KEY=YOUR_FIRECRAWL_API_KEY
# WEB SEARCH
TAVILY_API_KEY=your-tavily-key
# WEB SCRAPE
FIRECRAWL_API_KEY=your-firecrawl-key
FIRECRAWL_URL=https://api.firecrawl.dev
# Sandbox container provider
DAYTONA_API_KEY=your-daytona-key
DAYTONA_API_KEY=YOUR_DAYTONA_API_KEY
DAYTONA_SERVER_URL=https://app.daytona.io/api
DAYTONA_TARGET=us
# Background job processing (Required)
WEBHOOK_BASE_URL=https://your-domain.ngrok.io
# Data APIs required by configuration
RAPID_API_KEY=YOUR_RAPID_API_KEY
# MCP Configuration
MCP_CREDENTIAL_ENCRYPTION_KEY=your-generated-encryption-key
MCP_CREDENTIAL_ENCRYPTION_KEY=GENERATED_FERNET_KEY
WEBHOOK_BASE_URL=http://localhost:8000
TRIGGER_WEBHOOK_SECRET=your_random_string
```
# Optional APIs
RAPID_API_KEY=your-rapidapi-key
# MCP server configurations in database
To generate a Fernet key for MCP_CREDENTIAL_ENCRYPTION_KEY:
```bash
python - << 'PY'
from cryptography.fernet import Fernet
print(Fernet.generate_key().decode())
PY
```
### Frontend (`frontend/.env.local`)
```env
NEXT_PUBLIC_ENV_MODE=local
NEXT_PUBLIC_SUPABASE_URL=YOUR_SUPABASE_URL
NEXT_PUBLIC_SUPABASE_ANON_KEY=YOUR_SUPABASE_ANON_KEY
NEXT_PUBLIC_BACKEND_URL=http://localhost:8000
NEXT_PUBLIC_URL=http://localhost:3000
```
### Frontend Configuration (.env.local)
## Hosting Options
The frontend configuration is stored in `frontend/.env.local` and includes:
### A. With Docker (recommended)
- Supabase connection details
- Backend API URL
This uses the root `docker-compose.yaml` to bring up Redis, backend, worker, and frontend.
Example configuration:
```sh
NEXT_PUBLIC_SUPABASE_URL=https://your-project.supabase.co
NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key
NEXT_PUBLIC_BACKEND_URL=http://localhost:8000/api
NEXT_PUBLIC_URL=http://localhost:3000
NEXT_PUBLIC_ENV_MODE=LOCAL
```
## Post-Installation Steps
After completing the installation, you'll need to:
1. **Create an account** - Use Supabase authentication to create your first account
2. **Verify installations** - Check that all components are running correctly
## Startup Options
Suna can be started in two ways:
### 1. Using Docker Compose (Recommended)
This method starts all required services in Docker containers:
1. Ensure `backend/.env` and `frontend/.env.local` are filled.
2. From the project root:
```bash
docker compose up -d # Use `docker compose down` to stop it later
# or
python start.py # Use the same to stop it later
docker compose up -d --build
```
### 2. Manual Startup
3. Access:
This method requires you to start each component separately:
- Frontend: http://localhost:3000
- Backend API: http://localhost:8000
1. Start Redis (required for backend):
4. Logs and lifecycle:
```bash
docker compose up redis -d
# or
python start.py # Use the same to stop it later
docker compose logs -f
docker compose ps
docker compose down
```
2. Start the frontend (in one terminal):
Redis is already included in this compose file. No extra steps are needed.
### B. Without Docker (manual)
You'll run Redis in Docker, then start the backend and worker locally, and the frontend via npm.
1. Start Redis in Docker
```bash
docker compose up -d redis
```
2. Backend API and Worker (Python venv)
```bash
cd backend
python -m venv .venv
source .venv/bin/activate  # On Windows: .venv\Scripts\activate
python -m pip install -e .
# Start the worker (terminal 1)
python -m dramatiq run_agent_background --processes 4 --threads 4
# Start the API (terminal 2)
uvicorn api:app --host 0.0.0.0 --port 8000 --reload
```
Alternative using uv:
```bash
# terminal 1
cd backend
uv run dramatiq --processes 4 --threads 4 run_agent_background
# terminal 2
cd backend
uv run uvicorn api:app --host 0.0.0.0 --port 8000 --reload
```
3. Frontend
```bash
cd frontend
npm install
npm run dev
```
3. Start the backend (in another terminal):
Visit http://localhost:3000 and sign up via Supabase auth.
```bash
cd backend
uv run api.py
```
## Post-Installation Checks
4. Start the worker (in one more terminal):
```bash
cd backend
uv run dramatiq run_agent_background
```
- Frontend loads at http://localhost:3000
- Backend health: http://localhost:8000/health returns OK
- Create an account and start an agent; verify logs for worker activity
## Troubleshooting
### Common Issues
- Docker services fail: check `docker compose logs -f` and port conflicts (3000, 8000, 6379)
- Supabase errors: confirm URL and keys; basejump schema is exposed
- LLM errors: ensure at least one LLM API key is set and not rate-limited
- Daytona errors: verify API key/URL/target; sandbox operations require valid Daytona setup
- Redis connection errors: ensure `REDIS_HOST=redis` when using Docker, `localhost` when fully local
- If you see an error saying `ghcr.io/suna-ai/suna-backend:latest` already exists, run the Docker command again; it should succeed the second time.
1. **Docker services not starting**
If you get a startup error complaining about missing configuration fields, it means a required key from the table above is missing in `backend/.env`.
- Check Docker logs: `docker compose logs`
- Ensure Docker is running correctly
- Verify port availability (3000 for frontend, 8000 for backend)
2. **Database connection issues**
- Verify Supabase configuration
- Check if 'basejump' schema is exposed in Supabase
3. **LLM API key issues**
- Verify API keys are correctly entered
- Check for API usage limits or restrictions
4. **Daytona connection issues**
- Verify Daytona API key
- Check if the container image is correctly configured
5. **Setup wizard issues**
- Delete `.setup_progress` file to reset the setup wizard
- Check that all required tools are installed and accessible
### Logs
To view logs and diagnose issues:
```bash
# Docker Compose logs
docker compose logs -f
# Frontend logs (manual setup)
cd frontend
npm run dev
# Backend logs (manual setup)
cd backend
uv run api.py
# Worker logs (manual setup)
cd backend
uv run dramatiq run_agent_background
```
### Resuming Setup
If the setup wizard is interrupted, you can resume from where you left off by running:
```bash
python setup.py
```
The wizard will detect your progress and continue from the last completed step.
---
For further assistance, join the [Suna Discord Community](https://discord.gg/Py6pCBUUPw) or check the [GitHub repository](https://github.com/kortix-ai/suna) for updates and issues.
For help, join the Suna Discord or open an issue on GitHub.

View File

@ -5,7 +5,5 @@ NEXT_PUBLIC_BACKEND_URL="http://localhost:8000/api"
NEXT_PUBLIC_URL="http://localhost:3000"
NEXT_PUBLIC_GOOGLE_CLIENT_ID=""
NEXT_PUBLIC_POSTHOG_KEY=""
KORTIX_ADMIN_API_KEY=""
EDGE_CONFIG="https://edge-config.vercel.com/REDACTED?token=REDACTED"

238
setup.py
View File

@ -148,11 +148,6 @@ def load_existing_env_vars():
"WEBHOOK_BASE_URL": backend_env.get("WEBHOOK_BASE_URL", ""),
"TRIGGER_WEBHOOK_SECRET": backend_env.get("TRIGGER_WEBHOOK_SECRET", ""),
},
"slack": {
"SLACK_CLIENT_ID": backend_env.get("SLACK_CLIENT_ID", ""),
"SLACK_CLIENT_SECRET": backend_env.get("SLACK_CLIENT_SECRET", ""),
"SLACK_REDIRECT_URI": backend_env.get("SLACK_REDIRECT_URI", ""),
},
"mcp": {
"MCP_CREDENTIAL_ENCRYPTION_KEY": backend_env.get(
"MCP_CREDENTIAL_ENCRYPTION_KEY", ""
@ -271,7 +266,6 @@ class SetupWizard:
"search": existing_env_vars["search"],
"rapidapi": existing_env_vars["rapidapi"],
"cron": existing_env_vars.get("cron", {}),
"slack": existing_env_vars["slack"],
"webhook": existing_env_vars["webhook"],
"mcp": existing_env_vars["mcp"],
"pipedream": existing_env_vars["pipedream"],
@ -330,33 +324,35 @@ class SetupWizard:
# Check RapidAPI (optional)
if self.env_vars["rapidapi"]["RAPID_API_KEY"]:
config_items.append(f"{Colors.GREEN}{Colors.ENDC} RapidAPI (optional)")
config_items.append(
f"{Colors.GREEN}{Colors.ENDC} RapidAPI (optional)")
else:
config_items.append(f"{Colors.CYAN}{Colors.ENDC} RapidAPI (optional)")
config_items.append(
f"{Colors.CYAN}{Colors.ENDC} RapidAPI (optional)")
# Check Cron/Webhook setup
if self.env_vars["webhook"]["WEBHOOK_BASE_URL"]:
config_items.append(f"{Colors.GREEN}{Colors.ENDC} Supabase Cron & Webhooks")
config_items.append(
f"{Colors.GREEN}{Colors.ENDC} Supabase Cron & Webhooks")
else:
config_items.append(f"{Colors.YELLOW}{Colors.ENDC} Supabase Cron & Webhooks")
config_items.append(
f"{Colors.YELLOW}{Colors.ENDC} Supabase Cron & Webhooks")
# Check MCP encryption key
if self.env_vars["mcp"]["MCP_CREDENTIAL_ENCRYPTION_KEY"]:
config_items.append(f"{Colors.GREEN}{Colors.ENDC} MCP encryption key")
config_items.append(
f"{Colors.GREEN}{Colors.ENDC} MCP encryption key")
else:
config_items.append(f"{Colors.YELLOW}{Colors.ENDC} MCP encryption key")
config_items.append(
f"{Colors.YELLOW}{Colors.ENDC} MCP encryption key")
# Check Pipedream configuration
if self.env_vars["pipedream"]["PIPEDREAM_PROJECT_ID"]:
config_items.append(f"{Colors.GREEN}{Colors.ENDC} Pipedream (optional)")
config_items.append(
f"{Colors.GREEN}{Colors.ENDC} Pipedream (optional)")
else:
config_items.append(f"{Colors.CYAN}{Colors.ENDC} Pipedream (optional)")
# Check Slack configuration
if self.env_vars["slack"]["SLACK_CLIENT_ID"]:
config_items.append(f"{Colors.GREEN}{Colors.ENDC} Slack (optional)")
else:
config_items.append(f"{Colors.CYAN}{Colors.ENDC} Slack (optional)")
config_items.append(
f"{Colors.CYAN}{Colors.ENDC} Pipedream (optional)")
# Check Webhook configuration
if self.env_vars["webhook"]["WEBHOOK_BASE_URL"]:
@ -366,11 +362,14 @@ class SetupWizard:
# Check Morph (optional but recommended)
if self.env_vars["llm"].get("MORPH_API_KEY"):
config_items.append(f"{Colors.GREEN}{Colors.ENDC} Morph (Code Editing)")
config_items.append(
f"{Colors.GREEN}{Colors.ENDC} Morph (Code Editing)")
elif self.env_vars["llm"].get("OPENROUTER_API_KEY"):
config_items.append(f"{Colors.CYAN}{Colors.ENDC} Morph (fallback to OpenRouter)")
config_items.append(
f"{Colors.CYAN}{Colors.ENDC} Morph (fallback to OpenRouter)")
else:
config_items.append(f"{Colors.YELLOW}{Colors.ENDC} Morph (recommended)")
config_items.append(
f"{Colors.YELLOW}{Colors.ENDC} Morph (recommended)")
# Check Kortix configuration
if self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"]:
@ -408,12 +407,11 @@ class SetupWizard:
self.run_step(10, self.collect_webhook_keys)
self.run_step(11, self.collect_mcp_keys)
self.run_step(12, self.collect_pipedream_keys)
self.run_step(13, self.collect_slack_keys)
# Removed duplicate webhook collection step
self.run_step(14, self.configure_env_files)
self.run_step(15, self.setup_supabase_database)
self.run_step(16, self.install_dependencies)
self.run_step(17, self.start_suna)
self.run_step(13, self.configure_env_files)
self.run_step(14, self.setup_supabase_database)
self.run_step(15, self.install_dependencies)
self.run_step(16, self.start_suna)
self.final_instructions()
@ -669,8 +667,10 @@ class SetupWizard:
f"Visit {Colors.GREEN}https://app.daytona.io/dashboard/snapshots{Colors.ENDC}{Colors.CYAN} to create a snapshot."
)
print_info("Create a snapshot with these exact settings:")
print_info(f" - Name:\t\t{Colors.GREEN}kortix/suna:0.1.3.11{Colors.ENDC}")
print_info(f" - Snapshot name:\t{Colors.GREEN}kortix/suna:0.1.3.11{Colors.ENDC}")
print_info(
f" - Name:\t\t{Colors.GREEN}kortix/suna:0.1.3.11{Colors.ENDC}")
print_info(
f" - Snapshot name:\t{Colors.GREEN}kortix/suna:0.1.3.11{Colors.ENDC}")
print_info(
f" - Entrypoint:\t{Colors.GREEN}/usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf{Colors.ENDC}"
)
@ -690,7 +690,8 @@ class SetupWizard:
print_info("Found existing LLM API keys:")
for key, value in existing_keys.items():
provider_name = key.split("_")[0].capitalize()
print_info(f" - {provider_name}: {mask_sensitive_value(value)}")
print_info(
f" - {provider_name}: {mask_sensitive_value(value)}")
print_info(
"You can add more providers or press Enter to keep existing configuration."
)
@ -722,7 +723,8 @@ class SetupWizard:
status = (
f" {Colors.GREEN}(configured){Colors.ENDC}" if current_value else ""
)
print(f"{Colors.CYAN}[{key}] {Colors.GREEN}{name}{Colors.ENDC}{status}")
print(
f"{Colors.CYAN}[{key}] {Colors.GREEN}{name}{Colors.ENDC}{status}")
# Allow Enter to skip if we already have keys configured
if has_existing:
@ -735,10 +737,12 @@ class SetupWizard:
choices_input = input("Select providers: ").strip()
choices = choices_input.replace(",", " ").split()
selected_keys = {providers[c][1] for c in choices if c in providers}
selected_keys = {providers[c][1]
for c in choices if c in providers}
if not selected_keys and not has_existing:
print_error("Invalid selection. Please choose at least one provider.")
print_error(
"Invalid selection. Please choose at least one provider.")
continue
for key in selected_keys:
@ -756,18 +760,21 @@ class SetupWizard:
def collect_morph_api_key(self):
"""Collects the optional MorphLLM API key for code editing."""
print_step(6, self.total_steps, "Configure AI-Powered Code Editing (Optional)")
print_step(6, self.total_steps,
"Configure AI-Powered Code Editing (Optional)")
existing_key = self.env_vars["llm"].get("MORPH_API_KEY", "")
openrouter_key = self.env_vars["llm"].get("OPENROUTER_API_KEY", "")
if existing_key:
print_info(f"Found existing Morph API key: {mask_sensitive_value(existing_key)}")
print_info(
f"Found existing Morph API key: {mask_sensitive_value(existing_key)}")
print_info("AI-powered code editing is enabled using Morph.")
return
print_info("Suna uses Morph for fast, intelligent code editing.")
print_info("This is optional but highly recommended for the best experience.")
print_info(
"This is optional but highly recommended for the best experience.")
if openrouter_key:
print_info(
@ -775,13 +782,15 @@ class SetupWizard:
)
while True:
choice = input("Do you want to add a Morph API key now? (y/n): ").lower().strip()
choice = input(
"Do you want to add a Morph API key now? (y/n): ").lower().strip()
if choice in ['y', 'n', '']:
break
print_error("Invalid input. Please enter 'y' or 'n'.")
if choice == 'y':
print_info("Great! Please get your API key from: https://morphllm.com/api-keys")
print_info(
"Great! Please get your API key from: https://morphllm.com/api-keys")
morph_api_key = self._get_input(
"Enter your Morph API key (or press Enter to skip): ",
validate_api_key,
@ -791,21 +800,27 @@ class SetupWizard:
)
if morph_api_key:
self.env_vars["llm"]["MORPH_API_KEY"] = morph_api_key
print_success("Morph API key saved. AI-powered code editing is enabled.")
print_success(
"Morph API key saved. AI-powered code editing is enabled.")
else:
if openrouter_key:
print_info("Skipping Morph key. OpenRouter will be used for code editing.")
print_info(
"Skipping Morph key. OpenRouter will be used for code editing.")
else:
print_warning("Skipping Morph key. Code editing will use a less capable model.")
print_warning(
"Skipping Morph key. Code editing will use a less capable model.")
else:
if openrouter_key:
print_info("Okay, OpenRouter will be used as a fallback for code editing.")
print_info(
"Okay, OpenRouter will be used as a fallback for code editing.")
else:
print_warning("Okay, code editing will use a less capable model without a Morph or OpenRouter key.")
print_warning(
"Okay, code editing will use a less capable model without a Morph or OpenRouter key.")
def collect_search_api_keys(self):
"""Collects API keys for search and web scraping tools."""
print_step(7, self.total_steps, "Collecting Search and Scraping API Keys")
print_step(7, self.total_steps,
"Collecting Search and Scraping API Keys")
# Check if we already have values configured
has_existing = any(self.env_vars["search"].values())
@ -814,7 +829,8 @@ class SetupWizard:
"Found existing search API keys. Press Enter to keep current values or type new ones."
)
else:
print_info("Suna uses Tavily for search and Firecrawl for web scraping.")
print_info(
"Suna uses Tavily for search and Firecrawl for web scraping.")
print_info(
"Get a Tavily key at https://tavily.com and a Firecrawl key at https://firecrawl.dev"
)
@ -877,7 +893,8 @@ class SetupWizard:
)
print_info("Press Enter to keep current value or type a new one.")
else:
print_info("A RapidAPI key enables extra tools like LinkedIn scraping.")
print_info(
"A RapidAPI key enables extra tools like LinkedIn scraping.")
print_info(
"Get a key at https://rapidapi.com/. You can skip this and add it later."
)
@ -907,13 +924,13 @@ class SetupWizard:
)
print_info("Using existing admin API key.")
else:
print_info("Generating a secure admin API key for Kortix administrative functions...")
print_info(
"Generating a secure admin API key for Kortix administrative functions...")
self.env_vars["kortix"]["KORTIX_ADMIN_API_KEY"] = generate_admin_api_key()
print_success("Kortix admin API key generated.")
print_success("Kortix admin configuration saved.")
def collect_mcp_keys(self):
"""Collects the MCP configuration."""
print_step(11, self.total_steps, "Collecting MCP Configuration")
@ -926,7 +943,8 @@ class SetupWizard:
)
print_info("Using existing encryption key.")
else:
print_info("Generating a secure encryption key for MCP credentials...")
print_info(
"Generating a secure encryption key for MCP credentials...")
self.env_vars["mcp"][
"MCP_CREDENTIAL_ENCRYPTION_KEY"
] = generate_encryption_key()
@ -936,7 +954,8 @@ class SetupWizard:
def collect_pipedream_keys(self):
"""Collects the optional Pipedream configuration."""
print_step(12, self.total_steps, "Collecting Pipedream Configuration (Optional)")
print_step(12, self.total_steps,
"Collecting Pipedream Configuration (Optional)")
# Check if we already have values configured
has_existing = any(self.env_vars["pipedream"].values())
@ -945,13 +964,16 @@ class SetupWizard:
"Found existing Pipedream configuration. Press Enter to keep current values or type new ones."
)
else:
print_info("Pipedream enables workflow automation and MCP integrations.")
print_info("Create a Pipedream Connect project at https://pipedream.com/connect to get your credentials.")
print_info(
"Pipedream enables workflow automation and MCP integrations.")
print_info(
"Create a Pipedream Connect project at https://pipedream.com/connect to get your credentials.")
print_info("You can skip this step and configure Pipedream later.")
# Ask if user wants to configure Pipedream
if not has_existing:
configure_pipedream = input("Do you want to configure Pipedream integration? (y/N): ").lower().strip()
configure_pipedream = input(
"Do you want to configure Pipedream integration? (y/N): ").lower().strip()
if configure_pipedream != 'y':
print_info("Skipping Pipedream configuration.")
return
@ -985,7 +1007,8 @@ class SetupWizard:
self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"] = self._get_input(
"Enter your Pipedream Environment (development/production): ",
lambda x, allow_empty=False: x.lower() in ["development", "production"] or allow_empty,
lambda x, allow_empty=False: x.lower(
) in ["development", "production"] or allow_empty,
"Invalid environment. Please enter 'development' or 'production'.",
default_value=self.env_vars["pipedream"]["PIPEDREAM_X_PD_ENVIRONMENT"],
)
@ -994,59 +1017,6 @@ class SetupWizard:
else:
print_info("Skipping Pipedream configuration.")
def collect_slack_keys(self):
"""Collects the optional Slack configuration."""
print_step(13, self.total_steps, "Collecting Slack Configuration (Optional)")
# Check if we already have values configured
has_existing = any(self.env_vars["slack"].values())
if has_existing:
print_info(
"Found existing Slack configuration. Press Enter to keep current values or type new ones."
)
else:
print_info("Slack integration enables communication and notifications.")
print_info("Create a Slack app at https://api.slack.com/apps to get your credentials.")
print_info("You can skip this step and configure Slack later.")
# Ask if user wants to configure Slack
if not has_existing:
configure_slack = input("Do you want to configure Slack integration? (y/N): ").lower().strip()
if configure_slack != 'y':
print_info("Skipping Slack configuration.")
return
self.env_vars["slack"]["SLACK_CLIENT_ID"] = self._get_input(
"Enter your Slack Client ID (or press Enter to skip): ",
validate_api_key,
"Invalid Slack Client ID format. It should be a valid API key.",
allow_empty=True,
default_value=self.env_vars["slack"]["SLACK_CLIENT_ID"],
)
if self.env_vars["slack"]["SLACK_CLIENT_ID"]:
self.env_vars["slack"]["SLACK_CLIENT_SECRET"] = self._get_input(
"Enter your Slack Client Secret: ",
validate_api_key,
"Invalid Slack Client Secret format. It should be a valid API key.",
default_value=self.env_vars["slack"]["SLACK_CLIENT_SECRET"],
)
# Set default redirect URI if not already configured
if not self.env_vars["slack"]["SLACK_REDIRECT_URI"]:
self.env_vars["slack"]["SLACK_REDIRECT_URI"] = "http://localhost:3000/api/integrations/slack/callback"
self.env_vars["slack"]["SLACK_REDIRECT_URI"] = self._get_input(
"Enter your Slack Redirect URI: ",
validate_url,
"Invalid Slack Redirect URI format. It should be a valid URL.",
default_value=self.env_vars["slack"]["SLACK_REDIRECT_URI"],
)
print_success("Slack configuration saved.")
else:
print_info("Skipping Slack configuration.")
def collect_webhook_keys(self):
"""Collects the webhook configuration."""
print_step(10, self.total_steps, "Collecting Webhook Configuration")
@ -1059,9 +1029,12 @@ class SetupWizard:
)
print_info("Press Enter to keep current value or type a new one.")
else:
print_info("Webhook base URL is required for workflows to receive callbacks.")
print_info("This must be a publicly accessible URL where Suna API can receive webhooks from Supabase Cron.")
print_info("For local development, you can use services like ngrok or localtunnel to expose http://localhost:8000 to the internet.")
print_info(
"Webhook base URL is required for workflows to receive callbacks.")
print_info(
"This must be a publicly accessible URL where Suna API can receive webhooks from Supabase Cron.")
print_info(
"For local development, you can use services like ngrok or localtunnel to expose http://localhost:8000 to the internet.")
self.env_vars["webhook"]["WEBHOOK_BASE_URL"] = self._get_input(
"Enter your webhook base URL (e.g., https://your-domain.ngrok.io): ",
@ -1072,11 +1045,14 @@ class SetupWizard:
# Ensure a webhook secret exists; generate a strong default if missing
if not self.env_vars["webhook"].get("TRIGGER_WEBHOOK_SECRET"):
print_info("Generating a secure TRIGGER_WEBHOOK_SECRET for webhook authentication...")
self.env_vars["webhook"]["TRIGGER_WEBHOOK_SECRET"] = generate_webhook_secret()
print_info(
"Generating a secure TRIGGER_WEBHOOK_SECRET for webhook authentication...")
self.env_vars["webhook"]["TRIGGER_WEBHOOK_SECRET"] = generate_webhook_secret(
)
print_success("Webhook secret generated.")
else:
print_info("Found existing TRIGGER_WEBHOOK_SECRET. Keeping existing value.")
print_info(
"Found existing TRIGGER_WEBHOOK_SECRET. Keeping existing value.")
print_success("Webhook configuration saved.")
@ -1088,6 +1064,12 @@ class SetupWizard:
is_docker = self.env_vars["setup_method"] == "docker"
redis_host = "redis" if is_docker else "localhost"
# Generate ENCRYPTION_KEY using the same logic as generate_encryption_key()
import base64
import secrets
encryption_key = base64.b64encode(
secrets.token_bytes(32)).decode("utf-8")
backend_env = {
"ENV_MODE": "local",
**self.env_vars["supabase"],
@ -1097,12 +1079,12 @@ class SetupWizard:
**self.env_vars["search"],
**self.env_vars["rapidapi"],
**self.env_vars.get("cron", {}),
**self.env_vars["slack"],
**self.env_vars["webhook"],
**self.env_vars["mcp"],
**self.env_vars["pipedream"],
**self.env_vars["daytona"],
**self.env_vars["kortix"],
"ENCRYPTION_KEY": encryption_key,
"NEXT_PUBLIC_URL": "http://localhost:3000",
}
@ -1112,7 +1094,7 @@ class SetupWizard:
with open(os.path.join("backend", ".env"), "w") as f:
f.write(backend_env_content)
print_success("Created backend/.env file.")
print_success("Created backend/.env file with ENCRYPTION_KEY.")
# --- Frontend .env.local ---
frontend_env = {
@ -1177,7 +1159,8 @@ class SetupWizard:
print_error(
"Supabase CLI not found. Install it from: https://supabase.com/docs/guides/cli"
)
print_info("You can skip this step and set up the database manually later.")
print_info(
"You can skip this step and set up the database manually later.")
skip_due_to_cli = (
input("Skip database setup due to missing CLI? (y/N): ").lower().strip()
)
@ -1189,7 +1172,8 @@ class SetupWizard:
supabase_url = self.env_vars["supabase"]["SUPABASE_URL"]
match = re.search(r"https://([^.]+)\.supabase\.co", supabase_url)
if not match:
print_error(f"Could not extract project reference from URL: {supabase_url}")
print_error(
f"Could not extract project reference from URL: {supabase_url}")
sys.exit(1)
project_ref = match.group(1)
print_info(f"Detected Supabase project reference: {project_ref}")
@ -1212,7 +1196,8 @@ class SetupWizard:
)
print_success("Database migrations pushed successfully.")
print_warning("IMPORTANT: You must manually expose the 'basejump' schema.")
print_warning(
"IMPORTANT: You must manually expose the 'basejump' schema.")
print_info(
"In your Supabase dashboard, go to: Project Settings -> Data API -> Exposed schemas"
)
@ -1265,7 +1250,8 @@ class SetupWizard:
except subprocess.SubprocessError as e:
print_error(f"Failed to install dependencies: {e}")
print_info("Please install dependencies manually and run the script again.")
print_info(
"Please install dependencies manually and run the script again.")
sys.exit(1)
def start_suna(self):
@ -1301,11 +1287,13 @@ class SetupWizard:
)
sys.exit(1)
else:
print_info("All configurations are complete. Manual start is required.")
print_info(
"All configurations are complete. Manual start is required.")
def final_instructions(self):
"""Shows final instructions to the user."""
print(f"\n{Colors.GREEN}{Colors.BOLD}✨ Suna Setup Complete! ✨{Colors.ENDC}\n")
print(
f"\n{Colors.GREEN}{Colors.BOLD}✨ Suna Setup Complete! ✨{Colors.ENDC}\n")
print_info(
f"Suna is configured with your LLM API keys and ready to use."
@ -1338,10 +1326,12 @@ class SetupWizard:
)
print(f"{Colors.CYAN} docker compose up redis -d{Colors.ENDC}")
print(f"\n{Colors.BOLD}2. Start Frontend (in a new terminal):{Colors.ENDC}")
print(
f"\n{Colors.BOLD}2. Start Frontend (in a new terminal):{Colors.ENDC}")
print(f"{Colors.CYAN} cd frontend && npm run dev{Colors.ENDC}")
print(f"\n{Colors.BOLD}3. Start Backend (in a new terminal):{Colors.ENDC}")
print(
f"\n{Colors.BOLD}3. Start Backend (in a new terminal):{Colors.ENDC}")
print(f"{Colors.CYAN} cd backend && uv run api.py{Colors.ENDC}")
print(