mirror of https://github.com/kortix-ai/suna.git
refactor: replace QStash with Supabase Cron for background job processing
- Removed references to QStash in documentation and codebase.
- Updated setup and configuration files to reflect the transition to Supabase Cron.
- Implemented new functions for scheduling and unscheduling jobs using Supabase Cron.
- Added webhook authentication using a shared secret for enhanced security.
- Cleaned up related dependencies and example configurations.
This commit is contained in:
parent d295937112
commit 2cba5392ca
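The shared-secret authentication mentioned above is symmetric: the setup wizard generates `TRIGGER_WEBHOOK_SECRET`, Supabase Cron sends it in an `X-Trigger-Secret` header, and the backend compares it in constant time. A condensed sketch of the two halves that appear later in this diff (the standalone function names here are illustrative):

```python
# Condensed from the diff: secret generation (setup.py) and verification
# (backend triggers API). Wrapper function names are illustrative only.
import hmac
import os
import secrets

def generate_webhook_secret() -> str:
    # 32 random bytes as hex (64 hex chars), as in setup.py
    return secrets.token_hex(32)

def verify_webhook_request(incoming_secret: str) -> bool:
    secret = os.getenv("TRIGGER_WEBHOOK_SECRET", "")
    # compare_digest avoids leaking the secret via timing differences
    return bool(secret) and hmac.compare_digest(incoming_secret, secret)
```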
@@ -70,7 +70,6 @@ project/
- **LLM Integration**: LiteLLM for multi-provider support, structured prompts
- **Tool System**: Dual schema decorators (OpenAPI + XML), consistent ToolResult
- **Real-time**: Supabase subscriptions for live updates
- **Background Jobs**: Dramatiq for async processing, QStash for scheduling

## Key Technologies
@@ -41,7 +41,6 @@ Before contributing, ensure you have access to:
- Daytona account (for agent execution)
- Tavily API key (for search)
- Firecrawl API key (for web scraping)
- QStash account (for background jobs)

**Optional:**
@@ -99,7 +99,6 @@ The setup process includes:
- Setting up Daytona for secure agent execution
- Integrating with LLM providers (Anthropic, OpenAI, OpenRouter, etc.)
- Configuring web search and scraping capabilities (Tavily, Firecrawl)
- Setting up QStash for background job processing and workflows
- Configuring webhook handling for automated tasks
- Optional integrations (RapidAPI for data providers)
@@ -147,14 +146,13 @@ We welcome contributions from the community! Please see our [Contributing Guide]
### Technologies

- [Daytona](https://daytona.io/) - Secure agent execution environment
- [Supabase](https://supabase.com/) - Database and authentication
- [Supabase](https://supabase.com/) - Database, Cron, and Authentication
- [Playwright](https://playwright.dev/) - Browser automation
- [OpenAI](https://openai.com/) - LLM provider
- [Anthropic](https://www.anthropic.com/) - LLM provider
- [Morph](https://morphllm.com/) - For AI-powered code editing
- [Tavily](https://tavily.com/) - Search capabilities
- [Firecrawl](https://firecrawl.dev/) - Web scraping capabilities
- [QStash](https://upstash.com/qstash) - Background job processing and workflows
- [RapidAPI](https://rapidapi.com/) - API services
- Custom MCP servers - Extend functionality with custom tools
@@ -54,11 +54,6 @@ SMITHERY_API_KEY=

MCP_CREDENTIAL_ENCRYPTION_KEY=

QSTASH_URL="https://qstash.upstash.io"
QSTASH_TOKEN=""
QSTASH_CURRENT_SIGNING_KEY=""
QSTASH_NEXT_SIGNING_KEY=""

WEBHOOK_BASE_URL=""

# Optional
@@ -96,11 +96,6 @@ DAYTONA_API_KEY=your-daytona-key
DAYTONA_SERVER_URL=https://app.daytona.io/api
DAYTONA_TARGET=us

# Background Job Processing (Required)
QSTASH_URL=https://qstash.upstash.io
QSTASH_TOKEN=your-qstash-token
QSTASH_CURRENT_SIGNING_KEY=your-current-signing-key
QSTASH_NEXT_SIGNING_KEY=your-next-signing-key
WEBHOOK_BASE_URL=https://yourdomain.com

# MCP Configuration
@@ -58,7 +58,6 @@ dependencies = [
    "cryptography>=41.0.0",
    "apscheduler>=3.10.0",
    "croniter>=1.4.0",
    "qstash>=2.0.0",
    "structlog==25.4.0",
    "PyPDF2==3.0.1",
    "python-docx==1.1.0",
@@ -0,0 +1,96 @@
-- Enable Supabase Cron and HTTP extensions and provide helper RPCs
-- This migration replaces QStash-based scheduling with Supabase Cron

BEGIN;

-- Enable required extensions if not already enabled
CREATE EXTENSION IF NOT EXISTS pg_cron;
CREATE EXTENSION IF NOT EXISTS pg_net;

-- Helper function to schedule an HTTP POST via Supabase Cron
-- Overwrites existing job with the same name
CREATE OR REPLACE FUNCTION public.schedule_trigger_http(
    job_name text,
    schedule text,
    url text,
    headers jsonb DEFAULT '{}'::jsonb,
    body jsonb DEFAULT '{}'::jsonb,
    timeout_ms integer DEFAULT 8000
) RETURNS bigint
LANGUAGE plpgsql
SECURITY DEFINER
AS $$
DECLARE
    job_id bigint;
    sql_text text;
    headers_fixed jsonb;
    body_fixed jsonb;
BEGIN
    -- Unschedule any existing jobs with the same name
    PERFORM cron.unschedule(j.jobid)
    FROM cron.job j
    WHERE j.jobname = job_name;

    -- Normalize headers/body in case callers pass JSON strings instead of objects
    headers_fixed := COALESCE(
        CASE
            WHEN headers IS NULL THEN '{}'::jsonb
            WHEN jsonb_typeof(headers) = 'object' THEN headers
            WHEN jsonb_typeof(headers) = 'string' THEN (
                -- Remove surrounding quotes then unescape to get raw JSON text, finally cast to jsonb
                replace(replace(trim(both '"' from headers::text), '\\"', '"'), '\\\\', '\\')
            )::jsonb
            ELSE '{}'::jsonb
        END,
        '{}'::jsonb
    );

    body_fixed := COALESCE(
        CASE
            WHEN body IS NULL THEN '{}'::jsonb
            WHEN jsonb_typeof(body) = 'object' THEN body
            WHEN jsonb_typeof(body) = 'string' THEN (
                replace(replace(trim(both '"' from body::text), '\\"', '"'), '\\\\', '\\')
            )::jsonb
            ELSE body
        END,
        '{}'::jsonb
    );

    -- Build the SQL snippet to be executed by pg_cron
    sql_text := format(
        $sql$select net.http_post(
            url := %L,
            headers := %L::jsonb,
            body := %L::jsonb,
            timeout_milliseconds := %s
        );$sql$,
        url,
        headers_fixed::text,
        body_fixed::text,
        timeout_ms
    );

    job_id := cron.schedule(job_name, schedule, sql_text);
    RETURN job_id;
END;
$$;

-- Helper to unschedule by job name
CREATE OR REPLACE FUNCTION public.unschedule_job_by_name(job_name text)
RETURNS void
LANGUAGE plpgsql
SECURITY DEFINER
AS $$
BEGIN
    PERFORM cron.unschedule(j.jobid)
    FROM cron.job j
    WHERE j.jobname = job_name;
END;
$$;

-- Grant execute to service role (backend uses service role key)
GRANT EXECUTE ON FUNCTION public.schedule_trigger_http(text, text, text, jsonb, jsonb, integer) TO service_role;
GRANT EXECUTE ON FUNCTION public.unschedule_job_by_name(text) TO service_role;

COMMIT;
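The backend (later in this diff) drives these helpers through the async supabase-py client's `rpc(...)`. A minimal sketch with illustrative wrapper names; the RPC names and arguments are taken from the migration above:

```python
# Sketch only, assuming the async supabase-py client used by the backend
# (await client.rpc(...).execute()). RPC names/arguments match the migration.
async def schedule_trigger(client, trigger_id: str, cron_expression: str,
                           webhook_base_url: str, secret: str) -> None:
    await client.rpc(
        "schedule_trigger_http",
        {
            "job_name": f"trigger_{trigger_id}",  # stable per-trigger job name
            "schedule": cron_expression,          # standard cron syntax
            "url": f"{webhook_base_url}/api/triggers/{trigger_id}/webhook",
            "headers": {
                "Content-Type": "application/json",
                "X-Trigger-Secret": secret,  # verified by the webhook endpoint
            },
            "body": {"trigger_id": trigger_id},
            "timeout_ms": 8000,
        },
    ).execute()

async def unschedule_trigger(client, trigger_id: str) -> None:
    await client.rpc(
        "unschedule_job_by_name",
        {"job_name": f"trigger_{trigger_id}"},
    ).execute()
```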
@@ -6,6 +6,7 @@ import os
import uuid
from datetime import datetime, timezone
import json
import hmac

from services.supabase import DBConnection
from utils.auth_utils import get_current_user_id_from_jwt
@@ -452,6 +453,18 @@ async def trigger_webhook(
        raise HTTPException(status_code=403, detail="Agent triggers are not enabled")

    try:
        # Simple header-based auth using a shared secret
        # Configure the secret via environment variable: TRIGGER_WEBHOOK_SECRET
        secret = os.getenv("TRIGGER_WEBHOOK_SECRET")
        if not secret:
            logger.error("TRIGGER_WEBHOOK_SECRET is not configured")
            raise HTTPException(status_code=500, detail="Webhook secret not configured")

        incoming_secret = request.headers.get("x-trigger-secret", "")
        if not hmac.compare_digest(incoming_secret, secret):
            logger.warning(f"Invalid webhook secret for trigger {trigger_id}")
            raise HTTPException(status_code=401, detail="Unauthorized")

        # Get raw data from request
        raw_data = {}
        try:
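To exercise this auth path locally, one can simulate the POST that Supabase Cron's `net.http_post` makes. A sketch, assuming `httpx` is installed (any HTTP client works) and the backend runs on localhost:8000; the trigger id is a placeholder:

```python
# Local smoke test for the webhook auth path (httpx is an assumption).
import os
import httpx

resp = httpx.post(
    "http://localhost:8000/api/triggers/<trigger_id>/webhook",
    json={"trigger_id": "<trigger_id>"},
    headers={"X-Trigger-Secret": os.environ["TRIGGER_WEBHOOK_SECRET"]},
)
print(resp.status_code)  # expect 401 when the secret is wrong
```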
@@ -7,7 +7,7 @@ from typing import Dict, Any, Optional, List

import croniter
import pytz
from qstash.client import QStash
from services.supabase import DBConnection

from services.supabase import DBConnection
from utils.logger import logger
@@ -41,19 +41,11 @@ class TriggerProvider(ABC):
class ScheduleProvider(TriggerProvider):
    def __init__(self):
        super().__init__("schedule", TriggerType.SCHEDULE)
        self._qstash_token = os.getenv("QSTASH_TOKEN")
        self._webhook_base_url = os.getenv("WEBHOOK_BASE_URL", "http://localhost:3000")

        if not self._qstash_token:
            logger.warning("QSTASH_TOKEN not found. Schedule provider will not work without it.")
            self._qstash = None
        else:
            self._qstash = QStash(token=self._qstash_token)
        # This should point to your backend base URL since Supabase Cron will POST to backend
        self._webhook_base_url = os.getenv("WEBHOOK_BASE_URL", "http://localhost:8000")
        self._db = DBConnection()

    async def validate_config(self, config: Dict[str, Any]) -> Dict[str, Any]:
        if not self._qstash:
            raise ValueError("QSTASH_TOKEN environment variable is required for scheduled triggers")

        if 'cron_expression' not in config:
            raise ValueError("cron_expression is required for scheduled triggers")
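Since `croniter` is already imported in this module, `validate_config` can reject malformed expressions before anything reaches `cron.schedule`. A minimal sketch (the standalone function name is illustrative):

```python
# Sketch of cron-expression validation with croniter (already a dependency).
from croniter import croniter

def validate_cron_expression(expr: str) -> str:
    if not croniter.is_valid(expr):
        raise ValueError(f"Invalid cron_expression: {expr!r}")
    return expr
```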
@@ -81,10 +73,6 @@ class ScheduleProvider(TriggerProvider):
        return config

    async def setup_trigger(self, trigger: Trigger) -> bool:
        if not self._qstash:
            logger.error("QStash client not available")
            return False

        try:
            webhook_url = f"{self._webhook_base_url}/api/triggers/{trigger.trigger_id}/webhook"
            cron_expression = trigger.config['cron_expression']
@@ -104,63 +92,71 @@ class ScheduleProvider(TriggerProvider):
                "timestamp": datetime.now(timezone.utc).isoformat()
            }

            headers = {
            headers: Dict[str, Any] = {
                "Content-Type": "application/json",
                "X-Trigger-Source": "schedule"
            }

            # Include simple shared secret header for backend auth
            secret = os.getenv("TRIGGER_WEBHOOK_SECRET")
            if secret:
                headers["X-Trigger-Secret"] = secret
            if config.ENV_MODE == EnvMode.STAGING:
                vercel_bypass_key = os.getenv("VERCEL_PROTECTION_BYPASS_KEY", "")
                if vercel_bypass_key:
                    headers["X-Vercel-Protection-Bypass"] = vercel_bypass_key

            schedule_id = await asyncio.to_thread(
                self._qstash.schedule.create,
                destination=webhook_url,
                cron=cron_expression,
                body=json.dumps(payload),
                headers=headers,
                retries=3,
                delay="5s"
            )

            trigger.config['qstash_schedule_id'] = schedule_id
            logger.info(f"Created QStash schedule {schedule_id} for trigger {trigger.trigger_id}")

            # Supabase Cron job names are case-sensitive; we keep a stable name per trigger
            job_name = f"trigger_{trigger.trigger_id}"

            # Schedule via Supabase Cron RPC helper
            client = await self._db.client
            try:
                result = await client.rpc(
                    "schedule_trigger_http",
                    {
                        "job_name": job_name,
                        "schedule": cron_expression,
                        "url": webhook_url,
                        "headers": headers,
                        "body": payload,
                        "timeout_ms": 8000,
                    },
                ).execute()
            except Exception as rpc_err:
                logger.error(f"Failed to schedule Supabase Cron job via RPC: {rpc_err}")
                return False

            trigger.config['cron_job_name'] = job_name
            try:
                trigger.config['cron_job_id'] = result.data
            except Exception:
                trigger.config['cron_job_id'] = None
            logger.info(f"Created Supabase Cron job '{job_name}' for trigger {trigger.trigger_id}")
            return True

        except Exception as e:
            logger.error(f"Failed to setup QStash schedule for trigger {trigger.trigger_id}: {e}")
            logger.error(f"Failed to setup Supabase Cron schedule for trigger {trigger.trigger_id}: {e}")
            return False

    async def teardown_trigger(self, trigger: Trigger) -> bool:
        if not self._qstash:
            logger.warning("QStash client not available, skipping teardown")
            return True

        try:
            schedule_id = trigger.config.get('qstash_schedule_id')
            if schedule_id:
                try:
                    await asyncio.to_thread(self._qstash.schedule.delete, schedule_id)
                    logger.info(f"Deleted QStash schedule {schedule_id} for trigger {trigger.trigger_id}")
                    return True
                except Exception as e:
                    logger.warning(f"Failed to delete QStash schedule {schedule_id}: {e}")

            schedules = await asyncio.to_thread(self._qstash.schedule.list)
            webhook_url = f"{self._webhook_base_url}/api/triggers/{trigger.trigger_id}/webhook"

            for schedule in schedules:
                if schedule.get('destination') == webhook_url:
                    await asyncio.to_thread(self._qstash.schedule.delete, schedule['scheduleId'])
                    logger.info(f"Deleted QStash schedule {schedule['scheduleId']} for trigger {trigger.trigger_id}")
                    return True

            logger.warning(f"No QStash schedule found for trigger {trigger.trigger_id}")
            return True
            job_name = trigger.config.get('cron_job_name') or f"trigger_{trigger.trigger_id}"
            client = await self._db.client

            try:
                await client.rpc(
                    "unschedule_job_by_name",
                    {"job_name": job_name},
                ).execute()
                logger.info(f"Unschedule requested for Supabase Cron job '{job_name}' (trigger {trigger.trigger_id})")
                return True
            except Exception as rpc_err:
                logger.warning(f"Failed to unschedule job '{job_name}' via RPC: {rpc_err}")
                return False

        except Exception as e:
            logger.error(f"Failed to teardown QStash schedule for trigger {trigger.trigger_id}: {e}")
            logger.error(f"Failed to teardown Supabase Cron schedule for trigger {trigger.trigger_id}: {e}")
            return False

    async def process_event(self, trigger: Trigger, event: TriggerEvent) -> TriggerResult:
@@ -135,7 +135,8 @@ class TriggerService:

        trigger.updated_at = datetime.now(timezone.utc)

        if config is not None or (is_active is True and not trigger.is_active):
        # Reconcile provider scheduling when config changes or activation state toggles
        if (config is not None) or (is_active is not None):
            from .provider_service import get_provider_service
            provider_service = get_provider_service(self._db)
2845 backend/uv.lock
File diff suppressed because it is too large
@@ -59,7 +59,7 @@ Obtain the following API keys:
- [Daytona](https://app.daytona.io/) - For secure agent execution

- **Background Job Processing**:
  - [QStash](https://console.upstash.com/qstash) - For workflows, automated tasks, and webhook handling
  - Supabase Cron - For workflows, automated tasks, and webhook handling

#### Optional
@@ -131,14 +131,6 @@ As part of the setup, you'll need to:
- Image name: `kortix/suna:0.1.3`
- Entrypoint: `/usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf`

### 5. QStash Configuration

QStash is required for background job processing, workflows, and webhook handling:

1. Create an account at [Upstash Console](https://console.upstash.com/qstash)
2. Get your QStash token and signing keys
3. Configure a publicly accessible webhook base URL for workflow callbacks

## Manual Configuration

If you prefer to configure your installation manually, or if you need to modify the configuration after installation, here's what you need to know:
@@ -185,11 +177,7 @@ DAYTONA_SERVER_URL=https://app.daytona.io/api
DAYTONA_TARGET=us

# Background job processing (Required)
QSTASH_URL=https://qstash.upstash.io
QSTASH_TOKEN=your-qstash-token
QSTASH_CURRENT_SIGNING_KEY=your-current-signing-key
QSTASH_NEXT_SIGNING_KEY=your-next-signing-key
WEBHOOK_BASE_URL=https://yourdomain.com
WEBHOOK_BASE_URL=https://your-domain.ngrok.io

# MCP Configuration
MCP_CREDENTIAL_ENCRYPTION_KEY=your-generated-encryption-key
@@ -297,13 +285,7 @@ uv run dramatiq run_agent_background
   - Verify Daytona API key
   - Check if the container image is correctly configured

5. **QStash/Webhook issues**

   - Verify QStash token and signing keys
   - Ensure webhook base URL is publicly accessible
   - Check QStash console for delivery status

6. **Setup wizard issues**
5. **Setup wizard issues**

   - Delete `.setup_progress` file to reset the setup wizard
   - Check that all required tools are installed and accessible
@@ -318,7 +300,7 @@ docker compose logs -f

# Frontend logs (manual setup)
cd frontend
npm run dev -- --turbopack
npm run dev

# Backend logs (manual setup)
cd backend
@@ -1,40 +0,0 @@
import { NextRequest, NextResponse } from 'next/server';

export async function POST(request: NextRequest) {
  try {
    const body = await request.arrayBuffer();
    const headers: Record<string, string> = {};
    request.headers.forEach((value, key) => {
      if (!['host', 'content-length', 'transfer-encoding', 'connection'].includes(key.toLowerCase())) {
        headers[key] = value;
      }
    });
    const backendUrl = process.env.BACKEND_URL || 'http://localhost:8000';
    const targetUrl = `${backendUrl}/triggers/qstash/webhook`;
    const response = await fetch(targetUrl, {
      method: 'POST',
      headers: {
        ...headers,
        'Content-Type': headers['content-type'] || 'application/json',
      },
      body: body,
    });
    const responseData = await response.text();
    return new NextResponse(responseData, {
      status: response.status,
      headers: {
        'Content-Type': response.headers.get('Content-Type') || 'application/json',
      },
    });
  } catch (error) {
    console.error('[QStash Webhook Proxy] Error:', error);
    return NextResponse.json(
      { error: 'Internal server error' },
      { status: 500 }
    );
  }
}

export async function GET(request: NextRequest) {
  return NextResponse.json({ status: 'ok', service: 'qstash-webhook-proxy' });
}
133 setup.py
@@ -141,16 +141,12 @@ def load_existing_env_vars():
        "rapidapi": {
            "RAPID_API_KEY": backend_env.get("RAPID_API_KEY", ""),
        },
        "qstash": {
            "QSTASH_URL": backend_env.get("QSTASH_URL", ""),
            "QSTASH_TOKEN": backend_env.get("QSTASH_TOKEN", ""),
            "QSTASH_CURRENT_SIGNING_KEY": backend_env.get(
                "QSTASH_CURRENT_SIGNING_KEY", ""
            ),
            "QSTASH_NEXT_SIGNING_KEY": backend_env.get("QSTASH_NEXT_SIGNING_KEY", ""),
        "cron": {
            # No secrets required. Make sure pg_cron and pg_net are enabled in Supabase
        },
        "webhook": {
            "WEBHOOK_BASE_URL": backend_env.get("WEBHOOK_BASE_URL", ""),
            "TRIGGER_WEBHOOK_SECRET": backend_env.get("TRIGGER_WEBHOOK_SECRET", ""),
        },
        "slack": {
            "SLACK_CLIENT_ID": backend_env.get("SLACK_CLIENT_ID", ""),
@@ -251,6 +247,12 @@ def generate_admin_api_key():
    return key_bytes.hex()


def generate_webhook_secret():
    """Generates a secure shared secret for trigger webhooks."""
    # 32 random bytes as hex (64 hex chars)
    return secrets.token_hex(32)


# --- Main Setup Class ---
class SetupWizard:
    def __init__(self):
@@ -268,7 +270,7 @@ class SetupWizard:
            "llm": existing_env_vars["llm"],
            "search": existing_env_vars["search"],
            "rapidapi": existing_env_vars["rapidapi"],
            "qstash": existing_env_vars["qstash"],
            "cron": existing_env_vars.get("cron", {}),
            "slack": existing_env_vars["slack"],
            "webhook": existing_env_vars["webhook"],
            "mcp": existing_env_vars["mcp"],
@@ -284,7 +286,7 @@ class SetupWizard:
        else:
            self.env_vars[key] = value

        self.total_steps = 19
        self.total_steps = 17

    def show_current_config(self):
        """Shows the current configuration status."""
@@ -332,11 +334,11 @@ class SetupWizard:
        else:
            config_items.append(f"{Colors.CYAN}○{Colors.ENDC} RapidAPI (optional)")

        # Check QStash (required)
        if self.env_vars["qstash"]["QSTASH_TOKEN"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} QStash & Webhooks")
        # Check Cron/Webhook setup
        if self.env_vars["webhook"]["WEBHOOK_BASE_URL"]:
            config_items.append(f"{Colors.GREEN}✓{Colors.ENDC} Supabase Cron & Webhooks")
        else:
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} QStash & Webhooks")
            config_items.append(f"{Colors.YELLOW}○{Colors.ENDC} Supabase Cron & Webhooks")

        # Check MCP encryption key
        if self.env_vars["mcp"]["MCP_CREDENTIAL_ENCRYPTION_KEY"]:
@@ -402,15 +404,16 @@ class SetupWizard:
        self.run_step(7, self.collect_search_api_keys)
        self.run_step(8, self.collect_rapidapi_keys)
        self.run_step(9, self.collect_kortix_keys)
        self.run_step(10, self.collect_qstash_keys)
        # Supabase Cron does not require keys; ensure DB migrations enable cron functions
        self.run_step(10, self.collect_webhook_keys)
        self.run_step(11, self.collect_mcp_keys)
        self.run_step(12, self.collect_pipedream_keys)
        self.run_step(13, self.collect_slack_keys)
        self.run_step(14, self.collect_webhook_keys)
        self.run_step(15, self.configure_env_files)
        self.run_step(16, self.setup_supabase_database)
        self.run_step(17, self.install_dependencies)
        self.run_step(18, self.start_suna)
        # Removed duplicate webhook collection step
        self.run_step(14, self.configure_env_files)
        self.run_step(15, self.setup_supabase_database)
        self.run_step(16, self.install_dependencies)
        self.run_step(17, self.start_suna)

        self.final_instructions()
@@ -929,61 +932,6 @@ class SetupWizard:

        print_success("Kortix admin configuration saved.")

    def collect_qstash_keys(self):
        """Collects the required QStash configuration."""
        print_step(
            10,
            self.total_steps,
            "Collecting QStash Configuration",
        )

        # Check if we already have values configured
        existing_token = self.env_vars["qstash"]["QSTASH_TOKEN"]
        if existing_token:
            print_info(
                f"Found existing QStash token: {mask_sensitive_value(existing_token)}"
            )
            print_info("Press Enter to keep current values or type new ones.")
        else:
            print_info(
                "QStash is required for Suna's background job processing and scheduling."
            )
            print_info(
                "QStash enables workflows, automated tasks, and webhook handling."
            )
            print_info("Get your credentials at https://console.upstash.com/qstash")
            input("Press Enter to continue once you have your QStash credentials...")

        qstash_token = self._get_input(
            "Enter your QStash token: ",
            validate_api_key,
            "Invalid QStash token format. It should be at least 10 characters long.",
            default_value=existing_token,
        )
        self.env_vars["qstash"]["QSTASH_TOKEN"] = qstash_token

        # Set default URL if not already configured
        if not self.env_vars["qstash"]["QSTASH_URL"]:
            self.env_vars["qstash"]["QSTASH_URL"] = "https://qstash.upstash.io"

        # Collect signing keys
        current_signing_key = self._get_input(
            "Enter your QStash current signing key: ",
            validate_api_key,
            "Invalid signing key format. It should be at least 10 characters long.",
            default_value=self.env_vars["qstash"]["QSTASH_CURRENT_SIGNING_KEY"],
        )
        self.env_vars["qstash"]["QSTASH_CURRENT_SIGNING_KEY"] = current_signing_key

        next_signing_key = self._get_input(
            "Enter your QStash next signing key: ",
            validate_api_key,
            "Invalid signing key format. It should be at least 10 characters long.",
            default_value=self.env_vars["qstash"]["QSTASH_NEXT_SIGNING_KEY"],
        )
        self.env_vars["qstash"]["QSTASH_NEXT_SIGNING_KEY"] = next_signing_key

        print_success("QStash configuration saved.")

    def collect_mcp_keys(self):
        """Collects the MCP configuration."""
@@ -1120,7 +1068,7 @@ class SetupWizard:

    def collect_webhook_keys(self):
        """Collects the webhook configuration."""
        print_step(14, self.total_steps, "Collecting Webhook Configuration")
        print_step(10, self.total_steps, "Collecting Webhook Configuration")

        # Check if we already have values configured
        has_existing = bool(self.env_vars["webhook"]["WEBHOOK_BASE_URL"])
@@ -1131,21 +1079,29 @@ class SetupWizard:
            print_info("Press Enter to keep current value or type a new one.")
        else:
            print_info("Webhook base URL is required for workflows to receive callbacks.")
            print_info("This must be a publicly accessible URL where Suna can receive webhooks.")
            print_info("For local development, you can use services like ngrok or localtunnel.")
            print_info("This must be a publicly accessible URL where Suna API can receive webhooks from Supabase Cron.")
            print_info("For local development, you can use services like ngrok or localtunnel to expose http://localhost:8000 to the internet.")

        self.env_vars["webhook"]["WEBHOOK_BASE_URL"] = self._get_input(
            "Enter your webhook base URL (e.g., https://yourdomain.com): ",
            "Enter your webhook base URL (e.g., https://your-domain.ngrok.io): ",
            validate_url,
            "Invalid webhook base URL format. It should be a valid publicly accessible URL.",
            default_value=self.env_vars["webhook"]["WEBHOOK_BASE_URL"],
        )

        # Ensure a webhook secret exists; generate a strong default if missing
        if not self.env_vars["webhook"].get("TRIGGER_WEBHOOK_SECRET"):
            print_info("Generating a secure TRIGGER_WEBHOOK_SECRET for webhook authentication...")
            self.env_vars["webhook"]["TRIGGER_WEBHOOK_SECRET"] = generate_webhook_secret()
            print_success("Webhook secret generated.")
        else:
            print_info("Found existing TRIGGER_WEBHOOK_SECRET. Keeping existing value.")

        print_success("Webhook configuration saved.")

    def configure_env_files(self):
        """Configures and writes the .env files for frontend and backend."""
        print_step(15, self.total_steps, "Configuring Environment Files")
        print_step(14, self.total_steps, "Configuring Environment Files")

        # --- Backend .env ---
        is_docker = self.env_vars["setup_method"] == "docker"
@@ -1159,7 +1115,7 @@ class SetupWizard:
            **self.env_vars["llm"],
            **self.env_vars["search"],
            **self.env_vars["rapidapi"],
            **self.env_vars["qstash"],
            **self.env_vars.get("cron", {}),
            **self.env_vars["slack"],
            **self.env_vars["webhook"],
            **self.env_vars["mcp"],
@@ -1199,7 +1155,7 @@ class SetupWizard:

    def setup_supabase_database(self):
        """Links the project to Supabase and pushes database migrations."""
        print_step(16, self.total_steps, "Setting up Supabase Database")
        print_step(15, self.total_steps, "Setting up Supabase Database")

        print_info(
            "This step will link your project to Supabase and push database migrations."
@@ -1208,15 +1164,8 @@ class SetupWizard:
            "You can skip this if you've already set up your database or prefer to do it manually."
        )

        # Check if Supabase info is already configured
        has_existing_supabase = any(self.env_vars["supabase"].values())

        if has_existing_supabase:
            prompt = "Do you want to skip the database setup? (Y/n): "
            default_skip = True
        else:
            prompt = "Do you want to skip the database setup? (y/N): "
            default_skip = False
        prompt = "Do you want to skip the database setup? (y/N): "
        default_skip = False

        user_input = input(prompt).lower().strip()
|
|||
|
||||
def install_dependencies(self):
|
||||
"""Installs frontend and backend dependencies for manual setup."""
|
||||
print_step(17, self.total_steps, "Installing Dependencies")
|
||||
print_step(16, self.total_steps, "Installing Dependencies")
|
||||
if self.env_vars["setup_method"] == "docker":
|
||||
print_info(
|
||||
"Skipping dependency installation for Docker setup (will be handled by Docker Compose)."
|
||||
|
@ -1340,7 +1289,7 @@ class SetupWizard:
|
|||
|
||||
def start_suna(self):
|
||||
"""Starts Suna using Docker Compose or shows instructions for manual startup."""
|
||||
print_step(18, self.total_steps, "Starting Suna")
|
||||
print_step(17, self.total_steps, "Starting Suna")
|
||||
if self.env_vars["setup_method"] == "docker":
|
||||
print_info("Starting Suna with Docker Compose...")
|
||||
try:
|
||||
|
|