mirror of https://github.com/kortix-ai/suna.git
Merge branch 'main' of https://github.com/escapade-mckv/suna into feat/ui
commit d2bbd1bd27
@@ -18,6 +18,7 @@ from agent.run import run_agent
from utils.auth_utils import get_current_user_id_from_jwt, get_user_id_from_stream_auth, verify_thread_access
from utils.logger import logger
from services.billing import check_billing_status
+from utils.config import config
from sandbox.sandbox import create_sandbox, get_or_start_sandbox
from services.llm import make_llm_api_call

@@ -31,17 +32,32 @@ instance_id = None # Global instance ID for this backend instance
REDIS_RESPONSE_LIST_TTL = 3600 * 24

MODEL_NAME_ALIASES = {
    # Short names to full names
    "sonnet-3.7": "anthropic/claude-3-7-sonnet-latest",
    "gpt-4.1": "openai/gpt-4.1-2025-04-14",
    "gpt-4o": "openai/gpt-4o",
    "gpt-4-turbo": "openai/gpt-4-turbo",
    "gpt-4": "openai/gpt-4",
    "gemini-flash-2.5": "openrouter/google/gemini-2.5-flash-preview",
    "grok-3": "xai/grok-3-fast-latest",
    "deepseek": "deepseek/deepseek-chat",
    "grok-3-mini": "xai/grok-3-mini-fast-beta",
    "qwen3-4b": "openrouter/qwen/qwen3-4b:free",

    # Also include full names as keys to ensure they map to themselves
    "anthropic/claude-3-7-sonnet-latest": "anthropic/claude-3-7-sonnet-latest",
    "openai/gpt-4.1-2025-04-14": "openai/gpt-4.1-2025-04-14",
    "openai/gpt-4o": "openai/gpt-4o",
    "openai/gpt-4-turbo": "openai/gpt-4-turbo",
    "openai/gpt-4": "openai/gpt-4",
    "openrouter/google/gemini-2.5-flash-preview": "openrouter/google/gemini-2.5-flash-preview",
    "xai/grok-3-fast-latest": "xai/grok-3-fast-latest",
    "deepseek/deepseek-chat": "deepseek/deepseek-chat",
    "xai/grok-3-mini-fast-beta": "xai/grok-3-mini-fast-beta",
}

class AgentStartRequest(BaseModel):
-    model_name: Optional[str] = "anthropic/claude-3-7-sonnet-latest"
+    model_name: Optional[str] = None  # Will be set from config.MODEL_TO_USE in the endpoint
    enable_thinking: Optional[bool] = False
    reasoning_effort: Optional[str] = 'low'
    stream: Optional[bool] = True

@@ -356,7 +372,22 @@ async def start_agent(
    if not instance_id:
        raise HTTPException(status_code=500, detail="Agent API not initialized with instance ID")

-    logger.info(f"Starting new agent for thread: {thread_id} with config: model={body.model_name}, thinking={body.enable_thinking}, effort={body.reasoning_effort}, stream={body.stream}, context_manager={body.enable_context_manager} (Instance: {instance_id})")
+    # Use model from config if not specified in the request
+    model_name = body.model_name
+    logger.info(f"Original model_name from request: {model_name}")
+
+    if model_name is None:
+        model_name = config.MODEL_TO_USE
+        logger.info(f"Using model from config: {model_name}")
+
+    # Log the model name after alias resolution
+    resolved_model = MODEL_NAME_ALIASES.get(model_name, model_name)
+    logger.info(f"Resolved model name: {resolved_model}")
+
+    # Update model_name to use the resolved version
+    model_name = resolved_model
+
+    logger.info(f"Starting new agent for thread: {thread_id} with config: model={model_name}, thinking={body.enable_thinking}, effort={body.reasoning_effort}, stream={body.stream}, context_manager={body.enable_context_manager} (Instance: {instance_id})")
    client = await db.client

    await verify_thread_access(client, thread_id, user_id)

@@ -401,7 +432,7 @@ async def start_agent(
    run_agent_background(
        agent_run_id=agent_run_id, thread_id=thread_id, instance_id=instance_id,
        project_id=project_id, sandbox=sandbox,
-        model_name=MODEL_NAME_ALIASES.get(body.model_name, body.model_name),
+        model_name=model_name,  # Already resolved above
        enable_thinking=body.enable_thinking, reasoning_effort=body.reasoning_effort,
        stream=body.stream, enable_context_manager=body.enable_context_manager
    )

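Taken together, the changes above make model selection a config fallback followed by a dictionary lookup that passes unknown names through unchanged. A minimal sketch of that behavior (resolve_model is a hypothetical helper for illustration; the endpoint inlines this logic, and DEFAULT_MODEL stands in for config.MODEL_TO_USE):

    from typing import Optional

    DEFAULT_MODEL = "anthropic/claude-3-7-sonnet-latest"  # stand-in for config.MODEL_TO_USE
    MODEL_NAME_ALIASES = {
        "sonnet-3.7": "anthropic/claude-3-7-sonnet-latest",
        "anthropic/claude-3-7-sonnet-latest": "anthropic/claude-3-7-sonnet-latest",
    }

    def resolve_model(requested: Optional[str]) -> str:
        # Fall back to the configured default, then resolve aliases;
        # names missing from the dict pass through unchanged.
        name = requested if requested is not None else DEFAULT_MODEL
        return MODEL_NAME_ALIASES.get(name, name)

    assert resolve_model("sonnet-3.7") == "anthropic/claude-3-7-sonnet-latest"
    assert resolve_model(None) == DEFAULT_MODEL
    assert resolve_model("some/unlisted-model") == "some/unlisted-model"
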
@@ -643,7 +674,9 @@ async def run_agent_background(
    enable_context_manager: bool
):
    """Run the agent in the background using Redis for state."""
-    logger.debug(f"Starting background agent run: {agent_run_id} for thread: {thread_id} (Instance: {instance_id})")
+    logger.info(f"Starting background agent run: {agent_run_id} for thread: {thread_id} (Instance: {instance_id})")
+    logger.info(f"🚀 Using model: {model_name} (thinking: {enable_thinking}, reasoning_effort: {reasoning_effort})")

    client = await db.client
    start_time = datetime.now(timezone.utc)
    total_responses = 0

@@ -854,7 +887,7 @@ async def generate_and_update_project_name(project_id: str, prompt: str):
@router.post("/agent/initiate", response_model=InitiateAgentResponse)
async def initiate_agent_with_files(
    prompt: str = Form(...),
-    model_name: Optional[str] = Form("anthropic/claude-3-7-sonnet-latest"),
+    model_name: Optional[str] = Form(None),  # Default to None to use config.MODEL_TO_USE
    enable_thinking: Optional[bool] = Form(False),
    reasoning_effort: Optional[str] = Form("low"),
    stream: Optional[bool] = Form(True),

@@ -867,6 +900,20 @@ async def initiate_agent_with_files(
    if not instance_id:
        raise HTTPException(status_code=500, detail="Agent API not initialized with instance ID")

+    # Use model from config if not specified in the request
+    logger.info(f"Original model_name from request: {model_name}")
+
+    if model_name is None:
+        model_name = config.MODEL_TO_USE
+        logger.info(f"Using model from config: {model_name}")
+
+    # Log the model name after alias resolution
+    resolved_model = MODEL_NAME_ALIASES.get(model_name, model_name)
+    logger.info(f"Resolved model name: {resolved_model}")
+
+    # Update model_name to use the resolved version
+    model_name = resolved_model
+
    logger.info(f"[\033[91mDEBUG\033[0m] Initiating new agent with prompt and {len(files)} files (Instance: {instance_id}), model: {model_name}, enable_thinking: {enable_thinking}")
    client = await db.client
    account_id = user_id  # In Basejump, personal account_id is the same as user_id

@@ -987,7 +1034,7 @@ async def initiate_agent_with_files(
    run_agent_background(
        agent_run_id=agent_run_id, thread_id=thread_id, instance_id=instance_id,
        project_id=project_id, sandbox=sandbox,
-        model_name=MODEL_NAME_ALIASES.get(model_name, model_name),
+        model_name=model_name,  # Already resolved above
        enable_thinking=enable_thinking, reasoning_effort=reasoning_effort,
        stream=stream, enable_context_manager=enable_context_manager
    )

@@ -39,6 +39,7 @@ async def run_agent(
    enable_context_manager: bool = True
):
    """Run the development agent with specified configuration."""
+    print(f"🚀 Starting agent with model: {model_name}")

    thread_manager = ThreadManager()

@@ -69,7 +70,6 @@ async def run_agent(
    thread_manager.add_tool(MessageTool)  # we are just doing this via prompt as there is no need to call it as a tool
    thread_manager.add_tool(WebSearchTool)
    thread_manager.add_tool(SandboxVisionTool, project_id=project_id, thread_id=thread_id, thread_manager=thread_manager)

    # Add data providers tool if RapidAPI key is available
    if config.RAPID_API_KEY:
        thread_manager.add_tool(DataProvidersTool)

@@ -171,7 +171,18 @@ async def run_agent(
    # logger.debug(f"Constructed temporary message with {len(temp_message_content_list)} content blocks.")
    # ---- End Temporary Message Handling ----

-    max_tokens = 64000 if "sonnet" in model_name.lower() else None
+    # Set max_tokens based on model
+    max_tokens = None
+    if "sonnet" in model_name.lower():
+        max_tokens = 64000
+    elif "gpt-4" in model_name.lower():
+        max_tokens = 4096
+
+    # # Configure tool calling based on model type
+    # use_xml_tool_calling = "anthropic" in model_name.lower() or "claude" in model_name.lower()
+    # use_native_tool_calling = "openai" in model_name.lower() or "gpt" in model_name.lower()
+
+    # # model_name = "openrouter/qwen/qwen3-235b-a22b"
+
    response = await thread_manager.run_thread(
        thread_id=thread_id,

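One subtlety in the rewritten max_tokens block: the checks are plain substring matches, so "openai/gpt-4.1-2025-04-14" also contains "gpt-4" and gets capped at 4096. A sketch of the selection as a standalone helper (pick_max_tokens is hypothetical; the diff inlines the branches):

    from typing import Optional

    def pick_max_tokens(model_name: str) -> Optional[int]:
        # Mirrors the branches above: substring checks, first match wins.
        lowered = model_name.lower()
        if "sonnet" in lowered:
            return 64000
        if "gpt-4" in lowered:
            return 4096
        return None  # let the provider default apply

    assert pick_max_tokens("anthropic/claude-3-7-sonnet-latest") == 64000
    assert pick_max_tokens("openai/gpt-4.1-2025-04-14") == 4096  # matched by the "gpt-4" check
    assert pick_max_tokens("deepseek/deepseek-chat") is None
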
@@ -200,7 +211,7 @@ async def run_agent(

    if isinstance(response, dict) and "status" in response and response["status"] == "error":
        yield response
-        break
+        return

    # Track if we see ask, complete, or web-browser-takeover tool calls
    last_tool_call = None

@@ -238,6 +249,28 @@ async def run_agent(
    except Exception as e:
        print(f"Error processing assistant chunk: {e}")

+    # # Check for native function calls (OpenAI format)
+    # elif chunk.get('type') == 'status' and 'content' in chunk:
+    #     try:
+    #         # Parse the status content
+    #         status_content = chunk.get('content', '{}')
+    #         if isinstance(status_content, str):
+    #             status_content = json.loads(status_content)
+
+    #         # Check if this is a tool call status
+    #         status_type = status_content.get('status_type')
+    #         function_name = status_content.get('function_name', '')
+
+    #         # Check for special function names that should stop execution
+    #         if status_type == 'tool_started' and function_name in ['ask', 'complete', 'web-browser-takeover']:
+    #             last_tool_call = function_name
+    #             print(f"Agent used native function call: {function_name}")
+    #     except json.JSONDecodeError:
+    #         # Handle cases where content might not be valid JSON
+    #         print(f"Warning: Could not parse status content JSON: {chunk.get('content')}")
+    #     except Exception as e:
+    #         print(f"Error processing status chunk: {e}")
+
    yield chunk

    # Check if we should stop based on the last tool call

@@ -231,7 +231,9 @@ Ask user a question and wait for response. Use for: 1) Requesting clarification
    "name": "complete",
    "description": "A special tool to indicate you have completed all tasks and are about to enter complete state. Use ONLY when: 1) All tasks in todo.md are marked complete [x], 2) The user's original request has been fully addressed, 3) There are no pending actions or follow-ups required, 4) You've delivered all final outputs and results to the user. IMPORTANT: This is the ONLY way to properly terminate execution. Never use this tool unless ALL tasks are complete and verified. Always ensure you've provided all necessary outputs and references before using this tool.",
    "parameters": {
-        "type": "object"
+        "type": "object",
+        "properties": {},
+        "required": []
    }
}
})

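For context, the change above expands the "complete" tool's bare parameters into a fully explicit object schema, which stricter function-calling validators tend to expect for zero-argument tools. Assembled, the registered entry looks roughly like this Python dict (description abbreviated here; the full text is in the hunk above):

    {
        "name": "complete",
        "description": "A special tool to indicate you have completed all tasks ...",
        "parameters": {
            "type": "object",
            "properties": {},
            "required": []
        }
    }
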
@@ -188,12 +188,13 @@ class ThreadManager:
        """
        logger.info(f"Starting thread execution for thread {thread_id}")
-        logger.debug(f"Parameters: model={llm_model}, temperature={llm_temperature}, max_tokens={llm_max_tokens}")
-        logger.debug(f"Auto-continue: max={native_max_auto_continues}, XML tool limit={max_xml_tool_calls}")
+        logger.info(f"Using model: {llm_model}")
+        # Log parameters
+        logger.info(f"Parameters: model={llm_model}, temperature={llm_temperature}, max_tokens={llm_max_tokens}")
+        logger.info(f"Auto-continue: max={native_max_auto_continues}, XML tool limit={max_xml_tool_calls}")

        # Use a default config if none was provided (needed for XML examples check)
        if processor_config is None:
            processor_config = ProcessorConfig()
+        # Log model info
+        logger.info(f"🤖 Thread {thread_id}: Using model {llm_model}")

        # Apply max_xml_tool_calls if specified and not already set in config
        if max_xml_tool_calls > 0 and not processor_config.max_xml_tool_calls:

@@ -281,7 +281,8 @@ async def make_llm_api_call(
        LLMError: For other API-related errors
    """
    # debug <timestamp>.json messages
-    logger.debug(f"Making LLM API call to model: {model_name} (Thinking: {enable_thinking}, Effort: {reasoning_effort})")
+    logger.info(f"Making LLM API call to model: {model_name} (Thinking: {enable_thinking}, Effort: {reasoning_effort})")
+    logger.info(f"📡 API Call: Using model {model_name}")
    params = prepare_params(
        messages=messages,
        model_name=model_name,

@@ -0,0 +1,342 @@
#!/usr/bin/env python
"""
Script to archive sandboxes for projects that are older than 1 day.

Usage:
    python archive_old_sandboxes.py [--days N] [--dry-run]

This script:
1. Gets all projects from the projects table
2. Filters projects created more than N days ago (default: 1 day)
3. Archives the sandboxes for those projects

Make sure your environment variables are properly set:
- SUPABASE_URL
- SUPABASE_SERVICE_ROLE_KEY
- DAYTONA_SERVER_URL
"""

import asyncio
import sys
import os
import argparse
from typing import List, Dict, Any
from datetime import datetime, timedelta
from dotenv import load_dotenv

# Load script-specific environment variables
load_dotenv(".env")

from services.supabase import DBConnection
from sandbox.sandbox import daytona
from utils.logger import logger

# Global DB connection to reuse
db_connection = None


async def get_old_projects(days_threshold: int = 1) -> List[Dict[str, Any]]:
    """
    Query all projects created more than N days ago.

    Args:
        days_threshold: Number of days threshold (default: 1)

    Returns:
        List of projects with their sandbox information
    """
    global db_connection
    if db_connection is None:
        db_connection = DBConnection()

    client = await db_connection.client

    # Print the Supabase URL being used
    print(f"Using Supabase URL: {os.getenv('SUPABASE_URL')}")

    # Calculate the date threshold
    threshold_date = (datetime.now() - timedelta(days=days_threshold)).isoformat()

    # Initialize variables for pagination
    all_projects = []
    page_size = 1000
    current_page = 0
    has_more = True

    logger.info(f"Starting to fetch projects older than {days_threshold} day(s)")
    print(f"Looking for projects created before: {threshold_date}")

    # Paginate through all projects
    while has_more:
        # Query projects with pagination
        start_range = current_page * page_size
        end_range = start_range + page_size - 1

        logger.info(f"Fetching projects page {current_page+1} (range: {start_range}-{end_range})")

        try:
            result = await client.table('projects').select(
                'project_id',
                'name',
                'created_at',
                'account_id',
                'sandbox'
            ).range(start_range, end_range).execute()

            # Debug info - print raw response
            print(f"Response data length: {len(result.data)}")

            if not result.data:
                print("No more data returned from query, ending pagination")
                has_more = False
            else:
                # Print a sample project to see the actual data structure
                if current_page == 0 and result.data:
                    print(f"Sample project data: {result.data[0]}")

                all_projects.extend(result.data)
                current_page += 1

                # Progress update
                logger.info(f"Loaded {len(all_projects)} projects so far")
                print(f"Loaded {len(all_projects)} projects so far...")

                # Check if we've reached the end - if we got fewer results than the page size
                if len(result.data) < page_size:
                    print(f"Got {len(result.data)} records which is less than page size {page_size}, ending pagination")
                    has_more = False
                else:
                    print(f"Full page returned ({len(result.data)} records), continuing to next page")

        except Exception as e:
            logger.error(f"Error during pagination: {str(e)}")
            print(f"Error during pagination: {str(e)}")
            has_more = False  # Stop on error

    # Print the query result summary
    total_projects = len(all_projects)
    print(f"Found {total_projects} total projects in database")
    logger.info(f"Total projects found in database: {total_projects}")

    if not all_projects:
        logger.info("No projects found in database")
        return []

    # Filter projects that are older than the threshold and have sandbox information
    old_projects_with_sandboxes = [
        project for project in all_projects
        if project.get('created_at') and project.get('created_at') < threshold_date
        and project.get('sandbox') and project['sandbox'].get('id')
    ]

    logger.info(f"Found {len(old_projects_with_sandboxes)} old projects with sandboxes")

    # Print a few sample old projects for debugging
    if old_projects_with_sandboxes:
        print("\nSample of old projects with sandboxes:")
        for i, project in enumerate(old_projects_with_sandboxes[:3]):
            print(f"  {i+1}. {project.get('name')} (Created: {project.get('created_at')})")
            print(f"     Sandbox ID: {project['sandbox'].get('id')}")
            if i >= 2:
                break

    return old_projects_with_sandboxes


async def archive_sandbox(project: Dict[str, Any], dry_run: bool) -> bool:
    """
    Archive a single sandbox.

    Args:
        project: Project information containing sandbox to archive
        dry_run: If True, only simulate archiving

    Returns:
        True if successful, False otherwise
    """
    sandbox_id = project['sandbox'].get('id')
    project_name = project.get('name', 'Unknown')
    project_id = project.get('project_id', 'Unknown')
    created_at = project.get('created_at', 'Unknown')

    try:
        logger.info(f"Checking sandbox {sandbox_id} for project '{project_name}' (ID: {project_id}, Created: {created_at})")

        if dry_run:
            logger.info(f"DRY RUN: Would archive sandbox {sandbox_id}")
            print(f"Would archive sandbox {sandbox_id} for project '{project_name}' (Created: {created_at})")
            return True

        # Get the sandbox
        sandbox = daytona.get_current_sandbox(sandbox_id)

        # Check sandbox state - it must be stopped before archiving
        sandbox_info = sandbox.info()

        # Log the current state
        logger.info(f"Sandbox {sandbox_id} is in '{sandbox_info.state}' state")

        # Only archive if the sandbox is in the stopped state
        if sandbox_info.state == "stopped":
            logger.info(f"Archiving sandbox {sandbox_id} as it is in stopped state")
            sandbox.archive()
            logger.info(f"Successfully archived sandbox {sandbox_id}")
            return True
        else:
            logger.info(f"Skipping sandbox {sandbox_id} as it is not in stopped state (current: {sandbox_info.state})")
            return True

    except Exception as e:
        import traceback
        error_type = type(e).__name__
        stack_trace = traceback.format_exc()

        # Log detailed error information
        logger.error(f"Error processing sandbox {sandbox_id}: {str(e)}")
        logger.error(f"Error type: {error_type}")
        logger.error(f"Stack trace:\n{stack_trace}")

        # If the exception has a response attribute (like in HTTP errors), log it
        if hasattr(e, 'response'):
            try:
                response_data = e.response.json() if hasattr(e.response, 'json') else str(e.response)
                logger.error(f"Response data: {response_data}")
            except Exception:
                logger.error(f"Could not parse response data from error")

        print(f"Failed to process sandbox {sandbox_id}: {error_type} - {str(e)}")
        return False


async def process_sandboxes(old_projects: List[Dict[str, Any]], dry_run: bool) -> tuple[int, int]:
    """
    Process all sandboxes sequentially.

    Args:
        old_projects: List of projects older than the threshold
        dry_run: Whether to actually archive sandboxes or just simulate

    Returns:
        Tuple of (processed_count, failed_count)
    """
    processed_count = 0
    failed_count = 0

    if dry_run:
        logger.info(f"DRY RUN: Would archive {len(old_projects)} sandboxes")
    else:
        logger.info(f"Archiving {len(old_projects)} sandboxes")

    print(f"Processing {len(old_projects)} sandboxes...")

    # Process each sandbox sequentially
    for i, project in enumerate(old_projects):
        success = await archive_sandbox(project, dry_run)

        if success:
            processed_count += 1
        else:
            failed_count += 1

        # Print progress periodically
        if (i + 1) % 20 == 0 or (i + 1) == len(old_projects):
            progress = (i + 1) / len(old_projects) * 100
            print(f"Progress: {i + 1}/{len(old_projects)} sandboxes processed ({progress:.1f}%)")
            print(f" - Processed: {processed_count}, Failed: {failed_count}")

    return processed_count, failed_count


async def main():
    """Main function to run the script."""
    # Parse command line arguments
    parser = argparse.ArgumentParser(description='Archive sandboxes for projects older than N days')
    parser.add_argument('--days', type=int, default=1, help='Age threshold in days (default: 1)')
    parser.add_argument('--dry-run', action='store_true', help='Show what would be archived without actually archiving')
    args = parser.parse_args()

    logger.info(f"Starting sandbox cleanup for projects older than {args.days} day(s)")
    if args.dry_run:
        logger.info("DRY RUN MODE - No sandboxes will be archived")

    # Print environment info
    print(f"Environment Mode: {os.getenv('ENV_MODE', 'Not set')}")
    print(f"Daytona Server: {os.getenv('DAYTONA_SERVER_URL', 'Not set')}")

    try:
        # Initialize global DB connection
        global db_connection
        db_connection = DBConnection()

        # Get all projects older than the threshold
        old_projects = await get_old_projects(args.days)

        if not old_projects:
            logger.info(f"No projects older than {args.days} day(s) with sandboxes to process")
            print(f"No projects older than {args.days} day(s) with sandboxes to archive.")
            return

        # Print summary of what will be processed
        print("\n===== SANDBOX CLEANUP SUMMARY =====")
        print(f"Projects older than {args.days} day(s): {len(old_projects)}")
        print(f"Sandboxes that will be archived: {len(old_projects)}")
        print("===================================")

        logger.info(f"Found {len(old_projects)} projects older than {args.days} day(s)")

        # Ask for confirmation before proceeding
        if not args.dry_run:
            print("\n⚠️ WARNING: You are about to archive sandboxes for old projects ⚠️")
            print("This action cannot be undone!")
            confirmation = input("\nAre you sure you want to proceed with archiving? (TRUE/FALSE): ").strip().upper()

            if confirmation != "TRUE":
                print("Archiving cancelled. Exiting script.")
                logger.info("Archiving cancelled by user")
                return

            print("\nProceeding with sandbox archiving...\n")
            logger.info("User confirmed sandbox archiving")

        # List a sample of projects to be processed
        for i, project in enumerate(old_projects[:5]):  # Just show first 5 for brevity
            created_at = project.get('created_at', 'Unknown')
            project_name = project.get('name', 'Unknown')
            project_id = project.get('project_id', 'Unknown')
            sandbox_id = project['sandbox'].get('id')

            print(f"{i+1}. Project: {project_name}")
            print(f"   Project ID: {project_id}")
            print(f"   Created At: {created_at}")
            print(f"   Sandbox ID: {sandbox_id}")

        if len(old_projects) > 5:
            print(f"   ... and {len(old_projects) - 5} more projects")

        # Process all sandboxes
        processed_count, failed_count = await process_sandboxes(old_projects, args.dry_run)

        # Print final summary
        print("\nSandbox Cleanup Summary:")
        print(f"Total projects older than {args.days} day(s): {len(old_projects)}")
        print(f"Total sandboxes processed: {len(old_projects)}")

        if args.dry_run:
            print(f"DRY RUN: No sandboxes were actually archived")
        else:
            print(f"Successfully processed: {processed_count}")
            print(f"Failed to process: {failed_count}")

        logger.info("Sandbox cleanup completed")

    except Exception as e:
        logger.error(f"Error during sandbox cleanup: {str(e)}")
        sys.exit(1)
    finally:
        # Clean up database connection
        if db_connection:
            await DBConnection.disconnect()


if __name__ == "__main__":
    asyncio.run(main())

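For reference, typical invocations of the new script, using the flags defined in its argparse setup (assuming the environment variables listed in the docstring are set):

    # Preview which sandboxes would be archived, without touching them
    python archive_old_sandboxes.py --days 7 --dry-run

    # Archive sandboxes for projects older than the default 1 day (prompts for confirmation)
    python archive_old_sandboxes.py
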
@@ -4,13 +4,15 @@ import { useEffect, useState } from 'react';
import { SidebarLeft } from '@/components/sidebar/sidebar-left';
import { SidebarInset, SidebarProvider } from '@/components/ui/sidebar';
// import { PricingAlert } from "@/components/billing/pricing-alert"
-import { MaintenanceAlert } from '@/components/maintenance-alert';
-import { useAccounts } from '@/hooks/use-accounts';
-import { useAuth } from '@/components/AuthProvider';
-import { useRouter } from 'next/navigation';
-import { Loader2 } from 'lucide-react';
-import { checkApiHealth } from '@/lib/api';
-import { MaintenancePage } from '@/components/maintenance/maintenance-page';
+import { MaintenanceAlert } from "@/components/maintenance-alert"
+import { useAccounts } from "@/hooks/use-accounts"
+import { useAuth } from "@/components/AuthProvider"
+import { useRouter } from "next/navigation"
+import { Loader2 } from "lucide-react"
+import { checkApiHealth } from "@/lib/api"
+import { MaintenancePage } from "@/components/maintenance/maintenance-page"
+import { DeleteOperationProvider } from "@/contexts/DeleteOperationContext"
+import { StatusOverlay } from "@/components/ui/status-overlay"

interface DashboardLayoutProps {
  children: React.ReactNode;

@@ -78,10 +80,13 @@ export default function DashboardLayout({ children }: DashboardLayoutProps) {
  }

  return (
+    <DeleteOperationProvider>
      <SidebarProvider>
        <SidebarLeft />
        <SidebarInset>
-          <div className="bg-background">{children}</div>
+          <div className="bg-background">
+            {children}
+          </div>
        </SidebarInset>

        {/* <PricingAlert

@@ -96,6 +101,10 @@ export default function DashboardLayout({ children }: DashboardLayoutProps) {
          onOpenChange={setShowMaintenanceAlert}
          closeable={true}
        />

+        {/* Status overlay for deletion operations */}
+        <StatusOverlay />
      </SidebarProvider>
-  );
+    </DeleteOperationProvider>
+  )
}

@@ -1,6 +1,6 @@
'use client';

-import { useEffect, useState } from 'react';
+import { useEffect, useState, useRef } from "react"
import {
  ArrowUpRight,
  Link as LinkIcon,

@@ -32,10 +32,12 @@ import {
import {
  Tooltip,
  TooltipContent,
-  TooltipTrigger,
-} from '@/components/ui/tooltip';
-import { getProjects, getThreads, Project } from '@/lib/api';
-import Link from 'next/link';
+  TooltipTrigger
+} from "@/components/ui/tooltip"
+import { getProjects, getThreads, Project, deleteThread } from "@/lib/api"
+import Link from "next/link"
+import { DeleteConfirmationDialog } from "@/components/thread/DeleteConfirmationDialog"
+import { useDeleteOperation } from '@/contexts/DeleteOperationContext'

// Thread with associated project info for display in sidebar
type ThreadWithProject = {

@@ -47,12 +49,18 @@ type ThreadWithProject = {
};

export function NavAgents() {
-  const { isMobile, state } = useSidebar();
-  const [threads, setThreads] = useState<ThreadWithProject[]>([]);
-  const [isLoading, setIsLoading] = useState(true);
-  const [loadingThreadId, setLoadingThreadId] = useState<string | null>(null);
-  const pathname = usePathname();
-  const router = useRouter();
+  const { isMobile, state } = useSidebar()
+  const [threads, setThreads] = useState<ThreadWithProject[]>([])
+  const [isLoading, setIsLoading] = useState(true)
+  const [loadingThreadId, setLoadingThreadId] = useState<string | null>(null)
+  const pathname = usePathname()
+  const router = useRouter()
+  const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
+  const [threadToDelete, setThreadToDelete] = useState<{ id: string; name: string } | null>(null)
+  const [isDeleting, setIsDeleting] = useState(false)
+  const isNavigatingRef = useRef(false)
+  const { performDelete, isOperationInProgress } = useDeleteOperation();
+  const isPerformingActionRef = useRef(false);

  // Helper to sort threads by updated_at (most recent first)
  const sortThreads = (

@@ -198,15 +206,84 @@ export function NavAgents() {
    setLoadingThreadId(null);
  }, [pathname]);

+  // Add event handler for completed navigation
+  useEffect(() => {
+    const handleNavigationComplete = () => {
+      console.log("NAVIGATION - Navigation event completed");
+      document.body.style.pointerEvents = "auto";
+      isNavigatingRef.current = false;
+    };
+
+    window.addEventListener("popstate", handleNavigationComplete);
+
+    return () => {
+      window.removeEventListener("popstate", handleNavigationComplete);
+      // Ensure we clean up any leftover styles
+      document.body.style.pointerEvents = "auto";
+    };
+  }, []);
+
+  // Reset isNavigatingRef when pathname changes
+  useEffect(() => {
+    isNavigatingRef.current = false;
+    document.body.style.pointerEvents = "auto";
+  }, [pathname]);
+
  // Function to handle thread click with loading state
-  const handleThreadClick = (
-    e: React.MouseEvent<HTMLAnchorElement>,
-    threadId: string,
-    url: string,
-  ) => {
-    e.preventDefault();
-    setLoadingThreadId(threadId);
-    router.push(url);
+  const handleThreadClick = (e: React.MouseEvent<HTMLAnchorElement>, threadId: string, url: string) => {
+    e.preventDefault()
+    setLoadingThreadId(threadId)
+    router.push(url)
  }

+  // Function to handle thread deletion
+  const handleDeleteThread = async (threadId: string, threadName: string) => {
+    setThreadToDelete({ id: threadId, name: threadName });
+    setIsDeleteDialogOpen(true);
+  };
+
+  const confirmDelete = async () => {
+    if (!threadToDelete || isPerformingActionRef.current) return;
+
+    // Mark action in progress
+    isPerformingActionRef.current = true;
+
+    // Close dialog first for immediate feedback
+    setIsDeleteDialogOpen(false);
+
+    const threadId = threadToDelete.id;
+    const isActive = pathname?.includes(threadId);
+
+    // Store threadToDelete in a local variable since it might be cleared
+    const deletedThread = { ...threadToDelete };
+
+    // Log operation start
+    console.log("DELETION - Starting thread deletion process", {
+      threadId: deletedThread.id,
+      isCurrentThread: isActive
+    });
+
+    // Use the centralized deletion system with completion callback
+    await performDelete(
+      threadId,
+      isActive,
+      async () => {
+        // Delete the thread
+        await deleteThread(threadId);
+
+        // Update the thread list
+        setThreads(prev => prev.filter(t => t.threadId !== threadId));
+
+        // Show success message
+        toast.success("Conversation deleted successfully");
+      },
+      // Completion callback to reset local state
+      () => {
+        setThreadToDelete(null);
+        setIsDeleting(false);
+        isPerformingActionRef.current = false;
+      }
+    );
+  };
+
  return (

@@ -351,7 +428,7 @@ export function NavAgents() {
              </a>
            </DropdownMenuItem>
            <DropdownMenuSeparator />
-            <DropdownMenuItem>
+            <DropdownMenuItem onClick={() => handleDeleteThread(thread.threadId, thread.projectName)}>
              <Trash2 className="text-muted-foreground" />
              <span>Delete</span>
            </DropdownMenuItem>

@@ -372,6 +449,16 @@ export function NavAgents() {
          </SidebarMenuItem>
        )}
      </SidebarMenu>

+      {threadToDelete && (
+        <DeleteConfirmationDialog
+          isOpen={isDeleteDialogOpen}
+          onClose={() => setIsDeleteDialogOpen(false)}
+          onConfirm={confirmDelete}
+          threadName={threadToDelete.name}
+          isDeleting={isDeleting}
+        />
+      )}
    </SidebarGroup>
  );
}

@@ -0,0 +1,70 @@
"use client"

import React from "react"
import { Loader2 } from "lucide-react"

import {
  AlertDialog,
  AlertDialogAction,
  AlertDialogCancel,
  AlertDialogContent,
  AlertDialogDescription,
  AlertDialogFooter,
  AlertDialogHeader,
  AlertDialogTitle,
} from "@/components/ui/alert-dialog"

interface DeleteConfirmationDialogProps {
  isOpen: boolean
  onClose: () => void
  onConfirm: () => void
  threadName: string
  isDeleting: boolean
}

/**
 * Confirmation dialog for deleting a conversation
 */
export function DeleteConfirmationDialog({
  isOpen,
  onClose,
  onConfirm,
  threadName,
  isDeleting,
}: DeleteConfirmationDialogProps) {
  return (
    <AlertDialog open={isOpen} onOpenChange={onClose}>
      <AlertDialogContent>
        <AlertDialogHeader>
          <AlertDialogTitle>Delete conversation</AlertDialogTitle>
          <AlertDialogDescription>
            Are you sure you want to delete the conversation{" "}
            <span className="font-semibold">"{threadName}"</span>?
            <br />
            This action cannot be undone.
          </AlertDialogDescription>
        </AlertDialogHeader>
        <AlertDialogFooter>
          <AlertDialogCancel disabled={isDeleting}>Cancel</AlertDialogCancel>
          <AlertDialogAction
            onClick={(e) => {
              e.preventDefault()
              onConfirm()
            }}
            disabled={isDeleting}
            className="bg-destructive text-white hover:bg-destructive/90"
          >
            {isDeleting ? (
              <>
                <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                Deleting...
              </>
            ) : (
              "Delete"
            )}
          </AlertDialogAction>
        </AlertDialogFooter>
      </AlertDialogContent>
    </AlertDialog>
  )
}

@@ -0,0 +1,34 @@
import React from 'react';
import { Loader2, CheckCircle, AlertCircle } from 'lucide-react';
import { useDeleteOperation } from '@/contexts/DeleteOperationContext';

export function StatusOverlay() {
  const { state } = useDeleteOperation();

  if (state.operation === 'none' || !state.isDeleting) return null;

  return (
    <div className="fixed bottom-4 right-4 z-50 flex items-center gap-2 bg-background/90 backdrop-blur p-3 rounded-lg shadow-lg border border-border">
      {state.operation === 'pending' && (
        <>
          <Loader2 className="h-5 w-5 text-muted-foreground animate-spin" />
          <span className="text-sm">Processing...</span>
        </>
      )}

      {state.operation === 'success' && (
        <>
          <CheckCircle className="h-5 w-5 text-green-500" />
          <span className="text-sm">Completed</span>
        </>
      )}

      {state.operation === 'error' && (
        <>
          <AlertCircle className="h-5 w-5 text-destructive" />
          <span className="text-sm">Failed</span>
        </>
      )}
    </div>
  );
}

@@ -0,0 +1,195 @@
import React, { createContext, useContext, useReducer, useEffect, useRef } from 'react';

type DeleteState = {
  isDeleting: boolean;
  targetId: string | null;
  isActive: boolean;
  operation: 'none' | 'pending' | 'success' | 'error';
};

type DeleteAction =
  | { type: 'START_DELETE'; id: string; isActive: boolean }
  | { type: 'DELETE_SUCCESS' }
  | { type: 'DELETE_ERROR' }
  | { type: 'RESET' };

const initialState: DeleteState = {
  isDeleting: false,
  targetId: null,
  isActive: false,
  operation: 'none'
};

function deleteReducer(state: DeleteState, action: DeleteAction): DeleteState {
  switch (action.type) {
    case 'START_DELETE':
      return {
        ...state,
        isDeleting: true,
        targetId: action.id,
        isActive: action.isActive,
        operation: 'pending'
      };
    case 'DELETE_SUCCESS':
      return {
        ...state,
        operation: 'success'
      };
    case 'DELETE_ERROR':
      return {
        ...state,
        isDeleting: false,
        operation: 'error'
      };
    case 'RESET':
      return initialState;
    default:
      return state;
  }
}

type DeleteOperationContextType = {
  state: DeleteState;
  dispatch: React.Dispatch<DeleteAction>;
  performDelete: (
    id: string,
    isActive: boolean,
    deleteFunction: () => Promise<void>,
    onComplete?: () => void
  ) => Promise<void>;
  isOperationInProgress: React.MutableRefObject<boolean>;
};

const DeleteOperationContext = createContext<DeleteOperationContextType | undefined>(undefined);

export function DeleteOperationProvider({ children }: { children: React.ReactNode }) {
  const [state, dispatch] = useReducer(deleteReducer, initialState);
  const isOperationInProgress = useRef(false);

  // Listen for state changes to handle navigation
  useEffect(() => {
    if (state.operation === 'success' && state.isActive) {
      // Delay navigation to allow UI feedback
      const timer = setTimeout(() => {
        try {
          // Use window.location for reliable navigation
          window.location.pathname = '/dashboard';
        } catch (error) {
          console.error("Navigation error:", error);
        }
      }, 500);
      return () => clearTimeout(timer);
    }
  }, [state.operation, state.isActive]);

  // Auto-reset after operations complete
  useEffect(() => {
    if (state.operation === 'success' && !state.isActive) {
      const timer = setTimeout(() => {
        dispatch({ type: 'RESET' });
        // Ensure pointer events are restored
        document.body.style.pointerEvents = "auto";
        isOperationInProgress.current = false;

        // Restore sidebar menu interactivity
        const sidebarMenu = document.querySelector(".sidebar-menu");
        if (sidebarMenu) {
          sidebarMenu.classList.remove("pointer-events-none");
        }
      }, 1000);
      return () => clearTimeout(timer);
    }

    if (state.operation === 'error') {
      // Reset on error immediately
      document.body.style.pointerEvents = "auto";
      isOperationInProgress.current = false;

      // Restore sidebar menu interactivity
      const sidebarMenu = document.querySelector(".sidebar-menu");
      if (sidebarMenu) {
        sidebarMenu.classList.remove("pointer-events-none");
      }
    }
  }, [state.operation, state.isActive]);

  const performDelete = async (
    id: string,
    isActive: boolean,
    deleteFunction: () => Promise<void>,
    onComplete?: () => void
  ) => {
    // Prevent multiple operations
    if (isOperationInProgress.current) return;
    isOperationInProgress.current = true;

    // Disable pointer events during operation
    document.body.style.pointerEvents = "none";

    // Disable sidebar menu interactions
    const sidebarMenu = document.querySelector(".sidebar-menu");
    if (sidebarMenu) {
      sidebarMenu.classList.add("pointer-events-none");
    }

    dispatch({ type: 'START_DELETE', id, isActive });

    try {
      // Execute the delete operation
      await deleteFunction();

      // Use precise timing for UI updates
      setTimeout(() => {
        dispatch({ type: 'DELETE_SUCCESS' });

        // For non-active threads, restore interaction with delay
        if (!isActive) {
          setTimeout(() => {
            document.body.style.pointerEvents = "auto";

            if (sidebarMenu) {
              sidebarMenu.classList.remove("pointer-events-none");
            }

            // Call the completion callback
            if (onComplete) onComplete();
          }, 100);
        }
      }, 50);
    } catch (error) {
      console.error("Delete operation failed:", error);

      // Reset states on error
      document.body.style.pointerEvents = "auto";
      isOperationInProgress.current = false;

      if (sidebarMenu) {
        sidebarMenu.classList.remove("pointer-events-none");
      }

      dispatch({ type: 'DELETE_ERROR' });

      // Call the completion callback
      if (onComplete) onComplete();
    }
  };

  return (
    <DeleteOperationContext.Provider value={{
      state,
      dispatch,
      performDelete,
      isOperationInProgress
    }}>
      {children}
    </DeleteOperationContext.Provider>
  );
}

export function useDeleteOperation() {
  const context = useContext(DeleteOperationContext);
  if (context === undefined) {
    throw new Error('useDeleteOperation must be used within a DeleteOperationProvider');
  }
  return context;
}

@@ -1244,6 +1244,53 @@ export const toggleThreadPublicStatus = async (
  return updateThread(threadId, { is_public: isPublic });
};

+export const deleteThread = async (threadId: string): Promise<void> => {
+  try {
+    const supabase = createClient();
+
+    // First delete all agent runs associated with this thread
+    console.log(`Deleting all agent runs for thread ${threadId}`);
+    const { error: agentRunsError } = await supabase
+      .from('agent_runs')
+      .delete()
+      .eq('thread_id', threadId);
+
+    if (agentRunsError) {
+      console.error('Error deleting agent runs:', agentRunsError);
+      throw new Error(`Error deleting agent runs: ${agentRunsError.message}`);
+    }
+
+    // Then delete all messages associated with the thread
+    console.log(`Deleting all messages for thread ${threadId}`);
+    const { error: messagesError } = await supabase
+      .from('messages')
+      .delete()
+      .eq('thread_id', threadId);
+
+    if (messagesError) {
+      console.error('Error deleting messages:', messagesError);
+      throw new Error(`Error deleting messages: ${messagesError.message}`);
+    }
+
+    // Finally, delete the thread itself
+    console.log(`Deleting thread ${threadId}`);
+    const { error: threadError } = await supabase
+      .from('threads')
+      .delete()
+      .eq('thread_id', threadId);
+
+    if (threadError) {
+      console.error('Error deleting thread:', threadError);
+      throw new Error(`Error deleting thread: ${threadError.message}`);
+    }
+
+    console.log(`Thread ${threadId} successfully deleted with all related items`);
+  } catch (error) {
+    console.error('Error deleting thread and related items:', error);
+    throw error;
+  }
+};
+
 // Function to get public projects
 export const getPublicProjects = async (): Promise<Project[]> => {
   try {