buster start, stop, restart handled well

dal 2025-05-07 18:13:56 -06:00
parent c62d09536c
commit 87fcc41e2d
5 changed files with 353 additions and 51 deletions


@@ -94,48 +94,50 @@ pub async fn auth(mut req: Request, next: Next) -> Result<Response, StatusCode>
     };

     // --- Payment Required Check START ---
-    if let Some(org_membership) = user.organizations.get(0) {
-        let org_id = org_membership.id;
-        let pg_pool = get_pg_pool();
-        let mut conn = match pg_pool.get().await {
-            Ok(conn) => conn,
-            Err(e) => {
-                tracing::error!("Failed to get DB connection for payment check: {}", e);
-                return Err(StatusCode::INTERNAL_SERVER_ERROR);
-            }
-        };
-
-        match database::schema::organizations::table
-            .filter(database::schema::organizations::id.eq(org_id))
-            .select(database::schema::organizations::payment_required)
-            .first::<bool>(&mut conn)
-            .await
-        {
-            Ok(payment_required) => {
-                if payment_required {
-                    tracing::warn!(
-                        user_id = %user.id,
-                        org_id = %org_id,
-                        "Access denied due to payment requirement for organization."
-                    );
-                    return Err(StatusCode::PAYMENT_REQUIRED);
-                }
-            }
-            Err(diesel::NotFound) => {
-                tracing::error!(
-                    user_id = %user.id,
-                    org_id = %org_id,
-                    "Organization not found during payment check."
-                );
-                return Err(StatusCode::INTERNAL_SERVER_ERROR);
-            }
-            Err(e) => {
-                tracing::error!(
-                    user_id = %user.id,
-                    org_id = %org_id,
-                    "Database error during payment check: {}", e
-                );
-                return Err(StatusCode::INTERNAL_SERVER_ERROR);
-            }
-        }
-    }
+    if env::var("ENVIRONMENT").unwrap_or_default() == "production" {
+        if let Some(org_membership) = user.organizations.get(0) {
+            let org_id = org_membership.id;
+            let pg_pool = get_pg_pool();
+            let mut conn = match pg_pool.get().await {
+                Ok(conn) => conn,
+                Err(e) => {
+                    tracing::error!("Failed to get DB connection for payment check: {}", e);
+                    return Err(StatusCode::INTERNAL_SERVER_ERROR);
+                }
+            };
+
+            match database::schema::organizations::table
+                .filter(database::schema::organizations::id.eq(org_id))
+                .select(database::schema::organizations::payment_required)
+                .first::<bool>(&mut conn)
+                .await
+            {
+                Ok(payment_required) => {
+                    if payment_required {
+                        tracing::warn!(
+                            user_id = %user.id,
+                            org_id = %org_id,
+                            "Access denied due to payment requirement for organization."
+                        );
+                        return Err(StatusCode::PAYMENT_REQUIRED);
+                    }
+                }
+                Err(diesel::NotFound) => {
+                    tracing::error!(
+                        user_id = %user.id,
+                        org_id = %org_id,
+                        "Organization not found during payment check."
+                    );
+                    return Err(StatusCode::INTERNAL_SERVER_ERROR);
+                }
+                Err(e) => {
+                    tracing::error!(
+                        user_id = %user.id,
+                        org_id = %org_id,
+                        "Database error during payment check: {}", e
+                    );
+                    return Err(StatusCode::INTERNAL_SERVER_ERROR);
+                }
+            }
+        }
+    }
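Note: the only behavioral change in this hunk is the new environment gate; the payment check itself is unchanged, just re-indented. Because the gate is an exact string comparison, anything other than ENVIRONMENT=production (including an unset variable) skips billing enforcement entirely. A minimal sketch of that contract, with the check pulled into a hypothetical helper that is not part of this commit:

/// Hypothetical helper (not in this commit) mirroring the gate above.
/// It takes the variable's value as a parameter so the behavior can be
/// exercised without mutating process-wide environment state.
fn billing_enforced(environment: Option<&str>) -> bool {
    environment.unwrap_or_default() == "production"
}

fn main() {
    assert!(billing_enforced(Some("production")));
    assert!(!billing_enforced(Some("Production"))); // comparison is case-sensitive
    assert!(!billing_enforced(None)); // unset variable fails open: no payment check
    println!("gate behaves as sketched");
}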


@@ -1,4 +1,5 @@
 use std::fs;
+use std::io::{self, Write};
 use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
 use crate::error::BusterError;
@@ -10,6 +11,7 @@ use dirs;
 #[derive(RustEmbed)]
 #[folder = "../../"]
 #[include = "docker-compose.yml"]
+#[include = "supabase/.env.example"]
 #[include = "supabase/**/*"]
 #[exclude = "supabase/volumes/db/data/**/*"]
 #[exclude = "supabase/volumes/storage/**/*"]
@@ -60,20 +62,32 @@ async fn setup_persistent_app_environment() -> Result<PathBuf, BusterError> {
     let supabase_volumes_functions_path = app_base_dir.join("supabase/volumes/functions");
     fs::create_dir_all(supabase_volumes_functions_path).map_err(|e| BusterError::CommandError(format!("Failed to create supabase/volumes/functions in persistent app dir: {}", e)))?;

-    let local_dotenv_path = PathBuf::from("/Users/dallin/buster/buster/.env");
-    if local_dotenv_path.exists() {
-        let target_dotenv_path = app_base_dir.join(".env");
-        fs::copy(&local_dotenv_path, &target_dotenv_path).map_err(|e| {
-            BusterError::CommandError(format!(
-                "Failed to copy local .env from {} to {}: {}",
-                local_dotenv_path.display(),
-                target_dotenv_path.display(),
-                e
-            ))
-        })?;
-    } else {
-        println!("Warning: Specified .env file not found at {}. Services might not configure correctly if .env is required by docker-compose.yml.", local_dotenv_path.display());
-    }
+    let target_dotenv_path = app_base_dir.join(".env");
+
+    // Always use .env.example from embedded assets
+    let example_env_filename = "supabase/.env.example";
+    let asset = StaticAssets::get(example_env_filename)
+        .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", example_env_filename)))?;
+    fs::write(&target_dotenv_path, asset.data).map_err(|e| {
+        BusterError::CommandError(format!(
+            "Failed to write {} to {}: {}",
+            example_env_filename,
+            target_dotenv_path.display(),
+            e
+        ))
+    })?;
+
+    // Additionally copy the .env to the supabase subdirectory
+    let supabase_dotenv_path = app_base_dir.join("supabase/.env");
+    fs::copy(&target_dotenv_path, &supabase_dotenv_path).map_err(|e| {
+        BusterError::CommandError(format!(
+            "Failed to copy .env from {} to {}: {}",
+            target_dotenv_path.display(),
+            supabase_dotenv_path.display(),
+            e
+        ))
+    })?;

     Ok(app_base_dir)
 }
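Note: this change removes the hardcoded developer-machine path, but the embedded .env.example is now written to the persistent app directory unconditionally, so any local edits to that .env are overwritten the next time setup runs. If preserving user customizations ever matters, a guarded variant along these lines (an assumption, not something this commit does) would seed the file only once:

use std::fs;
use std::io;
use std::path::Path;

/// Sketch only (not in this commit): write the embedded template the first
/// time, but leave an existing, possibly user-edited .env untouched.
/// `template` stands in for `asset.data` from the RustEmbed lookup above.
fn seed_env_if_missing(target: &Path, template: &[u8]) -> io::Result<()> {
    if target.exists() {
        return Ok(()); // keep the user's customized .env
    }
    fs::write(target, template)
}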
@@ -143,4 +157,155 @@ pub async fn start() -> Result<(), BusterError> {
 pub async fn stop() -> Result<(), BusterError> {
     run_docker_compose_command(&["down"], "Stopping").await
 }
+
+pub async fn restart() -> Result<(), BusterError> {
+    println!("WARNING: This command will stop all Buster services, attempt to remove their current images, and then restart them.");
+    println!("This can lead to a complete wipe of the Buster database and any other local service data.");
+    println!("This action is irreversible.");
+    print!("Are you sure you want to proceed? (yes/No): ");
+    io::stdout().flush().map_err(|e| BusterError::CommandError(format!("Failed to flush stdout: {}", e)))?;
+
+    let mut confirmation = String::new();
+    io::stdin().read_line(&mut confirmation).map_err(|e| BusterError::CommandError(format!("Failed to read user input: {}", e)))?;
+
+    if confirmation.trim().to_lowercase() != "yes" {
+        println!("Restart cancelled by user.");
+        return Ok(());
+    }
+
+    let persistent_app_dir = setup_persistent_app_environment().await?;
+
+    let pb = ProgressBar::new_spinner();
+    pb.enable_steady_tick(Duration::from_millis(120));
+    pb.set_style(
+        ProgressStyle::default_spinner()
+            .tick_strings(&["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸", ""])
+            .template("{spinner:.blue} {msg}")
+            .expect("Failed to set progress bar style"),
+    );
+
+    pb.set_message("Rebuilding Buster services (step 1/4): Stopping services...");
+    let mut down_cmd = Command::new("docker");
+    down_cmd.current_dir(&persistent_app_dir)
+        .arg("compose")
+        .arg("-p")
+        .arg("buster")
+        .arg("-f")
+        .arg("docker-compose.yml")
+        .arg("down");
+    let down_output = down_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose down: {}", e)))?;
+
+    if !down_output.status.success() {
+        let err_msg = format!(
+            "docker compose down failed (status: {}). Logs:
+Working directory: {}
+Stdout:
+{}
+Stderr:
+{}",
+            down_output.status,
+            persistent_app_dir.display(),
+            String::from_utf8_lossy(&down_output.stdout),
+            String::from_utf8_lossy(&down_output.stderr)
+        );
+        pb.abandon_with_message("Error: docker compose down failed. See console for details.");
+        println!("\nDocker Compose Down Error Details:\n{}", err_msg);
+        return Err(BusterError::CommandError(err_msg));
+    }
+
+    pb.set_message("Rebuilding Buster services (step 2/4): Identifying service images...");
+    let mut config_images_cmd = Command::new("docker");
+    config_images_cmd.current_dir(&persistent_app_dir)
+        .arg("compose")
+        .arg("-p")
+        .arg("buster")
+        .arg("-f")
+        .arg("docker-compose.yml")
+        .arg("config")
+        .arg("--images");
+    let config_images_output = config_images_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose config --images: {}", e)))?;
+
+    if !config_images_output.status.success() {
+        let err_msg = format!(
+            "docker compose config --images failed (status: {}). Logs:
+Working directory: {}
+Stdout:
+{}
+Stderr:
+{}",
+            config_images_output.status,
+            persistent_app_dir.display(),
+            String::from_utf8_lossy(&config_images_output.stdout),
+            String::from_utf8_lossy(&config_images_output.stderr)
+        );
+        pb.abandon_with_message("Error: Failed to identify service images. See console for details.");
+        println!("\nDocker Compose Config --images Error Details:\n{}", err_msg);
+        return Err(BusterError::CommandError(err_msg));
+    }
+
+    let image_list_str = String::from_utf8_lossy(&config_images_output.stdout);
+    let image_names: Vec<&str> = image_list_str.lines().filter(|line| !line.trim().is_empty()).collect();
+
+    if image_names.is_empty() {
+        pb.println("No images identified by docker-compose config --images. Skipping image removal.");
+    } else {
+        pb.set_message(format!("Rebuilding Buster services (step 3/4): Removing {} service image(s)...", image_names.len()));
+        for (index, image_name) in image_names.iter().enumerate() {
+            let current_image_name = image_name.trim();
+            if current_image_name.is_empty() {
+                continue;
+            }
+            pb.set_message(format!(
+                "Rebuilding Buster services (step 3/4): Removing image {}/{} ('{}')...",
+                index + 1,
+                image_names.len(),
+                current_image_name
+            ));
+            let mut rmi_cmd = Command::new("docker");
+            rmi_cmd.arg("image").arg("rm").arg(current_image_name);
+            let rmi_output = rmi_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker image rm {}: {}", current_image_name, e)))?;
+            // Log warning on failure but continue, as image might not exist or be in use by other non-project containers
+            if !rmi_output.status.success() {
+                let rmi_stderr = String::from_utf8_lossy(&rmi_output.stderr);
+                if !rmi_stderr.trim().is_empty() && !rmi_stderr.contains("No such image") { // Don't warn if image was already gone
+                    pb.println(format!("Warning: Could not remove image '{}'. It might be in use or already removed. Stderr: {}", current_image_name, rmi_stderr.trim()));
+                }
+            }
+        }
+    }
+
+    pb.set_message("Rebuilding Buster services (step 4/4): Starting services (pulling images if needed)...");
+    let mut up_cmd = Command::new("docker");
+    up_cmd.current_dir(&persistent_app_dir)
+        .arg("compose")
+        .arg("-p")
+        .arg("buster")
+        .arg("-f")
+        .arg("docker-compose.yml")
+        .arg("up")
+        .arg("-d")
.arg("--pull") // Ensure latest images are pulled
.arg("--force-recreate"); // Recreate containers even if config hasn't changed
+    let up_output = up_cmd.output().map_err(|e| BusterError::CommandError(format!("Failed to execute docker compose up: {}", e)))?;
+
+    if up_output.status.success() {
+        pb.finish_with_message("Buster services rebuilt and started successfully.");
+        Ok(())
+    } else {
+        let err_msg = format!(
+            "docker compose up failed after image purge (status: {}). Logs:\nWorking directory: {}\nStdout:\n{}\nStderr:\n{}",
+            up_output.status,
+            persistent_app_dir.display(),
+            String::from_utf8_lossy(&up_output.stdout),
+            String::from_utf8_lossy(&up_output.stderr)
+        );
+        pb.abandon_with_message("Error: docker compose up failed after image purge. See console for details.");
+        println!("\nDocker Compose Up Error Details:\n{}", err_msg);
+        Err(BusterError::CommandError(err_msg))
+    }
+}
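Note: the restart flow shells out to `docker compose -p buster -f docker-compose.yml ...` three times (down, config --images, up), rebuilding the same argument prefix each time. A small helper along these lines (a sketch, not code from this commit) would keep the project and file flags in one place:

use std::io;
use std::path::Path;
use std::process::{Command, Output};

/// Sketch (not in this commit): shared prefix for the three compose
/// invocations in restart(). `dir` is the persistent app directory.
fn compose(dir: &Path, args: &[&str]) -> io::Result<Output> {
    Command::new("docker")
        .current_dir(dir)
        .args(["compose", "-p", "buster", "-f", "docker-compose.yml"])
        .args(args)
        .output()
}

// Usage, mirroring the steps above:
//   compose(&persistent_app_dir, &["down"])?;
//   compose(&persistent_app_dir, &["config", "--images"])?;
//   compose(&persistent_app_dir, &["up", "-d", "--pull", "always", "--force-recreate"])?;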


@@ -80,8 +80,12 @@ pub enum Commands {
         #[arg(long)]
         path: Option<String>,
     },
+    /// Start the Buster services
     Start,
+    /// Stop the Buster services
     Stop,
+    /// Restart the Buster services
+    Restart,
 }

 #[derive(Parser)]
@@ -140,6 +144,7 @@ async fn main() {
         Commands::Parse { path } => commands::parse::parse_models_command(path).await,
         Commands::Start => run::start().await.map_err(anyhow::Error::from),
         Commands::Stop => run::stop().await.map_err(anyhow::Error::from),
+        Commands::Restart => run::restart().await.map_err(anyhow::Error::from),
     };
     if let Err(e) = result {

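Note: the dispatch above follows clap's derive pattern, where each enum variant becomes a subcommand, so the new variant surfaces to users as `buster restart`. A self-contained sketch of the shape, with the variant list trimmed (the real `Commands` enum carries many more variants and arguments):

use clap::{Parser, Subcommand};

#[derive(Parser)]
struct Cli {
    #[command(subcommand)]
    command: Cmd,
}

#[derive(Subcommand)]
enum Cmd {
    /// Start the Buster services
    Start,
    /// Stop the Buster services
    Stop,
    /// Restart the Buster services
    Restart,
}

fn main() {
    // `buster restart` parses to Cmd::Restart; the real binary then awaits run::restart().
    match Cli::parse().command {
        Cmd::Start => println!("would call run::start()"),
        Cmd::Stop => println!("would call run::stop()"),
        Cmd::Restart => println!("would call run::restart()"),
    }
}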

@@ -32,6 +32,7 @@ services:
       - EMBEDDING_PROVIDER=${EMBEDDING_PROVIDER}
       - EMBEDDING_MODEL=${EMBEDDING_MODEL}
       - COHERE_API_KEY=${COHERE_API_KEY}
+      - ENVIRONMENT=${ENVIRONMENT}
     ports:
       - "3001:3001"
       - "3000:3000"

supabase/.env.example (new file, 129 lines)

@@ -0,0 +1,129 @@
# General Application Settings
ENVIRONMENT="development"
BUSTER_URL="http://localhost:3000"
BUSTER_WH_TOKEN="buster-wh-token"
# --- API Service Specific ---
# Direct Database Connection (for API service and potentially others)
DATABASE_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres"
# Pooled Database Connection (for API service, uses Supavisor)
POOLER_URL="postgresql://postgres.your-tenant-id:your-super-secret-and-long-postgres-password@supavisor:5432/postgres"
# Redis Connection
REDIS_URL="redis://buster-redis:6379"
# Supabase Connection for API service
SUPABASE_URL="http://kong:8000"
SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q"
# --- LLM / AI Services ---
EMBEDDING_PROVIDER="ollama"
EMBEDDING_MODEL="mxbai-embed-large"
COHERE_API_KEY=""
OPENAI_API_KEY="" # For OpenAI models or Supabase Studio assistant
LLM_API_KEY="test-key"
LLM_BASE_URL="http://litellm:4001"
# --- Web Client (Next.js) Specific ---
NEXT_PUBLIC_API_URL="http://localhost:3001" # External URL for the API service (buster-api)
NEXT_PUBLIC_URL="http://localhost:3000" # External URL for the Web service (buster-web)
NEXT_PUBLIC_SUPABASE_URL="http://kong:8000" # External URL for Supabase (Kong proxy)
NEXT_PUBLIC_WS_URL="ws://localhost:3001"
NEXT_PUBLIC_SUPABASE_ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE"
NEXT_PRIVATE_SUPABASE_SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q"
#################################################
# Supabase Stack Configuration Variables
# These are primarily used by the Supabase services themselves
# (defined in supabase/docker-compose.yml)
# and are sourced from this .env file when `docker compose up` is run.
#################################################
############
# Secrets
############
POSTGRES_PASSWORD="your-super-secret-and-long-postgres-password"
JWT_SECRET="your-super-secret-jwt-token-with-at-least-32-characters-long"
ANON_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE"
SERVICE_ROLE_KEY="eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q"
DASHBOARD_USERNAME="supabase"
DASHBOARD_PASSWORD="this_password_is_insecure_and_should_be_updated"
############
# Database
############
POSTGRES_HOST="db"
POSTGRES_DB="postgres"
POSTGRES_PORT="5432"
############
# Supavisor -- Database pooler
############
POOLER_PROXY_PORT_TRANSACTION="6543"
POOLER_DEFAULT_POOL_SIZE="20"
POOLER_MAX_CLIENT_CONN="100"
POOLER_TENANT_ID="your-tenant-id"
############
# API Proxy - Kong
############
KONG_HTTP_PORT="8000"
KONG_HTTPS_PORT="8443"
############
# API - PostgREST
############
PGRST_DB_SCHEMAS="public,storage,graphql_public"
############
# Auth - GoTrue
############
SITE_URL="http://localhost:3000" # Default base URL for the site (used in emails, etc.)
ADDITIONAL_REDIRECT_URLS=""
JWT_EXPIRY="3600"
DISABLE_SIGNUP="false"
API_EXTERNAL_URL="http://localhost:8000" # Publicly accessible URL for the Supabase API (via Kong)
## Mailer Config
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
MAILER_URLPATHS_INVITE="/auth/v1/verify"
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
## Email auth
ENABLE_EMAIL_SIGNUP="true"
ENABLE_EMAIL_AUTOCONFIRM="true"
SMTP_ADMIN_EMAIL="admin@buster.so"
SMTP_HOST="supabase-mail"
SMTP_PORT="2500"
SMTP_USER=""
SMTP_PASS=""
SMTP_SENDER_NAME="Buster"
ENABLE_ANONYMOUS_USERS="true"
## Phone auth
ENABLE_PHONE_SIGNUP="true"
ENABLE_PHONE_AUTOCONFIRM="true"
############
# Studio - Supabase Dashboard
############
STUDIO_DEFAULT_ORGANIZATION="Default Organization"
STUDIO_DEFAULT_PROJECT="Default Project"
STUDIO_PORT="3003"
SUPABASE_PUBLIC_URL="http://localhost:8000" # Public URL for Supabase (Kong), used by Studio
# Image Proxy
IMGPROXY_ENABLE_WEBP_DETECTION="true"
############
# Functions - Supabase Edge Functions
############
FUNCTIONS_VERIFY_JWT="false"
############
# Logs - Logflare
############
LOGFLARE_LOGGER_BACKEND_API_KEY="your-super-secret-and-long-logflare-key"
LOGFLARE_API_KEY="your-super-secret-and-long-logflare-key"
DOCKER_SOCKET_LOCATION="/var/run/docker.sock"
GOOGLE_PROJECT_ID="GOOGLE_PROJECT_ID"
GOOGLE_PROJECT_NUMBER="GOOGLE_PROJECT_NUMBER"
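Note on the defaults above: JWT_SECRET, ANON_KEY, and SERVICE_ROLE_KEY are the publicly known Supabase demo values (the two keys are HS256 JWTs signed with the demo secret), which is fine for local development only. For any deployment reachable by others, generate a fresh secret and re-mint both keys, and replace the placeholder passwords as well. One way to mint the keys, assuming the `jsonwebtoken` and `serde` crates (a sketch, not part of this commit):

use jsonwebtoken::{encode, EncodingKey, Header};
use serde::Serialize;

#[derive(Serialize)]
struct Claims {
    role: String, // "anon" or "service_role"
    iss: String,
    iat: u64,
    exp: u64,
}

fn mint_key(secret: &str, role: &str) -> Result<String, jsonwebtoken::errors::Error> {
    let claims = Claims {
        role: role.to_string(),
        iss: "supabase".to_string(),
        iat: 1_700_000_000, // issued-at, seconds since epoch (example value)
        exp: 1_900_000_000, // expiry well in the future (example value)
    };
    // Header::default() is HS256, matching how the demo keys are signed.
    encode(&Header::default(), &claims, &EncodingKey::from_secret(secret.as_bytes()))
}

fn main() {
    let secret = "replace-with-your-own-secret-of-at-least-32-characters";
    println!("ANON_KEY={}", mint_key(secret, "anon").expect("signing failed"));
    println!("SERVICE_ROLE_KEY={}", mint_key(secret, "service_role").expect("signing failed"));
}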