mirror of https://github.com/buster-so/buster.git
ok some quick changes for bugs
This commit is contained in:
parent
54c88cfdda
commit
abf09eed6b
@@ -130,7 +130,7 @@ jobs:
      - name: Extract version from Cargo.toml
        id: get_version
        run: |
-         VERSION=$(grep '^version =' cli/Cargo.toml | sed 's/version = "\(.*\)"/\1/')
+         VERSION=$(grep '^version =' cli/cli/Cargo.toml | sed 's/version = "\(.*\)"/\1/')
          echo "version=$VERSION" >> $GITHUB_OUTPUT
          echo "Extracted version: $VERSION"
      - name: Create Release
@@ -25,7 +25,7 @@ use crate::{agent::ModeProvider, Agent, AgentError, AgentExt, AgentThread}; // A
use litellm::AgentMessage;

// Import the semantic layer models
-use semantic_layer::models::SemanticLayerSpec; // Assuming models.rs is accessible like this
+use semantic_layer::models::Model; // Assuming models.rs is accessible like this

// Import AgentState and determine_agent_state (assuming they are pub in modes/mod.rs or similar)
// If not, they might need to be moved or re-exported.
@@ -140,23 +140,18 @@ impl BusterMultiAgent {
        let dataset_descriptions: Vec<String> = permissioned_datasets
            .into_iter()
            .filter_map(|ds| ds.yml_content) // Get Some(String), filter out None
-           .map(|content| serde_yaml::from_str::<SemanticLayerSpec>(&content)) // Parse String -> Result<SemanticLayerSpec, Error>
+           .map(|content| serde_yaml::from_str::<Model>(&content)) // Parse String -> Result<Model, Error>
            .filter_map(|result| {
                // Handle Result
                match result {
-                   Ok(parsed_spec) => {
-                       // Extract info from the first model if available
-                       if let Some(model) = parsed_spec.models.first() {
-                           // model.description is Option<String>, handle it
-                           let description = model
-                               .description
-                               .as_deref()
-                               .unwrap_or("No description available");
-                           Some(format!("{}: {}", model.name, description))
-                       } else {
-                           tracing::warn!("Parsed YAML has no models");
-                           None
-                       }
+                   Ok(model) => {
+                       // model.description is Option<String>, handle it
+                       let description = model
+                           .description
+                           .as_deref()
+                           .unwrap_or("No description available");
+                       Some(format!("{}: {}", model.name, description))
                    }
                    Err(e) => {
                        tracing::warn!("Failed to parse dataset YAML: {}", e);
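For reference, a minimal standalone sketch of the new per-model parsing path. The Model struct below is a trimmed-down stand-in (only the name and description fields that appear in the diff); the sample YAML and the describe helper are illustrative assumptions, not code from the repository:

use serde::Deserialize;

// Assumed minimal shape of semantic_layer::models::Model for this sketch.
#[derive(Debug, Deserialize)]
struct Model {
    name: String,
    description: Option<String>,
}

// Turn each YAML document into a "name: description" line, skipping unparsable ones.
fn describe(yml_contents: Vec<String>) -> Vec<String> {
    yml_contents
        .into_iter()
        .map(|content| serde_yaml::from_str::<Model>(&content))
        .filter_map(|result| match result {
            Ok(model) => {
                let description = model.description.as_deref().unwrap_or("No description available");
                Some(format!("{}: {}", model.name, description))
            }
            Err(e) => {
                eprintln!("Failed to parse dataset YAML: {}", e);
                None
            }
        })
        .collect()
}

fn main() {
    let yml = "name: culture\ndescription: Core model for cultural groups\n".to_string();
    println!("{:?}", describe(vec![yml]));
}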
@@ -175,11 +170,12 @@ impl BusterMultiAgent {
        // Create the mode provider
        let mode_provider = Arc::new(BusterModeProvider { agent_data });

-       let model = if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" {
-           "o4-mini".to_string()
-       } else {
-           "gemini-2.5-pro-exp-03-25".to_string()
-       };
+       let model =
+           if env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()) == "local" {
+               "o4-mini".to_string()
+           } else {
+               "gemini-2.5-pro-exp-03-25".to_string()
+           };

        // Create agent, passing the provider
        let agent = Arc::new(Agent::new(
@@ -1,19 +1,13 @@
use std::collections::{HashMap, HashSet};
use std::{env, sync::Arc, time::Instant};
+use database::enums::DataSourceType;
use tokio::sync::Mutex;

use anyhow::{Context, Result};
use async_trait::async_trait;
use braintrust::{get_prompt_system_message, BraintrustClient};
use chrono::{DateTime, Utc};
use cohere_rust::{
    api::rerank::{ReRankModel, ReRankRequest},
    Cohere,
};
-use database::{
-    enums::DataSourceType,
-    pool::get_pg_pool,
-    schema::datasets,
-    schema::data_sources,
-};
use diesel::prelude::*;
@@ -25,12 +19,11 @@ use serde_json::Value;
use tracing::{debug, error, info, warn};
use uuid::Uuid;
use dataset_security::{get_permissioned_datasets, PermissionedDataset};
use sqlx::PgPool;
use stored_values;
use rerank::Reranker;

// Import SemanticLayerSpec
-use semantic_layer::models::SemanticLayerSpec;
+use semantic_layer::models::Model;

use crate::{agent::Agent, tools::ToolExecutor};
@@ -1179,13 +1172,12 @@ fn extract_searchable_dimensions(yml_content: &str) -> Result<Vec<SearchableDime
    let mut searchable_dimensions = Vec::new();

    // Try parsing with SemanticLayerSpec first
-   match serde_yaml::from_str::<SemanticLayerSpec>(yml_content) {
-       Ok(spec) => {
+   match serde_yaml::from_str::<Model>(yml_content) {
+       Ok(model) => {
            debug!("Successfully parsed yml_content with SemanticLayerSpec for extract_searchable_dimensions");
-           for model in spec.models {
-               for dimension in model.dimensions {
-                   if dimension.searchable {
-                       searchable_dimensions.push(SearchableDimension {
+           for dimension in model.dimensions {
+               if dimension.searchable {
+                   searchable_dimensions.push(SearchableDimension {
                        model_name: model.name.clone(),
                        dimension_name: dimension.name.clone(),
                        // The dimension_path might need adjustment if its usage relies on the old dynamic structure.
@@ -1193,7 +1185,6 @@ fn extract_searchable_dimensions(yml_content: &str) -> Result<Vec<SearchableDime
                        dimension_path: vec!["models".to_string(), model.name.clone(), "dimensions".to_string(), dimension.name],
                    });
                }
            }
-           }
        }
        Err(e_spec) => {
@@ -1205,20 +1196,16 @@ fn extract_searchable_dimensions(yml_content: &str) -> Result<Vec<SearchableDime
            let yaml: serde_yaml::Value = serde_yaml::from_str(yml_content)
                .context("Failed to parse dataset YAML content (fallback)")?;

-           if let Some(models) = yaml["models"].as_sequence() {
-               for model_val in models {
-                   let model_name = model_val["name"].as_str().unwrap_or("unknown_model").to_string();
-                   if let Some(dimensions) = model_val["dimensions"].as_sequence() {
-                       for dimension_val in dimensions {
-                           if let Some(true) = dimension_val["searchable"].as_bool() {
-                               let dimension_name = dimension_val["name"].as_str().unwrap_or("unknown_dimension").to_string();
-                               searchable_dimensions.push(SearchableDimension {
+           if let Some(dimensions) = yaml["dimensions"].as_sequence() {
+               for dimension_val in dimensions {
+                   let model_name = dimension_val["model"].as_str().unwrap_or("unknown_model").to_string();
+                   if let Some(true) = dimension_val["searchable"].as_bool() {
+                       let dimension_name = dimension_val["name"].as_str().unwrap_or("unknown_dimension").to_string();
+                       searchable_dimensions.push(SearchableDimension {
                            model_name: model_name.clone(),
                            dimension_name: dimension_name.clone(),
                            dimension_path: vec!["models".to_string(), model_name.clone(), "dimensions".to_string(), dimension_name],
                        });
                    }
                }
            }
-           }
-       }
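A condensed sketch of the new fallback traversal, which reads dimensions at the top level of the YAML along with an optional per-dimension model key. The SearchableDimension struct here is reduced to the two fields visible in the hunk, and the sample YAML in main is an assumption:

use serde_yaml::Value;

#[derive(Debug)]
struct SearchableDimension {
    model_name: String,
    dimension_name: String,
}

fn fallback_searchable(yml_content: &str) -> Result<Vec<SearchableDimension>, serde_yaml::Error> {
    let yaml: Value = serde_yaml::from_str(yml_content)?;
    let mut out = Vec::new();
    if let Some(dimensions) = yaml["dimensions"].as_sequence() {
        for dimension_val in dimensions {
            // Only keep dimensions explicitly flagged searchable: true
            if let Some(true) = dimension_val["searchable"].as_bool() {
                out.push(SearchableDimension {
                    model_name: dimension_val["model"].as_str().unwrap_or("unknown_model").to_string(),
                    dimension_name: dimension_val["name"].as_str().unwrap_or("unknown_dimension").to_string(),
                });
            }
        }
    }
    Ok(out)
}

fn main() {
    let yml = "dimensions:\n  - name: subscription_status\n    model: subscriptions\n    searchable: true\n";
    println!("{:?}", fallback_searchable(yml));
}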
@ -1231,13 +1218,12 @@ fn extract_searchable_dimensions(yml_content: &str) -> Result<Vec<SearchableDime
|
|||
fn extract_database_info_from_yaml(yml_content: &str) -> Result<HashMap<String, HashMap<String, HashMap<String, Vec<String>>>>> {
|
||||
let mut database_info: HashMap<String, HashMap<String, HashMap<String, Vec<String>>>> = HashMap::new();
|
||||
|
||||
match serde_yaml::from_str::<SemanticLayerSpec>(yml_content) {
|
||||
Ok(spec) => {
|
||||
match serde_yaml::from_str::<Model>(yml_content) {
|
||||
Ok(model) => {
|
||||
debug!("Successfully parsed yml_content with SemanticLayerSpec for extract_database_info_from_yaml");
|
||||
for model in spec.models {
|
||||
let db_name = model.database.as_deref().unwrap_or("unknown_db").to_string();
|
||||
let sch_name = model.schema.as_deref().unwrap_or("unknown_schema").to_string();
|
||||
let tbl_name = model.name.clone(); // model.name is table name
|
||||
let db_name = model.database.as_deref().unwrap_or("unknown_db").to_string();
|
||||
let sch_name = model.schema.as_deref().unwrap_or("unknown_schema").to_string();
|
||||
let tbl_name = model.name.clone(); // model.name is table name
|
||||
|
||||
let mut columns = Vec::new();
|
||||
for dim in model.dimensions {
|
||||
|
@@ -1259,7 +1245,6 @@ fn extract_database_info_from_yaml(yml_content: &str) -> Result<HashMap<String,
                .entry(sch_name)
                .or_default()
                .insert(tbl_name, columns);
-           }
        }
        Err(e_spec) => {
            warn!(
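For context, a minimal sketch of how a single parsed Model now feeds the database -> schema -> table -> columns map. The struct definitions are trimmed stand-ins; any field not visible in the diff is an assumption:

use std::collections::HashMap;

// Assumed minimal shapes for this sketch.
struct Dimension { name: String }
struct Model {
    name: String,
    database: Option<String>,
    schema: Option<String>,
    dimensions: Vec<Dimension>,
}

type DatabaseInfo = HashMap<String, HashMap<String, HashMap<String, Vec<String>>>>;

fn database_info_from_model(model: Model) -> DatabaseInfo {
    let mut database_info: DatabaseInfo = HashMap::new();
    let db_name = model.database.as_deref().unwrap_or("unknown_db").to_string();
    let sch_name = model.schema.as_deref().unwrap_or("unknown_schema").to_string();
    let tbl_name = model.name.clone(); // model.name is the table name
    // Column names come straight from the model's dimensions.
    let columns: Vec<String> = model.dimensions.into_iter().map(|d| d.name).collect();
    database_info
        .entry(db_name)
        .or_default()
        .entry(sch_name)
        .or_default()
        .insert(tbl_name, columns);
    database_info
}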
@@ -148,6 +148,9 @@ pub async fn deploy_datasets_handler_core(
            let now = Utc::now();
            let dataset_id = existing_dataset_ids.get(&req.name).copied().unwrap_or_else(|| req.id.unwrap_or_else(Uuid::new_v4));

+           // Use req.database as a fallback for database_identifier
+           let final_database_identifier = req.database_identifier.clone().or_else(|| req.database.clone());
+
            let dataset = database::models::Dataset { // Incorrect path
                id: dataset_id,
                name: req.name.clone(),
@@ -168,7 +171,7 @@ pub async fn deploy_datasets_handler_core(
                organization_id: organization_id,
                model: req.model.clone(),
                yml_file: req.yml_file.clone(), // Ensure yml_file is included
-               database_identifier: req.database_identifier.clone(), // This was req.database before, ensure it's correct
+               database_identifier: final_database_identifier, // This was req.database before, ensure it's correct
            };
            datasets_to_upsert_map.insert((req.name.clone(), data_source.id), dataset);
        }
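The fallback itself is just an Option::or_else chain; a small sketch with a hypothetical request type (the two field names follow the diff, the struct itself is assumed):

// Hypothetical request shape, for illustration only.
struct DeployDatasetReq {
    database_identifier: Option<String>,
    database: Option<String>,
}

fn resolve_database_identifier(req: &DeployDatasetReq) -> Option<String> {
    // Prefer the explicit identifier, fall back to req.database when it is absent.
    req.database_identifier.clone().or_else(|| req.database.clone())
}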
@@ -1,10 +1,5 @@
use serde::{Deserialize, Serialize};

-#[derive(Debug, Deserialize, Serialize, PartialEq)]
-pub struct SemanticLayerSpec {
-    pub models: Vec<Model>,
-}
-
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct Model {
    pub name: String,
@@ -75,167 +70,10 @@ pub struct Argument {
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct Relationship {
    pub name: String,
-   pub primary_key: String,
-   pub foreign_key: String,
+   pub source_col: String,
+   pub ref_col: String,
    #[serde(rename = "type")]
    pub type_: Option<String>, // 'type' is optional according to spec
    pub cardinality: Option<String>, // 'cardinality' is optional
    pub description: Option<String>,
}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use serde_yaml;
-
-    #[test]
-    fn test_deserialize_model_file() {
-        let yaml_content = r#"
-models:
-  - name: culture
-    description: Core model for cultural groups
-    original_file_path: "models/core/culture.sql"
-    dimensions:
-      - name: cultureid
-        description: Unique identifier for the culture
-      - name: name
-        description: Culture name
-        options: ["Western", "Eastern"]
-    measures:
-      - name: revenue
-        description: Revenue generated by the culture
-    filters:
-      - name: active_subscribed_customer
-        expr: logins.login_count > {threshold} AND subscriptions.subscription_status = 'active'
-        args:
-          - name: threshold
-            type: integer
-            description: Minimum number of logins
-        description: Customers with logins above threshold and active subscription
-    metrics:
-      - name: popular_product_revenue
-        expr: SUM(revenue) WHERE culture_products.product_count > 5
-        description: Revenue from cultures with popular products
-    entities:
-      - name: logins
-        primary_key: cultureid
-        foreign_key: cultureid
-        type: LEFT
-        cardinality: one-to-many
-        description: Links to login activity
-      - name: subscriptions
-        primary_key: cultureid
-        foreign_key: cultureid
-        cardinality: one-to-one
-        description: Links to subscription data (no type, LLM decides)
-      - name: culture_products
-        primary_key: cultureid
-        foreign_key: cultureid
-        cardinality: many-to-many
-        description: Links to product associations (many-to-many via junction)
-  - name: logins
-    description: Tracks user logins by culture
-    dimensions:
-      - name: cultureid
-        description: Foreign key to culture
-    measures:
-      - name: login_count
-        description: Number of logins
-    entities:
-      - name: culture
-        primary_key: cultureid
-        foreign_key: cultureid
-        cardinality: many-to-one
-  - name: subscriptions
-    description: Subscription status for cultures
-    dimensions:
-      - name: cultureid
-        description: Foreign key to culture
-      - name: subscription_status
-        description: Current subscription status
-        options: ["active", "inactive"]
-    entities:
-      - name: culture
-        primary_key: cultureid
-        foreign_key: cultureid
-        cardinality: one-to-one
-  - name: culture_products
-    description: Junction table linking cultures to products
-    dimensions:
-      - name: cultureid
-        description: Foreign key to culture
-      - name: productid
-        description: Foreign key to products
-    measures:
-      - name: product_count
-        description: Number of products in this association
-    entities:
-      - name: culture
-        primary_key: cultureid
-        foreign_key: cultureid
-        cardinality: many-to-many
-      - name: products
-        primary_key: productid
-        foreign_key: productid
-        cardinality: many-to-many
-"#;
-
-        let spec: Result<SemanticLayerSpec, _> = serde_yaml::from_str(yaml_content);
-        assert!(spec.is_ok(), "Failed to deserialize YAML: {:?}", spec.err());
-        let spec = spec.unwrap();
-
-        assert_eq!(spec.models.len(), 4);
-
-        // Basic checks on the first model ('culture')
-        let culture_model = &spec.models[0];
-        assert_eq!(culture_model.name, "culture");
-        assert_eq!(
-            culture_model.description,
-            Some("Core model for cultural groups".to_string())
-        );
-        assert_eq!(culture_model.dimensions.len(), 2);
-        assert_eq!(culture_model.measures.len(), 1);
-        assert_eq!(culture_model.filters.len(), 1);
-        assert_eq!(culture_model.metrics.len(), 1);
-        assert_eq!(culture_model.relationships.len(), 3);
-
-        // Check dimension 'name' options
-        let name_dim = &culture_model.dimensions[1];
-        assert_eq!(name_dim.name, "name");
-        assert_eq!(
-            name_dim.options,
-            Some(vec!["Western".to_string(), "Eastern".to_string()])
-        );
-        assert!(!name_dim.searchable); // Default false
-
-        // Check filter 'active_subscribed_customer' args
-        let filter = &culture_model.filters[0];
-        assert_eq!(filter.name, "active_subscribed_customer");
-        assert!(!filter.args.is_empty());
-        let filter_args = &filter.args;
-        assert_eq!(filter_args.len(), 1);
-        assert_eq!(filter_args[0].name, "threshold");
-        assert_eq!(filter_args[0].type_, "integer");
-
-        // Check entity 'logins' type and cardinality
-        let logins_entity = &culture_model.relationships[0];
-        assert_eq!(logins_entity.name, "logins");
-        assert_eq!(logins_entity.type_, Some("LEFT".to_string()));
-        assert_eq!(logins_entity.cardinality, Some("one-to-many".to_string()));
-
-        // Check entity 'subscriptions' type and cardinality (optional)
-        let subs_entity = &culture_model.relationships[1];
-        assert_eq!(subs_entity.name, "subscriptions");
-        assert_eq!(subs_entity.type_, None);
-        assert_eq!(subs_entity.cardinality, Some("one-to-one".to_string()));
-
-        // Check second model ('logins')
-        let logins_model = &spec.models[1];
-        assert_eq!(logins_model.name, "logins");
-        assert_eq!(logins_model.dimensions.len(), 1);
-        assert_eq!(logins_model.measures.len(), 1);
-        assert_eq!(logins_model.filters.len(), 0); // Default empty vec
-        assert_eq!(logins_model.metrics.len(), 0); // Default empty vec
-        assert_eq!(logins_model.relationships.len(), 1);
-    }
-}
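A minimal sketch of deserializing the updated Relationship shape, where the join columns are now source_col and ref_col; the sample YAML and the standalone main are assumptions for illustration:

use serde::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub struct Relationship {
    pub name: String,
    pub source_col: String,
    pub ref_col: String,
    #[serde(rename = "type")]
    pub type_: Option<String>,
    pub cardinality: Option<String>,
    pub description: Option<String>,
}

fn main() -> Result<(), serde_yaml::Error> {
    // Optional fields (type, cardinality, description) may be omitted and deserialize to None.
    let yaml = "name: logins\nsource_col: cultureid\nref_col: cultureid\ntype: LEFT\ncardinality: one-to-many\n";
    let rel: Relationship = serde_yaml::from_str(yaml)?;
    assert_eq!(rel.type_, Some("LEFT".to_string()));
    Ok(())
}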
File diff suppressed because it is too large
@@ -12,7 +12,6 @@ use std::time::Duration;
#[derive(RustEmbed)]
#[folder = "../../"]
#[include = "docker-compose.yml"]
#[include = "litellm_vertex_config.yaml"]
#[include = "supabase/.env.example"]
#[include = "supabase/**/*"]
#[exclude = "supabase/volumes/db/data/**/*"]
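For orientation, a simplified sketch of how these embedded assets are typically materialized at runtime; the StaticAssets name, attributes, and the get/data access pattern follow the diff, while the write_asset helper and its error handling are illustrative assumptions:

use rust_embed::RustEmbed;
use std::fs;
use std::path::Path;

#[derive(RustEmbed)]
#[folder = "../../"]
#[include = "docker-compose.yml"]
struct StaticAssets;

fn write_asset(app_base_dir: &Path, name: &str) -> std::io::Result<()> {
    // Look up the embedded file and write its bytes next to the other runtime assets.
    let asset = StaticAssets::get(name).expect("asset not embedded");
    fs::write(app_base_dir.join(name), asset.data.as_ref())
}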
@@ -70,6 +69,28 @@ async fn setup_persistent_app_environment() -> Result<PathBuf, BusterError> {
        ))
    })?;

+   // Initialize .env from supabase/.env.example, which should have been extracted by StaticAssets loop
+   let example_env_src_path = app_base_dir.join("supabase/.env.example");
+   let main_dot_env_target_path = app_base_dir.join(".env");
+
+   if example_env_src_path.exists() {
+       fs::copy(&example_env_src_path, &main_dot_env_target_path).map_err(|e| {
+           BusterError::CommandError(format!(
+               "Failed to initialize {} from {}: {}",
+               main_dot_env_target_path.display(),
+               example_env_src_path.display(),
+               e
+           ))
+       })?;
+   } else {
+       // This case should ideally not be hit if supabase/.env.example is correctly embedded and extracted.
+       // If it's missing, it indicates an issue with asset handling.
+       return Err(BusterError::CommandError(format!(
+           "Critical setup error: {} not found after asset extraction. Cannot initialize main .env file.",
+           example_env_src_path.display()
+       )));
+   }
+
    let target_dotenv_path = app_base_dir.join(".env");

    // --- BEGIN API Key and Reranker Setup using config_utils ---
@@ -207,6 +228,7 @@ pub async fn reset() -> Result<(), BusterError> {
    println!(
        "This can lead to a complete wipe of the Buster database and any other local service data."
    );
+   println!("The ~/.buster directory will be wiped, except for ~/.buster/credentials.yml if it exists.");
    println!("This action is irreversible.");
    print!("Are you sure you want to proceed with resetting? (y/n): ");
    io::stdout()
@@ -223,7 +245,34 @@ pub async fn reset() -> Result<(), BusterError> {
        return Ok(());
    }

-   let persistent_app_dir = setup_persistent_app_environment().await?;
+   let app_base_dir = config_utils::get_app_base_dir().map_err(|e| {
+       BusterError::CommandError(format!("Failed to get app base directory: {}", e))
+   })?;
+   println!("Target application directory for reset: {}", app_base_dir.display());
+
+   // Backup credentials if they exist
+   let credentials_path = app_base_dir.join("credentials.yml");
+   let credentials_backup = fs::read(&credentials_path).ok();
+   if credentials_backup.is_some() {
+       println!("Found credentials.yml at {}, will attempt to preserve it.", credentials_path.display());
+   } else {
+       println!("No credentials.yml found at {} to preserve.", credentials_path.display());
+   }
+
+   // Ensure app_base_dir exists and essential files for Docker commands are present
+   // These files will be wiped later with the rest of app_base_dir.
+   fs::create_dir_all(&app_base_dir).map_err(|e| BusterError::CommandError(format!("Failed to create app base directory {}: {}", app_base_dir.display(), e)))?;
+
+   let dc_filename = "docker-compose.yml";
+   let dc_asset = StaticAssets::get(dc_filename)
+       .ok_or_else(|| BusterError::CommandError(format!("Failed to get embedded asset: {}", dc_filename)))?;
+   fs::write(app_base_dir.join(dc_filename), dc_asset.data.as_ref()).map_err(|e| BusterError::CommandError(format!("Failed to write temporary {}: {}", dc_filename, e)))?;
+
+   // docker-compose.yml references supabase/.env, so ensure it exists (can be empty)
+   let supabase_dir = app_base_dir.join("supabase");
+   fs::create_dir_all(&supabase_dir).map_err(|e| BusterError::CommandError(format!("Failed to create supabase directory in app base dir: {}", e)))?;
+   fs::write(supabase_dir.join(".env"), "").map_err(|e| BusterError::CommandError(format!("Failed to write temporary supabase/.env: {}",e)))?;
+
    let pb = ProgressBar::new_spinner();
    pb.enable_steady_tick(Duration::from_millis(120));
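The preserve-credentials behavior boils down to reading the file before the wipe and writing it back afterwards; a condensed sketch under the same assumptions as the diff (the wipe_preserving_credentials helper is hypothetical):

use std::fs;
use std::path::Path;

fn wipe_preserving_credentials(app_base_dir: &Path) -> std::io::Result<()> {
    let credentials_path = app_base_dir.join("credentials.yml");
    // Read the bytes up front; None if the file does not exist.
    let credentials_backup = fs::read(&credentials_path).ok();

    // Wipe and recreate the application directory.
    if app_base_dir.exists() {
        fs::remove_dir_all(app_base_dir)?;
    }
    fs::create_dir_all(app_base_dir)?;

    // Restore credentials.yml if one existed before the wipe.
    if let Some(backup) = credentials_backup {
        fs::write(&credentials_path, backup)?;
    }
    Ok(())
}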
@@ -235,16 +284,16 @@ pub async fn reset() -> Result<(), BusterError> {
    );

    // Step 1: Stop services
-   pb.set_message("Resetting Buster services (step 1/4): Stopping services...");
+   pb.set_message("Resetting Buster services (1/3): Stopping services...");

    let mut down_cmd = Command::new("docker");
    down_cmd
-       .current_dir(&persistent_app_dir)
+       .current_dir(&app_base_dir) // Use the prepared app_base_dir
        .arg("compose")
        .arg("-p")
        .arg("buster")
        .arg("-f")
-       .arg("docker-compose.yml")
+       .arg("docker-compose.yml") // Relative to app_base_dir
        .arg("down");

    let down_output = down_cmd.output().map_err(|e| {
@@ -259,72 +308,29 @@ Stdout:
Stderr:
{}",
            down_output.status,
-           persistent_app_dir.display(),
+           app_base_dir.display(),
            String::from_utf8_lossy(&down_output.stdout),
            String::from_utf8_lossy(&down_output.stderr)
        );
        pb.abandon_with_message("Error: docker compose down failed. See console for details.");
-       println!("\nDocker Compose Down Error Details:\n{}", err_msg);
+       println!("
+Docker Compose Down Error Details:
+{}", err_msg);
        return Err(BusterError::CommandError(err_msg));
    }
    pb.println("Services stopped successfully.");

-   // Step 2: Clear persistent data volumes
-   pb.set_message("Resetting Buster services (step 2/4): Clearing persistent data volumes...");
-   let db_volume_path = persistent_app_dir.join("supabase/volumes/db/data");
-   let storage_volume_path = persistent_app_dir.join("supabase/volumes/storage");
-
-   if db_volume_path.exists() {
-       fs::remove_dir_all(&db_volume_path).map_err(|e| {
-           BusterError::CommandError(format!(
-               "Failed to remove db volume at {}: {}",
-               db_volume_path.display(),
-               e
-           ))
-       })?;
-   }
-   fs::create_dir_all(&db_volume_path).map_err(|e| {
-       BusterError::CommandError(format!(
-           "Failed to recreate db volume at {}: {}",
-           db_volume_path.display(),
-           e
-       ))
-   })?;
-   pb.println(format!(
-       "Successfully cleared and recreated database volume: {}",
-       db_volume_path.display()
-   ));
-
-   if storage_volume_path.exists() {
-       fs::remove_dir_all(&storage_volume_path).map_err(|e| {
-           BusterError::CommandError(format!(
-               "Failed to remove storage volume at {}: {}",
-               storage_volume_path.display(),
-               e
-           ))
-       })?;
-   }
-   fs::create_dir_all(&storage_volume_path).map_err(|e| {
-       BusterError::CommandError(format!(
-           "Failed to recreate storage volume at {}: {}",
-           storage_volume_path.display(),
-           e
-       ))
-   })?;
-   pb.println(format!(
-       "Successfully cleared and recreated storage volume: {}",
-       storage_volume_path.display()
-   ));
-
-   // Step 3: Identify service images
-   pb.set_message("Resetting Buster services (step 3/4): Identifying service images...");
+   // Step 2: Identify and Remove service images
+   pb.set_message("Resetting Buster services (2/3): Removing service images...");
    let mut config_images_cmd = Command::new("docker");
    config_images_cmd
-       .current_dir(&persistent_app_dir)
+       .current_dir(&app_base_dir) // Use the prepared app_base_dir
        .arg("compose")
        .arg("-p")
        .arg("buster")
        .arg("-f")
-       .arg("docker-compose.yml")
+       .arg("docker-compose.yml") // Relative to app_base_dir
        .arg("config")
        .arg("--images");
@@ -343,7 +349,7 @@ Stdout:
Stderr:
{}",
        config_images_output.status,
-       persistent_app_dir.display(),
+       app_base_dir.display(),
        String::from_utf8_lossy(&config_images_output.stdout),
        String::from_utf8_lossy(&config_images_output.stderr)
    );
@@ -351,7 +357,9 @@ Stderr:
        "Error: Failed to identify service images. See console for details.",
    );
    println!(
-       "\nDocker Compose Config --images Error Details:\n{}",
+       "
+Docker Compose Config --images Error Details:
+{}",
        err_msg
    );
    return Err(BusterError::CommandError(err_msg));
@@ -363,29 +371,25 @@ Stderr:
        .filter(|line| !line.trim().is_empty())
        .collect();

-   // Step 4: Remove service images
    if image_names.is_empty() {
        pb.println(
            "No images identified by docker-compose config --images. Skipping image removal.",
        );
    } else {
-       pb.set_message(format!(
-           "Resetting Buster services (step 4/4): Removing {} service image(s)...",
-           image_names.len()
-       ));
+       pb.println(format!("Found {} image(s) to remove.", image_names.len()));
        for (index, image_name) in image_names.iter().enumerate() {
            let current_image_name = image_name.trim();
            if current_image_name.is_empty() {
                continue;
            }
            pb.set_message(format!(
-               "Resetting Buster services (step 4/4): Removing image {}/{} ('{}')...",
+               "Resetting Buster services (2/3): Removing image {}/{} ('{}')...",
                index + 1,
                image_names.len(),
                current_image_name
            ));
            let mut rmi_cmd = Command::new("docker");
-           rmi_cmd.arg("image").arg("rm").arg(current_image_name);
+           rmi_cmd.arg("image").arg("rm").arg(current_image_name); // Image names are global

            let rmi_output = rmi_cmd.output().map_err(|e| {
                BusterError::CommandError(format!(
@@ -394,19 +398,44 @@ Stderr:
                ))
            })?;

            // Log warning on failure but continue, as image might not exist or be in use by other non-project containers
            if !rmi_output.status.success() {
                let rmi_stderr = String::from_utf8_lossy(&rmi_output.stderr);
                if !rmi_stderr.trim().is_empty() && !rmi_stderr.contains("No such image") {
                    // Don't warn if image was already gone
                    pb.println(format!("Warning: Could not remove image '{}'. It might be in use or already removed. Stderr: {}", current_image_name, rmi_stderr.trim()));
                }
            } else {
                pb.println(format!("Successfully removed image: {}", current_image_name));
            }
        }
    }
+   pb.println("Service image removal process complete.");
+
+   // Step 3: Wipe app_base_dir and restore credentials
+   pb.set_message(format!("Resetting Buster services (3/3): Wiping {} and restoring credentials...", app_base_dir.display()));
+
+   if app_base_dir.exists() {
+       fs::remove_dir_all(&app_base_dir).map_err(|e| {
+           BusterError::CommandError(format!("Failed to remove app directory {}: {}", app_base_dir.display(), e))
+       })?;
+       pb.println(format!("Successfully removed directory: {}", app_base_dir.display()));
+   }
+
+   fs::create_dir_all(&app_base_dir).map_err(|e| {
+       BusterError::CommandError(format!("Failed to recreate app directory {}: {}", app_base_dir.display(), e))
+   })?;
+   pb.println(format!("Successfully recreated directory: {}", app_base_dir.display()));
+
+   if let Some(backup_data) = credentials_backup {
+       fs::write(&credentials_path, backup_data).map_err(|e| {
+           BusterError::CommandError(format!("Failed to restore credentials.yml to {}: {}", credentials_path.display(), e))
+       })?;
+       pb.println(format!("Successfully restored: {}", credentials_path.display()));
+   } else {
+       pb.println(format!("No prior credentials.yml to restore for {}.", credentials_path.display()));
+   }
+
    pb.finish_with_message(
-       "Buster services stopped, volumes cleared, and images removed successfully.",
+       format!("Buster reset complete. Docker services stopped, images removed. Directory {} wiped (credentials.yml preserved if found). Run 'buster start' to rebuild.", app_base_dir.display())
    );
    Ok(())
}
@@ -16,7 +16,7 @@ services:
      retries: 30

  api:
-   image: ghcr.io/buster-so/buster/api:latest
+   image: ghcr.io/buster-so/buster/api:latest-arm64
    container_name: buster-api
    env_file:
      - .env
@@ -50,7 +50,7 @@ services:
        condition: service_healthy

  web:
-   image: ghcr.io/buster-so/buster/web:latest
+   image: ghcr.io/buster-so/buster/web:latest-arm64
    container_name: buster-web
    env_file:
      - .env