//! buster/cli/src/main.rs — entry point for the buster CLI.
mod commands;
mod error;
mod types;
mod utils;
use clap::{Parser, Subcommand};
use colored::*;
use commands::{auth::AuthArgs, deploy, init};
pub const APP_NAME: &str = "buster";
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
pub const BUILD_DATE: &str = env!("BUILD_DATE");
pub const GIT_HASH: &str = env!("GIT_HASH");
#[derive(Subcommand)]
Dal/cli-updates-skip-dbt (#67) * feat: enhance deploy command with skip_dbt option - Updated the Deploy command to accept a `skip_dbt` boolean argument, allowing users to bypass the dbt run during deployment. - Refactored the deploy function to conditionally execute the dbt command based on the `skip_dbt` flag, improving deployment flexibility. * Refactor query engine and CLI commands for improved functionality and error handling - Updated `get_bigquery_columns` and `get_snowflake_columns` functions to enhance column name handling and ensure proper error reporting. - Modified `get_snowflake_client` to accept a database ID for better connection management. - Enhanced the `deploy` command in the CLI to include additional parameters (`path`, `data_source_name`, `schema`, `env`) for more flexible deployments. - Improved error handling and reporting in the `deploy` function, including detailed summaries of deployment errors and successful file processing. - Updated `get_model_files` to accept a directory path and added checks for file existence, enhancing robustness. - Adjusted model file structures to include schema information and refined the upload process to handle optional parameters more effectively. These changes collectively improve the usability and reliability of the query engine and deployment process. * Update dataset DDL generation to include optional YML file content - Modified `generate_sql_agent` to append optional YML file content to dataset DDL - Ensures more comprehensive dataset representation during SQL agent generation - Handles cases where YML file might be present or absent gracefully
2025-01-25 08:00:38 +08:00
#[clap(rename_all = "kebab-case")]
pub enum Commands {
2025-02-26 11:17:00 +08:00
Init {
/// Path to create the buster.yml file (defaults to current directory)
#[arg(long)]
destination_path: Option<String>,
},
/// Authenticate with Buster API
Auth {
/// The Buster API host URL
#[arg(long, env = "BUSTER_HOST")]
host: Option<String>,
/// Your Buster API key
#[arg(long, env = "BUSTER_API_KEY")]
api_key: Option<String>,
/// Don't save credentials to disk
#[arg(long)]
no_save: bool,
},
/// Display version information
Version,
/// Update buster-cli to the latest version
Update {
/// Only check if an update is available
#[arg(long)]
check_only: bool,
/// Force update even if already on latest version
#[arg(long)]
force: bool,
/// Skip update confirmation prompt
#[arg(short = 'y')]
no_prompt: bool,
},
2025-02-12 21:48:29 +08:00
Generate {
#[arg(long)]
source_path: Option<String>,
#[arg(long)]
destination_path: Option<String>,
#[arg(long)]
data_source_name: Option<String>,
#[arg(long)]
schema: Option<String>,
#[arg(long)]
database: Option<String>,
/// Output YML files in a flat structure instead of maintaining directory hierarchy
#[arg(long, default_value_t = false)]
flat_structure: bool,
2025-02-12 21:48:29 +08:00
},
Dal/cli-updates-skip-dbt (#67) * feat: enhance deploy command with skip_dbt option - Updated the Deploy command to accept a `skip_dbt` boolean argument, allowing users to bypass the dbt run during deployment. - Refactored the deploy function to conditionally execute the dbt command based on the `skip_dbt` flag, improving deployment flexibility. * Refactor query engine and CLI commands for improved functionality and error handling - Updated `get_bigquery_columns` and `get_snowflake_columns` functions to enhance column name handling and ensure proper error reporting. - Modified `get_snowflake_client` to accept a database ID for better connection management. - Enhanced the `deploy` command in the CLI to include additional parameters (`path`, `data_source_name`, `schema`, `env`) for more flexible deployments. - Improved error handling and reporting in the `deploy` function, including detailed summaries of deployment errors and successful file processing. - Updated `get_model_files` to accept a directory path and added checks for file existence, enhancing robustness. - Adjusted model file structures to include schema information and refined the upload process to handle optional parameters more effectively. These changes collectively improve the usability and reliability of the query engine and deployment process. * Update dataset DDL generation to include optional YML file content - Modified `generate_sql_agent` to append optional YML file content to dataset DDL - Ensures more comprehensive dataset representation during SQL agent generation - Handles cases where YML file might be present or absent gracefully
2025-01-25 08:00:38 +08:00
Deploy {
#[arg(long)]
path: Option<String>,
2025-02-12 02:36:03 +08:00
#[arg(long, default_value_t = false)]
dry_run: bool,
/// Recursively search for model files in subdirectories
#[arg(long, default_value_t = true)]
recursive: bool,
Dal/cli-updates-skip-dbt (#67) * feat: enhance deploy command with skip_dbt option - Updated the Deploy command to accept a `skip_dbt` boolean argument, allowing users to bypass the dbt run during deployment. - Refactored the deploy function to conditionally execute the dbt command based on the `skip_dbt` flag, improving deployment flexibility. * Refactor query engine and CLI commands for improved functionality and error handling - Updated `get_bigquery_columns` and `get_snowflake_columns` functions to enhance column name handling and ensure proper error reporting. - Modified `get_snowflake_client` to accept a database ID for better connection management. - Enhanced the `deploy` command in the CLI to include additional parameters (`path`, `data_source_name`, `schema`, `env`) for more flexible deployments. - Improved error handling and reporting in the `deploy` function, including detailed summaries of deployment errors and successful file processing. - Updated `get_model_files` to accept a directory path and added checks for file existence, enhancing robustness. - Adjusted model file structures to include schema information and refined the upload process to handle optional parameters more effectively. These changes collectively improve the usability and reliability of the query engine and deployment process. * Update dataset DDL generation to include optional YML file content - Modified `generate_sql_agent` to append optional YML file content to dataset DDL - Ensures more comprehensive dataset representation during SQL agent generation - Handles cases where YML file might be present or absent gracefully
2025-01-25 08:00:38 +08:00
},
}
// Root argument parser for the buster CLI, driven by clap's derive API.
// (Plain `//` comments used deliberately: `///` doc comments would become
// clap-generated help text and change runtime output.)
#[derive(Parser)]
pub struct Args {
    // The subcommand to execute; see the `Commands` enum.
    #[command(subcommand)]
    pub cmd: Commands,
}
#[tokio::main]
async fn main() {
let args = Args::parse();
// TODO: All commands should check for an update.
let result = match args.cmd {
2025-02-26 11:17:00 +08:00
Commands::Init { destination_path } => init(destination_path.as_deref()).await,
Commands::Auth {
host,
api_key,
no_save,
} => {
commands::auth::auth_with_args(AuthArgs {
host,
api_key,
no_save,
})
.await
}
Commands::Version => {
println!("{} v{}", APP_NAME.bold(), VERSION);
println!("Build Date: {}", BUILD_DATE);
println!("Git Commit: {}", GIT_HASH);
// Check for updates
match commands::version::check_latest_version().await {
Ok(Some(latest_version)) => {
if commands::version::is_update_available(VERSION, &latest_version) {
println!("\n{}", "Update available!".yellow().bold());
println!("Latest version: {}", latest_version.green());
println!("Run {} to update", "buster update".cyan());
} else {
println!("\n{}", "You are using the latest version".green());
}
}
Ok(None) => println!("\n{}", "Unable to check for updates".yellow()),
Err(e) => println!("\n{}: {}", "Error checking for updates".red(), e),
}
Ok(())
}
Commands::Update {
check_only,
force,
no_prompt,
} => {
let cmd = commands::update::UpdateCommand::new(check_only, force, no_prompt);
cmd.execute().await
}
Commands::Generate {
2025-02-12 21:48:29 +08:00
source_path,
destination_path,
data_source_name,
schema,
database,
flat_structure,
2025-02-12 21:48:29 +08:00
} => {
commands::generate(
source_path.as_deref(),
destination_path.as_deref(),
data_source_name,
schema,
database,
flat_structure,
)
.await
}
Commands::Deploy {
path,
dry_run,
recursive,
2025-02-26 11:11:35 +08:00
} => deploy(path.as_deref(), dry_run, recursive).await,
};
if let Err(e) = result {
eprintln!("{}", e);
std::process::exit(1);
}
}