From 3bfe77339a547b04e7e2c60b49acb31d3c044774 Mon Sep 17 00:00:00 2001 From: dal Date: Tue, 25 Feb 2025 21:11:06 -0700 Subject: [PATCH] Enhance dataset and API error handling - Improve column deployment logic in dataset routes - Add more detailed error reporting in CLI generate command - Support additional Postgres credential parsing - Add debug logging for Buster API interactions --- .../rest/routes/datasets/deploy_datasets.rs | 64 +++++++++---------- api/src/utils/query_engine/credentials.rs | 1 + cli/src/commands/generate.rs | 8 ++- cli/src/utils/buster/api.rs | 15 ++++- cli/src/utils/buster/types.rs | 9 ++- 5 files changed, 61 insertions(+), 36 deletions(-) diff --git a/api/src/routes/rest/routes/datasets/deploy_datasets.rs b/api/src/routes/rest/routes/datasets/deploy_datasets.rs index e5d4d86f8..afc6a0161 100644 --- a/api/src/routes/rest/routes/datasets/deploy_datasets.rs +++ b/api/src/routes/rest/routes/datasets/deploy_datasets.rs @@ -624,39 +624,7 @@ async fn deploy_datasets_handler( }) .collect(); - // Get current column names - let current_column_names: HashSet<String> = dataset_columns::table - .filter(dataset_columns::dataset_id.eq(dataset_id)) - .filter(dataset_columns::deleted_at.is_null()) - .select(dataset_columns::name) - .load::<String>(&mut conn) - .await?
- .into_iter() - .collect(); - - // Get new column names - let new_column_names: HashSet<String> = columns - .iter() - .map(|c| c.name.clone()) - .collect(); - - // Soft delete removed columns - let columns_to_delete: Vec<String> = current_column_names - .difference(&new_column_names) - .cloned() - .collect(); - - if !columns_to_delete.is_empty() { - diesel::update(dataset_columns::table) - .filter(dataset_columns::dataset_id.eq(dataset_id)) - .filter(dataset_columns::name.eq_any(&columns_to_delete)) - .filter(dataset_columns::deleted_at.is_null()) - .set(dataset_columns::deleted_at.eq(now)) - .execute(&mut conn) - .await?; - } - - // Bulk upsert columns + // First: Bulk upsert columns diesel::insert_into(dataset_columns::table) .values(&columns) .on_conflict((dataset_columns::dataset_id, dataset_columns::name)) @@ -672,6 +640,36 @@ async fn deploy_datasets_handler( )) .execute(&mut conn) .await?; + + // Then: Soft delete removed columns + let current_column_names: HashSet<String> = dataset_columns::table + .filter(dataset_columns::dataset_id.eq(dataset_id)) + .filter(dataset_columns::deleted_at.is_null()) + .select(dataset_columns::name) + .load::<String>(&mut conn) + .await?
+ .into_iter() + .collect(); + + let new_column_names: HashSet<String> = columns + .iter() + .map(|c| c.name.clone()) + .collect(); + + let columns_to_delete: Vec<String> = current_column_names + .difference(&new_column_names) + .cloned() + .collect(); + + if !columns_to_delete.is_empty() { + diesel::update(dataset_columns::table) + .filter(dataset_columns::dataset_id.eq(dataset_id)) + .filter(dataset_columns::name.eq_any(&columns_to_delete)) + .filter(dataset_columns::deleted_at.is_null()) + .set(dataset_columns::deleted_at.eq(now)) + .execute(&mut conn) + .await?; + } } } } diff --git a/api/src/utils/query_engine/credentials.rs b/api/src/utils/query_engine/credentials.rs index c893c9256..fd821d78c 100644 --- a/api/src/utils/query_engine/credentials.rs +++ b/api/src/utils/query_engine/credentials.rs @@ -75,6 +75,7 @@ pub struct PostgresCredentials { pub port: u16, pub username: String, pub password: String, + #[serde(alias = "dbname")] pub database: Option<String>, pub schemas: Option<Vec<String>>, pub jump_host: Option<String>, diff --git a/cli/src/commands/generate.rs b/cli/src/commands/generate.rs index a276c606b..b38afc6a0 100644 --- a/cli/src/commands/generate.rs +++ b/cli/src/commands/generate.rs @@ -300,7 +300,13 @@ impl GenerateCommand { if !response.errors.is_empty() { println!("\n⚠️ Some models had errors:"); for (model_name, error) in response.errors { - println!("❌ {}: {}", model_name, error); + println!("❌ {}: {}", model_name, error.message); + if let Some(error_type) = error.error_type { + println!(" Error type: {}", error_type); + } + if let Some(context) = error.context { + println!(" Context: {}", context); + } } } } diff --git a/cli/src/utils/buster/api.rs b/cli/src/utils/buster/api.rs index 850e18ead..acfbc3ccb 100644 --- a/cli/src/utils/buster/api.rs +++ b/cli/src/utils/buster/api.rs @@ -99,6 +99,9 @@ impl BusterClient { pub async fn post_data_sources(&self, req_body: Vec<PostDataSourcesRequest>) -> Result<()> { let headers = self.build_headers()?; + // Debug log the request body + println!("DEBUG: 
post_data_sources request body: {}", serde_json::to_string_pretty(&req_body).unwrap_or_else(|_| "Failed to serialize request".to_string())); + match self .client .post(format!("{}/api/v1/data_sources", self.base_url)) @@ -162,7 +165,17 @@ impl BusterClient { res.text().await? )); } - Ok(res.json().await?) + + let response_text = res.text().await?; + println!("DEBUG: Raw API Response: {}", response_text); + + match serde_json::from_str::<GenerateApiResponse>(&response_text) { + Ok(parsed) => Ok(parsed), + Err(e) => { + println!("DEBUG: JSON Parse Error: {}", e); + Err(anyhow::anyhow!("Failed to parse API response: {}", e)) + } + } } Err(e) => Err(anyhow::anyhow!("POST /api/v1/datasets/generate failed: {}", e)), } diff --git a/cli/src/utils/buster/types.rs b/cli/src/utils/buster/types.rs index 41a15f948..891354d4c 100644 --- a/cli/src/utils/buster/types.rs +++ b/cli/src/utils/buster/types.rs @@ -108,5 +108,12 @@ pub struct GenerateApiRequest { #[derive(Debug, Deserialize)] pub struct GenerateApiResponse { pub yml_contents: HashMap<String, String>, - pub errors: HashMap<String, String>, + pub errors: HashMap<String, GenerateApiError>, } + +#[derive(Debug, Deserialize)] +pub struct GenerateApiError { + pub message: String, + pub error_type: Option<String>, + pub context: Option<String>, }