mirror of https://github.com/buster-so/buster.git
Enhance dataset and API error handling
- Improve column deployment logic in dataset routes
- Add more detailed error reporting in CLI generate command
- Support additional Postgres credential parsing
- Add debug logging for Buster API interactions
This commit is contained in:
parent 3d5c05f89d
commit 3bfe77339a
@@ -624,39 +624,7 @@ async fn deploy_datasets_handler(
                 })
                 .collect();
 
-            // Get current column names
-            let current_column_names: HashSet<String> = dataset_columns::table
-                .filter(dataset_columns::dataset_id.eq(dataset_id))
-                .filter(dataset_columns::deleted_at.is_null())
-                .select(dataset_columns::name)
-                .load::<String>(&mut conn)
-                .await?
-                .into_iter()
-                .collect();
-
-            // Get new column names
-            let new_column_names: HashSet<String> = columns
-                .iter()
-                .map(|c| c.name.clone())
-                .collect();
-
-            // Soft delete removed columns
-            let columns_to_delete: Vec<String> = current_column_names
-                .difference(&new_column_names)
-                .cloned()
-                .collect();
-
-            if !columns_to_delete.is_empty() {
-                diesel::update(dataset_columns::table)
-                    .filter(dataset_columns::dataset_id.eq(dataset_id))
-                    .filter(dataset_columns::name.eq_any(&columns_to_delete))
-                    .filter(dataset_columns::deleted_at.is_null())
-                    .set(dataset_columns::deleted_at.eq(now))
-                    .execute(&mut conn)
-                    .await?;
-            }
-
-            // Bulk upsert columns
+            // First: Bulk upsert columns
             diesel::insert_into(dataset_columns::table)
                 .values(&columns)
                 .on_conflict((dataset_columns::dataset_id, dataset_columns::name))
@@ -672,6 +640,36 @@ async fn deploy_datasets_handler(
                 ))
                 .execute(&mut conn)
                 .await?;
+
+            // Then: Soft delete removed columns
+            let current_column_names: HashSet<String> = dataset_columns::table
+                .filter(dataset_columns::dataset_id.eq(dataset_id))
+                .filter(dataset_columns::deleted_at.is_null())
+                .select(dataset_columns::name)
+                .load::<String>(&mut conn)
+                .await?
+                .into_iter()
+                .collect();
+
+            let new_column_names: HashSet<String> = columns
+                .iter()
+                .map(|c| c.name.clone())
+                .collect();
+
+            let columns_to_delete: Vec<String> = current_column_names
+                .difference(&new_column_names)
+                .cloned()
+                .collect();
+
+            if !columns_to_delete.is_empty() {
+                diesel::update(dataset_columns::table)
+                    .filter(dataset_columns::dataset_id.eq(dataset_id))
+                    .filter(dataset_columns::name.eq_any(&columns_to_delete))
+                    .filter(dataset_columns::deleted_at.is_null())
+                    .set(dataset_columns::deleted_at.eq(now))
+                    .execute(&mut conn)
+                    .await?;
+            }
         }
     }
 }
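The net effect of these two hunks is a reorder: the handler now upserts the incoming columns first and only afterwards soft deletes whatever no longer appears in the deployment. A minimal, standalone sketch of the set-difference step follows; the helper name and column names are illustrative, not part of the handler:

use std::collections::HashSet;

// Hypothetical standalone helper mirroring the handler's set-difference step.
fn columns_to_soft_delete(current: Vec<String>, incoming: Vec<String>) -> Vec<String> {
    // Columns currently live in the database for this dataset.
    let current_names: HashSet<String> = current.into_iter().collect();
    // Columns present in the new deployment payload.
    let incoming_names: HashSet<String> = incoming.into_iter().collect();
    // Anything deployed previously but absent from the new payload gets soft deleted.
    current_names.difference(&incoming_names).cloned().collect()
}

fn main() {
    let current = vec!["id".into(), "name".into(), "legacy_flag".into()];
    let incoming = vec!["id".into(), "name".into()];
    // Prints ["legacy_flag"]: only the column dropped from the model is soft deleted.
    println!("{:?}", columns_to_soft_delete(current, incoming));
}

One plausible reason for the reorder is that the soft-delete pass now sees the column set as it exists after the upsert, so it can only remove names that are genuinely absent from the new deployment.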
@@ -75,6 +75,7 @@ pub struct PostgresCredentials
     pub port: u16,
     pub username: String,
     pub password: String,
+    #[serde(alias = "dbname")]
    pub database: Option<String>,
     pub schemas: Option<Vec<String>>,
     pub jump_host: Option<String>,
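The alias means either key spelling now maps to the `database` field when credentials are deserialized. A minimal sketch, using a trimmed stand-in struct rather than the real PostgresCredentials (which has the fields shown above and more):

use serde::Deserialize;

// Trimmed-down stand-in; only the aliased field is kept for illustration.
#[derive(Debug, Deserialize)]
struct Credentials {
    #[serde(alias = "dbname")]
    database: Option<String>,
}

fn main() {
    // Both spellings deserialize into `database` thanks to the serde alias.
    let a: Credentials = serde_json::from_str(r#"{"database": "analytics"}"#).unwrap();
    let b: Credentials = serde_json::from_str(r#"{"dbname": "analytics"}"#).unwrap();
    assert_eq!(a.database.as_deref(), Some("analytics"));
    assert_eq!(b.database.as_deref(), Some("analytics"));
}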
@@ -300,7 +300,13 @@ impl GenerateCommand
         if !response.errors.is_empty() {
             println!("\n⚠️ Some models had errors:");
             for (model_name, error) in response.errors {
-                println!("❌ {}: {}", model_name, error);
+                println!("❌ {}: {}", model_name, error.message);
+                if let Some(error_type) = error.error_type {
+                    println!(" Error type: {}", error_type);
+                }
+                if let Some(context) = error.context {
+                    println!(" Context: {}", context);
+                }
             }
         }
     }
@@ -99,6 +99,9 @@ impl BusterClient
     pub async fn post_data_sources(&self, req_body: Vec<PostDataSourcesRequest>) -> Result<()> {
         let headers = self.build_headers()?;
 
+        // Debug log the request body
+        println!("DEBUG: post_data_sources request body: {}", serde_json::to_string_pretty(&req_body).unwrap_or_else(|_| "Failed to serialize request".to_string()));
+
         match self
             .client
             .post(format!("{}/api/v1/data_sources", self.base_url))
@@ -162,7 +165,17 @@ impl BusterClient
                         res.text().await?
                     ));
                 }
-                Ok(res.json().await?)
+
+                let response_text = res.text().await?;
+                println!("DEBUG: Raw API Response: {}", response_text);
+
+                match serde_json::from_str::<GenerateApiResponse>(&response_text) {
+                    Ok(parsed) => Ok(parsed),
+                    Err(e) => {
+                        println!("DEBUG: JSON Parse Error: {}", e);
+                        Err(anyhow::anyhow!("Failed to parse API response: {}", e))
+                    }
+                }
             }
             Err(e) => Err(anyhow::anyhow!("POST /api/v1/datasets/generate failed: {}", e)),
         }
@@ -108,5 +108,12 @@ pub struct GenerateApiRequest
 #[derive(Debug, Deserialize)]
 pub struct GenerateApiResponse {
     pub yml_contents: HashMap<String, String>,
-    pub errors: HashMap<String, String>,
+    pub errors: HashMap<String, GenerateApiError>,
 }
+
+#[derive(Debug, Deserialize)]
+pub struct GenerateApiError {
+    pub message: String,
+    pub error_type: Option<String>,
+    pub context: Option<String>,
+}
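To make the new shape concrete, here is a rough sketch of a response body these structs would accept, parsed with serde_json; the model names and error values are invented, and the real endpoint's payloads may differ:

use std::collections::HashMap;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct GenerateApiResponse {
    yml_contents: HashMap<String, String>,
    errors: HashMap<String, GenerateApiError>,
}

#[derive(Debug, Deserialize)]
struct GenerateApiError {
    message: String,
    error_type: Option<String>,
    context: Option<String>,
}

fn main() {
    // Invented sample body: one model generated, one model failed with a structured error.
    let body = r#"{
        "yml_contents": {"orders": "version: 1\n"},
        "errors": {
            "customers": {
                "message": "column type not supported",
                "error_type": "UnsupportedType",
                "context": "customers.lifetime_value"
            }
        }
    }"#;
    let parsed: GenerateApiResponse = serde_json::from_str(body).unwrap();
    println!("generated {} model(s)", parsed.yml_contents.len());
    for (model, err) in &parsed.errors {
        // Mirrors the richer fields the CLI now prints per failed model.
        println!("{}: {} ({:?}, {:?})", model, err.message, err.error_type, err.context);
    }
}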