mirror of https://github.com/buster-so/buster.git
functioning properly
This commit is contained in:
parent 0a4198fed1
commit 26fe0ff42e
@@ -257,19 +257,8 @@ fn generate_default_sql(model: &Model) -> String {
}

/// Get SQL content for a model, using original_file_path if available, or falling back to other methods.
fn get_sql_content_for_model(model: &Model, buster_config_dir: &Path, yml_path_for_fallback: &Path) -> Result<String> {
    if let Some(ref rel_sql_path_str) = model.original_file_path {
        let sql_path = buster_config_dir.join(rel_sql_path_str);
        if sql_path.exists() {
            return fs::read_to_string(&sql_path).map_err(|e| {
                anyhow!("Failed to read SQL content from {} (original_file_path): {}", sql_path.display(), e)
            });
        } else {
            println!("Warning: original_file_path {} not found for model {}. Falling back or generating default SQL.", sql_path.display(), model.name.yellow());
            // Fall through to default generation or error if strict
        }
    }
    // Fallback for models without original_file_path (e.g. individually deployed YMLs)
fn get_sql_content_for_model(model: &Model, _buster_config_dir: &Path, yml_path_for_fallback: &Path) -> Result<String> {
    // Fallback for models: try to find an associated .sql file or generate default.
    let found_sql_path = find_sql_file(yml_path_for_fallback); // yml_path_for_fallback is the path of the .yml file itself
    if let Some(ref p) = found_sql_path {
        Ok(fs::read_to_string(p)?)
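For context, the lookup order in this hunk (read the SQL referenced by original_file_path under the Buster config directory, otherwise look for a .sql file associated with the YAML, otherwise fall back to generated default SQL) can be illustrated in isolation roughly as follows. This is a minimal sketch: Model is trimmed to the two fields it needs, and find_sql_file and generate_default_sql are simplified placeholders rather than the implementations in this crate.

use std::fs;
use std::path::{Path, PathBuf};

// Trimmed stand-in for the real Model; only the fields this sketch needs.
struct Model {
    name: String,
    original_file_path: Option<String>,
}

// Simplified placeholder: look for a .sql file next to the YAML file.
fn find_sql_file(yml_path: &Path) -> Option<PathBuf> {
    let candidate = yml_path.with_extension("sql");
    if candidate.exists() { Some(candidate) } else { None }
}

// Simplified placeholder for the default-SQL generator.
fn generate_default_sql(model: &Model) -> String {
    format!("select * from {}", model.name)
}

// Resolution order: original_file_path -> sibling .sql file -> generated default.
fn get_sql_content(model: &Model, config_dir: &Path, yml_path: &Path) -> std::io::Result<String> {
    if let Some(rel) = &model.original_file_path {
        let sql_path = config_dir.join(rel);
        if sql_path.exists() {
            return fs::read_to_string(&sql_path);
        }
        eprintln!("warning: {} not found for model {}", sql_path.display(), model.name);
    }
    if let Some(p) = find_sql_file(yml_path) {
        return fs::read_to_string(p);
    }
    Ok(generate_default_sql(model))
}

fn main() -> std::io::Result<()> {
    let model = Model { name: "orders".to_string(), original_file_path: None };
    let sql = get_sql_content(&model, Path::new("."), Path::new("models/orders.yml"))?;
    println!("{sql}");
    Ok(())
}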
@@ -799,8 +788,8 @@ mod tests {
        let single_model_yml = r#"
name: test_model
description: "Test model"
original_file_path: "some/path/model.sql"
dimensions:

dimension:
  - name: dim1
    description: "First dimension"
    type: "string"
@@ -816,20 +805,17 @@ measures:
        let models = parse_model_file(&single_model_path)?;
        assert_eq!(models.len(), 1);
        assert_eq!(models[0].name, "test_model");
        assert_eq!(models[0].original_file_path, Some("some/path/model.sql".to_string()));

        let multi_model_yml = r#"
models:
  - name: model1
    description: "First model"
    original_file_path: "models/model1.sql"
    dimensions:
      - name: dim1
        description: "First dimension"
        type: "string"
  - name: model2
    description: "Second model"
    original_file_path: "models/model2.sql"
    measures:
      - name: measure1
        description: "First measure"
@@ -842,9 +828,7 @@ models:
        let models = parse_model_file(&multi_model_path)?;
        assert_eq!(models.len(), 2);
        assert_eq!(models[0].name, "model1");
        assert_eq!(models[0].original_file_path, Some("models/model1.sql".to_string()));
        assert_eq!(models[1].name, "model2");
        assert_eq!(models[1].original_file_path, Some("models/model2.sql".to_string()));

        Ok(())
    }
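The fixtures above cover both layouts that parse_model_file is expected to accept: a bare single-model document and a document with a top-level models: list. One minimal way to accept either shape is an untagged serde enum, sketched below on the assumption that serde and serde_yaml are available; the Model fields are reduced to what these tests assert on, and the crate's real parser may well differ.

use serde::Deserialize;

// Reduced model: only the fields the tests above assert on.
#[derive(Debug, Deserialize)]
struct Model {
    name: String,
    description: Option<String>,
    original_file_path: Option<String>,
}

// Accept either a bare single-model document or a document with a top-level `models:` list.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ModelFile {
    Multi { models: Vec<Model> },
    Single(Model),
}

fn parse_models(yaml: &str) -> Result<Vec<Model>, serde_yaml::Error> {
    Ok(match serde_yaml::from_str::<ModelFile>(yaml)? {
        ModelFile::Multi { models } => models,
        ModelFile::Single(model) => vec![model],
    })
}

fn main() -> Result<(), serde_yaml::Error> {
    let single = "name: test_model\noriginal_file_path: some/path/model.sql\n";
    let multi = "models:\n  - name: model1\n  - name: model2\n";
    println!("{:?}", parse_models(single)?);
    println!("{:?}", parse_models(multi)?);
    Ok(())
}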
@@ -862,7 +846,6 @@ models:
            metrics: vec![],
            filters: vec![],
            relationships: vec![],
            original_file_path: Some("m1.sql".to_string()),
        };

        let model2 = Model {
@@ -876,7 +859,6 @@ models:
            metrics: vec![],
            filters: vec![],
            relationships: vec![],
            original_file_path: None,
        };

        let model3 = Model {
@@ -890,7 +872,6 @@ models:
            metrics: vec![],
            filters: vec![],
            relationships: vec![],
            original_file_path: Some("path/to/m3.sql".to_string()),
        };

        let project_context = ProjectContext {
@@ -926,17 +907,14 @@ models:
        assert_eq!(resolved_models[0].data_source_name, Some("model1_ds".to_string()));
        assert_eq!(resolved_models[0].schema, Some("project_schema".to_string()));
        assert_eq!(resolved_models[0].database, Some("global_db".to_string()));
        assert_eq!(resolved_models[0].original_file_path, Some("m1.sql".to_string()));

        assert_eq!(resolved_models[1].data_source_name, Some("project_ds".to_string()));
        assert_eq!(resolved_models[1].schema, Some("project_schema".to_string()));
        assert_eq!(resolved_models[1].database, Some("model2_db".to_string()));
        assert_eq!(resolved_models[1].original_file_path, None);

        assert_eq!(resolved_models[2].data_source_name, Some("global_ds".to_string()));
        assert_eq!(resolved_models[2].schema, Some("global_schema".to_string()));
        assert_eq!(resolved_models[2].database, Some("global_db".to_string()));
        assert_eq!(resolved_models[2].original_file_path, Some("path/to/m3.sql".to_string()));

        Ok(())
    }
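These assertions pin down a precedence order for connection settings: a value set on the model wins, then the project context, then the global config (model1 keeps its own data source but inherits the project schema and the global database). That cascade is essentially a chain of Option::or_else calls; a self-contained sketch follows, where Layer and Resolved are illustrative stand-ins rather than the crate's ProjectContext or Model types.

// Trimmed stand-ins: one struct per configuration layer and one for the resolved result.
#[derive(Clone, Debug, PartialEq)]
struct Resolved {
    data_source_name: Option<String>,
    schema: Option<String>,
    database: Option<String>,
}

struct Layer {
    data_source_name: Option<String>,
    schema: Option<String>,
    database: Option<String>,
}

// Model-level values win, then project-level, then global.
fn resolve(model: &Layer, project: &Layer, global: &Layer) -> Resolved {
    let pick = |m: &Option<String>, p: &Option<String>, g: &Option<String>| {
        m.clone().or_else(|| p.clone()).or_else(|| g.clone())
    };
    Resolved {
        data_source_name: pick(&model.data_source_name, &project.data_source_name, &global.data_source_name),
        schema: pick(&model.schema, &project.schema, &global.schema),
        database: pick(&model.database, &project.database, &global.database),
    }
}

fn main() {
    let model = Layer { data_source_name: Some("model1_ds".into()), schema: None, database: None };
    let project = Layer { data_source_name: Some("project_ds".into()), schema: Some("project_schema".into()), database: None };
    let global = Layer { data_source_name: Some("global_ds".into()), schema: Some("global_schema".into()), database: Some("global_db".into()) };
    let resolved = resolve(&model, &project, &global);
    assert_eq!(resolved.data_source_name.as_deref(), Some("model1_ds"));
    assert_eq!(resolved.schema.as_deref(), Some("project_schema"));
    assert_eq!(resolved.database.as_deref(), Some("global_db"));
    println!("{resolved:?}");
}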
@@ -977,7 +955,6 @@ models:
                    description: Some("Relationship to another model".to_string()),
                }
            ],
            original_file_path: Some("test_model.sql".to_string()),
        };

        let sql_content = "SELECT * FROM test_schema.test_model";
@@ -6,7 +6,7 @@ use std::path::{Path, PathBuf};
// use std::time::Duration; // Duration seems unused here now

use crate::utils::config::BusterConfig;
use crate::commands::init::{YamlModel, YamlDimension, YamlMeasure}; // is_measure_type is also in init
use crate::commands::init::{YamlModel, YamlDimension, YamlMeasure, is_measure_type};

// Use new struct names from dbt_utils
use dbt_utils::models::{DbtCatalog, CatalogNode, ColumnMetadata, TableMetadata}; // CatalogMetadata might not be directly used here
@@ -303,32 +303,46 @@ pub async fn generate_semantic_models_command(
            if let Some(dbt_col) = dbt_columns_map.remove(&existing_dim_col.name) {
                let mut updated_dim = existing_dim_col.clone();
                let mut dim_col_updated = false;
                if updated_dim.type_.as_deref() != Some(&dbt_col.type_) { // type_ is String
                    updated_dim.type_ = Some(dbt_col.type_.clone()); dim_col_updated = true; columns_updated_count +=1;

                if !crate::commands::init::is_measure_type(&dbt_col.type_) { // Still a dimension
                    if updated_dim.type_.as_deref() != Some(&dbt_col.type_) {
                        updated_dim.type_ = Some(dbt_col.type_.clone());
                        dim_col_updated = true;
                    }
                    if dbt_col.comment.is_some() && updated_dim.description != dbt_col.comment {
                        updated_dim.description = dbt_col.comment.clone();
                        dim_col_updated = true;
                    }
                    if dim_col_updated { columns_updated_count +=1; model_was_updated = true; }
                    current_dims.push(updated_dim);
                } else { // Was a dimension, but is now a measure according to dbt_col
                    columns_removed_count += 1; model_was_updated = true; // Will be added as a new measure later
                }
                if dbt_col.comment.is_some() && updated_dim.description != dbt_col.comment {
                    updated_dim.description = dbt_col.comment.clone(); dim_col_updated = true; columns_updated_count +=1;
                }
                current_dims.push(updated_dim);
                if dim_col_updated { model_was_updated = true; }
            } else { columns_removed_count += 1; model_was_updated = true; }
        }
        for existing_measure_col in std::mem::take(&mut existing_model.measures) {
            if let Some(dbt_col) = dbt_columns_map.remove(&existing_measure_col.name) {
                let mut updated_measure = existing_measure_col.clone();
                let mut measure_col_updated = false;
                if updated_measure.type_.as_deref() != Some(&dbt_col.type_) { // type_ is String
                    updated_measure.type_ = Some(dbt_col.type_.clone()); measure_col_updated = true; columns_updated_count +=1;

                if crate::commands::init::is_measure_type(&dbt_col.type_) { // Still a measure
                    if updated_measure.type_.as_deref() != Some(&dbt_col.type_) {
                        updated_measure.type_ = Some(dbt_col.type_.clone());
                        measure_col_updated = true;
                    }
                    if dbt_col.comment.is_some() && updated_measure.description != dbt_col.comment {
                        updated_measure.description = dbt_col.comment.clone();
                        measure_col_updated = true;
                    }
                    if measure_col_updated { columns_updated_count +=1; model_was_updated = true; }
                    current_measures.push(updated_measure);
                } else { // Was a measure, but is now a dimension
                    columns_removed_count += 1; model_was_updated = true; // Will be added as a new dimension later
                }
                if dbt_col.comment.is_some() && updated_measure.description != dbt_col.comment {
                    updated_measure.description = dbt_col.comment.clone(); measure_col_updated = true; columns_updated_count +=1;
                }
                current_measures.push(updated_measure);
                if measure_col_updated { model_was_updated = true; }
            } else { columns_removed_count += 1; model_was_updated = true; }
        }
        for (_col_name, dbt_col) in dbt_columns_map { // Remaining are new columns
            if crate::commands::init::is_measure_type(&dbt_col.type_) { // type_ is String
            if crate::commands::init::is_measure_type(&dbt_col.type_) {
                current_measures.push(YamlMeasure { name: dbt_col.name.clone(), description: dbt_col.comment.clone(), type_: Some(dbt_col.type_.clone()) });
            } else {
                current_dims.push(YamlDimension { name: dbt_col.name.clone(), description: dbt_col.comment.clone(), type_: Some(dbt_col.type_.clone()), searchable: false, options: None });
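The reworked loops above gate the update path on is_measure_type: an existing dimension whose catalog type is now measure-like is counted as removed so it can reappear as a measure, and symmetrically for measures, while type and description changes are folded in otherwise. A reduced sketch of the dimension-side reconciliation is shown below; DbtColumn, YamlDimension and YamlMeasure are trimmed stand-ins, is_measure_type is a rough approximation of the helper in init, and the sketch reclassifies the column directly instead of deferring it to a later pass as the surrounding code does.

use std::collections::HashMap;

// Trimmed stand-ins for the catalog column and the YAML column types used in the hunk above.
#[derive(Clone, Debug)]
struct DbtColumn { name: String, type_: String, comment: Option<String> }
#[derive(Clone, Debug)]
struct YamlDimension { name: String, description: Option<String>, type_: Option<String> }
#[derive(Clone, Debug)]
struct YamlMeasure { name: String, description: Option<String>, type_: Option<String> }

// Rough approximation of crate::commands::init::is_measure_type.
fn is_measure_type(sql_type: &str) -> bool {
    let t = sql_type.to_lowercase();
    t.contains("int") || t.contains("numeric") || t.contains("float") || t.contains("number")
}

// Reconcile existing dimensions against catalog columns: update in place while the column
// is still dimension-like, otherwise drop it so it can come back as a measure.
fn reconcile_dimensions(
    existing: Vec<YamlDimension>,
    catalog: &mut HashMap<String, DbtColumn>,
) -> (Vec<YamlDimension>, Vec<YamlMeasure>, usize) {
    let mut dims = Vec::new();
    let mut reclassified = Vec::new();
    let mut removed = 0;
    for dim in existing {
        match catalog.remove(&dim.name) {
            Some(col) if !is_measure_type(&col.type_) => {
                let mut d = dim.clone();
                d.type_ = Some(col.type_.clone());
                if col.comment.is_some() {
                    d.description = col.comment.clone();
                }
                dims.push(d);
            }
            Some(col) => {
                // Dimension is now measure-like: count it as removed and reclassify.
                removed += 1;
                reclassified.push(YamlMeasure { name: col.name, description: col.comment, type_: Some(col.type_) });
            }
            None => removed += 1, // column no longer present in the catalog
        }
    }
    (dims, reclassified, removed)
}

fn main() {
    let mut catalog = HashMap::from([
        ("amount".to_string(), DbtColumn { name: "amount".into(), type_: "numeric".into(), comment: None }),
        ("status".to_string(), DbtColumn { name: "status".into(), type_: "varchar".into(), comment: Some("Order status".into()) }),
    ]);
    let existing = vec![
        YamlDimension { name: "amount".into(), description: None, type_: Some("varchar".into()) },
        YamlDimension { name: "status".into(), description: None, type_: Some("varchar".into()) },
    ];
    let (dims, measures, removed) = reconcile_dimensions(existing, &mut catalog);
    println!("dims: {dims:?}\nreclassified: {measures:?}\nremoved: {removed}");
}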
@@ -94,6 +94,16 @@ pub fn is_measure_type(sql_type: &str) -> bool {
    lower_sql_type.contains("number")
}

// Helper function to normalize and unquote catalog types
pub fn normalize_catalog_type(catalog_type_value: &str) -> String {
    let trimmed_type = catalog_type_value.trim();
    if trimmed_type.starts_with('"') && trimmed_type.ends_with('"') && trimmed_type.len() > 1 {
        trimmed_type[1..trimmed_type.len() - 1].to_string()
    } else {
        trimmed_type.to_string()
    }
}

// Enum for Database Type selection (ensure only one definition, placed before use)
#[derive(Debug, Clone)]
enum DatabaseType {
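normalize_catalog_type only trims whitespace and strips a single pair of surrounding double quotes, which is what quoted type names coming out of a dbt catalog need. A few illustrative cases, written as a hypothetical test (not one present in this commit, and assuming it sits alongside the function):

#[cfg(test)]
mod normalize_catalog_type_tests {
    use super::normalize_catalog_type;

    #[test]
    fn strips_one_pair_of_surrounding_quotes() {
        assert_eq!(normalize_catalog_type("  \"VARCHAR\"  "), "VARCHAR");
        assert_eq!(normalize_catalog_type("numeric(38,0)"), "numeric(38,0)");
        // Only the outer pair is removed; inner quotes are left alone.
        assert_eq!(normalize_catalog_type("\"weird\"type\""), "weird\"type");
    }
}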
@@ -827,7 +837,7 @@ async fn generate_semantic_models_from_dbt_catalog(
    let mut measures: Vec<YamlMeasure> = Vec::new();

    for (_col_name, col_meta) in &catalog_node.columns { // col_meta is &ColumnMetadata
        if is_measure_type(&col_meta.type_) { // Pass &String, is_measure_type takes &str
        if is_measure_type(&col_meta.type_) {
            measures.push(YamlMeasure {
                name: col_meta.name.clone(),
                description: col_meta.comment.clone(),