feat(datasets): Add database_identifier support for dataset creation and deployment

- Extend Dataset model and schema to include optional database_identifier field
- Update dataset creation and deployment routes to handle new database_identifier parameter
- Modify dataset DDL generation to use database_identifier for schema resolution when available
This commit is contained in:
dal 2025-02-06 15:18:40 -07:00
parent 77045a30c2
commit 33d5990907
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
9 changed files with 20 additions and 1 deletion

View File

@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
-- Reverts the addition of `database_identifier` by dropping the column.
-- NOTE(review): any values stored in this column are lost on rollback.
ALTER TABLE datasets DROP COLUMN database_identifier;

View File

@ -0,0 +1,2 @@
-- Your SQL goes here
-- Adds an optional database identifier to datasets. When present, it is
-- prefixed to the schema (database_identifier.schema) during DDL generation
-- so tables can be referenced with a fully qualified name. Nullable so
-- existing rows and callers that do not supply it remain valid.
ALTER TABLE datasets ADD COLUMN database_identifier TEXT NULL;

View File

@ -189,6 +189,7 @@ pub struct Dataset {
pub deleted_at: Option<DateTime<Utc>>,
pub model: Option<String>,
pub yml_file: Option<String>,
pub database_identifier: Option<String>,
}
#[derive(Insertable, Queryable, Associations, Debug)]

View File

@ -252,6 +252,7 @@ diesel::table! {
deleted_at -> Nullable<Timestamptz>,
model -> Nullable<Text>,
yml_file -> Nullable<Text>,
database_identifier -> Nullable<Text>,
}
}

View File

@ -48,6 +48,7 @@ pub struct FullDeployDatasetsRequest {
pub entity_relationships: Option<Vec<DeployDatasetsEntityRelationshipsRequest>>,
pub columns: Vec<DeployDatasetsColumnsRequest>,
pub yml_file: Option<String>,
pub database_identifier: Option<String>,
}
#[derive(Debug, Deserialize)]
@ -238,6 +239,7 @@ async fn process_deploy_request(
entity_relationships: Some(entity_relationships),
columns,
yml_file: Some(yml.clone()),
database_identifier: None,
});
}
@ -323,6 +325,7 @@ async fn deploy_datasets_handler(
organization_id,
model: req.model.clone(),
yml_file: req.yml_file.clone(),
database_identifier: req.database_identifier.clone(),
};
match req.id {

View File

@ -127,6 +127,7 @@ async fn post_dataset_handler(
deleted_at: None,
model: None,
yml_file: None,
database_identifier: None,
};
diesel::insert_into(datasets::table)

View File

@ -235,6 +235,7 @@ async fn create_dataset(user_id: &Uuid, name: &String, data_source_id: &Uuid) ->
organization_id: user_org_id,
yml_file: None,
model: None,
database_identifier: None,
};
let mut conn = match get_pg_pool().get().await {

View File

@ -929,9 +929,16 @@ fn create_dataset_ddl(dataset: &Dataset, dataset_columns: &Vec<DatasetColumn>) -
if let Some(when_to_use) = &dataset.when_to_use {
ddl.push_str(&format!(" -- Description: {}\n", when_to_use));
}
let schema_identifier = if let Some(db_id) = &dataset.database_identifier {
format!("{}.{}", db_id, dataset.schema)
} else {
dataset.schema.clone()
};
ddl.push_str(&format!(
" CREATE TABLE {}.{} (\n",
dataset.schema, dataset.database_name
schema_identifier, dataset.database_name
));
// Add columns

View File

@ -82,6 +82,7 @@ async fn create_datasets(
deleted_at: None,
yml_file: None,
model: None,
database_identifier: None,
})
.collect::<Vec<Dataset>>();