Merge pull request #109 from buster-so/dal/database_identifier_lineage

Fix: database_identifier_fix
This commit is contained in:
dal 2025-02-10 06:28:08 -08:00 committed by GitHub
commit 89bd57b0ae
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
10 changed files with 21 additions and 1 deletions

View File

@ -0,0 +1,2 @@
-- This file should undo anything in `up.sql`
-- Reverts the up migration by dropping the optional `database_identifier`
-- column from `datasets`. NOTE: any values stored in the column are lost on
-- rollback.
ALTER TABLE datasets DROP COLUMN database_identifier;

View File

@ -0,0 +1,2 @@
-- Your SQL goes here
-- Adds an optional `database_identifier` column to `datasets`. When present,
-- it is prepended to the schema (`<database_identifier>.<schema>`) while
-- generating table DDL, allowing cross-database references. Declared NULLable
-- so existing rows remain valid and the change is backward compatible.
ALTER TABLE datasets ADD COLUMN database_identifier TEXT NULL;

View File

@ -189,6 +189,7 @@ pub struct Dataset {
pub deleted_at: Option<DateTime<Utc>>,
pub model: Option<String>,
pub yml_file: Option<String>,
pub database_identifier: Option<String>,
}
#[derive(Insertable, Queryable, Associations, Debug)]

View File

@ -252,6 +252,7 @@ diesel::table! {
deleted_at -> Nullable<Timestamptz>,
model -> Nullable<Text>,
yml_file -> Nullable<Text>,
database_identifier -> Nullable<Text>,
}
}

View File

@ -48,6 +48,7 @@ pub struct FullDeployDatasetsRequest {
pub entity_relationships: Option<Vec<DeployDatasetsEntityRelationshipsRequest>>,
pub columns: Vec<DeployDatasetsColumnsRequest>,
pub yml_file: Option<String>,
pub database_identifier: Option<String>,
}
#[derive(Debug, Deserialize)]
@ -238,6 +239,7 @@ async fn process_deploy_request(
entity_relationships: Some(entity_relationships),
columns,
yml_file: Some(yml.clone()),
database_identifier: None,
});
}
@ -323,6 +325,7 @@ async fn deploy_datasets_handler(
organization_id,
model: req.model.clone(),
yml_file: req.yml_file.clone(),
database_identifier: req.database_identifier.clone(),
};
match req.id {

View File

@ -127,6 +127,7 @@ async fn post_dataset_handler(
deleted_at: None,
model: None,
yml_file: None,
database_identifier: None,
};
diesel::insert_into(datasets::table)

View File

@ -235,6 +235,7 @@ async fn create_dataset(user_id: &Uuid, name: &String, data_source_id: &Uuid) ->
organization_id: user_org_id,
yml_file: None,
model: None,
database_identifier: None,
};
let mut conn = match get_pg_pool().get().await {

View File

@ -929,9 +929,16 @@ fn create_dataset_ddl(dataset: &Dataset, dataset_columns: &Vec<DatasetColumn>) -
if let Some(when_to_use) = &dataset.when_to_use {
ddl.push_str(&format!(" -- Description: {}\n", when_to_use));
}
let schema_identifier = if let Some(db_id) = &dataset.database_identifier {
format!("{}.{}", db_id, dataset.schema)
} else {
dataset.schema.clone()
};
ddl.push_str(&format!(
" CREATE TABLE {}.{} (\n",
dataset.schema, dataset.database_name
schema_identifier, dataset.database_name
));
// Add columns

View File

@ -39,6 +39,7 @@ Please output the SQL delimited in ```sql tags.
- Always order dates in ascending order.
- When working with time series data, always return a date field.
- You must use the schema when referencing tables. Like this pattern <SCHEMA_NAME>.<TABLE_NAME>
- Pay attention to the database identifier. It may be used to reference tables across multiple databases.
- Never use the 'SELECT *' or 'SELECT COUNT(*)' command. You must select the columns you want to see/use.
- Users may mention formatting or charting. Although this task is specific to SQL generation, the user is referring to future steps for visualization.
- A request for a line chart should default to using a date-related field unless the user specifies otherwise or it is not available.

View File

@ -82,6 +82,7 @@ async fn create_datasets(
deleted_at: None,
yml_file: None,
model: None,
database_identifier: None,
})
.collect::<Vec<Dataset>>();