Add yml_file field to Dataset model and related API structures

- Introduced a new optional `yml_file` field in the Dataset model to store YAML file references.
- Updated the database schema to include the `yml_file` column in the datasets table.
- Modified various API request and response structures to accommodate the new `yml_file` field.
- Enhanced dataset handling functions to support the inclusion of `yml_file` in dataset operations.

These changes improve the dataset management capabilities by allowing the association of YAML files with datasets, facilitating better data organization and retrieval.
This commit was authored by dal on 2025-01-08 at 22:33:14 -07:00.
parent f1ab3c74a5
commit c74016d3bd
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
16 changed files with 21 additions and 20 deletions

View File

@ -0,0 +1,3 @@
-- Revert `up.sql`: remove the yml_file column from the datasets table.
ALTER TABLE datasets DROP COLUMN yml_file;

View File

@ -0,0 +1,3 @@
-- Add an optional yml_file column to datasets for storing YAML file references.
ALTER TABLE datasets ADD COLUMN yml_file TEXT;

View File

@ -159,6 +159,7 @@ pub struct Dataset {
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
pub deleted_at: Option<DateTime<Utc>>,
pub yml_file: Option<String>,
}
#[derive(Insertable, Queryable, Associations, Debug)]

View File

@ -238,6 +238,7 @@ diesel::table! {
updated_at -> Timestamptz,
deleted_at -> Nullable<Timestamptz>,
model -> Nullable<Text>,
yml_file -> Nullable<Text>,
}
}

View File

@ -35,6 +35,7 @@ pub struct PostDatasetsRequest {
pub sql_definition: Option<String>,
pub entity_relationships: Option<Vec<PostDatasetsEntityRelationshipsRequest>>,
pub columns: Vec<PostDatasetsColumnsRequest>,
pub yml_file: Option<String>,
}
#[derive(Debug, Deserialize)]
@ -144,6 +145,7 @@ async fn post_datasets_handler(user_id: &Uuid, requests: Vec<PostDatasetsRequest
deleted_at: None,
imported: false,
organization_id,
yml_file: None,
};
datasets.push(dataset);

View File

@ -155,6 +155,7 @@ async fn get_data_source_datasets(data_source_id: Uuid) -> Result<Vec<Dataset>>
datasets::created_at,
datasets::updated_at,
datasets::deleted_at,
datasets::yml_file,
))
.filter(datasets::data_source_id.eq(&data_source_id))
.filter(datasets::deleted_at.is_null())

View File

@ -188,6 +188,7 @@ async fn get_dataset_and_columns(
datasets::created_at,
datasets::updated_at,
datasets::deleted_at.nullable(),
datasets::yml_file.nullable(),
),
(
dataset_columns::id,

View File

@ -257,6 +257,7 @@ async fn create_dataset(user_id: &Uuid, name: &String, data_source_id: &Uuid) ->
deleted_at: None,
imported: false,
organization_id: user_org_id,
yml_file: None,
};
let mut conn = match get_pg_pool().get().await {

View File

@ -705,6 +705,7 @@ async fn get_user_specified_dataset(
datasets::created_at,
datasets::updated_at,
datasets::deleted_at,
datasets::yml_file,
))
.inner_join(
datasets_to_permission_groups::table
@ -774,6 +775,7 @@ async fn get_permissioned_datasets(pool: &PgPool, user_id: &Uuid) -> Result<Vec<
datasets::created_at,
datasets::updated_at,
datasets::deleted_at,
datasets::yml_file,
))
.inner_join(
datasets_to_permission_groups::table.on(datasets::id

View File

@ -80,6 +80,7 @@ async fn create_datasets(
created_at: Utc::now(),
updated_at: Utc::now(),
deleted_at: None,
yml_file: None,
})
.collect::<Vec<Dataset>>();

View File

@ -42,6 +42,7 @@ pub async fn get_permissioned_datasets(
datasets::created_at,
datasets::updated_at,
datasets::deleted_at,
datasets::yml_file,
))
.inner_join(
datasets_to_permission_groups::table

View File

@ -1,6 +1,5 @@
use anyhow::Result;
use inquire::MultiSelect;
use ratatui::style::Stylize;
use tokio::task::JoinSet;
use crate::utils::{

View File

@ -3,12 +3,6 @@ use reqwest::{
header::{HeaderMap, HeaderValue},
Client,
};
use serde::{Deserialize, Serialize};
use crate::{
error::BusterError,
utils::profiles::{Credential, Profile},
};
use super::{
PostDataSourcesRequest, PostDatasetsRequest, ValidateApiKeyRequest, ValidateApiKeyResponse,

View File

@ -31,6 +31,7 @@ pub struct PostDatasetsRequest {
pub sql_definition: Option<String>,
pub entity_relationships: Option<Vec<PostDatasetsEntityRelationshipsRequest>>,
pub columns: Vec<PostDatasetsColumnsRequest>,
pub yml_file: String,
}
#[derive(Debug, Serialize)]

View File

@ -17,6 +17,7 @@ use super::{
/// A model bundled with the raw source artifacts gathered during directory processing.
pub struct BusterModelObject {
/// Raw SQL definition for the model; forwarded as `sql_definition` when uploading.
pub sql_definition: String,
/// Parsed model structure (project-declared `BusterModel` type — presumably
/// deserialized from the YAML file; confirm against `process_directory`).
pub model_file: BusterModel,
/// Raw YAML text of the model file; sent to the API as the `yml_file` field.
pub yml_content: String,
}
#[derive(Debug, Serialize, Deserialize)]
@ -91,6 +92,7 @@ async fn process_directory(
model_objects.push(BusterModelObject {
sql_definition,
model_file: model,
yml_content: yaml_content,
});
}
}
@ -160,6 +162,7 @@ pub async fn upload_model_files(
sql_definition: Some(model.sql_definition.clone()),
entity_relationships: Some(entity_relationships),
columns,
yml_file: model.yml_content.clone(),
};
post_datasets_req_body.push(dataset);

View File

@ -44,19 +44,6 @@ pub enum Credential {
}
impl Credential {
/// Returns the canonical database-type identifier string for this credential variant.
pub fn get_db_type(&self) -> String {
    // Map each variant to its static identifier first, then allocate once.
    let db_type = match self {
        Credential::Postgres(_) => "postgres",
        Credential::MySQL(_) => "mysql",
        Credential::Bigquery(_) => "bigquery",
        Credential::SqlServer(_) => "sqlserver",
        Credential::Redshift(_) => "redshift",
        Credential::Databricks(_) => "databricks",
        Credential::Snowflake(_) => "snowflake",
        Credential::Starrocks(_) => "starrocks",
    };
    db_type.to_string()
}
pub fn get_schema(&self) -> String {
match self {
Credential::Postgres(cred) => cred.schema.clone(),