Added the user avatar URL

dal 2025-03-05 13:22:01 -07:00
parent b466a061c4
commit bba0e30b67
21 changed files with 264 additions and 216 deletions

View File

@ -365,6 +365,7 @@ pub struct User {
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
pub attributes: Value,
pub avatar_url: Option<String>,
}
#[derive(
@ -702,4 +703,4 @@ pub enum StepProgress {
InProgress,
Completed,
Failed,
}

View File

@ -564,6 +564,7 @@ diesel::table! {
created_at -> Timestamptz,
updated_at -> Timestamptz,
attributes -> Jsonb,
avatar_url -> Nullable<Text>,
}
}
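Taken together, the model and schema changes above follow the usual Diesel convention: the new column is nullable, so it appears as Nullable<Text> in schema.rs and Option<String> on the User struct, and rows created before this migration deserialize with avatar_url set to None. A minimal sketch (hypothetical struct, not one of the changed files) of how that option serializes for a legacy user:

use serde::Serialize;

#[derive(Serialize)]
struct UserAvatarSketch {
    id: String,
    avatar_url: Option<String>, // Nullable<Text> <-> Option<String>
}

fn main() {
    let legacy_user = UserAvatarSketch { id: "u1".into(), avatar_url: None };
    // Prints {"id":"u1","avatar_url":null} -- existing rows need no backfill.
    println!("{}", serde_json::to_string(&legacy_user).unwrap());
}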

View File

@ -29,10 +29,17 @@ pub struct ChatListItem {
pub last_edited: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct PaginationInfo {
pub has_more: bool,
pub next_page_token: Option<String>,
pub total_items: i32, // Number of items in the current page
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ListChatsResponse {
pub items: Vec<ChatListItem>,
pub next_page_token: Option<String>,
pub pagination: PaginationInfo,
}
#[derive(Queryable)]
@ -76,11 +83,8 @@ pub async fn list_chats_handler(
// Add cursor-based pagination if page_token is provided
if let Some(token) = request.page_token {
let decoded_token = base64::decode(&token)
.map_err(|_| anyhow!("Invalid page token"))?;
let cursor_timestamp = String::from_utf8(decoded_token)
.map_err(|_| anyhow!("Invalid page token format"))?;
let cursor_dt = DateTime::parse_from_rfc3339(&cursor_timestamp)
// Parse the RFC3339 timestamp directly
let cursor_dt = DateTime::parse_from_rfc3339(&token)
.map_err(|_| anyhow!("Invalid timestamp in page token"))?
.with_timezone(&Utc);
@ -106,9 +110,9 @@ pub async fn list_chats_handler(
.load::<ChatWithUser>(&mut conn)
.await?;
// Check if there are more results
// Check if there are more results and prepare pagination info
let has_more = results.len() > request.page_size as usize;
let mut items = results
let items: Vec<ChatListItem> = results
.into_iter()
.take(request.page_size as usize)
.map(|chat| {
@ -130,19 +134,22 @@ pub async fn list_chats_handler(
last_edited: chat.updated_at.to_rfc3339(),
}
})
.collect::<Vec<_>>();
// Generate next page token if there are more results
let next_page_token = if has_more {
items
.last()
.map(|last_item| base64::encode(&last_item.created_at))
} else {
None
.collect();
// Create pagination info
let pagination = PaginationInfo {
has_more,
next_page_token: if has_more {
// Just use the RFC3339 timestamp directly as the token
items.last().map(|last_item| last_item.created_at.clone())
} else {
None
},
total_items: items.len() as i32,
};
Ok(ListChatsResponse {
items,
next_page_token,
pagination,
})
}
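With this change the cursor is no longer base64-wrapped: the handler emits the last item's created_at as a plain RFC3339 string in pagination.next_page_token, and the caller sends that value straight back as page_token on the next request. A minimal sketch (sample token value, using the chrono crate already imported above) of how such a cursor round-trips:

use chrono::{DateTime, Utc};

fn main() -> anyhow::Result<()> {
    // A token exactly as the handler now emits it (sample value).
    let token = "2025-03-05T20:22:01Z";
    // No base64 decode step any more; parse the timestamp directly.
    let cursor: DateTime<Utc> = DateTime::parse_from_rfc3339(token)?.with_timezone(&Utc);
    println!("next page cursor: {cursor}");
    Ok(())
}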

View File

@ -10,7 +10,7 @@ use serde_yaml;
use crate::files::dashboard_files::types::{
BusterDashboard, BusterDashboardResponse, DashboardConfig, DashboardRow, DashboardRowItem,
};
use crate::files::metric_files::helpers::get_metric;
use crate::metrics::get_metric_handler;
use database::enums::{AssetPermissionRole, Verification};
use database::pool::get_pg_pool;
use database::schema::dashboard_files;
@ -90,7 +90,7 @@ pub async fn get_dashboard(dashboard_id: &Uuid, user_id: &Uuid) -> Result<Buster
// Fetch all metrics concurrently
let metric_futures: Vec<_> = metric_ids
.iter()
.map(|metric_id| get_metric(metric_id, user_id))
.map(|metric_id| get_metric_handler(metric_id, user_id))
.collect();
let metric_results = join_all(metric_futures).await;

View File

@ -1,7 +1,7 @@
use database::enums::{AssetPermissionRole, Verification};
use serde::{Deserialize, Serialize};
use crate::files::BusterMetric;
use crate::metrics::types::BusterMetric;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BusterDashboardListItem {

View File

@ -1,153 +0,0 @@
use anyhow::{anyhow, Result};
use diesel::{ExpressionMethods, QueryDsl, Queryable, Selectable};
use diesel_async::RunQueryDsl;
use serde_json::{json, Value};
use uuid::Uuid;
use serde_yaml;
use crate::files::metric_files::types::BusterMetric;
use crate::files::{ColumnMetaData, ColumnType, DataMetadata, MinMaxValue, SimpleType};
use database::enums::Verification;
use database::pool::get_pg_pool;
use database::schema::metric_files;
#[derive(Queryable, Selectable)]
#[diesel(table_name = metric_files)]
struct QueryableMetricFile {
id: Uuid,
name: String,
file_name: String,
content: Value,
verification: Verification,
evaluation_obj: Option<Value>,
evaluation_summary: Option<String>,
evaluation_score: Option<f64>,
created_by: Uuid,
created_at: chrono::DateTime<chrono::Utc>,
updated_at: chrono::DateTime<chrono::Utc>,
}
pub async fn get_metric(metric_id: &Uuid, user_id: &Uuid) -> Result<BusterMetric> {
let mut conn = match get_pg_pool().get().await {
Ok(conn) => conn,
Err(e) => return Err(anyhow!("Failed to get database connection: {}", e)),
};
// Query the metric file
let metric_file = metric_files::table
.filter(metric_files::id.eq(metric_id))
.filter(metric_files::deleted_at.is_null())
.select((
metric_files::id,
metric_files::name,
metric_files::file_name,
metric_files::content,
metric_files::verification,
metric_files::evaluation_obj,
metric_files::evaluation_summary,
metric_files::evaluation_score,
metric_files::created_by,
metric_files::created_at,
metric_files::updated_at,
))
.first::<QueryableMetricFile>(&mut conn)
.await
.map_err(|e| match e {
diesel::result::Error::NotFound => anyhow!("Metric file not found or unauthorized"),
_ => anyhow!("Database error: {}", e),
})?;
// Extract fields directly from the JSON content
let content = &metric_file.content;
let title = content
.get("title")
.and_then(Value::as_str)
.unwrap_or("Untitled")
.to_string();
let description = content
.get("description")
.and_then(|v| match v {
Value::Null => None,
v => v.as_str().map(String::from),
});
let sql = content
.get("sql")
.and_then(Value::as_str)
.unwrap_or_default()
.to_string();
// Get chart config directly
let chart_config = content.get("chart_config").cloned().unwrap_or(json!({}));
// Parse data metadata if it exists
let data_metadata = content.get("data_metadata").map(|metadata| {
DataMetadata {
column_count: metadata.as_array().map(|arr| arr.len() as i32).unwrap_or(1),
column_metadata: metadata
.as_array()
.map(|columns| {
columns
.iter()
.map(|col| ColumnMetaData {
name: col
.get("name")
.and_then(Value::as_str)
.unwrap_or("unknown")
.to_string(),
min_value: MinMaxValue::Number(0.0), // Default value
max_value: MinMaxValue::Number(0.0), // Default value
unique_values: 0, // Default value
simple_type: match col.get("data_type").and_then(Value::as_str) {
Some("string") => SimpleType::Text,
Some("number") => SimpleType::Number,
Some("boolean") => SimpleType::Boolean,
Some("date") => SimpleType::Date,
_ => SimpleType::Number,
},
column_type: match col.get("data_type").and_then(Value::as_str) {
Some("string") => ColumnType::Text,
Some("number") => ColumnType::Number,
Some("boolean") => ColumnType::Boolean,
Some("date") => ColumnType::Date,
_ => ColumnType::Number,
},
})
.collect()
})
.unwrap_or_default(),
row_count: 1, // Default value since it's not in your JSON structure
}
});
// Construct BusterMetric
Ok(BusterMetric {
id: metric_file.id.to_string(),
metric_type: "metric".to_string(),
title,
version_number: 1,
description,
file_name: metric_file.file_name,
time_frame: "TODO".to_string(),
dataset_id: "TODO".to_string(),
data_source_id: "TODO".to_string(),
dataset_name: None,
error: None,
chart_config: Some(chart_config),
data_metadata,
status: metric_file.verification,
evaluation_score: metric_file.evaluation_score.map(|score| score.to_string()),
evaluation_summary: metric_file.evaluation_summary.unwrap_or_default(),
file: serde_json::to_string(&content).unwrap_or_default(),
created_at: metric_file.created_at.to_string(),
updated_at: metric_file.updated_at.to_string(),
sent_by_id: metric_file.created_by.to_string(),
sent_by_name: "".to_string(),
sent_by_avatar_url: None,
code: None,
dashboards: vec![],
collections: vec![],
})
}

View File

@ -1,3 +0,0 @@
pub mod get_metric;
pub use get_metric::*;

View File

@ -1,5 +0,0 @@
pub mod types;
pub mod helpers;
pub use types::*;
pub use helpers::*;

View File

@ -1,5 +1,3 @@
pub mod metric_files;
pub mod dashboard_files;
pub use metric_files::*;
pub use dashboard_files::*;

View File

@ -5,10 +5,11 @@ use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::files::metric_files::get_metric;
use query_engine::data_source_helpers;
use query_engine::data_types::DataType;
use crate::metrics::get_metric_handler;
/// Request structure for the get_metric_data handler
#[derive(Debug, Deserialize)]
pub struct GetMetricDataRequest {
@ -36,7 +37,7 @@ pub async fn get_metric_data_handler(
let user_id = user.id;
// Retrieve the metric definition
let metric = get_metric(&request.metric_id, &user_id).await?;
let metric = get_metric_handler(&request.metric_id, &user_id).await?;
// Parse the metric definition from YAML to get SQL and dataset IDs
let metric_yml = serde_json::from_str::<MetricYml>(&metric.file)?;

View File

@ -0,0 +1,186 @@
use anyhow::{anyhow, Result};
use diesel::{
ExpressionMethods, NullableExpressionMethods, QueryDsl, Queryable, Selectable, SelectableHelper,
};
use diesel_async::RunQueryDsl;
use serde_json::{json, Value};
use serde_yaml;
use uuid::Uuid;
use crate::metrics::types::{
BusterMetric, ColumnMetaData, ColumnType, DataMetadata, Dataset, MinMaxValue, SimpleType,
};
use agents::tools::file_tools::file_types::metric_yml::MetricYml;
use database::enums::Verification;
use database::pool::get_pg_pool;
use database::schema::{datasets, metric_files, users};
#[derive(Queryable, Selectable)]
#[diesel(table_name = metric_files)]
struct QueryableMetricFile {
id: Uuid,
name: String,
file_name: String,
content: Value,
verification: Verification,
evaluation_obj: Option<Value>,
evaluation_summary: Option<String>,
evaluation_score: Option<f64>,
created_by: Uuid,
created_at: chrono::DateTime<chrono::Utc>,
updated_at: chrono::DateTime<chrono::Utc>,
}
#[derive(Queryable)]
struct DatasetInfo {
id: Uuid,
name: String,
}
#[derive(Queryable)]
#[diesel(table_name = users)]
struct UserInfo {
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
name: Option<String>,
#[diesel(sql_type = diesel::sql_types::Nullable<diesel::sql_types::Text>)]
avatar_url: Option<String>,
}
/// Handler to retrieve a metric by ID
pub async fn get_metric_handler(metric_id: &Uuid, user_id: &Uuid) -> Result<BusterMetric> {
let mut conn = match get_pg_pool().get().await {
Ok(conn) => conn,
Err(e) => return Err(anyhow!("Failed to get database connection: {}", e)),
};
// Query the metric file
let metric_file = metric_files::table
.filter(metric_files::id.eq(metric_id))
.filter(metric_files::deleted_at.is_null())
.select((
metric_files::id,
metric_files::name,
metric_files::file_name,
metric_files::content,
metric_files::verification,
metric_files::evaluation_obj,
metric_files::evaluation_summary,
metric_files::evaluation_score,
metric_files::created_by,
metric_files::created_at,
metric_files::updated_at,
))
.first::<QueryableMetricFile>(&mut conn)
.await
.map_err(|e| match e {
diesel::result::Error::NotFound => anyhow!("Metric file not found or unauthorized"),
_ => anyhow!("Database error: {}", e),
})?;
// Parse the content as MetricYml
let metric_yml: MetricYml = serde_json::from_value(metric_file.content.clone())?;
// Map evaluation score to High/Moderate/Low
let evaluation_score = metric_file.evaluation_score.map(|score| {
if score >= 0.8 {
"High".to_string()
} else if score >= 0.5 {
"Moderate".to_string()
} else {
"Low".to_string()
}
});
// Convert content to pretty YAML
let file = match serde_yaml::to_string(&metric_file.content) {
Ok(yaml) => yaml,
Err(e) => return Err(anyhow!("Failed to convert content to YAML: {}", e)),
};
// Parse data metadata from MetricYml
let data_metadata = metric_yml.data_metadata.map(|metadata| {
DataMetadata {
column_count: metadata.len() as i32,
column_metadata: metadata
.iter()
.map(|col| ColumnMetaData {
name: col.name.clone(),
min_value: MinMaxValue::Number(0.0), // Default value
max_value: MinMaxValue::Number(0.0), // Default value
unique_values: 0, // Default value
simple_type: match col.data_type.as_str() {
"string" => SimpleType::Text,
"number" => SimpleType::Number,
"boolean" => SimpleType::Boolean,
"date" => SimpleType::Date,
_ => SimpleType::Text,
},
column_type: match col.data_type.as_str() {
"string" => ColumnType::Text,
"number" => ColumnType::Number,
"boolean" => ColumnType::Boolean,
"date" => ColumnType::Date,
_ => ColumnType::Text,
},
})
.collect(),
row_count: 1, // Default value since it's not in the MetricYml structure
}
});
// Get dataset information for all dataset IDs
let mut datasets = Vec::new();
for dataset_id in &metric_yml.dataset_ids {
if let Ok(dataset_info) = datasets::table
.filter(datasets::id.eq(dataset_id))
.filter(datasets::deleted_at.is_null())
.select((datasets::id, datasets::name))
.first::<DatasetInfo>(&mut conn)
.await
{
datasets.push(Dataset {
id: dataset_info.id.to_string(),
name: dataset_info.name,
});
}
}
// Get user information
let user_info = users::table
.filter(users::id.eq(metric_file.created_by))
.select((users::name, users::avatar_url))
.first::<UserInfo>(&mut conn)
.await
.map_err(|e| anyhow!("Failed to get user information: {}", e))?;
// Construct BusterMetric
Ok(BusterMetric {
id: metric_file.id.to_string(),
metric_type: "metric".to_string(),
title: metric_yml.title,
version_number: 1,
description: metric_yml.description,
file_name: metric_file.file_name,
time_frame: metric_yml
.updated_at
.map(|dt| dt.to_rfc3339())
.unwrap_or_else(|| "".to_string()),
datasets,
data_source_id: "".to_string(), // This would need to be fetched from another source
error: None,
chart_config: Some(serde_json::to_value(&metric_yml.chart_config)?),
data_metadata,
status: metric_file.verification,
evaluation_score,
evaluation_summary: metric_file.evaluation_summary.unwrap_or_default(),
file,
created_at: metric_file.created_at.to_rfc3339(),
updated_at: metric_file.updated_at.to_rfc3339(),
sent_by_id: metric_file.created_by.to_string(),
sent_by_name: user_info.name.unwrap_or("".to_string()),
sent_by_avatar_url: user_info.avatar_url,
code: None,
dashboards: vec![], // TODO: Get associated dashboards
collections: vec![], // TODO: Get associated collections
})
}
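get_metric_handler buckets the stored numeric evaluation_score into the string values the API exposes. A standalone sketch of that mapping (hypothetical helper name; in the handler the same logic is inlined on metric_file.evaluation_score), with the thresholds exercised as a test:

fn bucket_evaluation_score(score: f64) -> &'static str {
    if score >= 0.8 {
        "High"
    } else if score >= 0.5 {
        "Moderate"
    } else {
        "Low"
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn thresholds_match_the_handler() {
        assert_eq!(bucket_evaluation_score(0.95), "High");
        assert_eq!(bucket_evaluation_score(0.8), "High"); // boundary is inclusive
        assert_eq!(bucket_evaluation_score(0.5), "Moderate");
        assert_eq!(bucket_evaluation_score(0.49), "Low");
    }
}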

View File

@ -1,3 +1,7 @@
pub mod get_metric_data_handler;
pub mod get_metric_handler;
pub mod types;
pub use get_metric_data_handler::*;
pub use get_metric_handler::*;
pub use types::*;

View File

@ -3,37 +3,29 @@ use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;
// Note: BusterChartConfigProps needs to be defined
// #[derive(Debug, Serialize, Deserialize, Clone)]
// pub struct BusterChartConfigProps { ... }
// Note: VerificationStatus needs to be defined
// #[derive(Debug, Serialize, Deserialize, Clone)]
// pub enum VerificationStatus { ... }
// Note: BusterShare needs to be defined
// #[derive(Debug, Serialize, Deserialize, Clone)]
// pub struct BusterShare { ... }
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Dataset {
pub name: String,
pub id: String,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BusterMetric {
pub id: String,
#[serde(rename = "type")]
pub metric_type: String, // Assuming always "metric"
pub metric_type: String, // Always "metric"
pub title: String,
pub version_number: i32,
pub description: Option<String>,
pub file_name: String,
pub time_frame: String,
pub dataset_id: String,
pub datasets: Vec<Dataset>,
pub data_source_id: String,
pub dataset_name: Option<String>,
pub error: Option<String>,
pub chart_config: Option<Value>, // Needs to be defined
pub chart_config: Option<Value>, // BusterChartConfigProps
pub data_metadata: Option<DataMetadata>,
pub status: Verification,
#[serde(rename = "evaluation_score")]
pub evaluation_score: Option<String>,
pub status: Verification,
pub evaluation_score: Option<String>, // "Moderate" | "High" | "Low"
pub evaluation_summary: String,
pub file: String, // yaml file
pub created_at: String,
@ -44,7 +36,6 @@ pub struct BusterMetric {
pub code: Option<String>,
pub dashboards: Vec<Dashboard>,
pub collections: Vec<Collection>,
// BusterShare fields would be included here
}
#[derive(Debug, Serialize, Deserialize, Clone)]
@ -99,6 +90,7 @@ pub enum SimpleType {
}
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "lowercase")]
pub enum ColumnType {
Text,
Float,
@ -141,4 +133,4 @@ pub enum DataValue {
String(String),
Number(f64),
Null,
}
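For API consumers, the visible effect of the BusterMetric change is that the single dataset_id/dataset_name pair becomes a datasets array of { id, name } objects. A minimal sketch of the new payload shape (hypothetical wrapper struct and sample values; the Dataset shape is taken from the definition above):

use serde::Serialize;

#[derive(Serialize)]
struct Dataset {
    name: String,
    id: String,
}

#[derive(Serialize)]
struct DatasetsPayloadSketch {
    datasets: Vec<Dataset>,
}

fn main() {
    let payload = DatasetsPayloadSketch {
        datasets: vec![Dataset { name: "orders".into(), id: "d1".into() }],
    };
    // Prints {"datasets":[{"name":"orders","id":"d1"}]}
    println!("{}", serde_json::to_string(&payload).unwrap());
}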

View File

@ -0,0 +1,3 @@
-- This file should undo anything in `up.sql`
ALTER TABLE users
DROP COLUMN avatar_url;

View File

@ -0,0 +1,3 @@
-- Your SQL goes here
ALTER TABLE users
ADD COLUMN avatar_url TEXT NULL;

View File

@ -1,17 +1,25 @@
use axum::{
extract::Path,
http::StatusCode,
Extension,
};
use database::models::User;
use crate::routes::rest::ApiResponse;
use axum::extract::Path;
use axum::http::StatusCode;
use axum::Extension;
use handlers::files::metric_files::types::BusterMetric;
use handlers::files::metric_files::helpers::get_metric::get_metric;
use handlers::metrics::{get_metric_handler, BusterMetric};
use uuid::Uuid;
use crate::routes::rest::ApiResponse;
pub async fn get_metric_rest_handler(
Extension(user): Extension<User>,
Path(id): Path<Uuid>,
) -> Result<ApiResponse<BusterMetric>, (StatusCode, &'static str)> {
let metric = match get_metric(&id, &user.id).await {
tracing::info!(
"Processing GET request for metric with ID: {}, user_id: {}",
id,
user.id
);
let metric = match get_metric_handler(&id, &user.id).await {
Ok(response) => response,
Err(e) => {
tracing::error!("Error getting metric: {}", e);

View File

@ -96,6 +96,7 @@ pub async fn get_user_information(user_id: &Uuid) -> Result<UserInfoObject> {
users::created_at,
users::updated_at,
users::attributes,
users::avatar_url,
),
(
teams::id,

View File

@ -101,6 +101,7 @@ async fn post_user_handler(
"user_email": email,
"organization_role": role.to_string(),
}),
avatar_url: None,
};
let user_to_organization = UserToOrganization {

View File

@ -93,6 +93,7 @@ async fn invite_users_handler(user: &User, req: InviteUsersRequest) -> Result<()
"user_email": email,
"organization_role": "viewer".to_string(),
}),
avatar_url: None,
}
})
.collect::<Vec<User>>();

View File

@ -383,6 +383,7 @@ pub async fn get_user_information(user_id: &Uuid) -> Result<UserInfoObject> {
users::created_at,
users::updated_at,
users::attributes,
users::avatar_url,
),
(
teams::id,

View File

@ -697,6 +697,7 @@ async fn create_new_users_and_add_permissions(
created_at: chrono::Utc::now(),
updated_at: chrono::Utc::now(),
config: json!({}),
avatar_url: None,
};
let permission = AssetPermission {