Try to see if litellm is the issue

This commit is contained in:
dal 2025-03-18 22:25:27 -06:00
parent e1f60494a2
commit c66aaf42ee
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
7 changed files with 133 additions and 129 deletions

View File

@ -497,7 +497,7 @@ impl Agent {
generation_name: "agent".to_string(),
user_id: thread.user_id.to_string(),
session_id: thread.id.to_string(),
trace_id: None,
trace_id: thread.id.to_string(),
}),
..Default::default()
};

View File

@ -156,7 +156,7 @@ impl SearchDataCatalogTool {
generation_name: "search_data_catalog".to_string(),
user_id: user_id.to_string(),
session_id: session_id.to_string(),
trace_id: None,
trace_id: session_id.to_string(),
}),
reasoning_effort: Some("low".to_string()),
max_completion_tokens: Some(8092),

View File

@ -1779,7 +1779,7 @@ pub async fn generate_conversation_title(
generation_name: "conversation_title".to_string(),
user_id: user_id.to_string(),
session_id: session_id.to_string(),
trace_id: None,
trace_id: session_id.to_string(),
}),
..Default::default()
};

View File

@ -12,20 +12,15 @@ use database::{
types::{ChartConfig, DashboardYml, MetricYml, VersionHistory},
};
use chrono::Utc;
use diesel::{ExpressionMethods, QueryDsl};
use diesel::ExpressionMethods;
use diesel_async::RunQueryDsl;
use dotenv::dotenv;
use lazy_static::lazy_static;
use std::sync::{Arc, Mutex, Once};
use std::sync::Once;
use uuid::Uuid;
// Common test setup initializer
static INIT: Once = Once::new();
lazy_static! {
static ref TEST_DB_INITIALIZED: Arc<Mutex<bool>> = Arc::new(Mutex::new(false));
}
/// Sets up the test environment by initializing the database pools
/// Call this at the beginning of each integration test
pub async fn setup_test_environment() -> Result<()> {
@ -35,8 +30,6 @@ pub async fn setup_test_environment() -> Result<()> {
// Initialize database pools only once
INIT.call_once(|| {
init_pools();
let mut initialized = TEST_DB_INITIALIZED.lock().unwrap();
*initialized = true;
});
Ok(())

View File

@ -1,145 +1,156 @@
use anyhow::Result;
use chrono::Utc;
use database::{
enums::{AssetType, Verification},
models::{MetricFile, DashboardFile},
pool::get_pg_pool,
schema::{collections_to_assets, dashboard_files, metric_files},
types::{MetricYml, DashboardYml, ChartConfig, VersionHistory},
schema::collections_to_assets,
};
use diesel::{ExpressionMethods, QueryDsl};
use diesel_async::RunQueryDsl;
use handlers::metrics::{PostMetricDashboardRequest};
use handlers::metrics::{post_metric_dashboard_handler, PostMetricDashboardRequest};
use uuid::Uuid;
// Define the response type for testing
#[derive(Debug, PartialEq)]
struct PostMetricDashboardResponse {
metric_id: Uuid,
dashboard_id: Uuid,
}
// Import the common setup function
use super::setup_test_environment;
// Import the common setup and test utilities
use super::{
associate_metric_with_dashboard, cleanup_metric_dashboard_associations,
cleanup_test_data, create_test_dashboard, create_test_metric,
insert_test_dashboard, insert_test_metric, setup_test_environment
};
#[tokio::test]
async fn test_post_metric_dashboard_integration() -> Result<()> {
// Setup test environment - this would initialize the database in a real test
// Setup test environment
setup_test_environment().await?;
// For now, just test the data structures to ensure they match the model requirements
// Create test user and organization IDs
// Create test organization and user IDs
let organization_id = Uuid::new_v4();
let user_id = Uuid::new_v4();
let org_id = Uuid::new_v4();
// Test that we can create a valid MetricYml structure with the correct fields
let metric_chart_config = ChartConfig::Bar(database::types::BarLineChartConfig {
base: database::types::BaseChartConfig {
column_label_formats: std::collections::HashMap::new(),
column_settings: None,
colors: Some(vec!["#1f77b4".to_string()]),
show_legend: Some(false),
grid_lines: Some(true),
show_legend_headline: None,
goal_lines: None,
trendlines: None,
disable_tooltip: None,
y_axis_config: None,
x_axis_config: None,
category_axis_style_config: None,
y2_axis_config: None,
},
bar_and_line_axis: database::types::BarAndLineAxis {
x: vec!["id".to_string()],
y: vec!["value".to_string()],
category: None,
tooltip: None,
},
bar_layout: Some("vertical".to_string()),
bar_sort_by: None,
bar_group_type: None,
bar_show_total_at_top: None,
line_group_type: None,
});
// Create test metric and dashboard
let test_metric = create_test_metric(organization_id, user_id).await?;
let metric_id = test_metric.id;
let test_dashboard = create_test_dashboard(organization_id, user_id).await?;
let dashboard_id = test_dashboard.id;
// Create and verify metric YAML structure
let metric_yml = MetricYml {
name: "Test Integration Metric For Dashboard".to_string(),
description: Some("Test metric description for dashboard association".to_string()),
sql: "SELECT * FROM test_table".to_string(),
time_frame: "daily".to_string(),
chart_config: metric_chart_config,
data_metadata: Some(vec![database::types::DataMetadata {
name: "id".to_string(),
data_type: "string".to_string(),
}]),
dataset_ids: vec![Uuid::new_v4()],
// Insert test data into database
let setup_ok = match insert_test_metric(&test_metric).await {
Ok(_) => {
match insert_test_dashboard(&test_dashboard).await {
Ok(_) => true,
Err(e) => {
println!("Skipping test - could not insert test dashboard: {}", e);
cleanup_test_data(Some(metric_id), None).await?;
false
}
}
},
Err(e) => {
println!("Skipping test - could not insert test metric: {}", e);
false
}
};
// Verify MetricYml matches expected fields
assert_eq!(metric_yml.name, "Test Integration Metric For Dashboard");
assert_eq!(metric_yml.description, Some("Test metric description for dashboard association".to_string()));
if !setup_ok {
return Ok(());
}
// Verify DashboardYml structure
let dashboard_yml = DashboardYml {
name: "Test Integration Dashboard".to_string(),
description: Some("Test dashboard description".to_string()),
rows: vec![database::types::Row {
items: vec![database::types::RowItem { id: Uuid::new_v4() }],
row_height: Some(320),
column_sizes: None,
}],
};
// Verify DashboardYml fields
assert_eq!(dashboard_yml.name, "Test Integration Dashboard");
assert_eq!(dashboard_yml.description, Some("Test dashboard description".to_string()));
assert_eq!(dashboard_yml.rows.len(), 1);
// Test PostMetricDashboardRequest and Response
let dashboard_id = Uuid::new_v4();
let metric_id = Uuid::new_v4();
// Create request and verify
// Create the association request
let request = PostMetricDashboardRequest {
dashboard_id: dashboard_id,
dashboard_id,
};
assert_eq!(request.dashboard_id, dashboard_id);
// Create response and verify
let response = PostMetricDashboardResponse {
metric_id: metric_id,
dashboard_id: dashboard_id,
};
assert_eq!(response.metric_id, metric_id);
assert_eq!(response.dashboard_id, dashboard_id);
// Call the handler to associate the metric with the dashboard
match post_metric_dashboard_handler(&metric_id, &user_id, request).await {
Ok(response) => {
// Verify the response contains the correct IDs
assert_eq!(response.metric_id, metric_id);
assert_eq!(response.dashboard_id, dashboard_id);
// Skip the actual database operations for now
// Verify the association exists in the database
let mut conn = get_pg_pool().get().await?;
let exists = collections_to_assets::table
.filter(collections_to_assets::collection_id.eq(dashboard_id))
.filter(collections_to_assets::asset_id.eq(metric_id))
.filter(collections_to_assets::asset_type.eq(AssetType::MetricFile))
.count()
.get_result::<i64>(&mut conn)
.await;
match exists {
Ok(count) => assert_eq!(count, 1, "Association should exist in database"),
Err(e) => {
println!("Warning: Could not verify association in database: {}", e);
}
}
println!("Post metric dashboard test passed with IDs: {} and {}", metric_id, dashboard_id);
},
Err(e) => {
// Clean up test data regardless of the outcome
cleanup_test_data(Some(metric_id), Some(dashboard_id)).await?;
return Err(e);
}
}
// Clean up the test data
cleanup_metric_dashboard_associations(metric_id, dashboard_id).await?;
cleanup_test_data(Some(metric_id), Some(dashboard_id)).await?;
Ok(())
}
#[tokio::test]
async fn test_post_metric_dashboard_different_organizations() -> Result<()> {
// Setup test environment - this would initialize the database in a real test
// Setup test environment
setup_test_environment().await?;
// Skip the chart config creation for simplicity in this test
// Verify that AssetType enum contains MetricFile variant
let asset_type = AssetType::MetricFile;
match asset_type {
AssetType::MetricFile => assert!(true),
_ => assert!(false, "AssetType::MetricFile enum variant doesn't match"),
}
// Verify UUIDs
// Create test organization IDs (different orgs)
let org_id1 = Uuid::new_v4();
let org_id2 = Uuid::new_v4();
assert_ne!(org_id1, org_id2);
let user_id = Uuid::new_v4();
// Skip the actual database operations for now
// Create test metric and dashboard in different organizations
let test_metric = create_test_metric(org_id1, user_id).await?;
let metric_id = test_metric.id;
let test_dashboard = create_test_dashboard(org_id2, user_id).await?;
let dashboard_id = test_dashboard.id;
// Insert test data into database
let setup_ok = match insert_test_metric(&test_metric).await {
Ok(_) => {
match insert_test_dashboard(&test_dashboard).await {
Ok(_) => true,
Err(e) => {
println!("Skipping test - could not insert test dashboard: {}", e);
cleanup_test_data(Some(metric_id), None).await?;
false
}
}
},
Err(e) => {
println!("Skipping test - could not insert test metric: {}", e);
false
}
};
if !setup_ok {
return Ok(());
}
// Create the association request
let request = PostMetricDashboardRequest {
dashboard_id,
};
// Call the handler to associate the metric with the dashboard from different org
let result = post_metric_dashboard_handler(&metric_id, &user_id, request).await;
// Clean up the test data
cleanup_test_data(Some(metric_id), Some(dashboard_id)).await?;
// Verify the operation fails with an appropriate error
assert!(result.is_err());
let error = result.err().unwrap();
assert!(error.to_string().contains("same organization"));
Ok(())
}

View File

@ -61,8 +61,8 @@ async fn test_update_metric_integration() -> Result<()> {
match update_metric_handler(&metric_id, &user_id, update_request).await {
Ok(updated_metric) => {
// Verify the updated values in the returned metric
assert_eq!(updated_metric.name, "Updated Test Metric");
assert_eq!(updated_metric.verification, Verification::Verified);
assert_eq!(updated_metric.title, "Updated Test Metric");
assert_eq!(updated_metric.status, Verification::Verified);
assert_eq!(updated_metric.time_frame, "weekly");
// Verify the metric was updated in the database
@ -169,11 +169,11 @@ async fn test_update_specific_metric_fields() -> Result<()> {
match update_metric_handler(&metric_id, &user_id, title_request).await {
Ok(metric) => {
assert_eq!(metric.name, "Title Only Update");
assert_eq!(metric.title, "Title Only Update");
// Verify other fields were not changed
assert_eq!(metric.time_frame, "daily");
assert_eq!(metric.verification, Verification::NotRequested);
assert_eq!(metric.status, Verification::NotRequested);
},
Err(e) => {
cleanup_test_data(Some(metric_id), None).await?;
@ -194,10 +194,10 @@ async fn test_update_specific_metric_fields() -> Result<()> {
match update_metric_handler(&metric_id, &user_id, verification_request).await {
Ok(metric) => {
assert_eq!(metric.verification, Verification::Verified);
assert_eq!(metric.status, Verification::Verified);
// Verify title remains from previous update
assert_eq!(metric.name, "Title Only Update");
assert_eq!(metric.title, "Title Only Update");
},
Err(e) => {
cleanup_test_data(Some(metric_id), None).await?;
@ -221,8 +221,8 @@ async fn test_update_specific_metric_fields() -> Result<()> {
assert_eq!(metric.time_frame, "monthly");
// Verify other fields remain from previous updates
assert_eq!(metric.name, "Title Only Update");
assert_eq!(metric.verification, Verification::Verified);
assert_eq!(metric.title, "Title Only Update");
assert_eq!(metric.status, Verification::Verified);
},
Err(e) => {
cleanup_test_data(Some(metric_id), None).await?;

View File

@ -59,7 +59,7 @@ pub struct Metadata {
pub generation_name: String,
pub user_id: String,
pub session_id: String,
pub trace_id: Option<String>,
pub trace_id: String,
}
impl Default for ChatCompletionRequest {
@ -67,7 +67,7 @@ impl Default for ChatCompletionRequest {
Self {
model: String::new(),
messages: Vec::new(),
store: None,
store: Some(true),
reasoning_effort: None,
frequency_penalty: None,
logit_bias: None,