From b8fd636740fbad8cd09223d89002440bf3d1d64d Mon Sep 17 00:00:00 2001 From: dal Date: Tue, 1 Apr 2025 12:13:40 -0600 Subject: [PATCH] updated cursor and claude --- api/.cursor/rules/database_migrations.mdc | 357 +++++ api/.cursor/rules/global.mdc | 177 +-- api/.cursor/rules/handlers.mdc | 73 +- api/.cursor/rules/libs.mdc | 4 +- api/.cursor/rules/prds.mdc | 136 +- api/.cursor/rules/rest.mdc | 2 +- api/.cursor/rules/testing.mdc | 1714 +++++++++++++++++++-- api/CLAUDE.md | 2 + api/documentation/database_migrations.mdc | 7 +- 9 files changed, 2153 insertions(+), 319 deletions(-) create mode 100644 api/.cursor/rules/database_migrations.mdc diff --git a/api/.cursor/rules/database_migrations.mdc b/api/.cursor/rules/database_migrations.mdc new file mode 100644 index 000000000..ce37e380a --- /dev/null +++ b/api/.cursor/rules/database_migrations.mdc @@ -0,0 +1,357 @@ +--- +description: +globs: +alwaysApply: false +--- +# Database Migrations Guide + +This document provides a comprehensive guide on how to create and manage database migrations in our project. + +## Overview + +Database migrations are a way to evolve your database schema over time. Each migration represents a specific change to the database schema, such as creating a table, adding a column, or modifying an enum type. Migrations are version-controlled and can be applied or reverted as needed. + +In our project, we use [Diesel](mdc:https:/diesel.rs) for handling database migrations. Diesel is an ORM and query builder for Rust that helps us manage our database schema changes in a safe and consistent way. + +## Migration Workflow + +### 1. Creating a New Migration + +To create a new migration, use the Diesel CLI: + +```bash +diesel migration generate name_of_migration +``` + +This command creates a new directory in the `migrations` folder with a timestamp prefix (e.g., `2025-03-06-232923_name_of_migration`). 
Inside this directory, two files are created: +- `up.sql`: Contains SQL statements to apply the migration +- `down.sql`: Contains SQL statements to revert the migration + +### 2. Writing Migration SQL + +#### Up Migration + +The `up.sql` file should contain all the SQL statements needed to apply your changes to the database. For example: + +```sql +-- Create a new table +CREATE TABLE users ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR NOT NULL, + email VARCHAR NOT NULL UNIQUE, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() +); + +-- Add a column to an existing table +ALTER TABLE organizations +ADD COLUMN description TEXT; + +-- Create an enum type +CREATE TYPE user_role_enum AS ENUM ('admin', 'member', 'guest'); +``` + +#### Down Migration + +The `down.sql` file should contain SQL statements that revert the changes made in `up.sql`. It should be written in the reverse order of the operations in `up.sql`: + +```sql +-- Remove the enum type +DROP TYPE user_role_enum; + +-- Remove the column +ALTER TABLE organizations +DROP COLUMN description; + +-- Drop the table +DROP TABLE users; +``` + +### 3. Running Migrations + +To apply all pending migrations: + +```bash +diesel migration run +``` + +This command: +1. Executes the SQL in the `up.sql` files of all pending migrations +2. Updates the `__diesel_schema_migrations` table to track which migrations have been applied +3. Regenerates the `schema.rs` file to reflect the current database schema + +### 4. Reverting Migrations + +To revert the most recent migration: + +```bash +diesel migration revert +``` + +This executes the SQL in the `down.sql` file of the most recently applied migration. + +### 5. 
Checking Migration Status + +To see which migrations have been applied and which are pending: + +```bash +diesel migration list +``` + +## Working with Enums + +We prefer using enums when possible for fields with a fixed set of values. Here's how to work with enums in our project: + +### 1. Creating an Enum in SQL Migration + +```sql +-- In up.sql +CREATE TYPE asset_type_enum AS ENUM ('dashboard', 'dataset', 'metric'); + +-- In down.sql +DROP TYPE asset_type_enum; +``` + +### 2. Adding Values to an Existing Enum + +```sql +-- In up.sql +ALTER TYPE asset_type_enum ADD VALUE IF NOT EXISTS 'chat'; + +-- In down.sql +DELETE FROM pg_enum +WHERE enumlabel = 'chat' +AND enumtypid = (SELECT oid FROM pg_type WHERE typname = 'asset_type_enum'); +``` + +### 3. Implementing the Enum in Rust + +After running the migration, you need to update the `enums.rs` file to reflect the changes: + +```rust +#[derive( + Serialize, + Deserialize, + Debug, + Clone, + Copy, + PartialEq, + Eq, + diesel::AsExpression, + diesel::FromSqlRow, +)] +#[diesel(sql_type = sql_types::AssetTypeEnum)] +#[serde(rename_all = "camelCase")] +pub enum AssetType { + Dashboard, + Dataset, + Metric, + Chat, +} + +impl ToSql for AssetType { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result { + match *self { + AssetType::Dashboard => out.write_all(b"dashboard")?, + AssetType::Dataset => out.write_all(b"dataset")?, + AssetType::Metric => out.write_all(b"metric")?, + AssetType::Chat => out.write_all(b"chat")?, + } + Ok(IsNull::No) + } +} + +impl FromSql for AssetType { + fn from_sql(bytes: PgValue<'_>) -> deserialize::Result { + match bytes.as_bytes() { + b"dashboard" => Ok(AssetType::Dashboard), + b"dataset" => Ok(AssetType::Dataset), + b"metric" => Ok(AssetType::Metric), + b"chat" => Ok(AssetType::Chat), + _ => Err("Unrecognized enum variant".into()), + } + } +} +``` + +## Working with JSON Types + +When working with JSON data in the database, we map it to Rust structs. 
Here's how: + +### 1. Adding a JSON Column in Migration + +```sql +-- In up.sql +ALTER TABLE metric_files +ADD COLUMN version_history JSONB NOT NULL DEFAULT '{}'::jsonb; + +-- In down.sql +ALTER TABLE metric_files +DROP COLUMN version_history; +``` + +### 2. Creating a Type for the JSON Data + +Create a new file in the `libs/database/src/types` directory or update an existing one: + +```rust +// In libs/database/src/types/version_history.rs +use std::io::Write; +use diesel::{ + deserialize::FromSql, + pg::Pg, + serialize::{IsNull, Output, ToSql}, + sql_types::Jsonb, + AsExpression, FromSqlRow, +}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Serialize, Deserialize, FromSqlRow, AsExpression, Clone)] +#[diesel(sql_type = Jsonb)] +pub struct VersionHistory { + pub version: String, + pub updated_at: String, + pub content: serde_json::Value, +} + +impl FromSql for VersionHistory { + fn from_sql(bytes: diesel::pg::PgValue) -> diesel::deserialize::Result { + let value = serde_json::from_value(Jsonb::from_sql(bytes)?)?; + Ok(value) + } +} + +impl ToSql for VersionHistory { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> diesel::serialize::Result { + let json = serde_json::to_value(self)?; + ToSql::::to_sql(&json, out) + } +} +``` + +### 3. Updating the `mod.rs` File + +Make sure to export your new type in the `libs/database/src/types/mod.rs` file: + +```rust +pub mod version_history; +pub use version_history::*; +``` + +### 4. Using the Type in Models + +Update the corresponding model in `models.rs` to use your new type: + +```rust +#[derive(Queryable, Insertable, Identifiable, Debug, Clone, Serialize)] +#[diesel(table_name = metric_files)] +pub struct MetricFile { + pub id: Uuid, + pub name: String, + pub content: String, + pub organization_id: Uuid, + pub created_by: Uuid, + pub created_at: DateTime, + pub updated_at: DateTime, + pub deleted_at: Option>, + pub version_history: VersionHistory, +} +``` + +## Best Practices + +1. 
**Keep migrations small and focused**: Each migration should do one logical change to the schema. + +2. **Test migrations before applying to production**: Always test migrations in a development or staging environment first. + +3. **Always provide a down migration**: Make sure your `down.sql` properly reverts all changes made in `up.sql`. + +4. **Use transactions**: Wrap complex migrations in transactions to ensure atomicity. + +5. **Be careful with data migrations**: If you need to migrate data (not just schema), consider using separate migrations or Rust code. + +6. **Document your migrations**: Add comments to your SQL files explaining what the migration does and why. + +7. **Version control your migrations**: Always commit your migrations to version control. + +## Common Migration Patterns + +### Adding a New Table + +```sql +-- up.sql +CREATE TABLE new_table ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + name VARCHAR NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW() +); + +-- down.sql +DROP TABLE new_table; +``` + +### Adding a Column + +```sql +-- up.sql +ALTER TABLE existing_table +ADD COLUMN new_column VARCHAR; + +-- down.sql +ALTER TABLE existing_table +DROP COLUMN new_column; +``` + +### Creating a Join Table + +```sql +-- up.sql +CREATE TABLE table_a_to_table_b ( + table_a_id UUID NOT NULL REFERENCES table_a(id), + table_b_id UUID NOT NULL REFERENCES table_b(id), + created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), + PRIMARY KEY (table_a_id, table_b_id) +); + +-- down.sql +DROP TABLE table_a_to_table_b; +``` + +### Working with Constraints + +```sql +-- up.sql +ALTER TABLE users +ADD CONSTRAINT unique_email UNIQUE (email); + +-- down.sql +ALTER TABLE users +DROP CONSTRAINT unique_email; +``` + +## Troubleshooting + +### Migration Failed to Apply + +If a migration fails to apply, Diesel will stop and not apply any further migrations. 
You'll need to fix the issue and try again. + +### Schema Drift + +If your `schema.rs` doesn't match the actual database schema, you can regenerate it: + +```bash +diesel print-schema > libs/database/src/schema.rs +``` + +### Fixing a Bad Migration + +If you've applied a migration that has errors: + +1. Fix the issues in your `up.sql` file +2. Run `diesel migration revert` to undo the migration +3. Run `diesel migration run` to apply the fixed migration + +## Conclusion + +Following these guidelines will help maintain a clean and consistent database schema evolution process. Remember that migrations are part of your codebase and should be treated with the same care as any other code. diff --git a/api/.cursor/rules/global.mdc b/api/.cursor/rules/global.mdc index 5e5c60abb..dcd476d55 100644 --- a/api/.cursor/rules/global.mdc +++ b/api/.cursor/rules/global.mdc @@ -1,143 +1,70 @@ --- -description: These are global rules and recommendations for the rust server. +description: globs: alwaysApply: true --- +# Buster API Repository Navigation Guide -# Global Rules and Project Structure +## Row Limit Implementation Notes +All database query functions in the query_engine library have been updated to respect a 5000 row limit by default. The limit can be overridden by passing an explicit limit value. This is implemented in the libs/query_engine directory. -## Project Overview -This is a Rust web server project built with Axum, focusing on high performance, safety, and maintainability. 
+## Documentation +The project's detailed documentation is in the `/documentation` directory: +- `handlers.mdc` - Handler patterns +- `libs.mdc` - Library construction guidelines +- `rest.mdc` - REST API formatting +- `testing.mdc` - Testing standards +- `tools.mdc` - Tools documentation +- `websockets.mdc` - WebSocket patterns -## Project Structure -- `src/` - - `routes/` - - `rest/` - REST API endpoints using Axum - - `routes/` - Individual route modules - - `ws/` - WebSocket handlers and related functionality - - `database/` - Database models, schema, and connection management - - `main.rs` - Application entry point and server setup +While these files contain best practices for writing tests, REST patterns, etc., **each subdirectory should have its own README.md or CLAUDE.md** that should be referenced first when working in that specific area. These subdirectory-specific guides often contain implementation details and patterns specific to that component. -## Implementation -When working with prds, you should always mark your progress off in them as you build. 
+## Repository Structure +- `src/` - Main server code + - `routes/` - API endpoints (REST, WebSocket) + - `utils/` - Shared utilities + - `types/` - Common type definitions +- `libs/` - Shared libraries + - Each lib has its own Cargo.toml and docs +- `migrations/` - Database migrations +- `tests/` - Integration tests +- `documentation/` - Detailed docs +- `prds/` - Product requirements -## Database Connectivity -- The primary database connection is managed through `get_pg_pool()`, which returns a lazy static `PgPool` -- Always use this pool for database connections to ensure proper connection management -- Example usage: +## Build Commands +- `make dev` - Start development +- `make stop` - Stop development +- `cargo test -- --test-threads=1 --nocapture` - Run tests +- `cargo clippy` - Run linter +- `cargo build` - Build project + +## Core Guidelines +- Use `anyhow::Result` for error handling +- Group imports (std lib, external, internal) +- Put shared types in `types/`, route-specific types in route files +- Use snake_case for variables/functions, CamelCase for types +- Never log secrets or sensitive data +- All dependencies inherit from workspace using `{ workspace = true }` +- Use database connection pool from `get_pg_pool().get().await?` +- Write tests with `tokio::test` for async tests + +## Common Database Pattern ```rust -let mut conn = get_pg_pool().get().await?; +let pool = get_pg_pool(); +let mut conn = pool.get().await?; + +diesel::update(table) + .filter(conditions) + .set(values) + .execute(&mut conn) + .await? ``` -## Code Style and Best Practices - -### References and Memory Management -- Prefer references over owned values when possible -- Avoid unnecessary `.clone()` calls -- Use `&str` instead of `String` for function parameters when the string doesn't need to be owned - -### Importing packages/crates -- Please make the dependency as short as possible in the actual logic by importing the crate/package. 
- -### Database Operations -- Use Diesel for database migrations and query building -- Migrations are stored in the `migrations/` directory - -### Concurrency Guidelines -- Prioritize concurrent operations, especially for: - - API requests - - File operations -- Optimize database connection usage: - - Batch operations where possible - - Build queries/parameters before executing database operations - - Use bulk inserts/updates instead of individual operations +## Common Concurrency Pattern ```rust -// Preferred: Bulk operation -let items: Vec<_> = prepare_items(); -diesel::insert_into(table) - .values(&items) - .execute(conn)?; - -// Avoid: Individual operations in a loop -for item in items { - diesel::insert_into(table) - .values(&item) - .execute(conn)?; -} -``` - -### Error Handling -- Never use `.unwrap()` or `.expect()` in production code -- Always handle errors appropriately using: - - The `?` operator for error propagation - - `match` statements when specific error cases need different handling -- Use `anyhow` for error handling: - - Prefer `anyhow::Result` as the return type for functions that can fail - - Use `anyhow::Error` for error types - - Use `anyhow!` macro for creating custom errors -```rust -use anyhow::{Result, anyhow}; - -// Example of proper error handling -pub async fn process_data(input: &str) -> Result { - // Use ? 
for error propagation - let parsed = parse_input(input)?; - - // Use match when specific error cases need different handling - match validate_data(&parsed) { - Ok(valid_data) => Ok(valid_data), - Err(e) => Err(anyhow!("Data validation failed: {}", e)) - } -} - -// Avoid this: -// let data = parse_input(input).unwrap(); // ❌ Never use unwrap -``` - -### API Design -- REST endpoints should be in `routes/rest/routes/` -- WebSocket handlers should be in `routes/ws/` -- Use proper HTTP status codes -- Implement proper validation for incoming requests - -### Testing -- Write unit tests for critical functionality -- Use integration tests for API endpoints -- Mock external dependencies when appropriate - -## Common Patterns - -### Database Queries -```rust -use diesel::prelude::*; - -// Example of a typical database query -pub async fn get_item(id: i32) -> Result { - let pool = get_pg_pool(); - let conn = pool.get().await?; - - items::table - .filter(items::id.eq(id)) - .first(&conn) - .map_err(Into::into) -} -``` - -### Concurrent Operations -```rust -use futures::future::try_join_all; - -// Example of concurrent processing let futures: Vec<_> = items .into_iter() .map(|item| process_item(item)) .collect(); let results = try_join_all(futures).await?; -``` - -Remember to always consider: -1. Connection pool limits when designing concurrent operations -2. Transaction boundaries for data consistency -3. Error propagation and cleanup -4. Memory usage and ownership -5. Please use comments to help document your code and make it more readable. +``` \ No newline at end of file diff --git a/api/.cursor/rules/handlers.mdc b/api/.cursor/rules/handlers.mdc index 1025d94b2..27aec484c 100644 --- a/api/.cursor/rules/handlers.mdc +++ b/api/.cursor/rules/handlers.mdc @@ -1,5 +1,5 @@ --- -description: This is helpul docs for buildng hanlders in the project.l +description: This is helpful documentation for building handlers in the project. 
globs: libs/handlers/**/*.rs alwaysApply: false --- @@ -22,7 +22,6 @@ Handlers are the core business logic components that implement functionality use - Handler functions should follow the same pattern: `[action]_[resource]_handler` - Example: `get_chat_handler()`, `delete_message_handler()` - Type definitions should be clear and descriptive - - Request types: `[Action][Resource]Request` - Response types: `[Action][Resource]Response` ## Handler Implementation Guidelines @@ -30,15 +29,22 @@ Handlers are the core business logic components that implement functionality use ### Function Signatures ```rust pub async fn action_resource_handler( - // Parameters typically include: - request: ActionResourceRequest, // For REST/WS request data - user: User, // For authenticated user context + // Parameters should be decoupled from request types: + resource_id: Uuid, // Individual parameters instead of request objects + options: Vec, // Specific data needed for the operation + user: User, // For authenticated user context // Other contextual parameters as needed ) -> Result { // Implementation } ``` +### Decoupling from Request Types +- Handlers should NOT take request types as inputs +- Instead, use individual parameters that represent the exact data needed +- This keeps handlers flexible and reusable across different contexts +- The return type can be a specific response type, as this is what the handler produces + ### Error Handling - Use `anyhow::Result` for return types - Provide descriptive error messages with context @@ -59,6 +65,7 @@ match operation() { ### Database Operations - Use the connection pool: `get_pg_pool().get().await?` - Run concurrent operations when possible +- For related operations, use sequential operations with error handling - Handle database-specific errors appropriately - Example: ```rust @@ -72,6 +79,24 @@ diesel::update(table) .await? 
``` +Example with related operations: +```rust +let pool = get_pg_pool(); +let mut conn = pool.get().await?; + +// First operation +diesel::insert_into(table1) + .values(&values1) + .execute(&mut conn) + .await?; + +// Second related operation +diesel::update(table2) + .filter(conditions) + .set(values2) + .execute(&mut conn) + .await?; + ### Concurrency - Use `tokio::spawn` for concurrent operations - Use `futures::try_join_all` for parallel processing @@ -109,10 +134,9 @@ tracing::info!( - Example: ```rust #[derive(Debug, Serialize, Deserialize)] -pub struct ResourceRequest { +pub struct ResourceResponse { pub id: Uuid, pub name: String, - #[serde(default)] pub options: Vec, } ``` @@ -125,34 +149,46 @@ pub struct ResourceRequest { ```rust // In REST route pub async fn rest_endpoint( - Json(payload): Json, + Json(payload): Json, user: User, ) -> Result, AppError> { - let result = handler::action_resource_handler(payload, user).await?; + // Extract specific parameters from the request + let result = handler::action_resource_handler( + payload.id, + payload.options, + user + ).await?; Ok(Json(result)) } // In WebSocket handler async fn ws_message_handler(message: WsMessage, user: User) -> Result { - let payload: HandlerRequest = serde_json::from_str(&message.payload)?; - let result = handler::action_resource_handler(payload, user).await?; + let payload: WsRequest = serde_json::from_str(&message.payload)?; + // Extract specific parameters from the request + let result = handler::action_resource_handler( + payload.id, + payload.options, + user + ).await?; Ok(WsResponse::new(result)) } ``` ## CLI Integration -- Handler types should be reusable in CLI commands +- CLI commands should extract specific parameters from arguments - CLI commands should use the same handlers as the API when possible - Example: ```rust // In CLI command pub fn cli_command(args: &ArgMatches) -> Result<()> { - let request = HandlerRequest { - // Parse from args - }; + // Extract parameters from 
args + let id = Uuid::parse_str(args.value_of("id").unwrap())?; + let options = args.values_of("options") + .map(|vals| vals.map(String::from).collect()) + .unwrap_or_default(); let result = tokio::runtime::Runtime::new()?.block_on(async { - handler::action_resource_handler(request, mock_user()).await + handler::action_resource_handler(id, options, mock_user()).await })?; println!("{}", serde_json::to_string_pretty(&result)?); @@ -169,11 +205,12 @@ pub fn cli_command(args: &ArgMatches) -> Result<()> { #[tokio::test] async fn test_action_resource_handler() { // Setup test data - let request = HandlerRequest { /* ... */ }; + let id = Uuid::new_v4(); + let options = vec!["option1".to_string(), "option2".to_string()]; let user = mock_user(); // Call handler - let result = action_resource_handler(request, user).await; + let result = action_resource_handler(id, options, user).await; // Assert expectations assert!(result.is_ok()); diff --git a/api/.cursor/rules/libs.mdc b/api/.cursor/rules/libs.mdc index 28ef56b9b..6f8d88f7a 100644 --- a/api/.cursor/rules/libs.mdc +++ b/api/.cursor/rules/libs.mdc @@ -1,6 +1,6 @@ --- description: This is helpful for building libs for our web server to interact with. -globs: libs/**/*.{rs,toml} +globs: libs/**/*.{rs alwaysApply: false --- @@ -13,7 +13,7 @@ libs/ │ ├── Cargo.toml # Library-specific manifest │ ├── src/ │ │ ├── lib.rs # Library root -│ │ ├── types.rs/ # Data structures and types +│ │ ├── types.rs # Data structures and types │ │ ├── utils/ # Utility functions │ │ └── errors.rs # Custom error types │ └── tests/ # Integration tests diff --git a/api/.cursor/rules/prds.mdc b/api/.cursor/rules/prds.mdc index 2e2ff80f5..f09273f10 100644 --- a/api/.cursor/rules/prds.mdc +++ b/api/.cursor/rules/prds.mdc @@ -1,5 +1,5 @@ --- -description: This is helpful for building and designing prds for our application and how to write them. Refe +description: This is helpful for building and designing PRDs for our application and how to write them. 
globs: prds/**/*.md alwaysApply: false --- @@ -16,29 +16,107 @@ All PRDs should be stored in the `/prds` directory with the following structure: /prds ├── template.md # The master template for all PRDs ├── active/ # Active/In-progress PRDs -│ ├── feature_auth.md -│ └── api_deployment.md +│ ├── project_feature_name.md # Project-level PRD +│ ├── api_feature_component1.md # Sub-PRD for component 1 +│ └── api_feature_component2.md # Sub-PRD for component 2 ├── completed/ # Completed PRDs that have been shipped -│ ├── feature_user_auth.md -│ └── api_deployment.md +│ ├── project_completed_feature.md +│ └── api_completed_component.md └── archived/ # Archived/Deprecated PRDs ``` ### Naming Convention - Use snake_case for file names - Include a prefix for the type of change: + - `project_` for project-level PRDs that contain multiple sub-PRDs - `feature_` for new features - `enhancement_` for improvements - `fix_` for bug fixes - `refactor_` for code refactoring - `api_` for API changes +## Project PRDs and Sub-PRDs + +### Project PRD Structure +Project PRDs serve as the main document for large features that require multiple components or endpoints. They should: + +1. Provide a high-level overview of the entire feature +2. Break down the implementation into logical components +3. Reference individual sub-PRDs for each component +4. Track the status of each sub-PRD +5. Define dependencies between sub-PRDs + +Example project PRD sections: +```markdown +## Implementation Plan + +The implementation will be broken down into six separate PRDs, each focusing on a specific endpoint: + +1. [Add Dashboard to Collections REST Endpoint](mdc:api_add_dashboards_to_collection.md) +2. [Remove Dashboard from Collections REST Endpoint](mdc:api_remove_dashboards_from_collection.md) +3. [Add Metric to Collections REST Endpoint](mdc:api_add_metrics_to_collection.md) +4. [Remove Metric from Collections REST Endpoint](mdc:api_remove_metrics_from_collection.md) +5. 
[Add Assets to Collection REST Endpoint](mdc:api_add_assets_to_collection.md) +6. [Remove Assets from Collection REST Endpoint](mdc:api_remove_assets_from_collection.md) +``` + +### Sub-PRD Structure +Sub-PRDs focus on specific components of the larger project. They should: + +1. Reference the parent project PRD +2. Focus on detailed implementation of a specific component +3. Include all technical details required for implementation +4. Be independently implementable (when possible) +5. Follow the standard PRD template + +### Enabling Concurrent Development + +The project PRD and sub-PRD structure is designed to enable efficient concurrent development by: + +1. **Clear Component Boundaries**: Each sub-PRD should have well-defined boundaries that minimize overlap with other components. + +2. **Explicit Dependencies**: The project PRD should clearly state which sub-PRDs depend on others, allowing teams to plan their work accordingly. + +3. **Interface Definitions**: Each sub-PRD should define clear interfaces for how other components interact with it, reducing the risk of integration issues. + +4. **Conflict Identification**: The project PRD should identify potential areas of conflict between concurrently developed components and provide strategies to mitigate them. + +5. **Integration Strategy**: The project PRD should define how and when components will be integrated, including any feature flag strategies to allow incomplete features to be merged without affecting production. + +### Example Workflow + +1. **Project Planning**: + - Create the project PRD with a clear breakdown of components + - Define dependencies and development order + - Identify which components can be developed concurrently + +2. **Development Kickoff**: + - Begin work on foundation components that others depend on + - Once foundation is complete, start concurrent development of independent components + - Regularly update the project PRD with status changes + +3. 
**Integration**: + - Follow the integration strategy defined in the project PRD + - Address any conflicts that arise during integration + - Update the project PRD with lessons learned + +4. **Completion**: + - Move completed PRDs to the `/prds/completed` directory + - Update the project PRD to reflect completion + - Document any deviations from the original plan + ## Using the Template ### Getting Started -1. Copy [template.md](mdc:prds/template.md) to create a new PRD -2. Place it in the `/prds/active` directory -3. Fill out each section following the template's comments and guidelines +1. For a new project with multiple components: + - Create a project-level PRD using [project_template.md](mdc:prds/project_template.md) + - Place it in the `/prds/active` directory with prefix `project_` + - Create sub-PRDs for each component using [sub_prd_template.md](mdc:prds/sub_prd_template.md) with appropriate prefixes + +2. For a standalone feature: + - Copy [template.md](mdc:prds/template.md) to create a new PRD + - Place it in the `/prds/active` directory + - Fill out each section following the template's guidelines ### Key Sections to Focus On The template [template.md](mdc:prds/template.md) provides comprehensive sections. Pay special attention to: @@ -59,6 +137,7 @@ The template [template.md](mdc:prds/template.md) provides comprehensive sections - Include clear success criteria - List dependencies between phases - Provide testing strategy for each phase + - For project PRDs, reference all sub-PRDs with their status 4. **Testing Strategy** - Unit test requirements @@ -66,6 +145,26 @@ The template [template.md](mdc:prds/template.md) provides comprehensive sections ## Best Practices +### Project PRD Best Practices +1. Keep the project PRD focused on high-level architecture and component relationships +2. Clearly define the scope of each sub-PRD +3. Maintain a status indicator for each sub-PRD ( Complete, ⏳ In Progress, Upcoming) +4. 
Update the project PRD when sub-PRDs are completed +5. Include a visual representation of component relationships when possible +6. Define clear interfaces between components +7. **Explicitly define the order in which sub-PRDs should be implemented** +8. **Identify which sub-PRDs can be developed concurrently without conflicts** +9. **Document dependencies between sub-PRDs to prevent blocking issues** +10. **Provide strategies for avoiding conflicts during concurrent development** +11. **Establish clear integration points for components developed in parallel** + +### Sub-PRD Best Practices +1. Always reference the parent project PRD +2. Focus on detailed implementation of a specific component +3. Include all technical details required for implementation +4. Ensure consistency with other sub-PRDs in the same project +5. Follow the standard PRD template structure + ### Documentation 1. Use clear, concise language 2. Include code examples where relevant @@ -80,10 +179,10 @@ The template [template.md](mdc:prds/template.md) provides comprehensive sections - Deprecated PRDs → `/prds/archived` 2. Update status section regularly: - - ✅ Completed items - - ⏳ In Progress items - - 🔜 Upcoming items - - ❌ Known Issues + - Completed items + - In Progress items + - Upcoming items + - Known Issues ### Review Process 1. Technical review @@ -106,17 +205,26 @@ The template [template.md](mdc:prds/template.md) provides comprehensive sections 5. No rollback plan 6. Missing security considerations 7. Undefined monitoring metrics +8. Inconsistencies between project PRD and sub-PRDs +9. Overlapping responsibilities between sub-PRDs +10. 
Missing dependencies between sub-PRDs ## Example PRDs Reference these example PRDs for guidance: -[template.md](mdc:prds/template.md) +- Project PRD: [Collections REST Endpoints](mdc:prds/active/project_collections_rest_endpoints.md) +- Sub-PRD: [Add Metrics to Collection](mdc:prds/active/api_add_metrics_to_collection.md) +- Project Template: [project_template.md](mdc:prds/project_template.md) +- Sub-PRD Template: [sub_prd_template.md](mdc:prds/sub_prd_template.md) +- Standard Template: [template.md](mdc:prds/template.md) ## Checklist Before Submission - [ ] All template sections completed - [ ] Technical design is detailed and complete - [ ] File changes are documented -- [ ] Implementation phases are clear (can be as many as you need.) +- [ ] Implementation phases are clear (can be as many as you need) - [ ] Testing strategy is defined - [ ] Security considerations addressed - [ ] Dependencies and Files listed - [ ] File References included +- [ ] For project PRDs: all sub-PRDs are referenced with status +- [ ] For sub-PRDs: parent project PRD is referenced diff --git a/api/.cursor/rules/rest.mdc b/api/.cursor/rules/rest.mdc index d6962b4d8..571831c18 100644 --- a/api/.cursor/rules/rest.mdc +++ b/api/.cursor/rules/rest.mdc @@ -1,5 +1,5 @@ --- -description: This rule is helpful for understanding how to build our rest functions. Structure, common patterns, where to look for types, etc.Ï +description: This rule is helpful for understanding how to build our REST functions. Structure, common patterns, where to look for types, etc. globs: src/routes/rest/**/*.rs alwaysApply: false --- diff --git a/api/.cursor/rules/testing.mdc b/api/.cursor/rules/testing.mdc index 5e9aebe7a..d62983b70 100644 --- a/api/.cursor/rules/testing.mdc +++ b/api/.cursor/rules/testing.mdc @@ -1,77 +1,509 @@ --- -description: This is designed to help understand how to do testing in this project. 
+description: globs: alwaysApply: false --- # Testing Rules and Best Practices +## Testing Organization Best Practices + +### Recommended Mocking Libraries + +For consistent mocking across the codebase, use these libraries: + +| Library | Purpose | Use Case | +|---------|---------|----------| +| **mockito** | HTTP service mocking | Mocking external API calls | +| **mockall** | Trait/struct mocking | Mocking database and service interfaces | +| **mock-it** | Simple mocking | Quick mocks for simple interfaces | +| **wiremock** | Advanced HTTP mocking | Complex API scenarios with extended matching | + +Always prefer dependency injection patterns to enable easy mocking of dependencies. + +### Library Code Testing Structure +- **Unit Tests**: Include unit tests inside library source files using `#[cfg(test)]` modules +- **Integration Tests**: Place integration tests in the lib's `tests/` directory +- **Test Utils**: Create a private `test_utils.rs` module in each lib for test utilities specific to that lib +- **Scope Isolation**: Test utilities should only be shared within their scope (e.g., libs/database, libs/agents) +- **Conditional Testing**: Use conditional compilation or runtime checks to make tests non-blocking (e.g., skip tests if a required API key is missing) + +### API (src) Testing Structure +- Maintain API-specific tests in the `/api/tests/` directory +- Organize integration tests to mirror the API's route structure +- Keep API-specific test utilities in the shared test common directory + +### Testing Configuration +- Use cargo features to make running specific test groups easier +- Consider using cargo-nextest for parallel test execution +- Add test tags to group tests by functionality or component + +### Running Tests in Different Scopes + +- **Run all tests**: `cargo test` +- **Run library-specific tests**: `cargo test -p ` +- **Run a specific test**: `cargo test ` +- **Run tests with specific pattern**: `cargo test -- ` +- **Run tests conditionally**: 
Use features to enable/disable test categories + +```toml +# In Cargo.toml +[features] +integration-tests = [] +unit-tests = [] +``` + +```rust +// In code +#[cfg(test)] +#[cfg(feature = "integration-tests")] +mod integration_tests { + // Test code... +} +``` + +Then, run: `cargo test --features integration-tests` + +### Examples of Proper Test Organization + +#### Example: Library Unit Tests (`#[cfg(test)]` Module) +```rust +// File: libs/braintrust/src/helpers.rs + +// Implementation code... + +/// Get system message from a stored prompt +pub async fn get_prompt_system_message(client: &BraintrustClient, prompt_id: &str) -> Result { + // Function implementation... +} + +#[cfg(test)] +mod tests { + use super::*; + use std::env; + use dotenv::dotenv; + + #[tokio::test] + async fn test_get_prompt_system_message() -> Result<()> { + // Load environment variables + dotenv().ok(); + + // Skip test if no API key is available (non-blocking) + if env::var("BRAINTRUST_API_KEY").is_err() { + println!("Skipping test_get_prompt_system_message: No API key available"); + return Ok(()); + } + + // Test implementation... 
+ + Ok(()) + } +} +``` + +#### Example: Library Integration Tests (Separate Directory) +```rust +// File: libs/handlers/tests/metrics/delete_metric_test.rs + +use handlers::metrics::delete_metric_handler; +use database::models::MetricFile; +use crate::common::fixtures::create_test_metric; +use anyhow::Result; + +/// Integration test for the delete_metric_handler +#[tokio::test] +async fn test_delete_metric_integration() -> Result<()> { + // Setup test environment + let test_db = setup_test_db().await?; + + // Create and insert test data + let test_metric = create_test_metric(&test_db).await?; + + // Test the handler functionality + let result = delete_metric_handler(&test_metric.id, &test_db.user_id).await?; + + // Verify results + assert!(result.deleted_at.is_some()); + + // Clean up test data + test_db.cleanup().await?; + + Ok(()) +} +``` + +#### Example: Library-Specific Test Utilities +```rust +// File: libs/database/src/test_utils.rs + +use crate::models::{User, Organization}; +use crate::pool::PgPool; +use uuid::Uuid; +use anyhow::Result; + +/// Test utilities specific to the database library +pub struct DatabaseTestUtils { + pub pool: PgPool, + pub test_id: String, +} + +impl DatabaseTestUtils { + pub async fn new() -> Result { + // Initialize test database connection + let pool = create_test_pool().await?; + let test_id = Uuid::new_v4().to_string(); + + Ok(Self { pool, test_id }) + } + + pub async fn create_test_user(&self) -> Result { + // Create a test user in the database + // ... + } + + pub async fn cleanup(&self) -> Result<()> { + // Clean up test data + // ... 
+ } +} +``` + ## General Testing Guidelines - All tests must be async and use tokio test framework - Tests should be well-documented with clear test case descriptions and expected outputs - Each test should focus on testing a single piece of functionality - Tests should be independent and not rely on the state of other tests - Use meaningful test names that describe what is being tested +- **Design for testability**: Code should be written with testing in mind + - Favor dependency injection for easier mocking + - Separate business logic from external dependencies + - Use interfaces/traits to abstract dependencies + - Write small, focused functions that do one thing well + - Avoid global state and side effects where possible ## Unit Tests - Unit tests should be inline with the code they are testing using `#[cfg(test)]` modules - Each public function should have corresponding unit tests -- Mock external dependencies using mockito for HTTP calls -- Use `mockito::Server::new_async()` instead of `mockito::Server::new()` +- **Important**: Unit tests should NEVER connect to external services or databases +- Mock all external dependencies: + - Use mockito for HTTP services + - Use trait-based mocks for database operations + - Create mock implementations of dependencies +- **IMPORTANT**: Always use the async version of mockito: + ```rust + // Correct async approach + let server = mockito::Server::new_async().await; + ``` + Instead of: + ```rust + // Incorrect: Not compatible with tokio runtime + let server = mockito::Server::new(); + ``` +- When using mockito in async tests, ensure all mock setup is done asynchronously - Test both success and error cases - Test edge cases and boundary conditions +- Structure unit tests to maximize code coverage +- Use dependency injection to make code easily testable with mocks ## Integration Tests -- Integration tests should be placed in the `/tests` directory -- Organize integration tests to mirror the main codebase structure -- Each 
major feature/resource should have its own test file -- Test the interaction between multiple components -- Use real dependencies when possible, mock only what's necessary -- Include end-to-end workflow tests -### Integration Test Setup Requirements -- All integration tests must import and utilize the application's schema from [schema.rs](mdc:src/database/schema.rs) -- Database models from [models.rs](mdc:src/database/models.rs) should be used for test data setup and verification -- Environment setup must use `dotenv` for configuration: +Integration tests are specifically designed to verify the interaction with external services and should be separate from the main codebase. + +### Integration Test Structure + +- **Location**: Integration tests should always be in a separate `tests/` directory, never mixed with the main code +- **External Interaction**: Unlike unit tests, integration tests are explicitly designed to interact with external services (databases, APIs, etc.) +- **Configuration**: + - Configuration should come from environment variables via `.env` files + - Use `dotenv` to load environment variables during test setup + - Example: Database connection parameters should come from `.env` + - Prefer `.env.test` for test-specific configurations + +```rust +// Example of proper integration test setup +use dotenv::dotenv; +use diesel_async::AsyncPgConnection; +use diesel_async::pooled_connection::AsyncDieselConnectionManager; +use deadpool_diesel::postgres::Pool; + +// Setup function that loads from environment +async fn setup_test_environment() -> Result { + // Load environment variables, preferring .env.test if available + if std::path::Path::new(".env.test").exists() { + dotenv::from_filename(".env.test").ok(); + } else { + dotenv().ok(); + } + + // Create database pool from environment variables + let database_url = std::env::var("DATABASE_URL") + .expect("DATABASE_URL must be set for integration tests"); + + let config = 
AsyncDieselConnectionManager::<AsyncPgConnection>::new(database_url);
+    let pool = Pool::builder(config).max_size(5).build()?;
+
+    // Return test context with real connections to external services
+    Ok(TestContext { pool, ... })
+}
+```
+
+### Library-Specific Integration Tests
+
+- Each library should have its own integration tests in its `tests/` directory
+- These tests should focus ONLY on the library's public API
+- Integration tests should NOT cross library boundaries
+- Example structure:
+
+```
+libs/
+  ├── database/
+  │   ├── src/
+  │   └── tests/           # Database-specific integration tests
+  │       ├── common/      # Database test utilities
+  │       └── models/      # Tests for database models
+  ├── handlers/
+  │   ├── src/
+  │   └── tests/           # Handler-specific integration tests
+  │       ├── common/      # Handler test utilities
+  │       └── users/       # Tests for user handlers
+```
+
+### API Integration Tests
+
+- API tests should be in the main `/api/tests/` directory
+- Structure should mirror the API routes
+- Focus on end-to-end testing of the entire API
+- Example structure:
+
+```
+api/
+  ├── src/
+  └── tests/
+      ├── common/          # Shared API test utilities
+      └── integration/     # Organized by API resource
+          ├── users/       # User endpoint tests
+          ├── threads/     # Thread endpoint tests
+          └── messages/    # Message endpoint tests
+```
+
+### Modular Test Utilities
+
+- **Scope Isolation**: Test utilities should be scoped to their specific domain:
+  - Database utilities in `libs/database/tests/common/`
+  - Handler utilities in `libs/handlers/tests/common/`
+  - API utilities in `/api/tests/common/`
+
+- **Modular Structure**: Organize test utilities by function:
+
+```
+tests/common/
+  ├── mod.rs               # Re-exports common utilities
+  ├── db.rs                # Database test helpers
+  ├── http.rs              # HTTP client testing
+  ├── auth.rs              # Authentication test helpers
+  └── fixtures/            # Test data fixtures
+      ├── mod.rs           # Re-exports all fixtures
+      ├── users.rs         # User test data
+      └── messages.rs      # Message test data
+```
+
+- **No Global Utilities**: Avoid creating global
test utilities shared across all components +- **Library-Specific Fixtures**: Test fixtures should be specific to their domain + +### Test Data Isolation + +- Each test should create its own isolated data +- Use unique identifiers (UUIDs) to mark test data +- Clean up after tests complete +- Use test_id pattern to track and clean up test data: + +```rust +#[tokio::test] +async fn test_user_creation() -> Result<()> { + let test_id = Uuid::new_v4().to_string(); + let pool = get_db_pool().await?; + + // Create test data with test_id marker + let test_user = UserBuilder::new() + .with_email("test@example.com") + .with_test_id(&test_id) // Mark this as test data + .build(); + + // Insert test data + test_user.insert(&pool).await?; + + // Run test assertions... + + // Clean up all data with this test_id + cleanup_test_data(&pool, &test_id).await?; + + Ok(()) +} +``` + +### Integration Test Setup Best Practices + +#### Environment Configuration +- Create a centralized test environment setup system: ```rust + // tests/common/env.rs + use std::sync::Once; use dotenv::dotenv; - #[tokio::test] - async fn setup_test_environment() { - dotenv().ok(); // Load environment variables - // Test environment setup - } - ``` -- Service configurations should be derived from environment variables: - ```rust - // Example of service configuration using env vars - let database_url = std::env::var("DATABASE_URL") - .expect("DATABASE_URL must be set for integration tests"); - let test_api_key = std::env::var("TEST_API_KEY") - .expect("TEST_API_KEY must be set for integration tests"); - ``` -- Test database setup should include: - ```rust - use crate::database::{schema, models}; + static ENV_SETUP: Once = Once::new(); - async fn setup_test_db() -> PgPool { - let pool = PgPoolOptions::new() - .max_connections(5) - .connect(&std::env::var("TEST_DATABASE_URL")?) 
- .await?; - - // Run migrations or setup test data - // Use schema and models for consistency - Ok(pool) + /// Initialize test environment once per test process + pub fn init_test_env() { + ENV_SETUP.call_once(|| { + // First check for .env.test + if std::path::Path::new(".env.test").exists() { + dotenv::from_filename(".env.test").ok(); + } else { + // Fall back to regular .env + dotenv().ok(); + } + + // Set additional test-specific env vars + if std::env::var("TEST_ENV").is_err() { + std::env::set_var("TEST_ENV", "test"); + } + + // Initialize logger for tests + if std::env::var("TEST_LOG").is_ok() { + tracing_subscriber::fmt() + .with_env_filter("debug") + .with_test_writer() + .init(); + } + }); } ``` -### Required Environment Variables +#### Database Setup +- All integration tests must import and utilize the application's schema from [schema.rs](mdc:src/database/schema.rs) +- Database models from [models.rs](mdc:src/database/models.rs) should be used for test data setup and verification +- Create a robust database test helper: + ```rust + // tests/common/db.rs + use anyhow::Result; + use diesel_async::{AsyncPgConnection, AsyncConnection}; + use diesel_async::pooled_connection::AsyncDieselConnectionManager; + use deadpool_diesel::postgres::Pool; + use uuid::Uuid; + + pub struct TestDb { + pub pool: Pool, + pub test_id: String, // Unique identifier for this test run + } + + impl TestDb { + /// Creates a new isolated test database environment + pub async fn new() -> Result { + // Initialize environment + crate::common::env::init_test_env(); + + // Generate unique test identifier + let test_id = Uuid::new_v4().to_string(); + + // Get database config from env + let database_url = std::env::var("TEST_DATABASE_URL") + .expect("TEST_DATABASE_URL must be set for tests"); + + // Create connection manager and pool + let config = AsyncDieselConnectionManager::::new(database_url); + let pool = Pool::builder(config) + .max_size(5) + .build()?; + + let db = Self { pool, 
test_id }; + + // Setup initial test data + db.setup_schema().await?; + + Ok(db) + } + + /// Setup schema and initial test data + async fn setup_schema(&self) -> Result<()> { + let conn = &mut self.pool.get().await?; + + // Run migrations or setup tables + // Add any shared setup like creating test users + + Ok(()) + } + + /// Clean up test data created by this test instance + pub async fn cleanup(&self) -> Result<()> { + let conn = &mut self.pool.get().await?; + + // Delete data with test_id marker + // Reset sequences if needed + + Ok(()) + } + + /// Create test data with proper isolation using the test_id + pub async fn create_test_data(&self) -> Result<()> { + // Create test data tagged with test_id for isolation + Ok(()) + } + } + + impl Drop for TestDb { + fn drop(&mut self) { + // Optionally perform synchronous cleanup on drop + // This ensures cleanup even if tests panic + } + } + ``` + +#### Required Environment Variables Create a `.env.test` file with necessary test configurations: ```env TEST_DATABASE_URL=postgres://user:pass@localhost/test_db TEST_API_KEY=test-key TEST_ENV=test -# Add other required test environment variables +# Optional: enable test logging +# TEST_LOG=debug +``` + +**IMPORTANT**: All integration tests must have all required services configured and accessible via environment variables loaded through dotenv. Tests should never be written with hardcoded service configurations. 
+ +#### Service Mocks +Create reusable mock services for common external dependencies: +```rust +// tests/common/mocks/http_client.rs +pub struct MockHttpClient { + server: mockito::Server, +} + +impl MockHttpClient { + pub async fn new() -> Self { + Self { + server: mockito::Server::new_async().await, + } + } + + pub fn url(&self) -> String { + self.server.url() + } + + pub fn mock_success_response(&self, path: &str, body: &str) -> mockito::Mock { + self.server + .mock("GET", path) + .with_status(200) + .with_header("content-type", "application/json") + .with_body(body) + .create() + } + + pub fn mock_error_response(&self, path: &str, status: usize) -> mockito::Mock { + self.server + .mock("GET", path) + .with_status(status) + .create() + } +} ``` ## Test Structure @@ -104,11 +536,362 @@ mod tests { } ``` -## Mocking Guidelines -- Use mockito for HTTP service mocks -- Create mock responses that match real API responses -- Include both successful and error responses in mocks -- Clean up mocks after tests complete +## Mocking Guidelines for Async Testing + +### Mockito for HTTP Service Mocking + +Mockito is the primary tool for mocking HTTP services in tests. 
When using mockito in an async context: + +```rust +// CORRECT: Create async mockito server +#[tokio::test] +async fn test_http_client() -> Result<()> { + // Always use the async version for tokio compatibility + let server = mockito::Server::new_async().await; + + // Setup the mock with expected request and response + let mock = server + .mock("GET", "/api/data") + .with_status(200) + .with_header("content-type", "application/json") + .with_body(r#"{"key": "value"}"#) + .create(); + + // Use the server's URL with your client + let client = YourHttpClient::new(&server.url()); + let response = client.get_data().await?; + + // Verify the mock was called as expected + mock.assert(); + + Ok(()) +} +``` + +#### Mockito Best Practices: + +- **Always use `Server::new_async()`** instead of `Server::new()` for tokio compatibility +- **Match complex requests** with matchers: + ```rust + // Match JSON request bodies + server.mock("POST", "/api/users") + .match_body(mockito::Matcher::Json(json!({"name": "Test User"}))) + .with_status(201) + .create(); + + // Match headers + server.mock("GET", "/api/protected") + .match_header("authorization", "Bearer token") + .with_status(200) + .create(); + ``` +- **Test different response scenarios** (success, errors, timeouts): + ```rust + // Success response + let success_mock = server.mock("GET", "/api/resource/1") + .with_status(200) + .with_body(r#"{"id": "1", "name": "Resource"}"#) + .create(); + + // Error response + let error_mock = server.mock("GET", "/api/resource/999") + .with_status(404) + .with_body(r#"{"error": "Not found"}"#) + .create(); + + // Timeout simulation + let timeout_mock = server.mock("GET", "/api/slow") + .with_delay(std::time::Duration::from_secs(10)) + .with_status(200) + .create(); + ``` +- **Create reusable mock setup functions** for common patterns + +### Additional Mocking Libraries for Async Rust + +#### 1. 
Mock_it for trait mocking + +For mocking traits and interfaces: + +```rust +use mock_it::Mock; + +#[async_trait] +pub trait Database { + async fn get_user(&self, id: &str) -> Result; +} + +// Create a mock implementation +let mut db_mock = Mock::new(); + +// Set up expectations with async behavior +db_mock.expect_call(matching!("get_user", arg if arg == "123")) + .returns_async(Ok(User { + id: "123".to_string(), + name: "Test User".to_string(), + })); + +// Test using the mock +let result = db_mock.get_user("123").await?; +assert_eq!(result.name, "Test User"); +``` + +#### 2. Async-std's Async-Mock + +For async-std runtime testing: + +```rust +use async_std::test; +use async_mock::AsyncMock; + +#[derive(AsyncMock)] +#[async_mock(UserRepository)] +pub trait UserRepository { + async fn find_user(&self, id: &str) -> Result; +} + +#[test] +async fn test_user_service() { + // Create a mock repository + let mock_repo = MockUserRepository::new(); + + // Setup expectations + mock_repo.expect_find_user() + .with(eq("123")) + .returns(Ok(User { id: "123".to_string(), name: "User" })); + + // Test service with mock + let service = UserService::new(mock_repo); + let user = service.get_user_data("123").await?; + + assert_eq!(user.name, "User"); +} +``` + +#### 3. 
WireMock for Complex HTTP Mocking + +For extensive API mocking scenarios: + +```rust +use wiremock::{MockServer, Mock, ResponseTemplate}; +use wiremock::matchers::{method, path}; + +#[tokio::test] +async fn test_api_client() -> Result<()> { + // Start mock server + let mock_server = MockServer::start().await; + + // Setup mock + Mock::given(method("GET")) + .and(path("/api/users")) + .respond_with(ResponseTemplate::new(200) + .set_body_json(json!([ + {"id": "1", "name": "User 1"}, + {"id": "2", "name": "User 2"} + ])) + ) + .mount(&mock_server) + .await; + + // Create client using mock URL + let client = ApiClient::new(&mock_server.uri()); + + // Test the client + let users = client.list_users().await?; + assert_eq!(users.len(), 2); + + Ok(()) +} +``` + +#### 4. Tower's `mock` Module for Service Mocking + +For mocking Tower services: + +```rust +use tower_test::{mock, assert_request_eq}; +use tower::Service; + +#[tokio::test] +async fn test_tower_service() { + // Create mock service + let (mut mock, handle) = mock::pair::, Response<()>>(); + + // Spawn a task that processes requests + tokio::spawn(async move { + // Send successful response + if let Ok(request) = handle.recv().await { + handle.send_response( + Response::builder() + .status(200) + .body(()) + .unwrap() + ); + } + }); + + // Create your service that uses the mock + let service = YourService::new(mock); + + // Make request through your service + let response = service + .call(Request::get("/test").body(()).unwrap()) + .await + .unwrap(); + + assert_eq!(response.status(), 200); +} +``` + +### Mocking Database Access for Unit Tests + +For unit tests, completely mock the database access rather than using real connections: + +```rust +use mock_it::Mock; +use async_trait::async_trait; +use uuid::Uuid; + +// Define a repository trait that can be mocked +#[async_trait] +pub trait UserRepository { + async fn find_user_by_id(&self, id: &str) -> Result>; + async fn create_user(&self, email: &str, name: &str) -> 
Result<User>;
+    async fn delete_user(&self, id: &str) -> Result<()>;
+}
+
+// In unit tests, create a mock implementation
+#[tokio::test]
+async fn test_user_service_with_mocked_db() -> Result<()> {
+    // Create mock repository
+    let mut repo_mock = Mock::new();
+
+    // Setup expectations for database operations
+    let test_user = User {
+        id: Uuid::new_v4().to_string(),
+        email: "test@example.com".to_string(),
+        name: "Test User".to_string(),
+        created_at: chrono::Utc::now(),
+    };
+
+    // Mock the find_user_by_id method
+    repo_mock.expect_call(matching!("find_user_by_id", arg if arg == "123"))
+        .returns_async(Ok(Some(test_user.clone())));
+
+    // Create the service with the mocked repository
+    let service = UserService::new(repo_mock);
+
+    // Test the service logic
+    let user = service.get_user_profile("123").await?;
+
+    // Assertions to verify service logic
+    assert_eq!(user.email, "test@example.com");
+
+    Ok(())
+}
+```
+
+### Mocking Diesel Models for Unit Tests
+
+For mocking Diesel model operations:
+
+```rust
+use mockall::predicate::*;
+use mockall::mock;
+
+// Create a mock for database operations
+mock!
{
+    pub DieselDb {
+        async fn find_user(&self, id: &str) -> Result<Option<User>>;
+        async fn create_user(&self, data: NewUser) -> Result<User>;
+    }
+}
+
+// User service that depends on database
+struct UserService {
+    db: Box<dyn DieselDbTrait>,
+}
+
+impl UserService {
+    fn new(db: impl DieselDbTrait + 'static) -> Self {
+        Self { db: Box::new(db) }
+    }
+
+    async fn get_user(&self, id: &str) -> Result<Option<User>> {
+        self.db.find_user(id).await
+    }
+}
+
+// Unit test with mocked database
+#[tokio::test]
+async fn test_get_user() -> Result<()> {
+    // Create mock DB
+    let mut mock_db = MockDieselDb::new();
+
+    // Setup expectations
+    let test_user = User {
+        id: "user123".to_string(),
+        name: "Test User".to_string(),
+        email: "test@example.com".to_string(),
+        created_at: chrono::Utc::now(),
+    };
+
+    mock_db.expect_find_user()
+        .with(eq("user123"))
+        .times(1)
+        .returning(move |_| Ok(Some(test_user.clone())));
+
+    // Create service with mock
+    let service = UserService::new(mock_db);
+
+    // Test service method
+    let user = service.get_user("user123").await?;
+
+    // Verify results
+    assert!(user.is_some());
+    let user = user.unwrap();
+    assert_eq!(user.email, "test@example.com");
+
+    Ok(())
+}
+```
+
+### Combining Different Mocking Approaches
+
+For complex systems, combine different mocking libraries:
+
+```rust
+#[tokio::test]
+async fn test_complex_system() -> Result<()> {
+    // Set up HTTP mock
+    let server = mockito::Server::new_async().await;
+
+    // Set up mock responses
+    let api_mock = server
+        .mock("GET", "/api/data")
+        .with_status(200)
+        .with_body(r#"{"data": "value"}"#)
+        .create();
+
+    // Create mock database implementation
+    let mut db_mock = Mock::new();
+    db_mock.expect_call(matching!("save_data", _))
+        .returns_async(Ok(()));
+
+    // Initialize system with mocks
+    let client = HttpClient::new(&server.url());
+    let system = YourSystem::new(client, db_mock);
+
+    // Test system behavior
+    let result = system.process_and_save().await?;
+    assert!(result.is_success());
+
+    // Verify HTTP mock was
called + api_mock.assert(); + + Ok(()) +} +``` ## Error Testing - Test error conditions and error handling @@ -120,7 +903,6 @@ mod tests { ## Database Testing - Use a separate test database for integration tests - Clean up test data after tests complete -- Test database transactions and rollbacks - Test database connection error handling ## Test Output @@ -135,21 +917,24 @@ mod tests { - Tests should not have external dependencies that could fail CI - Test execution time should be reasonable -## Example Test +## Example Unit Test with Mockito ```rust #[cfg(test)] mod tests { use super::*; use mockito; use tokio; + use anyhow::Result; #[tokio::test] - async fn test_api_call_success() { + async fn test_api_call_success() -> Result<()> { // Test case: Successful API call returns expected response // Expected: Response contains user data with status 200 - let mut server = mockito::Server::new_async().await; + // Create async mockito server + let server = mockito::Server::new_async().await; + // Setup the mock with expected request and response let mock = server .mock("GET", "/api/user") .match_header("authorization", "Bearer test-token") @@ -157,119 +942,186 @@ mod tests { .with_body(r#"{"id": "123", "name": "Test User"}"#) .create(); + // Create API client with mock server URL let client = ApiClient::new(server.url()); - let response = client.get_user().await.unwrap(); + // Execute the function being tested + let response = client.get_user("test-token").await?; + + // Verify results assert_eq!(response.id, "123"); assert_eq!(response.name, "Test User"); + // Verify that the mock was called as expected mock.assert(); + + Ok(()) + } + + #[tokio::test] + async fn test_api_call_error_handling() -> Result<()> { + // Test case: API returns error status + // Expected: Function returns appropriate error + + let server = mockito::Server::new_async().await; + + // Setup mock with error response + let mock = server + .mock("GET", "/api/user") + .with_status(401) + 
.with_body(r#"{"error": "Unauthorized"}"#) + .create(); + + let client = ApiClient::new(server.url()); + + // Execute and verify it returns an error + let result = client.get_user("invalid-token").await; + assert!(result.is_err()); + + // Verify the error contains the expected message + let err = result.unwrap_err(); + assert!(err.to_string().contains("Unauthorized")); + + mock.assert(); + + Ok(()) } } ``` -## Example Integration Test +## Example Application Integration Test + ```rust +use crate::tests::common::{db::TestDb, env::init_test_env}; use crate::database::{models, schema}; -use dotenv::dotenv; +use crate::handlers::users::get_user_handler; +use anyhow::Result; #[tokio::test] -async fn test_user_creation_flow() { - // Load test environment - dotenv().ok(); +async fn test_user_creation_flow() -> Result<()> { + // Initialize test environment with proper setup + init_test_env(); - // Setup test database connection - let pool = setup_test_db().await.expect("Failed to setup test database"); + // Setup test database with isolation + let test_db = TestDb::new().await?; - // Create test user using models - let test_user = models::User { - id: Uuid::new_v4(), - email: "test@example.com".to_string(), - name: Some("Test User".to_string()), - config: serde_json::Value::Null, - created_at: Utc::now(), - updated_at: Utc::now(), - attributes: serde_json::Value::Null, - }; + // Create test user using fixture or builder pattern + let test_user = models::User::builder() + .email("test@example.com") + .name("Test User") + .build(); - // Use schema for database operations + // Insert test data with proper error handling diesel::insert_into(schema::users::table) .values(&test_user) - .execute(&mut pool.get().await?) - .expect("Failed to insert test user"); + .execute(&mut test_db.pool.get().await?) 
+ .await?; - // Test application logic - let response = create_test_client() - .get("/api/users") + // Create HTTP client for API testing + let client = reqwest::Client::new(); + + // Test the API endpoint + let response = client + .get(&format!("{}/api/users/{}", TEST_API_URL, test_user.id)) + .header("Authorization", "Bearer test-token") .send() .await?; + // Verify response status assert_eq!(response.status(), 200); - // Additional assertions... + + // Parse and verify response body + let user_response = response.json::().await?; + assert_eq!(user_response.email, "test@example.com"); + + // Clean up test data + test_db.cleanup().await?; + + Ok(()) +} +``` + +## Example Library Integration Test + +This example shows how to structure integration tests for libraries in the `libs/` directory: + +```rust +// File: libs/my_library/tests/integration_test.rs + +use my_library::{Client, Config}; +use anyhow::Result; + +// Import test utilities if needed +// Testing utilities can be in the library's tests/common/ directory + +#[tokio::test] +async fn test_client_performs_operation() -> Result<()> { + // Setup mock server for external API + let server = mockito::Server::new_async().await; + + // Configure mock responses + let mock = server + .mock("POST", "/api/resource") + .with_status(201) + .with_body(r#"{"id": "new-resource-id"}"#) + .create(); + + // Configure the client to use the mock server + let config = Config::builder() + .base_url(server.url()) + .timeout(std::time::Duration::from_secs(5)) + .build(); + + let client = Client::new(config); + + // Call the library function being tested + let result = client.create_resource("test-resource").await?; + + // Verify the library correctly processed the response + assert_eq!(result.id, "new-resource-id"); + + // Verify the mock was called as expected + mock.assert(); + + Ok(()) } ``` ## Common Test Utilities -- All shared test utilities should be placed in `tests/common/mod.rs` -- Common database setup and teardown 
functions should be in `tests/common/db.rs` -- Environment setup utilities should be in `tests/common/env.rs` -- Shared test fixtures should be in `tests/common/fixtures/` + +A comprehensive set of testing utilities has been implemented in the `tests/common/` directory. These utilities provide a standardized approach to testing throughout the codebase and should be used for all new tests. The utilities address common testing patterns and provide a consistent interface for test code. ### Common Test Module Structure ``` tests/ ├── common/ -│ ├── mod.rs # Main module file that re-exports all common utilities -│ ├── db.rs # Database setup/teardown utilities -│ ├── env.rs # Environment configuration utilities -│ ├── fixtures/ # Test data fixtures -│ │ ├── mod.rs # Exports all fixtures -│ │ ├── users.rs # User-related test data -│ │ └── threads.rs # Thread-related test data -│ └── helpers.rs # General test helper functions -└── integration/ # Integration test files +│ ├── mod.rs # Main module that re-exports all utilities +│ ├── env.rs # Environment setup utilities +│ ├── db.rs # Database testing utilities with test isolation +│ ├── http/ # HTTP testing utilities +│ │ ├── mock_server.rs # MockServer wrapper for mockito +│ │ ├── client.rs # TestHttpClient for API requests +│ │ └── mod.rs # HTTP module exports +│ ├── fixtures/ # Test data fixtures +│ │ ├── mod.rs # Exports all fixtures +│ │ ├── builder.rs # FixtureBuilder trait and helpers +│ │ ├── users.rs # User-related test data +│ │ └── threads.rs # Thread-related test data +│ ├── assertions/ # Test assertion utilities +│ │ ├── mod.rs # Exports all assertions +│ │ ├── response.rs # HTTP response assertions +│ │ └── model.rs # Model validation assertions +│ ├── matchers/ # Matcher utilities for mockito +│ │ ├── mod.rs # Exports all matchers +│ │ ├── json.rs # JSON matchers for request bodies +│ │ └── headers.rs # Header matchers for requests +│ └── helpers.rs # General test helper functions +└── integration/ # 
Integration test files ``` -### Common Database Setup -```rust -// tests/common/db.rs -use diesel::PgConnection; -use diesel::r2d2::{ConnectionManager, Pool}; -use crate::database::{models, schema}; -use dotenv::dotenv; +### Environment Setup Utilities +The `env.rs` module provides utilities for setting up the test environment: -pub struct TestDb { - pub pool: Pool>, -} - -impl TestDb { - pub async fn new() -> anyhow::Result { - dotenv().ok(); - - let database_url = std::env::var("TEST_DATABASE_URL") - .expect("TEST_DATABASE_URL must be set"); - - let manager = ConnectionManager::::new(database_url); - let pool = Pool::builder() - .max_size(5) - .build(manager)?; - - Ok(Self { pool }) - } - - pub async fn setup_test_data(&self) -> anyhow::Result<()> { - // Add common test data setup here - Ok(()) - } - - pub async fn cleanup(&self) -> anyhow::Result<()> { - // Cleanup test data - Ok(()) - } -} -``` - -### Common Environment Setup ```rust // tests/common/env.rs use std::sync::Once; @@ -277,57 +1129,603 @@ use dotenv::dotenv; static ENV_SETUP: Once = Once::new(); -pub fn setup_test_env() { +/// Initialize test environment once per test process +pub fn init_test_env() { ENV_SETUP.call_once(|| { - dotenv().ok(); - // Set any default environment variables for tests - std::env::set_var("TEST_ENV", "test"); + // First check for .env.test + if std::path::Path::new(".env.test").exists() { + dotenv::from_filename(".env.test").ok(); + } else { + // Fall back to regular .env + dotenv().ok(); + } + + // Set additional test-specific env vars + if std::env::var("TEST_ENV").is_err() { + std::env::set_var("TEST_ENV", "test"); + } + + // Initialize logger for tests + if std::env::var("TEST_LOG").is_ok() { + tracing_subscriber::fmt() + .with_env_filter("debug") + .with_test_writer() + .init(); + } }); } + +/// Get a config value from environment with fallback +pub fn get_test_config(key: &str, default: &str) -> String { + std::env::var(key).unwrap_or_else(|_| default.to_string()) +} 
``` -### Example Test Fixtures +### Database Testing Utilities +The `db.rs` module provides utilities for database testing with proper test isolation: + ```rust -// tests/common/fixtures/users.rs -use crate::database::models::User; -use chrono::Utc; +// tests/common/db.rs +use anyhow::Result; +use diesel_async::{AsyncPgConnection, AsyncConnection}; +use diesel_async::pooled_connection::AsyncDieselConnectionManager; +use deadpool_diesel::postgres::Pool; use uuid::Uuid; -pub fn create_test_user() -> User { - User { - id: Uuid::new_v4(), - email: "test@example.com".to_string(), - name: Some("Test User".to_string()), - config: serde_json::Value::Null, - created_at: Utc::now(), - updated_at: Utc::now(), - attributes: serde_json::Value::Null, +pub struct TestDb { + pub pool: Pool, + pub test_id: String, // Unique identifier for this test run +} + +impl TestDb { + /// Creates a new isolated test database environment + pub async fn new() -> Result { + // Initialize environment + crate::common::env::init_test_env(); + + // Generate unique test identifier + let test_id = Uuid::new_v4().to_string(); + + // Get database config from env + let database_url = std::env::var("TEST_DATABASE_URL") + .expect("TEST_DATABASE_URL must be set for tests"); + + // Create connection manager and pool + let config = AsyncDieselConnectionManager::::new(database_url); + let pool = Pool::builder(config) + .max_size(5) + .build()?; + + let db = Self { pool, test_id }; + + // Optional: Setup initial test data + // db.setup_schema().await?; + + Ok(db) + } + + /// Get a connection from the pool + pub async fn get_conn(&self) -> Result { + Ok(self.pool.get().await?) 
+ } + + /// Clean up test data created by this test instance + pub async fn cleanup(&self) -> Result<()> { + // Example: Clean up tables used in tests + // let conn = &mut self.get_conn().await?; + // diesel::delete(schema::users::table) + // .filter(schema::users::test_id.eq(&self.test_id)) + // .execute(conn) + // .await?; + + Ok(()) + } + + /// Get the unique test ID for this test instance + pub fn test_id(&self) -> &str { + &self.test_id + } +} + +impl Drop for TestDb { + fn drop(&mut self) { + // Optional synchronous cleanup fallback } } ``` -### Using Common Test Utilities +### HTTP Testing Utilities +The `http/` directory contains utilities for HTTP testing, including a MockServer wrapper and a TestHttpClient: + ```rust -// Example integration test using common utilities -use crate::tests::common::{db::TestDb, env::setup_test_env, fixtures}; +// tests/common/http/mock_server.rs +use anyhow::Result; +use mockito::{self, Mock, Server}; +use serde_json::Value; + +/// MockServer is a wrapper around mockito::Server +pub struct MockServer { + server: Server, +} + +impl MockServer { + /// Create a new MockServer + pub async fn new() -> Result { + Ok(Self { + server: Server::new_async().await, + }) + } + + /// Get the base URL of the mock server + pub fn url(&self) -> String { + self.server.url() + } + + /// Mock a GET request with JSON response + pub fn mock_get_json(&self, path: &str, response: &T) -> Result { + let body = serde_json::to_string(response)?; + + Ok(self.server + .mock("GET", path) + .with_status(200) + .with_header("content-type", "application/json") + .with_body(body) + .create()) + } + + /// Mock a POST request with JSON request and response + pub fn mock_post_json(&self, + path: &str, + request_matcher: Option, + response: &T) -> Result { + let body = serde_json::to_string(response)?; + let mut mock = self.server + .mock("POST", path) + .with_status(200) + .with_header("content-type", "application/json") + .with_body(body); + + // Add request 
body matcher if provided + if let Some(req_body) = request_matcher { + mock = mock.match_body(crate::common::matchers::json::json_contains(req_body)); + } + + Ok(mock.create()) + } + + /// Mock an error response + pub fn mock_error(&self, path: &str, method: &str, status: usize, message: &str) -> Result { + let error_body = serde_json::json!({ + "error": message, + }); + + Ok(self.server + .mock(method, path) + .with_status(status) + .with_header("content-type", "application/json") + .with_body(serde_json::to_string(&error_body)?) + .create()) + } +} + +// tests/common/http/client.rs +use anyhow::Result; +use reqwest::{Client, Method, RequestBuilder, Response, StatusCode}; +use serde::de::DeserializeOwned; +use serde_json::Value; +use std::time::Duration; + +/// TestHttpClient provides a fluent API for making HTTP requests in tests +pub struct TestHttpClient { + client: Client, + base_url: String, +} + +impl TestHttpClient { + /// Create a new TestHttpClient with the given base URL + pub fn new(base_url: &str) -> Self { + let client = Client::builder() + .timeout(Duration::from_secs(5)) + .build() + .expect("Failed to create HTTP client"); + + Self { + client, + base_url: base_url.to_string(), + } + } + + /// Create a request builder with the given method and path + pub fn request(&self, method: Method, path: &str) -> RequestBuilder { + let url = format!("{}{}", self.base_url, path); + self.client.request(method, url) + } + + /// Make a GET request and return the response + pub async fn get(&self, path: &str) -> Result { + Ok(self.request(Method::GET, path).send().await?) + } + + /// Make a POST request with the given body and return the response + pub async fn post(&self, path: &str, body: &T) -> Result { + Ok(self.request(Method::POST, path) + .json(body) + .send() + .await?) 
+ } + + /// Make a GET request and parse the response as JSON + pub async fn get_json(&self, path: &str) -> Result { + let response = self.get(path).await?; + + if response.status().is_success() { + Ok(response.json::().await?) + } else { + let status = response.status(); + let error_text = response.text().await?; + anyhow::bail!("Request failed with status {}: {}", status, error_text) + } + } + + /// Make a POST request with the given body and parse the response as JSON + pub async fn post_json( + &self, + path: &str, + body: &T + ) -> Result { + let response = self.post(path, body).await?; + + if response.status().is_success() { + Ok(response.json::().await?) + } else { + let status = response.status(); + let error_text = response.text().await?; + anyhow::bail!("Request failed with status {}: {}", status, error_text) + } + } +} +``` + +### Fixture Builder Pattern +The `fixtures/builder.rs` file provides a builder pattern for creating test fixtures: + +```rust +// tests/common/fixtures/builder.rs +use uuid::Uuid; + +/// TestFixture is a trait for test fixtures +pub trait TestFixture { + /// Get the test ID for this fixture + fn test_id(&self) -> Option<&str>; + + /// Set the test ID for this fixture + fn with_test_id(self, test_id: &str) -> Self; +} + +/// FixtureBuilder is a trait for building test fixtures +pub trait FixtureBuilder { + /// Build the fixture + fn build(&self) -> T; + + /// Build the fixture with the given test ID + fn build_with_test_id(&self, test_id: &str) -> T; +} + +/// Example builder for a User fixture +#[derive(Default)] +pub struct UserBuilder { + pub email: Option, + pub name: Option, + pub test_id: Option, +} + +impl UserBuilder { + pub fn new() -> Self { + Self::default() + } + + pub fn email(mut self, email: &str) -> Self { + self.email = Some(email.to_string()); + self + } + + pub fn name(mut self, name: &str) -> Self { + self.name = Some(name.to_string()); + self + } + + pub fn test_id(mut self, test_id: &str) -> Self { + 
self.test_id = Some(test_id.to_string()); + self + } +} + +impl FixtureBuilder for UserBuilder { + fn build(&self) -> User { + User { + id: Uuid::new_v4(), + email: self.email.clone().unwrap_or_else(|| format!("user-{}@example.com", Uuid::new_v4())), + name: self.name.clone(), + test_id: self.test_id.clone(), + created_at: chrono::Utc::now(), + updated_at: chrono::Utc::now(), + } + } + + fn build_with_test_id(&self, test_id: &str) -> User { + let mut builder = self.clone(); + builder.test_id = Some(test_id.to_string()); + builder.build() + } +} +``` + +### Assertion Utilities +The `assertions/` directory contains utilities for making assertions in tests: + +```rust +// tests/common/assertions/response.rs +use anyhow::Result; +use reqwest::Response; +use reqwest::StatusCode; +use serde::de::DeserializeOwned; +use serde_json::Value; + +/// Extension trait for reqwest::Response providing assertion methods +pub trait ResponseAssertions { + /// Assert that the response has the given status code + fn assert_status(self, status: StatusCode) -> Self; + + /// Assert that the response has a success status code (2xx) + fn assert_success(self) -> Self; + + /// Assert that the response has an error status code (4xx or 5xx) + fn assert_error(self) -> Self; + + /// Assert that the response contains the given header + fn assert_header(self, name: &str, value: &str) -> Self; + + /// Assert that the response body contains the given JSON value + fn assert_json_contains(self, expected: Value) -> Result; + + /// Deserialize the response body as JSON and apply the given assertion function + fn assert_json(self, assert_fn: F) -> Result + where + T: DeserializeOwned, + F: FnOnce(&T) -> bool; +} + +impl ResponseAssertions for Response { + fn assert_status(self, status: StatusCode) -> Self { + assert_eq!(self.status(), status, "Expected status code {}, got {}", status, self.status()); + self + } + + fn assert_success(self) -> Self { + assert!(self.status().is_success(), "Expected success 
status, got {}", self.status()); + self + } + + fn assert_error(self) -> Self { + assert!(self.status().is_client_error() || self.status().is_server_error(), + "Expected error status, got {}", self.status()); + self + } + + fn assert_header(self, name: &str, value: &str) -> Self { + let header_value = self.headers().get(name) + .expect(&format!("Header {} not found", name)) + .to_str() + .expect(&format!("Header {} is not valid UTF-8", name)); + + assert_eq!(header_value, value, "Expected header {} to be {}, got {}", name, value, header_value); + self + } + + async fn assert_json_contains(self, expected: Value) -> Result { + let json = self.json::().await?; + + // Check if expected is a subset of json + assert!(json_contains(&json, &expected), + "Expected JSON to contain {:?}, got {:?}", expected, json); + + Ok(self) + } + + async fn assert_json(self, assert_fn: F) -> Result + where + T: DeserializeOwned, + F: FnOnce(&T) -> bool, + { + let json = self.json::().await?; + + assert!(assert_fn(&json), "JSON assertion failed"); + + Ok(self) + } +} + +// Helper function to check if one JSON value contains another +fn json_contains(json: &Value, expected: &Value) -> bool { + match (json, expected) { + (Value::Object(json_obj), Value::Object(expected_obj)) => { + expected_obj.iter().all(|(k, v)| { + json_obj.get(k).map_or(false, |json_v| json_contains(json_v, v)) + }) + } + (Value::Array(json_arr), Value::Array(expected_arr)) => { + expected_arr.iter().all(|expected_v| { + json_arr.iter().any(|json_v| json_contains(json_v, expected_v)) + }) + } + _ => json == expected, + } +} +``` + +### JSON and Header Matchers +The `matchers/` directory contains utilities for matching JSON and headers in mockito: + +```rust +// tests/common/matchers/json.rs +use mockito::Matcher; +use serde_json::Value; +use std::fmt; + +/// Create a matcher that checks if a JSON request body contains the given JSON value +pub fn json_contains(expected: Value) -> JsonContainsMatcher { + 
JsonContainsMatcher { expected } +} + +/// Matcher for checking if a JSON request body contains a JSON value +pub struct JsonContainsMatcher { + expected: Value, +} + +impl Matcher for JsonContainsMatcher { + fn matches(&self, body: &[u8]) -> bool { + let actual: Value = match serde_json::from_slice(body) { + Ok(v) => v, + Err(_) => return false, + }; + + contains_json(&actual, &self.expected) + } +} + +impl fmt::Display for JsonContainsMatcher { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "JSON containing {}", self.expected) + } +} + +/// Helper function to check if one JSON value contains another +fn contains_json(json: &Value, expected: &Value) -> bool { + match (json, expected) { + (Value::Object(json_obj), Value::Object(expected_obj)) => { + expected_obj.iter().all(|(k, v)| { + json_obj.get(k).map_or(false, |json_v| contains_json(json_v, v)) + }) + } + (Value::Array(json_arr), Value::Array(expected_arr)) => { + expected_arr.iter().all(|expected_v| { + json_arr.iter().any(|json_v| contains_json(json_v, expected_v)) + }) + } + _ => json == expected, + } +} + +// tests/common/matchers/headers.rs +use mockito::Matcher; +use std::collections::HashMap; +use std::fmt; + +/// Create a matcher that checks if request headers match the expected headers +pub fn header_matcher(expected_headers: HashMap) -> HeaderMatcher { + HeaderMatcher { expected_headers } +} + +/// Matcher for checking request headers +pub struct HeaderMatcher { + expected_headers: HashMap, +} + +impl Matcher for HeaderMatcher { + fn matches(&self, request_headers: &[u8]) -> bool { + let headers_str = match std::str::from_utf8(request_headers) { + Ok(s) => s, + Err(_) => return false, + }; + + // Parse headers + let headers: HashMap = headers_str + .split("\r\n") + .filter(|line| !line.is_empty()) + .filter_map(|line| { + let mut parts = line.splitn(2, ": "); + let name = parts.next()?; + let value = parts.next()?; + Some((name.to_lowercase(), value.to_string())) + }) + 
.collect(); + + // Check if expected headers are present + self.expected_headers.iter().all(|(name, value)| { + headers.get(&name.to_lowercase()).map_or(false, |v| v == value) + }) + } +} + +impl fmt::Display for HeaderMatcher { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "Headers containing {:?}", self.expected_headers) + } +} +``` + +### Using These Test Utilities + +Here's an example of using these test utilities in an integration test: + +```rust +use crate::tests::common::{ + db::TestDb, + env::init_test_env, + http::{MockServer, TestHttpClient}, + fixtures::UserBuilder, + assertions::ResponseAssertions, +}; +use anyhow::Result; +use serde_json::json; #[tokio::test] -async fn test_user_creation() { - // Setup test environment - setup_test_env(); +async fn test_user_api() -> Result<()> { + // Initialize environment + init_test_env(); - // Initialize test database - let test_db = TestDb::new().await.expect("Failed to setup test database"); + // Setup test database + let test_db = TestDb::new().await?; - // Get test user fixture - let test_user = fixtures::users::create_test_user(); + // Create test user using builder + let user = UserBuilder::new() + .email("test@example.com") + .name("Test User") + .build_with_test_id(test_db.test_id()); + + // Insert test user into database + // ... 
- // Run test - let result = create_user(&test_db.pool, &test_user).await?; + // Setup mock server for external API + let mock_server = MockServer::new().await?; - // Cleanup + // Configure mock responses + let mock = mock_server.mock_get_json("/api/external", &json!({ + "status": "success" + }))?; + + // Create test HTTP client + let client = TestHttpClient::new("http://localhost:8000"); + + // Test the API endpoint + let response = client.get(&format!("/api/users/{}", user.id)).await?; + + // Assert on the response + response + .assert_status(StatusCode::OK) + .assert_header("content-type", "application/json") + .assert_json_contains(json!({ + "id": user.id.to_string(), + "email": "test@example.com" + }))?; + + // Verify the mock was called + mock.assert(); + + // Clean up test_db.cleanup().await?; - assert!(result.is_ok()); + Ok(()) } +``` ``` \ No newline at end of file diff --git a/api/CLAUDE.md b/api/CLAUDE.md index 3009d0799..e53225b95 100644 --- a/api/CLAUDE.md +++ b/api/CLAUDE.md @@ -12,6 +12,8 @@ The project's detailed documentation is in the `/documentation` directory: - `tools.mdc` - Tools documentation - `websockets.mdc` - WebSocket patterns +While these files contain best practices for writing tests, REST patterns, etc., **each subdirectory should have its own README.md or CLAUDE.md** that should be referenced first when working in that specific area. These subdirectory-specific guides often contain implementation details and patterns specific to that component. 
+ ## Repository Structure - `src/` - Main server code - `routes/` - API endpoints (REST, WebSocket) diff --git a/api/documentation/database_migrations.mdc b/api/documentation/database_migrations.mdc index 5493fa542..16deae353 100644 --- a/api/documentation/database_migrations.mdc +++ b/api/documentation/database_migrations.mdc @@ -1,3 +1,8 @@ +--- +description: Helpful when making migrations with diesel.rs +globs: +alwaysApply: false +--- # Database Migrations Guide This document provides a comprehensive guide on how to create and manage database migrations in our project. @@ -6,7 +11,7 @@ This document provides a comprehensive guide on how to create and manage databas Database migrations are a way to evolve your database schema over time. Each migration represents a specific change to the database schema, such as creating a table, adding a column, or modifying an enum type. Migrations are version-controlled and can be applied or reverted as needed. -In our project, we use [Diesel](https://diesel.rs/) for handling database migrations. Diesel is an ORM and query builder for Rust that helps us manage our database schema changes in a safe and consistent way. +In our project, we use [Diesel](mdc:https:/diesel.rs) for handling database migrations. Diesel is an ORM and query builder for Rust that helps us manage our database schema changes in a safe and consistent way. ## Migration Workflow