This commit is contained in:
dal 2025-09-03 15:32:35 -06:00
parent 551ef41f8c
commit c75f4ae6eb
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
4 changed files with 1423 additions and 3 deletions

View File

@ -0,0 +1,522 @@
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import yaml from 'js-yaml';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import type { BusterConfig, DeployOptions, DeployRequest, DeployResponse, Model } from './schemas';
import { deployHandler, validateDeployOptions } from './deploy-handler';
// Mock the deployment strategies module so tests never hit network/auth.
// NOTE: vitest hoists vi.mock() calls to the top of the module, so this
// applies before './deploy-handler' resolves its imports.
vi.mock('./deployment/strategies', () => ({
  createAuthenticatedDeployer: vi.fn(),
  createDryRunDeployer: vi.fn(),
}));
// Silence console output from the handler while keeping call assertions
// possible (e.g. consoleErrorSpy is checked in the error-handling tests).
const consoleInfoSpy = vi.spyOn(console, 'info').mockImplementation(() => {});
const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {});
const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
describe('deploy-handler', () => {
// Per-test scratch directory, created fresh for every test case.
let testDir: string;
beforeEach(async () => {
  // crypto.randomUUID() (global since Node 18) gives a collision-proof
  // suffix; the previous Math.random().toString(36).substring(7) suffix is
  // short and variable-length, so parallel runs could collide on tmpdir.
  testDir = join(tmpdir(), `buster-cli-test-${crypto.randomUUID()}`);
  await mkdir(testDir, { recursive: true });
  // Reset recorded calls on all mocks/spies between tests.
  vi.clearAllMocks();
});
afterEach(async () => {
  // force: true — do not fail if a test already removed the directory.
  await rm(testDir, { recursive: true, force: true });
});
describe('configuration cascading integration tests', () => {
it('should cascade global config to models without config', async () => {
  // Project-level buster.yml supplies data_source/database/schema defaults.
  const globalConfig: BusterConfig = {
    projects: [
      {
        name: 'test-project',
        data_source: 'global_postgres',
        database: 'global_db',
        schema: 'global_schema',
      },
    ],
  };
  await writeFile(join(testDir, 'buster.yml'), yaml.dump(globalConfig));
  // A bare model: no data_source, database, or schema of its own.
  await mkdir(join(testDir, 'models'), { recursive: true });
  await writeFile(
    join(testDir, 'models', 'users.yml'),
    yaml.dump({
      name: 'users',
      dimensions: [{ name: 'id', searchable: false }],
      measures: [{ name: 'count' }],
    })
  );
  // Capture whatever request the handler hands to the dry-run deployer.
  let seenRequest: DeployRequest | undefined;
  const deployMock = vi.fn(async (request: DeployRequest) => {
    seenRequest = request;
    return {
      success: request.models.map((m) => ({ model_name: m.name, data_source: m.data_source_name })),
      updated: [],
      no_change: [],
      failures: [],
    } as DeployResponse;
  });
  const strategies = await import('./deployment/strategies');
  (strategies.createDryRunDeployer as any).mockReturnValue(deployMock);
  // Run the deploy handler in dry-run mode against the scratch workspace.
  await deployHandler({ path: testDir, dryRun: true, verbose: false });
  // The model must inherit every global value.
  expect(seenRequest).toBeDefined();
  expect(seenRequest?.models).toHaveLength(1);
  expect(seenRequest?.models[0].data_source_name).toBe('global_postgres');
  expect(seenRequest?.models[0].database).toBe('global_db');
  expect(seenRequest?.models[0].schema).toBe('global_schema');
});
it('should allow models to override global config', async () => {
  // Create buster.yml with global config
  const busterConfig: BusterConfig = {
    projects: [
      {
        name: 'test-project',
        data_source: 'global_postgres',
        database: 'global_db',
        schema: 'global_schema',
      },
    ],
  };
  await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
  // Create model file that overrides some config
  await mkdir(join(testDir, 'models'), { recursive: true });
  const model = {
    name: 'orders',
    data_source_name: 'model_mysql', // Override
    schema: 'model_sales', // Override
    // database not specified, should inherit from global
    dimensions: [{ name: 'order_id', searchable: false }],
    measures: [{ name: 'total' }],
  };
  await writeFile(join(testDir, 'models', 'orders.yml'), yaml.dump(model));
  // Mock the deployment function and capture the request it receives
  let capturedRequest: DeployRequest | undefined;
  const mockDeploy = vi.fn(async (request: DeployRequest) => {
    capturedRequest = request;
    return {
      success: request.models.map((m) => ({ model_name: m.name, data_source: m.data_source_name })),
      updated: [],
      no_change: [],
      failures: [],
    } as DeployResponse;
  });
  const { createDryRunDeployer } = await import('./deployment/strategies');
  (createDryRunDeployer as any).mockReturnValue(mockDeploy);
  // Run the deploy handler
  const options: DeployOptions = {
    path: testDir,
    dryRun: true,
    verbose: false,
  };
  await deployHandler(options);
  // Verify the model overrides won and the unset field was inherited
  expect(capturedRequest).toBeDefined();
  expect(capturedRequest?.models).toHaveLength(1);
  expect(capturedRequest?.models[0].data_source_name).toBe('model_mysql'); // Overridden
  expect(capturedRequest?.models[0].database).toBe('global_db'); // Inherited
  expect(capturedRequest?.models[0].schema).toBe('model_sales'); // Overridden
});
it('should handle multiple models with different override patterns', async () => {
  // Create buster.yml with global config
  const busterConfig: BusterConfig = {
    projects: [
      {
        name: 'test-project',
        data_source: 'global_postgres',
        database: 'global_db',
        schema: 'global_schema',
      },
    ],
  };
  await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
  // Create models directory
  await mkdir(join(testDir, 'models'), { recursive: true });
  // Model 1: No overrides, inherits all
  const model1 = {
    name: 'users',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [{ name: 'count' }],
  };
  await writeFile(join(testDir, 'models', 'users.yml'), yaml.dump(model1));
  // Model 2: Override data_source only
  const model2 = {
    name: 'analytics',
    data_source_name: 'bigquery',
    dimensions: [{ name: 'event_id', searchable: false }],
    measures: [{ name: 'events' }],
  };
  await writeFile(join(testDir, 'models', 'analytics.yml'), yaml.dump(model2));
  // Model 3: Override all fields
  const model3 = {
    name: 'external',
    data_source_name: 'snowflake',
    database: 'warehouse',
    schema: 'external_data',
    dimensions: [{ name: 'external_id', searchable: false }],
    measures: [],
  };
  await writeFile(join(testDir, 'models', 'external.yml'), yaml.dump(model3));
  // Mock the deployment function, capturing the request for inspection
  let capturedRequest: DeployRequest | undefined;
  const mockDeploy = vi.fn(async (request: DeployRequest) => {
    capturedRequest = request;
    return {
      success: request.models.map((m) => ({ model_name: m.name, data_source: m.data_source_name })),
      updated: [],
      no_change: [],
      failures: [],
    } as DeployResponse;
  });
  const { createDryRunDeployer } = await import('./deployment/strategies');
  (createDryRunDeployer as any).mockReturnValue(mockDeploy);
  // Run the deploy handler
  const options: DeployOptions = {
    path: testDir,
    dryRun: true,
    verbose: false,
  };
  await deployHandler(options);
  // Verify all models have correct config (looked up by name, since
  // deployment order is not part of the contract)
  expect(capturedRequest).toBeDefined();
  expect(capturedRequest?.models).toHaveLength(3);
  // Model 1: All inherited
  const users = capturedRequest?.models.find((m) => m.name === 'users');
  expect(users?.data_source_name).toBe('global_postgres');
  expect(users?.database).toBe('global_db');
  expect(users?.schema).toBe('global_schema');
  // Model 2: data_source overridden, others inherited
  const analytics = capturedRequest?.models.find((m) => m.name === 'analytics');
  expect(analytics?.data_source_name).toBe('bigquery');
  expect(analytics?.database).toBe('global_db');
  expect(analytics?.schema).toBe('global_schema');
  // Model 3: All overridden
  const external = capturedRequest?.models.find((m) => m.name === 'external');
  expect(external?.data_source_name).toBe('snowflake');
  expect(external?.database).toBe('warehouse');
  expect(external?.schema).toBe('external_data');
});
it('should handle multi-model files with cascading', async () => {
  // Create buster.yml with global config
  const busterConfig: BusterConfig = {
    projects: [
      {
        name: 'test-project',
        data_source: 'global_postgres',
        database: 'global_db',
        schema: 'global_schema',
      },
    ],
  };
  await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
  // Create multi-model file (several models under a single `models:` key)
  await mkdir(join(testDir, 'models'), { recursive: true });
  const multiModel = {
    models: [
      {
        name: 'model1',
        dimensions: [{ name: 'id', searchable: false }],
      },
      {
        name: 'model2',
        data_source_name: 'custom_source',
        dimensions: [{ name: 'id', searchable: false }],
      },
      {
        name: 'model3',
        schema: 'custom_schema',
        dimensions: [{ name: 'id', searchable: false }],
      },
    ],
  };
  await writeFile(join(testDir, 'models', 'models.yml'), yaml.dump(multiModel));
  // Mock the deployment function
  let capturedRequest: DeployRequest | undefined;
  const mockDeploy = vi.fn(async (request: DeployRequest) => {
    capturedRequest = request;
    return {
      success: request.models.map((m) => ({ model_name: m.name, data_source: m.data_source_name })),
      updated: [],
      no_change: [],
      failures: [],
    } as DeployResponse;
  });
  const { createDryRunDeployer } = await import('./deployment/strategies');
  (createDryRunDeployer as any).mockReturnValue(mockDeploy);
  // Run the deploy handler
  const options: DeployOptions = {
    path: testDir,
    dryRun: true,
    verbose: false,
  };
  await deployHandler(options);
  // Verify cascading worked for every model in the single file
  expect(capturedRequest).toBeDefined();
  expect(capturedRequest?.models).toHaveLength(3);
  const model1 = capturedRequest?.models.find((m) => m.name === 'model1');
  expect(model1?.data_source_name).toBe('global_postgres');
  expect(model1?.schema).toBe('global_schema');
  const model2 = capturedRequest?.models.find((m) => m.name === 'model2');
  expect(model2?.data_source_name).toBe('custom_source');
  expect(model2?.schema).toBe('global_schema');
  const model3 = capturedRequest?.models.find((m) => m.name === 'model3');
  expect(model3?.data_source_name).toBe('global_postgres');
  expect(model3?.schema).toBe('custom_schema');
});
it('should handle multiple projects with different configs', async () => {
  // Create buster.yml with multiple projects
  const busterConfig: BusterConfig = {
    projects: [
      {
        name: 'postgres-project',
        data_source: 'postgres',
        database: 'pg_db',
        schema: 'public',
      },
      {
        name: 'bigquery-project',
        data_source: 'bigquery',
        database: 'analytics',
        schema: 'events',
      },
    ],
  };
  await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
  // Create model file picked up by both projects
  await mkdir(join(testDir, 'models'), { recursive: true });
  const model = {
    name: 'shared_model',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [{ name: 'count' }],
  };
  await writeFile(join(testDir, 'models', 'shared.yml'), yaml.dump(model));
  // Mock the deployment function; one request is expected per project
  const capturedRequests: DeployRequest[] = [];
  const mockDeploy = vi.fn(async (request: DeployRequest) => {
    capturedRequests.push(request);
    return {
      success: request.models.map((m) => ({ model_name: m.name, data_source: m.data_source_name })),
      updated: [],
      no_change: [],
      failures: [],
    } as DeployResponse;
  });
  const { createDryRunDeployer } = await import('./deployment/strategies');
  (createDryRunDeployer as any).mockReturnValue(mockDeploy);
  // Run the deploy handler
  const options: DeployOptions = {
    path: testDir,
    dryRun: true,
    verbose: false,
  };
  await deployHandler(options);
  // Each project should have deployed the model with its own config
  expect(capturedRequests).toHaveLength(2);
  // First project deployment
  expect(capturedRequests[0].models[0].data_source_name).toBe('postgres');
  expect(capturedRequests[0].models[0].database).toBe('pg_db');
  expect(capturedRequests[0].models[0].schema).toBe('public');
  // Second project deployment
  expect(capturedRequests[1].models[0].data_source_name).toBe('bigquery');
  expect(capturedRequests[1].models[0].database).toBe('analytics');
  expect(capturedRequests[1].models[0].schema).toBe('events');
});
});
describe('validateDeployOptions', () => {
  // validateDeployOptions returns { valid, errors } without touching disk
  // except to stat the optional path.
  it('should validate valid options', () => {
    const opts: DeployOptions = { dryRun: false, verbose: true };
    const outcome = validateDeployOptions(opts);
    expect(outcome.valid).toBe(true);
    expect(outcome.errors).toHaveLength(0);
  });
  it('should validate when path exists', () => {
    const opts: DeployOptions = { path: testDir, dryRun: false, verbose: false };
    const outcome = validateDeployOptions(opts);
    expect(outcome.valid).toBe(true);
    expect(outcome.errors).toHaveLength(0);
  });
  it('should fail when path does not exist', () => {
    const opts: DeployOptions = { path: '/non/existent/path', dryRun: false, verbose: false };
    const outcome = validateDeployOptions(opts);
    expect(outcome.valid).toBe(false);
    expect(outcome.errors).toContain('Path does not exist: /non/existent/path');
  });
});
describe('error handling', () => {
  it('should handle missing buster.yml gracefully', async () => {
    const options: DeployOptions = {
      path: testDir,
      dryRun: true,
      verbose: false,
    };
    // No buster.yml was written into testDir, so the handler must reject.
    await expect(deployHandler(options)).rejects.toThrow('No buster.yml found');
  });
  it('should handle invalid model files gracefully', async () => {
    // Create buster.yml
    const busterConfig: BusterConfig = {
      projects: [
        {
          name: 'test-project',
          data_source: 'postgres',
          database: 'db',
          schema: 'public',
        },
      ],
    };
    await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
    // Create invalid model file (unparseable YAML)
    await mkdir(join(testDir, 'models'), { recursive: true });
    await writeFile(join(testDir, 'models', 'invalid.yml'), 'invalid: yaml: content: :::');
    // Mock the deployment function
    const mockDeploy = vi.fn(async () => ({
      success: [],
      updated: [],
      no_change: [],
      failures: [],
    }));
    const { createDryRunDeployer } = await import('./deployment/strategies');
    (createDryRunDeployer as any).mockReturnValue(mockDeploy);
    const options: DeployOptions = {
      path: testDir,
      dryRun: true,
      verbose: false,
    };
    const result = await deployHandler(options);
    // Should handle the error gracefully: report a failure rather than throw
    expect(result.failures.length).toBeGreaterThan(0);
    expect(consoleErrorSpy).toHaveBeenCalled();
  });
  it('should handle deployment failures gracefully', async () => {
    // Create buster.yml
    const busterConfig: BusterConfig = {
      projects: [
        {
          name: 'test-project',
          data_source: 'postgres',
          database: 'db',
          schema: 'public',
        },
      ],
    };
    await writeFile(join(testDir, 'buster.yml'), yaml.dump(busterConfig));
    // Create valid model
    await mkdir(join(testDir, 'models'), { recursive: true });
    const model = {
      name: 'users',
      dimensions: [{ name: 'id', searchable: false }],
    };
    await writeFile(join(testDir, 'models', 'users.yml'), yaml.dump(model));
    // Mock deployment to fail (simulated transport error)
    const mockDeploy = vi.fn(async () => {
      throw new Error('Deployment failed: Network error');
    });
    const { createDryRunDeployer } = await import('./deployment/strategies');
    (createDryRunDeployer as any).mockReturnValue(mockDeploy);
    const options: DeployOptions = {
      path: testDir,
      dryRun: true,
      verbose: false,
    };
    const result = await deployHandler(options);
    // Should handle the error and return failure result
    expect(result.failures).toHaveLength(1);
    expect(result.failures[0].errors[0]).toContain('Deployment error: Deployment failed');
    expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining('Deployment failed'));
  });
});
});

View File

@ -0,0 +1,270 @@
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import type { ResolvedConfig } from '../schemas';
import { countModelsInFiles, discoverModelFiles, filterModelFiles } from './discovery';
describe('discovery', () => {
// Per-test scratch directory for file-discovery fixtures.
let testDir: string;
beforeEach(async () => {
  // crypto.randomUUID() (global since Node 18) gives a collision-proof
  // suffix; the previous Math.random().toString(36).substring(7) suffix is
  // short and variable-length, so parallel runs could collide on tmpdir.
  testDir = join(tmpdir(), `buster-cli-test-${crypto.randomUUID()}`);
  await mkdir(testDir, { recursive: true });
});
afterEach(async () => {
  // force: true — do not fail if a test already removed the directory.
  await rm(testDir, { recursive: true, force: true });
});
describe('discoverModelFiles', () => {
  it('should find all yaml files matching include patterns', async () => {
    // Create test files: two under models/, one at the root, one non-yaml
    await mkdir(join(testDir, 'models'), { recursive: true });
    await writeFile(join(testDir, 'models', 'users.yml'), 'name: users');
    await writeFile(join(testDir, 'models', 'orders.yaml'), 'name: orders');
    await writeFile(join(testDir, 'products.yml'), 'name: products');
    await writeFile(join(testDir, 'ignored.txt'), 'not a yaml file');
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['**/*.yml', '**/*.yaml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    expect(files).toHaveLength(3);
    // Strip the temp-dir prefix so the expectation is path-independent
    expect(files.map((f) => f.replace(testDir, '')).sort()).toEqual([
      '/models/orders.yaml',
      '/models/users.yml',
      '/products.yml',
    ]);
  });
  it('should respect specific include patterns', async () => {
    // Create test files in two sibling directories
    await mkdir(join(testDir, 'models'), { recursive: true });
    await mkdir(join(testDir, 'metrics'), { recursive: true });
    await writeFile(join(testDir, 'models', 'users.yml'), 'name: users');
    await writeFile(join(testDir, 'models', 'orders.yml'), 'name: orders');
    await writeFile(join(testDir, 'metrics', 'revenue.yml'), 'name: revenue');
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['models/**/*.yml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    expect(files).toHaveLength(2);
    expect(files.every((f) => f.includes('/models/'))).toBe(true);
  });
  it('should ignore node_modules and other excluded directories', async () => {
    // Create files in directories that discovery should skip by default
    await mkdir(join(testDir, 'node_modules'), { recursive: true });
    await mkdir(join(testDir, 'dist'), { recursive: true });
    await mkdir(join(testDir, '.git'), { recursive: true });
    await mkdir(join(testDir, 'models'), { recursive: true });
    await writeFile(join(testDir, 'node_modules', 'model.yml'), 'name: ignored');
    await writeFile(join(testDir, 'dist', 'model.yml'), 'name: ignored');
    await writeFile(join(testDir, '.git', 'model.yml'), 'name: ignored');
    await writeFile(join(testDir, 'models', 'valid.yml'), 'name: valid');
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['**/*.yml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    expect(files).toHaveLength(1);
    expect(files[0]).toContain('models/valid.yml');
  });
  it('should return empty array when no files match', async () => {
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['**/*.yml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    expect(files).toEqual([]);
  });
  it('should handle deeply nested directories', async () => {
    const deepPath = join(testDir, 'a', 'b', 'c', 'd');
    await mkdir(deepPath, { recursive: true });
    await writeFile(join(deepPath, 'deep.yml'), 'name: deep');
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['**/*.yml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    expect(files).toHaveLength(1);
    expect(files[0]).toContain('a/b/c/d/deep.yml');
  });
  it('should return sorted file paths', async () => {
    // Written in non-sorted order on purpose
    await writeFile(join(testDir, 'z.yml'), 'name: z');
    await writeFile(join(testDir, 'a.yml'), 'name: a');
    await writeFile(join(testDir, 'm.yml'), 'name: m');
    const config: ResolvedConfig = {
      data_source_name: 'postgres',
      schema: 'public',
      include: ['*.yml'],
      exclude: [],
    };
    const files = await discoverModelFiles(config, testDir);
    const fileNames = files.map((f) => f.split('/').pop());
    expect(fileNames).toEqual(['a.yml', 'm.yml', 'z.yml']);
  });
});
describe('filterModelFiles', () => {
  // Fixture paths; excluded entries are reported relative to the base dir
  // passed as the third argument ('/path/to' below).
  const testFiles = [
    '/path/to/models/users.yml',
    '/path/to/models/orders.yml',
    '/path/to/test/test-model.yml',
    '/path/to/temp/temp-model.yml',
    '/path/to/metrics/revenue.yml',
  ];
  it('should return all files when no exclusion patterns', async () => {
    const { included, excluded } = await filterModelFiles(testFiles, [], '/path/to');
    expect(included).toEqual(testFiles);
    expect(excluded).toEqual([]);
  });
  it('should exclude files matching single pattern', async () => {
    const { included, excluded } = await filterModelFiles(
      testFiles,
      ['**/test/**'],
      '/path/to'
    );
    expect(included).toHaveLength(4);
    expect(excluded).toHaveLength(1);
    expect(excluded[0]).toEqual({
      file: 'test/test-model.yml',
      reason: 'Matched exclusion pattern: **/test/**',
    });
  });
  it('should exclude files matching multiple patterns', async () => {
    const { included, excluded } = await filterModelFiles(
      testFiles,
      ['**/test/**', '**/temp/**'],
      '/path/to'
    );
    expect(included).toHaveLength(3);
    expect(excluded).toHaveLength(2);
    expect(excluded.map((e) => e.file).sort()).toEqual([
      'temp/temp-model.yml',
      'test/test-model.yml',
    ]);
  });
  it('should handle specific file exclusions', async () => {
    const { included, excluded } = await filterModelFiles(
      testFiles,
      ['models/users.yml'],
      '/path/to'
    );
    expect(included).toHaveLength(4);
    expect(excluded).toHaveLength(1);
    expect(excluded[0].file).toBe('models/users.yml');
  });
  it('should handle wildcard patterns', async () => {
    const { included, excluded } = await filterModelFiles(
      testFiles,
      ['**/revenue.*'],
      '/path/to'
    );
    expect(included).toHaveLength(4);
    expect(excluded).toHaveLength(1);
    expect(excluded[0].file).toBe('metrics/revenue.yml');
  });
  it('should work with absolute file paths', async () => {
    const absoluteFiles = [
      join(testDir, 'models', 'users.yml'),
      join(testDir, 'test', 'test.yml'),
      join(testDir, 'orders.yml'),
    ];
    const { included, excluded } = await filterModelFiles(
      absoluteFiles,
      ['**/test/**'],
      testDir
    );
    expect(included).toHaveLength(2);
    expect(excluded).toHaveLength(1);
    expect(excluded[0].file).toBe('test/test.yml');
  });
  it('should match first pattern when multiple patterns match', async () => {
    // All three patterns match the file; the reported reason must be the
    // first pattern in the list.
    const { excluded } = await filterModelFiles(
      testFiles,
      ['**/test/**', '**/*test*', '**/*.yml'],
      '/path/to'
    );
    const testExclusion = excluded.find((e) => e.file === 'test/test-model.yml');
    expect(testExclusion?.reason).toBe('Matched exclusion pattern: **/test/**');
  });
});
describe('countModelsInFiles', () => {
  // countModelsInFiles resolves to the number of files handed to it.
  it('should return the count of files', async () => {
    const paths = [
      '/path/to/model1.yml',
      '/path/to/model2.yml',
      '/path/to/model3.yml',
    ];
    await expect(countModelsInFiles(paths)).resolves.toBe(3);
  });
  it('should return 0 for empty array', async () => {
    await expect(countModelsInFiles([])).resolves.toBe(0);
  });
  it('should handle large file lists', async () => {
    const paths = Array.from({ length: 100 }, (_, i) => `/path/to/model${i}.yml`);
    await expect(countModelsInFiles(paths)).resolves.toBe(100);
  });
});
});

View File

@ -0,0 +1,631 @@
import { mkdir, rm, writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import yaml from 'js-yaml';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import type { Model } from '../schemas';
import {
ModelParsingError,
generateDefaultSQL,
parseModelFile,
resolveModelConfig,
validateModel,
} from './parsing';
describe('parsing', () => {
// Per-test scratch directory for YAML fixtures written by parseModelFile tests.
let testDir: string;
beforeEach(async () => {
  // crypto.randomUUID() (global since Node 18) gives a collision-proof
  // suffix; the previous Math.random().toString(36).substring(7) suffix is
  // short and variable-length, so parallel runs could collide on tmpdir.
  testDir = join(tmpdir(), `buster-cli-test-${crypto.randomUUID()}`);
  await mkdir(testDir, { recursive: true });
});
afterEach(async () => {
  // force: true — do not fail if a test already removed the directory.
  await rm(testDir, { recursive: true, force: true });
});
describe('resolveModelConfig', () => {
  describe('cascading configuration', () => {
    // Project-level defaults shared by every test in this group; a model's
    // own values, when present, are expected to win over these.
    const baseConfig = {
      data_source_name: 'global_postgres',
      database: 'global_db',
      schema: 'global_schema',
    };
    it('should use global config values when model values are undefined', () => {
      const model: Model = {
        name: 'test_model',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('global_postgres');
      expect(resolved.database).toBe('global_db');
      expect(resolved.schema).toBe('global_schema');
    });
    it('should use model values when they override global config', () => {
      const model: Model = {
        name: 'test_model',
        data_source_name: 'model_mysql',
        database: 'model_db',
        schema: 'model_schema',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('model_mysql');
      expect(resolved.database).toBe('model_db');
      expect(resolved.schema).toBe('model_schema');
    });
    it('should handle partial overrides - only data_source_name', () => {
      const model: Model = {
        name: 'test_model',
        data_source_name: 'model_bigquery',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('model_bigquery');
      expect(resolved.database).toBe('global_db');
      expect(resolved.schema).toBe('global_schema');
    });
    it('should handle partial overrides - only database', () => {
      const model: Model = {
        name: 'test_model',
        database: 'model_specific_db',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('global_postgres');
      expect(resolved.database).toBe('model_specific_db');
      expect(resolved.schema).toBe('global_schema');
    });
    it('should handle partial overrides - only schema', () => {
      const model: Model = {
        name: 'test_model',
        schema: 'model_specific_schema',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('global_postgres');
      expect(resolved.database).toBe('global_db');
      expect(resolved.schema).toBe('model_specific_schema');
    });
    it('should handle mixed overrides - data_source and schema', () => {
      const model: Model = {
        name: 'test_model',
        data_source_name: 'model_snowflake',
        schema: 'model_warehouse',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.data_source_name).toBe('model_snowflake');
      expect(resolved.database).toBe('global_db');
      expect(resolved.schema).toBe('model_warehouse');
    });
    it('should handle empty config object', () => {
      const model: Model = {
        name: 'test_model',
        data_source_name: 'model_postgres',
        database: 'model_db',
        schema: 'model_schema',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      // With no global config at all, model values pass through untouched
      const resolved = resolveModelConfig(model, {});
      expect(resolved.data_source_name).toBe('model_postgres');
      expect(resolved.database).toBe('model_db');
      expect(resolved.schema).toBe('model_schema');
    });
    it('should handle undefined values in config', () => {
      const model: Model = {
        name: 'test_model',
        dimensions: [{ name: 'id', searchable: false }],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const partialConfig = {
        data_source_name: 'config_postgres',
        database: undefined,
        schema: undefined,
      };
      // Explicitly-undefined config fields must stay undefined, not default
      const resolved = resolveModelConfig(model, partialConfig);
      expect(resolved.data_source_name).toBe('config_postgres');
      expect(resolved.database).toBeUndefined();
      expect(resolved.schema).toBeUndefined();
    });
    it('should preserve other model properties during resolution', () => {
      const model: Model = {
        name: 'test_model',
        description: 'Test model description',
        dimensions: [
          { name: 'id', searchable: false },
          { name: 'name', searchable: true, type: 'string' },
        ],
        measures: [{ name: 'count', type: 'integer' }],
        metrics: [{ name: 'total', expr: 'sum(count)' }],
        filters: [{ name: 'active', expr: 'status = "active"' }],
        relationships: [
          { name: 'user_rel', source_col: 'user_id', ref_col: 'users.id' },
        ],
      };
      // Resolution must only touch data_source_name/database/schema
      const resolved = resolveModelConfig(model, baseConfig);
      expect(resolved.name).toBe('test_model');
      expect(resolved.description).toBe('Test model description');
      expect(resolved.dimensions).toEqual(model.dimensions);
      expect(resolved.measures).toEqual(model.measures);
      expect(resolved.metrics).toEqual(model.metrics);
      expect(resolved.filters).toEqual(model.filters);
      expect(resolved.relationships).toEqual(model.relationships);
    });
    it('should handle multiple models with different override patterns', () => {
      const models: Model[] = [
        {
          name: 'model1',
          dimensions: [{ name: 'id', searchable: false }],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          name: 'model2',
          data_source_name: 'custom_source',
          dimensions: [{ name: 'id', searchable: false }],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          name: 'model3',
          database: 'custom_db',
          schema: 'custom_schema',
          dimensions: [{ name: 'id', searchable: false }],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
      ];
      const resolved = models.map((m) => resolveModelConfig(m, baseConfig));
      // Model 1: inherits all from global
      expect(resolved[0].data_source_name).toBe('global_postgres');
      expect(resolved[0].database).toBe('global_db');
      expect(resolved[0].schema).toBe('global_schema');
      // Model 2: overrides data_source, inherits others
      expect(resolved[1].data_source_name).toBe('custom_source');
      expect(resolved[1].database).toBe('global_db');
      expect(resolved[1].schema).toBe('global_schema');
      // Model 3: overrides database and schema, inherits data_source
      expect(resolved[2].data_source_name).toBe('global_postgres');
      expect(resolved[2].database).toBe('custom_db');
      expect(resolved[2].schema).toBe('custom_schema');
    });
  });
});
describe('parseModelFile', () => {
  it('should parse a single model file', async () => {
    const model = {
      name: 'users',
      description: 'User model',
      data_source_name: 'postgres',
      schema: 'public',
      dimensions: [{ name: 'id', searchable: false }],
      measures: [{ name: 'count' }],
    };
    const filePath = join(testDir, 'users.yml');
    await writeFile(filePath, yaml.dump(model));
    // A single-model document still parses to an array (of one)
    const result = await parseModelFile(filePath);
    expect(result).toHaveLength(1);
    expect(result[0].name).toBe('users');
    expect(result[0].description).toBe('User model');
    expect(result[0].dimensions).toHaveLength(1);
    expect(result[0].measures).toHaveLength(1);
  });
  it('should parse a multi-model file', async () => {
    const multiModel = {
      models: [
        {
          name: 'users',
          dimensions: [{ name: 'id', searchable: false }],
          measures: [],
        },
        {
          name: 'orders',
          dimensions: [{ name: 'order_id', searchable: false }],
          measures: [{ name: 'total' }],
        },
      ],
    };
    const filePath = join(testDir, 'models.yml');
    await writeFile(filePath, yaml.dump(multiModel));
    // File order of models must be preserved in the result
    const result = await parseModelFile(filePath);
    expect(result).toHaveLength(2);
    expect(result[0].name).toBe('users');
    expect(result[1].name).toBe('orders');
  });
  it('should throw ModelParsingError for invalid YAML', async () => {
    const filePath = join(testDir, 'invalid.yml');
    await writeFile(filePath, 'invalid: yaml: content: :::');
    await expect(parseModelFile(filePath)).rejects.toThrow(ModelParsingError);
  });
  it('should throw ModelParsingError for invalid model structure', async () => {
    const invalidModel = {
      // Missing required 'name' field
      dimensions: 'not an array',
    };
    const filePath = join(testDir, 'invalid-model.yml');
    await writeFile(filePath, yaml.dump(invalidModel));
    await expect(parseModelFile(filePath)).rejects.toThrow(ModelParsingError);
  });
  it('should throw ModelParsingError for empty file', async () => {
    const filePath = join(testDir, 'empty.yml');
    await writeFile(filePath, '');
    await expect(parseModelFile(filePath)).rejects.toThrow('Invalid YAML structure');
  });
  it('should include file path in error message', async () => {
    const filePath = join(testDir, 'error.yml');
    // 'null' is valid YAML but not a valid model document
    await writeFile(filePath, 'null');
    try {
      await parseModelFile(filePath);
      expect.fail('Should have thrown');
    } catch (error) {
      expect(error).toBeInstanceOf(ModelParsingError);
      expect((error as ModelParsingError).file).toBe(filePath);
      expect((error as ModelParsingError).getDetailedMessage()).toContain(filePath);
    }
  });
});
describe('validateModel', () => {
// validateModel structural rules: name required, at least one dimension or
// measure, and no duplicate names within each collection.
it('should validate a valid model', () => {
  const model: Model = {
    name: 'valid_model',
    data_source_name: 'postgres',
    schema: 'public',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [{ name: 'count' }],
    metrics: [],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(true);
  expect(result.errors).toHaveLength(0);
});
it('should require model name', () => {
  // Empty-string name must be rejected
  const model: Model = {
    name: '',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [],
    metrics: [],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(false);
  expect(result.errors).toContain('Model name is required');
});
it('should require at least one dimension or measure', () => {
  const model: Model = {
    name: 'empty_model',
    dimensions: [],
    measures: [],
    metrics: [],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(false);
  expect(result.errors).toContain('Model must have at least one dimension or measure');
});
it('should detect duplicate dimension names', () => {
  // Same name twice, even with differing attributes, is a duplicate
  const model: Model = {
    name: 'test',
    dimensions: [
      { name: 'duplicate', searchable: false },
      { name: 'duplicate', searchable: true },
    ],
    measures: [],
    metrics: [],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(false);
  expect(result.errors).toContain('Duplicate dimension name: duplicate');
});
it('should detect duplicate measure names', () => {
  const model: Model = {
    name: 'test',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [{ name: 'count' }, { name: 'count' }],
    metrics: [],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(false);
  expect(result.errors).toContain('Duplicate measure name: count');
});
it('should detect duplicate metric names', () => {
  // Duplicate names are rejected even when the expressions differ
  const model: Model = {
    name: 'test',
    dimensions: [{ name: 'id', searchable: false }],
    measures: [],
    metrics: [
      { name: 'total', expr: 'sum(amount)' },
      { name: 'total', expr: 'count(*)' },
    ],
    filters: [],
    relationships: [],
  };
  const result = validateModel(model);
  expect(result.valid).toBe(false);
  expect(result.errors).toContain('Duplicate metric name: total');
});
it('should require metric expressions', () => {
const model: Model = {
name: 'test',
dimensions: [{ name: 'id', searchable: false }],
measures: [],
metrics: [{ name: 'empty_metric', expr: '' }],
filters: [],
relationships: [],
};
const result = validateModel(model);
expect(result.valid).toBe(false);
expect(result.errors).toContain('Metric empty_metric must have an expression');
});
it('should detect duplicate filter names', () => {
const model: Model = {
name: 'test',
dimensions: [{ name: 'id', searchable: false }],
measures: [],
metrics: [],
filters: [
{ name: 'active', expr: 'status = "active"' },
{ name: 'active', expr: 'deleted_at IS NULL' },
],
relationships: [],
};
const result = validateModel(model);
expect(result.valid).toBe(false);
expect(result.errors).toContain('Duplicate filter name: active');
});
it('should require filter expressions', () => {
const model: Model = {
name: 'test',
dimensions: [{ name: 'id', searchable: false }],
measures: [],
metrics: [],
filters: [{ name: 'empty_filter', expr: '' }],
relationships: [],
};
const result = validateModel(model);
expect(result.valid).toBe(false);
expect(result.errors).toContain('Filter empty_filter must have an expression');
});
it('should validate relationship columns', () => {
const model: Model = {
name: 'test',
dimensions: [{ name: 'id', searchable: false }],
measures: [],
metrics: [],
filters: [],
relationships: [
{ name: 'incomplete', source_col: '', ref_col: 'users.id' },
],
};
const result = validateModel(model);
expect(result.valid).toBe(false);
expect(result.errors).toContain('Relationship incomplete must have source_col and ref_col');
});
it('should return multiple errors for multiple issues', () => {
const model: Model = {
name: '',
dimensions: [],
measures: [],
metrics: [{ name: 'metric1', expr: '' }],
filters: [
{ name: 'filter1', expr: 'valid' },
{ name: 'filter1', expr: 'duplicate' },
],
relationships: [],
};
const result = validateModel(model);
expect(result.valid).toBe(false);
expect(result.errors.length).toBeGreaterThan(2);
expect(result.errors).toContain('Model name is required');
expect(result.errors).toContain('Model must have at least one dimension or measure');
expect(result.errors).toContain('Metric metric1 must have an expression');
expect(result.errors).toContain('Duplicate filter name: filter1');
});
});
describe('generateDefaultSQL', () => {
  // Only the name/database/schema fields influence the generated SQL;
  // all list fields stay empty.
  const modelNamed = (name: string, location: Partial<Model> = {}): Model => ({
    name,
    dimensions: [],
    measures: [],
    metrics: [],
    filters: [],
    relationships: [],
    ...location,
  });

  it('should generate SQL with database and schema', () => {
    const sql = generateDefaultSQL(
      modelNamed('users', { database: 'analytics', schema: 'public' })
    );
    expect(sql).toBe('SELECT * FROM analytics.public.users');
  });

  it('should generate SQL with only schema', () => {
    const sql = generateDefaultSQL(modelNamed('orders', { schema: 'sales' }));
    expect(sql).toBe('SELECT * FROM sales.orders');
  });

  it('should generate SQL without database or schema', () => {
    const sql = generateDefaultSQL(modelNamed('products'));
    expect(sql).toBe('SELECT * FROM products');
  });
});
describe('ModelParsingError', () => {
  it('should format error message with Zod errors', () => {
    // Minimal stand-in for a ZodError: only `issues` is read by the
    // formatter, so the cast avoids constructing a real one.
    const fakeZodError = {
      issues: [
        { path: ['name'], message: 'Required' },
        { path: ['dimensions', 0, 'type'], message: 'Invalid type' },
      ],
    } as any;
    const error = new ModelParsingError('Parse failed', '/path/to/file.yml', fakeZodError);
    const message = error.getDetailedMessage();
    expect(message).toContain('Parse failed (/path/to/file.yml)');
    expect(message).toContain('name: Required');
    expect(message).toContain('dimensions.0.type: Invalid type');
  });

  it('should format error message without Zod errors', () => {
    const error = new ModelParsingError('Simple error', '/path/to/file.yml');
    expect(error.getDetailedMessage()).toBe('Simple error (/path/to/file.yml)');
  });
});
});

View File

@ -5,10 +5,7 @@ import React from 'react';
import { Auth } from './commands/auth.js';
import { DeployCommand } from './commands/deploy/deploy.js';
import { DeployOptionsSchema } from './commands/deploy/schemas.js';
import { HelloCommand } from './commands/hello.js';
import { InitCommand } from './commands/init.js';
import { InteractiveCommand } from './commands/interactive.js';
import { Main } from './commands/main.js';
// CLI metadata
program