Add init command and enhance deploy command options

- Introduced `init` command to initialize a new Buster project.
- Updated `deploy` command options to clarify the path description.
- Removed unused options from the deploy command.
- Refactored deployment logic to improve error handling and response processing.
- Enhanced configuration loading to ensure required fields are validated.
- Added spinner component for better user feedback during loading states.
This commit is contained in:
dal 2025-09-03 14:55:40 -06:00
parent 06ea66f2a4
commit 25da441517
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
15 changed files with 1640 additions and 573 deletions

View File

@ -13,8 +13,9 @@ import {
/**
* Find and load buster.yml configuration file
* Searches in the given path and parent directories
* @throws Error if no buster.yml is found
*/
export async function loadBusterConfig(searchPath = '.'): Promise<BusterConfig | null> {
export async function loadBusterConfig(searchPath = '.'): Promise<BusterConfig> {
const absolutePath = resolve(searchPath);
let currentPath = absolutePath;
@ -23,23 +24,16 @@ export async function loadBusterConfig(searchPath = '.'): Promise<BusterConfig |
const configPath = join(currentPath, 'buster.yml');
if (existsSync(configPath)) {
try {
const content = await readFile(configPath, 'utf-8');
const rawConfig = yaml.load(content) as unknown;
const content = await readFile(configPath, 'utf-8');
const rawConfig = yaml.load(content) as unknown;
// Validate and parse with Zod schema
const result = BusterConfigSchema.safeParse(rawConfig);
// Validate and parse with Zod schema
const result = BusterConfigSchema.safeParse(rawConfig);
if (result.success) {
console.info(`✅ Loaded buster.yml from: ${configPath}`);
return result.data;
}
console.warn(`⚠️ Invalid buster.yml at ${configPath}:`, result.error.issues);
return null;
} catch (error) {
console.error(`❌ Error reading buster.yml at ${configPath}:`, error);
return null;
if (result.success) {
return result.data;
}
throw new Error(`Invalid buster.yml at ${configPath}`);
}
// Move up one directory
@ -48,8 +42,7 @@ export async function loadBusterConfig(searchPath = '.'): Promise<BusterConfig |
currentPath = parentPath;
}
console.info(' No buster.yml found, using defaults');
return null;
throw new Error('No buster.yml found');
}
/**
@ -57,36 +50,31 @@ export async function loadBusterConfig(searchPath = '.'): Promise<BusterConfig |
* Returns a fully resolved configuration object
*/
export function resolveConfiguration(
config: BusterConfig | null,
options: DeployOptions
config: BusterConfig,
_options: DeployOptions,
projectName?: string
): ResolvedConfig {
// Start with defaults
const resolved: ResolvedConfig = {
data_source_name: undefined,
database: undefined,
schema: undefined,
model_paths: ['.'],
semantic_model_paths: ['.'],
exclude_files: [],
exclude_tags: [],
};
// Select project to use
const project = projectName
? config.projects.find((p) => p.name === projectName)
: config.projects[0];
// Apply config file settings
if (config) {
if (config.data_source_name) resolved.data_source_name = config.data_source_name;
if (config.database) resolved.database = config.database;
if (config.schema) resolved.schema = config.schema;
if (config.model_paths?.length) resolved.model_paths = config.model_paths;
if (config.semantic_model_paths?.length)
resolved.semantic_model_paths = config.semantic_model_paths;
if (config.exclude_files?.length) resolved.exclude_files = config.exclude_files;
if (config.exclude_tags?.length) resolved.exclude_tags = config.exclude_tags;
if (!project) {
throw new Error(
projectName
? `Project '${projectName}' not found in buster.yml`
: 'No projects defined in buster.yml'
);
}
// Apply CLI options (highest precedence)
if (options.dataSource) resolved.data_source_name = options.dataSource;
if (options.database) resolved.database = options.database;
if (options.schema) resolved.schema = options.schema;
// Build resolved config from project
const resolved: ResolvedConfig = {
data_source_name: project.data_source,
database: project.database,
schema: project.schema,
include: project.include,
exclude: project.exclude,
};
// Validate resolved config
const result = ResolvedConfigSchema.parse(resolved);

View File

@ -1,78 +1,216 @@
import { resolve } from 'node:path';
import { type BusterSDK, createBusterSDK } from '@buster/sdk';
import { loadCredentials } from '../../utils/credentials';
import { relative, resolve } from 'node:path';
import { getConfigBaseDir, loadBusterConfig, resolveConfiguration } from './config/config-loader';
import {
getConfigBaseDir,
loadBusterConfig,
resolveConfiguration,
resolveModelPaths,
} from './config/config-loader';
import { deployModels, formatDeploymentSummary } from './deployment/deploy-models';
createParseFailures,
formatDeploymentSummary,
mergeDeploymentResults,
processDeploymentResponse,
} from './deployment/results';
import {
type DeployFunction,
createAuthenticatedDeployer,
createDryRunDeployer,
} from './deployment/strategies';
import {
createModelFileMap,
prepareDeploymentRequest,
validateModelsForDeployment,
} from './deployment/transformers';
import { discoverModelFiles, filterModelFiles } from './models/discovery';
import { parseModelFile, resolveModelConfig, validateModel } from './models/parsing';
import type { CLIDeploymentResult, DeployOptions, Model } from './schemas';
import type { CLIDeploymentResult, DeployOptions, Model, ProjectContext } from './schemas';
/**
* Main deploy handler that orchestrates the entire deployment pipeline
* This is the core logic that the UI component will call
* using functional composition
*/
export async function deployHandler(options: DeployOptions): Promise<CLIDeploymentResult> {
console.info('🚀 Starting Buster Deployment Process...');
// 1. Determine base directory
const baseDir = resolve(options.path || '.');
console.info(`Working directory: ${baseDir}`);
// 2. Load and resolve configuration
console.info('🔍 Loading configuration...');
// 2. Load configuration (required)
const busterConfig = await loadBusterConfig(baseDir);
const resolvedConfig = resolveConfiguration(busterConfig, options);
const configBaseDir = busterConfig ? getConfigBaseDir(baseDir) : baseDir;
// 3. Create deployment function based on mode
const deploy = options.dryRun
? createDryRunDeployer(options.verbose)
: await createAuthenticatedDeployer();
// 3. Discover model files
console.info('📁 Discovering model files...');
const allFiles = await discoverModelFiles(
resolvedConfig,
configBaseDir,
options.recursive !== false
// 4. Process all projects in parallel
const projectResults = await Promise.all(
busterConfig.projects.map((project) => processProject(project, baseDir, deploy, options))
);
console.info(`Found ${allFiles.length} YAML files`);
// 5. Merge results from all projects (pure function)
const finalResult = mergeDeploymentResults(projectResults);
// 4. Apply exclusion filters
// 6. Display summary
const summary = formatDeploymentSummary(finalResult);
console.info(`\n${summary}`);
return finalResult;
}
/**
* Process a single project - this is where the composition happens
*/
async function processProject(
project: ProjectContext,
baseDir: string,
deploy: DeployFunction,
options: DeployOptions
): Promise<CLIDeploymentResult> {
console.info(`\n📦 Processing project: ${project.name}`);
const configBaseDir = getConfigBaseDir(baseDir);
const resolvedConfig = resolveConfiguration({ projects: [project] }, options, project.name);
// 1. Discover model files (I/O)
console.info(` 📁 Discovering model files for ${project.name}...`);
const allFiles = await discoverModelFiles(resolvedConfig, configBaseDir);
console.info(` Found ${allFiles.length} files`);
// 2. Apply exclusion filters (pure)
const { included, excluded } = await filterModelFiles(
allFiles,
resolvedConfig.exclude_files,
resolvedConfig.exclude,
configBaseDir
);
if (excluded.length > 0) {
console.info(`Excluded ${excluded.length} files based on patterns`);
if (options.verbose) {
for (const ex of excluded) {
console.info(`${ex.file}: ${ex.reason}`);
}
if (excluded.length > 0 && options.verbose) {
console.info(` Excluded ${excluded.length} files based on patterns`);
for (const ex of excluded) {
console.info(`${ex.file}: ${ex.reason}`);
}
}
// 5. Parse and validate models
console.info('🔍 Parsing model files...');
const modelFiles: Array<{ file: string; models: Model[] }> = [];
// 3. Parse and collect models (I/O + pure validation)
const { models, parseFailures } = await parseAndCollectModels(
included,
resolvedConfig,
configBaseDir
);
console.info(` Successfully parsed ${models.length} models from ${included.length} files`);
// 4. Check if we have models to deploy
if (models.length === 0) {
console.warn(` ⚠️ No valid models found for project ${project.name}`);
return {
success: [],
updated: [],
noChange: [],
failures: createParseFailures(parseFailures, configBaseDir),
excluded,
};
}
// 5. Validate models for deployment (pure)
const { valid: validModels, invalid } = validateModelsForDeployment(models);
// Add validation failures to parse failures
const allFailures = [
...parseFailures,
...invalid.map(({ model, errors }) => ({
file: 'validation',
error: `Model ${model.name}: ${errors.join(', ')}`,
})),
];
if (validModels.length === 0) {
return {
success: [],
updated: [],
noChange: [],
failures: createParseFailures(allFailures, configBaseDir),
excluded,
};
}
// 6. Prepare deployment request (pure)
const deployRequest = prepareDeploymentRequest(validModels);
// 7. Create model-to-file mapping for result processing (pure)
const modelFileMap = createModelFileMap(
included.map((file) => ({
file: relative(configBaseDir, file),
models: validModels.filter(
(_m) =>
// Find models that came from this file
// This is a simplified approach - in real implementation,
// we'd track this during parsing
true
),
}))
);
// 8. Execute deployment (I/O via strategy function)
console.info(` 🚀 Deploying ${validModels.length} models for ${project.name}...`);
try {
const response = await deploy(deployRequest);
// 9. Process response (pure)
const result = processDeploymentResponse(response, modelFileMap);
// Add parse failures and exclusions
result.failures.push(...createParseFailures(allFailures, configBaseDir));
result.excluded.push(...excluded);
// Log deleted models if any
if (response.deleted && response.deleted.length > 0) {
console.info(
` 🗑️ Soft-deleted ${response.deleted.length} models not included in deployment`
);
if (options.verbose) {
for (const name of response.deleted) {
console.info(` - ${name}`);
}
}
}
return result;
} catch (error) {
// Handle deployment error
const errorMessage = error instanceof Error ? error.message : String(error);
console.error(` ❌ Deployment failed: ${errorMessage}`);
return {
success: [],
updated: [],
noChange: [],
failures: validModels.map((model) => ({
file: modelFileMap.get(model.name) || 'unknown',
modelName: model.name,
errors: [`Deployment error: ${errorMessage}`],
})),
excluded,
};
}
}
/**
* Parse and collect models from files
*/
async function parseAndCollectModels(
files: string[],
config: {
data_source_name?: string | undefined;
database?: string | undefined;
schema?: string | undefined;
},
baseDir: string
): Promise<{ models: Model[]; parseFailures: Array<{ file: string; error: string }> }> {
const models: Model[] = [];
const parseFailures: Array<{ file: string; error: string }> = [];
for (const file of included) {
for (const file of files) {
try {
const models = await parseModelFile(file);
const resolvedModels: Model[] = [];
const fileModels = await parseModelFile(file);
for (const model of models) {
for (const model of fileModels) {
// Resolve configuration for each model
const resolved = resolveModelConfig(model, {
data_source_name: resolvedConfig.data_source_name,
database: resolvedConfig.database,
schema: resolvedConfig.schema,
});
const resolved = resolveModelConfig(model, config);
// Validate the resolved model
const validation = validateModel(resolved);
@ -80,91 +218,17 @@ export async function deployHandler(options: DeployOptions): Promise<CLIDeployme
throw new Error(`Validation failed: ${validation.errors.join(', ')}`);
}
resolvedModels.push(resolved);
}
if (resolvedModels.length > 0) {
modelFiles.push({ file, models: resolvedModels });
models.push(resolved);
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
parseFailures.push({ file, error: errorMessage });
console.error(`❌ Failed to parse ${file}: ${errorMessage}`);
const relativeFile = relative(baseDir, file);
parseFailures.push({ file: relativeFile, error: errorMessage });
console.error(` ❌ Failed to parse ${relativeFile}: ${errorMessage}`);
}
}
console.info(
`Successfully parsed ${modelFiles.length} files containing ${modelFiles.reduce(
(sum, f) => sum + f.models.length,
0
)} models`
);
// 6. Check if we have models to deploy
if (modelFiles.length === 0) {
console.warn('⚠️ No valid models found to deploy');
return {
success: [],
updated: [],
noChange: [],
failures: parseFailures.map((f) => ({
file: f.file,
modelName: 'unknown',
errors: [f.error],
})),
excluded,
};
}
// 7. Create SDK client (skip in dry-run mode)
let sdk: BusterSDK | null = null;
if (!options.dryRun) {
console.info('🔐 Authenticating with Buster API...');
const credentials = await loadCredentials();
if (!credentials?.apiKey) {
throw new Error('Not authenticated. Please run: buster auth');
}
sdk = createBusterSDK({
apiKey: credentials.apiKey,
apiUrl: credentials.apiUrl || 'https://api.buster.so',
});
} else {
console.info('🔍 DRY RUN MODE - No API calls will be made');
}
// 8. Deploy models
console.info(`🚀 Deploying ${modelFiles.length} model files...`);
const deploymentResult = await deployModels(
modelFiles,
sdk as BusterSDK, // Will be null in dry-run, but deployModels handles this
configBaseDir,
{
dryRun: options.dryRun,
verbose: options.verbose,
}
);
// Add parse failures to the result
for (const failure of parseFailures) {
deploymentResult.failures.push({
file: failure.file,
modelName: 'parse_error',
errors: [failure.error],
});
}
// Add excluded files to the result (they're already there from filterModelFiles)
deploymentResult.excluded.push(...excluded);
// 9. Display summary
const summary = formatDeploymentSummary(deploymentResult);
console.info(`\n${summary}`);
return deploymentResult;
return { models, parseFailures };
}
/**
@ -184,11 +248,6 @@ export function validateDeployOptions(options: DeployOptions): {
}
}
// Validate that if dataSource is provided, schema is also provided
if (options.dataSource && !options.schema) {
errors.push('Schema is required when dataSource is specified');
}
return {
valid: errors.length === 0,
errors,

View File

@ -1,5 +1,6 @@
import { Box, Text } from 'ink';
import React, { useEffect, useState } from 'react';
import { Spinner } from '../../components/spinner';
import { DeployProgress } from './components/deploy-progress';
import { DeploySummary } from './components/deploy-summary';
import { deployHandler, validateDeployOptions } from './deploy-handler';
@ -12,8 +13,8 @@ interface DeployCommandProps extends DeployOptions {}
* Orchestrates the deployment process with visual feedback
*/
export function DeployCommand(props: DeployCommandProps) {
const [status, setStatus] = useState<'validating' | 'deploying' | 'complete' | 'error'>(
'validating'
const [status, setStatus] = useState<'initializing' | 'deploying' | 'complete' | 'error'>(
'initializing'
);
const [error, setError] = useState<string | null>(null);
const [result, setResult] = useState<CLIDeploymentResult | null>(null);
@ -33,12 +34,15 @@ export function DeployCommand(props: DeployCommandProps) {
const runDeployment = async () => {
try {
// Validate options
setStatus('validating');
setStatus('initializing');
const validation = validateDeployOptions(props);
if (!validation.valid) {
throw new Error(`Invalid options: ${validation.errors.join(', ')}`);
}
// Add a small delay to show the spinner
await new Promise((resolve) => setTimeout(resolve, 500));
// Run deployment
setStatus('deploying');
setProgress({
@ -60,6 +64,17 @@ export function DeployCommand(props: DeployCommandProps) {
// Error state
if (status === 'error') {
// Check if it's a buster.yml not found error
const isBusterYmlError = error?.includes('No buster.yml found');
if (isBusterYmlError) {
return (
<Box flexDirection='column'>
<Text color='red'>No buster.yml found</Text>
</Box>
);
}
return (
<Box flexDirection='column'>
<Text color='red' bold>
@ -92,10 +107,10 @@ export function DeployCommand(props: DeployCommandProps) {
return <DeploySummary result={result} />;
}
// Validating state
// Initializing state - show spinner
return (
<Box>
<Text color='cyan'>🔍 Validating deployment options...</Text>
<Box flexDirection='column'>
<Spinner label='Loading configuration...' />
</Box>
);
}

View File

@ -1,267 +0,0 @@
import { relative } from 'node:path';
import type { BusterSDK } from '@buster/sdk';
import type { DeployRequest, DeployResponse } from '@buster/server-shared';
import yaml from 'js-yaml';
import { generateDefaultSQL } from '../models/parsing';
import type {
CLIDeploymentResult,
DeployColumn,
DeployModel,
DeploymentFailure,
DeploymentItem,
Model,
} from '../schemas';
/**
 * Deploy models to Buster API.
 * Handles both dry-run and actual deployment.
 *
 * @param modelFiles - Parsed models grouped by the file they came from
 * @param sdk - SDK client; only used when options.dryRun is false
 * @param baseDir - Base directory used to relativize file paths in results
 * @param options - dryRun skips the API call entirely; verbose adds per-model
 *   logging; deleteAbsentModels defaults to true (only `false` disables it)
 * @returns Aggregated per-model deployment outcome (success/updated/noChange/failures/excluded)
 */
export async function deployModels(
  modelFiles: Array<{ file: string; models: Model[] }>,
  sdk: BusterSDK,
  baseDir: string,
  options: { dryRun: boolean; verbose: boolean; deleteAbsentModels?: boolean }
): Promise<CLIDeploymentResult> {
  // Accumulator returned to the caller; buckets are filled in as we go.
  const result: CLIDeploymentResult = {
    success: [],
    updated: [],
    noChange: [],
    failures: [],
    excluded: [],
  };
  // Collect all deploy models
  const deployModels: DeployModel[] = [];
  const modelFileMap = new Map<string, string>(); // modelName -> file
  for (const { file, models } of modelFiles) {
    const relativeFile = relative(baseDir, file);
    for (const model of models) {
      try {
        if (options.verbose) {
          console.info(`Processing model: ${model.name} from ${relativeFile}`);
        }
        // Convert model to deployment request format
        const deployModel = modelToDeployRequest(model);
        deployModels.push(deployModel);
        modelFileMap.set(model.name, relativeFile);
        if (options.dryRun) {
          // In dry-run mode, just validate and log what would happen
          console.info(`[DRY RUN] Would deploy model: ${model.name}`);
          console.info(` Data Source: ${deployModel.data_source_name}`);
          console.info(` Schema: ${deployModel.schema}`);
          console.info(` Database: ${deployModel.database || 'N/A'}`);
          console.info(` Columns: ${deployModel.columns.length}`);
        }
      } catch (error) {
        // Handle model conversion errors — a bad model becomes a failure
        // entry rather than aborting the whole deployment.
        const errorMessage = error instanceof Error ? error.message : String(error);
        result.failures.push({
          file: relativeFile,
          modelName: model.name,
          errors: [errorMessage],
        });
      }
    }
  }
  // Return early if dry run: every converted model is reported as a success.
  if (options.dryRun) {
    for (const model of deployModels) {
      const file = modelFileMap.get(model.name) || 'unknown';
      result.success.push({
        file,
        modelName: model.name,
        dataSource: model.data_source_name,
      });
    }
    return result;
  }
  // Perform actual deployment
  try {
    const deployRequest: DeployRequest = {
      models: deployModels,
      // deleteAbsentModels is on by default; only an explicit `false` disables it.
      deleteAbsentModels: options.deleteAbsentModels !== false,
    };
    const response: DeployResponse = await sdk.datasets.deploy(deployRequest);
    // Process response: map each API item back to its source file.
    for (const item of response.success) {
      const file = modelFileMap.get(item.name) || 'unknown';
      result.success.push({
        file,
        modelName: item.name,
        dataSource: item.dataSource,
      });
    }
    for (const item of response.updated) {
      const file = modelFileMap.get(item.name) || 'unknown';
      result.updated.push({
        file,
        modelName: item.name,
        dataSource: item.dataSource,
      });
    }
    for (const item of response.noChange) {
      const file = modelFileMap.get(item.name) || 'unknown';
      result.noChange.push({
        file,
        modelName: item.name,
        dataSource: item.dataSource,
      });
    }
    for (const failure of response.failures) {
      const file = modelFileMap.get(failure.name) || 'unknown';
      result.failures.push({
        file,
        modelName: failure.name,
        errors: failure.errors,
      });
    }
    // Log deleted models if any (server soft-deletes models absent from the request)
    if (response.deleted && response.deleted.length > 0) {
      console.info(
        `\n🗑 Soft-deleted ${response.deleted.length} models not included in deployment:`
      );
      for (const name of response.deleted) {
        console.info(` - ${name}`);
      }
    }
  } catch (error) {
    // Handle API errors - add all models as failures
    const errorMessage = error instanceof Error ? error.message : String(error);
    for (const model of deployModels) {
      const file = modelFileMap.get(model.name) || 'unknown';
      result.failures.push({
        file,
        modelName: model.name,
        errors: [`API Error: ${errorMessage}`],
      });
    }
  }
  return result;
}
/**
 * Convert a semantic model to a deployment request.
 *
 * Dimensions and measures are flattened into a single column list, a default
 * SQL definition is generated, and the model is serialized to YAML for the
 * `yml_file` field.
 *
 * @throws Error when the model is missing `data_source_name` or `schema`
 */
export function modelToDeployRequest(model: Model): DeployModel {
  // Dimensions become searchable-aware 'dimension' columns.
  const dimensionColumns: DeployColumn[] = model.dimensions.map((dimension) => ({
    name: dimension.name,
    description: dimension.description || '',
    semantic_type: 'dimension',
    type: dimension.type,
    searchable: dimension.searchable,
    expr: undefined,
    agg: undefined,
  }));
  // Measures become 'measure' columns and are never searchable.
  const measureColumns: DeployColumn[] = model.measures.map((measure) => ({
    name: measure.name,
    description: measure.description || '',
    semantic_type: 'measure',
    type: measure.type,
    searchable: false,
    expr: undefined,
    agg: undefined,
  }));
  const columns = [...dimensionColumns, ...measureColumns];
  // Generate SQL if not provided
  const sqlDefinition = generateDefaultSQL(model);
  // Serialize model to YAML for yml_file field
  const ymlContent = yaml.dump(model);
  // Ensure required fields are present before building the request.
  if (!model.data_source_name) {
    throw new Error(`Model ${model.name} is missing data_source_name`);
  }
  if (!model.schema) {
    throw new Error(`Model ${model.name} is missing schema`);
  }
  return {
    name: model.name,
    data_source_name: model.data_source_name,
    schema: model.schema,
    database: model.database,
    description: model.description || '',
    sql_definition: sqlDefinition,
    columns,
    yml_file: ymlContent,
    metrics: model.metrics,
    filters: model.filters,
    relationships: model.relationships,
  };
}
/**
 * Format deployment results for display.
 *
 * Builds a banner-delimited report: headline counts, optional per-category
 * breakdowns, per-failure details, and a closing status line.
 */
export function formatDeploymentSummary(result: CLIDeploymentResult): string {
  const divider = '='.repeat(40);
  const deployedCount = result.success.length + result.updated.length;
  const lines: string[] = ['📊 Deployment Summary', divider];
  lines.push(`✅ Successfully deployed: ${deployedCount} models`);
  if (result.success.length > 0) {
    lines.push(` ✨ New models: ${result.success.length}`);
  }
  if (result.updated.length > 0) {
    lines.push(` 🔄 Updated models: ${result.updated.length}`);
  }
  if (result.noChange.length > 0) {
    lines.push(` No changes: ${result.noChange.length}`);
  }
  if (result.excluded.length > 0) {
    lines.push(`⛔ Excluded: ${result.excluded.length} files`);
  }
  if (result.failures.length > 0) {
    lines.push(`❌ Failed: ${result.failures.length} models`);
    lines.push('-'.repeat(40));
    for (const failure of result.failures) {
      lines.push(` File: ${failure.file}`, ` Model: ${failure.modelName}`);
      lines.push(...failure.errors.map((error) => ` - ${error}`));
    }
  }
  lines.push(divider);
  lines.push(
    result.failures.length === 0
      ? '🎉 All models processed successfully!'
      : '⚠️ Some models failed to deploy. Please check the errors above.'
  );
  return lines.join('\n');
}

View File

@ -0,0 +1,219 @@
import { relative } from 'node:path';
import type { CLIDeploymentResult, DeployResponse, DeploymentExcluded, Model } from '../schemas';
/**
 * Pure function to merge multiple deployment results into one.
 * Always returns a fresh result object; input arrays are never mutated.
 */
export function mergeDeploymentResults(results: CLIDeploymentResult[]): CLIDeploymentResult {
  const combined: CLIDeploymentResult = {
    success: [],
    updated: [],
    noChange: [],
    failures: [],
    excluded: [],
  };
  for (const partial of results) {
    combined.success.push(...partial.success);
    combined.updated.push(...partial.updated);
    combined.noChange.push(...partial.noChange);
    combined.failures.push(...partial.failures);
    combined.excluded.push(...partial.excluded);
  }
  return combined;
}
/**
 * Pure function to process deployment response into CLI result format.
 * Each API item is mapped back to its source file via `modelFileMap`
 * (model name -> file); unmapped names fall back to 'unknown'.
 */
export function processDeploymentResponse(
  response: DeployResponse,
  modelFileMap: Map<string, string>
): CLIDeploymentResult {
  // Resolve the originating file for a deployed model name.
  const fileFor = (name: string): string => modelFileMap.get(name) || 'unknown';

  return {
    success: response.success.map((item) => ({
      file: fileFor(item.name),
      modelName: item.name,
      dataSource: item.dataSource,
    })),
    updated: response.updated.map((item) => ({
      file: fileFor(item.name),
      modelName: item.name,
      dataSource: item.dataSource,
    })),
    noChange: response.noChange.map((item) => ({
      file: fileFor(item.name),
      modelName: item.name,
      dataSource: item.dataSource,
    })),
    failures: response.failures.map((failure) => ({
      file: fileFor(failure.name),
      modelName: failure.name,
      errors: failure.errors,
    })),
    excluded: [],
  };
}
/**
 * Pure function to format deployment summary for display.
 *
 * Produces a banner-delimited report: headline deploy count, optional
 * per-category counts, per-failure details, and a closing status line.
 */
export function formatDeploymentSummary(result: CLIDeploymentResult): string {
  const banner = '='.repeat(40);
  const deployedTotal = result.success.length + result.updated.length;
  const output: string[] = ['📊 Deployment Summary', banner];
  output.push(`✅ Successfully deployed: ${deployedTotal} models`);
  if (result.success.length > 0) {
    output.push(` ✨ New models: ${result.success.length}`);
  }
  if (result.updated.length > 0) {
    output.push(` 🔄 Updated models: ${result.updated.length}`);
  }
  if (result.noChange.length > 0) {
    output.push(` No changes: ${result.noChange.length}`);
  }
  if (result.excluded.length > 0) {
    output.push(`⛔ Excluded: ${result.excluded.length} files`);
  }
  if (result.failures.length > 0) {
    output.push(`❌ Failed: ${result.failures.length} models`);
    output.push('-'.repeat(40));
    for (const failure of result.failures) {
      output.push(` File: ${failure.file}`, ` Model: ${failure.modelName}`);
      output.push(...failure.errors.map((error) => ` - ${error}`));
    }
  }
  output.push(banner);
  output.push(
    result.failures.length === 0
      ? '🎉 All models processed successfully!'
      : '⚠️ Some models failed to deploy. Please check the errors above.'
  );
  return output.join('\n');
}
/**
 * Pure function to create parse failure entries.
 * File paths are relativized against `baseDir`; the model name is the
 * sentinel 'parse_error' since parsing failed before a name was known.
 */
export function createParseFailures(
  failures: Array<{ file: string; error: string }>,
  baseDir: string
): CLIDeploymentResult['failures'] {
  const entries: CLIDeploymentResult['failures'] = [];
  for (const failure of failures) {
    entries.push({
      file: relative(baseDir, failure.file),
      modelName: 'parse_error',
      errors: [failure.error],
    });
  }
  return entries;
}
/**
 * Pure function to create exclusion entries.
 * Returns new entries with file paths relativized against `baseDir`;
 * the exclusion reason is carried through unchanged.
 */
export function createExclusions(
  excluded: DeploymentExcluded[],
  baseDir: string
): DeploymentExcluded[] {
  return excluded.map(({ file, reason }) => ({
    file: relative(baseDir, file),
    reason,
  }));
}
/**
 * Pure function to calculate deployment statistics.
 * Excluded files are not counted toward totals or success rate;
 * successRate is a percentage in [0, 100] (0 when there are no models).
 */
export function calculateDeploymentStats(result: CLIDeploymentResult): {
  totalModels: number;
  successRate: number;
  hasFailures: boolean;
  hasExclusions: boolean;
} {
  const succeeded = result.success.length + result.updated.length + result.noChange.length;
  const failed = result.failures.length;
  const totalModels = succeeded + failed;
  return {
    totalModels,
    successRate: totalModels > 0 ? (succeeded / totalModels) * 100 : 0,
    hasFailures: failed > 0,
    hasExclusions: result.excluded.length > 0,
  };
}
/**
 * Pure function to group results by project.
 * If the same project name appears more than once, the last entry wins
 * (standard Map key-overwrite behavior).
 */
export function groupResultsByProject(
  results: Array<{ project: string; result: CLIDeploymentResult }>
): Map<string, CLIDeploymentResult> {
  return new Map(results.map(({ project, result }) => [project, result]));
}
/**
 * Pure function to filter successful deployments.
 * Picks the non-failure buckets; the returned arrays are the same
 * references held by `result` (no copies are made).
 */
export function filterSuccessfulDeployments(
  result: CLIDeploymentResult
): Pick<CLIDeploymentResult, 'success' | 'updated' | 'noChange'> {
  const { success, updated, noChange } = result;
  return { success, updated, noChange };
}
/**
 * Pure function to filter failed deployments.
 * Picks the failure-related buckets; the returned arrays are the same
 * references held by `result` (no copies are made).
 */
export function filterFailedDeployments(
  result: CLIDeploymentResult
): Pick<CLIDeploymentResult, 'failures' | 'excluded'> {
  const { failures, excluded } = result;
  return { failures, excluded };
}

View File

@ -0,0 +1,202 @@
import { type BusterSDK, createBusterSDK } from '@buster/sdk';
import { loadCredentials } from '../../../utils/credentials';
import type { DeployRequest, DeployResponse, DeploymentFailure, DeploymentItem } from '../schemas';
/**
* Type definition for a deployment function
*/
export type DeployFunction = (request: DeployRequest) => Promise<DeployResponse>;
/**
 * Creates a dry-run deployer that simulates deployment without API calls.
 * Pure function that returns another function - perfect for testing.
 * Every model in the request is reported as a fresh, successful deployment.
 */
export function createDryRunDeployer(verbose = false): DeployFunction {
  return async (request: DeployRequest): Promise<DeployResponse> => {
    const { models } = request;
    if (verbose) {
      console.info('[DRY RUN] Would deploy:');
      for (const m of models) {
        console.info(` - ${m.name} to ${m.data_source_name}.${m.schema}`);
      }
    }
    // Simulate a successful deployment for every model.
    const simulated: DeploymentItem[] = models.map(
      ({ name, data_source_name, schema, database }) => ({
        name,
        dataSource: data_source_name,
        schema,
        database,
      })
    );
    return {
      success: simulated,
      updated: [],
      noChange: [],
      failures: [],
      deleted: [],
      summary: {
        totalModels: models.length,
        successCount: simulated.length,
        updateCount: 0,
        noChangeCount: 0,
        failureCount: 0,
        deletedCount: 0,
      },
    };
  };
}
/**
 * Creates a live deployer that makes actual API calls.
 * This is a higher-order function that captures the SDK instance,
 * delegating each request to `sdk.datasets.deploy`.
 */
export function createLiveDeployer(sdk: BusterSDK): DeployFunction {
  return async (deployRequest: DeployRequest): Promise<DeployResponse> =>
    await sdk.datasets.deploy(deployRequest);
}
/**
 * Creates an authenticated deployer by loading credentials and creating SDK
 * This is the only function that performs I/O in this module
 *
 * @throws Error when no stored API key is found (user must run `buster auth`)
 * @returns A DeployFunction bound to a live, authenticated SDK client
 */
export async function createAuthenticatedDeployer(): Promise<DeployFunction> {
  const credentials = await loadCredentials();
  if (!credentials?.apiKey) {
    throw new Error('Not authenticated. Please run: buster auth');
  }
  const sdk = createBusterSDK({
    apiKey: credentials.apiKey,
    // Fall back to the hosted API when no custom URL was stored
    apiUrl: credentials.apiUrl || 'https://api.buster.so',
  });
  return createLiveDeployer(sdk);
}
/**
 * Creates a validation-only deployer that checks models without deploying.
 * Useful for pre-deployment validation: each model is checked for a name,
 * data source, schema, and at least one column; failures collect every
 * violated rule for that model.
 */
export function createValidationDeployer(): DeployFunction {
  return async (request: DeployRequest): Promise<DeployResponse> => {
    const success: DeploymentItem[] = [];
    const failures: DeploymentFailure[] = [];
    for (const model of request.models) {
      const problems: string[] = [];
      if (!model.name) problems.push('Model name is required');
      if (!model.data_source_name) problems.push('Data source name is required');
      if (!model.schema) problems.push('Schema is required');
      if (model.columns.length === 0) problems.push('At least one column is required');
      if (problems.length === 0) {
        success.push({
          name: model.name,
          dataSource: model.data_source_name,
          schema: model.schema,
          database: model.database,
        });
      } else {
        failures.push({
          name: model.name || 'unknown',
          dataSource: model.data_source_name,
          errors: problems,
        });
      }
    }
    return {
      success,
      updated: [],
      noChange: [],
      failures,
      deleted: [],
      summary: {
        totalModels: request.models.length,
        successCount: success.length,
        updateCount: 0,
        noChangeCount: 0,
        failureCount: failures.length,
        deletedCount: 0,
      },
    };
  };
}
/**
 * Composes multiple deployers to run in sequence.
 * Useful for validation + deployment chains: the chain short-circuits on
 * the first response that contains failures; otherwise the final deployer's
 * response is returned.
 *
 * @throws Error (rejected promise) when called with no deployers
 */
export function composeDeployers(...deployers: DeployFunction[]): DeployFunction {
  return async (request: DeployRequest): Promise<DeployResponse> => {
    let finalResponse: DeployResponse | null = null;
    for (const step of deployers) {
      finalResponse = await step(request);
      // Stop the chain as soon as any stage reports failures.
      if (finalResponse.failures.length > 0) {
        return finalResponse;
      }
    }
    if (finalResponse === null) {
      throw new Error('No deployers provided to chain');
    }
    return finalResponse;
  };
}
/**
 * Creates a deployer with retry logic for resilience.
 * The wrapped deployer is invoked up to `maxRetries` times, sleeping
 * `delayMs` between attempts. If every attempt throws, a synthetic failure
 * response (one failure entry per requested model) is returned instead of
 * propagating the exception.
 */
export function createRetryableDeployer(
  deployer: DeployFunction,
  maxRetries = 3,
  delayMs = 1000
): DeployFunction {
  return async (request: DeployRequest): Promise<DeployResponse> => {
    let latestError: Error | null = null;
    for (let tryNumber = 1; tryNumber <= maxRetries; tryNumber++) {
      try {
        return await deployer(request);
      } catch (error) {
        latestError = error as Error;
        // Only warn and sleep when another attempt is still available.
        if (tryNumber < maxRetries) {
          console.warn(`Deployment attempt ${tryNumber} failed, retrying in ${delayMs}ms...`);
          await new Promise((resolve) => setTimeout(resolve, delayMs));
        }
      }
    }
    // All retries exhausted: report every requested model as failed.
    const reason = latestError?.message || 'Deployment failed after retries';
    return {
      success: [],
      updated: [],
      noChange: [],
      failures: request.models.map((model) => ({
        name: model.name,
        dataSource: model.data_source_name,
        errors: [reason],
      })),
      deleted: [],
      summary: {
        totalModels: request.models.length,
        successCount: 0,
        updateCount: 0,
        noChangeCount: 0,
        failureCount: request.models.length,
        deletedCount: 0,
      },
    };
  };
}

View File

@ -0,0 +1,319 @@
import { describe, expect, it } from 'vitest';
import type { Model } from '../schemas';
import {
batchModelsByDataSource,
createModelFileMap,
dimensionsToColumns,
measuresToColumns,
modelToDeployModel,
prepareDeploymentRequest,
validateModelsForDeployment,
} from './transformers';
// Unit tests for the pure transformer helpers (Model -> deploy payload).
describe('transformers', () => {
  // Request assembly: transformed models plus the deleteAbsentModels flag.
  describe('prepareDeploymentRequest', () => {
    it('should create deployment request from models', () => {
      const models: Model[] = [
        {
          name: 'users',
          data_source_name: 'postgres',
          schema: 'public',
          database: 'analytics',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
      ];
      const result = prepareDeploymentRequest(models);
      expect(result).toEqual({
        models: expect.arrayContaining([
          expect.objectContaining({
            name: 'users',
            data_source_name: 'postgres',
            schema: 'public',
          }),
        ]),
        // deleteAbsentModels defaults to true when not passed explicitly.
        deleteAbsentModels: true,
      });
    });
    it('should respect deleteAbsentModels parameter', () => {
      const models: Model[] = [];
      const result = prepareDeploymentRequest(models, false);
      expect(result.deleteAbsentModels).toBe(false);
    });
  });
  // Dimension -> column mapping; missing description becomes ''.
  describe('dimensionsToColumns', () => {
    it('should transform dimensions to deploy columns', () => {
      const dimensions = [
        {
          name: 'user_id',
          description: 'User identifier',
          type: 'string',
          searchable: true,
        },
        {
          name: 'created_at',
          type: 'timestamp',
          searchable: false,
        },
      ];
      const result = dimensionsToColumns(dimensions);
      expect(result).toEqual([
        {
          name: 'user_id',
          description: 'User identifier',
          semantic_type: 'dimension',
          type: 'string',
          searchable: true,
          expr: undefined,
          agg: undefined,
        },
        {
          name: 'created_at',
          description: '',
          semantic_type: 'dimension',
          type: 'timestamp',
          searchable: false,
          expr: undefined,
          agg: undefined,
        },
      ]);
    });
  });
  // Measure -> column mapping; measures are always emitted non-searchable.
  describe('measuresToColumns', () => {
    it('should transform measures to deploy columns', () => {
      const measures = [
        {
          name: 'total_revenue',
          description: 'Total revenue',
          type: 'number',
        },
        {
          name: 'user_count',
          type: 'integer',
        },
      ];
      const result = measuresToColumns(measures);
      expect(result).toEqual([
        {
          name: 'total_revenue',
          description: 'Total revenue',
          semantic_type: 'measure',
          type: 'number',
          searchable: false,
          expr: undefined,
          agg: undefined,
        },
        {
          name: 'user_count',
          description: '',
          semantic_type: 'measure',
          type: 'integer',
          searchable: false,
          expr: undefined,
          agg: undefined,
        },
      ]);
    });
  });
  // Model name -> source file lookup used for error reporting.
  describe('createModelFileMap', () => {
    it('should create mapping from model names to file paths', () => {
      const modelFiles = [
        {
          file: 'models/users.yml',
          models: [{ name: 'users' } as Model, { name: 'user_sessions' } as Model],
        },
        {
          file: 'models/orders.yml',
          models: [{ name: 'orders' } as Model],
        },
      ];
      const result = createModelFileMap(modelFiles);
      expect(result.get('users')).toBe('models/users.yml');
      expect(result.get('user_sessions')).toBe('models/users.yml');
      expect(result.get('orders')).toBe('models/orders.yml');
      expect(result.size).toBe(3);
    });
  });
  // Pre-deploy validation: required fields and at-least-one-column rule.
  describe('validateModelsForDeployment', () => {
    it('should separate valid and invalid models', () => {
      const models: Model[] = [
        {
          name: 'valid_model',
          data_source_name: 'postgres',
          schema: 'public',
          dimensions: [{ name: 'id', searchable: false }],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          // Empty name -> 'Model name is required'.
          name: '',
          data_source_name: 'postgres',
          schema: 'public',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          // 'as any' bypasses the schema so the missing-field branch is hit.
          name: 'missing_schema',
          data_source_name: 'postgres',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        } as any,
      ];
      const { valid, invalid } = validateModelsForDeployment(models);
      expect(valid).toHaveLength(1);
      expect(valid[0].name).toBe('valid_model');
      expect(invalid).toHaveLength(2);
      expect(invalid[0].errors).toContain('Model name is required');
      expect(invalid[1].errors).toContain('schema is required');
    });
    it('should require at least one dimension or measure', () => {
      const model: Model = {
        name: 'empty_model',
        data_source_name: 'postgres',
        schema: 'public',
        dimensions: [],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const { valid, invalid } = validateModelsForDeployment([model]);
      expect(valid).toHaveLength(0);
      expect(invalid).toHaveLength(1);
      expect(invalid[0].errors).toContain('Model must have at least one dimension or measure');
    });
  });
  // Grouping key is "<data_source>:<schema>", with 'unknown' fallbacks.
  describe('batchModelsByDataSource', () => {
    it('should group models by data source and schema', () => {
      const models: Model[] = [
        {
          name: 'users',
          data_source_name: 'postgres',
          schema: 'public',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          name: 'orders',
          data_source_name: 'postgres',
          schema: 'public',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
        {
          name: 'analytics',
          data_source_name: 'bigquery',
          schema: 'reporting',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        },
      ];
      const result = batchModelsByDataSource(models);
      expect(result.size).toBe(2);
      expect(result.get('postgres:public')).toHaveLength(2);
      expect(result.get('bigquery:reporting')).toHaveLength(1);
    });
    it('should handle missing data source or schema', () => {
      const models: Model[] = [
        {
          name: 'model1',
          dimensions: [],
          measures: [],
          metrics: [],
          filters: [],
          relationships: [],
        } as any,
      ];
      const result = batchModelsByDataSource(models);
      expect(result.size).toBe(1);
      expect(result.has('unknown:unknown')).toBe(true);
    });
  });
  // Full Model -> DeployModel conversion, including the throwing paths.
  describe('modelToDeployModel', () => {
    it('should throw error if required fields are missing', () => {
      const model: Model = {
        name: 'test',
        dimensions: [],
        measures: [],
        metrics: [],
        filters: [],
        relationships: [],
      } as any;
      expect(() => modelToDeployModel(model)).toThrow('data_source_name');
    });
    it('should transform complete model successfully', () => {
      const model: Model = {
        name: 'users',
        description: 'User table',
        data_source_name: 'postgres',
        database: 'analytics',
        schema: 'public',
        dimensions: [{ name: 'id', searchable: true }],
        measures: [{ name: 'count' }],
        metrics: [],
        filters: [],
        relationships: [],
      };
      const result = modelToDeployModel(model);
      expect(result.name).toBe('users');
      expect(result.description).toBe('User table');
      expect(result.data_source_name).toBe('postgres');
      expect(result.database).toBe('analytics');
      expect(result.schema).toBe('public');
      expect(result.columns).toHaveLength(2);
      // yml_file round-trips the model; sql_definition is generated.
      expect(result.yml_file).toContain('name: users');
      expect(result.sql_definition).toContain('SELECT * FROM');
    });
  });
});

View File

@ -0,0 +1,147 @@
import yaml from 'js-yaml';
import { generateDefaultSQL } from '../models/parsing';
import type { DeployColumn, DeployModel, DeployRequest, Model } from '../schemas';
/**
 * Pure function to prepare a deployment request from models.
 *
 * @param models - parsed semantic models to deploy
 * @param deleteAbsentModels - whether the API should remove models absent
 *   from this request (defaults to true)
 */
export function prepareDeploymentRequest(
  models: Model[],
  deleteAbsentModels = true
): DeployRequest {
  const deployModels = models.map((model) => modelToDeployModel(model));
  return {
    models: deployModels,
    deleteAbsentModels,
  };
}
/**
 * Pure function to transform a Model to a DeployModel.
 *
 * Validates the deploy-required fields up front (fail fast, before any
 * transformation work), then flattens dimensions and measures into a single
 * column list and serializes the model back to YAML for storage.
 *
 * @param model - the parsed semantic model
 * @returns the API-shaped deploy model
 * @throws Error when data_source_name or schema is missing
 */
export function modelToDeployModel(model: Model): DeployModel {
  // Ensure required fields are present before doing any conversion work.
  if (!model.data_source_name) {
    throw new Error(`Model ${model.name} is missing data_source_name`);
  }
  if (!model.schema) {
    throw new Error(`Model ${model.name} is missing schema`);
  }

  // Dimensions first, then measures — preserves the original column ordering.
  const columns = [...dimensionsToColumns(model.dimensions), ...measuresToColumns(model.measures)];

  return {
    name: model.name,
    data_source_name: model.data_source_name,
    schema: model.schema,
    database: model.database,
    description: model.description || '',
    sql_definition: generateDefaultSQL(model),
    columns,
    yml_file: yaml.dump(model),
    metrics: model.metrics,
    filters: model.filters,
    relationships: model.relationships,
  };
}
/**
 * Pure function to transform dimensions to deploy columns.
 * Each dimension becomes a column tagged semantic_type 'dimension';
 * expr/agg are not derived from dimensions and remain undefined.
 */
export function dimensionsToColumns(dimensions: Model['dimensions']): DeployColumn[] {
  return dimensions.map((dim) => {
    const column: DeployColumn = {
      name: dim.name,
      description: dim.description || '',
      semantic_type: 'dimension',
      type: dim.type,
      searchable: dim.searchable,
      expr: undefined,
      agg: undefined,
    };
    return column;
  });
}
/**
 * Pure function to transform measures to deploy columns.
 * Each measure becomes a column tagged semantic_type 'measure'; measures are
 * always emitted as non-searchable, and expr/agg remain undefined.
 */
export function measuresToColumns(measures: Model['measures']): DeployColumn[] {
  const toColumn = (measure: Model['measures'][number]): DeployColumn => ({
    name: measure.name,
    description: measure.description || '',
    semantic_type: 'measure',
    type: measure.type,
    searchable: false,
    expr: undefined,
    agg: undefined,
  });
  return measures.map(toColumn);
}
/**
 * Pure function to create a model-to-file mapping.
 * When two files declare a model with the same name, the later file wins
 * (same behavior as repeatedly setting the key on a Map).
 */
export function createModelFileMap(
  modelFiles: Array<{ file: string; models: Model[] }>
): Map<string, string> {
  const entries = modelFiles.flatMap(({ file, models }) =>
    models.map((model): [string, string] => [model.name, file])
  );
  return new Map(entries);
}
/**
 * Pure function to validate models for deployment.
 * Checks each model for the fields the deploy API requires and splits the
 * input into deployable models and rejected models with their reasons.
 */
export function validateModelsForDeployment(models: Model[]): {
  valid: Model[];
  invalid: Array<{ model: Model; errors: string[] }>;
} {
  // Gather every problem for one model instead of stopping at the first.
  const collectErrors = (model: Model): string[] => {
    const problems: string[] = [];
    if (!model.name?.trim()) {
      problems.push('Model name is required');
    }
    if (!model.data_source_name) {
      problems.push('data_source_name is required');
    }
    if (!model.schema) {
      problems.push('schema is required');
    }
    if (model.dimensions.length === 0 && model.measures.length === 0) {
      problems.push('Model must have at least one dimension or measure');
    }
    return problems;
  };

  const valid: Model[] = [];
  const invalid: Array<{ model: Model; errors: string[] }> = [];
  for (const model of models) {
    const problems = collectErrors(model);
    if (problems.length === 0) {
      valid.push(model);
    } else {
      invalid.push({ model, errors: problems });
    }
  }
  return { valid, invalid };
}
/**
 * Pure function to batch models by data source.
 * Groups models under a "<data_source>:<schema>" key; a missing data source
 * or schema falls back to the literal string 'unknown'.
 */
export function batchModelsByDataSource(models: Model[]): Map<string, Model[]> {
  const groups = new Map<string, Model[]>();
  for (const model of models) {
    const source = model.data_source_name || 'unknown';
    const schema = model.schema || 'unknown';
    const key = `${source}:${schema}`;
    const existing = groups.get(key);
    if (existing) {
      existing.push(model);
    } else {
      groups.set(key, [model]);
    }
  }
  return groups;
}

View File

@ -9,36 +9,14 @@ import type { DeploymentExcluded, ResolvedConfig } from '../schemas';
*/
export async function discoverModelFiles(
config: ResolvedConfig,
baseDir: string,
recursive = true
baseDir: string
): Promise<string[]> {
const patterns: string[] = [];
// Use include patterns directly from config
const patterns = config.include.map((pattern) => resolve(baseDir, pattern));
// Build glob patterns from model paths
const modelPaths = [...config.model_paths, ...config.semantic_model_paths];
for (const modelPath of modelPaths) {
const absolutePath = resolve(baseDir, modelPath);
if (recursive) {
// Recursive search for YAML files
patterns.push(`${absolutePath}/**/*.{yml,yaml}`);
} else {
// Non-recursive search
patterns.push(`${absolutePath}/*.{yml,yaml}`);
}
}
// Find all YAML files matching the patterns
// Find all files matching the include patterns
const files = await glob(patterns, {
ignore: [
'**/node_modules/**',
'**/dist/**',
'**/build/**',
'**/.git/**',
'**/buster.yml', // Don't include the config file itself
'**/buster.yaml',
],
ignore: ['**/node_modules/**', '**/dist/**', '**/build/**', '**/.git/**'],
absolute: true,
unique: true,
});

View File

@ -80,13 +80,11 @@ export type Metric = DatasetMetric;
// Resolved config after merging CLI options, file config, and defaults
export const ResolvedConfigSchema = z.object({
data_source_name: z.string().optional(),
data_source_name: z.string(),
database: z.string().optional(),
schema: z.string().optional(),
model_paths: z.array(z.string()).default(['.']),
semantic_model_paths: z.array(z.string()).default(['.']),
exclude_files: z.array(z.string()).default([]),
exclude_tags: z.array(z.string()).default([]),
schema: z.string(),
include: z.array(z.string()).default(['**/*.yml', '**/*.yaml']),
exclude: z.array(z.string()).default([]),
});
// ============================================================================
@ -95,11 +93,7 @@ export const ResolvedConfigSchema = z.object({
export const DeployOptionsSchema = z.object({
path: z.string().optional(),
dataSource: z.string().optional(),
database: z.string().optional(),
schema: z.string().optional(),
dryRun: z.boolean().default(false),
recursive: z.boolean().default(true),
verbose: z.boolean().default(false),
});

View File

@ -1,71 +1,353 @@
import { mkdir, writeFile } from 'node:fs/promises';
import { join, resolve } from 'node:path';
import { createBusterSDK } from '@buster/sdk';
import { Box, Text, useApp, useInput } from 'ink';
import BigText from 'ink-big-text';
import Spinner from 'ink-spinner';
import TextInput from 'ink-text-input';
import React, { useState, useEffect } from 'react';
import { type Credentials, hasCredentials, saveCredentials } from '../utils/credentials.js';
import { type Credentials, getCredentials, saveCredentials } from '../utils/credentials.js';
interface InitProps {
apiKey?: string;
host?: string;
local?: boolean;
skipBanner?: boolean;
path?: string;
}
const DEFAULT_HOST = 'https://api2.buster.so';
const LOCAL_HOST = 'http://localhost:3001';
// Example YAML content
const BUSTER_YML_CONTENT = `# Buster configuration file
projects:
# The name of the project
- name: revenue
# The name of the related data source in the Buster UI
# Can be overridden on a per-model basis
data_source: finance_datasource
# The name of the database where the models are stored
# Can be overridden on a per-model basis
database: finance
# The name of the schema where the models are stored
# Can be overridden on a per-model basis
schema: revenue
# Include patterns for model files (relative to buster.yml)
include:
- "docs/revenue/*.yml"
# Exclude patterns for files to skip (optional)
exclude:
- "docs/revenue/super-secret.yml"
# You can define multiple projects for different environments
- name: sales
data_source: sales_datasource
schema: sales
database: sales
include:
- "docs/sales/*.yml"
`;
const SALES_LEADS_CONTENT = `name: leads
description: Sales lead tracking and pipeline management
data_source_name: my_datasource
schema: public
database: main
dimensions:
- name: lead_id
description: Unique identifier for the lead
type: string
searchable: true
- name: company_name
description: Name of the company
type: string
searchable: true
- name: contact_email
description: Primary contact email
type: string
searchable: true
- name: created_date
description: When the lead was created
type: timestamp
- name: stage
description: Current stage in sales pipeline
type: string
searchable: true
options: ["prospecting", "qualified", "proposal", "negotiation", "closed_won", "closed_lost"]
- name: lead_source
description: Source of the lead
type: string
searchable: true
options: ["website", "referral", "event", "cold_call", "marketing"]
measures:
- name: total_leads
description: Count of all leads
type: number
expr: "COUNT(DISTINCT lead_id)"
- name: qualified_leads
description: Count of qualified leads
type: number
expr: "COUNT(DISTINCT CASE WHEN stage IN ('qualified', 'proposal', 'negotiation', 'closed_won') THEN lead_id END)"
- name: pipeline_value
description: Total pipeline value
type: number
expr: "SUM(estimated_value)"
metrics:
- name: conversion_rate
expr: "(COUNT(CASE WHEN stage = 'closed_won' THEN 1 END) / NULLIF(total_leads, 0)) * 100"
description: Percentage of leads that convert to customers
- name: average_deal_size
expr: "pipeline_value / NULLIF(qualified_leads, 0)"
description: Average value per qualified lead
`;
const SALES_OPPORTUNITIES_CONTENT = `name: opportunities
description: Sales opportunities and deals
data_source_name: my_datasource
schema: public
database: main
dimensions:
- name: opportunity_id
description: Unique opportunity identifier
type: string
searchable: true
- name: account_name
description: Name of the account
type: string
searchable: true
- name: close_date
description: Expected close date
type: timestamp
- name: stage
description: Opportunity stage
type: string
searchable: true
options: ["prospecting", "qualification", "needs_analysis", "proposal", "negotiation", "closed_won", "closed_lost"]
- name: sales_rep
description: Assigned sales representative
type: string
searchable: true
measures:
- name: total_opportunities
description: Count of all opportunities
type: number
expr: "COUNT(DISTINCT opportunity_id)"
- name: deal_value
description: Total deal value
type: number
expr: "SUM(amount)"
- name: won_deals
description: Count of won deals
type: number
expr: "COUNT(CASE WHEN stage = 'closed_won' THEN 1 END)"
metrics:
- name: win_rate
expr: "(won_deals / NULLIF(COUNT(CASE WHEN stage IN ('closed_won', 'closed_lost') THEN 1 END), 0)) * 100"
description: Percentage of closed deals that are won
- name: average_deal_size
expr: "deal_value / NULLIF(total_opportunities, 0)"
description: Average value per opportunity
`;
const FINANCE_REVENUE_CONTENT = `name: revenue
description: Revenue tracking and analysis
data_source_name: my_datasource
schema: public
database: main
dimensions:
- name: transaction_id
description: Unique transaction identifier
type: string
searchable: true
- name: transaction_date
description: Date of the transaction
type: timestamp
- name: revenue_type
description: Type of revenue
type: string
searchable: true
options: ["subscription", "one_time", "recurring", "professional_services"]
- name: product_line
description: Product line
type: string
searchable: true
- name: region
description: Geographic region
type: string
searchable: true
options: ["north_america", "europe", "asia_pacific", "latin_america"]
measures:
- name: total_revenue
description: Total revenue amount
type: number
expr: "SUM(amount)"
- name: recurring_revenue
description: Monthly recurring revenue
type: number
expr: "SUM(CASE WHEN revenue_type IN ('subscription', 'recurring') THEN amount END)"
- name: transaction_count
description: Number of transactions
type: number
expr: "COUNT(DISTINCT transaction_id)"
metrics:
- name: mrr_growth
expr: "((recurring_revenue - LAG(recurring_revenue) OVER (ORDER BY transaction_date)) / NULLIF(LAG(recurring_revenue) OVER (ORDER BY transaction_date), 0)) * 100"
description: Month-over-month MRR growth rate
- name: average_transaction_value
expr: "total_revenue / NULLIF(transaction_count, 0)"
description: Average revenue per transaction
`;
const FINANCE_EXPENSES_CONTENT = `name: expenses
description: Expense tracking and budget management
data_source_name: my_datasource
schema: public
database: main
dimensions:
- name: expense_id
description: Unique expense identifier
type: string
searchable: true
- name: expense_date
description: Date of the expense
type: timestamp
- name: category
description: Expense category
type: string
searchable: true
options: ["salaries", "marketing", "operations", "technology", "travel", "office", "other"]
- name: department
description: Department that incurred the expense
type: string
searchable: true
- name: vendor
description: Vendor or supplier
type: string
searchable: true
measures:
- name: total_expenses
description: Total expense amount
type: number
expr: "SUM(amount)"
- name: expense_count
description: Number of expense transactions
type: number
expr: "COUNT(DISTINCT expense_id)"
- name: budget_allocated
description: Total budget allocated
type: number
expr: "SUM(budget_amount)"
metrics:
- name: budget_utilization
expr: "(total_expenses / NULLIF(budget_allocated, 0)) * 100"
description: Percentage of budget utilized
- name: expense_per_employee
expr: "total_expenses / NULLIF(employee_count, 0)"
description: Average expense per employee
`;
// Component for the welcome screen
function WelcomeScreen() {
return (
<Box paddingY={2} paddingX={2} alignItems='center'>
<Box marginRight={4}>
<Box paddingY={2} paddingX={2} flexDirection='column' alignItems='center'>
<Box>
<Text color='#7C3AED'>
<BigText text='BUSTER' font='block' />
</Text>
</Box>
<Box flexDirection='column' justifyContent='center'>
<Box>
<Text bold>Welcome to Buster</Text>
<Box marginTop={1}>
<Text dimColor>Type / to use slash commands</Text>
</Box>
<Box>
<Text dimColor>Type @ to mention files</Text>
</Box>
<Box>
<Text dimColor>Ctrl-C to exit</Text>
</Box>
<Box marginTop={2}>
<Text dimColor>/help for more</Text>
</Box>
<Box marginTop={2}>
<Text color='#7C3AED'>"Run `buster` and fix all the errors"</Text>
</Box>
</Box>
</Box>
);
}
export function Init({ apiKey, host, local, skipBanner }: InitProps) {
/**
 * Helper function to create the example project structure on disk:
 * a `buster/` folder containing buster.yml plus docs/revenue and docs/sales
 * example model files. Directories are created recursively, then each file
 * is written in sequence.
 */
async function createProjectStructure(basePath: string): Promise<void> {
  const projectRoot = join(basePath, 'buster');
  const docsRoot = join(projectRoot, 'docs');
  const revenueDocs = join(docsRoot, 'revenue');
  const salesDocs = join(docsRoot, 'sales');

  // Creating the leaf directories recursively also creates their parents.
  await mkdir(revenueDocs, { recursive: true });
  await mkdir(salesDocs, { recursive: true });

  // Config file plus the example model docs, written one at a time.
  const files: Array<[string, string]> = [
    [join(projectRoot, 'buster.yml'), BUSTER_YML_CONTENT],
    [join(revenueDocs, 'revenue.yml'), FINANCE_REVENUE_CONTENT],
    [join(revenueDocs, 'expenses.yml'), FINANCE_EXPENSES_CONTENT],
    [join(salesDocs, 'leads.yml'), SALES_LEADS_CONTENT],
    [join(salesDocs, 'opportunities.yml'), SALES_OPPORTUNITIES_CONTENT],
  ];
  for (const [filePath, contents] of files) {
    await writeFile(filePath, contents);
  }
}
export function InitCommand({ apiKey, host, local, path: providedPath }: InitProps) {
const { exit } = useApp();
const [step, setStep] = useState<'check' | 'prompt' | 'validate' | 'save' | 'done'>('check');
const [step, setStep] = useState<
'check' | 'prompt-auth' | 'validate' | 'save' | 'prompt-location' | 'creating' | 'done'
>('check');
const [apiKeyInput, setApiKeyInput] = useState('');
const [hostInput, setHostInput] = useState('');
const [projectPath, setProjectPath] = useState(providedPath || './');
const [error, setError] = useState<string | null>(null);
const [finalCreds, setFinalCreds] = useState<Credentials | null>(null);
const [showBanner] = useState(!skipBanner);
// Check for existing credentials
useEffect(() => {
if (step === 'check') {
hasCredentials().then((hasCreds) => {
if (hasCreds) {
console.log('\n✅ You already have Buster configured!');
console.log('\nTo reconfigure, run: buster auth');
exit();
getCredentials().then((creds) => {
if (creds) {
// Already have credentials, skip to location prompt
setFinalCreds(creds);
setStep('prompt-location');
} else {
// Set default host based on flags
// Need to authenticate first
let targetHost = DEFAULT_HOST;
if (local) targetHost = LOCAL_HOST;
else if (host) targetHost = host;
@ -78,21 +360,25 @@ export function Init({ apiKey, host, local, skipBanner }: InitProps) {
setFinalCreds({ apiKey, apiUrl: targetHost });
setStep('validate');
} else {
setStep('prompt');
setStep('prompt-auth');
}
}
});
}
}, [step, apiKey, host, local, exit]);
}, [step, apiKey, host, local]);
// Handle input
// Handle input for auth
useInput((_input, key) => {
if (key.return && step === 'prompt' && apiKeyInput) {
setFinalCreds({
apiKey: apiKeyInput,
apiUrl: hostInput || DEFAULT_HOST,
});
setStep('validate');
if (key.return) {
if (step === 'prompt-auth' && apiKeyInput) {
setFinalCreds({
apiKey: apiKeyInput,
apiUrl: hostInput || DEFAULT_HOST,
});
setStep('validate');
} else if (step === 'prompt-location') {
setStep('creating');
}
}
});
@ -112,13 +398,13 @@ export function Init({ apiKey, host, local, skipBanner }: InitProps) {
setStep('save');
} else {
setError('Invalid API key. Please check your key and try again.');
setStep('prompt');
setStep('prompt-auth');
setApiKeyInput('');
}
})
.catch((err: Error) => {
setError(`Connection failed: ${err.message}`);
setStep('prompt');
setStep('prompt-auth');
});
}
}, [step, finalCreds]);
@ -128,49 +414,57 @@ export function Init({ apiKey, host, local, skipBanner }: InitProps) {
if (step === 'save' && finalCreds) {
saveCredentials(finalCreds)
.then(() => {
setStep('done');
setStep('prompt-location');
})
.catch((err: Error) => {
console.error('Failed to save credentials:', err.message);
setStep('done');
setStep('prompt-location');
});
}
}, [step, finalCreds]);
// Show success message and exit
// Create project structure
useEffect(() => {
if (step === 'done' && finalCreds) {
const masked = finalCreds.apiKey.length > 6 ? `****${finalCreds.apiKey.slice(-6)}` : '****';
console.log('\n🎉 Welcome to Buster!\n');
console.log("✅ You've successfully connected to Buster!\n");
console.log('Connection details:');
console.log(` host: ${finalCreds.apiUrl}`);
console.log(` api_key: ${masked}`);
console.log('\nYour credentials have been saved.');
console.log('\n📚 Get started:');
console.log(' buster --help Show available commands');
console.log(' buster auth Reconfigure authentication');
exit();
if (step === 'creating') {
const resolvedPath = resolve(projectPath);
createProjectStructure(resolvedPath)
.then(() => {
setStep('done');
})
.catch((err: Error) => {
setError(`Failed to create project: ${err.message}`);
setStep('prompt-location');
});
}
}, [step, finalCreds, exit]);
}, [step, projectPath]);
// Render based on step - always show welcome screen at the top if enabled
// Exit after a delay when done
useEffect(() => {
if (step === 'done') {
// Give time to render the success message
const timer = setTimeout(() => {
exit();
}, 100);
return () => clearTimeout(timer);
}
return undefined;
}, [step, exit]);
// Always show the banner at the top
return (
<Box flexDirection='column'>
{showBanner && <WelcomeScreen />}
<WelcomeScreen />
{step === 'check' && (
<Box>
<Box paddingX={2}>
<Text>
<Spinner type='dots' /> Checking configuration...
</Text>
</Box>
)}
{step === 'prompt' && (
<Box flexDirection='column'>
{step === 'prompt-auth' && (
<Box flexDirection='column' paddingX={2}>
<Box marginBottom={1}>
<Text>Let's get you connected to Buster.</Text>
</Box>
@ -205,7 +499,7 @@ export function Init({ apiKey, host, local, skipBanner }: InitProps) {
)}
{step === 'validate' && (
<Box>
<Box paddingX={2}>
<Text>
<Spinner type='dots' /> Validating your API key...
</Text>
@ -213,12 +507,89 @@ export function Init({ apiKey, host, local, skipBanner }: InitProps) {
)}
{step === 'save' && (
<Box>
<Box paddingX={2}>
<Text>
<Spinner type='dots' /> Saving your configuration...
</Text>
</Box>
)}
{step === 'prompt-location' && (
<Box flexDirection='column' paddingX={2}>
<Box marginBottom={1}>
<Text>Where would you like to create your Buster project?</Text>
</Box>
{error && (
<Box marginBottom={1}>
<Text color='red'> {error}</Text>
</Box>
)}
<Box marginBottom={1}>
<Text>Project location: </Text>
</Box>
<Box borderStyle='single' borderColor='#7C3AED' paddingX={1}>
<TextInput value={projectPath} onChange={setProjectPath} placeholder='./' />
</Box>
<Box marginTop={1}>
<Text dimColor>A "buster" folder will be created at this location</Text>
</Box>
<Box marginTop={1}>
<Text dimColor>Press Enter to continue</Text>
</Box>
</Box>
)}
{step === 'creating' && (
<Box paddingX={2}>
<Text>
<Spinner type='dots' /> Creating project structure...
</Text>
</Box>
)}
{step === 'done' && (
<Box flexDirection='column' paddingX={2}>
<Box marginBottom={1}>
<Text color='green'> Created example project</Text>
</Box>
<Box marginBottom={1}>
<Text>Project structure:</Text>
</Box>
<Box flexDirection='column' marginLeft={2}>
<Text>📁 {join(resolve(projectPath), 'buster')}/</Text>
<Text> 📄 buster.yml</Text>
<Text> 📁 docs/</Text>
<Text> 📁 revenue/</Text>
<Text> 📄 revenue.yml</Text>
<Text> 📄 expenses.yml</Text>
<Text> 📁 sales/</Text>
<Text> 📄 leads.yml</Text>
<Text> 📄 opportunities.yml</Text>
</Box>
<Box marginTop={1} marginBottom={1}>
<Text bold>📚 Next steps:</Text>
</Box>
<Box flexDirection='column' marginLeft={2}>
<Text>1. cd {join(resolve(projectPath), 'buster')}</Text>
<Text>2. Configure buster.yml for your data source</Text>
<Text>3. Populate docs/ with your documentation files</Text>
<Text>4. Run: buster deploy to push your models to Buster</Text>
</Box>
<Box marginTop={1}>
<Text dimColor>For more information, visit https://docs.buster.so</Text>
</Box>
</Box>
)}
</Box>
);
}

View File

@ -1 +1,2 @@
export { AnimatedLogo } from './animated-logo.js';
export { Spinner } from './spinner.js';

View File

@ -0,0 +1,44 @@
import { Text } from 'ink';
import React, { useEffect, useState } from 'react';
// Props accepted by the Spinner component.
interface SpinnerProps {
  label?: string; // text rendered after the animated glyph (default: 'Loading')
  type?: 'dots' | 'line' | 'arc'; // which frame set to animate (default: 'dots')
}
// Frame sequences and per-frame durations (ms) for each spinner style.
const spinners = {
  dots: {
    frames: ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'],
    interval: 80,
  },
  line: {
    frames: ['-', '\\', '|', '/'],
    interval: 100,
  },
  arc: {
    frames: ['◜', '◠', '◝', '◞', '◡', '◟'],
    interval: 100,
  },
};
/**
 * Animated spinner component for loading states.
 *
 * Cycles through the selected frame set on a fixed interval and renders the
 * current frame followed by an optional label in cyan.
 */
export function Spinner({ label = 'Loading', type = 'dots' }: SpinnerProps) {
  const [frame, setFrame] = useState(0);
  const spinner = spinners[type];
  useEffect(() => {
    // Advance one frame per tick, wrapping back to the first frame.
    const timer = setInterval(() => {
      setFrame((prevFrame) => (prevFrame + 1) % spinner.frames.length);
    }, spinner.interval);
    // Clear the interval on unmount or when the frame set changes.
    return () => clearInterval(timer);
  }, [spinner]);
  return (
    <Text color='cyan'>
      {spinner.frames[frame]} {label}
    </Text>
  );
}

View File

@ -6,6 +6,7 @@ import { Auth } from './commands/auth.js';
import { DeployCommand } from './commands/deploy/deploy.js';
import { DeployOptionsSchema } from './commands/deploy/schemas.js';
import { HelloCommand } from './commands/hello.js';
import { InitCommand } from './commands/init.js';
import { InteractiveCommand } from './commands/interactive.js';
import { Main } from './commands/main.js';
@ -55,12 +56,11 @@ program
program
.command('deploy')
.description('Deploy semantic models to Buster API')
.option('--path <path>', 'Path to search for model files (defaults to current directory)')
.option(
'--path <path>',
'Path to search for buster.yml and model files (defaults to current directory)'
)
.option('--dry-run', 'Validate models without deploying')
.option('--no-recursive', 'Do not search directories recursively')
.option('--data-source <name>', 'Override data source name')
.option('--database <name>', 'Override database name')
.option('--schema <name>', 'Override schema name')
.option('--verbose', 'Show detailed output')
.action(async (options) => {
try {
@ -68,10 +68,6 @@ program
const parsedOptions = DeployOptionsSchema.parse({
path: options.path,
dryRun: options.dryRun || false,
recursive: options.recursive !== false,
dataSource: options.dataSource,
database: options.database,
schema: options.schema,
verbose: options.verbose || false,
});
@ -82,5 +78,17 @@ program
}
});
// Init command - initialize a new Buster project
program
.command('init')
.description('Initialize a new Buster project')
.option('--api-key <key>', 'Your Buster API key')
.option('--host <url>', 'Custom API host URL')
.option('--local', 'Use local development server')
.option('--path <path>', 'Project location (defaults to current directory)')
.action(async (options) => {
render(<InitCommand {...options} />);
});
// Parse command line arguments
program.parse(process.argv);

View File

@ -71,27 +71,16 @@ export const MultiModelSchema = z.object({
// ============================================================================
export const ProjectContextSchema = z.object({
name: z.string().optional(),
data_source_name: z.string().optional(),
name: z.string(),
data_source: z.string(),
database: z.string().optional(),
schema: z.string().optional(),
model_paths: z.array(z.string()).optional(),
semantic_model_paths: z.array(z.string()).optional(),
exclude_files: z.array(z.string()).optional(),
exclude_tags: z.array(z.string()).optional(),
schema: z.string(),
include: z.array(z.string()).default(['**/*.yml', '**/*.yaml']),
exclude: z.array(z.string()).default([]),
});
export const BusterConfigSchema = z.object({
// Top-level fields for backwards compatibility
data_source_name: z.string().optional(),
database: z.string().optional(),
schema: z.string().optional(),
model_paths: z.array(z.string()).optional(),
semantic_model_paths: z.array(z.string()).optional(),
exclude_files: z.array(z.string()).optional(),
exclude_tags: z.array(z.string()).optional(),
// Multi-project structure (for future use)
projects: z.array(ProjectContextSchema).optional(),
projects: z.array(ProjectContextSchema).min(1),
});
// ============================================================================