mirror of https://github.com/buster-so/buster.git

commit 51a1541abc
Merge branch 'staging' into devin/BUS-1467-1753083441
@@ -47,6 +47,7 @@ mockito = "1.2.0"
mockall = "0.12.1"
bb8-redis = "0.18.0"
indexmap = { version = "2.2.6", features = ["serde"] }
itertools = "0.14"
once_cell = "1.20.2"
rustls = { version = "0.23", features = ["ring"] }
rustls-native-certs = "0.8"
@@ -2,4 +2,4 @@ pub mod message_user_clarifying_question;
pub mod done;

pub use message_user_clarifying_question::*;
pub use done::*;
pub use done::*;
@@ -37,6 +37,7 @@ semantic_layer = { path = "../semantic_layer" }

# Add any handler-specific dependencies here
dashmap = "5.5.3"
itertools = { workspace = true }

# Add stored_values dependency
stored_values = { path = "../stored_values" }
@@ -11,6 +11,7 @@ use database::{
};
use diesel::{ExpressionMethods, JoinOnDsl, NullableExpressionMethods, QueryDsl, Queryable};
use diesel_async::RunQueryDsl;
use itertools::Itertools;
use middleware::AuthenticatedUser;
use sharing::{check_permission_access, compute_effective_permission};
use tracing;
@@ -156,7 +157,10 @@ pub async fn get_collection_handler(
name: p.name,
avatar_url: p.avatar_url,
})
.collect::<Vec<BusterShareIndividual>>(),
.collect::<Vec<BusterShareIndividual>>()
.into_iter()
.sorted_by(|a, b| a.email.to_lowercase().cmp(&b.email.to_lowercase()))
.collect(),
)
}
}
@@ -5,6 +5,7 @@ use chrono::{DateTime, Utc};
use diesel::{BoolExpressionMethods, ExpressionMethods, JoinOnDsl, QueryDsl, Queryable, Selectable};
use diesel_async::RunQueryDsl;
use futures::future::join_all;
use itertools::Itertools;
use middleware::AuthenticatedUser;
use serde_json::Value;
use serde_yaml;
@@ -390,7 +391,10 @@ pub async fn get_dashboard_handler(
name: p.name,
avatar_url: p.avatar_url,
})
.collect::<Vec<BusterShareIndividual>>(),
.collect::<Vec<BusterShareIndividual>>()
.into_iter()
.sorted_by(|a, b| a.email.to_lowercase().cmp(&b.email.to_lowercase()))
.collect(),
)
}
}
@@ -2,6 +2,7 @@ use anyhow::{anyhow, Result};
use diesel::{BoolExpressionMethods, ExpressionMethods, JoinOnDsl, QueryDsl, Queryable};
use diesel_async::RunQueryDsl;
use futures::future::join;
use itertools::Itertools;
use middleware::AuthenticatedUser;
use serde_yaml;
use sharing::asset_access_checks::check_metric_collection_access;
@@ -464,7 +465,10 @@ pub async fn get_metric_handler(
name: p.name,
avatar_url: p.avatar_url,
})
.collect::<Vec<crate::metrics::types::BusterShareIndividual>>(),
.collect::<Vec<crate::metrics::types::BusterShareIndividual>>()
.into_iter()
.sorted_by(|a, b| a.email.to_lowercase().cmp(&b.email.to_lowercase()))
.collect(),
)
}
}
@@ -28,7 +28,7 @@
"@buster/typescript-config": "workspace:*",
"@buster/vitest-config": "workspace:*",
"@mastra/core": "catalog:",
"@trigger.dev/sdk": "catalog:",
"@trigger.dev/sdk": "4.0.0-v4-beta.24",
"ai": "catalog:",
"braintrust": "catalog:",
"vitest": "catalog:",
@@ -36,6 +36,6 @@
"drizzle-orm": "catalog:"
},
"devDependencies": {
"@trigger.dev/build": "catalog:"
"@trigger.dev/build": "4.0.0-v4-beta.24"
}
}
@@ -352,7 +352,7 @@ export const slackAgentTask: ReturnType<
});

// Check if task has started
if (run.status === 'EXECUTING' || run.status === 'REATTEMPTING') {
if (run.status === 'EXECUTING') {
hasStartedRunning = true;
logger.log('Analyst task started executing during rapid poll', {
runId: analystHandle.id,
@@ -438,8 +438,7 @@ export const slackAgentTask: ReturnType<
run.status === 'SYSTEM_FAILURE' ||
run.status === 'CRASHED' ||
run.status === 'CANCELED' ||
run.status === 'TIMED_OUT' ||
run.status === 'INTERRUPTED'
run.status === 'TIMED_OUT'
) {
// Task already completed or failed during rapid polling
isComplete = true;
@@ -474,7 +473,7 @@ export const slackAgentTask: ReturnType<
});

// Handle transition from queued to executing if we haven't sent progress message yet
if (!hasStartedRunning && (run.status === 'EXECUTING' || run.status === 'REATTEMPTING')) {
if (!hasStartedRunning && run.status === 'EXECUTING') {
hasStartedRunning = true;
logger.log('Analyst task has started executing', {
runId: analystHandle.id,
@@ -563,8 +562,7 @@ export const slackAgentTask: ReturnType<
run.status === 'SYSTEM_FAILURE' ||
run.status === 'CRASHED' ||
run.status === 'CANCELED' ||
run.status === 'TIMED_OUT' ||
run.status === 'INTERRUPTED'
run.status === 'TIMED_OUT'
) {
isComplete = true;
analystResult = { ok: false, error: run.error || 'Task failed' };
@@ -182,7 +182,7 @@ export const useShareCollection = () => {
draft.individual_permissions = [
...params.map((p) => ({ ...p })),
...(draft.individual_permissions || [])
];
].sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -208,7 +208,8 @@ export const useUnshareCollection = () => {
if (!previousData) return previousData;
return create(previousData, (draft: BusterCollection) => {
draft.individual_permissions =
draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [];
(draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [])
.sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -231,11 +232,11 @@ export const useUpdateCollectionShare = () => {
if (!previousData) return previousData;
return create(previousData, (draft) => {
draft.individual_permissions =
draft.individual_permissions?.map((t) => {
(draft.individual_permissions?.map((t) => {
const found = params.users?.find((v) => v.email === t.email);
if (found) return { ...t, ...found };
return t;
}) || [];
}) || []).sort((a, b) => a.email.localeCompare(b.email));

if (params.publicly_accessible !== undefined) {
draft.publicly_accessible = params.publicly_accessible;
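The same comparator is applied in every optimistic update above (and in the dashboard and metric hooks below). A tiny illustrative snippet of its effect, using hypothetical sample data, roughly mirroring the case-insensitive email sort added to the Rust handlers:

const perms = [{ email: 'zoe@example.com' }, { email: 'Adam@example.com' }];
perms.sort((a, b) => a.email.localeCompare(b.email));
// => [{ email: 'Adam@example.com' }, { email: 'zoe@example.com' }]
// localeCompare orders by locale collation, so casing does not push 'Adam' after 'zoe'.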
@@ -368,7 +368,7 @@ export const useShareDashboard = () => {
avatar_url: p.avatar_url || null
})),
...(draft.individual_permissions || [])
];
].sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -398,7 +398,8 @@ export const useUnshareDashboard = () => {
if (!previousData) return previousData;
return create(previousData, (draft) => {
draft.individual_permissions =
draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [];
(draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [])
.sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -419,11 +420,11 @@ export const useUpdateDashboardShare = () => {
if (!previousData) return previousData;
return create(previousData, (draft) => {
draft.individual_permissions =
draft.individual_permissions?.map((t) => {
(draft.individual_permissions?.map((t) => {
const found = params.users?.find((v) => v.email === t.email);
if (found) return { ...t, ...found };
return t;
}) || [];
}) || []).sort((a, b) => a.email.localeCompare(b.email));

if (params.publicly_accessible !== undefined) {
draft.publicly_accessible = params.publicly_accessible;
@@ -233,7 +233,7 @@ export const useShareMetric = () => {
avatar_url: p.avatar_url || null
})),
...(draft.individual_permissions || [])
];
].sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -264,7 +264,8 @@ export const useUnshareMetric = () => {
if (!previousData) return previousData;
return create(previousData, (draft: BusterMetric) => {
draft.individual_permissions =
draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [];
(draft.individual_permissions?.filter((t) => !variables.data.includes(t.email)) || [])
.sort((a, b) => a.email.localeCompare(b.email));
});
});
},
@@ -297,11 +298,11 @@ export const useUpdateMetricShare = () => {
if (!previousData) return previousData;
return create(previousData, (draft: BusterMetric) => {
draft.individual_permissions =
draft.individual_permissions?.map((t) => {
(draft.individual_permissions?.map((t) => {
const found = variables.params.users?.find((v) => v.email === t.email);
if (found) return { ...t, ...found };
return t;
}) || [];
}) || []).sort((a, b) => a.email.localeCompare(b.email));

if (variables.params.publicly_accessible !== undefined) {
draft.publicly_accessible = variables.params.publicly_accessible;
@@ -0,0 +1,99 @@
import { createTool } from '@mastra/core/tools';
import { wrapTraced } from 'braintrust';
import { z } from 'zod';

// Input/Output schemas
const idleInputSchema = z.object({
final_response: z
.string()
.min(1, 'Final response is required')
.describe(
"The final response message to the user. **MUST** be formatted in Markdown. Use bullet points or other appropriate Markdown formatting. Do not include headers. Do not use the '•' bullet character. Do not include markdown tables."
),
});

export type IdleToolExecuteInput = z.infer<typeof idleInputSchema>;

/**
* Optimistic parsing function for streaming idle tool arguments
* Extracts the final_response field as it's being built incrementally
*/
export function parseStreamingArgs(
accumulatedText: string
): Partial<z.infer<typeof idleInputSchema>> | null {
// Validate input type
if (typeof accumulatedText !== 'string') {
throw new Error(`parseStreamingArgs expects string input, got ${typeof accumulatedText}`);
}

try {
// First try to parse as complete JSON
const parsed = JSON.parse(accumulatedText);
return {
final_response: parsed.final_response || undefined,
};
} catch (error) {
// Only catch JSON parse errors - let other errors bubble up
if (error instanceof SyntaxError) {
// JSON parsing failed - try regex extraction for partial content
// Handle both complete and incomplete strings, accounting for escaped quotes
const match = accumulatedText.match(/"final_response"\s*:\s*"((?:[^"\\]|\\.)*)"/);
if (match && match[1] !== undefined) {
// Unescape the string
const unescaped = match[1].replace(/\\"/g, '"').replace(/\\\\/g, '\\');
return {
final_response: unescaped,
};
}

// Try to extract partial string that's still being built (incomplete quote)
const partialMatch = accumulatedText.match(/"final_response"\s*:\s*"((?:[^"\\]|\\.*)*)/);
if (partialMatch && partialMatch[1] !== undefined) {
// Unescape the partial string
const unescaped = partialMatch[1].replace(/\\"/g, '"').replace(/\\\\/g, '\\');
return {
final_response: unescaped,
};
}

return null;
}
// Unexpected error - re-throw with context
throw new Error(
`Unexpected error in parseStreamingArgs: ${error instanceof Error ? error.message : 'Unknown error'}`
);
}
}

const idleOutputSchema = z.object({
success: z.boolean().describe('Whether the operation was successful'),
});

type IdleOutput = z.infer<typeof idleOutputSchema>;

async function processIdle(_input: IdleToolExecuteInput): Promise<IdleOutput> {
return {
success: true,
};
}

const executeIdle = wrapTraced(
async (input: IdleToolExecuteInput): Promise<z.infer<typeof idleOutputSchema>> => {
return await processIdle(input);
},
{ name: 'idle-tool' }
);

// Export the tool
export const idleTool = createTool({
id: 'idle',
description:
"Marks all remaining unfinished tasks as complete, sends a final response to the user, and enters an idle state. Use this when current work is finished but the agent should remain available for future tasks. This must be in markdown format and not use the '•' bullet character.",
inputSchema: idleInputSchema,
outputSchema: idleOutputSchema,
execute: async ({ context }) => {
return await executeIdle(context as IdleToolExecuteInput);
},
});

export default idleTool;
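For context, a brief hypothetical sketch (not part of the diff; the relative import path is an assumption) of how parseStreamingArgs behaves on complete, partial, and unrecognizable streaming chunks:

import { parseStreamingArgs } from './idle-tool';

// Complete JSON parses directly.
parseStreamingArgs('{"final_response": "All done."}');
// => { final_response: 'All done.' }

// A partial chunk falls back to regex extraction of the in-progress string.
parseStreamingArgs('{"final_response": "Working on th');
// => { final_response: 'Working on th' }

// Text with no recognizable final_response field yields null.
parseStreamingArgs('{"other');
// => null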
@@ -0,0 +1,144 @@
import { spawn } from 'node:child_process';

export interface BashCommandParams {
command: string;
description?: string | undefined;
timeout?: number | undefined;
}

export interface BashExecuteResult {
command: string;
stdout: string;
stderr?: string | undefined;
exitCode: number;
success: boolean;
error?: string | undefined;
}

async function executeSingleBashCommand(
command: string,
timeout?: number
): Promise<{
stdout: string;
stderr: string;
exitCode: number;
}> {
return new Promise((resolve, reject) => {
const child = spawn('bash', ['-c', command], {
stdio: ['pipe', 'pipe', 'pipe'],
});

let stdout = '';
let stderr = '';
let timeoutId: NodeJS.Timeout | undefined;

if (timeout) {
timeoutId = setTimeout(() => {
child.kill('SIGTERM');
reject(new Error(`Command timed out after ${timeout}ms`));
}, timeout);
}

child.stdout?.on('data', (data) => {
stdout += data.toString();
});

child.stderr?.on('data', (data) => {
stderr += data.toString();
});

child.on('close', (code) => {
if (timeoutId) {
clearTimeout(timeoutId);
}
resolve({
stdout: stdout.trim(),
stderr: stderr.trim(),
exitCode: code || 0,
});
});

child.on('error', (error) => {
if (timeoutId) {
clearTimeout(timeoutId);
}
reject(error);
});
});
}

export async function executeBashCommandsSafely(
commands: BashCommandParams[]
): Promise<BashExecuteResult[]> {
const results: BashExecuteResult[] = [];

for (const cmd of commands) {
try {
const result = await executeSingleBashCommand(cmd.command, cmd.timeout);

results.push({
command: cmd.command,
stdout: result.stdout,
stderr: result.stderr ? result.stderr : undefined,
exitCode: result.exitCode,
success: result.exitCode === 0,
error: result.exitCode !== 0 ? result.stderr || 'Command failed' : undefined,
});
} catch (error) {
results.push({
command: cmd.command,
stdout: '',
stderr: undefined,
exitCode: 1,
success: false,
error: error instanceof Error ? error.message : 'Unknown execution error',
});
}
}

return results;
}

export function generateBashExecuteCode(commands: BashCommandParams[]): string {
return `
const { spawnSync } = require('child_process');

function executeSingleBashCommand(command, timeout) {
try {
const options = {
shell: '/bin/bash',
encoding: 'utf8',
timeout: timeout || undefined,
};

const result = spawnSync('bash', ['-c', command], options);

return {
command,
stdout: result.stdout ? result.stdout.trim() : '',
stderr: result.stderr ? result.stderr.trim() : undefined,
exitCode: result.status !== null ? result.status : 1,
success: result.status === 0,
error: result.status !== 0 ? (result.stderr ? result.stderr.trim() : 'Command failed') : undefined,
};
} catch (error) {
return {
command,
stdout: '',
stderr: undefined,
exitCode: 1,
success: false,
error: error instanceof Error ? error.message : 'Unknown execution error',
};
}
}

function executeBashCommandsConcurrently(commands) {
return commands.map((cmd) => executeSingleBashCommand(cmd.command, cmd.timeout));
}

const commands = ${JSON.stringify(commands)};
const results = executeBashCommandsConcurrently(commands);
console.log(JSON.stringify(results));
`.trim();
}
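A minimal local usage sketch for the helpers above (illustrative only; the import path and the specific commands are assumptions):

import { executeBashCommandsSafely } from './bash-execute-functions';

const results = await executeBashCommandsSafely([
{ command: 'echo "hello"', description: 'sanity check' },
{ command: 'ls /does-not-exist', timeout: 5000 },
]);

// Each entry reports stdout, exitCode, and success; a non-zero exit carries stderr in `error`.
for (const r of results) {
console.info(`${r.command} -> exit ${r.exitCode}, success=${r.success}`);
}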
@@ -0,0 +1,222 @@
import { RuntimeContext } from '@mastra/core/runtime-context';
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import { z } from 'zod';
import { type SandboxContext, SandboxContextKey } from '../../context/sandbox-context';
import { bashExecute } from './bash-execute-tool';

vi.mock('@buster/sandbox', () => ({
runTypescript: vi.fn(),
}));

vi.mock('./bash-execute-functions', () => ({
generateBashExecuteCode: vi.fn(),
executeBashCommandsSafely: vi.fn(),
}));

import { runTypescript } from '@buster/sandbox';
import { executeBashCommandsSafely, generateBashExecuteCode } from './bash-execute-functions';

const mockRunTypescript = vi.mocked(runTypescript);
const mockGenerateBashExecuteCode = vi.mocked(generateBashExecuteCode);
const mockExecuteBashCommandsSafely = vi.mocked(executeBashCommandsSafely);

describe('bash-execute-tool', () => {
let runtimeContext: RuntimeContext<SandboxContext>;

beforeEach(() => {
vi.clearAllMocks();
runtimeContext = new RuntimeContext<SandboxContext>();
});

afterEach(() => {
vi.restoreAllMocks();
});

describe('bashExecute tool', () => {
it('should have correct tool configuration', () => {
expect(bashExecute.id).toBe('bash_execute');
expect(bashExecute.description).toContain('Executes bash commands');
expect(bashExecute.inputSchema).toBeDefined();
expect(bashExecute.outputSchema).toBeDefined();
});

it('should validate input schema correctly', () => {
const validInput = {
commands: [
{ command: 'echo "hello"', description: 'test command' },
{ command: 'ls -la', timeout: 5000 },
],
};

expect(() => bashExecute.inputSchema.parse(validInput)).not.toThrow();
});

it('should execute with sandbox when available', async () => {
const mockSandbox = { process: { codeRun: vi.fn() } };
runtimeContext.set(SandboxContextKey.Sandbox, mockSandbox as any);

const input = {
commands: [{ command: 'echo "hello"' }],
};

const mockCode = 'generated typescript code';
const mockSandboxResult = {
result: JSON.stringify([
{
command: 'echo "hello"',
stdout: 'hello',
stderr: undefined,
exitCode: 0,
success: true,
error: undefined,
},
]),
exitCode: 0,
stderr: '',
};

mockGenerateBashExecuteCode.mockReturnValue(mockCode);
mockRunTypescript.mockResolvedValue(mockSandboxResult);

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(mockGenerateBashExecuteCode).toHaveBeenCalledWith(input.commands);
expect(mockRunTypescript).toHaveBeenCalledWith(mockSandbox, mockCode);
expect(result.results).toHaveLength(1);
expect(result.results[0]).toEqual({
command: 'echo "hello"',
stdout: 'hello',
stderr: undefined,
exitCode: 0,
success: true,
error: undefined,
});
});

it('should fallback to local execution when sandbox not available', async () => {
const input = {
commands: [{ command: 'echo "hello"' }],
};

const mockLocalResults = [
{
command: 'echo "hello"',
stdout: 'hello',
stderr: undefined,
exitCode: 0,
success: true,
error: undefined,
},
];

mockExecuteBashCommandsSafely.mockResolvedValue(mockLocalResults);

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(mockExecuteBashCommandsSafely).toHaveBeenCalledWith(input.commands);
expect(result.results).toEqual(mockLocalResults);
});

it('should handle sandbox execution errors', async () => {
const mockSandbox = { process: { codeRun: vi.fn() } };
runtimeContext.set(SandboxContextKey.Sandbox, mockSandbox as any);

const input = {
commands: [{ command: 'echo "hello"' }],
};

const mockCode = 'generated typescript code';
const mockSandboxResult = {
result: 'error output',
exitCode: 1,
stderr: 'Execution failed',
};

mockGenerateBashExecuteCode.mockReturnValue(mockCode);
mockRunTypescript.mockResolvedValue(mockSandboxResult);

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(result.results).toHaveLength(1);
expect(result.results[0]).toEqual({
command: 'echo "hello"',
stdout: '',
stderr: undefined,
exitCode: 1,
success: false,
error: 'Execution error: Sandbox execution failed: Execution failed',
});
});

it('should handle execution errors', async () => {
const input = {
commands: [{ command: 'echo "hello"' }],
};

mockExecuteBashCommandsSafely.mockRejectedValue(new Error('Execution failed'));

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(result.results).toHaveLength(1);
expect(result.results[0]?.success).toBe(false);
expect(result.results[0]?.error).toContain('Execution error');
});

it('should handle empty commands array', async () => {
const input = { commands: [] };

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(result.results).toHaveLength(0);
});

it('should handle JSON parse errors from sandbox', async () => {
const mockSandbox = { process: { codeRun: vi.fn() } };
runtimeContext.set(SandboxContextKey.Sandbox, mockSandbox as any);

const input = {
commands: [{ command: 'echo "hello"' }],
};

const mockCode = 'generated typescript code';
const mockSandboxResult = {
result: 'invalid json output',
exitCode: 0,
stderr: '',
};

mockGenerateBashExecuteCode.mockReturnValue(mockCode);
mockRunTypescript.mockResolvedValue(mockSandboxResult);

const result = await bashExecute.execute({
context: input,
runtimeContext,
});

expect(result.results).toHaveLength(1);
expect(result.results[0]).toEqual({
command: 'echo "hello"',
stdout: '',
stderr: undefined,
exitCode: 1,
success: false,
error: expect.stringContaining('Failed to parse sandbox output'),
});
});
});
});
@@ -0,0 +1,113 @@
import { runTypescript } from '@buster/sandbox';
import type { RuntimeContext } from '@mastra/core/runtime-context';
import { createTool } from '@mastra/core/tools';
import { wrapTraced } from 'braintrust';
import { z } from 'zod';
import { type SandboxContext, SandboxContextKey } from '../../context/sandbox-context';

const bashCommandSchema = z.object({
command: z.string().describe('The bash command to execute'),
description: z.string().optional().describe('Description of what this command does'),
timeout: z.number().optional().describe('Timeout in milliseconds'),
});

const inputSchema = z.object({
commands: z
.union([bashCommandSchema, z.array(bashCommandSchema)])
.describe('Single command or array of bash commands to execute'),
});

const outputSchema = z.object({
results: z.array(
z.object({
command: z.string(),
stdout: z.string(),
stderr: z.string().optional(),
exitCode: z.number(),
success: z.boolean(),
error: z.string().optional(),
})
),
});

const executeBashCommands = wrapTraced(
async (
input: z.infer<typeof inputSchema>,
runtimeContext: RuntimeContext<SandboxContext>
): Promise<z.infer<typeof outputSchema>> => {
const commands = Array.isArray(input.commands) ? input.commands : [input.commands];

if (!commands || commands.length === 0) {
return { results: [] };
}

try {
// Check if sandbox is available in runtime context
const sandbox = runtimeContext.get(SandboxContextKey.Sandbox);

if (sandbox) {
const { generateBashExecuteCode } = await import('./bash-execute-functions');
const code = generateBashExecuteCode(commands);
const result = await runTypescript(sandbox, code);

if (result.exitCode !== 0) {
console.error('Sandbox execution failed. Exit code:', result.exitCode);
console.error('Stderr:', result.stderr);
console.error('Stdout:', result.result);
throw new Error(`Sandbox execution failed: ${result.stderr || 'Unknown error'}`);
}

let bashResults: Array<{
command: string;
stdout: string;
stderr?: string;
exitCode: number;
success: boolean;
error?: string;
}>;
try {
bashResults = JSON.parse(result.result.trim());
} catch (parseError) {
console.error('Failed to parse sandbox output:', result.result);
throw new Error(
`Failed to parse sandbox output: ${parseError instanceof Error ? parseError.message : 'Unknown parse error'}`
);
}

return { results: bashResults };
}

const { executeBashCommandsSafely } = await import('./bash-execute-functions');
const bashResults = await executeBashCommandsSafely(commands);
return { results: bashResults };
} catch (error) {
return {
results: commands.map((cmd) => ({
command: cmd.command,
stdout: '',
stderr: undefined,
exitCode: 1,
success: false,
error: `Execution error: ${error instanceof Error ? error.message : 'Unknown error'}`,
})),
};
}
},
{ name: 'bash-execute-tool' }
);

export const bashExecute = createTool({
id: 'bash_execute',
description: 'Executes bash commands and captures stdout, stderr, and exit codes',
inputSchema,
outputSchema,
execute: async ({
context,
runtimeContext,
}: {
context: z.infer<typeof inputSchema>;
runtimeContext: RuntimeContext<SandboxContext>;
}) => {
return await executeBashCommands(context, runtimeContext);
},
});
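A minimal invocation sketch for the tool, mirroring the local-fallback path exercised by the unit tests above (illustrative; import paths are assumptions, and no sandbox is set on the runtime context):

import { RuntimeContext } from '@mastra/core/runtime-context';
import type { SandboxContext } from '../../context/sandbox-context';
import { bashExecute } from './bash-execute-tool';

const runtimeContext = new RuntimeContext<SandboxContext>();

const { results } = await bashExecute.execute({
context: { commands: [{ command: 'echo "hello"' }] },
runtimeContext,
});
// results[0] -> { command: 'echo "hello"', stdout: 'hello', exitCode: 0, success: true, ... }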
@@ -0,0 +1,186 @@
import { type Sandbox, createSandbox } from '@buster/sandbox';
import { RuntimeContext } from '@mastra/core/runtime-context';
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
import { SandboxContextKey } from '../../../context/sandbox-context';
import { grepSearch } from './grep-search-tool';

describe('grep-search-tool integration test', () => {
const hasApiKey = !!process.env.DAYTONA_API_KEY;
let sandbox: Sandbox;

beforeAll(async () => {
if (!hasApiKey) return;

sandbox = await createSandbox({
language: 'typescript',
});
}, 30000);

afterAll(async () => {
if (sandbox) {
await sandbox.delete();
}
});

it.skipIf(!hasApiKey)('should perform grep searches in sandbox environment', async () => {
const createFilesCode = `
const fs = require('fs');

fs.writeFileSync('test1.txt', 'Hello world\\nThis is a test\\nAnother line');
fs.writeFileSync('test2.txt', 'Different content\\nHello again\\nFinal line');
console.log('Files created');
console.log('Current directory:', process.cwd());
console.log('Files in directory:', fs.readdirSync('.'));
`;

await sandbox.process.codeRun(createFilesCode);

const runtimeContext = new RuntimeContext();
runtimeContext.set(SandboxContextKey.Sandbox, sandbox);

const result = await grepSearch.execute({
context: {
searches: [
{
path: 'test1.txt',
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
{
path: 'test2.txt',
pattern: 'Hello',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
],
},
runtimeContext,
});

expect(result.successful_searches).toHaveLength(2);
expect(result.successful_searches[0]).toEqual({
path: 'test1.txt',
pattern: 'test',
matches: [
{
file: 'test1.txt',
lineNumber: 2,
content: 'This is a test',
},
],
matchCount: 1,
});
expect(result.successful_searches[1]).toEqual({
path: 'test2.txt',
pattern: 'Hello',
matches: [
{
file: 'test2.txt',
lineNumber: 2,
content: 'Hello again',
},
],
matchCount: 1,
});
});

it.skipIf(!hasApiKey)('should handle non-existent files in sandbox', async () => {
const runtimeContext = new RuntimeContext();
runtimeContext.set(SandboxContextKey.Sandbox, sandbox);

const result = await grepSearch.execute({
context: {
searches: [
{
path: 'nonexistent.txt',
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
],
},
runtimeContext,
});

expect(result.failed_searches).toHaveLength(1);
expect(result.failed_searches[0]).toEqual({
path: 'nonexistent.txt',
pattern: 'test',
error: 'Path does not exist: nonexistent.txt',
});
});

it.skipIf(!hasApiKey)('should handle case-insensitive searches in sandbox', async () => {
const createFileCode = `
const fs = require('fs');
fs.writeFileSync('case-test.txt', 'Hello World\\nHELLO world\\nhello WORLD');
`;

await sandbox.process.codeRun(createFileCode);

const runtimeContext = new RuntimeContext();
runtimeContext.set(SandboxContextKey.Sandbox, sandbox);

const result = await grepSearch.execute({
context: {
searches: [
{
path: 'case-test.txt',
pattern: 'hello',
recursive: false,
ignoreCase: true,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
],
},
runtimeContext,
});

expect(result.successful_searches[0]?.matchCount).toBe(3);
});

it('should fall back to local execution when no sandbox is available', async () => {
const runtimeContext = new RuntimeContext();

const result = await grepSearch.execute({
context: {
searches: [
{
path: 'nonexistent-local.txt',
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
],
},
runtimeContext,
});

expect(result.failed_searches).toHaveLength(1);
expect(result.failed_searches[0]).toEqual({
path: 'nonexistent-local.txt',
pattern: 'test',
error: 'Path does not exist: nonexistent-local.txt',
});
});
});
@@ -0,0 +1,208 @@
import { runTypescript } from '@buster/sandbox';
import type { RuntimeContext } from '@mastra/core/runtime-context';
import { createTool } from '@mastra/core/tools';
import { wrapTraced } from 'braintrust';
import { z } from 'zod';
import { type SandboxContext, SandboxContextKey } from '../../../context/sandbox-context';

const grepSearchConfigSchema = z
.object({
path: z.string().describe('File or directory path to search'),
pattern: z.string().describe('Search pattern'),
recursive: z.boolean().optional().default(false).describe('Recursive search (-r)'),
ignoreCase: z.boolean().optional().default(false).describe('Case-insensitive search (-i)'),
invertMatch: z.boolean().optional().default(false).describe('Invert matches (-v)'),
lineNumbers: z.boolean().optional().default(true).describe('Show line numbers (-n)'),
wordMatch: z.boolean().optional().default(false).describe('Match whole words only (-w)'),
fixedStrings: z
.boolean()
.optional()
.default(false)
.describe('Treat pattern as fixed string (-F)'),
maxCount: z.number().optional().describe('Maximum number of matches (-m)'),
})
.transform((data) => {
const result: {
path: string;
pattern: string;
recursive: boolean;
ignoreCase: boolean;
invertMatch: boolean;
lineNumbers: boolean;
wordMatch: boolean;
fixedStrings: boolean;
maxCount?: number;
} = {
path: data.path,
pattern: data.pattern,
recursive: data.recursive ?? false,
ignoreCase: data.ignoreCase ?? false,
invertMatch: data.invertMatch ?? false,
lineNumbers: data.lineNumbers ?? true,
wordMatch: data.wordMatch ?? false,
fixedStrings: data.fixedStrings ?? false,
};

if (data.maxCount !== undefined) {
result.maxCount = data.maxCount;
}

return result;
});

const grepSearchInputSchema = z.object({
searches: z.array(grepSearchConfigSchema).min(1).describe('Array of search configurations'),
});

const grepMatchSchema = z.object({
file: z.string().describe('File path where match was found'),
lineNumber: z.number().optional().describe('Line number of the match'),
content: z.string().describe('Matched line content'),
});

const grepSearchResultSchema = z.object({
path: z.string().describe('Search path'),
pattern: z.string().describe('Search pattern'),
matches: z.array(grepMatchSchema).describe('Array of matches found'),
matchCount: z.number().describe('Total number of matches'),
});

const grepSearchFailureSchema = z.object({
path: z.string().describe('Search path that failed'),
pattern: z.string().describe('Search pattern'),
error: z.string().describe('Error message'),
});

const grepSearchOutputSchema = z.object({
message: z.string().describe('Summary message'),
duration: z.number().describe('Duration of operation in milliseconds'),
successful_searches: z.array(grepSearchResultSchema).describe('Successful searches with matches'),
failed_searches: z.array(grepSearchFailureSchema).describe('Failed searches with error messages'),
});

export type GrepSearchConfig = z.infer<typeof grepSearchConfigSchema>;
export type GrepSearchInput = z.infer<typeof grepSearchInputSchema>;
export type GrepSearchOutput = z.infer<typeof grepSearchOutputSchema>;

const grepSearchExecution = wrapTraced(
async (
params: z.infer<typeof grepSearchInputSchema>,
runtimeContext: RuntimeContext<SandboxContext>
): Promise<z.infer<typeof grepSearchOutputSchema>> => {
const { searches: rawSearches } = params;

const searches = rawSearches;
const startTime = Date.now();

if (!rawSearches || rawSearches.length === 0) {
return {
message: 'No searches provided',
duration: Date.now() - startTime,
successful_searches: [],
failed_searches: [],
};
}

try {
const sandbox = runtimeContext.get(SandboxContextKey.Sandbox);

if (sandbox) {
const { generateGrepSearchCode } = await import('./grep-search');
const code = generateGrepSearchCode(searches);
const result = await runTypescript(sandbox, code);

if (result.exitCode !== 0) {
console.error('Sandbox execution failed. Exit code:', result.exitCode);
console.error('Stderr:', result.stderr);
console.error('Stdout:', result.result);
throw new Error(`Sandbox execution failed: ${result.stderr || 'Unknown error'}`);
}

let searchResults: Array<{
success: boolean;
path: string;
pattern: string;
matches?: Array<{ file: string; lineNumber?: number; content: string }>;
matchCount?: number;
error?: string;
}>;
try {
searchResults = JSON.parse(result.result.trim());
} catch (parseError) {
console.error('Failed to parse sandbox output:', result.result);
throw new Error(
`Failed to parse sandbox output: ${parseError instanceof Error ? parseError.message : 'Unknown parse error'}`
);
}

const successfulSearches: z.infer<typeof grepSearchResultSchema>[] = [];
const failedSearches: z.infer<typeof grepSearchFailureSchema>[] = [];

for (const searchResult of searchResults) {
if (searchResult.success) {
successfulSearches.push({
path: searchResult.path,
pattern: searchResult.pattern,
matches: searchResult.matches || [],
matchCount: searchResult.matchCount || 0,
});
} else {
failedSearches.push({
path: searchResult.path,
pattern: searchResult.pattern,
error: searchResult.error || 'Unknown error',
});
}
}

return {
message: `Completed ${successfulSearches.length} searches successfully, ${failedSearches.length} failed`,
duration: Date.now() - startTime,
successful_searches: successfulSearches,
failed_searches: failedSearches,
};
}

const { executeGrepSearchesLocally } = await import('./grep-search');
const localResults = await executeGrepSearchesLocally(searches);

return {
message: `Completed ${localResults.successful_searches.length} searches successfully, ${localResults.failed_searches.length} failed`,
duration: Date.now() - startTime,
successful_searches: localResults.successful_searches,
failed_searches: localResults.failed_searches,
};
} catch (error) {
return {
message: 'Execution error occurred',
duration: Date.now() - startTime,
successful_searches: [],
failed_searches: searches.map((search) => ({
path: search.path,
pattern: search.pattern,
error: `Execution error: ${error instanceof Error ? error.message : 'Unknown error'}`,
})),
};
}
},
{ name: 'grep-search' }
);

export const grepSearch = createTool({
id: 'grep_search',
description:
'Performs grep-like searches on files and directories with pattern matching. Supports various grep options like recursive search, case-insensitive matching, line numbers, and more. Can handle bulk searches efficiently.',
inputSchema: grepSearchInputSchema,
outputSchema: grepSearchOutputSchema,
execute: async ({
context,
runtimeContext,
}: {
context: z.infer<typeof grepSearchInputSchema>;
runtimeContext: RuntimeContext<SandboxContext>;
}) => {
return await grepSearchExecution(context, runtimeContext);
},
});

export default grepSearch;
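A minimal invocation sketch mirroring the local-fallback integration test above (illustrative; the import path, file name, and pattern are assumptions, and no sandbox is set on the runtime context):

import { RuntimeContext } from '@mastra/core/runtime-context';
import { grepSearch } from './grep-search-tool';

const runtimeContext = new RuntimeContext();

const result = await grepSearch.execute({
context: {
searches: [
{
path: 'README.md',
pattern: 'TODO',
recursive: false,
ignoreCase: true,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
],
},
runtimeContext,
});

// Summary plus per-search results; a missing file ends up in failed_searches.
console.info(result.message, result.successful_searches, result.failed_searches);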
@@ -0,0 +1,305 @@
import { exec } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { promisify } from 'node:util';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { executeGrepSearchesLocally, generateGrepSearchCode } from './grep-search';

const execAsync = promisify(exec);

describe('grep-search', () => {
let tempDir: string;

beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'grep-search-test-'));
});

afterEach(async () => {
await fs.rm(tempDir, { recursive: true, force: true });
});

describe('executeGrepSearchesLocally - real file system tests', () => {
it('should search for patterns in files', async () => {
const file1Path = path.join(tempDir, 'file1.txt');
const file2Path = path.join(tempDir, 'file2.txt');

await fs.writeFile(file1Path, 'Hello world\nThis is a test\nAnother line');
await fs.writeFile(file2Path, 'Different content\nHello again\nFinal line');

const result = await executeGrepSearchesLocally([
{
path: file1Path,
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

expect(result.successful_searches).toHaveLength(1);
expect(result.successful_searches[0]).toBeDefined();
expect(result.successful_searches[0]!.matches).toHaveLength(1);
expect(result.successful_searches[0]!.matches[0]).toEqual({
file: file1Path,
lineNumber: 2,
content: 'This is a test',
});
});

it('should handle case-insensitive searches', async () => {
const filePath = path.join(tempDir, 'case-test.txt');
await fs.writeFile(filePath, 'Hello World\nHELLO world\nhello WORLD');

const result = await executeGrepSearchesLocally([
{
path: filePath,
pattern: 'hello',
recursive: false,
ignoreCase: true,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

expect(result.successful_searches[0]).toBeDefined();
expect(result.successful_searches[0]!.matches).toHaveLength(3);
});

it('should handle recursive searches', async () => {
const subDir = path.join(tempDir, 'subdir');
await fs.mkdir(subDir);

const file1 = path.join(tempDir, 'root.txt');
const file2 = path.join(subDir, 'nested.txt');

await fs.writeFile(file1, 'search term in root');
await fs.writeFile(file2, 'search term in nested');

const result = await executeGrepSearchesLocally([
{
path: tempDir,
pattern: 'search term',
recursive: true,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

expect(result.successful_searches[0]).toBeDefined();
expect(result.successful_searches[0]!.matches).toHaveLength(2);
});

it('should handle non-existent files', async () => {
const nonExistentPath = path.join(tempDir, 'nonexistent.txt');

const result = await executeGrepSearchesLocally([
{
path: nonExistentPath,
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

expect(result.failed_searches).toHaveLength(1);
expect(result.failed_searches[0]).toEqual({
path: nonExistentPath,
pattern: 'test',
error: `Path does not exist: ${nonExistentPath}`,
});
});

it('should handle word matching', async () => {
const filePath = path.join(tempDir, 'word-test.txt');
await fs.writeFile(filePath, 'test testing tested\nword test word');

const result = await executeGrepSearchesLocally([
{
path: filePath,
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: true,
fixedStrings: false,
},
]);

expect(result.successful_searches[0]).toBeDefined();
expect(result.successful_searches[0]!.matches).toHaveLength(2);
expect(result.successful_searches[0]!.matches[0]).toBeDefined();
expect(result.successful_searches[0]!.matches[0]!.content).toBe('test testing tested');
expect(result.successful_searches[0]!.matches[1]).toBeDefined();
expect(result.successful_searches[0]!.matches[1]!.content).toBe('word test word');
});
});

describe('generateGrepSearchCode', () => {
it('should generate valid executable TypeScript code', async () => {
const testFile = path.join(tempDir, 'code-gen-test.txt');
await fs.writeFile(testFile, 'Generated code test content\nSecond line with pattern');

const code = generateGrepSearchCode([
{
path: testFile,
pattern: 'pattern',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

const codeFile = path.join(tempDir, 'test-generated.ts');
await fs.writeFile(codeFile, code);

try {
const { stdout, stderr } = await execAsync(`npx tsx ${codeFile}`, { cwd: tempDir });

if (stderr) {
console.error('Execution stderr:', stderr);
}

const results = JSON.parse(stdout.trim());

expect(results).toHaveLength(1);
expect(results[0]).toEqual({
success: true,
path: testFile,
pattern: 'pattern',
matches: [
{
file: testFile,
lineNumber: 2,
content: 'Second line with pattern',
},
],
matchCount: 1,
});
} catch (error) {
console.error('Failed to execute generated code:', error);
throw error;
}
});

it('should handle multiple searches in generated code', async () => {
const file1 = path.join(tempDir, 'gen1.txt');
const file2 = path.join(tempDir, 'gen2.txt');

await fs.writeFile(file1, 'First file content');
await fs.writeFile(file2, 'Second file content');

const code = generateGrepSearchCode([
{
path: file1,
pattern: 'First',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
{
path: file2,
pattern: 'Second',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

const codeFile = path.join(tempDir, 'test-multi.ts');
await fs.writeFile(codeFile, code);

const { stdout } = await execAsync(`npx tsx ${codeFile}`, { cwd: tempDir });

const results = JSON.parse(stdout.trim());

expect(results).toHaveLength(2);
expect(results[0]?.matches[0]?.content).toBe('First file content');
expect(results[1]?.matches[0]?.content).toBe('Second file content');
});

it('should properly escape special characters in patterns', () => {
const searches = [
{
path: 'test.txt',
pattern: 'pattern"with"quotes',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
{
path: 'test2.txt',
pattern: "pattern'with'apostrophes",
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
];

const code = generateGrepSearchCode(searches);

expect(code).toContain(`const searches = ${JSON.stringify(searches)}`);
expect(code).toContain("const { execSync } = require('child_process')");
expect(code).toContain("const fs = require('fs')");
});

it('should handle errors in generated code', async () => {
const nonExistent = path.join(tempDir, 'does-not-exist.txt');

const code = generateGrepSearchCode([
{
path: nonExistent,
pattern: 'test',
recursive: false,
ignoreCase: false,
invertMatch: false,
lineNumbers: true,
wordMatch: false,
fixedStrings: false,
},
]);

const codeFile = path.join(tempDir, 'test-error.ts');
await fs.writeFile(codeFile, code);

const { stdout } = await execAsync(`npx tsx ${codeFile}`, { cwd: tempDir });

const results = JSON.parse(stdout.trim());

expect(results[0]).toEqual({
success: false,
path: nonExistent,
pattern: 'test',
error: `Path does not exist: ${nonExistent}`,
});
});
});
});
@ -0,0 +1,370 @@
|
|||
import { execSync } from 'node:child_process';
|
||||
import { existsSync } from 'node:fs';
|
||||
|
||||
export interface GrepSearchResult {
|
||||
success: boolean;
|
||||
path: string;
|
||||
pattern: string;
|
||||
matches?: Array<{ file: string; lineNumber?: number; content: string }>;
|
||||
matchCount?: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
function executeGrepSearchLocally(search: {
|
||||
path: string;
|
||||
pattern: string;
|
||||
recursive?: boolean;
|
||||
ignoreCase?: boolean;
|
||||
invertMatch?: boolean;
|
||||
lineNumbers?: boolean;
|
||||
wordMatch?: boolean;
|
||||
fixedStrings?: boolean;
|
||||
maxCount?: number;
|
||||
}): GrepSearchResult {
|
||||
try {
|
||||
if (!existsSync(search.path)) {
|
||||
return {
|
||||
success: false,
|
||||
path: search.path,
|
||||
pattern: search.pattern,
|
||||
error: `Path does not exist: ${search.path}`,
|
||||
};
|
||||
}
|
||||
|
||||
const grepArgs: string[] = [];
|
||||
|
||||
if (search.recursive ?? false) grepArgs.push('-r');
|
||||
if (search.ignoreCase ?? false) grepArgs.push('-i');
|
||||
if (search.invertMatch ?? false) grepArgs.push('-v');
|
||||
if (search.lineNumbers ?? true) grepArgs.push('-n');
|
||||
if (search.wordMatch ?? false) grepArgs.push('-w');
|
||||
if (search.fixedStrings ?? false) grepArgs.push('-F');
|
||||
if (search.maxCount) grepArgs.push('-m', search.maxCount.toString());
|
||||
|
||||
grepArgs.push(search.pattern);
|
||||
grepArgs.push(search.path);
|
||||
|
||||
const output = execSync(
|
||||
`grep ${grepArgs.map((arg) => `"${arg.replace(/"/g, '\\"')}"`).join(' ')}`,
|
||||
{
|
||||
encoding: 'utf8',
|
||||
maxBuffer: 1024 * 1024 * 10,
|
||||
timeout: 30000,
|
||||
}
|
||||
);
|
||||
|
||||
const lines = output
|
||||
.trim()
|
||||
.split('\n')
|
||||
.filter((line) => line.length > 0);
|
||||
const matches: Array<{ file: string; lineNumber?: number; content: string }> = [];
|
||||
|
||||
for (const line of lines) {
|
||||
if (search.lineNumbers ?? true) {
|
||||
// When not in recursive mode, grep outputs: linenumber:content
|
||||
// When in recursive mode, grep outputs: filename:linenumber:content
|
||||
if (search.recursive) {
|
||||
const match = line.match(/^([^:]+):(\d+):(.*)$/);
|
||||
if (match?.[1] && match[2] && match[3] !== undefined) {
|
||||
matches.push({
|
||||
file: match[1],
|
||||
lineNumber: Number.parseInt(match[2], 10),
|
||||
content: match[3],
|
||||
});
|
||||
} else {
|
||||
matches.push({
|
||||
file: search.path,
|
||||
content: line,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// For single file search with line numbers: linenumber:content
|
||||
const match = line.match(/^(\d+):(.*)$/);
|
||||
if (match?.[1] && match[2] !== undefined) {
|
||||
matches.push({
|
||||
file: search.path,
|
||||
lineNumber: Number.parseInt(match[1], 10),
|
||||
content: match[2],
|
||||
});
|
||||
} else {
|
||||
matches.push({
|
||||
file: search.path,
|
||||
content: line,
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const colonIndex = line.indexOf(':');
|
||||
if (colonIndex > 0 && search.recursive) {
|
||||
matches.push({
|
||||
file: line.substring(0, colonIndex),
|
||||
content: line.substring(colonIndex + 1),
|
||||
});
|
||||
} else {
|
||||
matches.push({
|
||||
file: search.path,
|
||||
content: line,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
path: search.path,
|
||||
pattern: search.pattern,
|
||||
matches,
|
||||
matchCount: matches.length,
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
if (error && typeof error === 'object' && 'status' in error && error.status === 1) {
|
||||
return {
|
||||
success: true,
|
||||
path: search.path,
|
||||
pattern: search.pattern,
|
||||
matches: [],
|
||||
matchCount: 0,
|
||||
};
|
||||
}
|
||||
return {
|
||||
success: false,
|
||||
path: search.path,
|
||||
pattern: search.pattern,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeGrepSearchesLocally(
  searches: Array<{
    path: string;
    pattern: string;
    recursive?: boolean;
    ignoreCase?: boolean;
    invertMatch?: boolean;
    lineNumbers?: boolean;
    wordMatch?: boolean;
    fixedStrings?: boolean;
    maxCount?: number;
  }>
): Promise<{
  successful_searches: Array<{
    path: string;
    pattern: string;
    matches: Array<{ file: string; lineNumber?: number; content: string }>;
    matchCount: number;
  }>;
  failed_searches: Array<{
    path: string;
    pattern: string;
    error: string;
  }>;
}> {
  const successfulSearches: Array<{
    path: string;
    pattern: string;
    matches: Array<{ file: string; lineNumber?: number; content: string }>;
    matchCount: number;
  }> = [];
  const failedSearches: Array<{
    path: string;
    pattern: string;
    error: string;
  }> = [];

  const results = await Promise.allSettled(
    searches.map(async (search) => {
      try {
        const result = executeGrepSearchLocally(search);

        if (result.success) {
          return {
            type: 'success' as const,
            data: {
              path: result.path,
              pattern: result.pattern,
              matches: result.matches || [],
              matchCount: result.matchCount || 0,
            },
          };
        }
        return {
          type: 'failure' as const,
          data: {
            path: result.path,
            pattern: result.pattern,
            error: result.error || 'Unknown error occurred',
          },
        };
      } catch (error) {
        return {
          type: 'failure' as const,
          data: {
            path: search.path,
            pattern: search.pattern,
            error: error instanceof Error ? error.message : 'Unknown error occurred',
          },
        };
      }
    })
  );

  for (const result of results) {
    if (result.status === 'fulfilled') {
      if (result.value.type === 'success') {
        successfulSearches.push(result.value.data);
      } else {
        failedSearches.push(result.value.data);
      }
    } else {
      failedSearches.push({
        path: 'unknown',
        pattern: 'unknown',
        error: result.reason?.message || 'Promise rejected',
      });
    }
  }

  return {
    successful_searches: successfulSearches,
    failed_searches: failedSearches,
  };
}

export function generateGrepSearchCode(
  searches: Array<{
    path: string;
    pattern: string;
    recursive?: boolean;
    ignoreCase?: boolean;
    invertMatch?: boolean;
    lineNumbers?: boolean;
    wordMatch?: boolean;
    fixedStrings?: boolean;
    maxCount?: number;
  }>
): string {
  return `
const { execSync } = require('child_process');
const fs = require('fs');

function executeGrepSearch(search) {
  try {
    if (!fs.existsSync(search.path)) {
      return {
        success: false,
        path: search.path,
        pattern: search.pattern,
        error: \`Path does not exist: \${search.path}\`,
      };
    }

    const grepArgs = [];

    if (search.recursive ?? false) grepArgs.push('-r');
    if (search.ignoreCase ?? false) grepArgs.push('-i');
    if (search.invertMatch ?? false) grepArgs.push('-v');
    if (search.lineNumbers ?? true) grepArgs.push('-n');
    if (search.wordMatch ?? false) grepArgs.push('-w');
    if (search.fixedStrings ?? false) grepArgs.push('-F');
    if (search.maxCount) grepArgs.push('-m', search.maxCount.toString());

    grepArgs.push(search.pattern);
    grepArgs.push(search.path);

    const output = execSync(
      \`grep \${grepArgs.map((arg) => \`"\${arg.replace(/"/g, '\\\\"')}"\`).join(' ')}\`,
      {
        encoding: 'utf8',
        maxBuffer: 1024 * 1024 * 10,
        timeout: 30000,
      }
    );

    const lines = output
      .trim()
      .split('\\n')
      .filter((line) => line.length > 0);
    const matches = [];

    for (const line of lines) {
      if (search.lineNumbers ?? true) {
        // When not in recursive mode, grep outputs: linenumber:content
        // When in recursive mode, grep outputs: filename:linenumber:content
        if (search.recursive) {
          const match = line.match(/^([^:]+):(\\d+):(.*)$/);
          if (match && match[1] && match[2] && match[3] !== undefined) {
            matches.push({
              file: match[1],
              lineNumber: parseInt(match[2], 10),
              content: match[3],
            });
          } else {
            matches.push({
              file: search.path,
              content: line,
            });
          }
        } else {
          // For single file search with line numbers: linenumber:content
          const match = line.match(/^(\\d+):(.*)$/);
          if (match && match[1] && match[2] !== undefined) {
            matches.push({
              file: search.path,
              lineNumber: parseInt(match[1], 10),
              content: match[2],
            });
          } else {
            matches.push({
              file: search.path,
              content: line,
            });
          }
        }
      } else {
        const colonIndex = line.indexOf(':');
        if (colonIndex > 0 && search.recursive) {
          matches.push({
            file: line.substring(0, colonIndex),
            content: line.substring(colonIndex + 1),
          });
        } else {
          matches.push({
            file: search.path,
            content: line,
          });
        }
      }
    }

    return {
      success: true,
      path: search.path,
      pattern: search.pattern,
      matches,
      matchCount: matches.length,
    };
  } catch (error) {
    if (error && typeof error === 'object' && 'status' in error && error.status === 1) {
      return {
        success: true,
        path: search.path,
        pattern: search.pattern,
        matches: [],
        matchCount: 0,
      };
    }
    return {
      success: false,
      path: search.path,
      pattern: search.pattern,
      error: error instanceof Error ? error.message : 'Unknown error occurred',
    };
  }
}

const searches = ${JSON.stringify(searches)};
const results = searches.map(search => executeGrepSearch(search));
console.log(JSON.stringify(results));
`.trim();
}
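
A minimal usage sketch of the two helpers added above, not part of this commit; the import path and the sample search parameters are assumptions for illustration only:

import { executeGrepSearchesLocally, generateGrepSearchCode } from './grep-search-tool'; // path is an assumption

async function demo() {
  // Run the searches in-process via the execSync-backed grep wrapper.
  const { successful_searches, failed_searches } = await executeGrepSearchesLocally([
    { path: 'src', pattern: 'TODO', recursive: true, ignoreCase: true },
    { path: 'package.json', pattern: '"name"', fixedStrings: true },
  ]);
  console.info(`ok: ${successful_searches.length}, failed: ${failed_searches.length}`);

  // Alternatively, emit a standalone script (for a sandboxed runner, for example) that
  // performs the same searches and prints the JSON results to stdout.
  const script = generateGrepSearchCode([{ path: 'src', pattern: 'export', recursive: true }]);
  console.info(script.startsWith('const')); // generated source can be written to a file and run with node
}

demo().catch(console.error);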

@ -0,0 +1 @@
export { bashExecute } from './bash-execute-tool';

@ -1,4 +1,5 @@
export { doneTool } from './communication-tools/done-tool';
export { idleTool } from './communication-tools/idle-tool';
export { respondWithoutAnalysis } from './communication-tools/respond-without-analysis';
export { submitThoughts } from './communication-tools/submit-thoughts-tool';
export { messageUserClarifyingQuestion } from './communication-tools/message-user-clarifying-question';

@ -13,4 +14,6 @@ export { editFiles } from './file-tools/edit-files-tool/edit-files-tool';
export { readFiles } from './file-tools/read-files-tool/read-files-tool';
export { createFiles } from './file-tools/create-files-tool/create-file-tool';
export { lsFiles } from './file-tools/ls-files-tool/ls-files-tool';
export { grepSearch } from './file-tools/grep-search-tool/grep-search-tool';
export { bashExecute } from './file-tools';
export { deleteFiles } from './file-tools/delete-files-tool/delete-files-tool';

@ -12,12 +12,9 @@ catalogs:
  '@supabase/supabase-js':
    specifier: ^2.50.0
    version: 2.50.2
  '@trigger.dev/build':
    specifier: ^4.0.0-v4-beta.23
    version: 4.0.0-v4-beta.23
  '@trigger.dev/sdk':
    specifier: ^4.0.0-v4-beta.23
    version: 4.0.0-v4-beta.23
    specifier: ^4.0.0-v4-beta.24
    version: 4.0.0-v4-beta.24
  ai:
    specifier: ^4.0.0
    version: 4.3.16

@ -150,7 +147,7 @@ importers:
        version: 2.50.2
      '@trigger.dev/sdk':
        specifier: 'catalog:'
        version: 4.0.0-v4-beta.23(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)
        version: 4.0.0-v4-beta.24(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)
      ai:
        specifier: 'catalog:'
        version: 4.3.16(react@18.3.1)(zod@3.25.1)

@ -212,8 +209,8 @@ importers:
        specifier: 'catalog:'
        version: 0.10.8(openapi-types@12.1.3)(react@18.3.1)(zod@3.25.1)
      '@trigger.dev/sdk':
        specifier: 'catalog:'
        version: 4.0.0-v4-beta.23(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)
        specifier: 4.0.0-v4-beta.24
        version: 4.0.0-v4-beta.24(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)
      ai:
        specifier: 'catalog:'
        version: 4.3.16(react@18.3.1)(zod@3.25.1)

@ -231,8 +228,8 @@ importers:
        version: 3.25.1
    devDependencies:
      '@trigger.dev/build':
        specifier: 'catalog:'
        version: 4.0.0-v4-beta.23(typescript@5.8.3)
        specifier: 4.0.0-v4-beta.24
        version: 4.0.0-v4-beta.24(typescript@5.8.3)

  apps/web:
    dependencies:

@ -5039,16 +5036,16 @@ packages:
    resolution: {integrity: sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==}
    engines: {node: '>= 10'}

  '@trigger.dev/build@4.0.0-v4-beta.23':
    resolution: {integrity: sha512-azK1qRVIWmuu4a2iPpWspqAuJdlldxOaqLfZFbmU2gXXAgzM8xqlPc09MHm3aYbugCoXL60DIUM1D+huFNCSEQ==}
  '@trigger.dev/build@4.0.0-v4-beta.24':
    resolution: {integrity: sha512-XciG8nq40IZwHNTg9Evi9A1O4YmxtVKvU/ArYgW78EfjqApoxMZHs71p0r/g2eZCgucOLaOhzJlPjteiK4Li8Q==}
    engines: {node: '>=18.20.0'}

  '@trigger.dev/core@4.0.0-v4-beta.23':
    resolution: {integrity: sha512-n8XPKzotMAHtZTcChdAcddCKoDhKp5ZXDU2U3tnLbIL1LAtWxvKW56fzuFBmf6e69wb9rrFL+xyOsf5YIoH/rg==}
  '@trigger.dev/core@4.0.0-v4-beta.24':
    resolution: {integrity: sha512-qMPX0J9X0XxYJLHx9/bX1TE0eVw5i09fQyVn093JXB9nzcOoT8qHN9Aq7JCF/9TQOGe4vz5haMzuZECpNpgXSw==}
    engines: {node: '>=18.20.0'}

  '@trigger.dev/sdk@4.0.0-v4-beta.23':
    resolution: {integrity: sha512-evlvT/KzODjYT+ZrKFQxwPN2roIxKX1V96lJSB4c4+ecqPfY0INf/CdISCeVcGuGj2WZu1vlRV6jBdPm3NO0Iw==}
  '@trigger.dev/sdk@4.0.0-v4-beta.24':
    resolution: {integrity: sha512-VbpH0lpg40JNb++Xy3VVzGgPCRhFLcig+hKP6mj84BiX1+dU7y1/ohICS2bYIbjEDEcxIvEx1A0+KXLqIqkFKQ==}
    engines: {node: '>=18.20.0'}
    peerDependencies:
      ai: ^4.2.0

@ -16865,9 +16862,9 @@ snapshots:

  '@tootallnate/once@2.0.0': {}

  '@trigger.dev/build@4.0.0-v4-beta.23(typescript@5.8.3)':
  '@trigger.dev/build@4.0.0-v4-beta.24(typescript@5.8.3)':
    dependencies:
      '@trigger.dev/core': 4.0.0-v4-beta.23
      '@trigger.dev/core': 4.0.0-v4-beta.24
      pkg-types: 1.3.1
      tinyglobby: 0.2.14
      tsconfck: 3.1.3(typescript@5.8.3)

@ -16877,7 +16874,7 @@ snapshots:
      - typescript
      - utf-8-validate

  '@trigger.dev/core@4.0.0-v4-beta.23':
  '@trigger.dev/core@4.0.0-v4-beta.24':
    dependencies:
      '@bugsnag/cuid': 3.2.1
      '@electric-sql/client': 1.0.0-beta.1

@ -16918,12 +16915,12 @@ snapshots:
      - supports-color
      - utf-8-validate

  '@trigger.dev/sdk@4.0.0-v4-beta.23(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)':
  '@trigger.dev/sdk@4.0.0-v4-beta.24(ai@4.3.16(react@18.3.1)(zod@3.25.1))(zod@3.25.1)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.52.1
      '@opentelemetry/semantic-conventions': 1.25.1
      '@trigger.dev/core': 4.0.0-v4-beta.23
      '@trigger.dev/core': 4.0.0-v4-beta.24
      chalk: 5.4.1
      cronstrue: 2.59.0
      debug: 4.4.1

@ -12,8 +12,8 @@ packages:

catalog:
  "@mastra/core": "^0.10.8"
  "@supabase/supabase-js": "^2.50.0"
  "@trigger.dev/build": "^4.0.0-v4-beta.23"
  "@trigger.dev/sdk": "^4.0.0-v4-beta.23"
  "@trigger.dev/build": "^4.0.0-v4-beta.24"
  "@trigger.dev/sdk": "^4.0.0-v4-beta.24"
  ai: "^4.0.0"
  axios: "^1.10.0"
  "braintrust": "^0.0.209"