From e0828bc8c8806a9c0c1156d85519210e71535d4e Mon Sep 17 00:00:00 2001
From: Wells Bunker
Date: Tue, 30 Sep 2025 14:45:25 -0600
Subject: [PATCH] Fixing the todo step to throw llm errors if the model throws
 an error

---
 .../create-todos-step/create-todos-step.ts | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/packages/ai/src/steps/analyst-agent-steps/create-todos-step/create-todos-step.ts b/packages/ai/src/steps/analyst-agent-steps/create-todos-step/create-todos-step.ts
index 372ee004a..4fcbb4253 100644
--- a/packages/ai/src/steps/analyst-agent-steps/create-todos-step/create-todos-step.ts
+++ b/packages/ai/src/steps/analyst-agent-steps/create-todos-step/create-todos-step.ts
@@ -102,6 +102,10 @@ async function generateTodosWithLLM(
     messages: todosMessages,
     temperature: 0,
     providerOptions: DEFAULT_ANTHROPIC_OPTIONS,
+    onError: (error) => {
+      console.info('LLM error while generating todos', error);
+      throw new Error('LLM error while generating todos');
+    },
   });
 
   // Process text deltas for optimistic updates
@@ -111,11 +115,8 @@
     }
   })();
 
-  // Wait for the final object
-  const result = await object;
-
   // Ensure all delta processing is complete before finalizing
-  await deltaProcessing;
+  const [_, result] = await Promise.all([deltaProcessing, object]);
 
   // Finalize the reasoning message
   await onStreamFinish(result);
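Note: below is a minimal, self-contained TypeScript sketch of the pattern this patch moves to, for readers who do not have the full file in front of them. The actual LLM call in create-todos-step.ts is not visible in the hunks, so streamTodos, TodoResult, and the delta-loop body are illustrative stand-ins; only the onError handler and the Promise.all([deltaProcessing, object]) shape come from the diff above.

// Sketch only: stands in for the real streaming call in create-todos-step.ts.
// The stream shape (a text-delta stream plus a final `object` promise) is
// assumed from the context lines in the hunk, not taken from the library.

type TodoResult = { todos: string[] };

function streamTodos(opts: { onError?: (error: unknown) => void } = {}) {
  async function* textStream() {
    yield '[ ] first todo\n';
    yield '[ ] second todo\n';
  }
  // The final object; a model failure would reject here and be reported
  // through onError, mirroring the handler added by the patch.
  const object: Promise<TodoResult> = Promise.resolve({
    todos: ['first todo', 'second todo'],
  }).catch((error) => {
    opts.onError?.(error);
    throw error;
  });
  return { textStream: textStream(), object };
}

async function main() {
  const { textStream, object } = streamTodos({
    onError: (error) => {
      console.info('LLM error while generating todos', error);
      throw new Error('LLM error while generating todos');
    },
  });

  // Process text deltas for optimistic updates (same shape as the patched code).
  const deltaProcessing = (async () => {
    for await (const delta of textStream) {
      console.log('delta:', delta.trimEnd());
    }
  })();

  // As in the patch: wait for delta processing and the final object together
  // before finalizing, instead of awaiting them one after the other.
  const [, result] = await Promise.all([deltaProcessing, object]);
  console.log('final todos:', result.todos);
}

main().catch((error) => console.error(error));

The intent of the change, per the subject line, is that a model failure now surfaces as a thrown error via onError rather than passing silently, and the final object and delta processing are awaited together rather than sequentially.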