Fix the todo step to throw LLM errors if the model throws an error

This commit is contained in:
Wells Bunker 2025-09-30 14:45:25 -06:00
parent 248994edff
commit e0828bc8c8
No known key found for this signature in database
GPG Key ID: DB16D6F2679B78FC
1 changed file with 5 additions and 4 deletions

View File

@ -102,6 +102,10 @@ async function generateTodosWithLLM(
messages: todosMessages,
temperature: 0,
providerOptions: DEFAULT_ANTHROPIC_OPTIONS,
onError: (error) => {
console.info('LLM error while generating todos', error);
throw new Error('LLM error while generating todos');
},
});
// Process text deltas for optimistic updates
@ -111,11 +115,8 @@ async function generateTodosWithLLM(
}
})();
// Wait for the final object
const result = await object;
// Ensure all delta processing is complete before finalizing
await deltaProcessing;
const [_, result] = await Promise.all([deltaProcessing, object]);
// Finalize the reasoning message
await onStreamFinish(result);