import { createClient } from '@/lib/supabase/client';

const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';

// Simple cache implementation
const apiCache = {
  projects: new Map(),
  threads: new Map(),
  threadMessages: new Map(),
  agentRuns: new Map(),

  getProject: (projectId: string) => apiCache.projects.get(projectId),
  setProject: (projectId: string, data: any) => apiCache.projects.set(projectId, data),
  getProjects: () => apiCache.projects.get('all'),
  setProjects: (data: any) => apiCache.projects.set('all', data),
  getThreads: (projectId: string) => apiCache.threads.get(projectId || 'all'),
  setThreads: (projectId: string, data: any) => apiCache.threads.set(projectId || 'all', data),
  getThreadMessages: (threadId: string) => apiCache.threadMessages.get(threadId),
  setThreadMessages: (threadId: string, data: any) => apiCache.threadMessages.set(threadId, data),
  getAgentRuns: (threadId: string) => apiCache.agentRuns.get(threadId),
  setAgentRuns: (threadId: string, data: any) => apiCache.agentRuns.set(threadId, data),

  // Helpers to clear parts of the cache when data changes
  invalidateThreadMessages: (threadId: string) => apiCache.threadMessages.delete(threadId),
  invalidateAgentRuns: (threadId: string) => apiCache.agentRuns.delete(threadId),
};

// Add a fetch queue system to prevent multiple simultaneous requests
const fetchQueue = {
  agentRuns: new Map<string, Promise<any>>(),
  threads: new Map<string, Promise<any>>(),
  messages: new Map<string, Promise<any>>(),
  projects: new Map<string, Promise<any>>(),

  getQueuedAgentRuns: (threadId: string) => fetchQueue.agentRuns.get(threadId),
  setQueuedAgentRuns: (threadId: string, promise: Promise<any>) => {
    fetchQueue.agentRuns.set(threadId, promise);
    // Auto-clean the queue after the promise settles
    promise.finally(() => {
      fetchQueue.agentRuns.delete(threadId);
    });
    return promise;
  },

  getQueuedThreads: (projectId: string) => fetchQueue.threads.get(projectId || 'all'),
  setQueuedThreads: (projectId: string, promise: Promise<any>) => {
    fetchQueue.threads.set(projectId || 'all', promise);
    promise.finally(() => {
      fetchQueue.threads.delete(projectId || 'all');
    });
    return promise;
  },

  getQueuedMessages: (threadId: string) => fetchQueue.messages.get(threadId),
  setQueuedMessages: (threadId: string, promise: Promise<any>) => {
    fetchQueue.messages.set(threadId, promise);
    promise.finally(() => {
      fetchQueue.messages.delete(threadId);
    });
    return promise;
  },

  getQueuedProjects: () => fetchQueue.projects.get('all'),
  setQueuedProjects: (promise: Promise<any>) => {
    fetchQueue.projects.set('all', promise);
    promise.finally(() => {
      fetchQueue.projects.delete('all');
    });
    return promise;
  }
};
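
// Illustrative sketch (not part of the module's API): how the cache and fetch queue
// are expected to interact. Concurrent callers of the same fetcher share one in-flight
// promise; later callers are served from apiCache until it is invalidated. The caller
// code below is hypothetical.
//
//   const [a, b] = await Promise.all([getProjects(), getProjects()]); // one network request
//   const again = await getProjects();                                // served from apiCache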

export type Project = {
  id: string;
  name: string;
  description: string;
  account_id: string;
  created_at: string;
  sandbox: {
    vnc_preview?: string;
    id?: string;
    pass?: string;
  };
}

export type Thread = {
  thread_id: string;
  account_id: string | null;
  project_id?: string | null;
  created_at: string;
  updated_at: string;
}

export type Message = {
  role: string;
  content: string;
  type: string;
}

export type AgentRun = {
  id: string;
  thread_id: string;
  status: 'running' | 'completed' | 'stopped' | 'error';
  started_at: string;
  completed_at: string | null;
  responses: Message[];
  error: string | null;
}

export type ToolCall = {
  name: string;
  arguments: Record<string, unknown>;
}
// Project APIs
export const getProjects = async (): Promise<Project[]> => {
  // Check if we already have a pending request
  const pendingRequest = fetchQueue.getQueuedProjects();
  if (pendingRequest) {
    return pendingRequest;
  }

  // Check cache first
  const cached = apiCache.getProjects();
  if (cached) {
    return cached;
  }

  // Create and queue the promise
  const fetchPromise = (async () => {
    try {
      const supabase = createClient();
      const { data, error } = await supabase
        .from('projects')
        .select('*');

      if (error) {
        // Handle permission errors specifically
        if (error.code === '42501' && error.message.includes('has_role_on_account')) {
          console.error('Permission error: User does not have proper account access');
          return []; // Return empty array instead of throwing
        }
        throw error;
      }

      // Cache the result
      apiCache.setProjects(data || []);
      return data || [];
    } catch (err) {
      console.error('Error fetching projects:', err);
      // Return empty array for permission errors to avoid crashing the UI
      return [];
    }
  })();

  // Add to queue and return
  return fetchQueue.setQueuedProjects(fetchPromise);
};

export const getProject = async (projectId: string): Promise<Project> => {
  // Check cache first
  const cached = apiCache.getProject(projectId);
  if (cached) {
    return cached;
  }

  const supabase = createClient();
  const { data, error } = await supabase
    .from('projects')
    .select('*')
    .eq('project_id', projectId)
    .single();

  if (error) throw error;

  // Cache the result
  apiCache.setProject(projectId, data);
  return data;
};

export const createProject = async (
  projectData: { name: string; description: string },
  accountId?: string
): Promise<Project> => {
  const supabase = createClient();

  // If accountId is not provided, we'll need to get the user's ID
  if (!accountId) {
    const { data: userData, error: userError } = await supabase.auth.getUser();
    if (userError) throw userError;
    if (!userData.user) throw new Error('You must be logged in to create a project');

    // In Basejump, the personal account ID is the same as the user ID
    accountId = userData.user.id;
  }

  const { data, error } = await supabase
    .from('projects')
    .insert({
      name: projectData.name,
      description: projectData.description || null,
      account_id: accountId
    })
    .select()
    .single();

  if (error) throw error;

  // Map the database response to our Project type
  return {
    id: data.project_id,
    name: data.name,
    description: data.description || '',
    account_id: data.account_id,
    created_at: data.created_at,
    sandbox: { id: "", pass: "", vnc_preview: "" }
  };
};

export const updateProject = async (projectId: string, data: Partial<Project>): Promise<Project> => {
  const supabase = createClient();
  const { data: updatedData, error } = await supabase
    .from('projects')
    .update(data)
    .eq('project_id', projectId)
    .select()
    .single();

  if (error) throw error;
  return updatedData;
};

export const deleteProject = async (projectId: string): Promise<void> => {
  const supabase = createClient();
  const { error } = await supabase
    .from('projects')
    .delete()
    .eq('project_id', projectId);

  if (error) throw error;
};
// Thread APIs
export const getThreads = async (projectId?: string): Promise<Thread[]> => {
  // Check if we already have a pending request
  const pendingRequest = fetchQueue.getQueuedThreads(projectId || 'all');
  if (pendingRequest) {
    return pendingRequest;
  }

  // Check cache first
  const cached = apiCache.getThreads(projectId || 'all');
  if (cached) {
    return cached;
  }

  // Create and queue the promise
  const fetchPromise = (async () => {
    const supabase = createClient();
    let query = supabase.from('threads').select('*');

    if (projectId) {
      query = query.eq('project_id', projectId);
    }

    const { data, error } = await query;
    if (error) throw error;

    // Cache the result
    apiCache.setThreads(projectId || 'all', data || []);
    return data || [];
  })();

  // Add to queue and return
  return fetchQueue.setQueuedThreads(projectId || 'all', fetchPromise);
};

export const getThread = async (threadId: string): Promise<Thread> => {
  const supabase = createClient();
  const { data, error } = await supabase
    .from('threads')
    .select('*')
    .eq('thread_id', threadId)
    .single();

  if (error) throw error;
  return data;
};

export const createThread = async (projectId: string): Promise<Thread> => {
  const supabase = createClient();

  // The user must be logged in to create a thread
  const { data: { user } } = await supabase.auth.getUser();
  if (!user) {
    throw new Error('You must be logged in to create a thread');
  }

  const { data, error } = await supabase
    .from('threads')
    .insert({
      project_id: projectId,
      account_id: user.id, // Use the current user's ID as the account ID
    })
    .select()
    .single();

  if (error) throw error;
  return data;
};

export const addUserMessage = async (threadId: string, content: string): Promise<void> => {
  const supabase = createClient();

  // Format the message in the shape the LLM expects - keep it simple with only required fields
  const message = {
    role: 'user',
    content: content
  };

  // Insert the message into the messages table
  const { error } = await supabase
    .from('messages')
    .insert({
      thread_id: threadId,
      type: 'user',
      is_llm_message: true,
      content: JSON.stringify(message)
    });

  if (error) {
    console.error('Error adding user message:', error);
    throw new Error(`Error adding message: ${error.message}`);
  }

  // Invalidate the cache for this thread's messages
  apiCache.invalidateThreadMessages(threadId);
};

export const getMessages = async (threadId: string): Promise<Message[]> => {
  // Check if we already have a pending request
  const pendingRequest = fetchQueue.getQueuedMessages(threadId);
  if (pendingRequest) {
    return pendingRequest;
  }

  // Check cache first
  const cached = apiCache.getThreadMessages(threadId);
  if (cached) {
    return cached;
  }

  // Create and queue the promise
  const fetchPromise = (async () => {
    const supabase = createClient();

    const { data, error } = await supabase
      .from('messages')
      .select('*')
      .eq('thread_id', threadId)
      .neq('type', 'cost')
      .order('created_at', { ascending: true });

    if (error) {
      console.error('Error fetching messages:', error);
      throw new Error(`Error getting messages: ${error.message}`);
    }

    // Process the messages into the expected format
    const messages = (data || []).map(msg => {
      try {
        // Parse the content from JSONB
        const content = typeof msg.content === 'string'
          ? JSON.parse(msg.content)
          : msg.content;

        // Return in the format the app expects
        return content;
      } catch (e) {
        console.error('Error parsing message content:', e, msg);
        // Fallback for malformed messages
        return {
          role: msg.is_llm_message ? 'assistant' : 'user',
          content: 'Error: Could not parse message content'
        };
      }
    });

    // Cache the result
    apiCache.setThreadMessages(threadId, messages);
    return messages;
  })();

  // Add to queue and return
  return fetchQueue.setQueuedMessages(threadId, fetchPromise);
};
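
// Illustrative sketch of the assumed data shapes (inferred from addUserMessage and
// getMessages above, not verified against the database schema): addUserMessage stores
// the LLM-facing payload as a JSON string in messages.content, and getMessages returns
// the parsed payloads.
//
//   // row written by addUserMessage:
//   // { thread_id, type: 'user', is_llm_message: true, content: '{"role":"user","content":"Hi"}' }
//   await addUserMessage(threadId, 'Hi');
//   const msgs = await getMessages(threadId); // => [{ role: 'user', content: 'Hi' }, ...]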
// Agent APIs
export const startAgent = async (threadId: string): Promise<{ agent_run_id: string }> => {
  try {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      throw new Error('No access token available');
    }

    // Check if the backend URL is configured
    if (!API_URL) {
      throw new Error('Backend URL is not configured. Set NEXT_PUBLIC_BACKEND_URL in your environment.');
    }

    console.log(`[API] Starting agent for thread ${threadId} using ${API_URL}/thread/${threadId}/agent/start`);

    const response = await fetch(`${API_URL}/thread/${threadId}/agent/start`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${session.access_token}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'No error details available');
      console.error(`[API] Error starting agent: ${response.status} ${response.statusText}`, errorText);
      throw new Error(`Error starting agent: ${response.statusText} (${response.status})`);
    }

    // Invalidate relevant caches
    apiCache.invalidateAgentRuns(threadId);
    apiCache.invalidateThreadMessages(threadId);

    return response.json();
  } catch (error) {
    console.error('[API] Failed to start agent:', error);

    // Provide a clearer error message for network errors
    if (error instanceof TypeError && error.message.includes('Failed to fetch')) {
      throw new Error('Cannot connect to backend server. Please check your internet connection and make sure the backend is running.');
    }

    throw error;
  }
};

export const stopAgent = async (agentRunId: string): Promise<void> => {
  const supabase = createClient();
  const { data: { session } } = await supabase.auth.getSession();

  if (!session?.access_token) {
    throw new Error('No access token available');
  }

  const response = await fetch(`${API_URL}/agent-run/${agentRunId}/stop`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${session.access_token}`,
    },
  });

  if (!response.ok) {
    throw new Error(`Error stopping agent: ${response.statusText}`);
  }
};

export const getAgentStatus = async (agentRunId: string): Promise<AgentRun> => {
  console.log(`[API] ⚠️ Requesting agent status for ${agentRunId}`);

  try {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      console.error('[API] ❌ No access token available for getAgentStatus');
      throw new Error('No access token available');
    }

    const url = `${API_URL}/agent-run/${agentRunId}`;
    console.log(`[API] 🔍 Fetching from: ${url}`);

    const response = await fetch(url, {
      headers: {
        'Authorization': `Bearer ${session.access_token}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'No error details available');
      console.error(`[API] ❌ Error getting agent status: ${response.status} ${response.statusText}`, errorText);
      throw new Error(`Error getting agent status: ${response.statusText} (${response.status})`);
    }

    const data = await response.json();
    console.log('[API] ✅ Successfully got agent status:', data);
    return data;
  } catch (error) {
    console.error('[API] ❌ Failed to get agent status:', error);
    throw error;
  }
};

export const getAgentRuns = async (threadId: string): Promise<AgentRun[]> => {
  // Check if we already have a pending request for this thread ID
  const pendingRequest = fetchQueue.getQueuedAgentRuns(threadId);
  if (pendingRequest) {
    return pendingRequest;
  }

  // Check cache first
  const cached = apiCache.getAgentRuns(threadId);
  if (cached) {
    return cached;
  }

  // Create and queue the promise to prevent duplicate requests
  const fetchPromise = (async () => {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      throw new Error('No access token available');
    }

    const response = await fetch(`${API_URL}/thread/${threadId}/agent-runs`, {
      headers: {
        'Authorization': `Bearer ${session.access_token}`,
      },
    });

    if (!response.ok) {
      throw new Error(`Error getting agent runs: ${response.statusText}`);
    }

    const data = await response.json();
    const agentRuns = data.agent_runs || [];

    // Cache the result
    apiCache.setAgentRuns(threadId, agentRuns);
    return agentRuns;
  })();

  // Add to queue and return
  return fetchQueue.setQueuedAgentRuns(threadId, fetchPromise);
};

export const streamAgent = (agentRunId: string, callbacks: {
  onMessage: (content: string) => void;
  onError: (error: Error | string) => void;
  onClose: () => void;
}): () => void => {
  let eventSourceInstance: EventSource | null = null;
  let isClosing = false;

  console.log(`[STREAM] Setting up stream for agent run ${agentRunId}`);

  const setupStream = async () => {
    try {
      if (isClosing) {
        console.log(`[STREAM] Already closing, not setting up stream for ${agentRunId}`);
        return;
      }

      const supabase = createClient();
      const { data: { session } } = await supabase.auth.getSession();

      if (!session?.access_token) {
        console.error('[STREAM] No auth token available');
        callbacks.onError('Authentication required');
        callbacks.onClose();
        return;
      }

      const url = new URL(`${API_URL}/agent-run/${agentRunId}/stream`);
      url.searchParams.append('token', session.access_token);

      console.log(`[STREAM] Creating EventSource for ${agentRunId}`);
      eventSourceInstance = new EventSource(url.toString());

      eventSourceInstance.onopen = () => {
        console.log(`[STREAM] Connection opened for ${agentRunId}`);
      };

      eventSourceInstance.onmessage = (event) => {
        try {
          const rawData = event.data;
          if (rawData.includes('"type":"ping"')) return;

          // Skip empty messages
          if (!rawData || rawData.trim() === '') return;

          // Log raw data for debugging
          console.log(`[STREAM] Received data: ${rawData.substring(0, 100)}${rawData.length > 100 ? '...' : ''}`);

          let jsonData;
          try {
            jsonData = JSON.parse(rawData);
          } catch (parseError) {
            console.error('[STREAM] Failed to parse message:', parseError);
            return;
          }

          // Handle stream errors and failures first
          if (jsonData.status === 'error' || (jsonData.type === 'status' && jsonData.status === 'failed')) {
            // Get a clean string version of any error message
            const errorMessage = typeof jsonData.message === 'object'
              ? JSON.stringify(jsonData.message)
              : String(jsonData.message || 'Stream failed');

            // Only log to console if it's an unexpected error (not a known API error response)
            if (jsonData.status !== 'error') {
              console.error(`[STREAM] Stream error for ${agentRunId}:`, errorMessage);
            }

            // Ensure we close the stream and prevent reconnection
            if (!isClosing) {
              isClosing = true;
              if (eventSourceInstance) {
                eventSourceInstance.close();
                eventSourceInstance = null;
              }
              callbacks.onError(errorMessage);
              callbacks.onClose();
            }
            return;
          }

          // Handle completion status
          if (jsonData.type === 'status' && jsonData.status === 'completed') {
            console.log(`[STREAM] Completion message received for ${agentRunId}`);

            if (!isClosing) {
              isClosing = true;
              callbacks.onMessage(rawData);
              if (eventSourceInstance) {
                eventSourceInstance.close();
                eventSourceInstance = null;
              }
              callbacks.onClose();
            }
            return;
          }

          // Pass other messages through normally
          if (!isClosing) {
            callbacks.onMessage(rawData);
          }
        } catch (error) {
          console.error('[STREAM] Error in message handler:', error);
          if (!isClosing) {
            isClosing = true;
            if (eventSourceInstance) {
              eventSourceInstance.close();
              eventSourceInstance = null;
            }
            callbacks.onError(error instanceof Error ? error.message : 'Stream processing error');
            callbacks.onClose();
          }
        }
      };

      eventSourceInstance.onerror = (event) => {
        // Log the raw event for debugging
        console.log(`[STREAM] 🔍 EventSource onerror triggered for ${agentRunId}`, event);

        // For clean closures (manual or completed), we don't need to log an error
        if (isClosing) {
          console.log(`[STREAM] EventSource closed as expected for ${agentRunId}`);
          return;
        }

        // Only log as an error for unexpected closures
        console.error(`[STREAM] EventSource connection error/closed unexpectedly for ${agentRunId}`);
        console.log(`[STREAM] Handling unexpected connection close for ${agentRunId}`);

        // Close the connection
        if (eventSourceInstance) {
          eventSourceInstance.close();
          eventSourceInstance = null;
        }

        // Then notify error and close (once)
        isClosing = true;
        callbacks.onError(new Error('Stream connection closed unexpectedly.'));
        callbacks.onClose();
      };
    } catch (error) {
      console.error('[STREAM] Error setting up stream:', error);
      if (!isClosing) {
        isClosing = true;
        callbacks.onError(error instanceof Error ? error : String(error));
        callbacks.onClose();
      }
    }
  };

  // Set up the stream once
  setupStream();

  // Return cleanup function
  return () => {
    console.log(`[STREAM] Manual cleanup called for ${agentRunId}`);

    if (isClosing) {
      console.log(`[STREAM] Already closing, ignoring duplicate cleanup for ${agentRunId}`);
      return;
    }

    isClosing = true;
    if (eventSourceInstance) {
      console.log(`[STREAM] Manually closing EventSource for ${agentRunId}`);
      eventSourceInstance.close();
      eventSourceInstance = null;
    }
  };
};
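
// Illustrative sketch of the intended call flow (hypothetical caller, e.g. a React
// effect). It assumes the caller keeps the returned cleanup function and invokes it
// on unmount; appendToUi, showError, and refreshMessages are hypothetical helpers.
//
//   await addUserMessage(threadId, userInput);
//   const { agent_run_id } = await startAgent(threadId);
//   const stopStreaming = streamAgent(agent_run_id, {
//     onMessage: (raw) => appendToUi(JSON.parse(raw)),
//     onError: (err) => showError(err),
//     onClose: () => refreshMessages(threadId),
//   });
//   // later, e.g. in the effect's cleanup:
//   stopStreaming();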
// Sandbox API Functions
export const createSandboxFile = async (sandboxId: string, filePath: string, content: string): Promise<void> => {
  try {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      throw new Error('No access token available');
    }

    // Heuristic: treat the content as binary/base64 if it contains non-printable
    // characters, is a data URL, or looks like a bare base64 string
    const isProbablyBinary = /[\x00-\x08\x0E-\x1F\x80-\xFF]/.test(content) ||
      content.startsWith('data:') ||
      /^[A-Za-z0-9+/]*={0,2}$/.test(content);

    const response = await fetch(`${API_URL}/sandboxes/${sandboxId}/files`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${session.access_token}`,
      },
      body: JSON.stringify({
        path: filePath,
        content: content,
        is_base64: isProbablyBinary
      }),
    });

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'No error details available');
      console.error(`Error creating sandbox file: ${response.status} ${response.statusText}`, errorText);
      throw new Error(`Error creating sandbox file: ${response.statusText} (${response.status})`);
    }

    return response.json();
  } catch (error) {
    console.error('Failed to create sandbox file:', error);
    throw error;
  }
};

export interface FileInfo {
  name: string;
  path: string;
  is_dir: boolean;
  size: number;
  mod_time: string;
  permissions?: string;
}

export const listSandboxFiles = async (sandboxId: string, path: string): Promise<FileInfo[]> => {
  try {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      throw new Error('No access token available');
    }

    const url = new URL(`${API_URL}/sandboxes/${sandboxId}/files`);
    url.searchParams.append('path', path);

    const response = await fetch(url.toString(), {
      headers: {
        'Authorization': `Bearer ${session.access_token}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'No error details available');
      console.error(`Error listing sandbox files: ${response.status} ${response.statusText}`, errorText);
      throw new Error(`Error listing sandbox files: ${response.statusText} (${response.status})`);
    }

    const data = await response.json();
    return data.files || [];
  } catch (error) {
    console.error('Failed to list sandbox files:', error);
    throw error;
  }
};

export const getSandboxFileContent = async (sandboxId: string, path: string): Promise<string | Blob> => {
  try {
    const supabase = createClient();
    const { data: { session } } = await supabase.auth.getSession();

    if (!session?.access_token) {
      throw new Error('No access token available');
    }

    const url = new URL(`${API_URL}/sandboxes/${sandboxId}/files/content`);
    url.searchParams.append('path', path);

    const response = await fetch(url.toString(), {
      headers: {
        'Authorization': `Bearer ${session.access_token}`,
      },
    });

    if (!response.ok) {
      const errorText = await response.text().catch(() => 'No error details available');
      console.error(`Error getting sandbox file content: ${response.status} ${response.statusText}`, errorText);
      throw new Error(`Error getting sandbox file content: ${response.statusText} (${response.status})`);
    }

    // Check if it's a text file or a binary file based on the content-type header
    const contentType = response.headers.get('content-type');
    if (contentType && (contentType.includes('text') || contentType.includes('application/json'))) {
      return await response.text();
    } else {
      return await response.blob();
    }
  } catch (error) {
    console.error('Failed to get sandbox file content:', error);
    throw error;
  }
};

export const generateThreadName = async (message: string): Promise<string> => {
  try {
    // Default name in case the API fails
    const defaultName = message.trim().length > 50
      ? message.trim().substring(0, 47) + "..."
      : message.trim();

    // OpenAI API key should be stored in an environment variable
    const apiKey = process.env.NEXT_PUBLIC_OPENAI_API_KEY;
    if (!apiKey) {
      console.error('OpenAI API key not found');
      return defaultName;
    }

    const response = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${apiKey}`
      },
      body: JSON.stringify({
        model: 'gpt-4o-mini',
        messages: [
          {
            role: 'system',
            content: 'You are a helpful assistant that generates extremely concise titles (2-4 words maximum) for chat threads based on the user\'s message. Respond with only the title, no other text or punctuation.'
          },
          {
            role: 'user',
            content: `Generate an extremely brief title (2-4 words only) for a chat thread that starts with this message: "${message}"`
          }
        ],
        max_tokens: 20,
        temperature: 0.7
      })
    });

    if (!response.ok) {
      const errorData = await response.text();
      console.error('OpenAI API error:', errorData);
      return defaultName;
    }

    const data = await response.json();
    const generatedName = data.choices[0]?.message?.content?.trim();

    // Return the generated name, or the default if empty
    return generatedName || defaultName;
  } catch (error) {
    console.error('Error generating thread name:', error);
    // Fall back to using a truncated version of the message
    return message.trim().length > 50
      ? message.trim().substring(0, 47) + "..."
      : message.trim();
  }
};