Merge pull request #1097 from kortix-ai/cleanup

Cleanup wip
Marko Kraemer, 2025-07-27 01:47:16 +02:00 (committed by GitHub)
commit 76968e5044
13 changed files with 168 additions and 356 deletions

View File

@@ -291,19 +291,17 @@ async def start_agent(
client = await db.client
await verify_thread_access(client, thread_id, user_id)
- thread_result = await client.table('threads').select('project_id', 'account_id', 'agent_id', 'metadata').eq('thread_id', thread_id).execute()
+ thread_result = await client.table('threads').select('project_id', 'account_id', 'metadata').eq('thread_id', thread_id).execute()
if not thread_result.data:
raise HTTPException(status_code=404, detail="Thread not found")
thread_data = thread_result.data[0]
project_id = thread_data.get('project_id')
account_id = thread_data.get('account_id')
- thread_agent_id = thread_data.get('agent_id')
thread_metadata = thread_data.get('metadata', {})
structlog.contextvars.bind_contextvars(
project_id=project_id,
account_id=account_id,
- thread_agent_id=thread_agent_id,
thread_metadata=thread_metadata,
)
@@ -316,11 +314,10 @@ async def start_agent(
# Load agent configuration with version support
agent_config = None
- effective_agent_id = body.agent_id or thread_agent_id # Use provided agent_id or the one stored in thread
+ effective_agent_id = body.agent_id # Optional agent ID from request
logger.info(f"[AGENT LOAD] Agent loading flow:")
logger.info(f" - body.agent_id: {body.agent_id}")
- logger.info(f" - thread_agent_id: {thread_agent_id}")
logger.info(f" - effective_agent_id: {effective_agent_id}")
if effective_agent_id:
@@ -359,7 +356,7 @@ async def start_agent(
logger.info(f"Using agent {agent_config['name']} ({effective_agent_id}) version {agent_config.get('version_name', 'v1')}")
else:
logger.info(f"Using agent {agent_config['name']} ({effective_agent_id}) - no version data")
- source = "request" if body.agent_id else "thread"
+ source = "request" if body.agent_id else "fallback"
else:
logger.info(f"[AGENT LOAD] No effective_agent_id, will try default agent")
@@ -399,8 +396,6 @@ async def start_agent(
logger.info(f"[AGENT LOAD] Final agent_config: {agent_config is not None}")
if agent_config:
logger.info(f"[AGENT LOAD] Agent config keys: {list(agent_config.keys())}")
- if body.agent_id and body.agent_id != thread_agent_id and agent_config:
logger.info(f"Using agent {agent_config['agent_id']} for this agent run (thread remains agent-agnostic)")
can_use, model_message, allowed_models = await can_use_model(client, account_id, model_name)
@@ -516,8 +511,8 @@ async def get_agent_run(agent_run_id: str, user_id: str = Depends(get_current_us
@router.get("/thread/{thread_id}/agent", response_model=ThreadAgentResponse)
async def get_thread_agent(thread_id: str, user_id: str = Depends(get_current_user_id_from_jwt)):
- """Get the agent details for a specific thread. Since threads are now agent-agnostic,
- this returns the most recently used agent or the default agent."""
+ """Get the agent details for a specific thread. Since threads are fully agent-agnostic,
+ this returns the most recently used agent from agent_runs only."""
structlog.contextvars.bind_contextvars(
thread_id=thread_id,
)
@@ -525,21 +520,20 @@ async def get_thread_agent(thread_id: str, user_id: str = Depends(get_current_us
client = await db.client
try:
- # Verify thread access and get thread data including agent_id
+ # Verify thread access and get thread data
await verify_thread_access(client, thread_id, user_id)
- thread_result = await client.table('threads').select('agent_id', 'account_id').eq('thread_id', thread_id).execute()
+ thread_result = await client.table('threads').select('account_id').eq('thread_id', thread_id).execute()
if not thread_result.data:
raise HTTPException(status_code=404, detail="Thread not found")
thread_data = thread_result.data[0]
- thread_agent_id = thread_data.get('agent_id')
account_id = thread_data.get('account_id')
effective_agent_id = None
agent_source = "none"
- # First, try to get the most recently used agent from agent_runs
+ # Get the most recently used agent from agent_runs
recent_agent_result = await client.table('agent_runs').select('agent_id', 'agent_version_id').eq('thread_id', thread_id).not_.is_('agent_id', 'null').order('created_at', desc=True).limit(1).execute()
if recent_agent_result.data:
effective_agent_id = recent_agent_result.data[0]['agent_id']
@@ -547,26 +541,12 @@ async def get_thread_agent(thread_id: str, user_id: str = Depends(get_current_us
agent_source = "recent"
logger.info(f"Found most recently used agent: {effective_agent_id} (version: {recent_version_id})")
- # If no recent agent, fall back to thread default agent
- elif thread_agent_id:
- effective_agent_id = thread_agent_id
- agent_source = "thread"
- logger.info(f"Using thread default agent: {effective_agent_id}")
- # If no thread agent, try to get the default agent for the account
- else:
- default_agent_result = await client.table('agents').select('agent_id').eq('account_id', account_id).eq('is_default', True).execute()
- if default_agent_result.data:
- effective_agent_id = default_agent_result.data[0]['agent_id']
- agent_source = "default"
- logger.info(f"Using account default agent: {effective_agent_id}")
- # If still no agent found
+ # If no agent found in agent_runs
if not effective_agent_id:
return {
"agent": None,
"source": "none",
- "message": "No agent configured for this thread. Threads are agent-agnostic - you can select any agent."
+ "message": "No agent has been used in this thread yet. Threads are agent-agnostic - use /agent/start to select an agent."
}
# Fetch the agent details
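For reference, a minimal client-side sketch (Python, httpx) of calling this endpoint. The backend base URL, the /api mount point, and the bearer-JWT header are assumptions; only the /thread/{thread_id}/agent route and the agent/source/message fields are visible in this diff.

import httpx

BACKEND_URL = "http://localhost:8000/api"  # assumed mount point, e.g. NEXT_PUBLIC_BACKEND_URL
USER_JWT = "<jwt>"  # the endpoint authenticates via get_current_user_id_from_jwt

def fetch_thread_agent(thread_id: str) -> dict:
    # Resolves the thread's agent from agent_runs only (threads no longer store agent_id)
    resp = httpx.get(
        f"{BACKEND_URL}/thread/{thread_id}/agent",
        headers={"Authorization": f"Bearer {USER_JWT}"},
    )
    resp.raise_for_status()
    data = resp.json()
    if data.get("source") == "none":
        print(data.get("message"))  # no agent used yet; pass agent_id when starting the next run
    return data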

View File

@@ -126,60 +126,6 @@ async def get_thread_knowledge_base(
logger.error(f"Error getting knowledge base for thread {thread_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to retrieve knowledge base")
- @router.post("/threads/{thread_id}", response_model=KnowledgeBaseEntryResponse)
- async def create_knowledge_base_entry(
- thread_id: str,
- entry_data: CreateKnowledgeBaseEntryRequest,
- user_id: str = Depends(get_current_user_id_from_jwt)
- ):
- if not await is_enabled("knowledge_base"):
- raise HTTPException(
- status_code=403,
- detail="This feature is not available at the moment."
- )
- """Create a new knowledge base entry for a thread"""
- try:
- client = await db.client
- thread_result = await client.table('threads').select('account_id').eq('thread_id', thread_id).execute()
- if not thread_result.data:
- raise HTTPException(status_code=404, detail="Thread not found")
- account_id = thread_result.data[0]['account_id']
- insert_data = {
- 'thread_id': thread_id,
- 'account_id': account_id,
- 'name': entry_data.name,
- 'description': entry_data.description,
- 'content': entry_data.content,
- 'usage_context': entry_data.usage_context
- }
- result = await client.table('knowledge_base_entries').insert(insert_data).execute()
- if not result.data:
- raise HTTPException(status_code=500, detail="Failed to create knowledge base entry")
- created_entry = result.data[0]
- return KnowledgeBaseEntryResponse(
- entry_id=created_entry['entry_id'],
- name=created_entry['name'],
- description=created_entry['description'],
- content=created_entry['content'],
- usage_context=created_entry['usage_context'],
- is_active=created_entry['is_active'],
- content_tokens=created_entry.get('content_tokens'),
- created_at=created_entry['created_at'],
- updated_at=created_entry['updated_at']
- )
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error creating knowledge base entry for thread {thread_id}: {str(e)}")
- raise HTTPException(status_code=500, detail="Failed to create knowledge base entry")
@router.get("/agents/{agent_id}", response_model=KnowledgeBaseListResponse)
async def get_agent_knowledge_base(
@@ -495,155 +441,6 @@ async def get_agent_knowledge_base_context(
logger.error(f"Error getting knowledge base context for agent {agent_id}: {str(e)}")
raise HTTPException(status_code=500, detail="Failed to retrieve agent knowledge base context")
- @router.put("/{entry_id}", response_model=KnowledgeBaseEntryResponse)
- async def update_knowledge_base_entry(
- entry_id: str,
- entry_data: UpdateKnowledgeBaseEntryRequest,
- user_id: str = Depends(get_current_user_id_from_jwt)
- ):
- if not await is_enabled("knowledge_base"):
- raise HTTPException(
- status_code=403,
- detail="This feature is not available at the moment."
- )
- """Update a knowledge base entry (works for both thread and agent entries)"""
- try:
- client = await db.client
- entry_result = await client.table('knowledge_base_entries').select('*').eq('entry_id', entry_id).execute()
- table_name = 'knowledge_base_entries'
- if not entry_result.data:
- entry_result = await client.table('agent_knowledge_base_entries').select('*').eq('entry_id', entry_id).execute()
- table_name = 'agent_knowledge_base_entries'
- if not entry_result.data:
- raise HTTPException(status_code=404, detail="Knowledge base entry not found")
- update_data = {}
- if entry_data.name is not None:
- update_data['name'] = entry_data.name
- if entry_data.description is not None:
- update_data['description'] = entry_data.description
- if entry_data.content is not None:
- update_data['content'] = entry_data.content
- if entry_data.usage_context is not None:
- update_data['usage_context'] = entry_data.usage_context
- if entry_data.is_active is not None:
- update_data['is_active'] = entry_data.is_active
- if not update_data:
- raise HTTPException(status_code=400, detail="No fields to update")
- result = await client.table(table_name).update(update_data).eq('entry_id', entry_id).execute()
- if not result.data:
- raise HTTPException(status_code=500, detail="Failed to update knowledge base entry")
- updated_entry = result.data[0]
- return KnowledgeBaseEntryResponse(
- entry_id=updated_entry['entry_id'],
- name=updated_entry['name'],
- description=updated_entry['description'],
- content=updated_entry['content'],
- usage_context=updated_entry['usage_context'],
- is_active=updated_entry['is_active'],
- content_tokens=updated_entry.get('content_tokens'),
- created_at=updated_entry['created_at'],
- updated_at=updated_entry['updated_at'],
- source_type=updated_entry.get('source_type'),
- source_metadata=updated_entry.get('source_metadata'),
- file_size=updated_entry.get('file_size'),
- file_mime_type=updated_entry.get('file_mime_type')
- )
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error updating knowledge base entry {entry_id}: {str(e)}")
- raise HTTPException(status_code=500, detail="Failed to update knowledge base entry")
- @router.delete("/{entry_id}")
- async def delete_knowledge_base_entry(
- entry_id: str,
- user_id: str = Depends(get_current_user_id_from_jwt)
- ):
- if not await is_enabled("knowledge_base"):
- raise HTTPException(
- status_code=403,
- detail="This feature is not available at the moment."
- )
- """Delete a knowledge base entry (works for both thread and agent entries)"""
- try:
- client = await db.client
- entry_result = await client.table('knowledge_base_entries').select('entry_id').eq('entry_id', entry_id).execute()
- table_name = 'knowledge_base_entries'
- if not entry_result.data:
- entry_result = await client.table('agent_knowledge_base_entries').select('entry_id').eq('entry_id', entry_id).execute()
- table_name = 'agent_knowledge_base_entries'
- if not entry_result.data:
- raise HTTPException(status_code=404, detail="Knowledge base entry not found")
- result = await client.table(table_name).delete().eq('entry_id', entry_id).execute()
- return {"message": "Knowledge base entry deleted successfully"}
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error deleting knowledge base entry {entry_id}: {str(e)}")
- raise HTTPException(status_code=500, detail="Failed to delete knowledge base entry")
- @router.get("/{entry_id}", response_model=KnowledgeBaseEntryResponse)
- async def get_knowledge_base_entry(
- entry_id: str,
- user_id: str = Depends(get_current_user_id_from_jwt)
- ):
- if not await is_enabled("knowledge_base"):
- raise HTTPException(
- status_code=403,
- detail="This feature is not available at the moment."
- )
- """Get a specific knowledge base entry (works for both thread and agent entries)"""
- try:
- client = await db.client
- result = await client.table('knowledge_base_entries').select('*').eq('entry_id', entry_id).execute()
- if not result.data:
- result = await client.table('agent_knowledge_base_entries').select('*').eq('entry_id', entry_id).execute()
- if not result.data:
- raise HTTPException(status_code=404, detail="Knowledge base entry not found")
- entry = result.data[0]
- return KnowledgeBaseEntryResponse(
- entry_id=entry['entry_id'],
- name=entry['name'],
- description=entry['description'],
- content=entry['content'],
- usage_context=entry['usage_context'],
- is_active=entry['is_active'],
- content_tokens=entry.get('content_tokens'),
- created_at=entry['created_at'],
- updated_at=entry['updated_at'],
- source_type=entry.get('source_type'),
- source_metadata=entry.get('source_metadata'),
- file_size=entry.get('file_size'),
- file_mime_type=entry.get('file_mime_type')
- )
- except HTTPException:
- raise
- except Exception as e:
- logger.error(f"Error getting knowledge base entry {entry_id}: {str(e)}")
- raise HTTPException(status_code=500, detail="Failed to retrieve knowledge base entry")
@router.get("/threads/{thread_id}/context")
async def get_knowledge_base_context(

View File

@@ -0,0 +1,15 @@
-- Migration: Remove recordings, devices tables and responses field from agent_runs
-- This migration cleans up unused tables and fields
BEGIN;
-- Drop recordings table first (has foreign key to devices)
DROP TABLE IF EXISTS public.recordings CASCADE;
-- Drop devices table
DROP TABLE IF EXISTS public.devices CASCADE;
-- Remove responses column from agent_runs table
ALTER TABLE agent_runs DROP COLUMN IF EXISTS responses;
COMMIT;

View File

@@ -0,0 +1,30 @@
BEGIN;
-- Remove old workflow execution and step tables
-- These are no longer needed since steps are now stored as JSON in agent_workflows.steps
-- and executions can be tracked differently if needed
-- Drop workflow step executions first (has foreign keys to other tables)
DROP TABLE IF EXISTS workflow_step_executions CASCADE;
-- Drop workflow executions
DROP TABLE IF EXISTS workflow_executions CASCADE;
-- Drop workflow steps
DROP TABLE IF EXISTS workflow_steps CASCADE;
-- Drop the related enum types that are no longer needed
DROP TYPE IF EXISTS workflow_step_type CASCADE;
DROP TYPE IF EXISTS workflow_execution_status CASCADE;
-- Clean up any related indexes that might still exist
DROP INDEX IF EXISTS idx_workflow_steps_workflow_id CASCADE;
DROP INDEX IF EXISTS idx_workflow_steps_order CASCADE;
DROP INDEX IF EXISTS idx_workflow_executions_workflow_id CASCADE;
DROP INDEX IF EXISTS idx_workflow_executions_agent_id CASCADE;
DROP INDEX IF EXISTS idx_workflow_executions_status CASCADE;
DROP INDEX IF EXISTS idx_workflow_executions_started_at CASCADE;
DROP INDEX IF EXISTS idx_workflow_step_executions_execution_id CASCADE;
DROP INDEX IF EXISTS idx_workflow_step_executions_step_id CASCADE;
COMMIT;
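Since workflow steps now live as JSON on agent_workflows.steps, here is a rough Python sketch of loading them with the same Supabase-style client the API code uses. The exact JSON shape is an assumption inferred from the step fields referenced elsewhere in this PR (name, description, type, config, conditions, order).

async def load_workflow_steps(client, workflow_id: str) -> list:
    # Steps are read straight from agent_workflows.steps instead of the dropped workflow_steps table
    result = await client.table('agent_workflows').select('steps').eq('id', workflow_id).execute()
    if not result.data or not result.data[0].get('steps'):
        return []
    # Each entry is expected to look roughly like:
    # {"name": "...", "description": None, "type": "...", "config": {}, "conditions": None, "order": 1}
    return result.data[0]['steps']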

View File

@@ -0,0 +1,6 @@
-- Remove unused tables from schema cleanup
-- Drop dependent tables first, then main tables with CASCADE to handle any remaining dependencies
DROP TABLE IF EXISTS knowledge_base_usage_log CASCADE;
DROP TABLE IF EXISTS knowledge_base_entries CASCADE;
DROP TABLE IF EXISTS trigger_events CASCADE;
DROP TABLE IF EXISTS custom_trigger_providers CASCADE;

View File

@@ -0,0 +1,19 @@
-- Migration: Remove agent_id and agent_version_id from threads table
-- This makes threads truly agent-agnostic - agent selection is handled only through agent/initiate and agent/start endpoints
BEGIN;
-- Drop indexes first
DROP INDEX IF EXISTS idx_threads_agent_id;
DROP INDEX IF EXISTS idx_threads_agent_version;
-- Remove agent_id column from threads table
ALTER TABLE threads DROP COLUMN IF EXISTS agent_id;
-- Remove agent_version_id column from threads table
ALTER TABLE threads DROP COLUMN IF EXISTS agent_version_id;
-- Update comment to reflect that threads are now completely agent-agnostic
COMMENT ON TABLE threads IS 'Conversation threads - completely agent-agnostic. Agent selection handled via /agent/initiate and /agent/start endpoints.';
COMMIT;
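With the thread columns gone, a thread's current agent is resolved per run. A minimal sketch mirroring the updated get_thread_agent query (assumes the same async Supabase-style client as the API code):

async def resolve_thread_agent(client, thread_id: str):
    # The most recently used agent, if any, now comes from agent_runs only
    result = await client.table('agent_runs') \
        .select('agent_id', 'agent_version_id') \
        .eq('thread_id', thread_id) \
        .not_.is_('agent_id', 'null') \
        .order('created_at', desc=True) \
        .limit(1) \
        .execute()
    if result.data:
        return result.data[0]['agent_id'], result.data[0].get('agent_version_id')
    return None, None  # caller selects an agent via agent/initiate or agent/start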

View File

@@ -0,0 +1,31 @@
-- Migration: Move agent_id and agent_version_id from dedicated columns to metadata
-- This improves storage efficiency by only storing agent info for assistant messages where it's relevant
BEGIN;
-- Step 1: Update existing messages to move agent info to metadata
-- Only update messages that have agent_id or agent_version_id set
UPDATE messages
SET metadata = jsonb_set(
jsonb_set(
COALESCE(metadata, '{}'::jsonb),
'{agent_id}',
to_jsonb(agent_id)
),
'{agent_version_id}',
to_jsonb(agent_version_id)
)
WHERE agent_id IS NOT NULL OR agent_version_id IS NOT NULL;
-- Step 2: Drop indexes on the columns we're about to remove
DROP INDEX IF EXISTS idx_messages_agent_id;
DROP INDEX IF EXISTS idx_messages_agent_version_id;
-- Step 3: Drop the dedicated agent columns
ALTER TABLE messages DROP COLUMN IF EXISTS agent_id;
ALTER TABLE messages DROP COLUMN IF EXISTS agent_version_id;
-- Step 4: Add comment explaining the new structure
COMMENT ON COLUMN messages.metadata IS 'JSONB metadata including agent_id and agent_version_id for assistant messages, and other message-specific data';
COMMIT;
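After this migration, agent attribution for assistant messages lives in the metadata JSONB. A minimal read-side sketch (assumes the same client; the message_id column name is an assumption, as it does not appear in this diff):

async def get_message_agent_info(client, message_id: str):
    result = await client.table('messages').select('metadata').eq('message_id', message_id).execute()
    if not result.data:
        return None, None
    metadata = result.data[0].get('metadata') or {}
    # agent_id / agent_version_id are only set for assistant messages
    return metadata.get('agent_id'), metadata.get('agent_version_id')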

View File

@@ -0,0 +1,36 @@
-- Reverse Migration: Move agent_id and agent_version_id back from metadata to dedicated columns
-- This reverses the changes made in 20250726223759_move_agent_fields_to_metadata.sql
BEGIN;
-- Step 1: Add back the dedicated agent columns with proper foreign key constraints
ALTER TABLE messages ADD COLUMN IF NOT EXISTS agent_id UUID REFERENCES agents(agent_id) ON DELETE SET NULL;
ALTER TABLE messages ADD COLUMN IF NOT EXISTS agent_version_id UUID REFERENCES agent_versions(version_id) ON DELETE SET NULL;
-- Step 2: Extract agent info from metadata and populate the dedicated columns
-- Only update messages that have agent info in metadata
UPDATE messages
SET
agent_id = CASE
WHEN metadata ? 'agent_id' THEN (metadata->>'agent_id')::UUID
ELSE NULL
END,
agent_version_id = CASE
WHEN metadata ? 'agent_version_id' THEN (metadata->>'agent_version_id')::UUID
ELSE NULL
END
WHERE metadata ? 'agent_id' OR metadata ? 'agent_version_id';
-- Step 3: Remove agent fields from metadata
UPDATE messages
SET metadata = metadata - 'agent_id' - 'agent_version_id'
WHERE metadata ? 'agent_id' OR metadata ? 'agent_version_id';
-- Step 4: Recreate the indexes on the agent columns
CREATE INDEX IF NOT EXISTS idx_messages_agent_id ON messages(agent_id);
CREATE INDEX IF NOT EXISTS idx_messages_agent_version_id ON messages(agent_version_id);
-- Step 5: Update the comment to reflect the original structure
COMMENT ON COLUMN messages.metadata IS 'JSONB metadata for message-specific data (agent info stored in dedicated columns)';
COMMIT;

View File

@@ -68,20 +68,7 @@ class WorkflowResponse(BaseModel):
created_at: str
updated_at: str
- class WorkflowExecutionResponse(BaseModel):
- id: str
- workflow_id: str
- agent_id: str
- thread_id: Optional[str]
- status: str
- started_at: str
- completed_at: Optional[str]
- duration_seconds: Optional[float]
- triggered_by: str
- input_data: Optional[Dict[str, Any]]
- output_data: Optional[Dict[str, Any]]
- error_message: Optional[str]
- created_at: str
class WorkflowExecuteRequest(BaseModel):
input_data: Optional[Dict[str, Any]] = None
@@ -159,20 +146,6 @@ async def get_agent_workflows(
steps = []
if workflow_data.get('steps'):
steps = convert_json_to_steps(workflow_data['steps'])
- else:
- workflow_steps_result = await client.table('workflow_steps').select('*').eq('workflow_id', workflow_data['id']).order('step_order').execute()
- for step_data in workflow_steps_result.data:
- steps.append(WorkflowStepResponse(
- id=step_data['id'],
- name=step_data['name'],
- description=step_data.get('description'),
- type=step_data['type'],
- config=step_data.get('config', {}),
- conditions=step_data.get('conditions'),
- order=step_data['step_order'],
- created_at=step_data['created_at'],
- updated_at=step_data['updated_at']
- ))
workflows.append(WorkflowResponse(
id=workflow_data['id'],
@@ -264,8 +237,6 @@ async def update_agent_workflow(
if workflow_data.steps is not None:
steps_json = convert_steps_to_json(workflow_data.steps)
update_data['steps'] = steps_json
- await client.table('workflow_steps').delete().eq('workflow_id', workflow_id).execute()
if update_data:
await client.table('agent_workflows').update(update_data).eq('id', workflow_id).execute()
@@ -276,20 +247,6 @@ async def update_agent_workflow(
steps = []
if workflow_data.get('steps'):
steps = convert_json_to_steps(workflow_data['steps'])
- else:
- workflow_steps_result = await client.table('workflow_steps').select('*').eq('workflow_id', workflow_id).order('step_order').execute()
- for step_data in workflow_steps_result.data:
- steps.append(WorkflowStepResponse(
- id=step_data['id'],
- name=step_data['name'],
- description=step_data.get('description'),
- type=step_data['type'],
- config=step_data.get('config', {}),
- conditions=step_data.get('conditions'),
- order=step_data['step_order'],
- created_at=step_data['created_at'],
- updated_at=step_data['updated_at']
- ))
return WorkflowResponse(
id=workflow_data['id'],
@@ -409,37 +366,7 @@ async def execute_agent_workflow(
}
)
- @router.get("/agents/{agent_id}/workflows/{workflow_id}/executions")
- async def get_workflow_executions(
- agent_id: str,
- workflow_id: str,
- user_id: str = Depends(get_current_user_id_from_jwt),
- limit: int = Query(20, ge=1, le=100)
- ):
- db = await get_db_connection()
- client = await db.client
- executions_result = await client.table('workflow_executions').select('*').eq('workflow_id', workflow_id).order('created_at', desc=True).limit(limit).execute()
- executions = []
- for execution_data in executions_result.data:
- executions.append(WorkflowExecutionResponse(
- id=execution_data['id'],
- workflow_id=execution_data['workflow_id'],
- agent_id=execution_data['agent_id'],
- thread_id=execution_data.get('thread_id'),
- status=execution_data['status'],
- started_at=execution_data['started_at'],
- completed_at=execution_data.get('completed_at'),
- duration_seconds=execution_data.get('duration_seconds'),
- triggered_by=execution_data['triggered_by'],
- input_data=execution_data.get('input_data'),
- output_data=execution_data.get('output_data'),
- error_message=execution_data.get('error_message'),
- created_at=execution_data['created_at']
- ))
- return executions
@router.post("/agents/{agent_id}/workflows/{workflow_id}/webhook")
async def trigger_workflow_webhook(

View File

@@ -400,9 +400,6 @@ class WorkflowExecutor:
await self._validate_workflow_execution(account_id)
await self._session_manager.start_sandbox(project_id)
- execution_id = await self._create_workflow_execution_record(
- workflow_id, agent_id, thread_id, workflow_input, trigger_result
- )
await self._create_workflow_message(thread_id, workflow_config, workflow_input)
agent_run_id = await self._start_workflow_agent_execution(
@@ -411,7 +408,6 @@ class WorkflowExecutor:
return {
"success": True,
- "execution_id": execution_id,
"thread_id": thread_id,
"agent_run_id": agent_run_id,
"message": "Workflow execution started successfully"
@@ -439,22 +435,11 @@ class WorkflowExecutor:
if workflow_config.get('steps'):
steps_json = workflow_config['steps']
else:
- steps_json = await self._get_legacy_workflow_steps(workflow_id)
+ steps_json = []
return workflow_config, steps_json
- async def _get_legacy_workflow_steps(self, workflow_id: str) -> list:
- client = await self._db.client
- workflow_steps_result = await client.table('workflow_steps').select('*').eq('workflow_id', workflow_id).order('step_order').execute()
- return [{
- 'name': step_data['name'],
- 'description': step_data.get('description'),
- 'type': step_data['type'],
- 'config': step_data.get('config', {}),
- 'conditions': step_data.get('conditions'),
- 'order': step_data['step_order']
- } for step_data in workflow_steps_result.data]
async def _get_agent_data(self, agent_id: str) -> Tuple[Dict[str, Any], str]:
from agent.versioning.domain.entities import AgentId
@@ -547,26 +532,7 @@ class WorkflowExecutor:
if not can_run:
raise Exception(f"Billing check failed: {billing_message}")
- async def _create_workflow_execution_record(
- self,
- workflow_id: str,
- agent_id: str,
- thread_id: str,
- workflow_input: Dict[str, Any],
- trigger_result: TriggerResult
- ) -> str:
- client = await self._db.client
- execution_result = await client.table('workflow_executions').insert({
- 'workflow_id': workflow_id,
- 'agent_id': agent_id,
- 'thread_id': thread_id,
- 'triggered_by': trigger_result.execution_variables.variables.get('triggered_by', 'trigger'),
- 'status': 'running',
- 'input_data': workflow_input
- }).execute()
- return execution_result.data[0]['id']
async def _create_workflow_message(
self,

View File

@@ -1,9 +0,0 @@
NEXT_PUBLIC_ENV_MODE="LOCAL" #production, or staging
NEXT_PUBLIC_SUPABASE_URL=""
NEXT_PUBLIC_SUPABASE_ANON_KEY=""
NEXT_PUBLIC_BACKEND_URL=""
NEXT_PUBLIC_URL=""
NEXT_PUBLIC_GOOGLE_CLIENT_ID=""
OPENAI_API_KEY=""
EDGE_CONFIG="https://edge-config.vercel.com/REDACTED?token=REDACTED"

View File

@@ -627,7 +627,7 @@ export const startAgent = async (
enable_thinking?: boolean;
reasoning_effort?: string;
stream?: boolean;
- agent_id?: string;
+ agent_id?: string; // Optional again
},
): Promise<{ agent_run_id: string }> => {
try {
@@ -656,7 +656,6 @@ export const startAgent = async (
enable_thinking: false,
reasoning_effort: 'low',
stream: true,
- agent_id: undefined,
};
const finalOptions = { ...defaultOptions, ...options };

sdk_example.py (new file, 15 lines)
View File

@@ -0,0 +1,15 @@
from kortix import Agent, ModelSettings, function_tool

# Hypothetical tool so the example is self-contained; the real SDK's tool API may differ.
@function_tool
def get_weather(city: str) -> str:
    return f"The weather in {city} is sunny."

agent = Agent(
    name="Haiku agent",
    instructions="Always respond in haiku form",
    model="o3-mini",
    tools=[get_weather],
)

# The second snippet uses the OpenAI Assistants API; `client` is assumed to be an OpenAI client.
from openai import OpenAI
client = OpenAI()

assistant = client.beta.assistants.create(
    name="Math Tutor",
    instructions="You are a personal math tutor. Answer questions briefly, in a sentence or less.",
    model="gpt-4o",
)