From bc6620569f685a61562a9ebd5c547515a7a5cb41 Mon Sep 17 00:00:00 2001
From: marko-kraemer
Date: Mon, 29 Sep 2025 14:12:45 +0200
Subject: [PATCH] rm general tab, integrations tab default

---
 backend/core/agent_runs.py                     | 27 ++-----------------
 .../composio_integration/toolkit_service.py    |  4 +--
 backend/core/run.py                            | 11 ++------
 .../templates/services/template_service.py     |  2 +-
 backend/core/triggers/execution_service.py     |  6 -----
 backend/run_agent_background.py                |  4 ---
 .../20250707140000_add_agent_run_metadata.sql  |  2 +-
 .../agents/agent-configuration-dialog.tsx      | 10 +++----
 .../thread/chat-input/chat-input.tsx           |  4 +--
 .../thread/chat-input/unified-config-menu.tsx  |  2 +-
 10 files changed, 16 insertions(+), 56 deletions(-)

diff --git a/backend/core/agent_runs.py b/backend/core/agent_runs.py
index a821f56f..764d6b92 100644
--- a/backend/core/agent_runs.py
+++ b/backend/core/agent_runs.py
@@ -29,21 +29,6 @@ from .core_utils import check_agent_run_limit, check_project_count_limit
 
 router = APIRouter()
 
-async def check_billing_status(client, user_id: str) -> Tuple[bool, str, Optional[Dict]]:
-    """
-    Compatibility wrapper for the new credit-based billing system.
-    Converts new credit system response to match old billing status format.
-    """
-    can_run, message, reservation_id = await billing_integration.check_and_reserve_credits(user_id)
-
-    # Create a subscription-like object for backward compatibility
-    subscription_info = {
-        "price_id": "credit_based",
-        "plan_name": "Credit System",
-        "minutes_limit": "credit based"
-    }
-
-    return can_run, message, subscription_info
 
 
 @router.post("/thread/{thread_id}/agent/start")
@@ -226,9 +211,7 @@ async def start_agent(
         "agent_id": agent_config.get('agent_id') if agent_config else None,
         "agent_version_id": agent_config.get('current_version_id') if agent_config else None,
         "metadata": {
-            "model_name": effective_model,
-            "requested_model": model_name,
-            "enable_context_manager": body.enable_context_manager
+            "model_name": effective_model
         }
     }).execute()
 
@@ -250,8 +233,6 @@ async def start_agent(
         agent_run_id=agent_run_id, thread_id=thread_id, instance_id=utils.instance_id,
         project_id=project_id,
         model_name=model_name,  # Already resolved above
-        enable_context_manager=body.enable_context_manager,
-        enable_prompt_caching=body.enable_prompt_caching,
         agent_config=agent_config,  # Pass agent configuration
         request_id=request_id,
     )
@@ -961,9 +942,7 @@ async def initiate_agent_with_files(
         "agent_id": agent_config.get('agent_id') if agent_config else None,
         "agent_version_id": agent_config.get('current_version_id') if agent_config else None,
         "metadata": {
-            "model_name": effective_model,
-            "requested_model": model_name,
-            "enable_context_manager": enable_context_manager
+            "model_name": effective_model
         }
     }).execute()
     agent_run_id = agent_run.data[0]['id']
@@ -986,8 +965,6 @@ async def initiate_agent_with_files(
         agent_run_id=agent_run_id, thread_id=thread_id, instance_id=utils.instance_id,
         project_id=project_id,
         model_name=model_name,  # Already resolved above
-        enable_context_manager=enable_context_manager,
-        enable_prompt_caching=enable_prompt_caching,
         agent_config=agent_config,  # Pass agent configuration
         request_id=request_id,
     )
diff --git a/backend/core/composio_integration/toolkit_service.py b/backend/core/composio_integration/toolkit_service.py
index f8295e41..bae9c284 100644
--- a/backend/core/composio_integration/toolkit_service.py
+++ b/backend/core/composio_integration/toolkit_service.py
@@ -247,7 +247,7 @@ class ToolkitService:
 
     async def get_toolkit_icon(self, toolkit_slug: str) -> Optional[str]:
         try:
-            logger.debug(f"Fetching toolkit icon for: {toolkit_slug}")
+            # logger.debug(f"Fetching toolkit icon for: {toolkit_slug}")
             toolkit_response = self.client.toolkits.retrieve(toolkit_slug)
 
             if hasattr(toolkit_response, 'model_dump'):
@@ -265,7 +265,7 @@ class ToolkitService:
             else:
                 logo = None
 
-            logger.debug(f"Successfully fetched icon for {toolkit_slug}: {logo}")
+            # logger.debug(f"Successfully fetched icon for {toolkit_slug}: {logo}")
             return logo
 
         except Exception as e:
diff --git a/backend/core/run.py b/backend/core/run.py
index 269550d8..2344f080 100644
--- a/backend/core/run.py
+++ b/backend/core/run.py
@@ -56,10 +56,8 @@ class AgentConfig:
     native_max_auto_continues: int = 25
     max_iterations: int = 100
    model_name: str = "openai/gpt-5-mini"
-    enable_context_manager: bool = True
     agent_config: Optional[dict] = None
     trace: Optional[StatefulTraceClient] = None
-    enable_prompt_caching: bool = True
 
 
 class ToolManager:
@@ -119,7 +117,6 @@ class ToolManager:
             ('sb_design_tool', SandboxDesignerTool, {'project_id': self.project_id, 'thread_id': self.thread_id, 'thread_manager': self.thread_manager}),
             ('sb_presentation_outline_tool', SandboxPresentationOutlineTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
             ('sb_presentation_tool', SandboxPresentationTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
-            ('sb_sheets_tool', SandboxSheetsTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
             # ('sb_web_dev_tool', SandboxWebDevTool, {'project_id': self.project_id, 'thread_id': self.thread_id, 'thread_manager': self.thread_manager}),  # DEACTIVATED
             ('sb_upload_file_tool', SandboxUploadFileTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
@@ -772,8 +769,8 @@ class AgentRunner:
                 ),
                 native_max_auto_continues=self.config.native_max_auto_continues,
                 generation=generation,
-                enable_prompt_caching=self.config.enable_prompt_caching,
-                enable_context_manager=self.config.enable_context_manager
+                enable_prompt_caching=True,
+                enable_context_manager=True
             )
 
             last_tool_call = None
@@ -896,8 +893,6 @@ async def run_agent(
     native_max_auto_continues: int = 25,
     max_iterations: int = 100,
     model_name: str = "openai/gpt-5-mini",
-    enable_context_manager: bool = False,
-    enable_prompt_caching: bool = False,
     agent_config: Optional[dict] = None,
     trace: Optional[StatefulTraceClient] = None
 ):
@@ -919,8 +914,6 @@ async def run_agent(
         native_max_auto_continues=native_max_auto_continues,
         max_iterations=max_iterations,
         model_name=effective_model,
-        enable_context_manager=enable_context_manager,
-        enable_prompt_caching=enable_prompt_caching,
         agent_config=agent_config,
         trace=trace
     )
diff --git a/backend/core/templates/services/template_service.py b/backend/core/templates/services/template_service.py
index f8521726..0a7cbab0 100644
--- a/backend/core/templates/services/template_service.py
+++ b/backend/core/templates/services/template_service.py
@@ -32,7 +32,7 @@ class TemplateService:
         filters: MarketplaceFilters
     ) -> PaginatedResponse[Dict[str, Any]]:
         try:
-            logger.debug(f"Fetching marketplace templates with filters: {filters.__dict__}")
+            # logger.debug(f"Fetching marketplace templates with filters: {filters.__dict__}")
 
             from ..template_service import get_template_service
             from ..utils import format_template_for_response
diff --git a/backend/core/triggers/execution_service.py b/backend/core/triggers/execution_service.py
index e5d17ca6..502d2b09 100644
--- a/backend/core/triggers/execution_service.py
+++ b/backend/core/triggers/execution_service.py
@@ -396,7 +396,6 @@ class AgentExecutor:
             "agent_version_id": agent_config.get('current_version_id'),
             "metadata": {
                 "model_name": model_name,
-                "enable_context_manager": True,
                 "trigger_execution": True,
                 "trigger_variables": trigger_variables
             }
@@ -412,8 +411,6 @@ class AgentExecutor:
             instance_id="trigger_executor",
             project_id=project_id,
             model_name=model_name,
-            enable_context_manager=True,
-            enable_prompt_caching=True,
             agent_config=agent_config,
             request_id=structlog.contextvars.get_contextvars().get('request_id'),
         )
@@ -700,7 +697,6 @@ class WorkflowExecutor:
             "agent_version_id": agent_config.get('current_version_id'),
             "metadata": {
                 "model_name": model_name,
-                "enable_context_manager": True,
                 "workflow_execution": True
             }
         }).execute()
@@ -715,8 +711,6 @@ class WorkflowExecutor:
             instance_id=getattr(config, 'INSTANCE_ID', 'default'),
             project_id=project_id,
             model_name=model_name,
-            enable_context_manager=True,
-            enable_prompt_caching=True,
             agent_config=agent_config,
             request_id=None,
         )
diff --git a/backend/run_agent_background.py b/backend/run_agent_background.py
index 78715228..0df469e0 100644
--- a/backend/run_agent_background.py
+++ b/backend/run_agent_background.py
@@ -58,8 +58,6 @@ async def run_agent_background(
     instance_id: str,
     project_id: str,
     model_name: str = "openai/gpt-5-mini",
-    enable_context_manager: bool = True,
-    enable_prompt_caching: bool = True,
     agent_config: Optional[dict] = None,
     request_id: Optional[str] = None
 ):
@@ -165,8 +163,6 @@ async def run_agent_background(
         agent_gen = run_agent(
             thread_id=thread_id, project_id=project_id,
             model_name=effective_model,
-            enable_context_manager=enable_context_manager,
-            enable_prompt_caching=enable_prompt_caching,
             agent_config=agent_config,
             trace=trace,
         )
diff --git a/backend/supabase/migrations/20250707140000_add_agent_run_metadata.sql b/backend/supabase/migrations/20250707140000_add_agent_run_metadata.sql
index 00bc06cb..ff61c89a 100644
--- a/backend/supabase/migrations/20250707140000_add_agent_run_metadata.sql
+++ b/backend/supabase/migrations/20250707140000_add_agent_run_metadata.sql
@@ -11,6 +11,6 @@ ALTER TABLE agent_runs ADD COLUMN IF NOT EXISTS metadata JSONB DEFAULT '{}'::jso
 CREATE INDEX IF NOT EXISTS idx_agent_runs_metadata ON agent_runs USING GIN (metadata);
 
 -- Add comment to document the metadata column
-COMMENT ON COLUMN agent_runs.metadata IS 'Streaming and configuration parameters for this agent run (model_name, enable_thinking, reasoning_effort, enable_context_manager, etc.)';
+COMMENT ON COLUMN agent_runs.metadata IS 'Streaming and configuration parameters for this agent run (model_name, enable_thinking, reasoning_effort, etc.)';
 
 COMMIT;
\ No newline at end of file
diff --git a/frontend/src/components/agents/agent-configuration-dialog.tsx b/frontend/src/components/agents/agent-configuration-dialog.tsx
index 467688a7..aadcd60f 100644
--- a/frontend/src/components/agents/agent-configuration-dialog.tsx
+++ b/frontend/src/components/agents/agent-configuration-dialog.tsx
@@ -61,14 +61,14 @@ interface AgentConfigurationDialogProps {
   open: boolean;
   onOpenChange: (open: boolean) => void;
   agentId: string;
-  initialTab?: 'general' | 'instructions' | 'tools' | 'integrations' | 'knowledge' | 'playbooks' | 'triggers';
+  initialTab?: 'instructions' | 'tools' | 'integrations' | 'knowledge' | 'playbooks' | 'triggers';
 }
 
 export function AgentConfigurationDialog({
   open,
   onOpenChange,
   agentId,
-  initialTab = 'general',
+  initialTab = 'instructions',
 }: AgentConfigurationDialogProps) {
   const router = useRouter();
   const searchParams = useSearchParams();
@@ -299,7 +299,7 @@ export function AgentConfigurationDialog({
   }
 
   const tabItems = [
-    { id: 'general', label: 'General', icon: Settings, disabled: false },
+    // { id: 'general', label: 'General', icon: Settings, disabled: false },
     { id: 'instructions', label: 'Instructions', icon: Brain, disabled: isSunaAgent },
     { id: 'tools', label: 'Tools', icon: Wrench, disabled: isSunaAgent },
     { id: 'integrations', label: 'Integrations', icon: Server, disabled: false },
@@ -461,7 +461,7 @@ export function AgentConfigurationDialog({
-
+            {/*
@@ -474,7 +474,7 @@ export function AgentConfigurationDialog({
-
+            */}
diff --git a/frontend/src/components/thread/chat-input/chat-input.tsx b/frontend/src/components/thread/chat-input/chat-input.tsx
index bd6f8f1c..4a493032 100644
--- a/frontend/src/components/thread/chat-input/chat-input.tsx
+++ b/frontend/src/components/thread/chat-input/chat-input.tsx
@@ -150,7 +150,7 @@ export const ChatInput = memo(forwardRef(
     const [showSnackbar, setShowSnackbar] = useState(defaultShowSnackbar);
     const [userDismissedUsage, setUserDismissedUsage] = useState(false);
     const [billingModalOpen, setBillingModalOpen] = useState(false);
-    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'general' | 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'general' });
+    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'instructions' });
     const [mounted, setMounted] = useState(false);
 
     const {
@@ -575,7 +575,7 @@ export const ChatInput = memo(forwardRef(