rm general tab, integrations tab default

marko-kraemer 2025-09-29 14:12:45 +02:00
parent 9f7327159a
commit bc6620569f
10 changed files with 16 additions and 56 deletions

View File

@@ -29,21 +29,6 @@ from .core_utils import check_agent_run_limit, check_project_count_limit
 router = APIRouter()
-async def check_billing_status(client, user_id: str) -> Tuple[bool, str, Optional[Dict]]:
-    """
-    Compatibility wrapper for the new credit-based billing system.
-    Converts new credit system response to match old billing status format.
-    """
-    can_run, message, reservation_id = await billing_integration.check_and_reserve_credits(user_id)
-    # Create a subscription-like object for backward compatibility
-    subscription_info = {
-        "price_id": "credit_based",
-        "plan_name": "Credit System",
-        "minutes_limit": "credit based"
-    }
-    return can_run, message, subscription_info
 @router.post("/thread/{thread_id}/agent/start")
@@ -226,9 +211,7 @@ async def start_agent(
         "agent_id": agent_config.get('agent_id') if agent_config else None,
         "agent_version_id": agent_config.get('current_version_id') if agent_config else None,
         "metadata": {
-            "model_name": effective_model,
-            "requested_model": model_name,
-            "enable_context_manager": body.enable_context_manager
+            "model_name": effective_model
         }
     }).execute()
@@ -250,8 +233,6 @@ async def start_agent(
         agent_run_id=agent_run_id, thread_id=thread_id, instance_id=utils.instance_id,
         project_id=project_id,
         model_name=model_name, # Already resolved above
-        enable_context_manager=body.enable_context_manager,
-        enable_prompt_caching=body.enable_prompt_caching,
         agent_config=agent_config, # Pass agent configuration
         request_id=request_id,
     )
@@ -961,9 +942,7 @@ async def initiate_agent_with_files(
         "agent_id": agent_config.get('agent_id') if agent_config else None,
         "agent_version_id": agent_config.get('current_version_id') if agent_config else None,
         "metadata": {
-            "model_name": effective_model,
-            "requested_model": model_name,
-            "enable_context_manager": enable_context_manager
+            "model_name": effective_model
         }
     }).execute()
     agent_run_id = agent_run.data[0]['id']
@@ -986,8 +965,6 @@ async def initiate_agent_with_files(
         agent_run_id=agent_run_id, thread_id=thread_id, instance_id=utils.instance_id,
         project_id=project_id,
         model_name=model_name, # Already resolved above
-        enable_context_manager=enable_context_manager,
-        enable_prompt_caching=enable_prompt_caching,
         agent_config=agent_config, # Pass agent configuration
         request_id=request_id,
     )

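Note on the API change above: after this commit the agent-run row records only the effective model in its metadata, and the background worker is enqueued without the two removed flags. A minimal sketch of the resulting flow, assuming the async Supabase-style client and the run_agent_background actor named in the hunks; the wrapper function itself is hypothetical and everything not shown in the diff is illustrative.

# Hypothetical helper for illustration only: condenses the post-change flow of
# start_agent / initiate_agent_with_files. All identifiers come from the hunks
# above; this function does not exist in the codebase.
async def start_agent_run(client, thread_id, agent_config, effective_model,
                          model_name, instance_id, project_id, request_id):
    agent_run = await client.table('agent_runs').insert({
        "thread_id": thread_id,
        "agent_id": agent_config.get('agent_id') if agent_config else None,
        "agent_version_id": agent_config.get('current_version_id') if agent_config else None,
        "metadata": {
            "model_name": effective_model  # requested_model / enable_context_manager no longer stored
        }
    }).execute()
    agent_run_id = agent_run.data[0]['id']

    # Context management and prompt caching are no longer caller-controlled,
    # so the enqueue call simply omits those keyword arguments.
    run_agent_background.send(
        agent_run_id=agent_run_id, thread_id=thread_id, instance_id=instance_id,
        project_id=project_id,
        model_name=model_name,
        agent_config=agent_config,
        request_id=request_id,
    )
    return agent_run_id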
View File

@@ -247,7 +247,7 @@ class ToolkitService:
     async def get_toolkit_icon(self, toolkit_slug: str) -> Optional[str]:
         try:
-            logger.debug(f"Fetching toolkit icon for: {toolkit_slug}")
+            # logger.debug(f"Fetching toolkit icon for: {toolkit_slug}")
             toolkit_response = self.client.toolkits.retrieve(toolkit_slug)
             if hasattr(toolkit_response, 'model_dump'):
@@ -265,7 +265,7 @@ class ToolkitService:
             else:
                 logo = None
-            logger.debug(f"Successfully fetched icon for {toolkit_slug}: {logo}")
+            # logger.debug(f"Successfully fetched icon for {toolkit_slug}: {logo}")
             return logo
         except Exception as e:

View File

@@ -56,10 +56,8 @@ class AgentConfig:
     native_max_auto_continues: int = 25
     max_iterations: int = 100
     model_name: str = "openai/gpt-5-mini"
-    enable_context_manager: bool = True
     agent_config: Optional[dict] = None
     trace: Optional[StatefulTraceClient] = None
-    enable_prompt_caching: bool = True
 class ToolManager:
@@ -119,7 +117,6 @@ class ToolManager:
             ('sb_design_tool', SandboxDesignerTool, {'project_id': self.project_id, 'thread_id': self.thread_id, 'thread_manager': self.thread_manager}),
             ('sb_presentation_outline_tool', SandboxPresentationOutlineTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
             ('sb_presentation_tool', SandboxPresentationTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
-            ('sb_sheets_tool', SandboxSheetsTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
             # ('sb_web_dev_tool', SandboxWebDevTool, {'project_id': self.project_id, 'thread_id': self.thread_id, 'thread_manager': self.thread_manager}), # DEACTIVATED
             ('sb_upload_file_tool', SandboxUploadFileTool, {'project_id': self.project_id, 'thread_manager': self.thread_manager}),
@@ -772,8 +769,8 @@ class AgentRunner:
                 ),
                 native_max_auto_continues=self.config.native_max_auto_continues,
                 generation=generation,
-                enable_prompt_caching=self.config.enable_prompt_caching,
-                enable_context_manager=self.config.enable_context_manager
+                enable_prompt_caching=True,
+                enable_context_manager=True
             )
             last_tool_call = None
@@ -896,8 +893,6 @@ async def run_agent(
     native_max_auto_continues: int = 25,
     max_iterations: int = 100,
     model_name: str = "openai/gpt-5-mini",
-    enable_context_manager: bool = False,
-    enable_prompt_caching: bool = False,
     agent_config: Optional[dict] = None,
     trace: Optional[StatefulTraceClient] = None
 ):
@@ -919,8 +914,6 @@ async def run_agent(
         native_max_auto_continues=native_max_auto_continues,
         max_iterations=max_iterations,
         model_name=effective_model,
-        enable_context_manager=enable_context_manager,
-        enable_prompt_caching=enable_prompt_caching,
         agent_config=agent_config,
         trace=trace
     )

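With the two flags gone from AgentConfig and run_agent, prompt caching and the context manager are always on inside AgentRunner, and callers pass only the remaining parameters. A hedged sketch of invoking the trimmed entry point; the wrapper and argument values are illustrative, and the async-generator usage is inferred from the run_agent_background call site further down.

# Hypothetical consumer for illustration only; names are taken from the hunks above.
async def consume_run(thread_id, project_id, agent_config, trace):
    # The remaining run_agent parameters after this commit.
    agent_gen = run_agent(
        thread_id=thread_id,
        project_id=project_id,
        model_name="openai/gpt-5-mini",  # default from AgentConfig
        native_max_auto_continues=25,
        max_iterations=100,
        agent_config=agent_config,
        trace=trace,
    )
    async for chunk in agent_gen:  # streamed and forwarded by the worker
        print(chunk)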
View File

@@ -32,7 +32,7 @@ class TemplateService:
         filters: MarketplaceFilters
     ) -> PaginatedResponse[Dict[str, Any]]:
         try:
-            logger.debug(f"Fetching marketplace templates with filters: {filters.__dict__}")
+            # logger.debug(f"Fetching marketplace templates with filters: {filters.__dict__}")
             from ..template_service import get_template_service
             from ..utils import format_template_for_response

View File

@@ -396,7 +396,6 @@ class AgentExecutor:
             "agent_version_id": agent_config.get('current_version_id'),
             "metadata": {
                 "model_name": model_name,
-                "enable_context_manager": True,
                 "trigger_execution": True,
                 "trigger_variables": trigger_variables
             }
@@ -412,8 +411,6 @@
             instance_id="trigger_executor",
             project_id=project_id,
             model_name=model_name,
-            enable_context_manager=True,
-            enable_prompt_caching=True,
             agent_config=agent_config,
             request_id=structlog.contextvars.get_contextvars().get('request_id'),
         )
@@ -700,7 +697,6 @@ class WorkflowExecutor:
             "agent_version_id": agent_config.get('current_version_id'),
             "metadata": {
                 "model_name": model_name,
-                "enable_context_manager": True,
                 "workflow_execution": True
             }
         }).execute()
@@ -715,8 +711,6 @@
             instance_id=getattr(config, 'INSTANCE_ID', 'default'),
             project_id=project_id,
             model_name=model_name,
-            enable_context_manager=True,
-            enable_prompt_caching=True,
             agent_config=agent_config,
             request_id=None,
         )

View File

@@ -58,8 +58,6 @@ async def run_agent_background(
     instance_id: str,
     project_id: str,
     model_name: str = "openai/gpt-5-mini",
-    enable_context_manager: bool = True,
-    enable_prompt_caching: bool = True,
     agent_config: Optional[dict] = None,
     request_id: Optional[str] = None
 ):
@@ -165,8 +163,6 @@ async def run_agent_background(
     agent_gen = run_agent(
         thread_id=thread_id, project_id=project_id,
         model_name=effective_model,
-        enable_context_manager=enable_context_manager,
-        enable_prompt_caching=enable_prompt_caching,
         agent_config=agent_config,
         trace=trace,
     )

View File

@@ -11,6 +11,6 @@ ALTER TABLE agent_runs ADD COLUMN IF NOT EXISTS metadata JSONB DEFAULT '{}'::jso
 CREATE INDEX IF NOT EXISTS idx_agent_runs_metadata ON agent_runs USING GIN (metadata);
 -- Add comment to document the metadata column
-COMMENT ON COLUMN agent_runs.metadata IS 'Streaming and configuration parameters for this agent run (model_name, enable_thinking, reasoning_effort, enable_context_manager, etc.)';
+COMMENT ON COLUMN agent_runs.metadata IS 'Streaming and configuration parameters for this agent run (model_name, enable_thinking, reasoning_effort, etc.)';
 COMMIT;

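The metadata column stays a plain JSONB with a GIN index, so the run parameters recorded there (model_name, enable_thinking, reasoning_effort, ...) can still be filtered with a containment query. A hedged example using the async Supabase-style client seen in the backend hunks; the .contains() filter, the helper, and the model value are assumptions for illustration, not code from this commit.

# Hypothetical query helper, for illustration only.
async def runs_for_model(client, model: str):
    # The GIN index on agent_runs.metadata accelerates JSONB containment (@>),
    # which is what a contains() filter issues under the hood.
    result = await (
        client.table('agent_runs')
        .select('id, status, metadata')
        .contains('metadata', {'model_name': model})
        .execute()
    )
    return result.data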
View File

@@ -61,14 +61,14 @@ interface AgentConfigurationDialogProps {
   open: boolean;
   onOpenChange: (open: boolean) => void;
   agentId: string;
-  initialTab?: 'general' | 'instructions' | 'tools' | 'integrations' | 'knowledge' | 'playbooks' | 'triggers';
+  initialTab?: 'instructions' | 'tools' | 'integrations' | 'knowledge' | 'playbooks' | 'triggers';
 }
 export function AgentConfigurationDialog({
   open,
   onOpenChange,
   agentId,
-  initialTab = 'general',
+  initialTab = 'instructions',
 }: AgentConfigurationDialogProps) {
   const router = useRouter();
   const searchParams = useSearchParams();
@@ -299,7 +299,7 @@ export function AgentConfigurationDialog({
   }
   const tabItems = [
-    { id: 'general', label: 'General', icon: Settings, disabled: false },
+    // { id: 'general', label: 'General', icon: Settings, disabled: false },
     { id: 'instructions', label: 'Instructions', icon: Brain, disabled: isSunaAgent },
     { id: 'tools', label: 'Tools', icon: Wrench, disabled: isSunaAgent },
     { id: 'integrations', label: 'Integrations', icon: Server, disabled: false },
@@ -461,7 +461,7 @@ export function AgentConfigurationDialog({
           </TabsList>
         </div>
         <div className="flex-1 overflow-auto">
-          <TabsContent value="general" className="p-6 mt-0 flex flex-col h-full">
+          {/* <TabsContent value="general" className="p-6 mt-0 flex flex-col h-full">
            <div className="flex flex-col flex-1 gap-6">
              <div className="flex-shrink-0">
                <Label className="text-base font-semibold mb-3 block">Model</Label>
@@ -474,7 +474,7 @@
             </div>
           </div>
-          </TabsContent>
+          </TabsContent> */}
          <TabsContent value="instructions" className="p-6 mt-0 flex flex-col h-full">
           <div className="flex flex-col flex-1 min-h-0">

View File

@@ -150,7 +150,7 @@ export const ChatInput = memo(forwardRef<ChatInputHandles, ChatInputProps>(
     const [showSnackbar, setShowSnackbar] = useState(defaultShowSnackbar);
     const [userDismissedUsage, setUserDismissedUsage] = useState(false);
     const [billingModalOpen, setBillingModalOpen] = useState(false);
-    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'general' | 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'general' });
+    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'instructions' });
     const [mounted, setMounted] = useState(false);
     const {
@@ -575,7 +575,7 @@ export const ChatInput = memo(forwardRef<ChatInputHandles, ChatInputProps>(
       <div className="bg-gradient-to-b from-transparent via-transparent to-muted/30 pt-8 pb-2 px-4 rounded-b-3xl border border-t-0 border-border/50 transition-all duration-300 ease-out">
         <div className="flex items-center justify-between gap-1 overflow-x-auto scrollbar-none relative">
           <button
-            onClick={() => setRegistryDialogOpen(true)}
+            onClick={() => setAgentConfigDialog({ open: true, tab: 'integrations' })}
            className="flex items-center gap-1.5 text-muted-foreground hover:text-foreground transition-all duration-200 px-2.5 py-1.5 rounded-lg hover:bg-muted/50 border border-transparent hover:border-border/30 flex-shrink-0 cursor-pointer relative pointer-events-auto"
          >
            <div className="flex items-center -space-x-0.5">

View File

@@ -71,7 +71,7 @@ const LoggedInMenu: React.FC<UnifiedConfigMenuProps> = memo(function LoggedInMen
     const [showNewAgentDialog, setShowNewAgentDialog] = useState(false);
     const searchInputRef = useRef<HTMLInputElement>(null);
     const [execDialog, setExecDialog] = useState<{ open: boolean; playbook: any | null; agentId: string | null }>({ open: false, playbook: null, agentId: null });
-    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'general' | 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'general' });
+    const [agentConfigDialog, setAgentConfigDialog] = useState<{ open: boolean; tab: 'instructions' | 'knowledge' | 'triggers' | 'playbooks' | 'tools' | 'integrations' }>({ open: false, tab: 'instructions' });
     // Debounce search query
     useEffect(() => {