diff --git a/backend/agent/api.py b/backend/agent/api.py
index 5cec0970..0f51c139 100644
--- a/backend/agent/api.py
+++ b/backend/agent/api.py
@@ -255,6 +255,9 @@ async def get_agent_run_with_access_check(client, agent_run_id: str, user_id: st
await verify_thread_access(client, thread_id, user_id)
return agent_run_data
+
+
+
@router.post("/thread/{thread_id}/agent/start")
async def start_agent(
thread_id: str,
diff --git a/backend/agent/tools/sb_expose_tool.py b/backend/agent/tools/sb_expose_tool.py
index e0e1973b..fa24e4f9 100644
--- a/backend/agent/tools/sb_expose_tool.py
+++ b/backend/agent/tools/sb_expose_tool.py
@@ -10,30 +10,6 @@ class SandboxExposeTool(SandboxToolsBase):
def __init__(self, project_id: str, thread_manager: ThreadManager):
super().__init__(project_id, thread_manager)
- async def _wait_for_sandbox_services(self, timeout: int = 30) -> bool:
- """Wait for sandbox services to be fully started before exposing ports."""
- start_time = time.time()
-
- while time.time() - start_time < timeout:
- try:
- # Check if supervisord is running and managing services
- result = await self.sandbox.process.exec("supervisorctl status", timeout=10)
-
- if result.exit_code == 0:
- # Check if key services are running
- status_output = result.output
- if "http_server" in status_output and "RUNNING" in status_output:
- return True
-
- # If services aren't ready, wait a bit
- await asyncio.sleep(2)
-
- except Exception as e:
- # If we can't check status, wait a bit and try again
- await asyncio.sleep(2)
-
- return False
-
@openapi_schema({
"type": "function",
"function": {
@@ -93,11 +69,6 @@ class SandboxExposeTool(SandboxToolsBase):
if not 1 <= port <= 65535:
return self.fail_response(f"Invalid port number: {port}. Must be between 1 and 65535.")
- # Wait for sandbox services to be ready (especially important for workflows)
- services_ready = await self._wait_for_sandbox_services()
- if not services_ready:
- return self.fail_response(f"Sandbox services are not fully started yet. Please wait a moment and try again, or ensure a service is running on port {port}.")
-
# Check if something is actually listening on the port (for custom ports)
if port not in [6080, 8080, 8003]: # Skip check for known sandbox ports
try:
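
Note: this hunk drops the supervisord readiness loop, so the tool can now hand back an exposed URL before the service behind the port is accepting connections; only the listening check for non-default ports remains, and the known sandbox ports (6080, 8080, 8003) skip even that. A minimal client-side sketch of how a caller might compensate, assuming nothing beyond plain `fetch`; the URL, timeout, and interval values are illustrative and not part of this patch:

```ts
// Hedged sketch: poll an exposed preview URL until something answers, since
// SandboxExposeTool no longer blocks on supervisord before returning the link.
// Timeout/interval values are illustrative, not taken from the patch.
async function waitForExposedPort(
  url: string,
  timeoutMs = 30_000,
  intervalMs = 2_000,
): Promise<boolean> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    try {
      // Any HTTP response (even 404/500) proves a service is listening on the port.
      await fetch(url, { method: 'HEAD' });
      return true;
    } catch {
      // Connection refused or DNS not ready yet - keep polling.
    }
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return false;
}

// Example: const ready = await waitForExposedPort('https://8080-sandbox.example.dev');
```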
diff --git a/frontend/src/app/(home)/page.tsx b/frontend/src/app/(home)/page.tsx
index 2d4c59e3..28afe591 100644
--- a/frontend/src/app/(home)/page.tsx
+++ b/frontend/src/app/(home)/page.tsx
@@ -28,7 +28,7 @@ export default function Home() {
-
+
{/* */}
diff --git a/frontend/src/components/home/sections/hero-section.tsx b/frontend/src/components/home/sections/hero-section.tsx
index 3fe62e4e..e938eccc 100644
--- a/frontend/src/components/home/sections/hero-section.tsx
+++ b/frontend/src/components/home/sections/hero-section.tsx
@@ -16,6 +16,7 @@ import { useInitiateAgentMutation } from '@/hooks/react-query/dashboard/use-init
import { useThreadQuery } from '@/hooks/react-query/threads/use-threads';
import { generateThreadName } from '@/lib/actions/threads';
import GoogleSignIn from '@/components/GoogleSignIn';
+import { useAgents } from '@/hooks/react-query/agents/use-agents';
import {
Dialog,
DialogContent,
@@ -30,13 +31,11 @@ import { useAccounts } from '@/hooks/use-accounts';
import { isLocalMode, config } from '@/lib/config';
import { toast } from 'sonner';
import { useModal } from '@/hooks/use-modal-store';
-import { Card, CardContent } from '@/components/ui/card';
-import { Button } from '@/components/ui/button';
-import { Send, ArrowUp, Paperclip } from 'lucide-react';
-import { Textarea } from '@/components/ui/textarea';
-import { cn } from '@/lib/utils';
-import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip';
-import ChatDropdown from '@/components/thread/chat-input/chat-dropdown';
+import { ChatInput, ChatInputHandles } from '@/components/thread/chat-input/chat-input';
+import { normalizeFilenameToNFC } from '@/lib/utils/unicode';
+import { createQueryHook } from '@/hooks/use-query';
+import { agentKeys } from '@/hooks/react-query/agents/keys';
+import { getAgents } from '@/hooks/react-query/agents/utils';
// Custom dialog overlay with blur effect
const BlurredDialogOverlay = () => (
@@ -55,6 +54,7 @@ export function HeroSection() {
const scrollTimeout = useRef(null);
const { scrollY } = useScroll();
const [inputValue, setInputValue] = useState('');
+ const [selectedAgentId, setSelectedAgentId] = useState<string | undefined>();
const router = useRouter();
const { user, isLoading } = useAuth();
const { billingError, handleBillingError, clearBillingError } =
@@ -65,6 +65,28 @@ export function HeroSection() {
const initiateAgentMutation = useInitiateAgentMutation();
const [initiatedThreadId, setInitiatedThreadId] = useState(null);
const threadQuery = useThreadQuery(initiatedThreadId || '');
+ const chatInputRef = useRef<ChatInputHandles>(null);
+
+ // Fetch agents for selection
+ const { data: agentsResponse } = createQueryHook(
+ agentKeys.list({
+ limit: 100,
+ sort_by: 'name',
+ sort_order: 'asc'
+ }),
+ () => getAgents({
+ limit: 100,
+ sort_by: 'name',
+ sort_order: 'asc'
+ }),
+ {
+ enabled: !!user && !isLoading,
+ staleTime: 5 * 60 * 1000,
+ gcTime: 10 * 60 * 1000,
+ }
+ )();
+
+ const agents = agentsResponse?.agents || [];
// Auth dialog state
const [authDialogOpen, setAuthDialogOpen] = useState(false);
@@ -116,31 +138,66 @@ export function HeroSection() {
if (thread.project_id) {
router.push(`/projects/${thread.project_id}/thread/${initiatedThreadId}`);
} else {
- router.push(`/thread/${initiatedThreadId}`);
+ router.push(`/agents/${initiatedThreadId}`);
}
setInitiatedThreadId(null);
}
}, [threadQuery.data, initiatedThreadId, router]);
- const createAgentWithPrompt = async () => {
- if (!inputValue.trim() || isSubmitting) return;
+ // Handle ChatInput submission
+ const handleChatInputSubmit = async (
+ message: string,
+ options?: { model_name?: string; enable_thinking?: boolean }
+ ) => {
+ if ((!message.trim() && !chatInputRef.current?.getPendingFiles().length) || isSubmitting) return;
+
+ // If user is not logged in, save prompt and show auth dialog
+ if (!user && !isLoading) {
+ localStorage.setItem(PENDING_PROMPT_KEY, message.trim());
+ setAuthDialogOpen(true);
+ return;
+ }
+
+ // User is logged in, create the agent with files like dashboard does
setIsSubmitting(true);
try {
+ const files = chatInputRef.current?.getPendingFiles() || [];
+ localStorage.removeItem(PENDING_PROMPT_KEY);
+
const formData = new FormData();
- formData.append('prompt', inputValue.trim());
- formData.append('model_name', 'openrouter/deepseek/deepseek-chat');
- formData.append('enable_thinking', 'false');
+ formData.append('prompt', message);
+
+ // Add selected agent if one is chosen
+ if (selectedAgentId) {
+ formData.append('agent_id', selectedAgentId);
+ }
+
+ // Add files if any
+ files.forEach((file) => {
+ const normalizedName = normalizeFilenameToNFC(file.name);
+ formData.append('files', file, normalizedName);
+ });
+
+ if (options?.model_name) formData.append('model_name', options.model_name);
+ formData.append('enable_thinking', String(options?.enable_thinking ?? false));
formData.append('reasoning_effort', 'low');
formData.append('stream', 'true');
formData.append('enable_context_manager', 'false');
const result = await initiateAgentMutation.mutateAsync(formData);
- setInitiatedThreadId(result.thread_id);
+ if (result.thread_id) {
+ setInitiatedThreadId(result.thread_id);
+ } else {
+ throw new Error('Agent initiation did not return a thread_id.');
+ }
+
+ chatInputRef.current?.clearPendingFiles();
setInputValue('');
} catch (error: any) {
if (error instanceof BillingError) {
- console.log('Billing error:');
+ console.log('Billing error:', error.detail);
+ onOpen("paymentRequiredDialog");
} else {
const isConnectionError =
error instanceof TypeError &&
@@ -156,38 +213,6 @@ export function HeroSection() {
}
};
- // Handle form submission
- const handleSubmit = async (e?: FormEvent) => {
- if (e) {
- e.preventDefault();
- e.stopPropagation(); // Stop event propagation to prevent dialog closing
- }
-
- if (!inputValue.trim() || isSubmitting) return;
-
- // If user is not logged in, save prompt and show auth dialog
- if (!user && !isLoading) {
- // Save prompt to localStorage BEFORE showing the dialog
- localStorage.setItem(PENDING_PROMPT_KEY, inputValue.trim());
- setAuthDialogOpen(true);
- return;
- }
-
- // User is logged in, create the agent
- createAgentWithPrompt();
- };
-
- // Handle Enter key press
- const handleKeyDown = (e: React.KeyboardEvent) => {
- if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
- e.preventDefault(); // Prevent default form submission
- e.stopPropagation(); // Stop event propagation
- handleSubmit();
- }
- };
-
-
-
return (
@@ -280,87 +305,27 @@ export function HeroSection() {
{hero.description}
+
-
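
The JSX for the reworked hero return block (the `-280,87 +305,27` hunk) lost its markup in this excerpt, but the handler above depends on a ref contract exposed by `ChatInput`: `getPendingFiles()` and `clearPendingFiles()` reached through `chatInputRef` typed as `ChatInputHandles`. A hedged, self-contained sketch of that contract and how a component can expose it; the component name and props below are illustrative, not the real `ChatInput` implementation:

```tsx
// Hedged sketch of the ChatInputHandles contract the hero relies on; only
// getPendingFiles/clearPendingFiles are implied by the diff, the rest is illustrative.
import { forwardRef, useImperativeHandle, useState } from 'react';

export interface ChatInputHandles {
  getPendingFiles: () => File[];
  clearPendingFiles: () => void;
}

interface ExampleChatInputProps {
  onSubmit: (message: string) => void | Promise<void>;
}

export const ExampleChatInput = forwardRef<ChatInputHandles, ExampleChatInputProps>(
  function ExampleChatInput({ onSubmit }, ref) {
    const [pendingFiles, setPendingFiles] = useState<File[]>([]);
    const [value, setValue] = useState('');

    // Expose the two methods handleChatInputSubmit calls through chatInputRef.
    useImperativeHandle(ref, () => ({
      getPendingFiles: () => pendingFiles,
      clearPendingFiles: () => setPendingFiles([]),
    }));

    return (
      <form
        onSubmit={(e) => {
          e.preventDefault();
          void onSubmit(value);
        }}
      >
        <textarea value={value} onChange={(e) => setValue(e.target.value)} />
        <input
          type="file"
          multiple
          onChange={(e) => setPendingFiles(Array.from(e.target.files ?? []))}
        />
        <button type="submit" disabled={!value.trim() && pendingFiles.length === 0}>
          Send
        </button>
      </form>
    );
  },
);
```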
diff --git a/frontend/src/components/home/sections/hero-video-section.tsx b/frontend/src/components/home/sections/hero-video-section.tsx
index 6c977b7e..24ccdf40 100644
--- a/frontend/src/components/home/sections/hero-video-section.tsx
+++ b/frontend/src/components/home/sections/hero-video-section.tsx
@@ -1,24 +1,39 @@
import { HeroVideoDialog } from '@/components/home/ui/hero-video-dialog';
+import { SectionHeader } from '@/components/home/section-header';
export function HeroVideoSection() {
return (
-
-
-
-
+
+
+
+ Watch Intelligence in Motion
+
+
+ Watch how Suna executes complex workflows with precision and autonomy
+
+ What are AI tokens? Tokens are units of text that AI models process.
+ Your plan includes credits to spend on various AI models - the more complex the task,
+ the more tokens used.
+