mirror of https://github.com/kortix-ai/suna.git
commit c1d3071392
@@ -126,7 +126,7 @@ You'll need the following components:
    - Generate an API key from your account settings
    - Go to [Images](https://app.daytona.io/dashboard/images)
    - Click "Add Image"
-   - Enter `adamcohenhillel/kortix-suna:0.0.13` as the image name
+   - Enter `adamcohenhillel/kortix-suna:0.0.16` as the image name
    - Set `exec /usr/bin/supervisord -n -c /etc/supervisor/conf.d/supervisord.conf` as the Entrypoint
 
 4. **LLM API Keys**:
@@ -251,3 +251,7 @@ python api.py
 ## License
 
 Kortix Suna is licensed under the Apache License, Version 2.0. See [LICENSE](./LICENSE) for the full license text.
+
+## Co-Creators:
+
+Adam Cohen Hillel, Marko Kraemer, Dom
@@ -341,23 +341,22 @@ class BrowserAutomation:
             launch_options = {"timeout": 90000}
             self.browser = await playwright.chromium.launch(**launch_options)
             print("Browser launched with minimal options")
 
-            print("Creating new page...")
             try:
+                await self.get_current_page()
+                print("Found existing page, using it")
+                self.current_page_index = 0
+            except Exception as page_error:
+                print(f"Error finding existing page, creating new one. ( {page_error})")
                 page = await self.browser.new_page()
                 print("New page created successfully")
                 self.pages.append(page)
                 self.current_page_index = 0
 
                 # Navigate to about:blank to ensure page is ready
-                await page.goto("about:blank", timeout=30000)
-                print("Navigated to about:blank")
+                # await page.goto("google.com", timeout=30000)
+                print("Navigated to google.com")
 
                 print("Browser initialization completed successfully")
-            except Exception as page_error:
-                print(f"Error creating page: {page_error}")
-                traceback.print_exc()
-                raise RuntimeError(f"Failed to initialize browser page: {page_error}")
         except Exception as e:
             print(f"Browser startup error: {str(e)}")
             traceback.print_exc()
@@ -6,7 +6,7 @@ services:
       dockerfile: ${DOCKERFILE:-Dockerfile}
       args:
         TARGETPLATFORM: ${TARGETPLATFORM:-linux/amd64}
-    image: adamcohenhillel/kortix-suna:0.0.13
+    image: adamcohenhillel/kortix-suna:0.0.16
     ports:
       - "6080:6080" # noVNC web interface
      - "5901:5901" # VNC port
@@ -96,7 +96,7 @@ def create_sandbox(password: str):
     logger.debug("OPENAI_API_KEY configured for sandbox")
 
     sandbox = daytona.create(CreateSandboxParams(
-        image="adamcohenhillel/kortix-suna:0.0.14",
+        image="adamcohenhillel/kortix-suna:0.0.16",
         public=False,
         env_vars={
             "CHROME_PERSISTENT_SESSION": "true",
@@ -268,7 +268,7 @@ select exists(
 );
 $$;
 
-grant execute on function basejump.has_role_on_account(uuid, basejump.account_role) to authenticated;
+grant execute on function basejump.has_role_on_account(uuid, basejump.account_role) to authenticated, anon, public, service_role;
 
 
 /**
@@ -15,6 +15,7 @@ CREATE TABLE threads (
     thread_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
     account_id UUID REFERENCES basejump.accounts(id) ON DELETE CASCADE,
     project_id UUID REFERENCES projects(project_id) ON DELETE CASCADE,
+    is_public BOOLEAN DEFAULT FALSE,
     created_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL,
     updated_at TIMESTAMP WITH TIME ZONE DEFAULT TIMEZONE('utc'::text, NOW()) NOT NULL
 );
@@ -113,6 +114,7 @@ CREATE POLICY project_delete_policy ON projects
 CREATE POLICY thread_select_policy ON threads
     FOR SELECT
     USING (
+        is_public = TRUE OR
         basejump.has_role_on_account(account_id) = true OR
         EXISTS (
             SELECT 1 FROM projects
@@ -163,6 +165,7 @@ CREATE POLICY agent_run_select_policy ON agent_runs
             LEFT JOIN projects ON threads.project_id = projects.project_id
             WHERE threads.thread_id = agent_runs.thread_id
             AND (
+                threads.is_public = TRUE OR
                 basejump.has_role_on_account(threads.account_id) = true OR
                 basejump.has_role_on_account(projects.account_id) = true
             )
@@ -220,6 +223,7 @@ CREATE POLICY message_select_policy ON messages
             LEFT JOIN projects ON threads.project_id = projects.project_id
             WHERE threads.thread_id = messages.thread_id
             AND (
+                threads.is_public = TRUE OR
                 basejump.has_role_on_account(threads.account_id) = true OR
                 basejump.has_role_on_account(projects.account_id) = true
             )
@@ -270,8 +274,8 @@ CREATE POLICY message_delete_policy ON messages
 
 -- Grant permissions to roles
 GRANT ALL PRIVILEGES ON TABLE projects TO authenticated, service_role;
-GRANT ALL PRIVILEGES ON TABLE threads TO authenticated, service_role;
-GRANT ALL PRIVILEGES ON TABLE messages TO authenticated, service_role;
+GRANT SELECT ON TABLE threads TO authenticated, anon, service_role;
+GRANT SELECT ON TABLE messages TO authenticated, anon, service_role;
 GRANT ALL PRIVILEGES ON TABLE agent_runs TO authenticated, service_role;
 
 -- Create a function that matches the Python get_messages behavior
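These column, policy, and grant changes are what let an unauthenticated browser read a shared thread. A minimal sketch with supabase-js of how such an anonymous read could look (the env var names, function name, and client setup are assumptions for illustration, not part of this commit):

```typescript
import { createClient } from '@supabase/supabase-js';

// Anonymous client: only the public anon key, no user session.
const supabase = createClient(
  process.env.NEXT_PUBLIC_SUPABASE_URL!,      // assumed env var name
  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, // assumed env var name
);

export async function fetchSharedThread(threadId: string) {
  // Visible to the anon role only when threads.is_public = TRUE (thread_select_policy).
  const { data: thread, error: threadError } = await supabase
    .from('threads')
    .select('*')
    .eq('thread_id', threadId)
    .single();
  if (threadError) throw threadError;

  // message_select_policy applies the same is_public check for the thread's messages.
  const { data: messages, error: messagesError } = await supabase
    .from('messages')
    .select('*')
    .eq('thread_id', threadId);
  if (messagesError) throw messagesError;

  return { thread, messages };
}
```

Both pieces are needed: the `GRANT SELECT ... TO anon` statements admit the role to the tables, and the `is_public` clauses in the RLS policies decide which rows it may actually see.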
@@ -286,12 +290,18 @@ DECLARE
     current_role TEXT;
     latest_summary_id UUID;
     latest_summary_time TIMESTAMP WITH TIME ZONE;
+    is_thread_public BOOLEAN;
 BEGIN
     -- Get current role
     SELECT current_user INTO current_role;
 
-    -- Skip access check for service_role
-    IF current_role = 'authenticated' THEN
+    -- Check if thread is public
+    SELECT is_public INTO is_thread_public
+    FROM threads
+    WHERE thread_id = p_thread_id;
+
+    -- Skip access check for service_role or public threads
+    IF current_role = 'authenticated' AND NOT is_thread_public THEN
        -- Check if thread exists and user has access
        SELECT EXISTS (
            SELECT 1 FROM threads t
@@ -361,4 +371,4 @@ END;
 $$;
 
 -- Grant execute permissions
-GRANT EXECUTE ON FUNCTION get_llm_formatted_messages TO authenticated, service_role;
+GRANT EXECUTE ON FUNCTION get_llm_formatted_messages TO authenticated, anon, service_role;
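With execute now granted to `anon`, a share page can pull the formatted history through the RPC instead of raw table reads. A hedged sketch (the helper name is hypothetical; `p_thread_id` is the parameter name visible in the function body above; `supabase` is an anon client like the one in the earlier sketch):

```typescript
import { SupabaseClient } from '@supabase/supabase-js';

// Works without a session for public threads, because the function above
// skips its access check when threads.is_public is TRUE.
export async function fetchLlmMessages(supabase: SupabaseClient, threadId: string) {
  const { data, error } = await supabase.rpc('get_llm_formatted_messages', {
    p_thread_id: threadId,
  });
  if (error) throw error;
  return data;
}
```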
@@ -128,6 +128,11 @@ async def verify_thread_access(client, thread_id: str, user_id: str):
         raise HTTPException(status_code=404, detail="Thread not found")
 
     thread_data = thread_result.data[0]
+
+    # Check if thread is public
+    if thread_data.get('is_public'):
+        return True
+
     account_id = thread_data.get('account_id')
     # When using service role, we need to manually check account membership instead of using current_user_account_role
     if account_id:
@@ -0,0 +1,19 @@
+import { Metadata } from 'next';
+
+export const metadata: Metadata = {
+  title: 'Shared Conversation',
+  description: 'View a shared AI conversation',
+  openGraph: {
+    title: 'Shared AI Conversation',
+    description: 'View a shared AI conversation from Kortix Manus',
+    images: ['/kortix-logo.png'],
+  },
+};
+
+export default function ThreadLayout({
+  children,
+}: {
+  children: React.ReactNode;
+}) {
+  return <>{children}</>;
+}
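This new layout only supplies share-friendly metadata and passes its children through; the page under the same route would load the public thread via the anon-readable tables. A hypothetical sketch of such a `page.tsx` (component name, route parameter, and env var names are assumptions, not taken from this commit):

```tsx
import { createClient } from '@supabase/supabase-js';

// Hypothetical server component rendered inside ThreadLayout above.
export default async function SharedThreadPage({
  params,
}: {
  params: { threadId: string }; // assumed route segment name
}) {
  const supabase = createClient(
    process.env.NEXT_PUBLIC_SUPABASE_URL!,      // assumed env var name
    process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!, // assumed env var name
  );

  // Readable without a session because of the is_public RLS clauses added above.
  const { data: messages } = await supabase
    .from('messages')
    .select('*')
    .eq('thread_id', params.threadId);

  return (
    <main>
      <h1>Shared Conversation</h1>
      <pre>{JSON.stringify(messages, null, 2)}</pre>
    </main>
  );
}
```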
File diff suppressed because it is too large.
@@ -16,11 +16,10 @@ const apiCache = {
 
   getThreads: (projectId: string) => apiCache.threads.get(projectId || 'all'),
   setThreads: (projectId: string, data: any) => apiCache.threads.set(projectId || 'all', data),
+  invalidateThreads: (projectId: string) => apiCache.threads.delete(projectId || 'all'),
 
   getThreadMessages: (threadId: string) => apiCache.threadMessages.get(threadId),
   setThreadMessages: (threadId: string, data: any) => apiCache.threadMessages.set(threadId, data),
-
-  // Helper to clear parts of the cache when data changes
   invalidateThreadMessages: (threadId: string) => apiCache.threadMessages.delete(threadId),
 
   // Functions to clear all cache
@@ -67,7 +66,8 @@ export type Project = {
 export type Thread = {
   thread_id: string;
   account_id: string | null;
-  project_id: string | null;
+  project_id?: string | null;
+  is_public?: boolean;
   created_at: string;
   updated_at: string;
   [key: string]: any; // Allow additional properties to handle database fields
@@ -151,58 +151,70 @@ export const getProject = async (projectId: string): Promise<Project> => {
   }
 
   const supabase = createClient();
-  const { data, error } = await supabase
-    .from('projects')
-    .select('*')
-    .eq('project_id', projectId)
-    .single();
-
-  if (error) throw error;
-
-  console.log('Raw project data from database:', data);
-
-  // If project has a sandbox, ensure it's started
-  if (data.sandbox?.id) {
-    try {
-      const { data: { session } } = await supabase.auth.getSession();
-      if (session?.access_token) {
-        console.log(`Ensuring sandbox is active for project ${projectId}...`);
-        const response = await fetch(`${API_URL}/project/${projectId}/sandbox/ensure-active`, {
-          method: 'POST',
-          headers: {
-            'Authorization': `Bearer ${session.access_token}`,
-            'Content-Type': 'application/json',
-          },
-        });
-
-        if (!response.ok) {
-          const errorText = await response.text().catch(() => 'No error details available');
-          console.warn(`Failed to ensure sandbox is active: ${response.status} ${response.statusText}`, errorText);
-        } else {
-          console.log('Sandbox activation successful');
-        }
-      }
-    } catch (sandboxError) {
-      console.warn('Failed to ensure sandbox is active:', sandboxError);
-      // Non-blocking error - continue with the project data
-    }
-  }
-
-  // Map database fields to our Project type
-  const mappedProject: Project = {
-    id: data.project_id,
-    name: data.name || '',
-    description: data.description || '',
-    account_id: data.account_id,
-    created_at: data.created_at,
-    sandbox: data.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" }
-  };
-
-  console.log('Mapped project data for frontend:', mappedProject);
-
-  // Cache the result
-  apiCache.setProject(projectId, mappedProject);
-  return mappedProject;
+
+  try {
+    const { data, error } = await supabase
+      .from('projects')
+      .select('*')
+      .eq('project_id', projectId)
+      .single();
+
+    if (error) {
+      // Handle the specific "no rows returned" error from Supabase
+      if (error.code === 'PGRST116') {
+        throw new Error(`Project not found or not accessible: ${projectId}`);
+      }
+      throw error;
+    }
+
+    console.log('Raw project data from database:', data);
+
+    // If project has a sandbox, ensure it's started
+    if (data.sandbox?.id) {
+      try {
+        const { data: { session } } = await supabase.auth.getSession();
+        if (session?.access_token) {
+          console.log(`Ensuring sandbox is active for project ${projectId}...`);
+          const response = await fetch(`${API_URL}/project/${projectId}/sandbox/ensure-active`, {
+            method: 'POST',
+            headers: {
+              'Authorization': `Bearer ${session.access_token}`,
+              'Content-Type': 'application/json',
+            },
+          });
+
+          if (!response.ok) {
+            const errorText = await response.text().catch(() => 'No error details available');
+            console.warn(`Failed to ensure sandbox is active: ${response.status} ${response.statusText}`, errorText);
+          } else {
+            console.log('Sandbox activation successful');
+          }
+        }
+      } catch (sandboxError) {
+        console.warn('Failed to ensure sandbox is active:', sandboxError);
+        // Non-blocking error - continue with the project data
+      }
+    }
+
+    // Map database fields to our Project type
+    const mappedProject: Project = {
+      id: data.project_id,
+      name: data.name || '',
+      description: data.description || '',
+      account_id: data.account_id,
+      created_at: data.created_at,
+      sandbox: data.sandbox || { id: "", pass: "", vnc_preview: "", sandbox_url: "" }
+    };
+
+    console.log('Mapped project data for frontend:', mappedProject);
+
+    // Cache the result
+    apiCache.setProject(projectId, mappedProject);
+    return mappedProject;
+  } catch (error) {
+    console.error(`Error fetching project ${projectId}:`, error);
+    throw error;
+  }
 };
 
 export const createProject = async (
@@ -1003,3 +1015,35 @@ export const getSandboxFileContent = async (sandboxId: string, path: string): Pr
 export const clearApiCache = () => {
   apiCache.clearAll();
 };
+
+export const updateThread = async (threadId: string, data: Partial<Thread>): Promise<Thread> => {
+  const supabase = createClient();
+
+  // Format the data for update
+  const updateData = { ...data };
+
+  // Update the thread
+  const { data: updatedThread, error } = await supabase
+    .from('threads')
+    .update(updateData)
+    .eq('thread_id', threadId)
+    .select()
+    .single();
+
+  if (error) {
+    console.error('Error updating thread:', error);
+    throw new Error(`Error updating thread: ${error.message}`);
+  }
+
+  // Invalidate thread cache if we're updating thread data
+  if (updatedThread.project_id) {
+    apiCache.invalidateThreads(updatedThread.project_id);
+  }
+  apiCache.invalidateThreads('all');
+
+  return updatedThread;
+};
+
+export const toggleThreadPublicStatus = async (threadId: string, isPublic: boolean): Promise<Thread> => {
+  return updateThread(threadId, { is_public: isPublic });
+};
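A sketch of how the UI side might consume the new helper: a hypothetical "Share" button handler that flips `is_public` and hands the user a link (the import path and the `/share/...` URL shape are assumptions, not defined in this diff):

```typescript
import { toggleThreadPublicStatus } from '@/lib/api'; // assumed module path for api.ts

// Hypothetical click handler: publish the thread, then copy a share link.
export async function handleShareClick(threadId: string): Promise<string> {
  const thread = await toggleThreadPublicStatus(threadId, true);
  const shareUrl = `${window.location.origin}/share/${thread.thread_id}`; // assumed route
  await navigator.clipboard.writeText(shareUrl);
  return shareUrl;
}
```

Unsharing is the symmetric call, `toggleThreadPublicStatus(threadId, false)`, which `updateThread` persists through the same `threads` update and cache invalidation.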