mirror of https://github.com/kortix-ai/suna.git
wip
parent 50aa5ab163 · commit aa93cd2d74
@@ -247,8 +247,11 @@ class WebSearchTool(Tool):
             )
             response.raise_for_status()
             data = response.json()
+            print(f"--- Raw Tavily Response ---")
+            print(data)
+            print(f"--------------------------")
 
-            # Normalize Tavily extract output to a list of dicts
+            # Normalise Tavily extract output to a list of dicts
             extracted = []
             if isinstance(data, list):
                 extracted = data
@@ -260,25 +263,18 @@ class WebSearchTool(Tool):
             else:
                 extracted = [data]
 
             # Format results consistently
             formatted_results = []
             for item in extracted:
                 formatted_result = {
-                    "title": item.get("title", ""),
-                    "url": item.get("url", url),
-                    "content": item.get("raw_content") or item.get("content") or item.get("text", "")
+                    "Title": item.get("title"),
+                    "URL": item.get("url") or url,
+                    "Text": item.get("raw_content") or item.get("content") or item.get("text")
                 }
 
                 if item.get("published_date"):
-                    formatted_result["published_date"] = item["published_date"]
+                    formatted_result["Published Date"] = item["published_date"]
 
                 formatted_results.append(formatted_result)
 
-            # Return a properly formatted ToolResult
-            return ToolResult(
-                success=True,
-                output=json.dumps(formatted_results, ensure_ascii=False)
-            )
+            return self.success_response(formatted_results)
 
         except Exception as e:
             error_message = str(e)
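Note on the key rename above: the tool result now uses capitalised keys ("Title", "URL", "Text", "Published Date"), so anything consuming the serialised output must match them. A minimal TypeScript sketch of the new shape, assuming success_response serialises the list to a JSON array (the TavilyExtractResult name and parseExtractOutput helper are hypothetical, not part of the commit):

    interface TavilyExtractResult {
      "Title": string | null;
      "URL": string;
      "Text": string | null;
      "Published Date"?: string;
    }

    // Parse the tool's serialised output back into typed records on the consumer side.
    function parseExtractOutput(output: string): TavilyExtractResult[] {
      return JSON.parse(output) as TavilyExtractResult[];
    }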
@@ -10,8 +10,15 @@ import {
   Download,
   ArrowLeft,
   ArrowRight,
-  Fullscreen
+  Fullscreen,
+  Loader
 } from "lucide-react";
+import { Document, Page, pdfjs } from "react-pdf";
+import 'react-pdf/dist/esm/Page/AnnotationLayer.css';
+import 'react-pdf/dist/esm/Page/TextLayer.css';
+
+// Initialize pdfjs worker
+pdfjs.GlobalWorkerOptions.workerSrc = `//unpkg.com/pdfjs-dist@${pdfjs.version}/build/pdf.worker.min.js`;
+
 interface PdfRendererProps {
   url: string;
@@ -20,12 +27,16 @@ interface PdfRendererProps {
 
 export function PdfRenderer({ url, className }: PdfRendererProps) {
   // State for zoom and rotation controls
-  const [zoom, setZoom] = useState(100);
+  const [zoom, setZoom] = useState(1);
   const [rotation, setRotation] = useState(0);
+  const [numPages, setNumPages] = useState<number | null>(null);
+  const [pageNumber, setPageNumber] = useState(1);
+  const [isLoading, setIsLoading] = useState(true);
+  const [error, setError] = useState<Error | null>(null);
 
   // Handle zoom in/out
-  const handleZoomIn = () => setZoom(prev => Math.min(prev + 25, 200));
-  const handleZoomOut = () => setZoom(prev => Math.max(prev - 25, 50));
+  const handleZoomIn = () => setZoom(prev => Math.min(prev + 0.2, 2.0));
+  const handleZoomOut = () => setZoom(prev => Math.max(prev - 0.2, 0.5));
 
   // Handle rotation
   const handleRotate = () => setRotation(prev => (prev + 90) % 360);
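The zoom state is now a fractional render scale (1 = 100%) that feeds react-pdf's scale prop directly, clamped to 0.5–2.0 in steps of 0.2. If the bounds ever need to live in one place, a small helper could factor them out — a sketch only, the clamp helper is not part of this commit:

    const clamp = (value: number, min: number, max: number) =>
      Math.min(Math.max(value, min), max);

    // The two handlers above would then share one set of bounds:
    const handleZoomIn = () => setZoom(prev => clamp(prev + 0.2, 0.5, 2.0));
    const handleZoomOut = () => setZoom(prev => clamp(prev - 0.2, 0.5, 2.0));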
@@ -40,12 +51,32 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
     document.body.removeChild(link);
   };
 
+  // Handle page navigation
+  const goToPrevPage = () => setPageNumber(prev => Math.max(prev - 1, 1));
+  const goToNextPage = () => {
+    if (numPages !== null) {
+      setPageNumber(prev => Math.min(prev + 1, numPages));
+    }
+  };
+
+  // Handle document loading
+  const onDocumentLoadSuccess = ({ numPages }: { numPages: number }) => {
+    setNumPages(numPages);
+    setIsLoading(false);
+  };
+
+  const onDocumentLoadError = (error: Error) => {
+    console.error("Error loading PDF:", error);
+    setError(error);
+    setIsLoading(false);
+  };
+
   // Handle fullscreen
   const handleFullscreen = () => {
-    const iframe = document.querySelector('iframe');
-    if (iframe) {
-      if (iframe.requestFullscreen) {
-        iframe.requestFullscreen();
+    const pdfContainer = document.getElementById('pdf-container');
+    if (pdfContainer) {
+      if (pdfContainer.requestFullscreen) {
+        pdfContainer.requestFullscreen();
       }
     }
   };
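handleFullscreen now targets the dedicated pdf-container element rather than an <iframe> that no longer exists after this commit. A hedged variant (not in the commit) that also exits on a second invocation, using only standard Fullscreen API calls:

    const toggleFullscreen = async () => {
      const pdfContainer = document.getElementById('pdf-container');
      if (!pdfContainer) return;
      if (document.fullscreenElement) {
        // Already fullscreen: exit instead of re-entering.
        await document.exitFullscreen();
      } else if (pdfContainer.requestFullscreen) {
        await pdfContainer.requestFullscreen();
      }
    };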
@@ -64,7 +95,7 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
           >
             <ZoomOut className="h-4 w-4" />
           </Button>
-          <span className="text-xs font-medium">{zoom}%</span>
+          <span className="text-xs font-medium">{Math.round(zoom * 100)}%</span>
           <Button
             variant="ghost"
             size="sm"
@@ -86,6 +117,33 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
         </div>
 
         <div className="flex items-center space-x-2">
+          {numPages && (
+            <div className="flex items-center space-x-2 mr-2">
+              <Button
+                variant="ghost"
+                size="sm"
+                className="h-8 w-8 p-0"
+                onClick={goToPrevPage}
+                disabled={pageNumber <= 1}
+                title="Previous page"
+              >
+                <ArrowLeft className="h-4 w-4" />
+              </Button>
+              <span className="text-xs font-medium">
+                {pageNumber} / {numPages}
+              </span>
+              <Button
+                variant="ghost"
+                size="sm"
+                className="h-8 w-8 p-0"
+                onClick={goToNextPage}
+                disabled={pageNumber >= (numPages || 1)}
+                title="Next page"
+              >
+                <ArrowRight className="h-4 w-4" />
+              </Button>
+            </div>
+          )}
           <Button
             variant="ghost"
             size="sm"
@@ -108,17 +166,38 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
       </div>
 
       {/* PDF Viewer */}
-      <div className="flex-1 overflow-hidden rounded-b-md bg-white">
-        <iframe
-          src={url}
-          className="w-full h-full border-0"
-          style={{
-            transform: `scale(${zoom / 100}) rotate(${rotation}deg)`,
-            transformOrigin: 'center center',
-            transition: 'transform 0.2s ease'
-          }}
-          title="PDF Viewer"
-        />
+      <div id="pdf-container" className="flex-1 overflow-auto rounded-b-md bg-white flex justify-center">
+        {isLoading && (
+          <div className="flex items-center justify-center w-full h-full">
+            <Loader className="h-8 w-8 animate-spin text-muted-foreground" />
+          </div>
+        )}
+
+        {error && (
+          <div className="flex flex-col items-center justify-center w-full h-full text-destructive p-4 text-center">
+            <p className="font-semibold">Failed to load PDF</p>
+            <p className="text-sm mt-2">{error.message}</p>
+          </div>
+        )}
+
+        <Document
+          file={url}
+          onLoadSuccess={onDocumentLoadSuccess}
+          onLoadError={onDocumentLoadError}
+          loading={null}
+          className="mx-auto"
+        >
+          {!isLoading && !error && (
+            <Page
+              pageNumber={pageNumber}
+              scale={zoom}
+              rotate={rotation}
+              renderTextLayer={true}
+              renderAnnotationLayer={true}
+              className="shadow-md"
+            />
+          )}
+        </Document>
       </div>
     </div>
   );
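A design note on the worker line added at the top of this file: loading pdf.worker.min.js from unpkg ties the viewer to a third-party CDN at runtime, and a CDN outage would break rendering. react-pdf also documents a bundler-resolved alternative; a sketch, assuming the bundler supports new URL(..., import.meta.url) asset references:

    pdfjs.GlobalWorkerOptions.workerSrc = new URL(
      'pdfjs-dist/build/pdf.worker.min.js',
      import.meta.url,
    ).toString();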
@@ -366,21 +366,55 @@ export function extractUrlsAndTitles(content: string): Array<{ title: string, url: string }> {
   while ((match = urlRegex.exec(content)) !== null) {
     let url = match[0];
 
+    // --- Start: New Truncation Logic ---
+    // Find the first occurrence of potential garbage separators like /n or \n after the protocol.
+    const protocolEndIndex = url.indexOf('://');
+    const searchStartIndex = protocolEndIndex !== -1 ? protocolEndIndex + 3 : 0;
+
+    const newlineIndexN = url.indexOf('/n', searchStartIndex);
+    const newlineIndexSlashN = url.indexOf('\\n', searchStartIndex);
+
+    let firstNewlineIndex = -1;
+    if (newlineIndexN !== -1 && newlineIndexSlashN !== -1) {
+      firstNewlineIndex = Math.min(newlineIndexN, newlineIndexSlashN);
+    } else if (newlineIndexN !== -1) {
+      firstNewlineIndex = newlineIndexN;
+    } else if (newlineIndexSlashN !== -1) {
+      firstNewlineIndex = newlineIndexSlashN;
+    }
+
+    // If a newline indicator is found, truncate the URL there.
+    if (firstNewlineIndex !== -1) {
+      url = url.substring(0, firstNewlineIndex);
+    }
+    // --- End: New Truncation Logic ---
+
     // Basic cleaning: remove common tags or artifacts if they are directly appended
     url = url.replace(/<\/?url>$/, '')
              .replace(/<\/?content>$/, '')
              .replace(/%3C$/, ''); // Remove trailing %3C (less than sign)
 
-    // Decode URI components to handle % sequences, but catch errors
-    try {
-      url = decodeURIComponent(url);
-    } catch (e) {
-      // If decoding fails, use the URL as is, potentially still needs cleaning
-      console.warn("Failed to decode URL component:", url, e);
-    }
-
     // Aggressive trailing character removal (common issues)
+    // Apply this *after* potential truncation
     while (/[);.,\/]$/.test(url)) {
      url = url.slice(0, -1);
     }
 
-    // Final cleaning for specific problematic sequences like ellipsis
+    // Decode URI components to handle % sequences, but catch errors
+    try {
+      // Decode multiple times? Sometimes needed for double encoding
+      url = decodeURIComponent(decodeURIComponent(url));
+    } catch (e) {
+      try { // Try decoding once if double decoding failed
+        url = decodeURIComponent(url);
+      } catch (e2) {
+        console.warn("Failed to decode URL component:", url, e2);
+      }
+    }
+
+    // Final cleaning for specific problematic sequences like ellipsis or remaining tags
     url = url.replace(/\u2026$/, ''); // Remove trailing ellipsis (…)
+    url = url.replace(/<\/?url>$/, '').replace(/<\/?content>$/, ''); // Re-apply tag removal after decode
 
     // Try to find a title near this URL - simplified logic
     const urlIndex = match.index;
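The truncation block above scans for the literal two-character sequences '/n' and '\n' that leak into scraped URLs. The same logic as a standalone pure function, with a usage example (the truncateAtNewlineMarker name is hypothetical, not part of the commit):

    function truncateAtNewlineMarker(url: string): string {
      // Only search after the protocol so 'https://' itself is never split.
      const protocolEnd = url.indexOf('://');
      const start = protocolEnd !== -1 ? protocolEnd + 3 : 0;
      const hits = ['/n', '\\n']
        .map(marker => url.indexOf(marker, start))
        .filter(index => index !== -1);
      return hits.length > 0 ? url.slice(0, Math.min(...hits)) : url;
    }

    // truncateAtNewlineMarker('https://example.com/page/nTrailing garbage')
    //   => 'https://example.com/page'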
@@ -399,7 +433,7 @@ export function extractUrlsAndTitles(content: string): Array<{ title: string, url: string }> {
     }
 
     // Avoid adding duplicates if the cleaning resulted in the same URL
-    if (!results.some(r => r.url === url)) {
+    if (url && !results.some(r => r.url === url)) { // Added check for non-empty url
       results.push({
         title: title,
         url: url
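The added url && guard keeps empty strings (a possible result of the aggressive cleaning) out of the list. results.some makes dedup O(n²) over all matches; a Set would be O(1) per lookup — a sketch only, not in the commit:

    const seen = new Set<string>();
    // ...inside the while loop, after cleaning each url:
    if (url && !seen.has(url)) {
      seen.add(url);
      results.push({ title, url });
    }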
@@ -2,45 +2,6 @@ import { createClient } from '@/lib/supabase/client';
 
 const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';
 
-// Simple cache implementation for non-agent data
-const apiCache = {
-  projects: new Map(),
-  threads: new Map(),
-  threadMessages: new Map(),
-
-  getProject: (projectId: string) => apiCache.projects.get(projectId),
-  setProject: (projectId: string, data: any) => apiCache.projects.set(projectId, data),
-
-  getProjects: () => apiCache.projects.get('all'),
-  setProjects: (data: any) => apiCache.projects.set('all', data),
-
-  getThreads: (projectId: string) => apiCache.threads.get(projectId || 'all'),
-  setThreads: (projectId: string, data: any) => apiCache.threads.set(projectId || 'all', data),
-  invalidateThreads: (projectId: string) => apiCache.threads.delete(projectId || 'all'),
-
-  getThreadMessages: (threadId: string) => apiCache.threadMessages.get(threadId),
-  setThreadMessages: (threadId: string, data: any) => apiCache.threadMessages.set(threadId, data),
-  invalidateThreadMessages: (threadId: string) => apiCache.threadMessages.delete(threadId),
-
-  // Functions to clear all cache
-  clearAll: () => {
-    apiCache.projects.clear();
-    apiCache.threads.clear();
-    apiCache.threadMessages.clear();
-    console.log('[API] Cache cleared');
-  },
-
-  clearProjects: () => {
-    apiCache.projects.clear();
-    console.log('[API] Projects cache cleared');
-  },
-
-  clearThreads: () => {
-    apiCache.threads.clear();
-    console.log('[API] Threads cache cleared');
-  }
-};
-
 // Track active streams by agent run ID
 const activeStreams = new Map<string, EventSource>();
 
@@ -96,13 +57,6 @@ export type ToolCall = {
 
 // Project APIs
 export const getProjects = async (): Promise<Project[]> => {
-  // Check cache first
-  const cached = apiCache.getProjects();
-  if (cached) {
-    console.log('[API] Returning cached projects:', cached.length);
-    return cached;
-  }
-
   try {
     const supabase = createClient();
     const { data, error } = await supabase
@@ -133,8 +87,6 @@ export const getProjects = async (): Promise<Project[]> => {
 
     console.log('[API] Mapped projects for frontend:', mappedProjects.length);
 
-    // Cache the result
-    apiCache.setProjects(mappedProjects);
     return mappedProjects;
   } catch (err) {
     console.error('Error fetching projects:', err);
@@ -144,12 +96,6 @@ export const getProjects = async (): Promise<Project[]> => {
 };
 
 export const getProject = async (projectId: string): Promise<Project> => {
-  // Check cache first
-  const cached = apiCache.getProject(projectId);
-  if (cached) {
-    return cached;
-  }
-
   const supabase = createClient();
 
   try {
@@ -208,8 +154,6 @@ export const getProject = async (projectId: string): Promise<Project> => {
 
     console.log('Mapped project data for frontend:', mappedProject);
 
-    // Cache the result
-    apiCache.setProject(projectId, mappedProject);
     return mappedProject;
   } catch (error) {
     console.error(`Error fetching project ${projectId}:`, error);
@@ -285,10 +229,6 @@ export const updateProject = async (projectId: string, data: Partial<Project>): Promise<Project> => {
     throw new Error('No data returned from update');
   }
 
-  // Invalidate cache after successful update
-  apiCache.projects.delete(projectId);
-  apiCache.projects.delete('all');
-
   // Dispatch a custom event to notify components about the project change
   if (typeof window !== 'undefined') {
     window.dispatchEvent(new CustomEvent('project-updated', {
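With the cache invalidation gone, the project-updated CustomEvent dispatched above becomes the main signal for components to refresh after an update. A minimal listener sketch, assuming a React consumer (the useProjectUpdated hook name is hypothetical):

    import { useEffect } from 'react';

    function useProjectUpdated(onUpdate: (detail: unknown) => void) {
      useEffect(() => {
        // Re-run the caller's refresh logic whenever a project is updated.
        const handler = (event: Event) => onUpdate((event as CustomEvent).detail);
        window.addEventListener('project-updated', handler);
        return () => window.removeEventListener('project-updated', handler);
      }, [onUpdate]);
    }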
@@ -326,13 +266,6 @@ export const deleteProject = async (projectId: string): Promise<void> => {
 
 // Thread APIs
 export const getThreads = async (projectId?: string): Promise<Thread[]> => {
-  // Check cache first
-  const cached = apiCache.getThreads(projectId || 'all');
-  if (cached) {
-    console.log('[API] Returning cached threads:', cached.length, projectId ? `for project ${projectId}` : 'for all projects');
-    return cached;
-  }
-
   const supabase = createClient();
   let query = supabase.from('threads').select('*');
 
@@ -359,8 +292,6 @@ export const getThreads = async (projectId?: string): Promise<Thread[]> => {
     updated_at: thread.updated_at
   }));
 
-  // Cache the result
-  apiCache.setThreads(projectId || 'all', mappedThreads);
   return mappedThreads;
 };
 
@@ -423,13 +354,9 @@ export const addUserMessage = async (threadId: string, content: string): Promise<void> => {
     console.error('Error adding user message:', error);
     throw new Error(`Error adding message: ${error.message}`);
   }
-
-  // Invalidate the cache for this thread's messages
-  apiCache.invalidateThreadMessages(threadId);
 };
 
 export const getMessages = async (threadId: string): Promise<Message[]> => {
+  // Cache code removed - will always fetch fresh messages
   const supabase = createClient();
 
   const { data, error } = await supabase
@@ -447,8 +374,6 @@ export const getMessages = async (threadId: string): Promise<Message[]> => {
 
   console.log('[API] Messages fetched:', data);
 
+  // Cache storage removed
 
   return data || [];
 };
 
@@ -1005,11 +930,6 @@ export const getSandboxFileContent = async (sandboxId: string, path: string): Pr
   }
 };
 
-// Function to clear all API cache
-export const clearApiCache = () => {
-  apiCache.clearAll();
-};
-
 export const updateThread = async (threadId: string, data: Partial<Thread>): Promise<Thread> => {
   const supabase = createClient();
 
@@ -1029,12 +949,6 @@ export const updateThread = async (threadId: string, data: Partial<Thread>): Promise<Thread> => {
     throw new Error(`Error updating thread: ${error.message}`);
   }
 
-  // Invalidate thread cache if we're updating thread data
-  if (updatedThread.project_id) {
-    apiCache.invalidateThreads(updatedThread.project_id);
-  }
-  apiCache.invalidateThreads('all');
-
   return updatedThread;
 };