marko-kraemer 2025-04-21 01:29:16 +01:00
parent 50aa5ab163
commit aa93cd2d74
4 changed files with 151 additions and 128 deletions

View File

@@ -247,8 +247,11 @@ class WebSearchTool(Tool):
             )
             response.raise_for_status()
             data = response.json()
+            print(f"--- Raw Tavily Response ---")
+            print(data)
+            print(f"--------------------------")

-            # Normalize Tavily extract output to a list of dicts
+            # Normalise Tavily extract output to a list of dicts
            extracted = []
            if isinstance(data, list):
                extracted = data
@@ -260,25 +263,18 @@ class WebSearchTool(Tool):
            else:
                extracted = [data]

-            # Format results consistently
            formatted_results = []
            for item in extracted:
                formatted_result = {
-                    "title": item.get("title", ""),
-                    "url": item.get("url", url),
-                    "content": item.get("raw_content") or item.get("content") or item.get("text", "")
+                    "Title": item.get("title"),
+                    "URL": item.get("url") or url,
+                    "Text": item.get("raw_content") or item.get("content") or item.get("text")
                }
                if item.get("published_date"):
-                    formatted_result["published_date"] = item["published_date"]
+                    formatted_result["Published Date"] = item["published_date"]
                formatted_results.append(formatted_result)

-            # Return a properly formatted ToolResult
-            return ToolResult(
-                success=True,
-                output=json.dumps(formatted_results, ensure_ascii=False)
-            )
+            return self.success_response(formatted_results)

        except Exception as e:
            error_message = str(e)
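The extract handler above now flattens each Tavily item into a record with "Title", "URL", and "Text" keys (plus "Published Date" when available) and hands the whole list to self.success_response() instead of assembling a ToolResult by hand. For reference, a consumer of the tool output could type one item roughly as follows; only the field names come from the diff, while the interface name, the helper, and the assumption that the output arrives as a JSON string are illustrative:

// Hedged sketch: the per-item shape produced by the new formatting loop,
// as a frontend consumer might type it. Field names are taken from the diff;
// everything else here is made up for illustration.
interface ExtractedPage {
  Title: string;
  URL: string;
  Text: string;
  "Published Date"?: string; // only present when Tavily reports published_date
}

// Assumes the tool output is delivered as a JSON string containing the list.
function parseExtractOutput(output: string): ExtractedPage[] {
  const parsed = JSON.parse(output);
  return Array.isArray(parsed) ? parsed : [parsed];
}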

View File

@@ -10,8 +10,15 @@ import {
  Download,
  ArrowLeft,
  ArrowRight,
-  Fullscreen
+  Fullscreen,
+  Loader
} from "lucide-react";
+import { Document, Page, pdfjs } from "react-pdf";
+import 'react-pdf/dist/esm/Page/AnnotationLayer.css';
+import 'react-pdf/dist/esm/Page/TextLayer.css';
+
+// Initialize pdfjs worker
+pdfjs.GlobalWorkerOptions.workerSrc = `//unpkg.com/pdfjs-dist@${pdfjs.version}/build/pdf.worker.min.js`;

interface PdfRendererProps {
  url: string;
@@ -20,12 +27,16 @@ interface PdfRendererProps {
export function PdfRenderer({ url, className }: PdfRendererProps) {
  // State for zoom and rotation controls
-  const [zoom, setZoom] = useState(100);
+  const [zoom, setZoom] = useState(1);
  const [rotation, setRotation] = useState(0);
+  const [numPages, setNumPages] = useState<number | null>(null);
+  const [pageNumber, setPageNumber] = useState(1);
+  const [isLoading, setIsLoading] = useState(true);
+  const [error, setError] = useState<Error | null>(null);

  // Handle zoom in/out
-  const handleZoomIn = () => setZoom(prev => Math.min(prev + 25, 200));
-  const handleZoomOut = () => setZoom(prev => Math.max(prev - 25, 50));
+  const handleZoomIn = () => setZoom(prev => Math.min(prev + 0.2, 2.0));
+  const handleZoomOut = () => setZoom(prev => Math.max(prev - 0.2, 0.5));

  // Handle rotation
  const handleRotate = () => setRotation(prev => (prev + 90) % 360);
@@ -40,12 +51,32 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
    document.body.removeChild(link);
  };

+  // Handle page navigation
+  const goToPrevPage = () => setPageNumber(prev => Math.max(prev - 1, 1));
+  const goToNextPage = () => {
+    if (numPages !== null) {
+      setPageNumber(prev => Math.min(prev + 1, numPages));
+    }
+  };
+
+  // Handle document loading
+  const onDocumentLoadSuccess = ({ numPages }: { numPages: number }) => {
+    setNumPages(numPages);
+    setIsLoading(false);
+  };
+
+  const onDocumentLoadError = (error: Error) => {
+    console.error("Error loading PDF:", error);
+    setError(error);
+    setIsLoading(false);
+  };
+
  // Handle fullscreen
  const handleFullscreen = () => {
-    const iframe = document.querySelector('iframe');
-    if (iframe) {
-      if (iframe.requestFullscreen) {
-        iframe.requestFullscreen();
+    const pdfContainer = document.getElementById('pdf-container');
+    if (pdfContainer) {
+      if (pdfContainer.requestFullscreen) {
+        pdfContainer.requestFullscreen();
      }
    }
  };
@@ -64,7 +95,7 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
          >
            <ZoomOut className="h-4 w-4" />
          </Button>
-          <span className="text-xs font-medium">{zoom}%</span>
+          <span className="text-xs font-medium">{Math.round(zoom * 100)}%</span>
          <Button
            variant="ghost"
            size="sm"
@@ -86,6 +117,33 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
        </div>

        <div className="flex items-center space-x-2">
+          {numPages && (
+            <div className="flex items-center space-x-2 mr-2">
+              <Button
+                variant="ghost"
+                size="sm"
+                className="h-8 w-8 p-0"
+                onClick={goToPrevPage}
+                disabled={pageNumber <= 1}
+                title="Previous page"
+              >
+                <ArrowLeft className="h-4 w-4" />
+              </Button>
+              <span className="text-xs font-medium">
+                {pageNumber} / {numPages}
+              </span>
+              <Button
+                variant="ghost"
+                size="sm"
+                className="h-8 w-8 p-0"
+                onClick={goToNextPage}
+                disabled={pageNumber >= (numPages || 1)}
+                title="Next page"
+              >
+                <ArrowRight className="h-4 w-4" />
+              </Button>
+            </div>
+          )}
          <Button
            variant="ghost"
            size="sm"
@@ -108,17 +166,38 @@ export function PdfRenderer({ url, className }: PdfRendererProps) {
      </div>

      {/* PDF Viewer */}
-      <div className="flex-1 overflow-hidden rounded-b-md bg-white">
-        <iframe
-          src={url}
-          className="w-full h-full border-0"
-          style={{
-            transform: `scale(${zoom / 100}) rotate(${rotation}deg)`,
-            transformOrigin: 'center center',
-            transition: 'transform 0.2s ease'
-          }}
-          title="PDF Viewer"
-        />
+      <div id="pdf-container" className="flex-1 overflow-auto rounded-b-md bg-white flex justify-center">
+        {isLoading && (
+          <div className="flex items-center justify-center w-full h-full">
+            <Loader className="h-8 w-8 animate-spin text-muted-foreground" />
+          </div>
+        )}
+
+        {error && (
+          <div className="flex flex-col items-center justify-center w-full h-full text-destructive p-4 text-center">
+            <p className="font-semibold">Failed to load PDF</p>
+            <p className="text-sm mt-2">{error.message}</p>
+          </div>
+        )}
+
+        <Document
+          file={url}
+          onLoadSuccess={onDocumentLoadSuccess}
+          onLoadError={onDocumentLoadError}
+          loading={null}
+          className="mx-auto"
+        >
+          {!isLoading && !error && (
+            <Page
+              pageNumber={pageNumber}
+              scale={zoom}
+              rotate={rotation}
+              renderTextLayer={true}
+              renderAnnotationLayer={true}
+              className="shadow-md"
+            />
+          )}
+        </Document>
      </div>
    </div>
  );
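Taken together, the changes above replace the iframe viewer with react-pdf: the pdf.js worker is configured once at module load, Document/Page render the file, and the load success/error callbacks plus page state drive the toolbar. A minimal standalone sketch of that pattern, with simplified markup rather than the literal component from this commit:

// Hedged sketch of the react-pdf pattern used above. Component and prop names
// mirror react-pdf's documented API; the surrounding markup is simplified.
import { useState } from "react";
import { Document, Page, pdfjs } from "react-pdf";

pdfjs.GlobalWorkerOptions.workerSrc =
  `//unpkg.com/pdfjs-dist@${pdfjs.version}/build/pdf.worker.min.js`;

export function MinimalPdfViewer({ url }: { url: string }) {
  const [numPages, setNumPages] = useState<number | null>(null);
  const [pageNumber, setPageNumber] = useState(1);

  return (
    <div>
      <Document file={url} onLoadSuccess={({ numPages }) => setNumPages(numPages)}>
        <Page pageNumber={pageNumber} scale={1} rotate={0} />
      </Document>
      <button onClick={() => setPageNumber(p => Math.max(p - 1, 1))}>Prev</button>
      <span> {pageNumber} / {numPages ?? "?"} </span>
      <button onClick={() => setPageNumber(p => Math.min(p + 1, numPages ?? 1))}>Next</button>
    </div>
  );
}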

View File

@@ -366,22 +366,56 @@ export function extractUrlsAndTitles(content: string): Array<{ title: string, ur
  while ((match = urlRegex.exec(content)) !== null) {
    let url = match[0];

+    // --- Start: New Truncation Logic ---
+    // Find the first occurrence of potential garbage separators like /n or \n after the protocol.
+    const protocolEndIndex = url.indexOf('://');
+    const searchStartIndex = protocolEndIndex !== -1 ? protocolEndIndex + 3 : 0;
+
+    const newlineIndexN = url.indexOf('/n', searchStartIndex);
+    const newlineIndexSlashN = url.indexOf('\\n', searchStartIndex);
+
+    let firstNewlineIndex = -1;
+    if (newlineIndexN !== -1 && newlineIndexSlashN !== -1) {
+      firstNewlineIndex = Math.min(newlineIndexN, newlineIndexSlashN);
+    } else if (newlineIndexN !== -1) {
+      firstNewlineIndex = newlineIndexN;
+    } else if (newlineIndexSlashN !== -1) {
+      firstNewlineIndex = newlineIndexSlashN;
+    }
+
+    // If a newline indicator is found, truncate the URL there.
+    if (firstNewlineIndex !== -1) {
+      url = url.substring(0, firstNewlineIndex);
+    }
+    // --- End: New Truncation Logic ---
+
    // Basic cleaning: remove common tags or artifacts if they are directly appended
    url = url.replace(/<\/?url>$/, '')
             .replace(/<\/?content>$/, '')
             .replace(/%3C$/, ''); // Remove trailing %3C (less than sign)

-    // Decode URI components to handle % sequences, but catch errors
-    try {
-      url = decodeURIComponent(url);
-    } catch (e) {
-      // If decoding fails, use the URL as is, potentially still needs cleaning
-      console.warn("Failed to decode URL component:", url, e);
-    }
-
-    // Final cleaning for specific problematic sequences like ellipsis
-    url = url.replace(/\u2026$/, ''); // Remove trailing ellipsis (…)
+    // Aggressive trailing character removal (common issues)
+    // Apply this *after* potential truncation
+    while (/[);.,\/]$/.test(url)) {
+      url = url.slice(0, -1);
+    }
+
+    // Decode URI components to handle % sequences, but catch errors
+    try {
+      // Decode multiple times? Sometimes needed for double encoding
+      url = decodeURIComponent(decodeURIComponent(url));
+    } catch (e) {
+      try { // Try decoding once if double decoding failed
+        url = decodeURIComponent(url);
+      } catch (e2) {
+        console.warn("Failed to decode URL component:", url, e2);
+      }
+    }
+
+    // Final cleaning for specific problematic sequences like ellipsis or remaining tags
+    url = url.replace(/\u2026$/, ''); // Remove trailing ellipsis (…)
+    url = url.replace(/<\/?url>$/, '').replace(/<\/?content>$/, ''); // Re-apply tag removal after decode

    // Try to find a title near this URL - simplified logic
    const urlIndex = match.index;
    const surroundingText = content.substring(Math.max(0, urlIndex - 100), urlIndex + url.length + 150); // Increased lookahead for content
@@ -399,7 +433,7 @@ export function extractUrlsAndTitles(content: string): Array<{ title: string, ur
    }

    // Avoid adding duplicates if the cleaning resulted in the same URL
-    if (!results.some(r => r.url === url)) {
+    if (url && !results.some(r => r.url === url)) { // Added check for non-empty url
      results.push({
        title: title,
        url: url
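The new cleaning path runs in a fixed order: truncate at the first literal "/n" or "\n" after the protocol, strip trailing tags and artifacts, trim trailing punctuation, decode percent-escapes twice (falling back to a single decode), then drop a trailing ellipsis and re-strip tags. Pulled out of the loop, the same steps look roughly like this; the helper name is made up, but the logic mirrors the diff:

// Hedged sketch: the URL-cleaning steps from the diff collected into one helper.
function cleanExtractedUrl(raw: string): string {
  let url = raw;

  // Truncate at the first literal "/n" or "\n" marker after the protocol.
  const protocolEnd = url.indexOf('://');
  const searchStart = protocolEnd !== -1 ? protocolEnd + 3 : 0;
  const cutAt = ['/n', '\\n']
    .map(marker => url.indexOf(marker, searchStart))
    .filter(i => i !== -1)
    .reduce((min, i) => Math.min(min, i), Infinity);
  if (cutAt !== Infinity) url = url.substring(0, cutAt);

  // Strip appended tags/artifacts, then trailing punctuation.
  url = url.replace(/<\/?url>$/, '').replace(/<\/?content>$/, '').replace(/%3C$/, '');
  while (/[);.,\/]$/.test(url)) url = url.slice(0, -1);

  // Decode twice for double-encoded URLs, falling back to a single decode.
  try {
    url = decodeURIComponent(decodeURIComponent(url));
  } catch {
    try { url = decodeURIComponent(url); } catch { /* leave as-is */ }
  }

  // Final pass: trailing ellipsis and any tags re-exposed by decoding.
  return url.replace(/\u2026$/, '').replace(/<\/?url>$/, '').replace(/<\/?content>$/, '');
}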

View File

@@ -2,45 +2,6 @@ import { createClient } from '@/lib/supabase/client';

const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';

-// Simple cache implementation for non-agent data
-const apiCache = {
-  projects: new Map(),
-  threads: new Map(),
-  threadMessages: new Map(),
-
-  getProject: (projectId: string) => apiCache.projects.get(projectId),
-  setProject: (projectId: string, data: any) => apiCache.projects.set(projectId, data),
-
-  getProjects: () => apiCache.projects.get('all'),
-  setProjects: (data: any) => apiCache.projects.set('all', data),
-
-  getThreads: (projectId: string) => apiCache.threads.get(projectId || 'all'),
-  setThreads: (projectId: string, data: any) => apiCache.threads.set(projectId || 'all', data),
-  invalidateThreads: (projectId: string) => apiCache.threads.delete(projectId || 'all'),
-
-  getThreadMessages: (threadId: string) => apiCache.threadMessages.get(threadId),
-  setThreadMessages: (threadId: string, data: any) => apiCache.threadMessages.set(threadId, data),
-  invalidateThreadMessages: (threadId: string) => apiCache.threadMessages.delete(threadId),
-
-  // Functions to clear all cache
-  clearAll: () => {
-    apiCache.projects.clear();
-    apiCache.threads.clear();
-    apiCache.threadMessages.clear();
-    console.log('[API] Cache cleared');
-  },
-
-  clearProjects: () => {
-    apiCache.projects.clear();
-    console.log('[API] Projects cache cleared');
-  },
-
-  clearThreads: () => {
-    apiCache.threads.clear();
-    console.log('[API] Threads cache cleared');
-  }
-};
-
// Track active streams by agent run ID
const activeStreams = new Map<string, EventSource>();
@@ -96,13 +57,6 @@ export type ToolCall = {

// Project APIs
export const getProjects = async (): Promise<Project[]> => {
-  // Check cache first
-  const cached = apiCache.getProjects();
-  if (cached) {
-    console.log('[API] Returning cached projects:', cached.length);
-    return cached;
-  }
-
  try {
    const supabase = createClient();
    const { data, error } = await supabase
@@ -133,8 +87,6 @@ export const getProjects = async (): Promise<Project[]> => {
    console.log('[API] Mapped projects for frontend:', mappedProjects.length);

-    // Cache the result
-    apiCache.setProjects(mappedProjects);
    return mappedProjects;
  } catch (err) {
    console.error('Error fetching projects:', err);
@@ -144,12 +96,6 @@ export const getProjects = async (): Promise<Project[]> => {
};

export const getProject = async (projectId: string): Promise<Project> => {
-  // Check cache first
-  const cached = apiCache.getProject(projectId);
-  if (cached) {
-    return cached;
-  }
-
  const supabase = createClient();

  try {
@@ -208,8 +154,6 @@ export const getProject = async (projectId: string): Promise<Project> => {
    console.log('Mapped project data for frontend:', mappedProject);

-    // Cache the result
-    apiCache.setProject(projectId, mappedProject);
    return mappedProject;
  } catch (error) {
    console.error(`Error fetching project ${projectId}:`, error);
@@ -284,10 +228,6 @@ export const updateProject = async (projectId: string, data: Partial<Project>):
  if (!updatedData) {
    throw new Error('No data returned from update');
  }

-  // Invalidate cache after successful update
-  apiCache.projects.delete(projectId);
-  apiCache.projects.delete('all');
-
  // Dispatch a custom event to notify components about the project change
  if (typeof window !== 'undefined') {
@@ -326,13 +266,6 @@ export const deleteProject = async (projectId: string): Promise<void> => {

// Thread APIs
export const getThreads = async (projectId?: string): Promise<Thread[]> => {
-  // Check cache first
-  const cached = apiCache.getThreads(projectId || 'all');
-  if (cached) {
-    console.log('[API] Returning cached threads:', cached.length, projectId ? `for project ${projectId}` : 'for all projects');
-    return cached;
-  }
-
  const supabase = createClient();

  let query = supabase.from('threads').select('*');
@@ -359,8 +292,6 @@ export const getThreads = async (projectId?: string): Promise<Thread[]> => {
    updated_at: thread.updated_at
  }));

-  // Cache the result
-  apiCache.setThreads(projectId || 'all', mappedThreads);
  return mappedThreads;
};
@@ -423,13 +354,9 @@ export const addUserMessage = async (threadId: string, content: string): Promise
    console.error('Error adding user message:', error);
    throw new Error(`Error adding message: ${error.message}`);
  }
-
-  // Invalidate the cache for this thread's messages
-  apiCache.invalidateThreadMessages(threadId);
};

export const getMessages = async (threadId: string): Promise<Message[]> => {
-  // Cache code removed - will always fetch fresh messages
  const supabase = createClient();

  const { data, error } = await supabase
@@ -447,8 +374,6 @@ export const getMessages = async (threadId: string): Promise<Message[]> => {
  console.log('[API] Messages fetched:', data);

-  // Cache storage removed
  return data || [];
};
@@ -1005,11 +930,6 @@ export const getSandboxFileContent = async (sandboxId: string, path: string): Pr
  }
};

-// Function to clear all API cache
-export const clearApiCache = () => {
-  apiCache.clearAll();
-};
-
export const updateThread = async (threadId: string, data: Partial<Thread>): Promise<Thread> => {
  const supabase = createClient();
@@ -1029,12 +949,6 @@ export const updateThread = async (threadId: string, data: Partial<Thread>): Pro
    throw new Error(`Error updating thread: ${error.message}`);
  }

-  // Invalidate thread cache if we're updating thread data
-  if (updatedThread.project_id) {
-    apiCache.invalidateThreads(updatedThread.project_id);
-  }
-  apiCache.invalidateThreads('all');
-
  return updatedThread;
};
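With the module-level apiCache and clearApiCache gone, every getProjects/getProject/getThreads/getMessages call now goes straight to Supabase, and the update paths no longer need invalidation hooks. The resulting fetch path looks roughly like the following sketch; the 'threads' table, createClient import, and mapping come from the diff, while the project_id filter and the abbreviated Thread type are assumptions:

// Hedged sketch of the post-commit data flow: no module-level cache, each call
// builds a Supabase client and queries directly, so results are always fresh.
import { createClient } from '@/lib/supabase/client';

type Thread = { updated_at?: string } & Record<string, unknown>; // illustrative only

export const getThreads = async (projectId?: string): Promise<Thread[]> => {
  const supabase = createClient();

  let query = supabase.from('threads').select('*');
  if (projectId) {
    query = query.eq('project_id', projectId); // column name assumed
  }

  const { data, error } = await query;
  if (error) {
    throw new Error(`Error fetching threads: ${error.message}`);
  }

  // No apiCache.setThreads(...) step anymore; just map and return.
  return (data || []).map(thread => ({
    ...thread,
    updated_at: thread.updated_at,
  }));
};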