feat: rework thread content and improve playback, file attachments, and error handling

Vukasin 2025-05-06 19:15:22 +02:00
parent 6a464d4454
commit fea771b0f2
6 changed files with 1563 additions and 1394 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,518 @@
import React, { useCallback, useEffect, useState, useRef } from 'react';
import { Button } from '@/components/ui/button';
import { Play, Pause, ArrowDown, FileText, Info } from 'lucide-react';
import { UnifiedMessage } from '@/components/thread/types';
import { safeJsonParse } from '@/components/thread/utils';
// Define the set of tags whose raw XML should be hidden during streaming
const HIDE_STREAMING_XML_TAGS = new Set([
'execute-command',
'create-file',
'delete-file',
'full-file-rewrite',
'str-replace',
'browser-click-element',
'browser-close-tab',
'browser-drag-drop',
'browser-get-dropdown-options',
'browser-go-back',
'browser-input-text',
'browser-navigate-to',
'browser-scroll-down',
'browser-scroll-to-text',
'browser-scroll-up',
'browser-select-dropdown-option',
'browser-send-keys',
'browser-switch-tab',
'browser-wait',
'deploy',
'ask',
'complete',
'crawl-webpage',
'web-search'
]);
export interface PlaybackControlsProps {
messages: UnifiedMessage[];
isSidePanelOpen: boolean;
onToggleSidePanel: () => void;
toolCalls: any[];
setCurrentToolIndex: (index: number) => void;
onFileViewerOpen: () => void;
projectName?: string;
}
export interface PlaybackState {
isPlaying: boolean;
currentMessageIndex: number;
visibleMessages: UnifiedMessage[];
streamingText: string;
isStreamingText: boolean;
currentToolCall: any | null;
toolPlaybackIndex: number;
}
export interface PlaybackController {
playbackState: PlaybackState;
updatePlaybackState: (updates: Partial<PlaybackState>) => void;
renderHeader: () => JSX.Element;
renderFloatingControls: () => JSX.Element;
renderWelcomeOverlay: () => JSX.Element;
togglePlayback: () => void;
resetPlayback: () => void;
skipToEnd: () => void;
}
export const PlaybackControls = ({
messages,
isSidePanelOpen,
onToggleSidePanel,
toolCalls,
setCurrentToolIndex,
onFileViewerOpen,
projectName = 'Shared Conversation'
}: PlaybackControlsProps): PlaybackController => {
const [playbackState, setPlaybackState] = useState<PlaybackState>({
isPlaying: false,
currentMessageIndex: 0,
visibleMessages: [],
streamingText: "",
isStreamingText: false,
currentToolCall: null,
toolPlaybackIndex: -1
});
// Extract state variables for easier access
const {
isPlaying,
currentMessageIndex,
visibleMessages,
streamingText,
isStreamingText,
currentToolCall,
toolPlaybackIndex
} = playbackState;
// Helper function to update playback state
const updatePlaybackState = useCallback((updates: Partial<PlaybackState>) => {
setPlaybackState(prev => ({ ...prev, ...updates }));
}, []);
// Define togglePlayback and resetPlayback functions
const togglePlayback = useCallback(() => {
updatePlaybackState({
isPlaying: !isPlaying
});
// When starting playback, show the side panel
if (!isPlaying && !isSidePanelOpen) {
onToggleSidePanel();
}
}, [isPlaying, isSidePanelOpen, onToggleSidePanel]);
const resetPlayback = useCallback(() => {
updatePlaybackState({
isPlaying: false,
currentMessageIndex: 0,
visibleMessages: [],
streamingText: "",
isStreamingText: false,
currentToolCall: null,
toolPlaybackIndex: -1
});
}, [updatePlaybackState]);
const skipToEnd = useCallback(() => {
updatePlaybackState({
isPlaying: false,
currentMessageIndex: messages.length,
visibleMessages: messages,
streamingText: "",
isStreamingText: false,
currentToolCall: null,
toolPlaybackIndex: toolCalls.length - 1
});
if (toolCalls.length > 0) {
setCurrentToolIndex(toolCalls.length - 1);
if (!isSidePanelOpen) {
onToggleSidePanel();
}
}
}, [messages, toolCalls, isSidePanelOpen, onToggleSidePanel, setCurrentToolIndex, updatePlaybackState]);
// Streaming text function
const streamText = useCallback((text: string, onComplete: () => void) => {
if (!text || !isPlaying) {
onComplete();
return () => { };
}
updatePlaybackState({
isStreamingText: true,
streamingText: ""
});
// Define regex to find tool calls in text
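// e.g. it matches paired tags like <create-file file_path="plan.md">...</create-file> (tag name in group 1)
// and self-closing tags like <browser-go-back /> (tag name in group 2); the attribute shown is illustrative.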
const toolCallRegex = /<([a-zA-Z\-_]+)(?:\s+[^>]*)?>(?:[\s\S]*?)<\/\1>|<([a-zA-Z\-_]+)(?:\s+[^>]*)?\/>/g;
// Split text into chunks (handling tool calls as special chunks)
const chunks: { text: string; isTool: boolean; toolName?: string }[] = [];
let lastIndex = 0;
let match;
while ((match = toolCallRegex.exec(text)) !== null) {
// Add text before the tool call
if (match.index > lastIndex) {
chunks.push({
text: text.substring(lastIndex, match.index),
isTool: false
});
}
// Add the tool call
const toolName = match[1] || match[2];
chunks.push({
text: match[0],
isTool: true,
toolName
});
lastIndex = toolCallRegex.lastIndex;
}
// Add any remaining text after the last tool call
if (lastIndex < text.length) {
chunks.push({
text: text.substring(lastIndex),
isTool: false
});
}
let currentIndex = 0;
let chunkIndex = 0;
let currentText = '';
let isPaused = false;
const processNextCharacter = () => {
// Check if component is unmounted or playback is stopped
if (!isPlaying || isPaused) {
setTimeout(processNextCharacter, 100); // Check again after a short delay
return;
}
if (chunkIndex >= chunks.length) {
// All chunks processed, we're done
updatePlaybackState({
isStreamingText: false
});
// Update visible messages with the complete message
const currentMessage = messages[currentMessageIndex];
const lastMessage = visibleMessages[visibleMessages.length - 1];
if (lastMessage?.message_id === currentMessage.message_id) {
// Replace the streaming message with the complete one
updatePlaybackState({
visibleMessages: [...visibleMessages.slice(0, -1), currentMessage]
});
} else {
// Add the complete message
updatePlaybackState({
visibleMessages: [...visibleMessages, currentMessage]
});
}
onComplete();
return;
}
const currentChunk = chunks[chunkIndex];
// If this is a tool call chunk and we're at the start of it
if (currentChunk.isTool && currentIndex === 0) {
// For tool calls, check if they should be hidden during streaming
if (currentChunk.toolName && HIDE_STREAMING_XML_TAGS.has(currentChunk.toolName)) {
// Instead of showing the XML, create a tool call object
const toolCall = {
name: currentChunk.toolName,
arguments: currentChunk.text,
xml_tag_name: currentChunk.toolName
};
updatePlaybackState({
currentToolCall: toolCall,
toolPlaybackIndex: toolPlaybackIndex + 1
});
if (!isSidePanelOpen) {
onToggleSidePanel();
}
setCurrentToolIndex(toolPlaybackIndex + 1);
// Pause streaming briefly while showing the tool
isPaused = true;
setTimeout(() => {
isPaused = false;
updatePlaybackState({ currentToolCall: null });
chunkIndex++; // Move to next chunk
currentIndex = 0; // Reset index for next chunk
processNextCharacter();
}, 500); // Reduced from 1500ms to 500ms pause for tool display
return;
}
}
// Handle normal text streaming for non-tool chunks or visible tool chunks
if (currentIndex < currentChunk.text.length) {
// Dynamically adjust typing speed for a more realistic effect
const baseDelay = 5; // Reduced from 15ms to 5ms
let typingDelay = baseDelay;
// Add more delay for punctuation to make it feel more natural
const char = currentChunk.text[currentIndex];
if (".!?,;:".includes(char)) {
typingDelay = baseDelay + Math.random() * 100 + 50; // Reduced from 300+100 to 100+50ms pause after punctuation
} else {
const variableDelay = Math.random() * 5; // Reduced from 15 to 5ms
typingDelay = baseDelay + variableDelay; // 5-10ms for normal typing
}
// Add the next character
currentText += currentChunk.text[currentIndex];
updatePlaybackState({ streamingText: currentText });
currentIndex++;
// Process next character with dynamic delay
setTimeout(processNextCharacter, typingDelay);
} else {
// Move to the next chunk
chunkIndex++;
currentIndex = 0;
processNextCharacter();
}
};
processNextCharacter();
// Return cleanup function
return () => {
updatePlaybackState({
isStreamingText: false,
streamingText: ""
});
isPaused = true; // Stop processing
};
}, [isPlaying, messages, currentMessageIndex, toolPlaybackIndex, setCurrentToolIndex, isSidePanelOpen, onToggleSidePanel, updatePlaybackState, visibleMessages]);
// Main playback function
useEffect(() => {
if (!isPlaying || messages.length === 0) return;
let playbackTimeout: NodeJS.Timeout;
let cleanupStreaming: (() => void) | undefined;
const playbackNextMessage = async () => {
// Ensure we're within bounds
if (currentMessageIndex >= messages.length) {
updatePlaybackState({ isPlaying: false });
return;
}
const currentMessage = messages[currentMessageIndex];
console.log(`Playing message ${currentMessageIndex}:`, currentMessage.type, currentMessage.message_id);
// If it's an assistant message, stream it
if (currentMessage.type === 'assistant') {
try {
// Parse the content if it's JSON
let content = currentMessage.content;
try {
const parsed = JSON.parse(content);
if (parsed.content) {
content = parsed.content;
}
} catch (e) {
// Not JSON, use as is
}
// Stream the message content
await new Promise<void>((resolve) => {
cleanupStreaming = streamText(content, resolve);
});
} catch (error) {
console.error('Error streaming message:', error);
}
} else {
// For non-assistant messages, just add them to visible messages
updatePlaybackState({
visibleMessages: [...visibleMessages, currentMessage]
});
// Wait a moment before showing the next message
await new Promise(resolve => setTimeout(resolve, 500));
}
// Move to the next message
updatePlaybackState({
currentMessageIndex: currentMessageIndex + 1
});
};
// Start playback with a small delay
playbackTimeout = setTimeout(playbackNextMessage, 500);
return () => {
clearTimeout(playbackTimeout);
if (cleanupStreaming) cleanupStreaming();
};
}, [isPlaying, currentMessageIndex, messages, streamText, updatePlaybackState, visibleMessages]);
// Floating playback controls position based on side panel state
const controlsPositionClass = isSidePanelOpen
? 'left-1/2 -translate-x-1/4 sm:left-[calc(50%-225px)] md:left-[calc(50%-250px)] lg:left-[calc(50%-275px)] xl:left-[calc(50%-325px)]'
: 'left-1/2 -translate-x-1/2';
// Header with playback controls
const renderHeader = useCallback(() => (
<div className="border-b bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60">
<div className="flex h-14 items-center gap-4 px-4">
<div className="flex-1">
<div className="flex items-center gap-2">
<div className="flex items-center justify-center w-6 h-6 rounded-md overflow-hidden bg-primary/10">
<img src="/kortix-symbol.svg" alt="Kortix" width={16} height={16} className="object-contain" />
</div>
<span className="font-medium text-foreground">{projectName}</span>
</div>
</div>
<div className="flex items-center gap-2">
<Button
variant="ghost"
size="icon"
onClick={onFileViewerOpen}
className="h-8 w-8"
aria-label="View Files"
>
<FileText className="h-4 w-4" />
</Button>
<Button
variant="ghost"
size="icon"
onClick={togglePlayback}
className="h-8 w-8"
aria-label={isPlaying ? "Pause Replay" : "Play Replay"}
>
{isPlaying ? <Pause className="h-4 w-4" /> : <Play className="h-4 w-4" />}
</Button>
<Button
variant="ghost"
size="icon"
onClick={resetPlayback}
className="h-8 w-8"
aria-label="Restart Replay"
>
<ArrowDown className="h-4 w-4 rotate-90" />
</Button>
<Button
variant="ghost"
size="icon"
onClick={onToggleSidePanel}
className={`h-8 w-8 ${isSidePanelOpen ? "text-primary" : ""}`}
aria-label="Toggle Tool Panel"
>
<Info className="h-4 w-4" />
</Button>
</div>
</div>
</div>
), [isPlaying, isSidePanelOpen, onFileViewerOpen, onToggleSidePanel, projectName, resetPlayback, togglePlayback]);
const renderFloatingControls = useCallback(() => (
<>
{messages.length > 0 && (
<div
className={`fixed bottom-4 z-10 transform bg-background/90 backdrop-blur rounded-full border shadow-md px-3 py-1.5 transition-all duration-200 ${controlsPositionClass}`}
>
<div className="flex items-center gap-2">
<Button
variant="ghost"
size="icon"
onClick={togglePlayback}
className="h-8 w-8"
>
{isPlaying ? <Pause className="h-4 w-4" /> : <Play className="h-4 w-4" />}
</Button>
<div className="flex items-center text-xs text-muted-foreground">
<span>{Math.min(currentMessageIndex + (isStreamingText ? 0 : 1), messages.length)}/{messages.length}</span>
</div>
<Button
variant="ghost"
size="icon"
onClick={resetPlayback}
className="h-8 w-8"
>
<ArrowDown className="h-4 w-4 rotate-90" />
</Button>
<Button
variant="ghost"
size="sm"
onClick={skipToEnd}
className="text-xs"
>
Skip to end
</Button>
</div>
</div>
)}
</>
), [controlsPositionClass, currentMessageIndex, isPlaying, isStreamingText, messages.length, resetPlayback, skipToEnd, togglePlayback]);
// When no messages are displayed yet, show the welcome overlay
const renderWelcomeOverlay = useCallback(() => (
<>
{visibleMessages.length === 0 && !streamingText && !currentToolCall && (
<div className="fixed inset-0 flex flex-col items-center justify-center">
{/* Gradient overlay */}
<div className="absolute inset-0 bg-gradient-to-t from-black/90 via-black/50 to-transparent dark:from-black/90 dark:via-black/50 dark:to-transparent" />
<div className="text-center max-w-md mx-auto relative z-10 px-4">
<div className="rounded-full bg-primary/10 backdrop-blur-sm w-12 h-12 mx-auto flex items-center justify-center mb-4">
<Play className="h-5 w-5 text-primary" />
</div>
<h3 className="text-lg font-medium mb-2 text-white">Watch this agent in action</h3>
<p className="text-sm text-white/80 mb-4">
This is a shared view-only agent run. Click play to replay the entire conversation with realistic timing.
</p>
<Button
onClick={togglePlayback}
className="flex items-center mx-auto bg-white/10 hover:bg-white/20 backdrop-blur-sm text-white border-white/20"
size="lg"
variant="outline"
>
<Play className="h-4 w-4 mr-2" />
Start Playback
</Button>
</div>
</div>
)}
</>
), [currentToolCall, streamingText, togglePlayback, visibleMessages.length]);
return {
playbackState,
updatePlaybackState,
renderHeader,
renderFloatingControls,
renderWelcomeOverlay,
togglePlayback,
resetPlayback,
skipToEnd
};
};
export default PlaybackControls;
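A minimal consumer sketch for the controller above. The page component name, the import path for PlaybackControls, and the empty toolCalls array are illustrative placeholders; only the props and the PlaybackController contract come from this file.

import React, { useState } from 'react';
import { UnifiedMessage } from '@/components/thread/types';
// Hypothetical import path for the file above.
import { PlaybackControls } from '@/components/thread/content/PlaybackControls';

export function SharedThreadPage({ messages }: { messages: UnifiedMessage[] }) {
  const [isSidePanelOpen, setIsSidePanelOpen] = useState(false);
  const [, setCurrentToolIndex] = useState(0);
  const toolCalls: any[] = []; // normally extracted from assistant/tool message pairs

  // PlaybackControls is a hook-style factory: call it during render to get the controller.
  const controller = PlaybackControls({
    messages,
    isSidePanelOpen,
    onToggleSidePanel: () => setIsSidePanelOpen(open => !open),
    toolCalls,
    setCurrentToolIndex,
    onFileViewerOpen: () => { /* open the file viewer modal */ },
    projectName: 'Shared Conversation',
  });

  return (
    <div className="flex h-screen flex-col">
      {controller.renderHeader()}
      {/* thread content renders here */}
      {controller.renderWelcomeOverlay()}
      {controller.renderFloatingControls()}
    </div>
  );
}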


@@ -0,0 +1,535 @@
import React, { useRef, useState, useCallback } from 'react';
import Image from 'next/image';
import { ArrowDown, CircleDashed } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { Markdown } from '@/components/ui/markdown';
import { UnifiedMessage, ParsedContent, ParsedMetadata } from '@/components/thread/types';
import { safeJsonParse } from '@/components/thread/utils';
import { FileAttachmentGrid } from '@/components/thread/file-attachment';
// Define the set of tags whose raw XML should be hidden during streaming
const HIDE_STREAMING_XML_TAGS = new Set([
'execute-command',
'create-file',
'delete-file',
'full-file-rewrite',
'str-replace',
'browser-click-element',
'browser-close-tab',
'browser-drag-drop',
'browser-get-dropdown-options',
'browser-go-back',
'browser-input-text',
'browser-navigate-to',
'browser-scroll-down',
'browser-scroll-to-text',
'browser-scroll-up',
'browser-select-dropdown-option',
'browser-send-keys',
'browser-switch-tab',
'browser-wait',
'deploy',
'ask',
'complete',
'crawl-webpage',
'web-search'
]);
// Helper function to render attachments
export function renderAttachments(attachments: string[], fileViewerHandler?: (filePath?: string) => void, sandboxId?: string) {
if (!attachments || attachments.length === 0) return null;
return <FileAttachmentGrid
attachments={attachments}
onFileClick={fileViewerHandler}
showPreviews={true}
sandboxId={sandboxId}
/>;
}
// Render Markdown content while preserving XML tags that should be displayed as tool calls
export function renderMarkdownContent(
content: string,
handleToolClick: (assistantMessageId: string | null, toolName: string) => void,
messageId: string | null,
fileViewerHandler?: (filePath?: string) => void,
sandboxId?: string
) {
const xmlRegex = /<(?!inform\b)([a-zA-Z\-_]+)(?:\s+[^>]*)?>(?:[\s\S]*?)<\/\1>|<(?!inform\b)([a-zA-Z\-_]+)(?:\s+[^>]*)?\/>/g;
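// e.g. <web-search query="latest news">...</web-search> becomes a clickable tool chip below,
// while <inform> blocks are excluded by the (?!inform\b) lookahead and stay in the Markdown output.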
let lastIndex = 0;
const contentParts: React.ReactNode[] = [];
let match;
// If no XML tags found, just return the full content as markdown
if (!content.match(xmlRegex)) {
return <Markdown className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none break-words">{content}</Markdown>;
}
while ((match = xmlRegex.exec(content)) !== null) {
// Add text before the tag as markdown
if (match.index > lastIndex) {
const textBeforeTag = content.substring(lastIndex, match.index);
contentParts.push(
<Markdown key={`md-${lastIndex}`} className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none inline-block mr-1 break-words">{textBeforeTag}</Markdown>
);
}
const rawXml = match[0];
const toolName = match[1] || match[2];
const toolCallKey = `tool-${match.index}`;
if (toolName === 'ask') {
// Extract attachments from the XML attributes
const attachmentsMatch = rawXml.match(/attachments=["']([^"']*)["']/i);
const attachments = attachmentsMatch
? attachmentsMatch[1].split(',').map(a => a.trim())
: [];
// Extract content from the ask tag
const contentMatch = rawXml.match(/<ask[^>]*>([\s\S]*?)<\/ask>/i);
const askContent = contentMatch ? contentMatch[1] : '';
// Render <ask> tag content with attachment UI (using the helper)
contentParts.push(
<div key={`ask-${match.index}`} className="space-y-3">
<Markdown className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none break-words [&>:first-child]:mt-0 prose-headings:mt-3">{askContent}</Markdown>
{renderAttachments(attachments, fileViewerHandler, sandboxId)}
</div>
);
} else {
// Import dynamically to avoid circular dependencies
const { getToolIcon, extractPrimaryParam } = require('@/components/thread/utils');
const IconComponent = getToolIcon(toolName);
const paramDisplay = extractPrimaryParam(toolName, rawXml);
// Render tool button as a clickable element
contentParts.push(
<button
key={toolCallKey}
onClick={() => handleToolClick(messageId, toolName)}
className="inline-flex items-center gap-1.5 py-1 px-2.5 my-1 text-xs text-muted-foreground bg-muted hover:bg-muted/80 rounded-md transition-colors cursor-pointer border border-border"
>
<IconComponent className="h-3.5 w-3.5 text-muted-foreground flex-shrink-0" />
<span className="font-mono text-xs text-foreground">{toolName}</span>
{paramDisplay && <span className="ml-1 text-muted-foreground truncate max-w-[200px]" title={paramDisplay}>{paramDisplay}</span>}
</button>
);
}
lastIndex = xmlRegex.lastIndex;
}
// Add text after the last tag
if (lastIndex < content.length) {
contentParts.push(
<Markdown key={`md-${lastIndex}`} className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none break-words">{content.substring(lastIndex)}</Markdown>
);
}
return contentParts;
}
export interface ThreadContentProps {
messages: UnifiedMessage[];
streamingTextContent?: string;
streamingToolCall?: any;
agentStatus: 'idle' | 'running' | 'connecting' | 'error';
handleToolClick: (assistantMessageId: string | null, toolName: string) => void;
handleOpenFileViewer: (filePath?: string) => void;
readOnly?: boolean;
visibleMessages?: UnifiedMessage[]; // For playback mode
streamingText?: string; // For playback mode
isStreamingText?: boolean; // For playback mode
currentToolCall?: any; // For playback mode
streamHookStatus?: string; // Status from the agent stream hook (e.g. 'idle' | 'connecting' | 'streaming')
sandboxId?: string; // Sandbox ID used to resolve file attachment URLs
}
export const ThreadContent: React.FC<ThreadContentProps> = ({
messages,
streamingTextContent = "",
streamingToolCall,
agentStatus,
handleToolClick,
handleOpenFileViewer,
readOnly = false,
visibleMessages,
streamingText = "",
isStreamingText = false,
currentToolCall,
streamHookStatus = "idle",
sandboxId
}) => {
const messagesEndRef = useRef<HTMLDivElement>(null);
const messagesContainerRef = useRef<HTMLDivElement>(null);
const latestMessageRef = useRef<HTMLDivElement>(null);
const [showScrollButton, setShowScrollButton] = useState(false);
const [userHasScrolled, setUserHasScrolled] = useState(false);
// In playback mode, we use visibleMessages instead of messages
const displayMessages = readOnly && visibleMessages ? visibleMessages : messages;
const handleScroll = () => {
if (!messagesContainerRef.current) return;
const { scrollTop, scrollHeight, clientHeight } = messagesContainerRef.current;
const isScrolledUp = scrollHeight - scrollTop - clientHeight > 100;
setShowScrollButton(isScrolledUp);
setUserHasScrolled(isScrolledUp);
};
const scrollToBottom = useCallback((behavior: ScrollBehavior = 'smooth') => {
messagesEndRef.current?.scrollIntoView({ behavior });
}, []);
return (
<>
<div
ref={messagesContainerRef}
className="flex-1 overflow-y-auto px-6 py-4 pb-24 bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60"
onScroll={handleScroll}
>
<div className="mx-auto max-w-3xl">
{displayMessages.length === 0 && !streamingTextContent && !streamingToolCall &&
!streamingText && !currentToolCall && agentStatus === 'idle' ? (
<div className="flex h-full items-center justify-center">
<div className="text-center text-muted-foreground">
{readOnly ? "No messages to display." : "Send a message to start."}
</div>
</div>
) : (
<div className="space-y-8">
{(() => {
// Group messages logic
type MessageGroup = {
type: 'user' | 'assistant_group';
messages: UnifiedMessage[];
key: string;
};
const groupedMessages: MessageGroup[] = [];
let currentGroup: MessageGroup | null = null;
displayMessages.forEach((message, index) => {
const messageType = message.type;
const key = message.message_id || `msg-${index}`;
if (messageType === 'user') {
if (currentGroup) {
groupedMessages.push(currentGroup);
}
groupedMessages.push({ type: 'user', messages: [message], key });
currentGroup = null;
} else if (messageType === 'assistant' || messageType === 'tool' || messageType === 'browser_state') {
if (currentGroup && currentGroup.type === 'assistant_group') {
currentGroup.messages.push(message);
} else {
if (currentGroup) {
groupedMessages.push(currentGroup);
}
currentGroup = { type: 'assistant_group', messages: [message], key };
}
} else if (messageType !== 'status') {
if (currentGroup) {
groupedMessages.push(currentGroup);
currentGroup = null;
}
}
});
if (currentGroup) {
groupedMessages.push(currentGroup);
}
return groupedMessages.map((group, groupIndex) => {
if (group.type === 'user') {
const message = group.messages[0];
const messageContent = (() => {
try {
const parsed = safeJsonParse<ParsedContent>(message.content, { content: message.content });
return parsed.content || message.content;
} catch {
return message.content;
}
})();
// Extract attachments from the message content
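// e.g. "Summarize this [Uploaded File: /workspace/report.pdf]" yields
// attachments = ['/workspace/report.pdf'] and cleanContent = "Summarize this".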
const attachmentsMatch = messageContent.match(/\[Uploaded File: (.*?)\]/g);
const attachments = attachmentsMatch
? attachmentsMatch.map(match => {
const pathMatch = match.match(/\[Uploaded File: (.*?)\]/);
return pathMatch ? pathMatch[1] : null;
}).filter(Boolean)
: [];
// Remove attachment info from the message content
const cleanContent = messageContent.replace(/\[Uploaded File: .*?\]/g, '').trim();
return (
<div key={group.key} className="flex justify-end">
<div className="inline-flex max-w-[85%] rounded-lg bg-primary/10 px-4 py-3">
<div className="space-y-3">
{cleanContent && (
<Markdown className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none [&>:first-child]:mt-0 prose-headings:mt-3">{cleanContent}</Markdown>
)}
{/* Use the helper function to render user attachments */}
{renderAttachments(attachments as string[], handleOpenFileViewer, sandboxId)}
</div>
</div>
</div>
);
} else if (group.type === 'assistant_group') {
return (
<div key={group.key} ref={groupIndex === groupedMessages.length - 1 ? latestMessageRef : null}>
<div className="flex items-start gap-3">
<div className="flex-shrink-0 w-5 h-5 mt-2 rounded-md flex items-center justify-center overflow-hidden ml-auto mr-2">
<Image src="/kortix-symbol.svg" alt="Kortix" width={14} height={14} className="object-contain invert dark:invert-0 opacity-70" />
</div>
<div className="flex-1">
<div className="inline-flex max-w-[90%] rounded-lg bg-muted/5 px-4 py-3 text-sm">
<div className="space-y-2">
{(() => {
const toolResultsMap = new Map<string | null, UnifiedMessage[]>();
group.messages.forEach(msg => {
if (msg.type === 'tool') {
const meta = safeJsonParse<ParsedMetadata>(msg.metadata, {});
const assistantId = meta.assistant_message_id || null;
if (!toolResultsMap.has(assistantId)) {
toolResultsMap.set(assistantId, []);
}
toolResultsMap.get(assistantId)?.push(msg);
}
});
const renderedToolResultIds = new Set<string>();
const elements: React.ReactNode[] = [];
group.messages.forEach((message, msgIndex) => {
if (message.type === 'assistant') {
const parsedContent = safeJsonParse<ParsedContent>(message.content, {});
const msgKey = message.message_id || `submsg-assistant-${msgIndex}`;
if (!parsedContent.content) return;
const renderedContent = renderMarkdownContent(
parsedContent.content,
handleToolClick,
message.message_id,
handleOpenFileViewer,
sandboxId
);
elements.push(
<div key={msgKey} className={msgIndex > 0 ? "mt-2" : ""}>
<div className="prose prose-sm dark:prose-invert chat-markdown max-w-none [&>:first-child]:mt-0 prose-headings:mt-3">
{renderedContent}
</div>
</div>
);
}
});
return elements;
})()}
{groupIndex === groupedMessages.length - 1 && !readOnly && (streamHookStatus === 'streaming' || streamHookStatus === 'connecting') && (
<div className="mt-2">
{(() => {
let detectedTag: string | null = null;
let tagStartIndex = -1;
if (streamingTextContent) {
for (const tag of HIDE_STREAMING_XML_TAGS) {
const openingTagPattern = `<${tag}`;
const index = streamingTextContent.indexOf(openingTagPattern);
if (index !== -1) {
detectedTag = tag;
tagStartIndex = index;
break;
}
}
}
const textToRender = streamingTextContent || '';
const textBeforeTag = detectedTag ? textToRender.substring(0, tagStartIndex) : textToRender;
const showCursor = (streamHookStatus === 'streaming' || streamHookStatus === 'connecting') && !detectedTag;
return (
<>
{textBeforeTag && (
<Markdown className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none [&>:first-child]:mt-0 prose-headings:mt-3">{textBeforeTag}</Markdown>
)}
{showCursor && (
<span className="inline-block h-4 w-0.5 bg-primary ml-0.5 -mb-1 animate-pulse" />
)}
{detectedTag && (
<div className="mt-2 mb-1">
<button
className="inline-flex items-center gap-1.5 py-1 px-2.5 text-xs font-medium text-primary bg-primary/10 hover:bg-primary/20 rounded-md transition-colors cursor-pointer border border-primary/20"
>
<CircleDashed className="h-3.5 w-3.5 text-primary flex-shrink-0 animate-spin animation-duration-2000" />
<span className="font-mono text-xs text-primary">{detectedTag}</span>
</button>
</div>
)}
{streamingToolCall && !detectedTag && (
<div className="mt-2 mb-1">
{(() => {
// Import dynamically to avoid circular dependencies
const { getToolIcon, extractPrimaryParam } = require('@/components/thread/utils');
const toolName = streamingToolCall.name || streamingToolCall.xml_tag_name || 'Tool';
const IconComponent = getToolIcon(toolName);
const paramDisplay = extractPrimaryParam(toolName, streamingToolCall.arguments || '');
return (
<button
className="inline-flex items-center gap-1.5 py-1 px-2.5 text-xs font-medium text-primary bg-primary/10 hover:bg-primary/20 rounded-md transition-colors cursor-pointer border border-primary/20"
>
<CircleDashed className="h-3.5 w-3.5 text-primary flex-shrink-0 animate-spin animation-duration-2000" />
<span className="font-mono text-xs text-primary">{toolName}</span>
{paramDisplay && <span className="ml-1 text-primary/70 truncate max-w-[200px]" title={paramDisplay}>{paramDisplay}</span>}
</button>
);
})()}
</div>
)}
</>
);
})()}
</div>
)}
{/* For playback mode, show streaming text and tool calls */}
{readOnly && groupIndex === groupedMessages.length - 1 && isStreamingText && (
<div className="mt-2">
{(() => {
let detectedTag: string | null = null;
let tagStartIndex = -1;
if (streamingText) {
for (const tag of HIDE_STREAMING_XML_TAGS) {
const openingTagPattern = `<${tag}`;
const index = streamingText.indexOf(openingTagPattern);
if (index !== -1) {
detectedTag = tag;
tagStartIndex = index;
break;
}
}
}
const textToRender = streamingText || '';
const textBeforeTag = detectedTag ? textToRender.substring(0, tagStartIndex) : textToRender;
const showCursor = isStreamingText && !detectedTag;
return (
<>
{textBeforeTag && (
<Markdown className="text-sm prose prose-sm dark:prose-invert chat-markdown max-w-none [&>:first-child]:mt-0 prose-headings:mt-3">{textBeforeTag}</Markdown>
)}
{showCursor && (
<span className="inline-block h-4 w-0.5 bg-primary ml-0.5 -mb-1 animate-pulse" />
)}
{detectedTag && (
<div className="mt-2 mb-1">
<button
className="inline-flex items-center gap-1.5 py-1 px-2.5 text-xs font-medium text-primary bg-primary/10 hover:bg-primary/20 rounded-md transition-colors cursor-pointer border border-primary/20"
>
<CircleDashed className="h-3.5 w-3.5 text-primary flex-shrink-0 animate-spin animation-duration-2000" />
<span className="font-mono text-xs text-primary">{detectedTag}</span>
</button>
</div>
)}
</>
);
})()}
</div>
)}
</div>
</div>
</div>
</div>
</div>
);
}
return null;
});
})()}
{(agentStatus === 'running' || agentStatus === 'connecting') &&
!readOnly &&
(messages.length === 0 || messages[messages.length - 1].type === 'user') && (
<div ref={latestMessageRef}>
<div className="flex items-start gap-3">
<div className="flex-shrink-0 w-5 h-5 rounded-md flex items-center justify-center overflow-hidden bg-primary/10">
<Image src="/kortix-symbol.svg" alt="Suna" width={14} height={14} className="object-contain" />
</div>
<div className="flex-1 space-y-2">
<div className="max-w-[90%] px-4 py-3 text-sm">
<div className="flex items-center gap-1.5 py-1">
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse" />
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse delay-150" />
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse delay-300" />
</div>
</div>
</div>
</div>
</div>
)}
{/* For playback mode - Show tool call animation if active */}
{readOnly && currentToolCall && (
<div ref={latestMessageRef}>
<div className="flex items-start gap-3">
<div className="flex-shrink-0 w-5 h-5 mt-2 rounded-md flex items-center justify-center overflow-hidden bg-primary/10">
<Image src="/kortix-symbol.svg" alt="Suna" width={14} height={14} className="object-contain" />
</div>
<div className="flex-1 space-y-2">
<div className="inline-flex items-center gap-1.5 py-1.5 px-3 text-xs font-medium text-primary bg-primary/10 rounded-md border border-primary/20">
<CircleDashed className="h-3.5 w-3.5 text-primary flex-shrink-0 animate-spin animation-duration-2000" />
<span className="font-mono text-xs text-primary">
{currentToolCall.name || 'Using Tool'}
</span>
</div>
</div>
</div>
</div>
)}
{/* For playback mode - Show streaming indicator if no messages yet */}
{readOnly && visibleMessages && visibleMessages.length === 0 && isStreamingText && (
<div ref={latestMessageRef}>
<div className="flex items-start gap-3">
<div className="flex-shrink-0 w-5 h-5 mt-2 rounded-md flex items-center justify-center overflow-hidden bg-primary/10">
<Image src="/kortix-symbol.svg" alt="Suna" width={14} height={14} className="object-contain" />
</div>
<div className="flex-1 space-y-2">
<div className="max-w-[90%] px-4 py-3 text-sm">
<div className="flex items-center gap-1.5 py-1">
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse" />
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse delay-150" />
<div className="h-1.5 w-1.5 rounded-full bg-primary/50 animate-pulse delay-300" />
</div>
</div>
</div>
</div>
</div>
)}
</div>
)}
<div ref={messagesEndRef} className="h-1" />
</div>
</div>
{/* Scroll to bottom button */}
{showScrollButton && (
<Button
variant="outline"
size="icon"
className="fixed bottom-20 right-6 z-10 h-8 w-8 rounded-full shadow-md"
onClick={() => scrollToBottom('smooth')}
>
<ArrowDown className="h-4 w-4" />
</Button>
)}
</>
);
};
export default ThreadContent;
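A read-only wiring sketch that pairs ThreadContent with the playback state produced by PlaybackControls. The wrapper component and the import paths for the two new files are hypothetical; the prop names and types come from this file.

import React from 'react';
import { UnifiedMessage } from '@/components/thread/types';
// Hypothetical import paths for the two files added in this commit.
import { ThreadContent } from '@/components/thread/content/ThreadContent';
import type { PlaybackController } from '@/components/thread/content/PlaybackControls';

export function SharedThreadBody({
  messages,
  controller,
  sandboxId,
  openFileViewer,
}: {
  messages: UnifiedMessage[];
  controller: PlaybackController;
  sandboxId?: string;
  openFileViewer: (filePath?: string) => void;
}) {
  const { visibleMessages, streamingText, isStreamingText, currentToolCall } = controller.playbackState;
  return (
    <ThreadContent
      messages={messages}
      agentStatus="idle"
      readOnly
      visibleMessages={visibleMessages}
      streamingText={streamingText}
      isStreamingText={isStreamingText}
      currentToolCall={currentToolCall}
      handleToolClick={() => { /* focus the matching tool call in the side panel */ }}
      handleOpenFileViewer={openFileViewer}
      sandboxId={sandboxId}
    />
  );
}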


@@ -0,0 +1,225 @@
import React from 'react';
import Image from 'next/image';
import {
FileText, FileImage, FileCode, FilePlus, FileSpreadsheet, FileVideo,
FileAudio, FileType as FileTypeIcon, Database, Archive, File, ExternalLink,
Download
} from 'lucide-react';
import { cn } from '@/lib/utils';
// Define basic file types
export type FileType =
| 'image' | 'code' | 'text' | 'pdf'
| 'audio' | 'video' | 'spreadsheet'
| 'archive' | 'database' | 'markdown'
| 'other';
// Simple extension-based file type detection
function getFileType(filename: string): FileType {
const ext = filename.split('.').pop()?.toLowerCase() || '';
if (['png', 'jpg', 'jpeg', 'gif', 'webp', 'svg', 'bmp'].includes(ext)) return 'image';
if (['js', 'jsx', 'ts', 'tsx', 'html', 'css', 'json', 'py', 'java', 'c', 'cpp'].includes(ext)) return 'code';
if (['txt', 'log', 'env'].includes(ext)) return 'text';
if (['md', 'markdown'].includes(ext)) return 'markdown';
if (ext === 'pdf') return 'pdf';
if (['mp3', 'wav', 'ogg', 'flac'].includes(ext)) return 'audio';
if (['mp4', 'webm', 'mov', 'avi'].includes(ext)) return 'video';
if (['csv', 'xls', 'xlsx'].includes(ext)) return 'spreadsheet';
if (['zip', 'rar', 'tar', 'gz'].includes(ext)) return 'archive';
if (['db', 'sqlite', 'sql'].includes(ext)) return 'database';
return 'other';
}
// Get appropriate icon for file type
function getFileIcon(type: FileType): React.ElementType {
const icons: Record<FileType, React.ElementType> = {
image: FileImage,
code: FileCode,
text: FileText,
markdown: FileText,
pdf: FileTypeIcon,
audio: FileAudio,
video: FileVideo,
spreadsheet: FileSpreadsheet,
archive: Archive,
database: Database,
other: File
};
return icons[type];
}
// Generate a human-readable display name for file type
function getTypeLabel(type: FileType, extension?: string): string {
if (type === 'code' && extension) {
return extension.toUpperCase();
}
const labels: Record<FileType, string> = {
image: 'Image',
code: 'Code',
text: 'Text',
markdown: 'Markdown',
pdf: 'PDF',
audio: 'Audio',
video: 'Video',
spreadsheet: 'Spreadsheet',
archive: 'Archive',
database: 'Database',
other: 'File'
};
return labels[type];
}
// Generate a plausible placeholder file size from the file path and type (not the real size on disk)
function getFileSize(filepath: string, type: FileType): string {
// Base size calculation
const base = (filepath.length * 5) % 800 + 200;
// Type-specific multipliers
const multipliers: Record<FileType, number> = {
image: 5.0,
video: 20.0,
audio: 10.0,
code: 0.5,
text: 0.3,
markdown: 0.3,
pdf: 8.0,
spreadsheet: 3.0,
archive: 5.0,
database: 4.0,
other: 1.0
};
const size = base * multipliers[type];
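// e.g. a 40-character image path: base = (40 * 5) % 800 + 200 = 400, size = 400 * 5.0 = 2000 -> "2.0 KB"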
if (size < 1024) return `${Math.round(size)} B`;
if (size < 1024 * 1024) return `${(size / 1024).toFixed(1)} KB`;
return `${(size / (1024 * 1024)).toFixed(1)} MB`;
}
// Get the API URL for file content
function getFileUrl(sandboxId: string | undefined, path: string): string {
if (!sandboxId) return path;
const url = new URL(`${process.env.NEXT_PUBLIC_BACKEND_URL}/sandboxes/${sandboxId}/files/content`);
url.searchParams.append('path', path);
return url.toString();
}
interface FileAttachmentProps {
filepath: string;
onClick?: (path: string) => void;
className?: string;
sandboxId?: string;
showPreview?: boolean;
}
export function FileAttachment({
filepath,
onClick,
className,
sandboxId,
showPreview = true
}: FileAttachmentProps) {
const [imageLoaded, setImageLoaded] = React.useState(false);
const [imageError, setImageError] = React.useState(false);
const filename = filepath.split('/').pop() || 'file';
const extension = filename.split('.').pop()?.toLowerCase() || '';
const fileType = getFileType(filename);
const fileUrl = sandboxId ? getFileUrl(sandboxId, filepath) : filepath;
const typeLabel = getTypeLabel(fileType, extension);
const fileSize = getFileSize(filepath, fileType);
const IconComponent = getFileIcon(fileType);
const isImage = fileType === 'image' && showPreview;
const handleClick = () => {
if (onClick) onClick(filepath);
};
// Clean layout with proper image handling
return (
<button
onClick={handleClick}
className={cn(
"group flex items-start gap-3 p-4 rounded-md bg-muted/10 hover:bg-muted/20 transition-colors",
"border border-transparent hover:border-muted/20 w-full text-left",
className
)}
>
{isImage && !imageError ? (
<div className="relative h-10 w-10 rounded-md overflow-hidden bg-muted/5 flex-shrink-0">
<Image
src={fileUrl}
alt={filename}
fill
className="object-cover"
onLoad={() => setImageLoaded(true)}
onError={() => setImageError(true)}
unoptimized
/>
</div>
) : (
<div className="flex items-center justify-center h-10 w-10 text-muted-foreground flex-shrink-0">
<IconComponent className="h-5 w-5" />
</div>
)}
<div className="flex-1 min-w-0">
<div className="text-sm font-medium text-foreground truncate">
{filename}
</div>
<div className="text-xs text-muted-foreground flex items-center gap-1">
<span>{typeLabel}</span>
<span>·</span>
<span>{fileSize}</span>
</div>
</div>
</button>
);
}
interface FileAttachmentGridProps {
attachments: string[];
onFileClick?: (path: string) => void;
className?: string;
sandboxId?: string;
showPreviews?: boolean;
}
export function FileAttachmentGrid({
attachments,
onFileClick,
className,
sandboxId,
showPreviews = true
}: FileAttachmentGridProps) {
if (!attachments || attachments.length === 0) return null;
// Deduplicate attachments
const uniqueAttachments = [...new Set(attachments)];
// Simple grid layout
return (
<div className={cn("mt-4", className)}>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
{uniqueAttachments.map((attachment, index) => (
<FileAttachment
key={`file-${index}`}
filepath={attachment}
onClick={onFileClick}
sandboxId={sandboxId}
showPreview={showPreviews}
/>
))}
</div>
</div>
);
}
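An illustrative usage sketch for the grid above. The wrapper component, sample paths, and click handler are placeholders; the import path matches the one already used by the thread content file in this commit.

import React from 'react';
import { FileAttachmentGrid } from '@/components/thread/file-attachment';

export function UploadedFilesExample({ sandboxId, onOpen }: { sandboxId?: string; onOpen: (path: string) => void }) {
  return (
    <FileAttachmentGrid
      // Duplicate paths are deduplicated inside the grid component.
      attachments={['/workspace/report.pdf', '/workspace/chart.png', '/workspace/chart.png']}
      onFileClick={onOpen}
      sandboxId={sandboxId}
      showPreviews
    />
  );
}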


@@ -0,0 +1,3 @@
export const threadErrorCodeMessages: Record<string, string> = {
PGRST116: 'The requested chat does not exist, has been deleted, or you do not have access to it.',
};
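A small sketch of how these messages could be surfaced. The helper name, import path, and error shape are illustrative; PGRST116 is the PostgREST code typically returned when a .single() query matches no row.

// Hypothetical import path for the file above.
import { threadErrorCodeMessages } from '@/components/thread/content/error-codes';

// Prefer a friendly message for known codes and fall back to the raw error text.
export function threadErrorToMessage(error: { code?: string; message?: string }): string {
  if (error.code && threadErrorCodeMessages[error.code]) {
    return threadErrorCodeMessages[error.code];
  }
  return error.message ?? 'An unexpected error occurred while loading this chat.';
}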