mirror of https://github.com/kortix-ai/suna.git
Merge branch 'main' into minor-fixes
This commit is contained in:
commit
a5179cc5fc
|
@ -1,9 +1,10 @@
|
|||
name: Build and Push Docker Images
|
||||
name: Build and Push Docker Image
|
||||
|
||||
on:
|
||||
# Remove active triggers to disable the workflow
|
||||
# push:
|
||||
# branches: [ main ]
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- PRODUCTION
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
|
@ -16,6 +17,18 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Get tag name
|
||||
shell: bash
|
||||
run: |
|
||||
if [[ "${GITHUB_REF#refs/heads/}" == "main" ]]; then
|
||||
echo "branch=latest" >> $GITHUB_OUTPUT
|
||||
elif [[ "${GITHUB_REF#refs/heads/}" == "PRODUCTION" ]]; then
|
||||
echo "branch=prod" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
id: get_tag_name
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
|
@ -32,16 +45,7 @@ jobs:
|
|||
context: ./backend
|
||||
file: ./backend/Dockerfile
|
||||
push: true
|
||||
tags: ghcr.io/${{ github.repository }}/suna-backend:latest
|
||||
platforms: linux/arm64, linux/amd64
|
||||
tags: ghcr.io/${{ github.repository }}/suna-backend:${{ steps.get_tag_name.outputs.branch }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push Frontend image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: ./frontend
|
||||
file: ./frontend/Dockerfile
|
||||
push: true
|
||||
tags: ghcr.io/${{ github.repository }}/suna-frontend:latest
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
|
@ -38,6 +38,8 @@ ENV WORKERS=33
|
|||
ENV THREADS=2
|
||||
ENV WORKER_CONNECTIONS=2000
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
# Gunicorn configuration
|
||||
CMD ["sh", "-c", "gunicorn api:app \
|
||||
--workers $WORKERS \
|
||||
|
|
|
@ -275,6 +275,36 @@ async def read_file(
|
|||
logger.error(f"Error reading file in sandbox {sandbox_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
@router.delete("/sandboxes/{sandbox_id}/files")
|
||||
async def delete_file(
|
||||
sandbox_id: str,
|
||||
path: str,
|
||||
request: Request = None,
|
||||
user_id: Optional[str] = Depends(get_optional_user_id)
|
||||
):
|
||||
"""Delete a file from the sandbox"""
|
||||
# Normalize the path to handle UTF-8 encoding correctly
|
||||
path = normalize_path(path)
|
||||
|
||||
logger.info(f"Received file delete request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
|
||||
client = await db.client
|
||||
|
||||
# Verify the user has access to this sandbox
|
||||
await verify_sandbox_access(client, sandbox_id, user_id)
|
||||
|
||||
try:
|
||||
# Get sandbox using the safer method
|
||||
sandbox = await get_sandbox_by_id_safely(client, sandbox_id)
|
||||
|
||||
# Delete file
|
||||
sandbox.fs.delete_file(path)
|
||||
logger.info(f"File deleted at {path} in sandbox {sandbox_id}")
|
||||
|
||||
return {"status": "success", "deleted": True, "path": path}
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting file in sandbox {sandbox_id}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
# Should happen on server-side fully
|
||||
@router.post("/project/{project_id}/sandbox/ensure-active")
|
||||
async def ensure_project_sandbox_active(
|
||||
|
|
|
@ -1266,6 +1266,7 @@ export default function ThreadPage({
|
|||
autoFocus={!isLoading}
|
||||
onFileBrowse={handleOpenFileViewer}
|
||||
sandboxId={sandboxId || undefined}
|
||||
messages={messages}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
@ -14,6 +14,8 @@ import { handleFiles } from './file-upload-handler';
|
|||
import { MessageInput } from './message-input';
|
||||
import { AttachmentGroup } from '../attachment-group';
|
||||
import { useModelSelection } from './_use-model-selection';
|
||||
import { useFileDelete } from '@/hooks/react-query/files';
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
|
||||
export interface ChatInputHandles {
|
||||
getPendingFiles: () => File[];
|
||||
|
@ -36,6 +38,7 @@ export interface ChatInputProps {
|
|||
onFileBrowse?: () => void;
|
||||
sandboxId?: string;
|
||||
hideAttachments?: boolean;
|
||||
messages?: any[]; // Add messages prop to check for existing file references
|
||||
}
|
||||
|
||||
export interface UploadedFile {
|
||||
|
@ -61,6 +64,7 @@ export const ChatInput = forwardRef<ChatInputHandles, ChatInputProps>(
|
|||
onFileBrowse,
|
||||
sandboxId,
|
||||
hideAttachments = false,
|
||||
messages = [],
|
||||
},
|
||||
ref,
|
||||
) => {
|
||||
|
@ -85,6 +89,9 @@ export const ChatInput = forwardRef<ChatInputHandles, ChatInputProps>(
|
|||
refreshCustomModels,
|
||||
} = useModelSelection();
|
||||
|
||||
const deleteFileMutation = useFileDelete();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
const textareaRef = useRef<HTMLTextAreaElement | null>(null);
|
||||
const fileInputRef = useRef<HTMLInputElement | null>(null);
|
||||
|
||||
|
@ -152,14 +159,37 @@ export const ChatInput = forwardRef<ChatInputHandles, ChatInputProps>(
|
|||
|
||||
const removeUploadedFile = (index: number) => {
|
||||
const fileToRemove = uploadedFiles[index];
|
||||
|
||||
// Clean up local URL if it exists
|
||||
if (fileToRemove.localUrl) {
|
||||
URL.revokeObjectURL(fileToRemove.localUrl);
|
||||
}
|
||||
|
||||
// Remove from local state immediately for responsive UI
|
||||
setUploadedFiles((prev) => prev.filter((_, i) => i !== index));
|
||||
if (!sandboxId && pendingFiles.length > index) {
|
||||
setPendingFiles((prev) => prev.filter((_, i) => i !== index));
|
||||
}
|
||||
|
||||
// Check if file is referenced in existing chat messages before deleting from server
|
||||
const isFileUsedInChat = messages.some(message => {
|
||||
const content = typeof message.content === 'string' ? message.content : '';
|
||||
return content.includes(`[Uploaded File: ${fileToRemove.path}]`);
|
||||
});
|
||||
|
||||
// Only delete from server if file is not referenced in chat history
|
||||
if (sandboxId && fileToRemove.path && !isFileUsedInChat) {
|
||||
deleteFileMutation.mutate({
|
||||
sandboxId,
|
||||
filePath: fileToRemove.path,
|
||||
}, {
|
||||
onError: (error) => {
|
||||
console.error('Failed to delete file from server:', error);
|
||||
}
|
||||
});
|
||||
} else if (isFileUsedInChat) {
|
||||
console.log(`Skipping server deletion for ${fileToRemove.path} - file is referenced in chat history`);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDragOver = (e: React.DragEvent<HTMLDivElement>) => {
|
||||
|
@ -193,6 +223,8 @@ export const ChatInput = forwardRef<ChatInputHandles, ChatInputProps>(
|
|||
setPendingFiles,
|
||||
setUploadedFiles,
|
||||
setIsUploading,
|
||||
messages,
|
||||
queryClient,
|
||||
);
|
||||
}
|
||||
}}
|
||||
|
@ -228,6 +260,7 @@ export const ChatInput = forwardRef<ChatInputHandles, ChatInputProps>(
|
|||
setUploadedFiles={setUploadedFiles}
|
||||
setIsUploading={setIsUploading}
|
||||
hideAttachments={hideAttachments}
|
||||
messages={messages}
|
||||
|
||||
selectedModel={selectedModel}
|
||||
onModelChange={handleModelChange}
|
||||
|
|
|
@ -5,6 +5,8 @@ import { Button } from '@/components/ui/button';
|
|||
import { Paperclip, Loader2 } from 'lucide-react';
|
||||
import { toast } from 'sonner';
|
||||
import { createClient } from '@/lib/supabase/client';
|
||||
import { useQueryClient } from '@tanstack/react-query';
|
||||
import { fileQueryKeys } from '@/hooks/react-query/files/use-file-queries';
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
|
@ -49,6 +51,8 @@ const uploadFiles = async (
|
|||
sandboxId: string,
|
||||
setUploadedFiles: React.Dispatch<React.SetStateAction<UploadedFile[]>>,
|
||||
setIsUploading: React.Dispatch<React.SetStateAction<boolean>>,
|
||||
messages: any[] = [], // Add messages parameter to check for existing files
|
||||
queryClient?: any, // Add queryClient parameter for cache invalidation
|
||||
) => {
|
||||
try {
|
||||
setIsUploading(true);
|
||||
|
@ -61,10 +65,16 @@ const uploadFiles = async (
|
|||
continue;
|
||||
}
|
||||
|
||||
const uploadPath = `/workspace/${file.name}`;
|
||||
|
||||
// Check if this filename already exists in chat messages
|
||||
const isFileInChat = messages.some(message => {
|
||||
const content = typeof message.content === 'string' ? message.content : '';
|
||||
return content.includes(`[Uploaded File: ${uploadPath}]`);
|
||||
});
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
|
||||
const uploadPath = `/workspace/${file.name}`;
|
||||
formData.append('path', uploadPath);
|
||||
|
||||
const supabase = createClient();
|
||||
|
@ -88,6 +98,23 @@ const uploadFiles = async (
|
|||
throw new Error(`Upload failed: ${response.statusText}`);
|
||||
}
|
||||
|
||||
// If file was already in chat and we have queryClient, invalidate its cache
|
||||
if (isFileInChat && queryClient) {
|
||||
console.log(`Invalidating cache for existing file: ${uploadPath}`);
|
||||
|
||||
// Invalidate all content types for this file
|
||||
['text', 'blob', 'json'].forEach(contentType => {
|
||||
const queryKey = fileQueryKeys.content(sandboxId, uploadPath, contentType);
|
||||
queryClient.removeQueries({ queryKey });
|
||||
});
|
||||
|
||||
// Also invalidate directory listing
|
||||
const directoryPath = uploadPath.substring(0, uploadPath.lastIndexOf('/'));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directory(sandboxId, directoryPath),
|
||||
});
|
||||
}
|
||||
|
||||
newUploadedFiles.push({
|
||||
name: file.name,
|
||||
path: uploadPath,
|
||||
|
@ -119,10 +146,12 @@ const handleFiles = async (
|
|||
setPendingFiles: React.Dispatch<React.SetStateAction<File[]>>,
|
||||
setUploadedFiles: React.Dispatch<React.SetStateAction<UploadedFile[]>>,
|
||||
setIsUploading: React.Dispatch<React.SetStateAction<boolean>>,
|
||||
messages: any[] = [], // Add messages parameter
|
||||
queryClient?: any, // Add queryClient parameter
|
||||
) => {
|
||||
if (sandboxId) {
|
||||
// If we have a sandboxId, upload files directly
|
||||
await uploadFiles(files, sandboxId, setUploadedFiles, setIsUploading);
|
||||
await uploadFiles(files, sandboxId, setUploadedFiles, setIsUploading, messages, queryClient);
|
||||
} else {
|
||||
// Otherwise, store files locally
|
||||
handleLocalFiles(files, setPendingFiles, setUploadedFiles);
|
||||
|
@ -138,6 +167,7 @@ interface FileUploadHandlerProps {
|
|||
setPendingFiles: React.Dispatch<React.SetStateAction<File[]>>;
|
||||
setUploadedFiles: React.Dispatch<React.SetStateAction<UploadedFile[]>>;
|
||||
setIsUploading: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
messages?: any[]; // Add messages prop
|
||||
}
|
||||
|
||||
export const FileUploadHandler = forwardRef<
|
||||
|
@ -154,9 +184,11 @@ export const FileUploadHandler = forwardRef<
|
|||
setPendingFiles,
|
||||
setUploadedFiles,
|
||||
setIsUploading,
|
||||
messages = [],
|
||||
},
|
||||
ref,
|
||||
) => {
|
||||
const queryClient = useQueryClient();
|
||||
// Clean up object URLs when component unmounts
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
|
@ -191,6 +223,8 @@ export const FileUploadHandler = forwardRef<
|
|||
setPendingFiles,
|
||||
setUploadedFiles,
|
||||
setIsUploading,
|
||||
messages,
|
||||
queryClient,
|
||||
);
|
||||
|
||||
event.target.value = '';
|
||||
|
|
|
@ -31,6 +31,7 @@ interface MessageInputProps {
|
|||
setUploadedFiles: React.Dispatch<React.SetStateAction<UploadedFile[]>>;
|
||||
setIsUploading: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
hideAttachments?: boolean;
|
||||
messages?: any[]; // Add messages prop
|
||||
|
||||
selectedModel: string;
|
||||
onModelChange: (model: string) => void;
|
||||
|
@ -61,6 +62,7 @@ export const MessageInput = forwardRef<HTMLTextAreaElement, MessageInputProps>(
|
|||
setUploadedFiles,
|
||||
setIsUploading,
|
||||
hideAttachments = false,
|
||||
messages = [],
|
||||
|
||||
selectedModel,
|
||||
onModelChange,
|
||||
|
@ -137,6 +139,7 @@ export const MessageInput = forwardRef<HTMLTextAreaElement, MessageInputProps>(
|
|||
setPendingFiles={setPendingFiles}
|
||||
setUploadedFiles={setUploadedFiles}
|
||||
setIsUploading={setIsUploading}
|
||||
messages={messages}
|
||||
/>
|
||||
)}
|
||||
|
||||
|
@ -148,7 +151,7 @@ export const MessageInput = forwardRef<HTMLTextAreaElement, MessageInputProps>(
|
|||
<TooltipTrigger>
|
||||
<p className='text-sm text-amber-500 hidden sm:block'>Upgrade for full performance</p>
|
||||
<div className='sm:hidden absolute bottom-0 left-0 right-0 flex justify-center'>
|
||||
<p className='text-xs text-amber-500 bg-background/80 backdrop-blur-sm px-2 py-1 rounded-md shadow-sm'>
|
||||
<p className='text-xs text-amber-500 px-2 py-1'>
|
||||
Upgrade for better performance
|
||||
</p>
|
||||
</div>
|
||||
|
|
|
@ -4,7 +4,7 @@ import { Button } from '@/components/ui/button';
|
|||
import { Markdown } from '@/components/ui/markdown';
|
||||
import { UnifiedMessage, ParsedContent, ParsedMetadata } from '@/components/thread/types';
|
||||
import { FileAttachmentGrid } from '@/components/thread/file-attachment';
|
||||
import { FileCache } from '@/hooks/use-cached-file';
|
||||
import { useFilePreloader, FileCache } from '@/hooks/react-query/files';
|
||||
import { useAuth } from '@/components/AuthProvider';
|
||||
import { Project } from '@/lib/api';
|
||||
import {
|
||||
|
@ -45,35 +45,12 @@ const HIDE_STREAMING_XML_TAGS = new Set([
|
|||
'see-image'
|
||||
]);
|
||||
|
||||
// Helper function to render attachments
|
||||
// Helper function to render attachments (keeping original implementation for now)
|
||||
export function renderAttachments(attachments: string[], fileViewerHandler?: (filePath?: string) => void, sandboxId?: string, project?: Project) {
|
||||
if (!attachments || attachments.length === 0) return null;
|
||||
|
||||
// Preload attachments into cache if we have a sandboxId
|
||||
if (sandboxId) {
|
||||
// Check if we can access localStorage and if there's a valid auth session before trying to preload
|
||||
let hasValidSession = false;
|
||||
let token = null;
|
||||
|
||||
try {
|
||||
const sessionData = localStorage.getItem('auth');
|
||||
if (sessionData) {
|
||||
const session = JSON.parse(sessionData);
|
||||
token = session?.access_token;
|
||||
hasValidSession = !!token;
|
||||
}
|
||||
} catch (err) {
|
||||
// Silent catch - localStorage might be unavailable in some contexts
|
||||
}
|
||||
|
||||
// Only attempt to preload if we have a valid session
|
||||
if (hasValidSession && token) {
|
||||
// Use setTimeout to do this asynchronously without blocking rendering
|
||||
setTimeout(() => {
|
||||
FileCache.preload(sandboxId, attachments, token);
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
// Note: Preloading is now handled by React Query in the main ThreadContent component
|
||||
// to avoid duplicate requests with different content types
|
||||
|
||||
return <FileAttachmentGrid
|
||||
attachments={attachments}
|
||||
|
@ -218,6 +195,9 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
const [userHasScrolled, setUserHasScrolled] = useState(false);
|
||||
const { session } = useAuth();
|
||||
|
||||
// React Query file preloader
|
||||
const { preloadFiles } = useFilePreloader();
|
||||
|
||||
// In playback mode, we use visibleMessages instead of messages
|
||||
const displayMessages = readOnly && visibleMessages ? visibleMessages : messages;
|
||||
|
||||
|
@ -259,16 +239,18 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
}
|
||||
});
|
||||
|
||||
// Only attempt to preload if we have attachments AND a valid token
|
||||
// Use React Query preloading if we have attachments AND a valid token
|
||||
if (allAttachments.length > 0 && session?.access_token) {
|
||||
// Preload files in background with authentication token
|
||||
FileCache.preload(sandboxId, allAttachments, session.access_token);
|
||||
// Preload files with React Query in background
|
||||
preloadFiles(sandboxId, allAttachments).catch(err => {
|
||||
console.error('React Query preload failed:', err);
|
||||
});
|
||||
}
|
||||
}, [displayMessages, sandboxId, session?.access_token]);
|
||||
}, [displayMessages, sandboxId, session?.access_token, preloadFiles]);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
|
||||
<div
|
||||
ref={messagesContainerRef}
|
||||
className="flex-1 overflow-y-auto scrollbar-thin scrollbar-track-secondary/0 scrollbar-thumb-primary/10 scrollbar-thumb-rounded-full hover:scrollbar-thumb-primary/10 px-6 py-4 pb-72 bg-background/95 backdrop-blur supports-[backdrop-filter]:bg-background/60"
|
||||
|
@ -285,7 +267,7 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
) : (
|
||||
<div className="space-y-8">
|
||||
{(() => {
|
||||
|
||||
|
||||
type MessageGroup = {
|
||||
type: 'user' | 'assistant_group';
|
||||
messages: UnifiedMessage[];
|
||||
|
@ -374,7 +356,7 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return groupedMessages.map((group, groupIndex) => {
|
||||
if (group.type === 'user') {
|
||||
const message = group.messages[0];
|
||||
|
@ -431,7 +413,7 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
<div key={group.key} ref={groupIndex === groupedMessages.length - 1 ? latestMessageRef : null}>
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="flex-shrink-0 w-5 h-5 mt-2 rounded-md flex items-center justify-center ml-auto mr-2">
|
||||
<KortixLogo />
|
||||
<KortixLogo />
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<div className="inline-flex max-w-[90%] rounded-lg px-4 text-sm">
|
||||
|
@ -652,16 +634,16 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
return null;
|
||||
});
|
||||
})()}
|
||||
{((agentStatus === 'running' || agentStatus === 'connecting' ) && !streamingTextContent &&
|
||||
{((agentStatus === 'running' || agentStatus === 'connecting') && !streamingTextContent &&
|
||||
!readOnly &&
|
||||
(messages.length === 0 || messages[messages.length - 1].type === 'user')) && (
|
||||
<div ref={latestMessageRef} className='w-full h-22 rounded'>
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="flex-shrink-0 w-5 h-5 rounded-md flex items-center justify-center bg-primary/10">
|
||||
<KortixLogo />
|
||||
<KortixLogo />
|
||||
</div>
|
||||
<div className="flex-1 space-y-2 w-full h-12">
|
||||
<AgentLoader/>
|
||||
<AgentLoader />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -691,7 +673,7 @@ export const ThreadContent: React.FC<ThreadContentProps> = ({
|
|||
<div ref={latestMessageRef}>
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="flex-shrink-0 w-5 h-5 mt-2 rounded-md flex items-center justify-center bg-primary/10">
|
||||
<KortixLogo />
|
||||
<KortixLogo />
|
||||
</div>
|
||||
<div className="flex-1 space-y-2">
|
||||
<div className="max-w-[90%] px-4 py-3 text-sm">
|
||||
|
|
|
@ -10,8 +10,7 @@ import { AttachmentGroup } from './attachment-group';
|
|||
import { HtmlRenderer } from './preview-renderers/html-renderer';
|
||||
import { MarkdownRenderer } from './preview-renderers/markdown-renderer';
|
||||
import { CsvRenderer } from './preview-renderers/csv-renderer';
|
||||
import { useFileContent } from '@/hooks/use-file-content';
|
||||
import { useImageContent } from '@/hooks/use-image-content';
|
||||
import { useFileContent, useImageContent } from '@/hooks/react-query/files';
|
||||
import { useAuth } from '@/components/AuthProvider';
|
||||
import { Project } from '@/lib/api';
|
||||
|
||||
|
@ -249,7 +248,7 @@ export function FileAttachment({
|
|||
"bg-black/5 dark:bg-black/20",
|
||||
"p-0 overflow-hidden",
|
||||
"flex items-center justify-center",
|
||||
isGridLayout ? "w-full" : "inline-block",
|
||||
isGridLayout ? "w-full" : "min-w-[54px]",
|
||||
className
|
||||
)}
|
||||
style={{
|
||||
|
@ -331,7 +330,26 @@ export function FileAttachment({
|
|||
|
||||
// Only log details in dev environments to avoid console spam
|
||||
if (process.env.NODE_ENV === 'development') {
|
||||
console.error('Image URL:', sandboxId && session?.access_token ? imageUrl : fileUrl);
|
||||
const imgSrc = sandboxId && session?.access_token ? imageUrl : fileUrl;
|
||||
console.error('Image URL:', imgSrc);
|
||||
|
||||
// Additional debugging for blob URLs
|
||||
if (typeof imgSrc === 'string' && imgSrc.startsWith('blob:')) {
|
||||
console.error('Blob URL failed to load. This could indicate:');
|
||||
console.error('- Blob URL was revoked prematurely');
|
||||
console.error('- Blob data is corrupted or invalid');
|
||||
console.error('- MIME type mismatch');
|
||||
|
||||
// Try to check if the blob URL is still valid
|
||||
fetch(imgSrc, { method: 'HEAD' })
|
||||
.then(response => {
|
||||
console.error(`Blob URL HEAD request status: ${response.status}`);
|
||||
console.error(`Blob URL content type: ${response.headers.get('content-type')}`);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error('Blob URL HEAD request failed:', err.message);
|
||||
});
|
||||
}
|
||||
|
||||
// Check if the error is potentially due to authentication
|
||||
if (sandboxId && (!session || !session.access_token)) {
|
||||
|
|
|
@ -43,7 +43,12 @@ import {
|
|||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
} from '@/components/ui/dropdown-menu';
|
||||
import { useCachedFile, getCachedFile, FileCache } from '@/hooks/use-cached-file';
|
||||
import {
|
||||
useDirectoryQuery,
|
||||
useFileContentQuery,
|
||||
useFileUpload,
|
||||
FileCache
|
||||
} from '@/hooks/react-query/files';
|
||||
|
||||
// Define API_URL
|
||||
const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';
|
||||
|
@ -71,10 +76,19 @@ export function FileViewerModal({
|
|||
|
||||
// File navigation state
|
||||
const [currentPath, setCurrentPath] = useState('/workspace');
|
||||
const [files, setFiles] = useState<FileInfo[]>([]);
|
||||
const [isLoadingFiles, setIsLoadingFiles] = useState(false);
|
||||
const [isInitialLoad, setIsInitialLoad] = useState(true);
|
||||
|
||||
// Use React Query for directory listing
|
||||
const {
|
||||
data: files = [],
|
||||
isLoading: isLoadingFiles,
|
||||
error: filesError,
|
||||
refetch: refetchFiles
|
||||
} = useDirectoryQuery(sandboxId, currentPath, {
|
||||
enabled: open && !!sandboxId,
|
||||
staleTime: 30 * 1000, // 30 seconds
|
||||
});
|
||||
|
||||
// Add a navigation lock to prevent race conditions
|
||||
const [isNavigationLocked, setIsNavigationLocked] = useState(false);
|
||||
const currentNavigationRef = useRef<string | null>(null);
|
||||
|
@ -88,22 +102,20 @@ export function FileViewerModal({
|
|||
const [blobUrlForRenderer, setBlobUrlForRenderer] = useState<string | null>(
|
||||
null,
|
||||
);
|
||||
const [isLoadingContent, setIsLoadingContent] = useState(false);
|
||||
const [contentError, setContentError] = useState<string | null>(null);
|
||||
|
||||
// Add a ref to track current loading operation
|
||||
const loadingFileRef = useRef<string | null>(null);
|
||||
|
||||
// Use the cached file hook for the selected file
|
||||
// Use the React Query hook for the selected file instead of useCachedFile
|
||||
const {
|
||||
data: cachedFileContent,
|
||||
isLoading: isCachedFileLoading,
|
||||
error: cachedFileError,
|
||||
} = useCachedFile(
|
||||
} = useFileContentQuery(
|
||||
sandboxId,
|
||||
selectedFilePath,
|
||||
{
|
||||
contentType: 'text', // Default to text, we'll handle binary later
|
||||
// Auto-detect content type consistently with other components
|
||||
enabled: !!selectedFilePath,
|
||||
staleTime: 5 * 60 * 1000, // 5 minutes
|
||||
}
|
||||
);
|
||||
|
||||
|
@ -169,30 +181,17 @@ export function FileViewerModal({
|
|||
setTextContentForRenderer(null); // Clear derived text content
|
||||
setBlobUrlForRenderer(null); // Clear derived blob URL
|
||||
setContentError(null);
|
||||
setIsLoadingContent(false);
|
||||
loadingFileRef.current = null; // Clear the loading ref
|
||||
}, []);
|
||||
|
||||
// Forward declaration for openFile - will be defined below but referenced first
|
||||
// Core file opening function
|
||||
const openFile = useCallback(
|
||||
async (file: FileInfo) => {
|
||||
if (file.is_dir) {
|
||||
// Since navigateToFolder is defined below, we can safely call it
|
||||
// We define navigateToFolder first, then use it in openFile
|
||||
// For directories, just navigate to that folder
|
||||
if (!file.is_dir) return;
|
||||
|
||||
// Ensure the path is properly normalized
|
||||
const normalizedPath = normalizePath(file.path);
|
||||
|
||||
// Always navigate to the folder to ensure breadcrumbs update correctly
|
||||
console.log(
|
||||
`[FILE VIEWER] Navigating to folder: ${file.path} → ${normalizedPath}`,
|
||||
);
|
||||
console.log(
|
||||
`[FILE VIEWER] Current path before navigation: ${currentPath}`,
|
||||
);
|
||||
|
||||
// Clear selected file when navigating
|
||||
clearSelectedFile();
|
||||
|
@ -202,19 +201,17 @@ export function FileViewerModal({
|
|||
return;
|
||||
}
|
||||
|
||||
// Skip if already selected and content exists
|
||||
if (selectedFilePath === file.path && rawContent) {
|
||||
console.log(`[FILE VIEWER] File already loaded: ${file.path}`);
|
||||
// Skip if already selected
|
||||
if (selectedFilePath === file.path) {
|
||||
console.log(`[FILE VIEWER] File already selected: ${file.path}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[FILE VIEWER] Opening file: ${file.path}`);
|
||||
|
||||
// Check if this is an image or PDF file
|
||||
// Check file types for logging
|
||||
const isImageFile = FileCache.isImageFile(file.path);
|
||||
const isPdfFile = FileCache.isPdfFile(file.path);
|
||||
|
||||
// Check for Office documents and other binary files
|
||||
const extension = file.path.split('.').pop()?.toLowerCase();
|
||||
const isOfficeFile = ['xlsx', 'xls', 'docx', 'doc', 'pptx', 'ppt'].includes(extension || '');
|
||||
|
||||
|
@ -226,118 +223,16 @@ export function FileViewerModal({
|
|||
console.log(`[FILE VIEWER] Opening Office document: ${file.path} (${extension})`);
|
||||
}
|
||||
|
||||
// Clear previous state FIRST
|
||||
// Clear previous state and set selected file
|
||||
clearSelectedFile();
|
||||
|
||||
// Set loading state immediately for UX
|
||||
setIsLoadingContent(true);
|
||||
setSelectedFilePath(file.path);
|
||||
|
||||
// Set the loading ref to track current operation
|
||||
loadingFileRef.current = file.path;
|
||||
|
||||
try {
|
||||
// For PDFs and Office documents, always use blob content type
|
||||
const contentType = isPdfFile || isOfficeFile ? 'blob' : FileCache.getContentTypeFromPath(file.path);
|
||||
|
||||
console.log(`[FILE VIEWER] Fetching content for ${file.path} with content type: ${contentType}`);
|
||||
|
||||
// Fetch content using the cached file utility
|
||||
const content = await getCachedFile(
|
||||
sandboxId,
|
||||
file.path,
|
||||
{
|
||||
contentType: contentType as 'text' | 'blob' | 'json',
|
||||
force: isPdfFile, // Force refresh for PDFs to ensure we get a blob
|
||||
token: session?.access_token,
|
||||
}
|
||||
);
|
||||
|
||||
|
||||
|
||||
// Critical check: Ensure the file we just loaded is still the one selected
|
||||
if (loadingFileRef.current !== file.path) {
|
||||
console.log(
|
||||
`[FILE VIEWER] Selection changed during loading, aborting. Loading: ${loadingFileRef.current}, Expected: ${file.path}`,
|
||||
);
|
||||
setIsLoadingContent(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Store raw content
|
||||
setRawContent(content);
|
||||
|
||||
// Handle content based on type
|
||||
if (typeof content === 'string') {
|
||||
if (content.startsWith('blob:')) {
|
||||
console.log(`[FILE VIEWER] Setting blob URL directly: ${content}`);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(content);
|
||||
} else if (isPdfFile || isOfficeFile) {
|
||||
// For PDFs and Office files, we should never get here as they should be handled as blobs
|
||||
console.error(`[FILE VIEWER] Received binary file content as string instead of blob, length: ${content.length}`);
|
||||
console.log(`[FILE VIEWER] First 100 chars of content: ${content.substring(0, 100)}`);
|
||||
|
||||
// Try one more time with explicit blob type and force refresh
|
||||
console.log(`[FILE VIEWER] Retrying binary file fetch with explicit blob type and force refresh`);
|
||||
const binaryBlob = await getCachedFile(
|
||||
sandboxId,
|
||||
file.path,
|
||||
{
|
||||
contentType: 'blob',
|
||||
force: true,
|
||||
token: session.access_token,
|
||||
}
|
||||
);
|
||||
|
||||
if (typeof binaryBlob === 'string' && binaryBlob.startsWith('blob:')) {
|
||||
console.log(`[FILE VIEWER] Successfully got blob URL on retry: ${binaryBlob}`);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(binaryBlob);
|
||||
} else {
|
||||
throw new Error('Failed to load binary file in correct format after retry');
|
||||
}
|
||||
} else {
|
||||
console.log(`[FILE VIEWER] Setting text content directly for renderer.`);
|
||||
setTextContentForRenderer(content);
|
||||
setBlobUrlForRenderer(null);
|
||||
}
|
||||
} else if (isBlob(content)) {
|
||||
console.log(`[FILE VIEWER] Content is a Blob. Creating blob URL.`);
|
||||
const url = URL.createObjectURL(content);
|
||||
console.log(`[FILE VIEWER] Created blob URL: ${url}`);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(url);
|
||||
}
|
||||
|
||||
setIsLoadingContent(false);
|
||||
} catch (error) {
|
||||
console.error(`[FILE VIEWER] Error loading file:`, error);
|
||||
if (loadingFileRef.current === file.path) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
if (errorMessage.includes('Authentication token required') ||
|
||||
errorMessage.includes('Authentication token missing')) {
|
||||
toast.error('Authentication error. Please refresh and login again.');
|
||||
setContentError('Authentication error. Please refresh the page and login again.');
|
||||
} else {
|
||||
setContentError(`Failed to load file: ${errorMessage}`);
|
||||
}
|
||||
setIsLoadingContent(false);
|
||||
setRawContent(null);
|
||||
}
|
||||
} finally {
|
||||
if (loadingFileRef.current === file.path) {
|
||||
loadingFileRef.current = null;
|
||||
}
|
||||
}
|
||||
// The useFileContentQuery hook will automatically handle loading the content
|
||||
// No need to manually fetch here - React Query will handle it
|
||||
},
|
||||
[
|
||||
sandboxId,
|
||||
selectedFilePath,
|
||||
rawContent,
|
||||
clearSelectedFile,
|
||||
session?.access_token,
|
||||
currentPath,
|
||||
normalizePath,
|
||||
],
|
||||
);
|
||||
|
@ -358,66 +253,20 @@ export function FileViewerModal({
|
|||
currentNavigationRef.current = currentPath;
|
||||
console.log(`[FILE VIEWER] Starting navigation to: ${currentPath}`);
|
||||
|
||||
const loadTimeout = setTimeout(async () => {
|
||||
setIsLoadingFiles(true);
|
||||
console.log(
|
||||
`[FILE VIEWER] useEffect[currentPath]: Triggered. Loading files for path: ${currentPath}`,
|
||||
);
|
||||
try {
|
||||
// Log cache status
|
||||
console.log(`[FILE VIEWER] Checking cache for directory listing at ${currentPath}`);
|
||||
// React Query handles the loading state automatically
|
||||
console.log(`[FILE VIEWER] React Query will handle directory listing for: ${currentPath}`);
|
||||
|
||||
// Create a cache key for this directory listing
|
||||
const dirCacheKey = `${sandboxId}:directory:${currentPath}`;
|
||||
// After the first load, set isInitialLoad to false
|
||||
if (isInitialLoad) {
|
||||
setIsInitialLoad(false);
|
||||
}
|
||||
|
||||
// Check if we have this directory listing cached
|
||||
let filesData;
|
||||
if (FileCache.has(dirCacheKey) && !isInitialLoad) {
|
||||
console.log(`[FILE VIEWER] Using cached directory listing for ${currentPath}`);
|
||||
filesData = FileCache.get(dirCacheKey);
|
||||
} else {
|
||||
console.log(`[FILE VIEWER] Cache miss, fetching directory listing from API for ${currentPath}`);
|
||||
filesData = await listSandboxFiles(sandboxId, currentPath);
|
||||
|
||||
// Cache the directory listing
|
||||
if (filesData && Array.isArray(filesData)) {
|
||||
console.log(`[FILE VIEWER] Caching directory listing: ${filesData.length} files`);
|
||||
FileCache.set(dirCacheKey, filesData);
|
||||
}
|
||||
}
|
||||
|
||||
// Only update files if we're still on the same path
|
||||
if (currentNavigationRef.current === currentPath) {
|
||||
console.log(
|
||||
`[FILE VIEWER] useEffect[currentPath]: Got ${filesData?.length || 0} files for ${currentPath}`,
|
||||
);
|
||||
setFiles(filesData || []);
|
||||
} else {
|
||||
console.log(`[FILE VIEWER] Path changed during loading, aborting file update for ${currentPath}`);
|
||||
}
|
||||
|
||||
// After the first load, set isInitialLoad to false
|
||||
if (isInitialLoad) {
|
||||
setIsInitialLoad(false);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to load files:', error);
|
||||
toast.error('Failed to load files');
|
||||
if (currentNavigationRef.current === currentPath) {
|
||||
setFiles([]);
|
||||
}
|
||||
} finally {
|
||||
// Only clear loading state if we're still working with the current path
|
||||
if (currentNavigationRef.current === currentPath) {
|
||||
setIsLoadingFiles(false);
|
||||
console.log(`[FILE VIEWER] Completed loading for: ${currentPath}`);
|
||||
}
|
||||
}
|
||||
}, 50); // Short delay to allow state updates to settle
|
||||
|
||||
return () => clearTimeout(loadTimeout);
|
||||
// Dependency: Only re-run when open, sandboxId, currentPath changes
|
||||
}, [open, sandboxId, currentPath, isInitialLoad, isLoadingFiles]);
|
||||
// Handle any loading errors
|
||||
if (filesError) {
|
||||
console.error('Failed to load files:', filesError);
|
||||
toast.error('Failed to load files');
|
||||
}
|
||||
}, [open, sandboxId, currentPath, isInitialLoad, isLoadingFiles, filesError]);
|
||||
|
||||
// Helper function to navigate to a folder
|
||||
const navigateToFolder = useCallback(
|
||||
|
@ -587,128 +436,64 @@ export function FileViewerModal({
|
|||
}
|
||||
}, [open, safeInitialFilePath, initialPathProcessed, normalizePath, currentPath, openFile]);
|
||||
|
||||
// Fix the useEffect that's causing infinite rendering by using a stable reference check
|
||||
// Replace the problematic useEffect around line 369
|
||||
useEffect(() => {
|
||||
// Only create a blob URL if we have raw content that is a Blob AND we don't already have a blob URL
|
||||
// This prevents the infinite loop of creating URLs → triggering renders → creating more URLs
|
||||
if (rawContent && isBlob(rawContent) && selectedFilePath && !blobUrlForRenderer) {
|
||||
// Check if this is an image file
|
||||
const isImageFile = selectedFilePath.match(/\.(png|jpg|jpeg|gif|svg|webp|bmp)$/i);
|
||||
|
||||
// Create a blob URL for binary content
|
||||
const url = URL.createObjectURL(rawContent);
|
||||
|
||||
if (isImageFile) {
|
||||
console.log(`[FILE VIEWER][IMAGE DEBUG] Created new blob URL: ${url} for image: ${selectedFilePath}`);
|
||||
console.log(`[FILE VIEWER][IMAGE DEBUG] Image blob size: ${rawContent.size} bytes, type: ${rawContent.type}`);
|
||||
} else {
|
||||
console.log(`[FILE VIEWER] Created blob URL: ${url} for ${selectedFilePath}`);
|
||||
}
|
||||
|
||||
setBlobUrlForRenderer(url);
|
||||
}
|
||||
|
||||
// Clean up previous URL when component unmounts or URL changes
|
||||
return () => {
|
||||
if (blobUrlForRenderer) {
|
||||
console.log(`[FILE VIEWER] Revoking blob URL on cleanup: ${blobUrlForRenderer}`);
|
||||
URL.revokeObjectURL(blobUrlForRenderer);
|
||||
}
|
||||
};
|
||||
}, [rawContent, selectedFilePath, isBlob, blobUrlForRenderer]);
|
||||
|
||||
// Effect to handle cached file content updates
|
||||
useEffect(() => {
|
||||
if (!selectedFilePath) return;
|
||||
|
||||
// Only update loading state if it's different from what we expect
|
||||
if (isCachedFileLoading && !isLoadingContent) {
|
||||
setIsLoadingContent(true);
|
||||
} else if (!isCachedFileLoading && isLoadingContent) {
|
||||
if (cachedFileError) {
|
||||
setContentError(`Failed to load file: ${cachedFileError.message}`);
|
||||
} else if (cachedFileContent !== null) {
|
||||
console.log(`[FILE VIEWER] Received cached content type: ${typeof cachedFileContent}`);
|
||||
console.log(`[FILE VIEWER] Received cached content is Blob: ${isBlob(cachedFileContent)}`);
|
||||
console.log(`[FILE VIEWER] Received cached content is string: ${typeof cachedFileContent === 'string'}`);
|
||||
console.log(`[FILE VIEWER] Received cached content starts with blob: ${typeof cachedFileContent === 'string' && cachedFileContent.startsWith('blob:')}`);
|
||||
|
||||
// Check if this is a PDF file or Office file
|
||||
const isPdfFile = FileCache.isPdfFile(selectedFilePath);
|
||||
const extension = selectedFilePath.split('.').pop()?.toLowerCase();
|
||||
const isOfficeFile = ['xlsx', 'xls', 'docx', 'doc', 'pptx', 'ppt'].includes(extension || '');
|
||||
|
||||
if (isPdfFile || isOfficeFile) {
|
||||
// For PDFs and Office files, handle specially to ensure it's always a blob URL
|
||||
if (typeof cachedFileContent === 'string' && cachedFileContent.startsWith('blob:')) {
|
||||
console.log(`[FILE VIEWER] Using existing blob URL for binary file`);
|
||||
setBlobUrlForRenderer(cachedFileContent);
|
||||
setTextContentForRenderer(null);
|
||||
} else if (isBlob(cachedFileContent)) {
|
||||
console.log(`[FILE VIEWER] Creating new blob URL from cached binary blob`);
|
||||
const url = URL.createObjectURL(cachedFileContent);
|
||||
setBlobUrlForRenderer(url);
|
||||
setTextContentForRenderer(null);
|
||||
} else {
|
||||
// If we somehow got text content for a binary file, force a refresh with blob type
|
||||
console.log(`[FILE VIEWER] Invalid binary content type, forcing refresh with blob type`);
|
||||
|
||||
// Force refresh with blob type
|
||||
(async () => {
|
||||
try {
|
||||
console.log(`[FILE VIEWER] Explicitly fetching binary file as blob`);
|
||||
|
||||
const binaryContent = await getCachedFile(
|
||||
sandboxId,
|
||||
selectedFilePath,
|
||||
{
|
||||
contentType: 'blob',
|
||||
force: true,
|
||||
token: session?.access_token
|
||||
}
|
||||
);
|
||||
|
||||
if (typeof binaryContent === 'string' && binaryContent.startsWith('blob:')) {
|
||||
console.log(`[FILE VIEWER] Received correct blob URL for binary file: ${binaryContent}`);
|
||||
setBlobUrlForRenderer(binaryContent);
|
||||
setTextContentForRenderer(null);
|
||||
} else {
|
||||
console.error(`[FILE VIEWER] Failed to get correct binary format after retry`);
|
||||
setContentError('Failed to load file in correct format');
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(`[FILE VIEWER] Error loading binary file:`, err);
|
||||
setContentError(`Failed to load file: ${err instanceof Error ? err.message : String(err)}`);
|
||||
} finally {
|
||||
setIsLoadingContent(false);
|
||||
}
|
||||
})();
|
||||
|
||||
return; // Skip the rest since we're handling loading manually
|
||||
}
|
||||
} else {
|
||||
// For non-PDF files, handle as before
|
||||
setRawContent(cachedFileContent);
|
||||
|
||||
if (typeof cachedFileContent === 'string') {
|
||||
if (cachedFileContent.startsWith('blob:')) {
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(cachedFileContent);
|
||||
} else {
|
||||
setTextContentForRenderer(cachedFileContent);
|
||||
setBlobUrlForRenderer(null);
|
||||
}
|
||||
} else if (cachedFileContent && isBlob(cachedFileContent)) {
|
||||
const url = URL.createObjectURL(cachedFileContent);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(url);
|
||||
}
|
||||
}
|
||||
}
|
||||
setIsLoadingContent(false);
|
||||
// Handle errors
|
||||
if (cachedFileError) {
|
||||
setContentError(`Failed to load file: ${cachedFileError.message}`);
|
||||
return;
|
||||
}
|
||||
}, [selectedFilePath, cachedFileContent, isCachedFileLoading, cachedFileError, isLoadingContent, isBlob, openFile, sandboxId, session?.access_token]);
|
||||
|
||||
// Handle successful content
|
||||
if (cachedFileContent !== null && !isCachedFileLoading) {
|
||||
console.log(`[FILE VIEWER] Received cached content for: ${selectedFilePath}`);
|
||||
|
||||
// Check file type to determine proper handling
|
||||
const isImageFile = FileCache.isImageFile(selectedFilePath);
|
||||
const isPdfFile = FileCache.isPdfFile(selectedFilePath);
|
||||
const extension = selectedFilePath.split('.').pop()?.toLowerCase();
|
||||
const isOfficeFile = ['xlsx', 'xls', 'docx', 'doc', 'pptx', 'ppt'].includes(extension || '');
|
||||
const isBinaryFile = isImageFile || isPdfFile || isOfficeFile;
|
||||
|
||||
// Store raw content
|
||||
setRawContent(cachedFileContent);
|
||||
|
||||
// Handle content based on type and file extension
|
||||
if (typeof cachedFileContent === 'string') {
|
||||
if (cachedFileContent.startsWith('blob:')) {
|
||||
// It's already a blob URL
|
||||
console.log(`[FILE VIEWER] Setting blob URL from cached content: ${cachedFileContent}`);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(cachedFileContent);
|
||||
} else if (isBinaryFile) {
|
||||
// Binary files should not be displayed as text, even if they come as strings
|
||||
console.warn(`[FILE VIEWER] Binary file received as string content, this should not happen: ${selectedFilePath}`);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(null);
|
||||
setContentError('Binary file received in incorrect format. Please try refreshing.');
|
||||
} else {
|
||||
// Actual text content for text files
|
||||
console.log(`[FILE VIEWER] Setting text content for text file: ${selectedFilePath}`);
|
||||
setTextContentForRenderer(cachedFileContent);
|
||||
setBlobUrlForRenderer(null);
|
||||
}
|
||||
} else if (isBlob(cachedFileContent)) {
|
||||
// Create blob URL for binary content
|
||||
const url = URL.createObjectURL(cachedFileContent);
|
||||
console.log(`[FILE VIEWER] Created blob URL: ${url} for ${selectedFilePath}`);
|
||||
setBlobUrlForRenderer(url);
|
||||
setTextContentForRenderer(null);
|
||||
} else {
|
||||
// Unknown content type
|
||||
console.warn(`[FILE VIEWER] Unknown content type for: ${selectedFilePath}`, typeof cachedFileContent);
|
||||
setTextContentForRenderer(null);
|
||||
setBlobUrlForRenderer(null);
|
||||
setContentError('Unknown content type received.');
|
||||
}
|
||||
}
|
||||
}, [selectedFilePath, cachedFileContent, isCachedFileLoading, cachedFileError]);
|
||||
|
||||
// Modify the cleanup effect to respect active downloads
|
||||
useEffect(() => {
|
||||
|
@ -720,7 +505,7 @@ export function FileViewerModal({
|
|||
};
|
||||
}, [blobUrlForRenderer, isDownloading]);
|
||||
|
||||
// Modify handleOpenChange to respect active downloads
|
||||
// Handle modal close
|
||||
const handleOpenChange = useCallback(
|
||||
(open: boolean) => {
|
||||
if (!open) {
|
||||
|
@ -734,7 +519,7 @@ export function FileViewerModal({
|
|||
|
||||
clearSelectedFile();
|
||||
setCurrentPath('/workspace');
|
||||
setFiles([]);
|
||||
// React Query will handle clearing the files data
|
||||
setInitialPathProcessed(false);
|
||||
setIsInitialLoad(true);
|
||||
}
|
||||
|
@ -1054,9 +839,8 @@ export function FileViewerModal({
|
|||
throw new Error(error || 'Upload failed');
|
||||
}
|
||||
|
||||
// Reload the file list
|
||||
const filesData = await listSandboxFiles(sandboxId, currentPath);
|
||||
setFiles(filesData);
|
||||
// Reload the file list using React Query
|
||||
await refetchFiles();
|
||||
|
||||
toast.success(`Uploaded: ${file.name}`);
|
||||
} catch (error) {
|
||||
|
@ -1069,7 +853,7 @@ export function FileViewerModal({
|
|||
if (event.target) event.target.value = '';
|
||||
}
|
||||
},
|
||||
[currentPath, sandboxId],
|
||||
[currentPath, sandboxId, refetchFiles],
|
||||
);
|
||||
|
||||
// --- Render --- //
|
||||
|
@ -1141,7 +925,7 @@ export function FileViewerModal({
|
|||
variant="outline"
|
||||
size="sm"
|
||||
onClick={handleDownload}
|
||||
disabled={isDownloading || isLoadingContent}
|
||||
disabled={isDownloading || isCachedFileLoading}
|
||||
className="h-8 gap-1"
|
||||
>
|
||||
{isDownloading ? (
|
||||
|
@ -1161,7 +945,7 @@ export function FileViewerModal({
|
|||
size="sm"
|
||||
disabled={
|
||||
isExportingPdf ||
|
||||
isLoadingContent ||
|
||||
isCachedFileLoading ||
|
||||
contentError !== null
|
||||
}
|
||||
className="h-8 gap-1"
|
||||
|
@ -1226,7 +1010,7 @@ export function FileViewerModal({
|
|||
{selectedFilePath ? (
|
||||
/* File Viewer */
|
||||
<div className="h-full w-full overflow-auto">
|
||||
{isLoadingContent ? (
|
||||
{isCachedFileLoading ? (
|
||||
<div className="h-full w-full flex flex-col items-center justify-center">
|
||||
<Loader className="h-8 w-8 animate-spin text-primary mb-3" />
|
||||
<p className="text-sm text-muted-foreground">
|
||||
|
@ -1268,7 +1052,6 @@ export function FileViewerModal({
|
|||
<Button
|
||||
onClick={() => {
|
||||
setContentError(null);
|
||||
setIsLoadingContent(true);
|
||||
openFile({
|
||||
path: selectedFilePath,
|
||||
name: selectedFilePath.split('/').pop() || '',
|
||||
|
@ -1293,19 +1076,41 @@ export function FileViewerModal({
|
|||
</div>
|
||||
) : (
|
||||
<div className="h-full w-full relative">
|
||||
<FileRenderer
|
||||
key={selectedFilePath}
|
||||
content={textContentForRenderer}
|
||||
binaryUrl={blobUrlForRenderer}
|
||||
fileName={selectedFilePath}
|
||||
className="h-full w-full"
|
||||
project={projectWithSandbox}
|
||||
markdownRef={
|
||||
isMarkdownFile(selectedFilePath) ? markdownRef : undefined
|
||||
{(() => {
|
||||
// Safety check: don't render text content for binary files
|
||||
const isImageFile = FileCache.isImageFile(selectedFilePath);
|
||||
const isPdfFile = FileCache.isPdfFile(selectedFilePath);
|
||||
const extension = selectedFilePath?.split('.').pop()?.toLowerCase();
|
||||
const isOfficeFile = ['xlsx', 'xls', 'docx', 'doc', 'pptx', 'ppt'].includes(extension || '');
|
||||
const isBinaryFile = isImageFile || isPdfFile || isOfficeFile;
|
||||
|
||||
// For binary files, only render if we have a blob URL
|
||||
if (isBinaryFile && !blobUrlForRenderer) {
|
||||
return (
|
||||
<div className="h-full w-full flex items-center justify-center">
|
||||
<div className="text-sm text-muted-foreground">
|
||||
Loading {isPdfFile ? 'PDF' : isImageFile ? 'image' : 'file'}...
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
onDownload={handleDownload}
|
||||
isDownloading={isDownloading}
|
||||
/>
|
||||
|
||||
return (
|
||||
<FileRenderer
|
||||
key={selectedFilePath}
|
||||
content={isBinaryFile ? null : textContentForRenderer}
|
||||
binaryUrl={blobUrlForRenderer}
|
||||
fileName={selectedFilePath}
|
||||
className="h-full w-full"
|
||||
project={projectWithSandbox}
|
||||
markdownRef={
|
||||
isMarkdownFile(selectedFilePath) ? markdownRef : undefined
|
||||
}
|
||||
onDownload={handleDownload}
|
||||
isDownloading={isDownloading}
|
||||
/>
|
||||
);
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
// Core React Query file hooks
|
||||
export {
|
||||
useFileContentQuery,
|
||||
useDirectoryQuery,
|
||||
useFilePreloader,
|
||||
useCachedFile,
|
||||
fileQueryKeys,
|
||||
FileCache,
|
||||
} from './use-file-queries';
|
||||
|
||||
// Specialized content hooks
|
||||
export { useFileContent } from './use-file-content';
|
||||
export { useImageContent } from './use-image-content';
|
||||
|
||||
// File mutation hooks
|
||||
export {
|
||||
useFileUpload,
|
||||
useFileDelete,
|
||||
useFileCreate,
|
||||
} from './use-file-mutations';
|
||||
|
||||
// Utility functions for compatibility
|
||||
export {
|
||||
getCachedFile,
|
||||
fetchFileContent,
|
||||
} from './use-file-queries';
|
|
@ -0,0 +1,21 @@
|
|||
import { useFileContentQuery } from './use-file-queries';
|
||||
|
||||
/**
|
||||
* Hook for fetching file content with React Query
|
||||
* Replaces the existing useFileContent hook
|
||||
* Now auto-detects content type for proper caching consistency
|
||||
*/
|
||||
export function useFileContent(
|
||||
sandboxId?: string,
|
||||
filePath?: string,
|
||||
options: {
|
||||
enabled?: boolean;
|
||||
staleTime?: number;
|
||||
} = {}
|
||||
) {
|
||||
return useFileContentQuery(sandboxId, filePath, {
|
||||
// Auto-detect content type for consistency across all hooks
|
||||
enabled: options.enabled,
|
||||
staleTime: options.staleTime,
|
||||
});
|
||||
}
|
|
@ -0,0 +1,250 @@
|
|||
import { useMutation, useQueryClient } from '@tanstack/react-query';
|
||||
import { useAuth } from '@/components/AuthProvider';
|
||||
import { fileQueryKeys } from './use-file-queries';
|
||||
import { FileCache } from '@/hooks/use-cached-file';
|
||||
import { toast } from 'sonner';
|
||||
|
||||
// Import the normalizePath function from use-file-queries
|
||||
function normalizePath(path: string): string {
|
||||
if (!path) return '/';
|
||||
|
||||
// Remove any leading/trailing whitespace
|
||||
path = path.trim();
|
||||
|
||||
// Ensure path starts with /
|
||||
if (!path.startsWith('/')) {
|
||||
path = '/' + path;
|
||||
}
|
||||
|
||||
// Remove duplicate slashes and normalize
|
||||
path = path.replace(/\/+/g, '/');
|
||||
|
||||
// Remove trailing slash unless it's the root
|
||||
if (path.length > 1 && path.endsWith('/')) {
|
||||
path = path.slice(0, -1);
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
const API_URL = process.env.NEXT_PUBLIC_BACKEND_URL || '';
|
||||
|
||||
/**
|
||||
* Hook for uploading files
|
||||
*/
|
||||
export function useFileUpload() {
|
||||
const { session } = useAuth();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
sandboxId,
|
||||
file,
|
||||
targetPath,
|
||||
}: {
|
||||
sandboxId: string;
|
||||
file: File;
|
||||
targetPath: string;
|
||||
}) => {
|
||||
if (!session?.access_token) {
|
||||
throw new Error('No access token available');
|
||||
}
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
formData.append('path', targetPath);
|
||||
|
||||
const response = await fetch(`${API_URL}/sandboxes/${sandboxId}/files`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${session.access_token}`,
|
||||
},
|
||||
body: formData,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.text();
|
||||
throw new Error(error || 'Upload failed');
|
||||
}
|
||||
|
||||
return await response.json();
|
||||
},
|
||||
onSuccess: (_, variables) => {
|
||||
// Invalidate directory listing for the target directory
|
||||
const directoryPath = variables.targetPath.substring(0, variables.targetPath.lastIndexOf('/'));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directory(variables.sandboxId, directoryPath),
|
||||
});
|
||||
|
||||
// Also invalidate all file listings to be safe
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directories(),
|
||||
});
|
||||
|
||||
toast.success(`Uploaded: ${variables.file.name}`);
|
||||
},
|
||||
onError: (error) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
toast.error(`Upload failed: ${message}`);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for deleting files
|
||||
*/
|
||||
export function useFileDelete() {
|
||||
const { session } = useAuth();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
sandboxId,
|
||||
filePath,
|
||||
}: {
|
||||
sandboxId: string;
|
||||
filePath: string;
|
||||
}) => {
|
||||
if (!session?.access_token) {
|
||||
throw new Error('No access token available');
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
`${API_URL}/sandboxes/${sandboxId}/files?path=${encodeURIComponent(filePath)}`,
|
||||
{
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
Authorization: `Bearer ${session.access_token}`,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.text();
|
||||
throw new Error(error || 'Delete failed');
|
||||
}
|
||||
|
||||
return await response.json();
|
||||
},
|
||||
onSuccess: (_, variables) => {
|
||||
// Invalidate directory listing for the parent directory
|
||||
const directoryPath = variables.filePath.substring(0, variables.filePath.lastIndexOf('/'));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directory(variables.sandboxId, directoryPath),
|
||||
});
|
||||
|
||||
// Invalidate all directory listings to be safe
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directories(),
|
||||
});
|
||||
|
||||
// Invalidate all file content queries for this specific file
|
||||
// This covers all content types (text, blob, json) for the deleted file
|
||||
queryClient.invalidateQueries({
|
||||
predicate: (query) => {
|
||||
const queryKey = query.queryKey;
|
||||
// Check if this is a file content query for our sandbox and file
|
||||
return (
|
||||
queryKey.length >= 4 &&
|
||||
queryKey[0] === 'files' &&
|
||||
queryKey[1] === 'content' &&
|
||||
queryKey[2] === variables.sandboxId &&
|
||||
queryKey[3] === variables.filePath
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
// Also remove the specific queries from cache completely
|
||||
['text', 'blob', 'json'].forEach(contentType => {
|
||||
const queryKey = fileQueryKeys.content(variables.sandboxId, variables.filePath, contentType);
|
||||
queryClient.removeQueries({ queryKey });
|
||||
});
|
||||
|
||||
// Clean up legacy FileCache entries for this file
|
||||
const normalizedPath = normalizePath(variables.filePath);
|
||||
const legacyCacheKeys = [
|
||||
`${variables.sandboxId}:${normalizedPath}:blob`,
|
||||
`${variables.sandboxId}:${normalizedPath}:text`,
|
||||
`${variables.sandboxId}:${normalizedPath}:json`,
|
||||
`${variables.sandboxId}:${normalizedPath}`,
|
||||
// Also try without leading slash for compatibility
|
||||
`${variables.sandboxId}:${normalizedPath.substring(1)}:blob`,
|
||||
`${variables.sandboxId}:${normalizedPath.substring(1)}:text`,
|
||||
`${variables.sandboxId}:${normalizedPath.substring(1)}:json`,
|
||||
`${variables.sandboxId}:${normalizedPath.substring(1)}`,
|
||||
];
|
||||
|
||||
legacyCacheKeys.forEach(key => {
|
||||
const cachedEntry = (FileCache as any).cache?.get(key);
|
||||
if (cachedEntry) {
|
||||
// If it's a blob URL, revoke it before deleting
|
||||
if (cachedEntry.type === 'url' && typeof cachedEntry.content === 'string' && cachedEntry.content.startsWith('blob:')) {
|
||||
console.log(`[FILE DELETE] Revoking blob URL for deleted file: ${cachedEntry.content}`);
|
||||
URL.revokeObjectURL(cachedEntry.content);
|
||||
}
|
||||
FileCache.delete(key);
|
||||
}
|
||||
});
|
||||
},
|
||||
onError: (error) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
toast.error(`Delete failed: ${message}`);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for creating files
|
||||
*/
|
||||
export function useFileCreate() {
|
||||
const { session } = useAuth();
|
||||
const queryClient = useQueryClient();
|
||||
|
||||
return useMutation({
|
||||
mutationFn: async ({
|
||||
sandboxId,
|
||||
filePath,
|
||||
content,
|
||||
}: {
|
||||
sandboxId: string;
|
||||
filePath: string;
|
||||
content: string;
|
||||
}) => {
|
||||
if (!session?.access_token) {
|
||||
throw new Error('No access token available');
|
||||
}
|
||||
|
||||
const response = await fetch(`${API_URL}/sandboxes/${sandboxId}/files`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': `Bearer ${session.access_token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
path: filePath,
|
||||
content,
|
||||
}),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.text();
|
||||
throw new Error(error || 'Create failed');
|
||||
}
|
||||
|
||||
return await response.json();
|
||||
},
|
||||
onSuccess: (_, variables) => {
|
||||
// Invalidate directory listing for the parent directory
|
||||
const directoryPath = variables.filePath.substring(0, variables.filePath.lastIndexOf('/'));
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: fileQueryKeys.directory(variables.sandboxId, directoryPath),
|
||||
});
|
||||
|
||||
toast.success('File created successfully');
|
||||
},
|
||||
onError: (error) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
toast.error(`Create failed: ${message}`);
|
||||
},
|
||||
});
|
||||
}
|
|
@ -0,0 +1,396 @@
|
|||
import React from 'react';
import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useAuth } from '@/components/AuthProvider';
import { listSandboxFiles, type FileInfo } from '@/lib/api';

// Re-export FileCache utilities for compatibility
export { FileCache } from '@/hooks/use-cached-file';

/**
 * Normalize a file path to ensure consistent caching
 */
function normalizePath(path: string): string {
  if (!path) return '/workspace';

  // Ensure path starts with /workspace
  if (!path.startsWith('/workspace')) {
    path = `/workspace/${path.startsWith('/') ? path.substring(1) : path}`;
  }

  // Handle Unicode escape sequences
  try {
    path = path.replace(/\\u([0-9a-fA-F]{4})/g, (_, hexCode) => {
      return String.fromCharCode(parseInt(hexCode, 16));
    });
  } catch (e) {
    console.error('Error processing Unicode escapes in path:', e);
  }

  return path;
}

/**
 * Generate React Query keys for file operations
 */
export const fileQueryKeys = {
  all: ['files'] as const,
  contents: () => [...fileQueryKeys.all, 'content'] as const,
  content: (sandboxId: string, path: string, contentType: string) =>
    [...fileQueryKeys.contents(), sandboxId, normalizePath(path), contentType] as const,
  directories: () => [...fileQueryKeys.all, 'directory'] as const,
  directory: (sandboxId: string, path: string) =>
    [...fileQueryKeys.directories(), sandboxId, normalizePath(path)] as const,
};

/**
 * Determine content type from file path
 */
function getContentTypeFromPath(path: string): 'text' | 'blob' | 'json' {
  if (!path) return 'text';

  const ext = path.toLowerCase().split('.').pop() || '';

  // Binary file extensions
  if (/^(xlsx|xls|docx|doc|pptx|ppt|pdf|png|jpg|jpeg|gif|bmp|webp|svg|ico|zip|exe|dll|bin|dat|obj|o|so|dylib|mp3|mp4|avi|mov|wmv|flv|wav|ogg)$/.test(ext)) {
    return 'blob';
  }

  // JSON files
  if (ext === 'json') return 'json';

  // Default to text
  return 'text';
}

/**
 * Check if a file is an image
 */
function isImageFile(path: string): boolean {
  const ext = path.split('.').pop()?.toLowerCase() || '';
  return ['png', 'jpg', 'jpeg', 'gif', 'svg', 'webp', 'bmp', 'ico'].includes(ext);
}

/**
 * Check if a file is a PDF
 */
function isPdfFile(path: string): boolean {
  return path.toLowerCase().endsWith('.pdf');
}

/**
 * Get MIME type from file path
 */
function getMimeTypeFromPath(path: string): string {
  const ext = path.split('.').pop()?.toLowerCase() || '';

  switch (ext) {
    case 'xlsx': return 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
    case 'xls': return 'application/vnd.ms-excel';
    case 'docx': return 'application/vnd.openxmlformats-officedocument.wordprocessingml.document';
    case 'doc': return 'application/msword';
    case 'pptx': return 'application/vnd.openxmlformats-officedocument.presentationml.presentation';
    case 'ppt': return 'application/vnd.ms-powerpoint';
    case 'pdf': return 'application/pdf';
    case 'png': return 'image/png';
    case 'jpg':
    case 'jpeg': return 'image/jpeg';
    case 'gif': return 'image/gif';
    case 'svg': return 'image/svg+xml';
    case 'zip': return 'application/zip';
    default: return 'application/octet-stream';
  }
}

/**
 * Fetch file content with proper error handling and content type detection
 */
export async function fetchFileContent(
  sandboxId: string,
  filePath: string,
  contentType: 'text' | 'blob' | 'json',
  token: string
): Promise<string | Blob | any> {
  const normalizedPath = normalizePath(filePath);

  const url = new URL(`${process.env.NEXT_PUBLIC_BACKEND_URL}/sandboxes/${sandboxId}/files/content`);
  url.searchParams.append('path', normalizedPath);

  console.log(`[FILE QUERY] Fetching ${contentType} content for: ${normalizedPath}`);

  const response = await fetch(url.toString(), {
    headers: {
      'Authorization': `Bearer ${token}`,
    },
  });

  if (!response.ok) {
    const errorText = await response.text();
    throw new Error(`Failed to fetch file: ${response.status} ${errorText}`);
  }

  // Handle content based on type
  switch (contentType) {
    case 'json':
      return await response.json();
    case 'blob': {
      const blob = await response.blob();

      // Ensure correct MIME type for known file types
      const expectedMimeType = getMimeTypeFromPath(filePath);
      if (expectedMimeType !== blob.type && expectedMimeType !== 'application/octet-stream') {
        console.log(`[FILE QUERY] Correcting MIME type for ${filePath}: ${blob.type} → ${expectedMimeType}`);
        const correctedBlob = new Blob([blob], { type: expectedMimeType });

        // Additional validation for images
        if (isImageFile(filePath)) {
          console.log(`[FILE QUERY] Created image blob:`, {
            originalType: blob.type,
            correctedType: correctedBlob.type,
            size: correctedBlob.size,
            filePath
          });
        }

        return correctedBlob;
      }

      // Log blob details for debugging
      if (isImageFile(filePath)) {
        console.log(`[FILE QUERY] Image blob details:`, {
          type: blob.type,
          size: blob.size,
          filePath
        });
      }

      return blob;
    }
    case 'text':
    default:
      return await response.text();
  }
}
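A brief sketch, not part of the commit, of calling fetchFileContent directly outside of a React Query hook; the function name, sandbox ID, path, and token handling below are assumptions.

// Hypothetical direct call — the path and token source are assumptions
async function downloadReport(sandboxId: string, accessToken: string) {
  const blob = await fetchFileContent(
    sandboxId,
    '/workspace/report.pdf',
    'blob',
    accessToken,
  );
  // For 'blob' requests the function returns a Blob with its MIME type corrected (application/pdf here)
  return URL.createObjectURL(blob as Blob);
}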

/**
 * Legacy compatibility function for getCachedFile
 */
export async function getCachedFile(
  sandboxId: string,
  filePath: string,
  options: {
    contentType?: 'text' | 'blob' | 'json';
    force?: boolean;
    token?: string;
  } = {}
): Promise<any> {
  const normalizedPath = normalizePath(filePath);
  const detectedContentType = getContentTypeFromPath(filePath);
  const effectiveContentType = options.contentType || detectedContentType;

  if (!options.token) {
    throw new Error('Authentication token required');
  }

  return fetchFileContent(sandboxId, normalizedPath, effectiveContentType, options.token);
}

/**
 * Hook for fetching file content with React Query
 * Returns raw content - components create blob URLs as needed
 */
export function useFileContentQuery(
  sandboxId?: string,
  filePath?: string,
  options: {
    contentType?: 'text' | 'blob' | 'json';
    enabled?: boolean;
    staleTime?: number;
    gcTime?: number;
  } = {}
) {
  const { session } = useAuth();

  const normalizedPath = filePath ? normalizePath(filePath) : null;
  const detectedContentType = filePath ? getContentTypeFromPath(filePath) : 'text';
  const effectiveContentType = options.contentType || detectedContentType;

  const queryResult = useQuery({
    queryKey: sandboxId && normalizedPath ?
      fileQueryKeys.content(sandboxId, normalizedPath, effectiveContentType) : [],
    queryFn: async () => {
      if (!sandboxId || !normalizedPath || !session?.access_token) {
        throw new Error('Missing required parameters');
      }

      return fetchFileContent(sandboxId, normalizedPath, effectiveContentType, session.access_token);
    },
    enabled: Boolean(sandboxId && normalizedPath && session?.access_token && (options.enabled !== false)),
    staleTime: options.staleTime || (effectiveContentType === 'blob' ? 5 * 60 * 1000 : 2 * 60 * 1000), // 5min for blobs, 2min for text
    gcTime: options.gcTime || 10 * 60 * 1000, // 10 minutes
    retry: (failureCount, error: any) => {
      // Don't retry on auth errors
      if (error?.message?.includes('401') || error?.message?.includes('403')) {
        return false;
      }
      return failureCount < 3;
    },
  });

  const queryClient = useQueryClient();

  // Refresh function
  const refreshCache = React.useCallback(async () => {
    if (!sandboxId || !filePath) return null;

    const normalizedPath = normalizePath(filePath);
    const queryKey = fileQueryKeys.content(sandboxId, normalizedPath, effectiveContentType);

    await queryClient.invalidateQueries({ queryKey });
    const newData = queryClient.getQueryData(queryKey);
    return newData || null;
  }, [sandboxId, filePath, effectiveContentType, queryClient]);

  return {
    ...queryResult,
    refreshCache,
    // Legacy compatibility methods
    getCachedFile: () => Promise.resolve(queryResult.data),
    getFromCache: () => queryResult.data,
    cache: new Map(), // Legacy compatibility - empty map
  };
}
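A minimal usage sketch, not part of the commit, showing how a component might read text content through useFileContentQuery; the component name, path prop, and markup are assumptions.

// Hypothetical usage — component name and rendering are assumptions
function FileViewer({ sandboxId, path }: { sandboxId: string; path: string }) {
  const { data, isLoading, error } = useFileContentQuery(sandboxId, path, {
    contentType: 'text',
  });

  if (isLoading) return <p>Loading…</p>;
  if (error) return <p>Failed to load {path}</p>;
  return <pre>{typeof data === 'string' ? data : ''}</pre>;
}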

/**
 * Hook for fetching directory listings
 */
export function useDirectoryQuery(
  sandboxId?: string,
  directoryPath?: string,
  options: {
    enabled?: boolean;
    staleTime?: number;
  } = {}
) {
  const { session } = useAuth();

  const normalizedPath = directoryPath ? normalizePath(directoryPath) : null;

  return useQuery({
    queryKey: sandboxId && normalizedPath ?
      fileQueryKeys.directory(sandboxId, normalizedPath) : [],
    queryFn: async (): Promise<FileInfo[]> => {
      if (!sandboxId || !normalizedPath || !session?.access_token) {
        throw new Error('Missing required parameters');
      }

      console.log(`[FILE QUERY] Fetching directory listing for: ${normalizedPath}`);
      return await listSandboxFiles(sandboxId, normalizedPath);
    },
    enabled: Boolean(sandboxId && normalizedPath && session?.access_token && (options.enabled !== false)),
    staleTime: options.staleTime || 30 * 1000, // 30 seconds for directory listings
    gcTime: 5 * 60 * 1000, // 5 minutes
    retry: 2,
  });
}
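A minimal usage sketch, not part of the commit, for useDirectoryQuery; the component name, the assumption that FileInfo exposes name and path fields, and the markup are illustrative.

// Hypothetical usage — directory path and FileInfo field names are assumptions
function DirectoryListing({ sandboxId }: { sandboxId: string }) {
  const { data: files = [] } = useDirectoryQuery(sandboxId, '/workspace');

  return (
    <ul>
      {files.map((file) => (
        <li key={file.path}>{file.name}</li>
      ))}
    </ul>
  );
}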

/**
 * Hook for preloading multiple files
 */
export function useFilePreloader() {
  const queryClient = useQueryClient();
  const { session } = useAuth();

  const preloadFiles = React.useCallback(async (
    sandboxId: string,
    filePaths: string[]
  ): Promise<void> => {
    if (!session?.access_token) {
      console.warn('Cannot preload files: No authentication token available');
      return;
    }

    const uniquePaths = [...new Set(filePaths)];
    console.log(`[FILE QUERY] Preloading ${uniquePaths.length} files for sandbox ${sandboxId}`);

    const preloadPromises = uniquePaths.map(async (path) => {
      const normalizedPath = normalizePath(path);
      const contentType = getContentTypeFromPath(path);

      // Check if already cached
      const queryKey = fileQueryKeys.content(sandboxId, normalizedPath, contentType);
      const existingData = queryClient.getQueryData(queryKey);

      if (existingData) {
        console.log(`[FILE QUERY] Already cached: ${normalizedPath}`);
        return existingData;
      }

      // Prefetch the file
      return queryClient.prefetchQuery({
        queryKey,
        queryFn: () => fetchFileContent(sandboxId, normalizedPath, contentType, session.access_token!),
        staleTime: contentType === 'blob' ? 5 * 60 * 1000 : 2 * 60 * 1000,
      });
    });

    await Promise.all(preloadPromises);
    console.log(`[FILE QUERY] Completed preloading ${uniquePaths.length} files`);
  }, [queryClient, session?.access_token]);

  return { preloadFiles };
}
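A minimal usage sketch, not part of the commit, for useFilePreloader; the component name, attachment-list prop, and effect trigger are assumptions.

// Hypothetical usage — the attachment list and effect trigger are assumptions
function MessageAttachments({ sandboxId, attachments }: { sandboxId: string; attachments: string[] }) {
  const { preloadFiles } = useFilePreloader();

  React.useEffect(() => {
    // Warm the React Query cache so attachment previews open instantly
    void preloadFiles(sandboxId, attachments);
  }, [sandboxId, attachments, preloadFiles]);

  return null;
}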

/**
 * Compatibility hook that mimics the old useCachedFile API
 */
export function useCachedFile<T = string>(
  sandboxId?: string,
  filePath?: string,
  options: {
    expiration?: number;
    contentType?: 'json' | 'text' | 'blob' | 'arrayBuffer' | 'base64';
    processFn?: (data: any) => T;
  } = {}
) {
  // Map old contentType values to new ones
  const mappedContentType = React.useMemo(() => {
    switch (options.contentType) {
      case 'json': return 'json';
      case 'blob':
      case 'arrayBuffer':
      case 'base64': return 'blob';
      case 'text':
      default: return 'text';
    }
  }, [options.contentType]);

  const query = useFileContentQuery(sandboxId, filePath, {
    contentType: mappedContentType,
    staleTime: options.expiration,
  });

  // Process data if processFn is provided
  const processedData = React.useMemo(() => {
    if (!query.data || !options.processFn) {
      return query.data as T;
    }

    try {
      return options.processFn(query.data);
    } catch (error) {
      console.error('Error processing file data:', error);
      return null;
    }
  }, [query.data, options.processFn]);

  return {
    data: processedData,
    isLoading: query.isLoading,
    error: query.error,
    refreshCache: query.refreshCache,
    // Legacy compatibility methods
    getCachedFile: () => Promise.resolve(processedData),
    getFromCache: () => processedData,
    cache: new Map(), // Legacy compatibility - empty map
  };
}
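A minimal usage sketch, not part of the commit, for the compatibility hook; the component name, JSON path, generic type, and processFn below are assumptions.

// Hypothetical usage — the path, generic type, and processFn are assumptions
function ConfigPanel({ sandboxId }: { sandboxId: string }) {
  const { data: config } = useCachedFile<Record<string, unknown>>(
    sandboxId,
    '/workspace/config.json',
    {
      contentType: 'json',
      processFn: (raw) => (typeof raw === 'string' ? JSON.parse(raw) : raw),
    },
  );

  return <pre>{JSON.stringify(config ?? {}, null, 2)}</pre>;
}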
@@ -0,0 +1,57 @@
import React from 'react';
import { useFileContentQuery } from './use-file-queries';

/**
 * Hook for fetching image content and creating blob URLs
 * Simplified to avoid reference counting issues in React StrictMode
 */
export function useImageContent(
  sandboxId?: string,
  filePath?: string,
  options: {
    enabled?: boolean;
    staleTime?: number;
  } = {}
) {
  const [blobUrl, setBlobUrl] = React.useState<string | null>(null);

  // Get the blob data from React Query cache
  const {
    data: blobData,
    isLoading,
    error,
  } = useFileContentQuery(sandboxId, filePath, {
    contentType: 'blob',
    enabled: options.enabled,
    staleTime: options.staleTime || 5 * 60 * 1000, // 5 minutes default
  });

  // Create blob URL when we have blob data and clean up properly
  React.useEffect(() => {
    if (blobData instanceof Blob) {
      console.log(`[IMAGE CONTENT] Creating blob URL for ${filePath}`, {
        size: blobData.size,
        type: blobData.type
      });

      const url = URL.createObjectURL(blobData);
      setBlobUrl(url);

      // Cleanup function to revoke the blob URL
      return () => {
        console.log(`[IMAGE CONTENT] Cleaning up blob URL for ${filePath}: ${url}`);
        URL.revokeObjectURL(url);
        setBlobUrl(null);
      };
    } else {
      setBlobUrl(null);
      return;
    }
  }, [blobData, filePath]);

  return {
    data: blobUrl,
    isLoading,
    error,
  };
}
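A minimal usage sketch, not part of the commit, for useImageContent; the component name and alt text are assumptions.

// Hypothetical usage — component name and alt text are assumptions
function SandboxImage({ sandboxId, path }: { sandboxId: string; path: string }) {
  const { data: src, isLoading } = useImageContent(sandboxId, path);

  if (isLoading || !src) return null;
  // src is an object URL that the hook revokes automatically on unmount
  return <img src={src} alt={path} />;
}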