From 276addb10b77a025504dbd2a79f8aa47128e7c39 Mon Sep 17 00:00:00 2001
From: Frank An
Date: Tue, 23 Sep 2025 14:03:55 +0800
Subject: [PATCH] fix: update llm.py file path reference

Update GitHub link path for llm.py file in custom model dialog from
backend/services/llm.py to correct backend/core/services/llm.py
---
 .../src/components/thread/chat-input/custom-model-dialog.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/src/components/thread/chat-input/custom-model-dialog.tsx b/frontend/src/components/thread/chat-input/custom-model-dialog.tsx
index 16bfc7e2..53051a81 100644
--- a/frontend/src/components/thread/chat-input/custom-model-dialog.tsx
+++ b/frontend/src/components/thread/chat-input/custom-model-dialog.tsx
@@ -77,7 +77,7 @@ export const CustomModelDialog: React.FC = ({
             {mode === 'add' ? 'Add Custom Model' : 'Edit Custom Model'}
-            Kortix Suna uses LiteLLM under the hood, which makes it compatible with over 100 models. You can easily choose any OpenRouter model by prefixing it with openrouter/ and it should work out of the box. If you want to use other models besides OpenRouter, you might have to modify the llm.py, set the correct environment variables, and rebuild your self-hosted Docker container.
+            Kortix Suna uses LiteLLM under the hood, which makes it compatible with over 100 models. You can easily choose any OpenRouter model by prefixing it with openrouter/ and it should work out of the box. If you want to use other models besides OpenRouter, you might have to modify the llm.py, set the correct environment variables, and rebuild your self-hosted Docker container.
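
For context on the openrouter/ prefix the dialog text describes: below is a minimal sketch of how such a model name could be exercised through LiteLLM's Python completion API. It is illustrative only and not part of the patch; the model id and prompt are made up, and it assumes OPENROUTER_API_KEY is the environment variable LiteLLM reads for OpenRouter credentials.

    import os
    import litellm

    # Assumption: LiteLLM picks up OpenRouter credentials from this variable.
    # The key below is a placeholder, not a real credential.
    os.environ.setdefault("OPENROUTER_API_KEY", "sk-or-placeholder")

    # Prefixing the model name with "openrouter/" tells LiteLLM to route the
    # request through OpenRouter; the specific model id here is illustrative.
    response = litellm.completion(
        model="openrouter/anthropic/claude-3.5-sonnet",
        messages=[{"role": "user", "content": "Hello from a self-hosted instance"}],
    )

    # LiteLLM returns an OpenAI-style response object.
    print(response.choices[0].message.content)

Models outside OpenRouter would instead go through whatever provider configuration backend/core/services/llm.py sets up, which is why the dialog points users at that file.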