chore: remove name firecrawl

Krishav Raj Singh 2025-08-22 02:15:15 +05:30
parent f093b859ab
commit 00d26a7e18
8 changed files with 7 additions and 57 deletions

View File

@@ -39,8 +39,7 @@ Before contributing, ensure you have access to:
 - Supabase project (database and auth)
 - LLM provider API key (OpenAI, Anthropic, or OpenRouter)
 - Daytona account (for agent execution)
-- Tavily API key (for search)
-- Firecrawl API key (for web scraping)
+- Tavily API key (for search and web scraping)
 **Optional:**

View File

@@ -175,7 +175,7 @@ Kortix can be self-hosted on your own infrastructure using our comprehensive set
 - **⚡ Performance**: Redis configuration for caching and session management
 - **🛡️ Security**: Daytona setup for secure agent execution environments
 - **🤖 AI Integration**: LLM providers (Anthropic, OpenAI, OpenRouter, etc.)
-- **🌐 Web Capabilities**: Search and scraping (Tavily, Firecrawl)
+- **🌐 Web Capabilities**: Search and scraping (Tavily)
 - **📋 Workflows**: QStash for background job processing
 - **🔗 Automation**: Webhook handling for automated tasks
 - **📊 Data Sources**: Optional RapidAPI integrations

View File

@@ -34,10 +34,6 @@ RAPID_API_KEY=
 # WEB SEARCH
 TAVILY_API_KEY=
-# WEB SCRAPE
-FIRECRAWL_API_KEY=
-FIRECRAWL_URL=
 # Sandbox container provider:
 DAYTONA_API_KEY=
 DAYTONA_SERVER_URL=

View File

@@ -88,8 +88,6 @@ MODEL_TO_USE=openrouter/moonshotai/kimi-k2
 # Search and Web Scraping
 TAVILY_API_KEY=your-tavily-key
-FIRECRAWL_API_KEY=your-firecrawl-key
-FIRECRAWL_URL=https://api.firecrawl.dev
 # Agent Execution
 DAYTONA_API_KEY=your-daytona-key

View File

@@ -266,8 +266,6 @@ class Configuration:
     TAVILY_API_KEY: str
     RAPID_API_KEY: str
     CLOUDFLARE_API_TOKEN: Optional[str] = None
-    FIRECRAWL_API_KEY: str
-    FIRECRAWL_URL: Optional[str] = "https://api.firecrawl.dev"
     # Stripe configuration
     STRIPE_SECRET_KEY: Optional[str] = None
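After this hunk, Tavily is the only credential left in the configuration's search/scrape section. A minimal standalone sketch of the trimmed block and how callers might guard on it (the imports, the dataclass wrapper, the example values, and the `search_ready` check are illustrative assumptions; only the field names and defaults come from the diff above):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Configuration:
    # Search and web scraping now share a single provider key.
    TAVILY_API_KEY: str
    RAPID_API_KEY: str
    CLOUDFLARE_API_TOKEN: Optional[str] = None
    # Stripe configuration
    STRIPE_SECRET_KEY: Optional[str] = None


# Hypothetical usage: code that previously checked both a Tavily and a
# Firecrawl key only needs the one credential now.
config = Configuration(TAVILY_API_KEY="tvly-example-key", RAPID_API_KEY="")
search_ready = bool(config.TAVILY_API_KEY)
print("search/scrape configured:", search_ready)
```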

View File

@@ -52,8 +52,7 @@ Obtain the following API keys:
 - **Search and Web Scraping**:
-  - [Tavily](https://tavily.com/) - For enhanced search capabilities
-  - [Firecrawl](https://firecrawl.dev/) - For web scraping capabilities
+  - [Tavily](https://tavily.com/) - For enhanced search capabilities and web scraping capabilities
 - **Agent Execution**:
   - [Daytona](https://app.daytona.io/) - For secure agent execution
@@ -164,13 +163,9 @@ GEMINI_API_KEY=your-gemini-api-key
 MORPH_API_KEY=
-# WEB SEARCH
+# WEB SEARCH AND SCRAPE
 TAVILY_API_KEY=your-tavily-key
-# WEB SCRAPE
-FIRECRAWL_API_KEY=your-firecrawl-key
-FIRECRAWL_URL=https://api.firecrawl.dev
 # Sandbox container provider
 DAYTONA_API_KEY=your-daytona-key
 DAYTONA_SERVER_URL=https://app.daytona.io/api

View File

@@ -4,7 +4,7 @@ export const AGENTPRESS_TOOL_DEFINITIONS: Record<string, { enabled: boolean; des
   'sb_files_tool': { enabled: true, description: 'Create, read, update, and delete files in the workspace with comprehensive file management', icon: '📁', color: 'bg-blue-100 dark:bg-blue-800/50' },
   'sb_deploy_tool': { enabled: true, description: 'Deploy applications and services with automated deployment capabilities', icon: '🚀', color: 'bg-green-100 dark:bg-green-800/50' },
   'sb_expose_tool': { enabled: true, description: 'Expose services and manage ports for application accessibility', icon: '🔌', color: 'bg-orange-100 dark:bg-orange-800/20' },
-  'web_search_tool': { enabled: true, description: 'Search the web using Tavily API and scrape webpages with Firecrawl for research', icon: '🔍', color: 'bg-yellow-100 dark:bg-yellow-800/50' },
+  'web_search_tool': { enabled: true, description: 'Search the web using Tavily API and scrape webpages for research', icon: '🔍', color: 'bg-yellow-100 dark:bg-yellow-800/50' },
   'sb_vision_tool': { enabled: true, description: 'Vision and image processing capabilities for visual content analysis', icon: '👁️', color: 'bg-pink-100 dark:bg-pink-800/50' },
   'sb_image_edit_tool': { enabled: true, description: 'Generate new images or edit existing images using OpenAI GPT Image 1', icon: '🎨', color: 'bg-purple-100 dark:bg-purple-800/50' },
   'sb_presentation_outline_tool': { enabled: false, description: 'Create structured presentation outlines with slide descriptions and speaker notes', icon: '📋', color: 'bg-purple-100 dark:bg-purple-800/50' },

View File

@@ -135,8 +135,6 @@ def load_existing_env_vars():
         },
         "search": {
             "TAVILY_API_KEY": backend_env.get("TAVILY_API_KEY", ""),
-            "FIRECRAWL_API_KEY": backend_env.get("FIRECRAWL_API_KEY", ""),
-            "FIRECRAWL_URL": backend_env.get("FIRECRAWL_URL", ""),
         },
         "rapidapi": {
             "RAPID_API_KEY": backend_env.get("RAPID_API_KEY", ""),
@@ -321,7 +319,6 @@ class SetupWizard:
         # Check Search APIs
         search_configured = (
             self.env_vars["search"]["TAVILY_API_KEY"]
-            and self.env_vars["search"]["FIRECRAWL_API_KEY"]
         )
         if search_configured:
             config_items.append(f"{Colors.GREEN}{Colors.ENDC} Search APIs")
@@ -814,9 +811,9 @@
                 "Found existing search API keys. Press Enter to keep current values or type new ones."
             )
         else:
-            print_info("Suna uses Tavily for search and Firecrawl for web scraping.")
+            print_info("Suna uses Tavily for search and web scraping.")
             print_info(
-                "Get a Tavily key at https://tavily.com and a Firecrawl key at https://firecrawl.dev"
+                "Get a Tavily key at https://tavily.com"
             )
         input("Press Enter to continue once you have your keys...")
@@ -826,42 +823,9 @@
             "Invalid API key.",
             default_value=self.env_vars["search"]["TAVILY_API_KEY"],
         )
-        self.env_vars["search"]["FIRECRAWL_API_KEY"] = self._get_input(
-            "Enter your Firecrawl API key: ",
-            validate_api_key,
-            "Invalid API key.",
-            default_value=self.env_vars["search"]["FIRECRAWL_API_KEY"],
-        )
-        # Handle Firecrawl URL configuration
-        current_url = self.env_vars["search"]["FIRECRAWL_URL"]
-        is_self_hosted_default = (
-            current_url and current_url != "https://api.firecrawl.dev"
-        )
-        if current_url:
-            prompt = f"Are you self-hosting Firecrawl? (y/N) [Current: {'y' if is_self_hosted_default else 'N'}]: "
-        else:
-            prompt = "Are you self-hosting Firecrawl? (y/N): "
-        response = input(prompt).lower().strip()
-        if not response and current_url:
-            # Use existing configuration
-            is_self_hosted = is_self_hosted_default
-        else:
-            is_self_hosted = response == "y"
-        if is_self_hosted:
-            self.env_vars["search"]["FIRECRAWL_URL"] = self._get_input(
-                "Enter your self-hosted Firecrawl URL: ",
-                validate_url,
-                "Invalid URL.",
-                default_value=(
-                    current_url if current_url != "https://api.firecrawl.dev" else ""
-                ),
-            )
-        else:
-            self.env_vars["search"]["FIRECRAWL_URL"] = "https://api.firecrawl.dev"
         print_success("Search and scraping keys saved.")
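With the Firecrawl branch removed, the wizard's search step collapses to a single Tavily prompt. A minimal, self-contained sketch of the remaining flow (the function name `collect_tavily_key` and the stubbed helpers are illustrative assumptions; the real `print_info`, `print_success`, `validate_api_key`, and `_get_input` helpers live in setup.py as shown in the diff above):

```python
# Minimal stand-ins for the wizard helpers referenced in the diff.
def print_info(msg: str) -> None:
    print(f"[i] {msg}")


def print_success(msg: str) -> None:
    print(f"[ok] {msg}")


def validate_api_key(value: str) -> bool:
    # The real validator is stricter; non-empty is enough for this sketch.
    return bool(value.strip())


def collect_tavily_key(existing: str = "") -> str:
    """Hypothetical simplified search step: only a Tavily key is collected."""
    if existing:
        print_info("Found existing search API keys. Press Enter to keep current values or type new ones.")
    else:
        print_info("Suna uses Tavily for search and web scraping.")
        print_info("Get a Tavily key at https://tavily.com")

    while True:
        value = input("Enter your Tavily API key: ").strip() or existing
        if validate_api_key(value):
            print_success("Search and scraping keys saved.")
            return value
        print_info("Invalid API key.")
```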