mirror of https://github.com/kortix-ai/suna.git

commit 2cf777cc4c (parent 5c750d6387): logger, docker, wip

@@ -20,5 +20,5 @@ ENV ENV_MODE="production"
 # Expose the port the app runs on
 EXPOSE 8000
 
-# Command to run the application with Uvicorn directly
+# 24 workers
 CMD ["gunicorn", "api:app", "--workers", "24", "--worker-class", "uvicorn.workers.UvicornWorker", "--bind", "0.0.0.0:8000", "--timeout", "600", "--graceful-timeout", "300", "--keep-alive", "250", "--max-requests", "0", "--max-requests-jitter", "0", "--forwarded-allow-ips", "*", "--worker-connections", "5000", "--worker-tmp-dir", "/dev/shm", "--preload"]
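
Note on the hard-coded 24 workers: a common Gunicorn sizing heuristic is (2 × CPU cores) + 1 rather than a fixed number. A minimal Python sketch of that heuristic (the formula is a general rule of thumb, not something this commit uses):

# Sketch: derive a Gunicorn worker count from the host's CPU count instead of hard-coding 24.
# The (2 * cores) + 1 formula is the usual Gunicorn rule of thumb; treat it as an assumption here.
import multiprocessing

def suggested_workers() -> int:
    return multiprocessing.cpu_count() * 2 + 1

if __name__ == "__main__":
    print(f"gunicorn --workers {suggested_workers()} ...")
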
@@ -83,12 +83,27 @@ async def lifespan(app: FastAPI):
 
 app = FastAPI(lifespan=lifespan)
 
-# @app.middleware("http")
-# async def log_requests_middleware(request: Request, call_next):
-#     client_ip = request.client.host
-#     logger.info(f"Request from IP {client_ip} to {request.method} {request.url.path}")
-#     response = await call_next(request)
-#     return response
+@app.middleware("http")
+async def log_requests_middleware(request: Request, call_next):
+    start_time = time.time()
+    client_ip = request.client.host
+    method = request.method
+    url = str(request.url)
+    path = request.url.path
+    query_params = str(request.query_params)
+    
+    # Log the incoming request
+    logger.info(f"Request started: {method} {path} from {client_ip} | Query: {query_params}")
+    
+    try:
+        response = await call_next(request)
+        process_time = time.time() - start_time
+        logger.debug(f"Request completed: {method} {path} | Status: {response.status_code} | Time: {process_time:.2f}s")
+        return response
+    except Exception as e:
+        process_time = time.time() - start_time
+        logger.error(f"Request failed: {method} {path} | Error: {str(e)} | Time: {process_time:.2f}s")
+        raise
 
 # @app.middleware("http")
 # async def throw_error_middleware(request: Request, call_next):
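
The new middleware measures latency with time.time(), which is wall-clock time. A minimal sketch of the same timing pattern using time.perf_counter(), the monotonic clock usually preferred for measuring durations (a suggestion, not what the commit does):

# Sketch: timing a request the way the middleware does, but with a monotonic clock.
import time

start = time.perf_counter()
# ... the middleware would `await call_next(request)` here ...
process_time = time.perf_counter() - start
print(f"Time: {process_time:.2f}s")
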
@@ -167,5 +182,13 @@ async def health_check():
 
 if __name__ == "__main__":
     import uvicorn
-    logger.info("Starting server on 0.0.0.0:8000")
-    uvicorn.run(app, host="0.0.0.0", port=8000)
+    workers = 2
+    
+    logger.info(f"Starting server on 0.0.0.0:8000 with {workers} workers")
+    uvicorn.run(
+        "api:app",
+        host="0.0.0.0",
+        port=8000,
+        workers=workers
+    )
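
The switch from uvicorn.run(app, ...) to uvicorn.run("api:app", ..., workers=workers) matters because Uvicorn can only spawn multiple worker processes when the application is given as an import string it can re-import in each worker. A self-contained sketch (the module name main.py and the /ping route are illustrative, not from this commit):

# Sketch: multi-worker Uvicorn needs an import string ("module:attribute"), not an app object.
# Save as main.py and run `python main.py`; the module name and route here are hypothetical.
from fastapi import FastAPI
import uvicorn

app = FastAPI()

@app.get("/ping")
async def ping():
    return {"ok": True}

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, workers=2)
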
@@ -4,7 +4,7 @@ services:
   api:
     build:
       context: .
-      dockerfile: docker/Dockerfile
+      dockerfile: Dockerfile
     ports:
       - "8000:8000"
     env_file:
@@ -7,7 +7,7 @@ app = 'backend-production-ogog'
 primary_region = 'bos'
 
 [build]
-  dockerfile = 'docker/Dockerfile'
+  dockerfile = 'Dockerfile'
 
 [http_service]
   internal_port = 8000
@@ -7,7 +7,7 @@ app = 'backend-staging-icy-mountain-363'
 primary_region = 'cdg'
 
 [build]
-  dockerfile = 'docker/Dockerfile'
+  dockerfile = 'Dockerfile'
 
 [http_service]
   internal_port = 8000
@@ -119,6 +119,7 @@ async def create_file(
     user_id: Optional[str] = Depends(get_optional_user_id)
 ):
     """Create a file in the sandbox using direct file upload"""
+    logger.info(f"Received file upload request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
     client = await db.client
     
     # Verify the user has access to this sandbox
@@ -149,6 +150,7 @@ async def create_file_json(
     user_id: Optional[str] = Depends(get_optional_user_id)
 ):
     """Create a file in the sandbox using JSON (legacy support)"""
+    logger.info(f"Received JSON file creation request for sandbox {sandbox_id}, user_id: {user_id}")
     client = await db.client
     
     # Verify the user has access to this sandbox
@@ -163,6 +165,7 @@ async def create_file_json(
     content = file_request.get("content", "")
     
     if not path:
+        logger.error(f"Missing file path in request for sandbox {sandbox_id}")
         raise HTTPException(status_code=400, detail="File path is required")
     
     # Convert string content to bytes
@@ -186,6 +189,7 @@ async def list_files(
     user_id: Optional[str] = Depends(get_optional_user_id)
 ):
     """List files and directories at the specified path"""
+    logger.info(f"Received list files request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
     client = await db.client
     
     # Verify the user has access to this sandbox
@@ -213,6 +217,7 @@ async def list_files(
             )
             result.append(file_info)
         
+        logger.info(f"Successfully listed {len(result)} files in sandbox {sandbox_id}")
         return {"files": [file.dict() for file in result]}
     except Exception as e:
         logger.error(f"Error listing files in sandbox {sandbox_id}: {str(e)}")
@@ -226,6 +231,7 @@ async def read_file(
     user_id: Optional[str] = Depends(get_optional_user_id)
 ):
     """Read a file from the sandbox"""
+    logger.info(f"Received file read request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
     client = await db.client
     
     # Verify the user has access to this sandbox
@@ -240,6 +246,7 @@ async def read_file(
         
         # Return a Response object with the content directly
         filename = os.path.basename(path)
+        logger.info(f"Successfully read file {filename} from sandbox {sandbox_id}")
         return Response(
             content=content,
             media_type="application/octet-stream",
@@ -259,12 +266,14 @@ async def ensure_project_sandbox_active(
     Ensure that a project's sandbox is active and running.
     Checks the sandbox status and starts it if it's not running.
     """
+    logger.info(f"Received ensure sandbox active request for project {project_id}, user_id: {user_id}")
     client = await db.client
     
     # Find the project and sandbox information
     project_result = await client.table('projects').select('*').eq('project_id', project_id).execute()
     
     if not project_result.data or len(project_result.data) == 0:
+        logger.error(f"Project not found: {project_id}")
         raise HTTPException(status_code=404, detail="Project not found")
     
     project_data = project_result.data[0]
@@ -273,6 +282,7 @@ async def ensure_project_sandbox_active(
     if not project_data.get('is_public'):
         # For private projects, we must have a user_id
         if not user_id:
+            logger.error(f"Authentication required for private project {project_id}")
             raise HTTPException(status_code=401, detail="Authentication required for this resource")
         
         account_id = project_data.get('account_id')
@@ -281,6 +291,7 @@ async def ensure_project_sandbox_active(
         if account_id:
             account_user_result = await client.schema('basejump').from_('account_user').select('account_role').eq('user_id', user_id).eq('account_id', account_id).execute()
             if not (account_user_result.data and len(account_user_result.data) > 0):
+                logger.error(f"User {user_id} not authorized to access project {project_id}")
                 raise HTTPException(status_code=403, detail="Not authorized to access this project")
     
     try:
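
The sandbox API hunks above all apply one pattern: an INFO log when a request arrives and an ERROR log on each failure path just before the HTTPException is raised. A hedged sketch of that pattern in isolation (the endpoint, its arguments, and the logger lookup are illustrative, not taken from the repository):

# Sketch of the logging pattern added to the sandbox endpoints: log entry at INFO,
# log each failure at ERROR before raising. Names here are illustrative only.
import logging
from typing import Optional

from fastapi import HTTPException

logger = logging.getLogger('agentpress')  # same default logger name as setup_logger below

async def example_endpoint(sandbox_id: str, path: str, user_id: Optional[str] = None):
    logger.info(f"Received request for sandbox {sandbox_id}, path: {path}, user_id: {user_id}")
    if not path:
        logger.error(f"Missing file path in request for sandbox {sandbox_id}")
        raise HTTPException(status_code=400, detail="File path is required")
    return {"ok": True}
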
@@ -66,45 +66,64 @@ def setup_logger(name: str = 'agentpress') -> logging.Logger:
         logging.Logger: Configured logger instance
     """
     logger = logging.getLogger(name)
+    logger.setLevel(logging.DEBUG)
     
     # Create logs directory if it doesn't exist
-    log_dir = 'logs'
-    if not os.path.exists(log_dir):
-        os.makedirs(log_dir)
+    log_dir = os.path.join(os.getcwd(), 'logs')
+    try:
+        if not os.path.exists(log_dir):
+            os.makedirs(log_dir)
+            print(f"Created log directory at: {log_dir}")
+    except Exception as e:
+        print(f"Error creating log directory: {e}")
+        return logger
     
     # File handler with rotation
-    log_file = os.path.join(log_dir, f'{name}_{datetime.now().strftime("%Y%m%d")}.log')
-    file_handler = RotatingFileHandler(
-        log_file,
-        maxBytes=10*1024*1024, # 10MB
-        backupCount=5,
-        encoding='utf-8'
-    )
-    file_handler.setLevel(logging.DEBUG)
+    try:
+        log_file = os.path.join(log_dir, f'{name}_{datetime.now().strftime("%Y%m%d")}.log')
+        file_handler = RotatingFileHandler(
+            log_file,
+            maxBytes=10*1024*1024, # 10MB
+            backupCount=5,
+            encoding='utf-8'
+        )
+        file_handler.setLevel(logging.DEBUG)
+        
+        # Create formatters
+        file_formatter = logging.Formatter(
+            '%(asctime)s - %(name)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s'
+        )
+        file_handler.setFormatter(file_formatter)
+        
+        # Add file handler to logger
+        logger.addHandler(file_handler)
+        print(f"Added file handler for: {log_file}")
+    except Exception as e:
+        print(f"Error setting up file handler: {e}")
     
-    # Console handler
-    console_handler = logging.StreamHandler(sys.stdout)
-    if config.ENV_MODE == EnvMode.PRODUCTION:
-        console_handler.setLevel(logging.WARNING)
-    else:
-        console_handler.setLevel(logging.DEBUG)
-    
-    # Create formatters
-    file_formatter = logging.Formatter(
-        '%(asctime)s - %(name)s - %(levelname)s - %(filename)s:%(lineno)d - %(message)s'
-    )
-    console_formatter = logging.Formatter(
-        '%(asctime)s - %(levelname)s - %(message)s'
-    )
-    
-    # Set formatters
-    file_handler.setFormatter(file_formatter)
-    console_handler.setFormatter(console_formatter)
-    
-    # Add handlers to logger
-    logger.addHandler(file_handler)
-    logger.addHandler(console_handler)
+    # Console handler - WARNING in production, INFO in other environments
+    try:
+        console_handler = logging.StreamHandler(sys.stdout)
+        if config.ENV_MODE == EnvMode.PRODUCTION:
+            console_handler.setLevel(logging.WARNING)
+        else:
+            console_handler.setLevel(logging.INFO)
+        
+        console_formatter = logging.Formatter(
+            '%(asctime)s - %(levelname)s - %(message)s'
+        )
+        console_handler.setFormatter(console_formatter)
+        
+        # Add console handler to logger
+        logger.addHandler(console_handler)
+        print(f"Added console handler with level: {console_handler.level}")
+    except Exception as e:
+        print(f"Error setting up console handler: {e}")
+    
+    # # Test logging
+    # logger.debug("Logger setup complete - DEBUG test")
+    # logger.info("Logger setup complete - INFO test")
+    # logger.warning("Logger setup complete - WARNING test")
     
     return logger
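
A hedged usage sketch for the reworked setup_logger (the import path utils.logger is an assumption about the repository layout): the logger itself is set to DEBUG, the rotating file handler keeps everything, and the console shows WARNING and above in production, INFO and above otherwise.

# Usage sketch; the import path is an assumption, not confirmed by this commit.
from utils.logger import setup_logger

logger = setup_logger('agentpress')
logger.debug("goes to the daily log file only (console is INFO/WARNING)")
logger.info("also printed to the console outside production")
logger.warning("printed to the console in every environment")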