Merge branch 'main' into better-credentials

Soumyadas15 2025-06-18 20:42:29 +05:30
commit 19e9e56df5
4 changed files with 58 additions and 23 deletions

View File

@@ -90,16 +90,34 @@ class ThreadManager:
         else:
             return msg_content
 
-    def _safe_truncate(self, msg_content: Union[str, dict], max_length: int = 300000) -> Union[str, dict]:
-        """Truncate the message content safely."""
+    def _safe_truncate(self, msg_content: Union[str, dict], max_length: int = 100000) -> Union[str, dict]:
+        """Truncate the message content safely by removing the middle portion."""
+        max_length = min(max_length, 100000)
+
         if isinstance(msg_content, str):
             if len(msg_content) > max_length:
-                return msg_content[:max_length] + f"\n\nThis message is too long, repeat relevant information in your response to remember it"
+                # Calculate how much to keep from start and end
+                keep_length = max_length - 150  # Reserve space for truncation message
+                start_length = keep_length // 2
+                end_length = keep_length - start_length
+
+                start_part = msg_content[:start_length]
+                end_part = msg_content[-end_length:] if end_length > 0 else ""
+
+                return start_part + f"\n\n... (middle truncated) ...\n\n" + end_part + f"\n\nThis message is too long, repeat relevant information in your response to remember it"
             else:
                 return msg_content
         elif isinstance(msg_content, dict):
-            if len(json.dumps(msg_content)) > max_length:
-                return json.dumps(msg_content)[:max_length] + f"\n\nThis message is too long, repeat relevant information in your response to remember it"
+            json_str = json.dumps(msg_content)
+            if len(json_str) > max_length:
+                # Calculate how much to keep from start and end
+                keep_length = max_length - 150  # Reserve space for truncation message
+                start_length = keep_length // 2
+                end_length = keep_length - start_length
+
+                start_part = json_str[:start_length]
+                end_part = json_str[-end_length:] if end_length > 0 else ""
+
+                return start_part + f"\n\n... (middle truncated) ...\n\n" + end_part + f"\n\nThis message is too long, repeat relevant information in your response to remember it"
             else:
                 return msg_content
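
For readers skimming the hunk above, the new truncation strategy boils down to "keep the head and tail, drop the middle." The sketch below is a standalone approximation of that logic; the function name `middle_truncate` and the module-level constant are illustrative, not part of the commit, and unlike the method in the diff it always returns a string:

```python
import json
from typing import Union

# Illustrative constant; the commit inlines this string in the return statements.
TRUNCATION_NOTICE = "\n\nThis message is too long, repeat relevant information in your response to remember it"

def middle_truncate(content: Union[str, dict], max_length: int = 100000) -> str:
    """Keep the head and tail of an oversized payload and drop the middle."""
    text = content if isinstance(content, str) else json.dumps(content)
    if len(text) <= max_length:
        return text
    keep_length = max_length - 150          # reserve room for the truncation marker
    start_length = keep_length // 2
    end_length = keep_length - start_length
    end_part = text[-end_length:] if end_length > 0 else ""
    return text[:start_length] + "\n\n... (middle truncated) ...\n\n" + end_part + TRUNCATION_NOTICE
```

Middle truncation keeps both the start of the content and its most recent tail, which usually carries more signal than the previous hard cut at `max_length`.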
@@ -208,10 +226,6 @@ class ThreadManager:
         else:
             max_tokens = 41 * 1000 - 10000
 
-        if max_iterations <= 0:
-            logger.warning(f"_compress_messages: Max iterations reached, returning uncompressed messages")
-            return messages
-
         result = messages
         result = self._remove_meta_messages(result)
@@ -225,6 +239,10 @@ class ThreadManager:
         logger.info(f"_compress_messages: {uncompressed_total_token_count} -> {compressed_token_count}") # Log the token compression for debugging later
 
+        if max_iterations <= 0:
+            logger.warning(f"_compress_messages: Max iterations reached")
+            return result
+
         if (compressed_token_count > max_tokens):
             logger.warning(f"Further token compression is needed: {compressed_token_count} > {max_tokens}")
             result = self._compress_messages(messages, llm_model, max_tokens, int(token_threshold / 2), max_iterations - 1)
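
The two hunks above move the `max_iterations` guard from before the compression pass to after it, so the method always performs at least one pass and logs the compressed token count before giving up. A control-flow sketch of that ordering, with `compress_pass` and `count_tokens` standing in for the helpers the real method uses:

```python
import logging
from typing import Any, Callable, List

logger = logging.getLogger(__name__)

def compress_messages(messages: List[Any], max_tokens: int, token_threshold: int,
                      max_iterations: int,
                      compress_pass: Callable[[List[Any], int], List[Any]],
                      count_tokens: Callable[[List[Any]], int]) -> List[Any]:
    """Sketch of the reordered guard: compress first, then check the iteration budget."""
    result = compress_pass(messages, token_threshold)   # always run at least one pass
    compressed = count_tokens(result)
    logger.info("compress_messages: %d tokens after pass", compressed)

    if max_iterations <= 0:
        logger.warning("compress_messages: max iterations reached")
        return result                                    # return the partially compressed result

    if compressed > max_tokens:
        # Halve the per-message threshold and retry with one fewer iteration, as in the diff.
        return compress_messages(messages, max_tokens, int(token_threshold / 2),
                                 max_iterations - 1, compress_pass, count_tokens)
    return result
```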

View File

@@ -141,6 +141,7 @@ from mcp_local import secure_api as secure_mcp_api
 app.include_router(mcp_api.router, prefix="/api")
 app.include_router(secure_mcp_api.router, prefix="/api/secure-mcp")
 app.include_router(transcription_api.router, prefix="/api")
+app.include_router(email_api.router, prefix="/api")
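
This hunk wires the new `email_api` router into the app alongside the existing ones. A minimal sketch of what such a router module and its registration look like in FastAPI; the endpoint and module contents here are hypothetical, since only the `include_router` line appears in the diff:

```python
from fastapi import APIRouter, FastAPI

# Hypothetical stand-in for email_api: the commit only shows the registration line.
router = APIRouter(tags=["email"])

@router.get("/email/health")
async def email_health() -> dict:
    """Placeholder endpoint illustrating the router pattern."""
    return {"status": "ok"}

app = FastAPI()
# Mirrors the registration style used in the diff: every router mounts under /api.
app.include_router(router, prefix="/api")
```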

File diff suppressed because one or more lines are too long

View File

@@ -570,13 +570,20 @@ export const addUserMessage = async (
 export const getMessages = async (threadId: string): Promise<Message[]> => {
   const supabase = createClient();
 
+  let allMessages: Message[] = [];
+  let from = 0;
+  const batchSize = 1000;
+  let hasMore = true;
+
+  while (hasMore) {
     const { data, error } = await supabase
       .from('messages')
       .select('*')
       .eq('thread_id', threadId)
       .neq('type', 'cost')
       .neq('type', 'summary')
-      .order('created_at', { ascending: true });
+      .order('created_at', { ascending: true })
+      .range(from, from + batchSize - 1);
 
     if (error) {
       console.error('Error fetching messages:', error);
@@ -584,9 +591,18 @@ export const getMessages = async (threadId: string): Promise<Message[]> => {
       throw new Error(`Error getting messages: ${error.message}`);
     }
 
     console.log('[API] Messages fetched:', data);
 
+    if (data && data.length > 0) {
+      allMessages = allMessages.concat(data);
+      from += batchSize;
+      hasMore = data.length === batchSize;
+    } else {
+      hasMore = false;
+    }
+  }
+
-  return data || [];
+  console.log('[API] Messages fetched count:', allMessages.length);
+  return allMessages;
 };
 
 // Agent APIs
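
The getMessages rewrite replaces a single unbounded select with offset-batched pagination: request fixed-size pages via `range` until a short page signals the end. The loop shape, sketched in Python against a hypothetical `fetch_page(offset, limit)` callable (the actual code uses the Supabase JS client):

```python
from typing import Any, Callable, List

def fetch_all(fetch_page: Callable[[int, int], List[Any]], batch_size: int = 1000) -> List[Any]:
    """Offset-batched pagination: request pages until a short (or empty) page signals the end."""
    all_rows: List[Any] = []
    offset = 0
    while True:
        page = fetch_page(offset, batch_size)   # e.g. rows offset .. offset + batch_size - 1
        if not page:
            break
        all_rows.extend(page)
        if len(page) < batch_size:              # short page: nothing left after this one
            break
        offset += batch_size
    return all_rows
```

Both this sketch and the TypeScript version make one extra round trip when the total row count is an exact multiple of the batch size, since only an empty or short page proves there is nothing left.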