lint fixes 👀

Nate Kelley 2025-07-12 16:32:47 -06:00
parent 139a0f52c9
commit 5448af614a
7 changed files with 30 additions and 34 deletions

View File

@@ -1,9 +1,9 @@
import {
+CancelChatParamsSchema,
ChatCreateRequestSchema,
ChatError,
type ChatWithMessages,
ChatWithMessagesSchema,
-CancelChatParamsSchema,
} from '@buster/server-shared/chats';
import { zValidator } from '@hono/zod-validator';
import { Hono } from 'hono';
@@ -11,8 +11,8 @@ import { requireAuth } from '../../../middleware/auth';
import '../../../types/hono.types'; // I added this to fix intermittent type errors. Could probably be removed.
import { HTTPException } from 'hono/http-exception';
import { z } from 'zod';
-import { createChatHandler } from './handler';
import { cancelChatHandler } from './cancel-chat';
+import { createChatHandler } from './handler';
const app = new Hono()
// Apply authentication middleware
@@ -54,7 +54,7 @@ const app = new Hono()
.delete('/:chat_id/cancel', zValidator('param', CancelChatParamsSchema), async (c) => {
const params = c.req.valid('param');
const user = c.get('busterUser');
await cancelChatHandler(params.chat_id, user);
return c.json({ success: true, message: 'Chat cancelled successfully' });
})

View File

@@ -21,7 +21,7 @@ const app = new Hono()
// For URL verification, it's application/x-www-form-urlencoded
// For actual events, it's application/json
const contentType = c.req.header('content-type');
if (contentType?.includes('application/x-www-form-urlencoded')) {
// Handle URL verification challenge
const formData = await c.req.parseBody();
@@ -29,7 +29,7 @@ const app = new Hono()
return c.text(formData.challenge as string);
}
}
// For JSON payloads, try to parse but don't fail
let body = null;
if (contentType?.includes('application/json')) {
@@ -39,7 +39,7 @@ const app = new Hono()
// If JSON parsing fails, just continue
}
}
const response = await eventsHandler(body);
return c.json(response);
} catch (error) {
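For reference, the two Slack payload shapes this route distinguishes; the field names follow Slack's public Events API, but the exact types used in this repo are not shown in the diff:

// Hedged sketch of Slack's Events API payloads (not from this repo).
type SlackUrlVerification = {
  type: 'url_verification';
  challenge: string; // must be echoed back verbatim, as c.text(...) does above
};
type SlackEventCallback = {
  type: 'event_callback';
  event: Record<string, unknown>; // the actual event, passed to eventsHandler
};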

View File

@@ -171,7 +171,6 @@ function createDataMetadata(results: Record<string, unknown>[]): DataMetadata {
};
}
/**
* Ensures timeFrame values are properly quoted in YAML content
* Finds timeFrame: value and wraps the value in quotes if not already quoted
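The docblock above describes the helper, but the hunk cuts off before its body; a minimal sketch of what such a regex-based pass might look like, assuming the repo's actual pattern differs in detail:

// Hypothetical sketch; wraps an unquoted timeFrame value in double quotes.
function ensureTimeFrameQuoted(yamlContent: string): string {
  // Match `timeFrame:` lines whose value does not already start with a quote.
  return yamlContent.replace(
    /^([ \t]*timeFrame:[ \t]*)([^"'\n][^\n]*)$/gm,
    (_match, prefix: string, value: string) => `${prefix}"${value.trim()}"`
  );
}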

View File

@@ -162,7 +162,6 @@ function createDataMetadata(results: Record<string, unknown>[]): DataMetadata {
};
}
/**
* Ensures timeFrame values are properly quoted in YAML content
* Finds timeFrame: value and wraps the value in quotes if not already quoted

View File

@@ -202,9 +202,9 @@ export class SnowflakeAdapter extends BaseAdapter {
reject(new Error('Failed to acquire Snowflake connection'));
return;
}
connection.execute({
-sqlText: sql, // Use original SQL unchanged for caching
+sqlText: sql, // Use original SQL unchanged for caching
binds: params as snowflake.Binds,
complete: (
err: SnowflakeError | undefined,
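The sqlText comment above matters because Snowflake's result cache matches on the exact query text, so appending a LIMIT clause would defeat it; row limiting has to happen client-side instead. A hedged sketch of that pattern with the snowflake-sdk stream API, using names like queryWithMaxRows that are assumptions, not code from this repo:

import type snowflake from 'snowflake-sdk';

// Hypothetical sketch, not the adapter's actual implementation.
function queryWithMaxRows(
  connection: snowflake.Connection,
  sql: string,
  params: unknown[],
  maxRows: number
): Promise<{ rows: Record<string, unknown>[]; hasMoreRows: boolean }> {
  return new Promise((resolve, reject) => {
    connection.execute({
      sqlText: sql, // unchanged, so Snowflake's result cache can still match
      binds: params as snowflake.Binds,
      streamResult: true, // avoid buffering the full result set in memory
      complete: (err, stmt) => {
        if (err) return reject(err);
        const rows: Record<string, unknown>[] = [];
        stmt
          .streamRows({ start: 0, end: maxRows - 1 }) // inclusive row range
          .on('data', (row: Record<string, unknown>) => rows.push(row))
          // Heuristic: a full window suggests more rows remain.
          .on('end', () => resolve({ rows, hasMoreRows: rows.length === maxRows }))
          .on('error', reject);
      },
    });
  });
}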

View File

@@ -48,7 +48,7 @@ describe('Snowflake Memory Protection Tests', () => {
'should handle large result sets with maxRows without running out of memory',
async () => {
await adapter.initialize(credentials);
// NOTE: Due to Snowflake SDK limitations, we cannot truly stream results
// For now, we'll test with a smaller dataset to avoid OOM
// Query ORDERS table instead of LINEITEM (1.5M rows vs 6M rows)
@@ -61,7 +61,7 @@ describe('Snowflake Memory Protection Tests', () => {
expect(result.rows.length).toBe(100);
expect(result.hasMoreRows).toBe(true);
expect(result.rowCount).toBe(100);
// Verify we got the fields metadata
expect(result.fields.length).toBeGreaterThan(0);
expect(result.fields[0]).toHaveProperty('name');
@@ -74,38 +74,38 @@ describe('Snowflake Memory Protection Tests', () => {
'should preserve query caching when running the same query multiple times',
async () => {
await adapter.initialize(credentials);
const sql = 'SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.CUSTOMER WHERE C_MKTSEGMENT = ?';
const params = ['AUTOMOBILE'];
// First execution - will be cached by Snowflake
const start1 = Date.now();
const result1 = await adapter.query(sql, params, 50);
const time1 = Date.now() - start1;
// Second execution - should hit Snowflake's cache
const start2 = Date.now();
const result2 = await adapter.query(sql, params, 50);
const time2 = Date.now() - start2;
// Third execution with different maxRows - should still hit cache
const start3 = Date.now();
const result3 = await adapter.query(sql, params, 25);
const time3 = Date.now() - start3;
// Verify results
expect(result1.rows.length).toBe(50);
expect(result2.rows.length).toBe(50);
expect(result3.rows.length).toBe(25);
// All should indicate more rows available
expect(result1.hasMoreRows).toBe(true);
expect(result2.hasMoreRows).toBe(true);
expect(result3.hasMoreRows).toBe(true);
// Cache hits should be faster (allowing for some variance)
console.info(`Query times: ${time1}ms, ${time2}ms, ${time3}ms`);
// The cached queries (2nd and 3rd) should generally be faster than the first
// We use a loose check because network latency can vary
const avgCachedTime = (time2 + time3) / 2;
@@ -118,11 +118,9 @@ describe('Snowflake Memory Protection Tests', () => {
'should handle queries with no maxRows (fetch all results)',
async () => {
await adapter.initialize(credentials);
// Query a small table without maxRows
-const result = await adapter.query(
-'SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.REGION'
-);
+const result = await adapter.query('SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.REGION');
// REGION table has exactly 5 rows
expect(result.rows.length).toBe(5);
@@ -136,7 +134,7 @@ describe('Snowflake Memory Protection Tests', () => {
'should handle maxRows=1 correctly',
async () => {
await adapter.initialize(credentials);
const result = await adapter.query(
'SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.NATION ORDER BY N_NATIONKEY',
undefined,
@@ -154,7 +152,7 @@ describe('Snowflake Memory Protection Tests', () => {
'should handle edge case where result set equals maxRows',
async () => {
await adapter.initialize(credentials);
// REGION table has exactly 5 rows
const result = await adapter.query(
'SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.REGION',
@@ -173,7 +171,7 @@ describe('Snowflake Memory Protection Tests', () => {
'should handle complex queries with CTEs and maxRows',
async () => {
await adapter.initialize(credentials);
const sql = `
WITH high_value_orders AS (
SELECT O_CUSTKEY, SUM(O_TOTALPRICE) as total_spent
@@ -195,4 +193,4 @@ describe('Snowflake Memory Protection Tests', () => {
},
TEST_TIMEOUT
);
});
});

View File

@@ -1,9 +1,9 @@
-import { z } from "zod";
-import { OrganizationSchema } from "../organization/organization.types";
-import { OrganizationRoleSchema } from "../organization/roles.types";
-import { TeamSchema } from "../teams/teams.types";
-import { UserFavoriteSchema } from "./favorites.types";
-import { UserSchema } from "./users.types";
+import { z } from 'zod';
+import { OrganizationSchema } from '../organization/organization.types';
+import { OrganizationRoleSchema } from '../organization/roles.types';
+import { TeamSchema } from '../teams/teams.types';
+import { UserFavoriteSchema } from './favorites.types';
+import { UserSchema } from './users.types';
const OrganizationUserSchema = OrganizationSchema.extend({
role: OrganizationRoleSchema,