query limit fixes

This commit is contained in:
dal 2025-09-11 22:19:55 -06:00
parent fbb3099d22
commit 3af0fee1b0
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
5 changed files with 64 additions and 16 deletions

View File

@ -144,17 +144,22 @@ export async function getMetricDataHandler(
// Execute query using the shared utility
try {
// Request one extra row to detect if there are more records
const result = await executeMetricQuery(metric.dataSourceId, sql, credentials, {
-        maxRows: queryLimit,
+        maxRows: queryLimit + 1,
timeout: 60000, // 60 seconds
retryDelays: [1000, 3000, 6000], // 1s, 3s, 6s
});
// Trim to requested limit and check if there are more records
const hasMore = result.data.length > queryLimit;
const trimmedData = result.data.slice(0, queryLimit);
const response: MetricDataResponse = {
-      data: result.data,
+      data: trimmedData,
data_metadata: result.dataMetadata,
metricId,
-      has_more_records: result.hasMoreRecords,
+      has_more_records: hasMore || result.hasMoreRecords,
};
// Cache the data if report_file_id is provided (pass-through write)
@ -163,7 +168,7 @@ export async function getMetricDataHandler(
metricId,
reportFileId,
organizationId,
-        rowCount: result.data.length,
+        rowCount: trimmedData.length,
});
// Fire and forget - don't wait for cache write

View File

@ -219,7 +219,7 @@ async function validateSql(
// Execute query using the new utility
try {
const result = await executeMetricQuery(dataSourceId, sqlQuery, credentials, {
-      maxRows: 1000, // Validation limit
+      maxRows: 50, // Reduced validation limit - just enough to validate schema
timeout: 120000, // 2 minutes
retryDelays: [1000, 3000, 6000], // 1s, 3s, 6s
});
@ -231,7 +231,7 @@ async function validateSql(
if (result.data.length === 0) {
message = 'Query executed successfully but returned no records';
} else if (result.hasMoreRecords) {
-      message = `Query validated successfully. Results were limited to 1000 rows for memory protection (query may return more rows when executed)${displayResults.length < result.data.length ? ` - showing first 25 of ${result.data.length} fetched` : ''}`;
+      message = `Query validated successfully. Results were limited to 50 rows for validation (query may return more rows when executed)${displayResults.length < result.data.length ? ` - showing first 25 of ${result.data.length} fetched` : ''}`;
} else {
message = `Query validated successfully and returned ${result.data.length} records${result.data.length > 25 ? ' (showing sample of first 25)' : ''}`;
}

View File

@ -141,7 +141,7 @@ async function validateSql(
// Execute query using the new utility
try {
const result = await executeMetricQuery(dataSourceId, sqlQuery, credentials, {
-      maxRows: 1000, // Validation limit
+      maxRows: 50, // Reduced validation limit - just enough to validate schema
timeout: 120000, // 2 minutes
retryDelays: [1000, 3000, 6000], // 1s, 3s, 6s
});
@ -153,7 +153,7 @@ async function validateSql(
if (result.data.length === 0) {
message = 'Query executed successfully but returned no records';
} else if (result.hasMoreRecords) {
-      message = `Query validated successfully. Results were limited to 1000 rows for memory protection (query may return more rows when executed)${displayResults.length < result.data.length ? ` - showing first 25 of ${result.data.length} fetched` : ''}`;
+      message = `Query validated successfully. Results were limited to 50 rows for validation (query may return more rows when executed)${displayResults.length < result.data.length ? ` - showing first 25 of ${result.data.length} fetched` : ''}`;
} else {
message = `Query validated successfully and returned ${result.data.length} records${result.data.length > 25 ? ' (showing sample of first 25)' : ''}`;
}

View File

@ -117,10 +117,10 @@ describe('Snowflake Memory Protection Tests', () => {
async () => {
await adapter.initialize(credentials);
-        // Query a small table without maxRows
+        // Query a small table without maxRows - should get ALL rows
const result = await adapter.query('SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.REGION');
-        // REGION table has exactly 5 rows
+        // REGION table has exactly 5 rows - we should get them all
expect(result.rows.length).toBe(5);
expect(result.hasMoreRows).toBe(false);
expect(result.rowCount).toBe(5);
@ -128,6 +128,46 @@ describe('Snowflake Memory Protection Tests', () => {
TEST_TIMEOUT
);
testWithCredentials(
'should fetch unlimited rows for larger datasets when no maxRows specified',
async () => {
await adapter.initialize(credentials);
// Query NATION table without maxRows - should get ALL 25 rows
const result = await adapter.query('SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.NATION');
// NATION table has exactly 25 rows - we should get them all
expect(result.rows.length).toBe(25);
expect(result.hasMoreRows).toBe(false);
expect(result.rowCount).toBe(25);
// Verify we got all the data by checking specific nations exist
const nationKeys = result.rows.map((r) => r.n_nationkey);
expect(nationKeys).toContain(0); // ALGERIA
expect(nationKeys).toContain(24); // UNITED STATES
},
TEST_TIMEOUT
);
testWithCredentials(
'should handle maxRows parameter correctly for detecting more rows',
async () => {
await adapter.initialize(credentials);
// Query with maxRows=5 (NATION has 25 rows total)
const result = await adapter.query(
'SELECT * FROM SNOWFLAKE_SAMPLE_DATA.TPCH_SF1.NATION ORDER BY N_NATIONKEY',
undefined,
5
);
expect(result.rows.length).toBe(5);
expect(result.hasMoreRows).toBe(true); // Should detect there are more rows
expect(result.rowCount).toBe(5);
},
TEST_TIMEOUT
);
testWithCredentials(
'should handle maxRows=1 correctly',
async () => {

View File

@ -200,7 +200,8 @@ export class SnowflakeAdapter extends BaseAdapter {
// Set query timeout if specified (default: 120 seconds for Snowflake queue handling)
const timeoutMs = timeout || TIMEOUT_CONFIG.query.default;
-    const limit = maxRows && maxRows > 0 ? maxRows : 5000;
+    // Only apply limit if explicitly requested, otherwise fetch all rows
+    const limit = maxRows && maxRows > 0 ? maxRows : undefined;
const queryPromise = new Promise<{
rows: Record<string, unknown>[];
@ -225,8 +226,10 @@ export class SnowflakeAdapter extends BaseAdapter {
const rows: Record<string, unknown>[] = [];
let hasMoreRows = false;
-          // Request one extra row to check if there are more rows
-          const stream = stmt.streamRows?.({ start: 0, end: limit });
+          // Stream rows with or without limit
+          const stream = limit
+            ? stmt.streamRows?.({ start: 0, end: limit })
+            : stmt.streamRows?.();
if (!stream) {
reject(new Error('Snowflake streaming not supported'));
return;
@ -236,8 +239,8 @@ export class SnowflakeAdapter extends BaseAdapter {
stream
.on('data', (row: Record<string, unknown>) => {
-              // Only keep up to limit rows
-              if (rowCount < limit) {
+              // If limit is set, only keep up to limit rows
+              if (!limit || rowCount < limit) {
// Transform column names to lowercase to match expected behavior
const transformedRow: Record<string, unknown> = {};
for (const [key, value] of Object.entries(row)) {
@ -252,7 +255,7 @@ export class SnowflakeAdapter extends BaseAdapter {
})
.on('end', () => {
// If we got more rows than requested, there are more available
-              hasMoreRows = rowCount > limit;
+              hasMoreRows = limit ? rowCount > limit : false;
resolve({
rows,
statement: stmt,