Electric context bug fix with SQL helpers.

This commit is contained in:
dal 2025-07-02 00:50:37 -06:00
parent ce8497acfe
commit db3a0a7c50
No known key found for this signature in database
GPG Key ID: 16F4B0E1E9F61122
5 changed files with 8 additions and 97 deletions

View File

@ -4,12 +4,10 @@ import { errorResponse } from '../../../utils/response';
import { extractParamFromWhere } from './_helpers';
export const chatsProxyRouter = async (url: URL, _userId: string, c: Context) => {
const matches = extractParamFromWhere(url, 'id');
const chatId = matches?.[0];
const chatId = extractParamFromWhere(url, 'id');
if (!chatId) {
errorResponse('Chat ID (id) is required', 403);
return;
throw errorResponse('Chat ID (id) is required', 403);
}
// User must have access to the chat
@ -19,8 +17,7 @@ export const chatsProxyRouter = async (url: URL, _userId: string, c: Context) =>
});
if (!userHasAccessToChat) {
errorResponse('You do not have access to this chat', 403);
return;
throw errorResponse('You do not have access to this chat', 403);
}
return url;

View File

@ -46,8 +46,7 @@ const app = new Hono()
return response;
} catch (_error) {
console.error('Error fetching data from Electric Shape', _error);
errorResponse('Error fetching data from Electric Shape', 500);
return;
throw errorResponse('Error fetching data from Electric Shape', 500);
}
});

View File

@ -4,12 +4,10 @@ import { errorResponse } from '../../../utils/response';
import { extractParamFromWhere } from './_helpers';
export const messagesProxyRouter = async (url: URL, _userId: string, c: Context) => {
const matches = extractParamFromWhere(url, 'chat_id');
const chatId = matches?.[0];
const chatId = extractParamFromWhere(url, 'chat_id');
if (!chatId) {
errorResponse('Chat ID is required', 403);
return;
throw errorResponse('Chat ID is required', 403);
}
const userHasAccessToChat = await canUserAccessChat({
@ -18,8 +16,7 @@ export const messagesProxyRouter = async (url: URL, _userId: string, c: Context)
});
if (!userHasAccessToChat) {
errorResponse('You do not have access to this chat', 403);
return;
throw errorResponse('You do not have access to this chat', 403);
}
return url;

View File

@ -17,23 +17,16 @@ export async function validateSqlPermissions(
dataSourceSyntax?: string
): Promise<PermissionValidationResult> {
try {
console.info('[validateSqlPermissions] Starting validation for userId:', userId);
console.info('[validateSqlPermissions] SQL query:', sql);
console.info('[validateSqlPermissions] Data source syntax:', dataSourceSyntax);
// Extract physical tables from SQL
const tablesInQuery = extractPhysicalTables(sql, dataSourceSyntax);
console.info('[validateSqlPermissions] Tables extracted from SQL:', JSON.stringify(tablesInQuery, null, 2));
if (tablesInQuery.length === 0) {
// No tables referenced (might be a function call or constant select)
console.info('[validateSqlPermissions] No tables found in query, allowing access');
return { isAuthorized: true, unauthorizedTables: [] };
}
// Get user's permissioned datasets
const permissionedDatasets = await getPermissionedDatasets(userId, 0, 1000);
console.info('[validateSqlPermissions] Found', permissionedDatasets.length, 'permissioned datasets for user');
// Extract all allowed tables from datasets
const allowedTables: ParsedTable[] = [];
@ -41,15 +34,10 @@ export async function validateSqlPermissions(
for (const dataset of permissionedDatasets) {
if (dataset.ymlFile) {
const tables = extractTablesFromYml(dataset.ymlFile);
console.info('[validateSqlPermissions] Extracted', tables.length, 'tables from dataset:', dataset.name || 'unnamed');
console.info('[validateSqlPermissions] Tables from YML:', JSON.stringify(tables, null, 2));
allowedTables.push(...tables);
}
}
console.info('[validateSqlPermissions] Total allowed tables:', allowedTables.length);
console.info('[validateSqlPermissions] All allowed tables:', JSON.stringify(allowedTables, null, 2));
// Check each table in query against permissions
const unauthorizedTables: string[] = [];
@ -60,8 +48,6 @@ export async function validateSqlPermissions(
for (const allowedTable of allowedTables) {
const matches = tablesMatch(queryTable, allowedTable);
if (matches) {
console.info('[validateSqlPermissions] Table match found:',
`Query: ${JSON.stringify(queryTable)} matches Allowed: ${JSON.stringify(allowedTable)}`);
isAuthorized = true;
break;
}
@ -78,11 +64,9 @@ export async function validateSqlPermissions(
unauthorizedTables
};
console.info('[validateSqlPermissions] Final result:', JSON.stringify(result, null, 2));
return result;
} catch (error) {
console.error('[validateSqlPermissions] Error during validation:', error);
return {
isAuthorized: false,
unauthorizedTables: [],

View File

@ -34,15 +34,11 @@ const DIALECT_MAPPING: Record<string, string> = {
function getParserDialect(dataSourceSyntax?: string): string {
if (!dataSourceSyntax) {
console.warn('[getParserDialect] No data source syntax provided, defaulting to postgresql');
return 'postgresql';
}
const dialect = DIALECT_MAPPING[dataSourceSyntax.toLowerCase()];
if (!dialect) {
console.warn(
`[getParserDialect] Unknown data source syntax: ${dataSourceSyntax}, defaulting to postgresql`
);
return 'postgresql';
}
@ -55,19 +51,14 @@ function getParserDialect(dataSourceSyntax?: string): string {
*/
export function extractPhysicalTables(sql: string, dataSourceSyntax?: string): ParsedTable[] {
const dialect = getParserDialect(dataSourceSyntax);
console.info('[extractPhysicalTables] Using dialect:', dialect, 'for syntax:', dataSourceSyntax);
const parser = new Parser();
try {
console.info('[extractPhysicalTables] Parsing SQL:', sql);
// Parse SQL into AST with the appropriate dialect
const ast = parser.astify(sql, { database: dialect });
// Get all table references from parser with the appropriate dialect
const allTables = parser.tableList(sql, { database: dialect });
console.info('[extractPhysicalTables] Raw table list from parser:', allTables);
// Extract CTE names to exclude them
const cteNames = new Set<string>();
@ -91,20 +82,16 @@ export function extractPhysicalTables(sql: string, dataSourceSyntax?: string): P
const processedTables = new Set<string>();
for (const tableRef of allTables) {
console.info('[extractPhysicalTables] Processing table reference:', tableRef);
const parsed = parseTableReference(tableRef);
console.info('[extractPhysicalTables] Parsed table:', JSON.stringify(parsed));
// Skip if it's a CTE
if (cteNames.has(parsed.table.toLowerCase())) {
console.info('[extractPhysicalTables] Skipping CTE:', parsed.table);
continue;
}
// Skip duplicates
const tableKey = `${parsed.database || ''}.${parsed.schema || ''}.${parsed.table}`;
if (processedTables.has(tableKey)) {
console.info('[extractPhysicalTables] Skipping duplicate:', tableKey);
continue;
}
@ -112,13 +99,8 @@ export function extractPhysicalTables(sql: string, dataSourceSyntax?: string): P
physicalTables.push(parsed);
}
console.info(
'[extractPhysicalTables] Final physical tables:',
JSON.stringify(physicalTables, null, 2)
);
return physicalTables;
} catch (error) {
console.error('[extractPhysicalTables] Error parsing SQL:', error);
throw new Error(
`Failed to parse SQL: ${error instanceof Error ? error.message : 'Unknown error'}`
);
@ -230,30 +212,14 @@ export function normalizeTableIdentifier(identifier: ParsedTable): string {
* For example, "schema.table" matches "database.schema.table" if schema and table match
*/
export function tablesMatch(queryTable: ParsedTable, permissionTable: ParsedTable): boolean {
console.info('[tablesMatch] Comparing tables:');
console.info('[tablesMatch] Query table:', JSON.stringify(queryTable));
console.info('[tablesMatch] Permission table:', JSON.stringify(permissionTable));
// Exact table name must match
if (queryTable.table.toLowerCase() !== permissionTable.table.toLowerCase()) {
console.info(
'[tablesMatch] Table names do not match:',
queryTable.table,
'vs',
permissionTable.table
);
return false;
}
// If permission specifies schema, query must match
if (permissionTable.schema && queryTable.schema) {
if (permissionTable.schema.toLowerCase() !== queryTable.schema.toLowerCase()) {
console.info(
'[tablesMatch] Schemas do not match:',
queryTable.schema,
'vs',
permissionTable.schema
);
return false;
}
}
@ -261,12 +227,6 @@ export function tablesMatch(queryTable: ParsedTable, permissionTable: ParsedTabl
// If permission specifies database, query must match
if (permissionTable.database && queryTable.database) {
if (permissionTable.database.toLowerCase() !== queryTable.database.toLowerCase()) {
console.info(
'[tablesMatch] Databases do not match:',
queryTable.database,
'vs',
permissionTable.database
);
return false;
}
}
@ -274,11 +234,9 @@ export function tablesMatch(queryTable: ParsedTable, permissionTable: ParsedTabl
// If permission has schema but query doesn't, it's not a match
// (we require explicit schema matching for security)
if (permissionTable.schema && !queryTable.schema) {
console.info('[tablesMatch] Permission requires schema but query has none');
return false;
}
console.info('[tablesMatch] Tables match!');
return true;
}
@ -299,20 +257,12 @@ export function extractTablesFromYml(ymlContent: string): ParsedTable[] {
const tables: ParsedTable[] = [];
const processedTables = new Set<string>();
console.info('[extractTablesFromYml] Starting YML extraction');
console.info('[extractTablesFromYml] YML content:', `${ymlContent.substring(0, 200)}...`);
try {
// Parse YML content
const parsed = yaml.parse(ymlContent);
console.info(
'[extractTablesFromYml] Parsed YML structure:',
`${JSON.stringify(parsed, null, 2).substring(0, 500)}...`
);
// Check for flat format (top-level name, schema, database)
if (parsed?.name && !parsed?.models && (parsed?.schema || parsed?.database)) {
console.info('[extractTablesFromYml] Found flat format dataset');
const parsedTable: ParsedTable = {
table: parsed.name,
fullName: parsed.name,
@ -334,7 +284,6 @@ export function extractTablesFromYml(ymlContent: string): ParsedTable[] {
}
}
console.info('[extractTablesFromYml] Flat format table:', JSON.stringify(parsedTable));
const key = normalizeTableIdentifier(parsedTable);
if (!processedTables.has(key)) {
processedTables.add(key);
@ -344,13 +293,7 @@ export function extractTablesFromYml(ymlContent: string): ParsedTable[] {
// Look for models array
if (parsed?.models && Array.isArray(parsed.models)) {
console.info(
'[extractTablesFromYml] Found models array with',
parsed.models.length,
'models'
);
for (const model of parsed.models) {
console.info('[extractTablesFromYml] Processing model:', JSON.stringify(model));
// Process models that have name and at least schema or database
if (model.name && (model.schema || model.database)) {
const parsedTable: ParsedTable = {
@ -374,26 +317,17 @@ export function extractTablesFromYml(ymlContent: string): ParsedTable[] {
}
}
console.info('[extractTablesFromYml] Parsed model table:', JSON.stringify(parsedTable));
const key = normalizeTableIdentifier(parsedTable);
if (!processedTables.has(key)) {
processedTables.add(key);
tables.push(parsedTable);
}
} else {
console.warn(
'[extractTablesFromYml] Skipping model without schema/database:',
JSON.stringify(model)
);
}
}
}
} catch (error) {
} catch (_error) {
// If YML parsing fails, return empty array
console.error('[extractTablesFromYml] Failed to parse YML:', error);
}
console.info('[extractTablesFromYml] Total tables extracted:', tables.length);
console.info('[extractTablesFromYml] Extracted tables:', JSON.stringify(tables, null, 2));
return tables;
}