mirror of https://github.com/buster-so/buster.git
Initial commit with UI filters one shot attempt
This commit is contained in:
parent
02cfd47e66
commit
5df005581f
|
@ -12,7 +12,7 @@ const app = new Hono()
|
|||
zValidator('query', MetricDataQuerySchema),
|
||||
async (c) => {
|
||||
const { id } = c.req.valid('param');
|
||||
const { limit, version_number, report_file_id, password } = c.req.valid('query');
|
||||
const { limit, version_number, report_file_id, password, filter_values } = c.req.valid('query');
|
||||
const user = c.get('busterUser');
|
||||
|
||||
const response = await getMetricDataHandler(
|
||||
|
@ -21,7 +21,8 @@ const app = new Hono()
|
|||
limit,
|
||||
version_number,
|
||||
report_file_id,
|
||||
password
|
||||
password,
|
||||
filter_values
|
||||
);
|
||||
|
||||
return c.json(response);
|
||||
|
|
|
@ -38,7 +38,8 @@ export async function getMetricDataHandler(
|
|||
limit = 5000,
|
||||
versionNumber?: number,
|
||||
reportFileId?: string,
|
||||
password?: string
|
||||
password?: string,
|
||||
filterValues?: Record<string, unknown>
|
||||
): Promise<MetricDataResponse> {
|
||||
// Retrieve metric definition from database with data source info
|
||||
const metric = await getMetricWithDataSource({ metricId, versionNumber });
|
||||
|
@ -120,8 +121,13 @@ export async function getMetricDataHandler(
|
|||
// Ensure limit is within bounds
|
||||
const queryLimit = Math.min(Math.max(limit, 1), 5000);
|
||||
|
||||
// Compile SQL with defaults to remove filter tokens
|
||||
const compiledSql = compileSqlWithDefaults(metric.content);
|
||||
console.log('Filter values received:', filterValues);
|
||||
console.log('Metric filters:', metric.content.filters);
|
||||
|
||||
// Compile SQL with user-provided filter values or defaults
|
||||
const compiledSql = compileSqlWithDefaults(metric.content, filterValues);
|
||||
|
||||
console.log('Compiled SQL:', compiledSql);
|
||||
|
||||
// Extract SQL query from metric content (for backwards compatibility if no filters)
|
||||
const sql = compiledSql || extractSqlFromMetricContent(metric.content);
|
||||
|
|
|
@ -13,6 +13,7 @@ import {
|
|||
type DataMetadata,
|
||||
type GetMetricResponse,
|
||||
type Metric,
|
||||
type MetricWithFilters,
|
||||
type MetricYml,
|
||||
} from '@buster/server-shared/metrics';
|
||||
import type { AssetPermissionRole, VerificationStatus } from '@buster/server-shared/share';
|
||||
|
@ -196,6 +197,7 @@ export async function buildMetricResponse(
|
|||
processedData: ProcessedMetricData,
|
||||
userId: string
|
||||
): Promise<GetMetricResponse> {
|
||||
console.log('buildMetricResponse called for metric:', processedData.resolvedName);
|
||||
const {
|
||||
metricFile,
|
||||
resolvedContent,
|
||||
|
@ -271,16 +273,24 @@ export async function buildMetricResponse(
|
|||
public_password: metricFile.publicPassword,
|
||||
workspace_sharing: metricFile.workspaceSharing,
|
||||
workspace_member_count: workspaceMemberCount,
|
||||
filters: resolvedContent.filters,
|
||||
};
|
||||
|
||||
console.log(`buildMetricResponse for ${resolvedName}:`, {
|
||||
hasFilters: !!resolvedContent.filters,
|
||||
filterCount: resolvedContent.filters?.length || 0,
|
||||
filters: resolvedContent.filters,
|
||||
});
|
||||
|
||||
return response;
|
||||
}
|
||||
|
||||
export async function getMetricsInAncestorAssetFromMetricIds(
|
||||
metricIds: string[],
|
||||
user: User
|
||||
): Promise<Record<string, Metric>> {
|
||||
const metricsObj: Record<string, Metric> = {};
|
||||
): Promise<Record<string, MetricWithFilters>> {
|
||||
console.log('getMetricsInAncestorAssetFromMetricIds called with', metricIds.length, 'metrics');
|
||||
const metricsObj: Record<string, MetricWithFilters> = {};
|
||||
|
||||
// Process metrics in chunks of 4 to manage concurrency
|
||||
const results = [];
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
import type { Metric } from '@buster/server-shared/metrics';
|
||||
import type { MetricWithFilters } from '@buster/server-shared/metrics';
|
||||
|
||||
export type BusterMetric = Metric;
|
||||
export type BusterMetric = MetricWithFilters;
|
||||
|
|
|
@ -153,10 +153,12 @@ export const useGetMetricData = <TData = BusterMetricDataExtended>(
|
|||
id = '',
|
||||
versionNumber: versionNumberProp,
|
||||
cacheDataId,
|
||||
filterValues,
|
||||
}: {
|
||||
id: string | undefined;
|
||||
versionNumber: number | 'LATEST' | undefined;
|
||||
cacheDataId?: string;
|
||||
filterValues?: Record<string, unknown>;
|
||||
},
|
||||
params?: Omit<UseQueryOptions<BusterMetricData, ApiError, TData>, 'queryKey' | 'queryFn'>
|
||||
) => {
|
||||
|
@ -177,12 +179,15 @@ export const useGetMetricData = <TData = BusterMetricDataExtended>(
|
|||
const queryFn = async () => {
|
||||
const chosenVersionNumber: number | undefined =
|
||||
versionNumberProp === 'LATEST' ? undefined : versionNumberProp;
|
||||
console.log('Fetching metric data with filter values:', filterValues);
|
||||
const result = await getMetricData({
|
||||
id,
|
||||
version_number: chosenVersionNumber || undefined,
|
||||
password,
|
||||
report_file_id: cacheDataId,
|
||||
filter_values: filterValues,
|
||||
});
|
||||
console.log('Received metric data:', result);
|
||||
const latestVersionNumber = getLatestMetricVersion(id);
|
||||
const isLatest =
|
||||
versionNumberProp === 'LATEST' ||
|
||||
|
@ -195,7 +200,7 @@ export const useGetMetricData = <TData = BusterMetricDataExtended>(
|
|||
};
|
||||
|
||||
return useQuery({
|
||||
...metricsQueryKeys.metricsGetData(id || '', versionNumberProp || 'LATEST'),
|
||||
...metricsQueryKeys.metricsGetData(id || '', versionNumberProp || 'LATEST', cacheDataId, filterValues),
|
||||
queryFn,
|
||||
select: params?.select,
|
||||
...params,
|
||||
|
|
|
@ -39,10 +39,15 @@ export const getMetric = async ({
|
|||
|
||||
export const getMetricData = async ({
|
||||
id,
|
||||
filter_values,
|
||||
...params
|
||||
}: GetMetricDataRequest & GetMetricParams): Promise<MetricDataResponse> => {
|
||||
const queryParams = {
|
||||
...params,
|
||||
...(filter_values && { filter_values: JSON.stringify(filter_values) }),
|
||||
};
|
||||
return mainApiV2
|
||||
.get<MetricDataResponse>(`/metric_files/${id}/data`, { params })
|
||||
.get<MetricDataResponse>(`/metric_files/${id}/data`, { params: queryParams })
|
||||
.then((res) => res.data);
|
||||
};
|
||||
|
||||
|
|
|
@ -29,7 +29,8 @@ export const metricsGetList = (
|
|||
export const metricsGetData = (
|
||||
id: string,
|
||||
version_number: number | 'LATEST',
|
||||
cacheDataId?: string
|
||||
cacheDataId?: string,
|
||||
filterValues?: Record<string, unknown>
|
||||
) =>
|
||||
queryOptions<BusterMetricDataExtended>({
|
||||
queryKey: [
|
||||
|
@ -38,6 +39,7 @@ export const metricsGetData = (
|
|||
id,
|
||||
version_number || 'LATEST',
|
||||
cacheDataId || 'live-data',
|
||||
filterValues || {},
|
||||
] as const,
|
||||
staleTime: 1000 * 60 * 30, // 30 minutes,
|
||||
});
|
||||
|
|
|
@ -0,0 +1,89 @@
|
|||
import type { MetricFilter } from '@buster/server-shared/metrics';
|
||||
import React, { useState } from 'react';
|
||||
import { Input } from '@/components/ui/inputs/Input';
|
||||
import { cn } from '@/lib/utils';
|
||||
|
||||
interface DashboardFiltersProps {
|
||||
commonFilters: MetricFilter[];
|
||||
onFilterValuesChange: (filterValues: Record<string, unknown>) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export const DashboardFilters: React.FC<DashboardFiltersProps> = ({
|
||||
commonFilters,
|
||||
onFilterValuesChange,
|
||||
className,
|
||||
}) => {
|
||||
const [filterValues, setFilterValues] = useState<Record<string, string>>({});
|
||||
|
||||
if (!commonFilters || commonFilters.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const handleFilterChange = (key: string, value: string) => {
|
||||
const newFilterValues = { ...filterValues, [key]: value };
|
||||
setFilterValues(newFilterValues);
|
||||
|
||||
// Convert values to appropriate types
|
||||
const typedValues: Record<string, unknown> = {};
|
||||
Object.entries(newFilterValues).forEach(([filterKey, filterValue]) => {
|
||||
const filter = commonFilters.find((f) => f.key === filterKey);
|
||||
if (!filter || !filterValue) return;
|
||||
|
||||
// Parse values based on filter type
|
||||
if (filter.type === 'number') {
|
||||
typedValues[filterKey] = Number(filterValue);
|
||||
} else if (filter.type === 'string_list' || filter.type === 'number_list') {
|
||||
const listValues = filterValue.split(',').map((v) => v.trim());
|
||||
typedValues[filterKey] =
|
||||
filter.type === 'number_list' ? listValues.map(Number) : listValues;
|
||||
} else if (filter.type === 'boolean') {
|
||||
typedValues[filterKey] = filterValue.toLowerCase() === 'true';
|
||||
} else {
|
||||
typedValues[filterKey] = filterValue;
|
||||
}
|
||||
});
|
||||
|
||||
onFilterValuesChange(typedValues);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={cn('bg-muted/50 border-border flex flex-wrap gap-3 border-b p-4', className)}>
|
||||
<div className="text-sm font-semibold mr-2">Dashboard Filters:</div>
|
||||
{commonFilters.map((filter) => (
|
||||
<div key={filter.key} className="flex items-center gap-2">
|
||||
<label htmlFor={`dashboard-${filter.key}`} className="text-muted-foreground text-sm font-medium">
|
||||
{filter.key}:
|
||||
</label>
|
||||
<Input
|
||||
id={`dashboard-${filter.key}`}
|
||||
type="text"
|
||||
value={filterValues[filter.key] || ''}
|
||||
onChange={(e) => handleFilterChange(filter.key, e.target.value)}
|
||||
placeholder={getPlaceholder(filter)}
|
||||
className="h-8 w-48"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function getPlaceholder(filter: MetricFilter): string {
|
||||
if (filter.type === 'string_list' || filter.type === 'number_list') {
|
||||
return 'value1, value2, value3';
|
||||
}
|
||||
if (filter.type === 'number') {
|
||||
return 'Enter number';
|
||||
}
|
||||
if (filter.type === 'boolean') {
|
||||
return 'true or false';
|
||||
}
|
||||
if (filter.type === 'date' || filter.type === 'timestamp') {
|
||||
return 'YYYY-MM-DD';
|
||||
}
|
||||
if (filter.type === 'daterange' || filter.type === 'timestamp_range') {
|
||||
return 'start, end';
|
||||
}
|
||||
return 'Enter value';
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
export { DashboardFilters } from './DashboardFilters';
|
|
@ -34,7 +34,7 @@ export const DashboardMetricItem: React.FC<DashboardMetricItemBaseProps> = React
|
|||
threshold: 0.25,
|
||||
});
|
||||
const { data: dataLength = 0 } = useGetMetricData(
|
||||
{ id: metricId, versionNumber: metricVersionNumber },
|
||||
{ id: metricId, versionNumber: metricVersionNumber, filterValues: undefined },
|
||||
{ select: useCallback((data: BusterMetricData) => data.data?.length || 0, []) }
|
||||
);
|
||||
const animate = !isDragOverlay && dataLength < 125 && numberOfMetrics <= 30 && animateProp;
|
||||
|
|
|
@ -1,8 +1,10 @@
|
|||
import type { DraggableAttributes, DraggableSyntheticListeners } from '@dnd-kit/core';
|
||||
import isEmpty from 'lodash/isEmpty';
|
||||
import React, { useMemo } from 'react';
|
||||
import React, { useMemo, useState } from 'react';
|
||||
import type { BusterMetric, BusterMetricData } from '@/api/asset_interfaces/metric';
|
||||
import { useGetMetric, useGetMetricData } from '@/api/buster_rest/metrics';
|
||||
import { MetricFilters } from '@/components/features/metrics/MetricFilters';
|
||||
import { useDashboardFilterValues } from '@/controllers/DashboardController/DashboardViewDashboardController/DashboardContentController/DashboardFilterContext';
|
||||
import { useUpdateMetricChart } from '@/context/Metrics/useUpdateMetricChart';
|
||||
import { useSelectedColorPalette } from '@/context/Themes/usePalettes';
|
||||
import { useMemoizedFn } from '@/hooks/useMemoizedFn';
|
||||
|
@ -34,6 +36,7 @@ const stableMetricSelect = ({
|
|||
permission,
|
||||
version_number,
|
||||
versions,
|
||||
filters,
|
||||
}: BusterMetric) => ({
|
||||
name,
|
||||
description,
|
||||
|
@ -42,6 +45,7 @@ const stableMetricSelect = ({
|
|||
chart_config,
|
||||
version_number,
|
||||
versions,
|
||||
filters,
|
||||
});
|
||||
const stableMetricData: BusterMetricData['data'] = [];
|
||||
|
||||
|
@ -65,6 +69,14 @@ export const MetricChartCard = React.memo(
|
|||
},
|
||||
ref
|
||||
) => {
|
||||
const [metricSpecificFilterValues, setMetricSpecificFilterValues] = useState<Record<string, unknown>>({});
|
||||
const { dashboardFilterValues } = useDashboardFilterValues();
|
||||
|
||||
// Merge dashboard-level filters with metric-specific filters
|
||||
const filterValues = useMemo(() => {
|
||||
return { ...dashboardFilterValues, ...metricSpecificFilterValues };
|
||||
}, [dashboardFilterValues, metricSpecificFilterValues]);
|
||||
|
||||
const { data: metric, isFetched: isFetchedMetric } = useGetMetric(
|
||||
{ id: metricId, versionNumber },
|
||||
{ select: stableMetricSelect, enabled: true }
|
||||
|
@ -73,7 +85,7 @@ export const MetricChartCard = React.memo(
|
|||
data: metricData,
|
||||
isFetched: isFetchedMetricData,
|
||||
error: metricDataError,
|
||||
} = useGetMetricData({ id: metricId, versionNumber, cacheDataId });
|
||||
} = useGetMetricData({ id: metricId, versionNumber, cacheDataId, filterValues });
|
||||
|
||||
//data config
|
||||
const loadingData = !isFetchedMetricData;
|
||||
|
@ -118,6 +130,7 @@ export const MetricChartCard = React.memo(
|
|||
metricVersionNumber={versionNumber}
|
||||
/>
|
||||
<div className={'border-border border-b'} />
|
||||
<MetricFilters filters={metric?.filters} onFilterValuesChange={setMetricSpecificFilterValues} />
|
||||
{renderChartContent && (
|
||||
<MetricViewChartContent
|
||||
chartConfig={memoizedChartConfig}
|
||||
|
|
|
@ -0,0 +1,88 @@
|
|||
import type { MetricFilter } from '@buster/server-shared/metrics';
|
||||
import React, { useState } from 'react';
|
||||
import { Input } from '@/components/ui/inputs/Input';
|
||||
import { cn } from '@/lib/utils';
|
||||
|
||||
interface MetricFiltersProps {
|
||||
filters: MetricFilter[] | undefined;
|
||||
onFilterValuesChange: (filterValues: Record<string, unknown>) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export const MetricFilters: React.FC<MetricFiltersProps> = ({
|
||||
filters,
|
||||
onFilterValuesChange,
|
||||
className,
|
||||
}) => {
|
||||
const [filterValues, setFilterValues] = useState<Record<string, string>>({});
|
||||
|
||||
if (!filters || filters.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const handleFilterChange = (key: string, value: string) => {
|
||||
const newFilterValues = { ...filterValues, [key]: value };
|
||||
setFilterValues(newFilterValues);
|
||||
|
||||
// Convert values to appropriate types
|
||||
const typedValues: Record<string, unknown> = {};
|
||||
Object.entries(newFilterValues).forEach(([filterKey, filterValue]) => {
|
||||
const filter = filters.find((f) => f.key === filterKey);
|
||||
if (!filter || !filterValue) return;
|
||||
|
||||
// Parse values based on filter type
|
||||
if (filter.type === 'number') {
|
||||
typedValues[filterKey] = Number(filterValue);
|
||||
} else if (filter.type === 'string_list' || filter.type === 'number_list') {
|
||||
const listValues = filterValue.split(',').map((v) => v.trim());
|
||||
typedValues[filterKey] =
|
||||
filter.type === 'number_list' ? listValues.map(Number) : listValues;
|
||||
} else if (filter.type === 'boolean') {
|
||||
typedValues[filterKey] = filterValue.toLowerCase() === 'true';
|
||||
} else {
|
||||
typedValues[filterKey] = filterValue;
|
||||
}
|
||||
});
|
||||
|
||||
onFilterValuesChange(typedValues);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={cn('bg-muted/50 border-border flex flex-wrap gap-3 border-b p-3', className)}>
|
||||
{filters.map((filter) => (
|
||||
<div key={filter.key} className="flex items-center gap-2">
|
||||
<label htmlFor={filter.key} className="text-muted-foreground text-sm font-medium">
|
||||
{filter.key}:
|
||||
</label>
|
||||
<Input
|
||||
id={filter.key}
|
||||
type="text"
|
||||
value={filterValues[filter.key] || ''}
|
||||
onChange={(e) => handleFilterChange(filter.key, e.target.value)}
|
||||
placeholder={getPlaceholder(filter)}
|
||||
className="h-8 w-48"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function getPlaceholder(filter: MetricFilter): string {
|
||||
if (filter.type === 'string_list' || filter.type === 'number_list') {
|
||||
return 'value1, value2, value3';
|
||||
}
|
||||
if (filter.type === 'number') {
|
||||
return 'Enter number';
|
||||
}
|
||||
if (filter.type === 'boolean') {
|
||||
return 'true or false';
|
||||
}
|
||||
if (filter.type === 'date' || filter.type === 'timestamp') {
|
||||
return 'YYYY-MM-DD';
|
||||
}
|
||||
if (filter.type === 'daterange' || filter.type === 'timestamp_range') {
|
||||
return 'start, end';
|
||||
}
|
||||
return 'Enter value';
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
export { MetricFilters } from './MetricFilters';
|
|
@ -3,11 +3,14 @@ import isEmpty from 'lodash/isEmpty';
|
|||
import React, { useMemo, useState } from 'react';
|
||||
import type { BusterDashboardResponse, BusterMetric } from '@/api/asset_interfaces';
|
||||
import type { useUpdateDashboardConfig } from '@/api/buster_rest/dashboards';
|
||||
import { DashboardFilters } from '@/components/features/dashboards/DashboardFilters';
|
||||
import { BusterResizeableGrid, type BusterResizeableGridRow } from '@/components/ui/grid';
|
||||
import { useMemoizedFn } from '@/hooks/useMemoizedFn';
|
||||
import { DashboardMetricItem } from '../../../../components/features/metrics/DashboardMetricItem';
|
||||
import { DashboardContentControllerProvider } from './DashboardContentControllerContext';
|
||||
import { DashboardEmptyState, DashboardNoContentReadOnly } from './DashboardEmptyState';
|
||||
import { DashboardFilterProvider } from './DashboardFilterContext';
|
||||
import { getCommonFilters } from './helpers/getCommonFilters';
|
||||
import { removeChildrenFromItems } from './helpers';
|
||||
|
||||
const DEFAULT_EMPTY_ROWS: DashboardConfig['rows'] = [];
|
||||
|
@ -31,6 +34,16 @@ export const DashboardContentController: React.FC<{
|
|||
onUpdateDashboardConfig,
|
||||
}) => {
|
||||
const [draggingId, setDraggingId] = useState<string | null>(null);
|
||||
const [dashboardFilterValues, setDashboardFilterValues] = useState<Record<string, unknown>>({});
|
||||
|
||||
const commonFilters = useMemo(() => {
|
||||
const filters = getCommonFilters(metrics);
|
||||
console.log('DashboardContentController - commonFilters:', filters);
|
||||
console.log('DashboardContentController - readOnly:', readOnly);
|
||||
console.log('DashboardContentController - Should show filters?', !readOnly && filters.length > 0);
|
||||
return filters;
|
||||
}, [metrics, readOnly]);
|
||||
|
||||
const dashboardVersionNumber = dashboard?.version_number;
|
||||
const dashboardConfig = dashboard?.config || DEFAULT_EMPTY_CONFIG;
|
||||
const rows = dashboardConfig?.rows || DEFAULT_EMPTY_ROWS;
|
||||
|
@ -107,16 +120,24 @@ export const DashboardContentController: React.FC<{
|
|||
return (
|
||||
<div className="dashboard-content-controller overflow-visible">
|
||||
{hasMetrics && !!dashboardRows.length && !!dashboard ? (
|
||||
<DashboardContentControllerProvider dashboard={dashboard}>
|
||||
<BusterResizeableGrid
|
||||
rows={dashboardRows}
|
||||
readOnly={readOnly}
|
||||
onRowLayoutChange={onRowLayoutChange}
|
||||
onStartDrag={onStartDrag}
|
||||
onEndDrag={onDragEnd}
|
||||
overlayComponent={memoizedOverlayComponent}
|
||||
/>
|
||||
</DashboardContentControllerProvider>
|
||||
<DashboardFilterProvider>
|
||||
<DashboardContentControllerProvider dashboard={dashboard}>
|
||||
{!readOnly && commonFilters.length > 0 && (
|
||||
<DashboardFilters
|
||||
commonFilters={commonFilters}
|
||||
onFilterValuesChange={setDashboardFilterValues}
|
||||
/>
|
||||
)}
|
||||
<BusterResizeableGrid
|
||||
rows={dashboardRows}
|
||||
readOnly={readOnly}
|
||||
onRowLayoutChange={onRowLayoutChange}
|
||||
onStartDrag={onStartDrag}
|
||||
onEndDrag={onDragEnd}
|
||||
overlayComponent={memoizedOverlayComponent}
|
||||
/>
|
||||
</DashboardContentControllerProvider>
|
||||
</DashboardFilterProvider>
|
||||
) : !readOnly ? (
|
||||
<DashboardEmptyState onOpenAddContentModal={onOpenAddContentModal} />
|
||||
) : (
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
import React, { createContext, useContext, useState } from 'react';
|
||||
|
||||
interface DashboardFilterContextValue {
|
||||
dashboardFilterValues: Record<string, unknown>;
|
||||
setDashboardFilterValues: (values: Record<string, unknown>) => void;
|
||||
}
|
||||
|
||||
const DashboardFilterContext = createContext<DashboardFilterContextValue | undefined>(undefined);
|
||||
|
||||
export const DashboardFilterProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
|
||||
const [dashboardFilterValues, setDashboardFilterValues] = useState<Record<string, unknown>>({});
|
||||
|
||||
return (
|
||||
<DashboardFilterContext.Provider value={{ dashboardFilterValues, setDashboardFilterValues }}>
|
||||
{children}
|
||||
</DashboardFilterContext.Provider>
|
||||
);
|
||||
};
|
||||
|
||||
export const useDashboardFilterValues = () => {
|
||||
const context = useContext(DashboardFilterContext);
|
||||
if (!context) {
|
||||
return { dashboardFilterValues: {}, setDashboardFilterValues: () => {} };
|
||||
}
|
||||
return context;
|
||||
};
|
|
@ -0,0 +1,72 @@
|
|||
import type { MetricFilter } from '@buster/server-shared/metrics';
|
||||
import type { BusterMetric } from '@/api/asset_interfaces';
|
||||
|
||||
/**
|
||||
* Finds filters that are common across all metrics in a dashboard
|
||||
* A filter is considered common if it exists in ALL metrics with the same configuration
|
||||
*/
|
||||
export function getCommonFilters(metrics: Record<string, BusterMetric>): MetricFilter[] {
|
||||
const metricsArray = Object.values(metrics);
|
||||
|
||||
console.log('getCommonFilters - Total metrics:', metricsArray.length);
|
||||
metricsArray.forEach((m, i) => {
|
||||
console.log(` Metric ${i} (${m.name}):`, m.filters?.length || 0, 'filters', m.filters);
|
||||
});
|
||||
|
||||
if (metricsArray.length === 0) {
|
||||
console.log('No metrics, returning empty');
|
||||
return [];
|
||||
}
|
||||
|
||||
// Get filters from first metric as baseline
|
||||
const firstMetric = metricsArray[0];
|
||||
if (!firstMetric?.filters || firstMetric.filters.length === 0) {
|
||||
console.log('First metric has no filters');
|
||||
return [];
|
||||
}
|
||||
|
||||
// Find filters that exist in all metrics
|
||||
const commonFilters: MetricFilter[] = [];
|
||||
|
||||
for (const filter of firstMetric.filters) {
|
||||
console.log(`\nChecking filter "${filter.key}"...`);
|
||||
|
||||
// Check if this filter exists in all other metrics
|
||||
const isInAllMetrics = metricsArray.every((metric, idx) => {
|
||||
if (!metric.filters) {
|
||||
console.log(` ✗ Metric ${idx} has no filters`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Find matching filter by key
|
||||
const matchingFilter = metric.filters.find((f) => f.key === filter.key);
|
||||
if (!matchingFilter) {
|
||||
console.log(` ✗ Metric ${idx} (${metric.name}) missing key "${filter.key}"`);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify filter configuration matches (type, mode, column)
|
||||
const typeMatch = matchingFilter.type === filter.type;
|
||||
const modeMatch = matchingFilter.mode === filter.mode;
|
||||
const columnMatch = matchingFilter.column === filter.column;
|
||||
|
||||
if (!typeMatch || !modeMatch || !columnMatch) {
|
||||
console.log(` ✗ Metric ${idx} (${metric.name}) config mismatch:`, {
|
||||
type: typeMatch ? '✓' : `✗ ${matchingFilter.type} vs ${filter.type}`,
|
||||
mode: modeMatch ? '✓' : `✗ ${matchingFilter.mode} vs ${filter.mode}`,
|
||||
column: columnMatch ? '✓' : `✗ ${matchingFilter.column} vs ${filter.column}`,
|
||||
});
|
||||
}
|
||||
|
||||
return typeMatch && modeMatch && columnMatch;
|
||||
});
|
||||
|
||||
if (isInAllMetrics) {
|
||||
console.log(` ✓ "${filter.key}" is COMMON!`);
|
||||
commonFilters.push(filter);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\nFinal common filters:', commonFilters.length, commonFilters);
|
||||
return commonFilters;
|
||||
}
|
|
@ -919,53 +919,72 @@ You can create, update, or modify the following assets, which are automatically
|
|||
<sql_best_practices>
|
||||
- Current SQL Dialect Guidance:
|
||||
{{sql_dialect_guidance}}
|
||||
- Performance: Ensure date/timestamp columns used in `WHERE` or `JOIN` clauses are indexed. Consider functional indexes on `DATE_TRUNC` or `EXTRACT` expressions if filtering/grouping by them frequently
|
||||
- Keep Queries Simple: Strive for simplicity and clarity in your SQL. Adhere as closely as possible to the user's direct request without overcomplicating the logic or making unnecessary assumptions
|
||||
- Default Time Range: If the user does not specify a time range for analysis, default to the last 12 months from the current date. Clearly state this assumption if making it
|
||||
- Avoid Bold Assumptions: Do not make complex or bold assumptions about the user's intent or the underlying data. If the request is highly ambiguous beyond a reasonable time frame assumption, indicate this limitation in your final response
|
||||
- Prioritize Defined Metrics: Before constructing complex custom SQL, check if pre-defined metrics or columns exist in the provided data context that already represent the concept the user is asking for. Prefer using these established definitions
|
||||
- Avoid Static Queries: Do not create static queries where you are hardcoding a value. Non-static queries are always preferred
|
||||
- Instead of doing:
|
||||
- Select 55000 as revenue
|
||||
- Do this instead:
|
||||
- Select sum(sales) as revenue
|
||||
- If you need to display data from a specific point in time, use date filters rather than hardcoded values
|
||||
- Performance: Ensure date/timestamp columns used in `WHERE` or `JOIN` clauses are indexed. Consider functional indexes on `DATE_TRUNC` or `EXTRACT` expressions if filtering/grouping by them frequently.
|
||||
- Keep Queries Simple: Strive for simplicity and clarity in your SQL. Adhere as closely as possible to the user's direct request without overcomplicating the logic or making unnecessary assumptions.
|
||||
- Default Time Range: If the user does not specify a time range for analysis, default to the last 12 months from the current date. Clearly state this assumption if making it.
|
||||
- Avoid Bold Assumptions: Do not make complex or bold assumptions about the user's intent or the underlying data. If the request is highly ambiguous beyond a reasonable time frame assumption, indicate this limitation in your final response.
|
||||
- Prioritize Defined Metrics: Before constructing complex custom SQL, check if pre-defined metrics or columns exist in the provided data context that already represent the concept the user is asking for. Prefer using these established definitions.
|
||||
- Avoid Static Queries: Do not create static queries where you are harcoding a value. Non-static queries are always preferred.
|
||||
- Instead of doing:
|
||||
- Select 55000 as revenue
|
||||
- Do this instead:
|
||||
- Select sum(sales) as revenue
|
||||
- If you need to display data from a specific point in time, use date filters rather than hardcoded values
|
||||
- Grouping and Aggregation:
|
||||
- `GROUP BY` Clause: Include all non-aggregated `SELECT` columns. Using explicit names is clearer than ordinal positions (`GROUP BY 1, 2`)
|
||||
- `HAVING` Clause: Use `HAVING` to filter *after* aggregation (e.g., `HAVING COUNT(*) > 10`). Use `WHERE` to filter *before* aggregation for efficiency
|
||||
- Window Functions: Consider window functions (`OVER (...)`) for calculations relative to the current row (e.g., ranking, running totals) as an alternative/complement to `GROUP BY`
|
||||
- `GROUP BY` Clause: Include all non-aggregated `SELECT` columns. Using explicit names is clearer than ordinal positions (`GROUP BY 1, 2`).
|
||||
- `HAVING` Clause: Use `HAVING` to filter *after* aggregation (e.g., `HAVING COUNT(*) > 10`). Use `WHERE` to filter *before* aggregation for efficiency.
|
||||
- Window Functions: Consider window functions (`OVER (...)`) for calculations relative to the current row (e.g., ranking, running totals) as an alternative/complement to `GROUP BY`.
|
||||
- Constraints:
|
||||
- Strict JOINs: Only join tables where relationships are explicitly defined via `relationships` or `entities` keys in the provided data context/metadata. Do not join tables without a pre-defined relationship
|
||||
- Strict JOINs: Only join tables where relationships are explicitly defined via `relationships` or `entities` keys in the provided data context/metadata. Do not join tables without a pre-defined relationship.
|
||||
- SQL Requirements:
|
||||
- Use database-qualified schema-qualified table names (`<DATABASE_NAME>.<SCHEMA_NAME>.<TABLE_NAME>`)
|
||||
- Use column names qualified with table aliases (e.g., `<table_alias>.<column>`)
|
||||
- MANDATORY SQL NAMING CONVENTIONS:
|
||||
- All Table References: MUST be fully qualified: `DATABASE_NAME.SCHEMA_NAME.TABLE_NAME`
|
||||
- All Column References: MUST be qualified with their table alias (e.g., `c.customerid`) or CTE name (e.g., `cte_alias.column_name_from_cte`)
|
||||
- Inside CTE Definitions: When defining a CTE (e.g., `WITH my_cte AS (SELECT c.customerid FROM DATABASE.SCHEMA.TABLE1 c ...)`), all columns selected from underlying database tables MUST use their table alias (e.g., `c.customerid`, not just `customerid`). This applies even if the CTE is simple and selects from only one table
|
||||
- Selecting From CTEs: When selecting from a defined CTE, use the CTE's alias for its columns (e.g., `SELECT mc.column_name FROM my_cte mc ...`)
|
||||
- Universal Application: These naming conventions are strict requirements and apply universally to all parts of the SQL query, including every CTE definition and every subsequent SELECT statement. Non-compliance will lead to errors
|
||||
- Context Adherence: Strictly use only columns that are present in the data context provided by search results. Never invent or assume columns
|
||||
- Select specific columns (avoid `SELECT *` or `COUNT(*)`)
|
||||
- Use CTEs instead of subqueries, and use snake_case for naming them
|
||||
- Use `DISTINCT` (not `DISTINCT ON`) with matching `GROUP BY`/`SORT BY` clauses
|
||||
- Show entity names rather than just IDs:
|
||||
- When identifying products, people, categories etc (really, any entity) in a visualization - show entity names rather than IDs in all visualizations
|
||||
- e.g. a "Sales by Product" visualization should use/display "Product Name" instead of "Product ID"
|
||||
- Handle date conversions appropriately
|
||||
- Order dates in ascending order
|
||||
- Reference database identifiers for cross-database queries
|
||||
- Format output for the specified visualization type
|
||||
- Maintain a consistent data structure across requests unless changes are required
|
||||
- Use explicit ordering for custom buckets or categories
|
||||
- Avoid division by zero errors by using NULLIF() or CASE statements (e.g., `SELECT amount / NULLIF(quantity, 0)` or `CASE WHEN quantity = 0 THEN NULL ELSE amount / quantity END`)
|
||||
- Generate SQL queries using only native SQL constructs, such as CURRENT_DATE, that can be directly executed in a SQL environment without requiring prepared statements, parameterized queries, or string formatting like {{variable}}
|
||||
- You are not able to build interactive dashboards and metrics that allow users to change the filters, you can only build static metrics that can be saved to reports
|
||||
- Consider potential data duplication and apply deduplication techniques (e.g., `DISTINCT`, `GROUP BY`) where necessary
|
||||
- Fill Missing Values: For metrics, especially in time series, fill potentially missing values (NULLs) using appropriate null-handling functions to default them to zero, ensuring continuous data unless the user specifically requests otherwise
|
||||
- Handle Missing Time Periods: When creating time series visualizations, ensure ALL requested time periods are represented, even when no underlying data exists for certain periods. This is critical for avoiding confusing gaps in charts and tables. Refer to the SQL dialect-specific guidance for the appropriate method to generate complete date ranges for your database
|
||||
- Use database-qualified schema-qualified table names (`<DATABASE_NAME>.<SCHEMA_NAME>.<TABLE_NAME>`).
|
||||
- Use column names qualified with table aliases (e.g., `<table_alias>.<column>`).
|
||||
- MANDATORY SQL NAMING CONVENTIONS:
|
||||
- All Table References: MUST be fully qualified: `DATABASE_NAME.SCHEMA_NAME.TABLE_NAME`.
|
||||
- All Column References: MUST be qualified with their table alias (e.g., `c.customerid`) or CTE name (e.g., `cte_alias.column_name_from_cte`).
|
||||
- Inside CTE Definitions: When defining a CTE (e.g., `WITH my_cte AS (SELECT c.customerid FROM DATABASE.SCHEMA.TABLE1 c ...)`), all columns selected from underlying database tables MUST use their table alias (e.g., `c.customerid`, not just `customerid`). This applies even if the CTE is simple and selects from only one table.
|
||||
- Selecting From CTEs: When selecting from a defined CTE, use the CTE's alias for its columns (e.g., `SELECT mc.column_name FROM my_cte mc ...`).
|
||||
- Universal Application: These naming conventions are strict requirements and apply universally to all parts of the SQL query, including every CTE definition and every subsequent SELECT statement. Non-compliance will lead to errors.
|
||||
- Context Adherence: Strictly use only columns that are present in the data context provided by search results. Never invent or assume columns.
|
||||
- Select specific columns (avoid `SELECT *` or `COUNT(*)`).
|
||||
- Use CTEs instead of subqueries, and use snake_case for naming them.
|
||||
- Use `DISTINCT` (not `DISTINCT ON`) with matching `GROUP BY`/`SORT BY` clauses.
|
||||
- Show entity names rather than just IDs.
|
||||
- Handle date conversions appropriately.
|
||||
- Order dates in ascending order.
|
||||
- Reference database identifiers for cross-database queries.
|
||||
- Format output for the specified visualization type.
|
||||
- Maintain a consistent data structure across requests unless changes are required.
|
||||
- Use explicit ordering for custom buckets or categories.
|
||||
- Avoid division by zero errors by using NULLIF() or CASE statements (e.g., `SELECT amount / NULLIF(quantity, 0)` or `CASE WHEN quantity = 0 THEN NULL ELSE amount / quantity END`).
|
||||
- Generate SQL using native constructs and brace tokens (`{{key}}`) that the runtime replaces with parameterized fragments. Never embed raw literal values or ad-hoc string interpolation (for example `${value}` templating or manual string concatenation) — only the sanctioned `{{key}}` filter tokens are replaced safely.
|
||||
- When defaults are needed (for example standard date ranges or limits), set the `default` property on the corresponding filter instead of hardcoding literals in SQL.
|
||||
- Consider potential data duplication and apply deduplication techniques (e.g., `DISTINCT`, `GROUP BY`) where necessary.
|
||||
- Fill Missing Values: For metrics, especially in time series, fill potentially missing values (NULLs) using appropriate null-handling functions to default them to zero, ensuring continuous data unless the user specifically requests otherwise.
|
||||
- Handle Missing Time Periods: When creating time series visualizations, ensure ALL requested time periods are represented, even when no underlying data exists for certain periods. This is critical for avoiding confusing gaps in charts and tables. Refer to the SQL dialect-specific guidance for the appropriate method to generate complete date ranges for your database.
|
||||
</sql_best_practices>
|
||||
|
||||
<sql_filter_templating_rules>
|
||||
- All metric SQL must support dynamic filters using explicit brace tokens placed exactly where predicates or expressions belong.
|
||||
- 1:1 relationship:
|
||||
- Each filter key maps to a single SQL token `{{key}}`. Reuse the same key when the same predicate must appear in multiple clauses.
|
||||
- Filter column specification:
|
||||
- The `column` field in filter definitions can use qualified column names with table aliases (e.g., `o.order_date`, `c.customer_id`, `orders.status`)
|
||||
- Always use the appropriate table alias or table name prefix when referencing columns in filters to avoid ambiguity
|
||||
- Never concatenate filter values into SQL. Emit fragment tokens that expect bound parameters supplied by the renderer.
|
||||
- Half-open ranges are mandatory for date/timestamp filters (`>= :start AND < :end`) to preserve index and partition pruning.
|
||||
- Avoid nullable-parameter OR patterns unless explicitly requested (for example, `null_behavior: no_op`). Prefer omitting predicates when filters are not provided.
|
||||
- Token placement checklist:
|
||||
- Place predicates exactly where needed: inside `WHERE`, `JOIN ON`, window `PARTITION BY`, `QUALIFY`, `HAVING`, `ORDER BY`, or `LIMIT` clauses.
|
||||
- LIST filters should expand according to warehouse dialect:
|
||||
- Postgres: `= ANY(:param)`
|
||||
- BigQuery: `IN UNNEST(@param)`
|
||||
- Snowflake: `IN (SELECT value::variant FROM TABLE(FLATTEN(input => PARSE_JSON(:param))))`
|
||||
- Window partition tokens should render the comma-separated list (with required trailing comma/space) or nothing.
|
||||
- LIMIT tokens should expand to a single bound parameter (no leading `AND`), for example `LIMIT :limit_rows_limit`.
|
||||
- You should never say `where {{filter}}` or `and {{filter}}`.
|
||||
</sql_filter_templating_rules>
|
||||
|
||||
<handling_follow_up_user_requests>
|
||||
- Carefully examine the previous messages, thoughts, and results
|
||||
- Determine if the user is asking for a modification, a new analysis based on previous results, or a completely unrelated task
|
||||
|
|
|
@ -718,53 +718,72 @@ When in doubt, be more thorough rather than less. Reports are the default becaus
|
|||
<sql_best_practices>
|
||||
- Current SQL Dialect Guidance:
|
||||
{{sql_dialect_guidance}}
|
||||
- Performance: Ensure date/timestamp columns used in `WHERE` or `JOIN` clauses are indexed. Consider functional indexes on `DATE_TRUNC` or `EXTRACT` expressions if filtering/grouping by them frequently
|
||||
- Keep Queries Simple: Strive for simplicity and clarity in your SQL. Adhere as closely as possible to the user's direct request without overcomplicating the logic or making unnecessary assumptions
|
||||
- Default Time Range: If the user does not specify a time range for analysis, default to the last 12 months from the current date. Clearly state this assumption if making it
|
||||
- Avoid Bold Assumptions: Do not make complex or bold assumptions about the user's intent or the underlying data. If the request is highly ambiguous beyond a reasonable time frame assumption, indicate this limitation in your final response
|
||||
- Prioritize Defined Metrics: Before constructing complex custom SQL, check if pre-defined metrics or columns exist in the provided data context that already represent the concept the user is asking for. Prefer using these established definitions
|
||||
- Avoid Static Queries: Do not create static queries where you are hardcoding a value. Non-static queries are always preferred
|
||||
- Instead of doing:
|
||||
- Select 55000 as revenue
|
||||
- Do this instead:
|
||||
- Select sum(sales) as revenue
|
||||
- If you need to display data from a specific point in time, use date filters rather than hardcoded values
|
||||
- Performance: Ensure date/timestamp columns used in `WHERE` or `JOIN` clauses are indexed. Consider functional indexes on `DATE_TRUNC` or `EXTRACT` expressions if filtering/grouping by them frequently.
|
||||
- Keep Queries Simple: Strive for simplicity and clarity in your SQL. Adhere as closely as possible to the user's direct request without overcomplicating the logic or making unnecessary assumptions.
|
||||
- Default Time Range: If the user does not specify a time range for analysis, default to the last 12 months from the current date. Clearly state this assumption if making it.
|
||||
- Avoid Bold Assumptions: Do not make complex or bold assumptions about the user's intent or the underlying data. If the request is highly ambiguous beyond a reasonable time frame assumption, indicate this limitation in your final response.
|
||||
- Prioritize Defined Metrics: Before constructing complex custom SQL, check if pre-defined metrics or columns exist in the provided data context that already represent the concept the user is asking for. Prefer using these established definitions.
|
||||
- Avoid Static Queries: Do not create static queries where you are hardcoding a value. Non-static queries are always preferred.
|
||||
- Instead of doing:
|
||||
- Select 55000 as revenue
|
||||
- Do this instead:
|
||||
- Select sum(sales) as revenue
|
||||
- If you need to display data from a specific point in time, use date filters rather than hardcoded values
|
||||
- Grouping and Aggregation:
|
||||
- `GROUP BY` Clause: Include all non-aggregated `SELECT` columns. Using explicit names is clearer than ordinal positions (`GROUP BY 1, 2`)
|
||||
- `HAVING` Clause: Use `HAVING` to filter *after* aggregation (e.g., `HAVING COUNT(*) > 10`). Use `WHERE` to filter *before* aggregation for efficiency
|
||||
- Window Functions: Consider window functions (`OVER (...)`) for calculations relative to the current row (e.g., ranking, running totals) as an alternative/complement to `GROUP BY`
|
||||
- `GROUP BY` Clause: Include all non-aggregated `SELECT` columns. Using explicit names is clearer than ordinal positions (`GROUP BY 1, 2`).
|
||||
- `HAVING` Clause: Use `HAVING` to filter *after* aggregation (e.g., `HAVING COUNT(*) > 10`). Use `WHERE` to filter *before* aggregation for efficiency.
|
||||
- Window Functions: Consider window functions (`OVER (...)`) for calculations relative to the current row (e.g., ranking, running totals) as an alternative/complement to `GROUP BY`.
|
||||
- Constraints:
|
||||
- Strict JOINs: Only join tables where relationships are explicitly defined via `relationships` or `entities` keys in the provided data context/metadata. Do not join tables without a pre-defined relationship
|
||||
- Strict JOINs: Only join tables where relationships are explicitly defined via `relationships` or `entities` keys in the provided data context/metadata. Do not join tables without a pre-defined relationship.
|
||||
- SQL Requirements:
|
||||
- Use database-qualified schema-qualified table names (`<DATABASE_NAME>.<SCHEMA_NAME>.<TABLE_NAME>`)
|
||||
- Use column names qualified with table aliases (e.g., `<table_alias>.<column>`)
|
||||
- MANDATORY SQL NAMING CONVENTIONS:
|
||||
- All Table References: MUST be fully qualified: `DATABASE_NAME.SCHEMA_NAME.TABLE_NAME`
|
||||
- All Column References: MUST be qualified with their table alias (e.g., `c.customerid`) or CTE name (e.g., `cte_alias.column_name_from_cte`)
|
||||
- Inside CTE Definitions: When defining a CTE (e.g., `WITH my_cte AS (SELECT c.customerid FROM DATABASE.SCHEMA.TABLE1 c ...)`), all columns selected from underlying database tables MUST use their table alias (e.g., `c.customerid`, not just `customerid`). This applies even if the CTE is simple and selects from only one table
|
||||
- Selecting From CTEs: When selecting from a defined CTE, use the CTE's alias for its columns (e.g., `SELECT mc.column_name FROM my_cte mc ...`)
|
||||
- Universal Application: These naming conventions are strict requirements and apply universally to all parts of the SQL query, including every CTE definition and every subsequent SELECT statement. Non-compliance will lead to errors
|
||||
- Context Adherence: Strictly use only columns that are present in the data context provided by search results. Never invent or assume columns
|
||||
- Select specific columns (avoid `SELECT *` or `COUNT(*)`)
|
||||
- Use CTEs instead of subqueries, and use snake_case for naming them
|
||||
- Use `DISTINCT` (not `DISTINCT ON`) with matching `GROUP BY`/`SORT BY` clauses
|
||||
- Show entity names rather than just IDs:
|
||||
- When identifying products, people, categories etc (really, any entity) in a visualization - show entity names rather than IDs in all visualizations
|
||||
- e.g. a "Sales by Product" visualization should use/display "Product Name" instead of "Product ID"
|
||||
- Handle date conversions appropriately
|
||||
- Order dates in ascending order
|
||||
- Reference database identifiers for cross-database queries
|
||||
- Format output for the specified visualization type
|
||||
- Maintain a consistent data structure across requests unless changes are required
|
||||
- Use explicit ordering for custom buckets or categories
|
||||
- Avoid division by zero errors by using NULLIF() or CASE statements (e.g., `SELECT amount / NULLIF(quantity, 0)` or `CASE WHEN quantity = 0 THEN NULL ELSE amount / quantity END`)
|
||||
- Generate SQL queries using only native SQL constructs, such as CURRENT_DATE, that can be directly executed in a SQL environment without requiring prepared statements, parameterized queries, or string formatting like {{variable}}
|
||||
- You are not able to build interactive dashboards and metrics that allow users to change the filters, you can only build static dashboards and metrics
|
||||
- Consider potential data duplication and apply deduplication techniques (e.g., `DISTINCT`, `GROUP BY`) where necessary
|
||||
- Fill Missing Values: For metrics, especially in time series, fill potentially missing values (NULLs) using appropriate null-handling functions to default them to zero, ensuring continuous data unless the user specifically requests otherwise
|
||||
- Handle Missing Time Periods: When creating time series visualizations, ensure ALL requested time periods are represented, even when no underlying data exists for certain periods. This is critical for avoiding confusing gaps in charts and tables. Refer to the SQL dialect-specific guidance for the appropriate method to generate complete date ranges for your database
|
||||
- Use database-qualified schema-qualified table names (`<DATABASE_NAME>.<SCHEMA_NAME>.<TABLE_NAME>`).
|
||||
- Use column names qualified with table aliases (e.g., `<table_alias>.<column>`).
|
||||
- MANDATORY SQL NAMING CONVENTIONS:
|
||||
- All Table References: MUST be fully qualified: `DATABASE_NAME.SCHEMA_NAME.TABLE_NAME`.
|
||||
- All Column References: MUST be qualified with their table alias (e.g., `c.customerid`) or CTE name (e.g., `cte_alias.column_name_from_cte`).
|
||||
- Inside CTE Definitions: When defining a CTE (e.g., `WITH my_cte AS (SELECT c.customerid FROM DATABASE.SCHEMA.TABLE1 c ...)`), all columns selected from underlying database tables MUST use their table alias (e.g., `c.customerid`, not just `customerid`). This applies even if the CTE is simple and selects from only one table.
|
||||
- Selecting From CTEs: When selecting from a defined CTE, use the CTE's alias for its columns (e.g., `SELECT mc.column_name FROM my_cte mc ...`).
|
||||
- Universal Application: These naming conventions are strict requirements and apply universally to all parts of the SQL query, including every CTE definition and every subsequent SELECT statement. Non-compliance will lead to errors.
|
||||
- Context Adherence: Strictly use only columns that are present in the data context provided by search results. Never invent or assume columns.
|
||||
- Select specific columns (avoid `SELECT *` or `COUNT(*)`).
|
||||
- Use CTEs instead of subqueries, and use snake_case for naming them.
|
||||
- Use `DISTINCT` (not `DISTINCT ON`) with matching `GROUP BY`/`SORT BY` clauses.
|
||||
- Show entity names rather than just IDs.
|
||||
- Handle date conversions appropriately.
|
||||
- Order dates in ascending order.
|
||||
- Reference database identifiers for cross-database queries.
|
||||
- Format output for the specified visualization type.
|
||||
- Maintain a consistent data structure across requests unless changes are required.
|
||||
- Use explicit ordering for custom buckets or categories.
|
||||
- Avoid division by zero errors by using NULLIF() or CASE statements (e.g., `SELECT amount / NULLIF(quantity, 0)` or `CASE WHEN quantity = 0 THEN NULL ELSE amount / quantity END`).
|
||||
- Generate SQL using native constructs and brace tokens (`{{key}}`) that the runtime replaces with parameterized fragments. Never embed raw literal values or ad-hoc string interpolation (for example `${value}` templating or manual string concatenation) — only the sanctioned `{{key}}` filter tokens are replaced safely.
|
||||
- When defaults are needed (for example standard date ranges or limits), set the `default` property on the corresponding filter instead of hardcoding literals in SQL.
|
||||
- Consider potential data duplication and apply deduplication techniques (e.g., `DISTINCT`, `GROUP BY`) where necessary.
|
||||
- Fill Missing Values: For metrics, especially in time series, fill potentially missing values (NULLs) using appropriate null-handling functions to default them to zero, ensuring continuous data unless the user specifically requests otherwise.
|
||||
- Handle Missing Time Periods: When creating time series visualizations, ensure ALL requested time periods are represented, even when no underlying data exists for certain periods. This is critical for avoiding confusing gaps in charts and tables. Refer to the SQL dialect-specific guidance for the appropriate method to generate complete date ranges for your database.
|
||||
</sql_best_practices>
|
||||
|
||||
<sql_filter_templating_rules>
|
||||
- All metric SQL must support dynamic filters using explicit brace tokens placed exactly where predicates or expressions belong.
|
||||
- 1:1 relationship:
|
||||
- Each filter key maps to a single SQL token `{{key}}`. Reuse the same key when the same predicate must appear in multiple clauses.
|
||||
- Filter column specification:
|
||||
- The `column` field in filter definitions can use qualified column names with table aliases (e.g., `o.order_date`, `c.customer_id`, `orders.status`)
|
||||
- Always use the appropriate table alias or table name prefix when referencing columns in filters to avoid ambiguity
|
||||
- Never concatenate filter values into SQL. Emit fragment tokens that expect bound parameters supplied by the renderer.
|
||||
- Half-open ranges are mandatory for date/timestamp filters (`>= :start AND < :end`) to preserve index and partition pruning.
|
||||
- Avoid nullable-parameter OR patterns unless explicitly requested (for example, `null_behavior: no_op`). Prefer omitting predicates when filters are not provided.
|
||||
- Token placement checklist:
|
||||
- Place predicates exactly where needed: inside `WHERE`, `JOIN ON`, window `PARTITION BY`, `QUALIFY`, `HAVING`, `ORDER BY`, or `LIMIT` clauses.
|
||||
- LIST filters should expand according to warehouse dialect:
|
||||
- Postgres: `= ANY(:param)`
|
||||
- BigQuery: `IN UNNEST(@param)`
|
||||
- Snowflake: `IN (SELECT value::variant FROM TABLE(FLATTEN(input => PARSE_JSON(:param))))`
|
||||
- Window partition tokens should render the comma-separated list (with required trailing comma/space) or nothing.
|
||||
- LIMIT tokens should expand to a single bound parameter (no leading `AND`), for example `LIMIT :limit_rows_limit`.
|
||||
- You should never say `where {{filter}}` or `and {{filter}}`.
|
||||
</sql_filter_templating_rules>
|
||||
|
||||
</best_practices>
|
||||
|
||||
<visualization_and_charting_guidelines>
|
||||
|
|
|
@ -15,7 +15,7 @@ const LIST_TYPES = new Set<MetricFilter['type']>(['string_list', 'number_list'])
|
|||
|
||||
const RANGE_TYPES = new Set<MetricFilter['type']>(['daterange', 'timestamp_range']);
|
||||
|
||||
export function compileSqlWithDefaults(metric: MetricContent): string {
|
||||
export function compileSqlWithDefaults(metric: MetricContent, filterValues?: Record<string, unknown>): string {
|
||||
const filters = metric.filters ?? [];
|
||||
|
||||
const filterMap = new Map<string, MetricFilter>();
|
||||
|
@ -49,20 +49,23 @@ export function compileSqlWithDefaults(metric: MetricContent): string {
|
|||
}
|
||||
usedFilters.add(key);
|
||||
|
||||
const shouldApply = shouldApplyDefault(filter, filter.default);
|
||||
// Use user-provided value if available, otherwise use default
|
||||
const value = filterValues && key in filterValues ? filterValues[key] : filter.default;
|
||||
|
||||
const shouldApply = shouldApplyDefault(filter, value);
|
||||
if (!shouldApply) {
|
||||
if (filter.required) {
|
||||
throw new Error(
|
||||
`Filter '${filter.key}' is required but no default value was provided for validation.`
|
||||
`Filter '${filter.key}' is required but no value was provided.`
|
||||
);
|
||||
}
|
||||
// Remove token entirely when no default is supplied.
|
||||
// Remove token entirely when no value is supplied.
|
||||
lastIndex = tokenEnd;
|
||||
continue;
|
||||
}
|
||||
|
||||
const indent = detectIndentation(sql, tokenStart);
|
||||
const fragment = renderFragment(filter, filter.default);
|
||||
const fragment = renderFragment(filter, value);
|
||||
const formattedFragment = fragment.trim().length === 0 ? '' : applyIndent(fragment, indent);
|
||||
compiledSql += formattedFragment;
|
||||
lastIndex = tokenEnd;
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
main
|
|
@ -0,0 +1 @@
|
|||
v2.45.5
|
|
@ -108,6 +108,10 @@ const MetricFilterSchema = z
|
|||
}
|
||||
});
|
||||
|
||||
export const MetricSchemaWithFilters = MetricSchema.extend({
|
||||
filters: z.array(MetricFilterSchema).optional(),
|
||||
});
|
||||
|
||||
export const MetricYmlSchema = z.object({
|
||||
name: z.string(),
|
||||
description: z.string(),
|
||||
|
@ -123,4 +127,6 @@ export type MetricFilter = z.infer<typeof MetricFilterSchema>;
|
|||
|
||||
export type Metric = z.infer<typeof MetricSchema>;
|
||||
|
||||
export type MetricWithFilters = z.infer<typeof MetricSchemaWithFilters>;
|
||||
|
||||
export const DEFAULT_METRIC: Required<Metric> = getDefaults(MetricSchema);
|
||||
|
|
|
@ -14,6 +14,7 @@ export const GetMetricQuerySchema = z.object({
|
|||
export const GetMetricDataRequestSchema = GetMetricQuerySchema.extend({
|
||||
limit: z.number().min(1).max(5000).default(5000).optional(),
|
||||
report_file_id: z.string().uuid('Report file ID must be a valid UUID').optional(),
|
||||
filter_values: z.record(z.unknown()).optional(),
|
||||
});
|
||||
|
||||
export const GetMetricListRequestSchema = z.object({
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
import { z } from 'zod';
|
||||
import { DataMetadataSchema, DataResultSchema } from './metadata.type';
|
||||
import { MetricSchema } from './metric.types';
|
||||
import { MetricSchema, MetricSchemaWithFilters } from './metric.types';
|
||||
import { MetricListItemSchema } from './metrics-list.types';
|
||||
|
||||
export const GetMetricResponseSchema = MetricSchema;
|
||||
export const GetMetricResponseSchema = MetricSchemaWithFilters;
|
||||
export const ListMetricsResponseSchema = z.array(MetricListItemSchema);
|
||||
export const UpdateMetricResponseSchema = MetricSchema;
|
||||
export const DuplicateMetricResponseSchema = MetricSchema;
|
||||
|
@ -55,6 +55,14 @@ export const MetricDataQuerySchema = z.object({
|
|||
version_number: z.coerce.number().int().min(1).optional(),
|
||||
report_file_id: z.string().uuid().optional(),
|
||||
password: z.string().min(1).optional(),
|
||||
filter_values: z.string().transform((val) => {
|
||||
if (!val) return undefined;
|
||||
try {
|
||||
return JSON.parse(val) as Record<string, unknown>;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}).optional(),
|
||||
});
|
||||
|
||||
export type MetricDataQuery = z.infer<typeof MetricDataQuerySchema>;
|
||||
|
|
Loading…
Reference in New Issue