Compare commits

...

25 Commits

Author SHA1 Message Date
Abhishek Kumar Singh
694d9958db improv: integrated origin field extraction and updated tests to check for origin fields 2025-11-18 15:03:24 +05:30
Abhishek Kumar Singh
addee4c0a5 feat: added origin field extractor for ch query 2025-11-18 14:36:03 +05:30
Abhishek Kumar Singh
f10cf7ac04 refactor: code organisation 2025-11-17 16:27:17 +05:30
Abhishek Kumar Singh
b336678639 fix: CH test cases 2025-11-17 15:01:32 +05:30
Abhishek Kumar Singh
c438b3444e refactor: removed GroupBy from FilterResult 2025-11-17 14:34:46 +05:30
Abhishek Kumar Singh
b624414507 feat: extract column origin from subquery and join before searching directly 2025-11-17 13:42:47 +05:30
Abhishek Kumar Singh
bde7963444 feat: implemented extractOriginFromSelectItem which will find the given columnName till the very end to return the origin column with given name 2025-11-17 09:00:18 +05:30
Abhishek Kumar Singh
2df93ff217 feat: extract column origin from query and add in column info 2025-11-16 10:20:38 +05:30
Abhishek Kumar Singh
f496a6ecde improv: updated result for queryfilterextractor to return column with alias 2025-11-16 08:58:33 +05:30
Abhishek Kumar Singh
599e230a72 feat: added NewExtractor function for creating extractor 2025-11-13 13:52:32 +05:30
Abhishek Kumar Singh
9a0e32ff3b refactor: removed redundant non nil checks 2025-11-13 13:41:51 +05:30
Abhishek Kumar Singh
5fe2732698 refactor: removed unused extractFromAnyFunction 2025-11-13 13:20:59 +05:30
Abhishek Kumar Singh
4993a44ecc refactor: removed unused cases + added comments 2025-11-13 12:59:35 +05:30
Abhishek Kumar Singh
ebd575a16b chore: comments + remove usage of seen map in extractGroupFromGroupByClause 2025-11-12 19:26:44 +05:30
Abhishek Kumar Singh
666582337e feat: support for CTE in clickhouse queryfilterextractor 2025-11-12 18:58:30 +05:30
Abhishek Kumar Singh
23512ab05c feat: added support for promql in queryfilterextractor 2025-11-10 20:50:42 +05:30
Abhishek Kumar Singh
1423749529 feat: added filter extractor interface and clickhouse impl with tests 2025-11-10 20:05:39 +05:30
Vikrant Gupta
4437630127 fix(tokenizer): do not retry 401 email_password session request (#9541) 2025-11-10 14:04:16 +00:00
Yunus M
89639b239e feat: convert duration ms to string to be passed to getYAxisFormattedValue (#9539) 2025-11-10 18:03:32 +05:30
Yunus M
785ae9f0bd feat: pass email if username is not set - pylon (#9526) 2025-11-10 17:30:32 +05:30
Abhi kumar
8752022cef fix: updated dashboard panel colors for better contrast ratio (#9500)
* fix: updated dashboard panel colors for better contrast ratio

* chore: preetier fix

* feat: added changes for the tooltip to follow cursor
2025-11-06 17:17:33 +05:30
Aditya Singh
c7e4a9c45d Fix: uplot dense points selection (#9469)
* feat: fix uplot focused series logic selection

* fix: stop propogation only if drilldown enabled

* feat: minor refactor

* feat: minor refactor

* feat: minor refactor

* feat: minor refactor

---------

Co-authored-by: Srikanth Chekuri <srikanth.chekuri92@gmail.com>
2025-11-06 11:14:02 +00:00
primus-bot[bot]
bf92c92204 chore(release): bump to v0.100.1 (#9499)
Co-authored-by: primus-bot[bot] <171087277+primus-bot[bot]@users.noreply.github.com>
2025-11-06 13:22:09 +05:30
Srikanth Chekuri
bd63633be7 fix: do not format for non aggregation columns (#9492) 2025-11-05 19:24:56 +05:30
Nikhil Mantri
1158e1199b Fix: filter with time in span scope condition builder (#9426) 2025-11-05 13:11:36 +05:30
50 changed files with 3324 additions and 229 deletions

View File

@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.100.0
image: signoz/signoz:v0.100.1
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.100.0
image: signoz/signoz:v0.100.1
command:
- --config=/root/config/prometheus.yml
ports:

View File

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.100.0}
image: signoz/signoz:${VERSION:-v0.100.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

View File

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.100.0}
image: signoz/signoz:${VERSION:-v0.100.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

View File

@@ -274,7 +274,7 @@ function App(): JSX.Element {
chat_settings: {
app_id: process.env.PYLON_APP_ID,
email: user.email,
name: user.displayName,
name: user.displayName || user.email,
},
};
}

View File

@@ -86,8 +86,9 @@ const interceptorRejected = async (
if (
response.status === 401 &&
// if the session rotate call errors out with 401 or the delete sessions call returns 401 then we do not retry!
// if the session rotate call or the create session errors out with 401 or the delete sessions call returns 401 then we do not retry!
response.config.url !== '/sessions/rotate' &&
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
)

View File

@@ -17,12 +17,6 @@ export const Card = styled(CardComponent)<CardProps>`
overflow: hidden;
border-radius: 3px;
border: 1px solid var(--bg-slate-500);
background: linear-gradient(
0deg,
rgba(171, 189, 255, 0) 0%,
rgba(171, 189, 255, 0) 100%
),
#0b0c0e;
${({ isDarkMode }): StyledCSS =>
!isDarkMode &&

View File

@@ -88,17 +88,13 @@ function GridTableComponent({
const newValue = { ...val };
Object.keys(val).forEach((k) => {
const unit = getColumnUnit(k, columnUnits);
// Apply formatting if:
// 1. Column has a unit defined, OR
// 2. decimalPrecision is specified (format all values)
const shouldFormat = unit || decimalPrecision !== undefined;
if (shouldFormat) {
if (unit) {
// the check below takes care of not adding units for rows that have n/a or null values
if (val[k] !== 'n/a' && val[k] !== null) {
newValue[k] = getYAxisFormattedValue(
String(val[k]),
unit || 'none',
unit,
decimalPrecision,
);
} else if (val[k] === null) {

View File

@@ -121,6 +121,7 @@ export const tablePanelWidgetQuery = {
stackedBarChart: false,
bucketWidth: 0,
mergeAllActiveQueries: false,
decimalPrecision: 2,
};
export const tablePanelQueryResponse = {

View File

@@ -90,8 +90,9 @@ export function QueryTable({
column: any,
tableColumns: any,
): void => {
e.stopPropagation();
if (isQueryTypeBuilder && enableDrillDown) {
e.stopPropagation();
onClick({ x: e.clientX, y: e.clientY }, { record, column, tableColumns });
}
},

View File

@@ -662,21 +662,23 @@ const generateTableColumns = (
*
* @param columnKey - The column identifier (could be queryName.expression or queryName)
* @param columnUnits - The column units mapping
* @returns The unit string or undefined if not found
* @returns The unit string (none if the unit is set to empty string) or undefined if not found
*/
export const getColumnUnit = (
columnKey: string,
columnUnits: Record<string, string>,
): string | undefined => {
// First try the exact match (new syntax: queryName.expression)
if (columnUnits[columnKey]) {
return columnUnits[columnKey];
if (columnUnits[columnKey] !== undefined) {
return columnUnits[columnKey] || 'none';
}
// Fallback to old syntax: extract queryName from queryName.expression
if (columnKey.includes('.')) {
const queryName = columnKey.split('.')[0];
return columnUnits[queryName];
if (columnUnits[queryName] !== undefined) {
return columnUnits[queryName] || 'none';
}
}
return undefined;

View File

@@ -16,8 +16,20 @@
// https://tobyzerner.github.io/placement.js/dist/index.js
/**
* Positions an element (tooltip/popover) relative to a reference element.
* Automatically flips to the opposite side if there's insufficient space.
*
* @param element - The HTMLElement to position
* @param reference - Reference element/Range or bounding rect
* @param side - Preferred side: 'top', 'bottom', 'left', 'right' (default: 'bottom')
* @param align - Alignment: 'start', 'center', 'end' (default: 'center')
* @param options - Optional bounds for constraining the element
* - bound: Custom boundary rect/element
* - followCursor: { x, y } - If provided, tooltip follows cursor with smart positioning
*/
export const placement = (function () {
const e = {
const AXIS_PROPS = {
size: ['height', 'width'],
clientSize: ['clientHeight', 'clientWidth'],
offsetSize: ['offsetHeight', 'offsetWidth'],
@@ -28,87 +40,241 @@ export const placement = (function () {
marginAfter: ['marginBottom', 'marginRight'],
scrollOffset: ['pageYOffset', 'pageXOffset'],
};
function t(e) {
return { top: e.top, bottom: e.bottom, left: e.left, right: e.right };
}
return function (o, r, f, a, i) {
void 0 === f && (f = 'bottom'),
void 0 === a && (a = 'center'),
void 0 === i && (i = {}),
(r instanceof Element || r instanceof Range) &&
(r = t(r.getBoundingClientRect()));
const n = {
top: r.bottom,
bottom: r.top,
left: r.right,
right: r.left,
...r,
function extractRect(source) {
return {
top: source.top,
bottom: source.bottom,
left: source.left,
right: source.right,
};
const s = {
}
return function (element, reference, side, align, options) {
// Default parameters
void 0 === side && (side = 'bottom');
void 0 === align && (align = 'center');
void 0 === options && (options = {});
// Handle cursor following mode
if (options.followCursor) {
const cursorX = options.followCursor.x;
const cursorY = options.followCursor.y;
const offset = options.followCursor.offset || 10; // Default 10px offset from cursor
element.style.position = 'absolute';
element.style.maxWidth = '';
element.style.maxHeight = '';
const elementWidth = element.offsetWidth;
const elementHeight = element.offsetHeight;
// Use viewport bounds for cursor following (not chart bounds)
const viewportBounds = {
top: 0,
left: 0,
bottom: window.innerHeight,
right: window.innerWidth,
};
// Vertical positioning: follow cursor Y with offset, clamped to viewport
const topPosition = cursorY + offset;
const clampedTop = Math.max(
viewportBounds.top,
Math.min(topPosition, viewportBounds.bottom - elementHeight),
);
element.style.top = `${clampedTop}px`;
element.style.bottom = 'auto';
// Horizontal positioning: auto-detect left or right based on available space
const spaceOnRight = viewportBounds.right - cursorX;
const spaceOnLeft = cursorX - viewportBounds.left;
if (spaceOnRight >= elementWidth + offset) {
// Enough space on the right
element.style.left = `${cursorX + offset}px`;
element.style.right = 'auto';
element.dataset.side = 'right';
} else if (spaceOnLeft >= elementWidth + offset) {
// Not enough space on right, use left
element.style.left = `${cursorX - elementWidth - offset}px`;
element.style.right = 'auto';
element.dataset.side = 'left';
} else if (spaceOnRight > spaceOnLeft) {
// Not enough space on either side, pick the side with more space
const leftPos = cursorX + offset;
const clampedLeft = Math.max(
viewportBounds.left,
Math.min(leftPos, viewportBounds.right - elementWidth),
);
element.style.left = `${clampedLeft}px`;
element.style.right = 'auto';
element.dataset.side = 'right';
} else {
const leftPos = cursorX - elementWidth - offset;
const clampedLeft = Math.max(
viewportBounds.left,
Math.min(leftPos, viewportBounds.right - elementWidth),
);
element.style.left = `${clampedLeft}px`;
element.style.right = 'auto';
element.dataset.side = 'left';
}
element.dataset.align = 'cursor';
return; // Exit early, don't run normal positioning logic
}
// Normalize reference to rect object
(reference instanceof Element || reference instanceof Range) &&
(reference = extractRect(reference.getBoundingClientRect()));
// Create anchor rect with swapped opposite edges for positioning
const anchorRect = {
top: reference.bottom,
bottom: reference.top,
left: reference.right,
right: reference.left,
...reference,
};
// Viewport bounds (can be overridden via options.bound)
const bounds = {
top: 0,
left: 0,
bottom: window.innerHeight,
right: window.innerWidth,
};
i.bound &&
((i.bound instanceof Element || i.bound instanceof Range) &&
(i.bound = t(i.bound.getBoundingClientRect())),
Object.assign(s, i.bound));
const l = getComputedStyle(o);
const m = {};
const b = {};
for (const g in e)
(m[g] = e[g][f === 'top' || f === 'bottom' ? 0 : 1]),
(b[g] = e[g][f === 'top' || f === 'bottom' ? 1 : 0]);
(o.style.position = 'absolute'),
(o.style.maxWidth = ''),
(o.style.maxHeight = '');
const d = parseInt(l[b.marginBefore]);
const c = parseInt(l[b.marginAfter]);
const u = d + c;
const p = s[b.after] - s[b.before] - u;
const h = parseInt(l[b.maxSize]);
(!h || p < h) && (o.style[b.maxSize] = `${p}px`);
const x = parseInt(l[m.marginBefore]) + parseInt(l[m.marginAfter]);
const y = n[m.before] - s[m.before] - x;
const z = s[m.after] - n[m.after] - x;
((f === m.before && o[m.offsetSize] > y) ||
(f === m.after && o[m.offsetSize] > z)) &&
(f = y > z ? m.before : m.after);
const S = f === m.before ? y : z;
const v = parseInt(l[m.maxSize]);
(!v || S < v) && (o.style[m.maxSize] = `${S}px`);
const w = window[m.scrollOffset];
const O = function (e) {
return Math.max(s[m.before], Math.min(e, s[m.after] - o[m.offsetSize] - x));
options.bound &&
((options.bound instanceof Element || options.bound instanceof Range) &&
(options.bound = extractRect(options.bound.getBoundingClientRect())),
Object.assign(bounds, options.bound));
const styles = getComputedStyle(element);
const isVertical = side === 'top' || side === 'bottom';
// Build axis property maps based on orientation
const mainAxis = {}; // Properties for the main positioning axis
const crossAxis = {}; // Properties for the perpendicular axis
for (const prop in AXIS_PROPS) {
mainAxis[prop] = AXIS_PROPS[prop][isVertical ? 0 : 1];
crossAxis[prop] = AXIS_PROPS[prop][isVertical ? 1 : 0];
}
// Reset element positioning
element.style.position = 'absolute';
element.style.maxWidth = '';
element.style.maxHeight = '';
// Cross-axis: calculate and apply max size constraint
const crossMarginBefore = parseInt(styles[crossAxis.marginBefore]);
const crossMarginAfter = parseInt(styles[crossAxis.marginAfter]);
const crossMarginTotal = crossMarginBefore + crossMarginAfter;
const crossAvailableSpace =
bounds[crossAxis.after] - bounds[crossAxis.before] - crossMarginTotal;
const crossMaxSize = parseInt(styles[crossAxis.maxSize]);
(!crossMaxSize || crossAvailableSpace < crossMaxSize) &&
(element.style[crossAxis.maxSize] = `${crossAvailableSpace}px`);
// Main-axis: calculate space on both sides
const mainMarginTotal =
parseInt(styles[mainAxis.marginBefore]) +
parseInt(styles[mainAxis.marginAfter]);
const spaceBefore =
anchorRect[mainAxis.before] - bounds[mainAxis.before] - mainMarginTotal;
const spaceAfter =
bounds[mainAxis.after] - anchorRect[mainAxis.after] - mainMarginTotal;
// Auto-flip to the side with more space if needed
((side === mainAxis.before && element[mainAxis.offsetSize] > spaceBefore) ||
(side === mainAxis.after && element[mainAxis.offsetSize] > spaceAfter)) &&
(side = spaceBefore > spaceAfter ? mainAxis.before : mainAxis.after);
// Apply main-axis max size constraint
const mainAvailableSpace =
side === mainAxis.before ? spaceBefore : spaceAfter;
const mainMaxSize = parseInt(styles[mainAxis.maxSize]);
(!mainMaxSize || mainAvailableSpace < mainMaxSize) &&
(element.style[mainAxis.maxSize] = `${mainAvailableSpace}px`);
// Position on main axis
const mainScrollOffset = window[mainAxis.scrollOffset];
const clampMainPosition = function (pos) {
return Math.max(
bounds[mainAxis.before],
Math.min(
pos,
bounds[mainAxis.after] - element[mainAxis.offsetSize] - mainMarginTotal,
),
);
};
f === m.before
? ((o.style[m.before] = `${w + O(n[m.before] - o[m.offsetSize] - x)}px`),
(o.style[m.after] = 'auto'))
: ((o.style[m.before] = `${w + O(n[m.after])}px`),
(o.style[m.after] = 'auto'));
const B = window[b.scrollOffset];
const I = function (e) {
return Math.max(s[b.before], Math.min(e, s[b.after] - o[b.offsetSize] - u));
side === mainAxis.before
? ((element.style[mainAxis.before] = `${
mainScrollOffset +
clampMainPosition(
anchorRect[mainAxis.before] -
element[mainAxis.offsetSize] -
mainMarginTotal,
)
}px`),
(element.style[mainAxis.after] = 'auto'))
: ((element.style[mainAxis.before] = `${
mainScrollOffset + clampMainPosition(anchorRect[mainAxis.after])
}px`),
(element.style[mainAxis.after] = 'auto'));
// Position on cross axis based on alignment
const crossScrollOffset = window[crossAxis.scrollOffset];
const clampCrossPosition = function (pos) {
return Math.max(
bounds[crossAxis.before],
Math.min(
pos,
bounds[crossAxis.after] - element[crossAxis.offsetSize] - crossMarginTotal,
),
);
};
switch (a) {
switch (align) {
case 'start':
(o.style[b.before] = `${B + I(n[b.before] - d)}px`),
(o.style[b.after] = 'auto');
(element.style[crossAxis.before] = `${
crossScrollOffset +
clampCrossPosition(anchorRect[crossAxis.before] - crossMarginBefore)
}px`),
(element.style[crossAxis.after] = 'auto');
break;
case 'end':
(o.style[b.before] = 'auto'),
(o.style[b.after] = `${
B + I(document.documentElement[b.clientSize] - n[b.after] - c)
(element.style[crossAxis.before] = 'auto'),
(element.style[crossAxis.after] = `${
crossScrollOffset +
clampCrossPosition(
document.documentElement[crossAxis.clientSize] -
anchorRect[crossAxis.after] -
crossMarginAfter,
)
}px`);
break;
default:
var H = n[b.after] - n[b.before];
(o.style[b.before] = `${
B + I(n[b.before] + H / 2 - o[b.offsetSize] / 2 - d)
// 'center'
var crossSize = anchorRect[crossAxis.after] - anchorRect[crossAxis.before];
(element.style[crossAxis.before] = `${
crossScrollOffset +
clampCrossPosition(
anchorRect[crossAxis.before] +
crossSize / 2 -
element[crossAxis.offsetSize] / 2 -
crossMarginBefore,
)
}px`),
(o.style[b.after] = 'auto');
(element.style[crossAxis.after] = 'auto');
}
(o.dataset.side = f), (o.dataset.align = a);
// Store final placement as data attributes
(element.dataset.side = side), (element.dataset.align = align);
};
})();

View File

@@ -3,7 +3,71 @@ import { themeColors } from 'constants/theme';
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
function isSeriesValueValid(seriesValue: number | undefined | null): boolean {
return (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
);
}
// Helper function to resolve the display color for a series: stroke/fill when they are plain strings, else generated from the series label
function resolveSeriesColor(series: uPlot.Series, index: number): string {
let color = '#000000';
if (typeof series.stroke === 'string') {
color = series.stroke;
} else if (typeof series.fill === 'string') {
color = series.fill;
} else {
const seriesLabel = series.label || `Series ${index}`;
const isDarkMode = !document.body.classList.contains('lightMode');
color = generateColor(
seriesLabel,
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);
}
return color;
}
function getPreferredSeriesIndex(
u: uPlot,
timestampIndex: number,
e: MouseEvent,
): number {
const bbox = u.over.getBoundingClientRect();
const top = e.clientY - bbox.top;
// Prefer series explicitly marked as focused
for (let i = 1; i < u.series.length; i++) {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const isSeriesFocused = u.series[i]?._focus === true;
const isSeriesShown = u.series[i].show !== false;
const seriesValue = u.data[i]?.[timestampIndex];
if (isSeriesFocused && isSeriesShown && isSeriesValueValid(seriesValue)) {
return i;
}
}
// Fallback: choose series with Y closest to mouse position
let focusedSeriesIndex = -1;
let closestPixelDiff = Infinity;
for (let i = 1; i < u.series.length; i++) {
const series = u.data[i];
const seriesValue = series?.[timestampIndex];
if (isSeriesValueValid(seriesValue) && u.series[i].show !== false) {
const yPx = u.valToPos(seriesValue as number, 'y');
const diff = Math.abs(yPx - top);
if (diff < closestPixelDiff) {
closestPixelDiff = diff;
focusedSeriesIndex = i;
}
}
}
return focusedSeriesIndex;
}
export const getFocusedSeriesAtPosition = (
e: MouseEvent,
u: uPlot,
@@ -17,74 +81,28 @@ export const getFocusedSeriesAtPosition = (
} | null => {
const bbox = u.over.getBoundingClientRect();
const left = e.clientX - bbox.left;
const top = e.clientY - bbox.top;
const timestampIndex = u.posToIdx(left);
let focusedSeriesIndex = -1;
let closestPixelDiff = Infinity;
// Check all series (skip index 0 which is the x-axis)
for (let i = 1; i < u.data.length; i++) {
const series = u.data[i];
const seriesValue = series[timestampIndex];
if (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
) {
const seriesYPx = u.valToPos(seriesValue, 'y');
const pixelDiff = Math.abs(seriesYPx - top);
if (pixelDiff < closestPixelDiff) {
closestPixelDiff = pixelDiff;
focusedSeriesIndex = i;
}
}
}
// If we found a focused series, return its data
if (focusedSeriesIndex > 0) {
const series = u.series[focusedSeriesIndex];
const seriesValue = u.data[focusedSeriesIndex][timestampIndex];
// Ensure we have a valid value
if (
seriesValue !== undefined &&
seriesValue !== null &&
!Number.isNaN(seriesValue)
) {
// Get color - try series stroke first, then generate based on label
let color = '#000000';
if (typeof series.stroke === 'string') {
color = series.stroke;
} else if (typeof series.fill === 'string') {
color = series.fill;
} else {
// Generate color based on series label (like the tooltip plugin does)
const seriesLabel = series.label || `Series ${focusedSeriesIndex}`;
// Detect theme mode by checking body class
const isDarkMode = !document.body.classList.contains('lightMode');
color = generateColor(
seriesLabel,
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
);
}
const preferredIndex = getPreferredSeriesIndex(u, timestampIndex, e);
if (preferredIndex > 0) {
const series = u.series[preferredIndex];
const seriesValue = u.data[preferredIndex][timestampIndex];
if (isSeriesValueValid(seriesValue)) {
const color = resolveSeriesColor(series, preferredIndex);
return {
seriesIndex: focusedSeriesIndex,
seriesName: series.label || `Series ${focusedSeriesIndex}`,
seriesIndex: preferredIndex,
seriesName: series.label || `Series ${preferredIndex}`,
value: seriesValue as number,
color,
show: series.show !== false,
isFocused: true, // This indicates it's the highlighted/bold one
isFocused: true,
};
}
}
return null;
};
export interface OnClickPluginOpts {
onClick: (
xValue: number,
@@ -137,50 +155,31 @@ function onClickPlugin(opts: OnClickPluginOpts): uPlot.Plugin {
const yValue = u.posToVal(event.offsetY, 'y');
// Get the focused/highlighted series (the one that would be bold in hover)
const focusedSeries = getFocusedSeriesAtPosition(event, u);
const focusedSeriesData = getFocusedSeriesAtPosition(event, u);
let metric = {};
const { series } = u;
const apiResult = opts.apiResponse?.data?.result || [];
const outputMetric = {
queryName: '',
inFocusOrNot: false,
};
// this is to get the metric value of the focused series
if (Array.isArray(series) && series.length > 0) {
series.forEach((item, index) => {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
if (item?.show && item?._focus) {
const { metric: focusedMetric, queryName } = apiResult[index - 1] || [];
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
});
}
if (!outputMetric.queryName) {
// Get the focused series data
const focusedSeriesData = getFocusedSeriesAtPosition(event, u);
// If we found a valid focused series, get its data
if (
focusedSeriesData &&
focusedSeriesData.seriesIndex <= apiResult.length
) {
const { metric: focusedMetric, queryName } =
apiResult[focusedSeriesData.seriesIndex - 1] || [];
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
if (
focusedSeriesData &&
focusedSeriesData.seriesIndex <= apiResult.length
) {
const { metric: focusedMetric, queryName } =
apiResult[focusedSeriesData.seriesIndex - 1] || {};
metric = focusedMetric;
outputMetric.queryName = queryName;
outputMetric.inFocusOrNot = true;
}
// Get the actual data point timestamp from the focused series
let actualDataTimestamp = xValue; // fallback to click position timestamp
if (focusedSeries) {
if (focusedSeriesData) {
// Get the data index from the focused series
const dataIndex = u.posToIdx(event.offsetX);
// Get the actual timestamp from the x-axis data (u.data[0])
@@ -209,7 +208,7 @@ function onClickPlugin(opts: OnClickPluginOpts): uPlot.Plugin {
absoluteMouseX,
absoluteMouseY,
axesData,
focusedSeries,
focusedSeriesData,
);
};
u.over.addEventListener('click', handleClick);

View File

@@ -415,7 +415,11 @@ ToolTipPluginProps): any => {
}
// Clear and set new content in one operation
overlay.replaceChildren(content);
placement(overlay, anchor, 'right', 'start', { bound });
placement(overlay, anchor, 'right', 'start', {
bound,
followCursor: { x: anchor.left, y: anchor.top, offset: 4 },
});
showOverlay();
} else {
hideOverlay();

View File

@@ -16,6 +16,6 @@ export const topTracesTableColumns = [
title: 'STEP TRANSITION DURATION',
dataIndex: 'duration_ms',
key: 'duration_ms',
render: (value: string): string => getYAxisFormattedValue(value, 'ms'),
render: (value: string): string => getYAxisFormattedValue(`${value}`, 'ms'),
},
];

View File

@@ -401,14 +401,14 @@ body {
font-size: 12px;
position: absolute;
margin: 0.5rem;
background: rgba(0, 0, 0);
background: var(--bg-ink-300);
-webkit-font-smoothing: antialiased;
color: #fff;
color: var(--bg-vanilla-100);
z-index: 10000;
// pointer-events: none;
overflow: auto;
max-height: 480px !important;
max-width: 240px !important;
max-width: 300px !important;
border-radius: 5px;
border: 1px solid rgba(255, 255, 255, 0.1);
@@ -571,6 +571,12 @@ body {
}
.lightMode {
#overlay {
color: var(--bg-ink-500);
background: var(--bg-vanilla-100);
border: 1px solid var(--bg-vanilla-300);
}
.ant-dropdown-menu {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);

go.mod
View File

@@ -4,7 +4,7 @@ go 1.24.0
require (
dario.cat/mergo v1.0.1
github.com/AfterShip/clickhouse-sql-parser v0.4.11
github.com/AfterShip/clickhouse-sql-parser v0.4.16
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd

go.sum
View File

@@ -68,6 +68,8 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
github.com/AfterShip/clickhouse-sql-parser v0.4.11 h1:fZMKAjRmgzW44+hEhF6ywi4VjFZQjJ8QrFBbgBsjmF4=
github.com/AfterShip/clickhouse-sql-parser v0.4.11/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/AfterShip/clickhouse-sql-parser v0.4.16 h1:gpl+wXclYUKT0p4+gBq22XeRYWwEoZ9f35vogqMvkLQ=
github.com/AfterShip/clickhouse-sql-parser v0.4.16/go.mod h1:W0Z82wJWkJxz2RVun/RMwxue3g7ut47Xxl+SFqdJGus=
github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=

View File

@@ -0,0 +1,695 @@
package queryfilterextractor
import (
"fmt"
"strings"
clickhouse "github.com/AfterShip/clickhouse-sql-parser/parser"
)
const (
// MetricNameColumn is the column name used for filtering metrics
MetricNameColumn = "metric_name"
)
// ClickHouseFilterExtractor extracts metric names and grouping keys from ClickHouse SQL queries
type ClickHouseFilterExtractor struct{}
// NewClickHouseFilterExtractor creates a new ClickHouse filter extractor
func NewClickHouseFilterExtractor() *ClickHouseFilterExtractor {
return &ClickHouseFilterExtractor{}
}
// Extract parses a ClickHouse query and extracts metric names and grouping keys
func (e *ClickHouseFilterExtractor) Extract(query string) (*FilterResult, error) {
p := clickhouse.NewParser(query)
stmts, err := p.ParseStmts()
if err != nil {
return nil, err
}
result := &FilterResult{MetricNames: []string{}, GroupByColumns: []ColumnInfo{}}
metricNames := make(map[string]bool)
// Track top-level queries for GROUP BY extraction
topLevelQueries := make(map[*clickhouse.SelectQuery]bool)
// Process all statements
for _, stmt := range stmts {
selectQuery, ok := stmt.(*clickhouse.SelectQuery)
if !ok {
continue
}
// Mark as top-level
topLevelQueries[selectQuery] = true
// Walk the AST to extract metrics
clickhouse.Walk(selectQuery, func(node clickhouse.Expr) bool {
e.fillMetricNamesFromExpr(node, metricNames)
return true // Continue traversal
})
}
// Extract GROUP BY from the top-level queries by first building a map of CTEs and
// then recursively extracting the GROUP BY from the CTEs and subqueries.
// Build CTE map for all top-level queries
cteMap := make(map[string]*clickhouse.SelectQuery)
for query := range topLevelQueries {
e.buildCTEMap(query, cteMap)
}
// Extract GROUP BY with aliases and origins from the CTEs and subqueries using a recursive approach
// Use a map to handle duplicates (last ColumnInfo wins across queries)
groupByColumnsMap := make(map[string]ColumnInfo) // column name -> ColumnInfo
visited := make(map[*clickhouse.SelectQuery]bool)
for query := range topLevelQueries {
columns, err := e.extractGroupByColumns(query, cteMap, visited)
if err != nil {
return nil, err
}
for _, col := range columns {
// Last column info wins for duplicate columns across multiple queries
groupByColumnsMap[col.Name] = col
}
}
// Convert sets to slices
for metric := range metricNames {
result.MetricNames = append(result.MetricNames, metric)
}
// Build GroupByColumns from the map
for _, colInfo := range groupByColumnsMap {
result.GroupByColumns = append(result.GroupByColumns, colInfo)
}
return result, nil
}
// ========================================
// Metric Name Extraction
// ========================================
// fillMetricNamesFromExpr extracts metric names from various node types
func (e *ClickHouseFilterExtractor) fillMetricNamesFromExpr(node clickhouse.Expr, metricNames map[string]bool) {
if node == nil {
return
}
switch n := node.(type) {
case *clickhouse.BinaryOperation:
e.fillMetricFromBinaryOp(n, metricNames)
}
}
// fillMetricFromBinaryOp extracts metrics from binary operations
func (e *ClickHouseFilterExtractor) fillMetricFromBinaryOp(op *clickhouse.BinaryOperation, metricNames map[string]bool) {
// Check if left side is metric_name column
leftCol := e.getColumnName(op.LeftExpr)
rightCol := e.getColumnName(op.RightExpr)
// Handle metric_name on left side: metric_name = 'value'
if leftCol == MetricNameColumn {
e.fillMetricWithBinaryOpConditions(op, op.RightExpr, metricNames)
return
}
// Handle metric_name on right side: 'value' = metric_name
if rightCol == MetricNameColumn {
e.fillMetricWithBinaryOpConditions(op, op.LeftExpr, metricNames)
return
}
}
// fillMetricWithBinaryOpConditions extracts metric names from the value side of a binary operation
//
// Supported operators:
// - "=", "==": Extracts literal string values or values from any() function
// - "IN", "GLOBAL IN": Extracts all literal string values from the list
//
// Unsupported operators (can be added later if needed):
// - "!=", "<>", "NOT IN": Negative filters. (e.g., metric_name != 'a')
// - "LIKE", "ILIKE": Pattern matching filters
// - "NOT LIKE", "NOT ILIKE": Negative pattern matching filters
// - "OR", "AND": Boolean operators as the Walk function will automatically traverse both sides
// of OR/AND operations and extract metrics from each branch. (e.g., metric_name='a' OR metric_name='b')
func (e *ClickHouseFilterExtractor) fillMetricWithBinaryOpConditions(op *clickhouse.BinaryOperation, valueExpr clickhouse.Expr, metricNames map[string]bool) {
switch op.Operation {
case "=", "==":
// metric_name = 'value' or metric_name = any(['a', 'b'])
// Skip if value side is a function call (per spec - function-wrapped literals are ignored, CH59)
if fn, ok := valueExpr.(*clickhouse.FunctionExpr); ok {
// Only handle any() function, skip others like lowercase('cpu')
if fn.Name != nil && fn.Name.Name == "any" {
e.extractInValues(valueExpr, metricNames)
}
// Otherwise skip function-wrapped literals per spec
} else if val := e.extractStringLiteral(valueExpr); val != "" {
metricNames[val] = true
}
case "IN", "GLOBAL IN":
// metric_name IN ('a', 'b', 'c')
// GLOBAL IN behaves the same as IN for metric extraction purposes
// Skip if value side is a function call (per spec - function-wrapped literals are ignored, CH59)
if _, ok := valueExpr.(*clickhouse.FunctionExpr); !ok {
e.extractInValues(valueExpr, metricNames)
}
}
}
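// Example: for the filter
//   metric_name IN ('cpu', 'mem') OR metric_name = 'disk'
// Walk traverses both sides of the OR, and the cases above collect
// metricNames = {"cpu": true, "mem": true, "disk": true}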
// extractStringLiteral extracts a string literal value from an expression
func (e *ClickHouseFilterExtractor) extractStringLiteral(expr clickhouse.Expr) string {
switch ex := expr.(type) {
case *clickhouse.StringLiteral:
return ex.Literal
}
return ""
}
// extractInValues extracts values from IN expressions
func (e *ClickHouseFilterExtractor) extractInValues(expr clickhouse.Expr, metricNames map[string]bool) {
// Find all string literals in the expression
strLits := clickhouse.FindAll(expr, func(node clickhouse.Expr) bool {
// metric_name passed in `in` condition will be string literal.
_, ok := node.(*clickhouse.StringLiteral)
return ok
})
for _, strLitNode := range strLits {
if strLit, ok := strLitNode.(*clickhouse.StringLiteral); ok {
// Unquote the string literal
val := e.extractStringLiteral(strLit)
if val != "" {
metricNames[val] = true
}
}
}
}
// ========================================
// GROUP BY Column Extraction
// ========================================
// extractGroupByColumns extracts the GROUP BY columns from a query
// It follows the top-down approach where outer GROUP BY overrides inner GROUP BY in subqueries and CTEs.
// Returns a slice of ColumnInfo with column names, aliases, and origins
func (e *ClickHouseFilterExtractor) extractGroupByColumns(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery, visited map[*clickhouse.SelectQuery]bool) ([]ColumnInfo, error) {
if visited[query] {
return nil, nil
}
// Mark this query as visited to prevent cycles
visited[query] = true
// First, check if this query has its own GROUP BY using direct field access
hasGroupBy := query.GroupBy != nil
// If this query has GROUP BY, use it (outer overrides inner)
if hasGroupBy {
// Extract GROUP BY columns
tempGroupBy := make(map[string]bool)
e.fillGroupsFromGroupByClause(query.GroupBy, tempGroupBy)
// Extract SELECT columns and their aliases from the same query level
selectAliases := e.extractSelectColumns(query)
// Build ColumnInfo array by matching GROUP BY with SELECT aliases and origins
result := []ColumnInfo{}
originVisited := make(map[*clickhouse.SelectQuery]bool)
for groupByCol := range tempGroupBy {
alias := selectAliases[groupByCol] // Will be "" if not in SELECT
// Extract originExpr by tracing back through queries
originExpr := e.extractColumnOrigin(groupByCol, query, cteMap, originVisited)
originField, err := extractCHOriginFieldFromQuery(fmt.Sprintf("SELECT %s", originExpr))
if err != nil {
return nil, err
}
result = append(result, ColumnInfo{
Name: groupByCol,
Alias: alias,
OriginExpr: originExpr,
OriginField: originField,
})
}
return result, nil
}
// If no GROUP BY in this query, follow CTE/subquery references
// It might have grouping inside the CTE/subquery
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
return e.extractGroupByColumns(sourceQuery, cteMap, visited)
}
return nil, nil
}
// fillGroupsFromGroupByClause extracts GROUP BY columns from a specific GroupByClause and fills the map with the column names
func (e *ClickHouseFilterExtractor) fillGroupsFromGroupByClause(groupByClause *clickhouse.GroupByClause, groupBy map[string]bool) {
// Extract GROUP BY expressions properly
// Find only the direct child ColumnExprList, not nested ones
// We use Find instead of FindAll to get only the first (direct child) ColumnExprList
exprListNode, foundList := clickhouse.Find(groupByClause, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.ColumnExprList)
return ok
})
if !foundList {
return
}
// Note: We only extract from the top-level ColumnExprList.Items to avoid extracting nested parts
// This prevents extracting 'timestamp' from 'toDate(timestamp)' - we only get 'toDate(timestamp)'
if exprList, ok := exprListNode.(*clickhouse.ColumnExprList); ok {
// Extract each expression from the list - these are top-level only
if exprList.Items != nil {
for _, item := range exprList.Items {
groupKey := e.extractColumnStrByExpr(item)
if groupKey != "" {
// Strip table alias if present (e.g., "m.region" -> "region")
groupKey = e.stripTableAlias(groupKey)
groupBy[groupKey] = true
}
}
}
}
}
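// Example: GROUP BY toDate(timestamp), m.region fills the map with
// {"toDate(timestamp)": true, "region": true} - the function call is kept
// whole, while the table alias on m.region is stripped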
// extractColumnStrByExpr extracts the complete string representation of different expression types
// Supports:
// - Ident: Simple identifier like "region" or "timestamp"
// - FunctionExpr: Function call like "toDate(timestamp)"
// - ColumnExpr: Column expression like "m.region", "toDate(timestamp)"
// - Other expression types: Return the string representation of the expression
//
// For example:
// - "region" -> "region"
// - "toDate(timestamp)" -> "toDate(timestamp)"
// - "`m.region`" -> "`m.region`"
func (e *ClickHouseFilterExtractor) extractColumnStrByExpr(expr clickhouse.Expr) string {
if expr == nil {
return ""
}
switch ex := expr.(type) {
// Ident is a simple identifier like "region" or "timestamp"
case *clickhouse.Ident:
// Handling for backticks which are native to ClickHouse and used for literal names.
// CH Parser removes the backticks from the identifier, so we need to add them back.
if ex.QuoteType == clickhouse.BackTicks {
return "`" + ex.Name + "`"
}
return ex.Name
// FunctionExpr is a function call like "toDate(timestamp)"
case *clickhouse.FunctionExpr:
// For function expressions, return the complete function call string
return ex.String()
// ColumnExpr is a column expression like "m.region", "toDate(timestamp)"
case *clickhouse.ColumnExpr:
// ColumnExpr wraps another expression - extract the underlying expression
if ex.Expr != nil {
return e.extractColumnStrByExpr(ex.Expr)
}
return ex.String()
default:
// For other expression types, return the string representation
return expr.String()
}
}
// stripTableAlias removes table alias prefix from a column name (e.g., "m.region" -> "region")
// but for backticked literals we need to preserve the entire string, minus the backticks (e.g., `os.type` -> "os.type")
func (e *ClickHouseFilterExtractor) stripTableAlias(name string) string {
// Handling for backticks which are native to ClickHouse and used for literal names.
if strings.HasPrefix(name, "`") && strings.HasSuffix(name, "`") {
return strings.Trim(name, "`")
}
// split the name by dot and return the last part
parts := strings.Split(name, ".")
if len(parts) > 1 {
return parts[len(parts)-1]
}
return name
}
// getColumnName extracts column name from an expression
func (e *ClickHouseFilterExtractor) getColumnName(expr clickhouse.Expr) string {
switch ex := expr.(type) {
case *clickhouse.Ident:
return ex.Name
case *clickhouse.Path:
// Handle Path type for qualified column names like "m.metric_name"
// Extract the last field which is the column name
if len(ex.Fields) > 0 {
return ex.Fields[len(ex.Fields)-1].Name
}
return ""
}
return ""
}
// extractSourceQuery extracts the SelectQuery from FROM expressions
// Handles CTE references, subqueries, and table expressions
// For example: from the query below we'll try to extract the name of the source query,
// which in this case is "aggregated". Once we find it, we return the SelectQuery node
// from the cteMap, which acts as the source for the GROUP BY extraction.
//
// WITH aggregated AS (
// SELECT region as region_alias, sum(value) AS total
// FROM metrics
// WHERE metric_name = 'cpu_usage'
// GROUP BY region
// )
// SELECT * FROM aggregated
func (e *ClickHouseFilterExtractor) extractSourceQuery(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery) *clickhouse.SelectQuery {
if query.From == nil {
return nil
}
// Find the FROM clause and extract the source
fromExprs := clickhouse.FindAll(query.From, func(node clickhouse.Expr) bool {
switch node.(type) {
case *clickhouse.Ident, *clickhouse.SelectQuery:
return true
}
return false
})
for _, fromExpr := range fromExprs {
switch expr := fromExpr.(type) {
case *clickhouse.Ident:
// CTE reference by simple name
if cteQuery, exists := cteMap[expr.Name]; exists {
return cteQuery
}
case *clickhouse.SelectQuery:
// Direct subquery
return expr
}
}
return nil
}
// ========================================
// Column Origin Tracing
// ========================================
// extractColumnOrigin recursively traces a column back to its original expression
// Returns the original expression string (e.g., "JSONExtractString(labels, 'service.name')")
// or the column name itself if it's a direct column reference
func (e *ClickHouseFilterExtractor) extractColumnOrigin(
columnName string,
query *clickhouse.SelectQuery,
cteMap map[string]*clickhouse.SelectQuery,
visited map[*clickhouse.SelectQuery]bool,
) string {
if query == nil {
return columnName
}
// Prevent infinite recursion: skip a query that is already on the current search path
if visited[query] {
return columnName
}
visited[query] = true
// The visited marker only guards against cycles within the current query's search;
// it must not affect the searches for other queries, so we remove the entry
// once the current query's search is done
defer delete(visited, query)
// Step 1: Search in CTE and Joins, this will take us to very end of the SubQueries and CTE
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
returningOrigin := e.extractColumnOrigin(columnName, sourceQuery, cteMap, visited)
if returningOrigin != columnName {
return returningOrigin
}
}
// Step 2: Once we're sure there are no SubQueries and CTE we just find all the selectItem
// and then get their column origin values
selectItems := clickhouse.FindAll(query, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.SelectItem)
return ok
})
// extractOriginFromSelectItem extracts the origin from a SelectItem
extractOriginFromSelectItem := func(selectItem *clickhouse.SelectItem) *string {
// Check if this SelectItem matches our column (by alias or by name)
alias := e.extractSelectItemAlias(selectItem)
exprStr := e.extractSelectItemName(selectItem)
normalizedExpr := e.stripTableAlias(exprStr)
// Case 1: Column matches an alias in SELECT
if alias == columnName {
// This is an alias - get the expression it's aliasing
if selectItem.Expr != nil {
originExpr := e.extractFullExpression(selectItem.Expr)
// If the expression is just a column name, trace it back further
if normalizedExpr == columnName || e.isSimpleColumnReference(selectItem.Expr) {
// It's referencing another column - trace back through source query
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
originExpr := e.extractColumnOrigin(normalizedExpr, sourceQuery, cteMap, visited)
return &originExpr
}
}
return &originExpr
}
}
// Case 2: Column matches the expression itself (no alias)
if normalizedExpr == columnName {
// Check if this is a simple column reference or a complex expression
if e.isSimpleColumnReference(selectItem.Expr) {
// Simple column - trace back through source query
sourceQuery := e.extractSourceQuery(query, cteMap)
if sourceQuery != nil {
originExpr := e.extractColumnOrigin(columnName, sourceQuery, cteMap, visited)
return &originExpr
}
return &columnName
} else {
// Complex expression - return it as origin
originExpr := e.extractFullExpression(selectItem.Expr)
return &originExpr
}
}
return nil
}
var finalColumnOrigin string
for _, itemNode := range selectItems {
if selectItem, ok := itemNode.(*clickhouse.SelectItem); ok {
// We call extractOriginFromSelectItem for each SelectItem and, whenever the
// origin is non-nil, overwrite finalColumnOrigin with it; the last match wins,
// which yields the most deeply nested origin of the column
origin := extractOriginFromSelectItem(selectItem)
if origin != nil {
finalColumnOrigin = *origin
}
}
}
if finalColumnOrigin != "" {
return finalColumnOrigin
}
return columnName
}
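// Example: given
//   SELECT region FROM (SELECT JSONExtractString(labels, 'region') AS region FROM metrics)
// extractColumnOrigin("region", ...) descends into the subquery, matches the
// alias, and returns "JSONExtractString(labels, 'region')"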
// extractFullExpression extracts the complete string representation of an expression
func (e *ClickHouseFilterExtractor) extractFullExpression(expr clickhouse.Expr) string {
if expr == nil {
return ""
}
return expr.String()
}
// isSimpleColumnReference checks if an expression is just a simple column reference
// (not a function call or complex expression)
func (e *ClickHouseFilterExtractor) isSimpleColumnReference(expr clickhouse.Expr) bool {
if expr == nil {
return false
}
switch ex := expr.(type) {
case *clickhouse.Ident:
// Backticked identifiers are treated as non-simple column references so that
// the origin expression is returned with its backticks intact; the origin-field
// parser then handles the backticks and extracts the column name
if ex.QuoteType == clickhouse.BackTicks {
return false
}
return true
case *clickhouse.Path:
return true
case *clickhouse.ColumnExpr:
// Check if it wraps a simple reference
if ex.Expr != nil {
return e.isSimpleColumnReference(ex.Expr)
}
}
return false
}
// ========================================
// SELECT Column Alias Extraction
// ========================================
// extractSelectColumns extracts column names and their aliases from SELECT clause of a specific query
// Returns a map where key is normalized column name and value is the alias
// For duplicate columns with different aliases, the last alias wins
// This follows the same pattern as fillGroupsFromGroupByClause - finding direct children only
func (e *ClickHouseFilterExtractor) extractSelectColumns(query *clickhouse.SelectQuery) map[string]string {
aliasMap := make(map[string]string)
if query == nil {
return aliasMap
}
// Find SelectItem nodes which represent columns in the SELECT clause
// SelectItem has an Expr field (the column/expression) and an Alias field
selectItems := clickhouse.FindAll(query, func(node clickhouse.Expr) bool {
_, ok := node.(*clickhouse.SelectItem)
return ok
})
// Process each SelectItem and extract column name and alias
for _, itemNode := range selectItems {
if selectItem, ok := itemNode.(*clickhouse.SelectItem); ok {
// Extract the column name/expression from SelectItem.Expr
columnName := e.extractSelectItemName(selectItem)
if columnName == "" {
continue
}
// Normalize column name (strip table alias)
normalizedName := e.stripTableAlias(columnName)
// Extract alias from SelectItem.Alias
alias := e.extractSelectItemAlias(selectItem)
// Store in map - last alias wins for duplicates
aliasMap[normalizedName] = alias
}
}
return aliasMap
}
// extractSelectItemName extracts the column name or expression from a SelectItem
func (e *ClickHouseFilterExtractor) extractSelectItemName(selectItem *clickhouse.SelectItem) string {
if selectItem == nil || selectItem.Expr == nil {
return ""
}
return e.extractColumnStrByExpr(selectItem.Expr)
}
// extractSelectItemAlias extracts the alias from a SelectItem
// Returns empty string if no alias is present
func (e *ClickHouseFilterExtractor) extractSelectItemAlias(selectItem *clickhouse.SelectItem) string {
if selectItem == nil || selectItem.Alias == nil {
return ""
}
// The Alias field is an *Ident (pointer type)
if selectItem.Alias.Name != "" {
return selectItem.Alias.Name
}
return ""
}
// ========================================
// CTE and Subquery Extraction
// ========================================
// buildCTEMap builds a map of CTE names to their SelectQuery nodes by recursively
// traversing all queries and their nested expressions
func (e *ClickHouseFilterExtractor) buildCTEMap(query *clickhouse.SelectQuery, cteMap map[string]*clickhouse.SelectQuery) {
if query == nil {
return
}
// Access CTEs directly from WithClause if it exists
if query.With != nil && query.With.CTEs != nil {
for _, cte := range query.With.CTEs {
cteName := e.extractCTEName(cte)
cteQuery := e.extractCTEQuery(cte)
if cteName != "" && cteQuery != nil {
cteMap[cteName] = cteQuery
// Recursively build CTE map for nested CTEs
e.buildCTEMap(cteQuery, cteMap)
}
}
}
// Also check for CTEs in subqueries and other expressions
e.buildCTEMapFromExpr(query, cteMap)
}
// extractCTEName extracts the CTE name from a CTEStmt, the Expr field is the name of the CTE
func (e *ClickHouseFilterExtractor) extractCTEName(cte *clickhouse.CTEStmt) string {
if cte == nil || cte.Expr == nil {
return ""
}
switch name := cte.Expr.(type) {
case *clickhouse.Ident:
return name.Name
default:
return cte.Expr.String()
}
}
// extractCTEQuery extracts the SelectQuery from a CTEStmt, the Alias field is the SelectQuery
func (e *ClickHouseFilterExtractor) extractCTEQuery(cte *clickhouse.CTEStmt) *clickhouse.SelectQuery {
if cte == nil || cte.Alias == nil {
return nil
}
// The Alias field should contain a SelectQuery
if selectQuery, ok := cte.Alias.(*clickhouse.SelectQuery); ok {
return selectQuery
}
return nil
}
// buildCTEMapFromExpr recursively extracts CTEs from various expression types
func (e *ClickHouseFilterExtractor) buildCTEMapFromExpr(expr clickhouse.Expr, cteMap map[string]*clickhouse.SelectQuery) {
if expr == nil {
return
}
// Walk through all nodes to find SelectQuery nodes that might contain CTEs
clickhouse.Walk(expr, func(node clickhouse.Expr) bool {
switch n := node.(type) {
case *clickhouse.SelectQuery:
// Don't process the same query we started with to avoid infinite recursion
if n != expr {
e.buildCTEMap(n, cteMap)
}
case *clickhouse.TableExpr:
if n.Expr != nil {
e.buildCTEMapFromExpr(n.Expr, cteMap)
}
case *clickhouse.JoinTableExpr:
if n.Table != nil {
e.buildCTEMapFromExpr(n.Table, cteMap)
}
}
return true // Continue traversal
})
}
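A minimal usage sketch for the extractor above (illustrative, not part of the diff: the query, the expected values, and the demoExtract name are assumptions; the FilterResult and ColumnInfo fields match the code in this file):

package queryfilterextractor

import "fmt"

// demoExtract shows the intended flow: Extract collects metric names from
// WHERE filters and traces GROUP BY columns back to their origin expressions.
func demoExtract() error {
	e := NewClickHouseFilterExtractor()
	res, err := e.Extract(`
	    WITH agg AS (
	        SELECT JSONExtractString(labels, 'service.name') AS svc, sum(value) AS total
	        FROM metrics
	        WHERE metric_name IN ('cpu_usage', 'mem_usage')
	        GROUP BY svc
	    )
	    SELECT svc, total FROM agg GROUP BY svc`)
	if err != nil {
		return err
	}
	fmt.Println(res.MetricNames) // [cpu_usage mem_usage] (set order not guaranteed)
	for _, col := range res.GroupByColumns {
		// svc traces back to JSONExtractString(labels, 'service.name'),
		// so OriginField should be "service.name"
		fmt.Printf("%s alias=%q origin=%s field=%s\n",
			col.Name, col.Alias, col.OriginExpr, col.OriginField)
	}
	return nil
}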

View File

@@ -0,0 +1,316 @@
package queryfilterextractor
import (
"fmt"
"strings"
"github.com/AfterShip/clickhouse-sql-parser/parser"
)
// excludedFunctions contains functions that should cause origin-field extraction to return an empty string.
// Map key is the function name in lowercase, value is the original function name.
var excludedFunctions = map[string]string{
// Time functions
"now": "now",
"today": "today",
"yesterday": "yesterday",
"todatetime": "toDateTime",
"todatetime64": "toDateTime64",
"todate": "toDate",
"todate32": "toDate32",
"tostartofinterval": "toStartOfInterval",
"tostartofday": "toStartOfDay",
"tostartofweek": "toStartOfWeek",
"tostartofmonth": "toStartOfMonth",
"tostartofquarter": "toStartOfQuarter",
"tostartofyear": "toStartOfYear",
"tostartofhour": "toStartOfHour",
"tostartofminute": "toStartOfMinute",
"tostartofsecond": "toStartOfSecond",
"tostartoffiveminutes": "toStartOfFiveMinutes",
"tostartoftenminutes": "toStartOfTenMinutes",
"tostartoffifteenminutes": "toStartOfFifteenMinutes",
"tointervalsecond": "toIntervalSecond",
"tointervalminute": "toIntervalMinute",
"tointervalhour": "toIntervalHour",
"tointervalday": "toIntervalDay",
"tointervalweek": "toIntervalWeek",
"tointervalmonth": "toIntervalMonth",
"tointervalquarter": "toIntervalQuarter",
"tointervalyear": "toIntervalYear",
"parsedatetime": "parseDateTime",
"parsedatetimebesteffort": "parseDateTimeBestEffort",
// Aggregate functions
"count": "count",
"sum": "sum",
"avg": "avg",
"min": "min",
"max": "max",
"any": "any",
"stddevpop": "stddevPop",
"stddevsamp": "stddevSamp",
"varpop": "varPop",
"varsamp": "varSamp",
"grouparray": "groupArray",
"groupuniqarray": "groupUniqArray",
"quantile": "quantile",
"quantiles": "quantiles",
"quantileexact": "quantileExact",
"quantiletiming": "quantileTiming",
"median": "median",
"uniq": "uniq",
"uniqexact": "uniqExact",
"uniqcombined": "uniqCombined",
"uniqhll12": "uniqHLL12",
"topk": "topK",
"first": "first",
"last": "last",
}
// jsonExtractFunctions contains functions that extract from JSON columns.
// Map key is the function name in lowercase, value is the original function name.
var jsonExtractFunctions = map[string]string{
"jsonextractstring": "JSONExtractString",
"jsonextractint": "JSONExtractInt",
"jsonextractuint": "JSONExtractUInt",
"jsonextractfloat": "JSONExtractFloat",
"jsonextractbool": "JSONExtractBool",
"jsonextract": "JSONExtract",
"jsonextractraw": "JSONExtractRaw",
"jsonextractarrayraw": "JSONExtractArrayRaw",
"jsonextractkeysandvalues": "JSONExtractKeysAndValues",
}
// isFunctionPresentInStore checks if a function name exists in the function store map
func isFunctionPresentInStore(funcName string, funcStore map[string]string) bool {
_, exists := funcStore[strings.ToLower(funcName)]
return exists
}
// isReservedSelectKeyword checks if a keyword is a reserved keyword for the SELECT statement
// We're only including those which can appear in the SELECT statement without being quoted
func isReservedSelectKeyword(keyword string) bool {
return strings.ToUpper(keyword) == parser.KeywordSelect || strings.ToUpper(keyword) == parser.KeywordFrom
}
// extractCHOriginFieldFromQuery extracts the origin field (column name) from a query string
// or fields getting extracted in case of JSON extraction functions.
func extractCHOriginFieldFromQuery(query string) (string, error) {
// Parse the query string
p := parser.NewParser(query)
stmts, err := p.ParseStmts()
if err != nil {
return "", err
}
if len(stmts) == 0 {
return "", fmt.Errorf("no statements found in query")
}
// Get the first statement which should be a SELECT
selectStmt, ok := stmts[0].(*parser.SelectQuery)
if !ok {
return "", fmt.Errorf("first statement is not a SELECT query")
}
// If the query has multiple select items, return an empty string, as we expect exactly one select item
if len(selectStmt.SelectItems) > 1 {
return "", nil
}
if len(selectStmt.SelectItems) == 0 {
return "", fmt.Errorf("SELECT query has no select items")
}
// Extract origin field from the first (and only) select item's expression
return extractOriginFieldFromExpr(selectStmt.SelectItems[0].Expr)
}
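// Example: extractCHOriginFieldFromQuery("SELECT JSONExtractString(labels, 'service.name')")
// returns "service.name", while an excluded wrapper such as
// "SELECT sum(JSONExtractString(labels, 'service.name'))" returns "" with no error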
// extractOriginFieldFromExpr extracts the origin field (column name) from an expression.
// This is the internal helper function that contains the original logic.
func extractOriginFieldFromExpr(expr parser.Expr) (string, error) {
if expr == nil {
return "", fmt.Errorf("expression is nil")
}
// Check if expression contains excluded functions or IF/CASE
hasExcludedExpressions := false
hasReservedKeyword := false
parser.Walk(expr, func(node parser.Expr) bool {
// Exclude reserved keywords, because the parser will happily treat them as identifiers.
// Example: in "SELECT FROM table", "FROM" is a reserved keyword,
// yet the parser still parses it as valid SQL
if ident, ok := node.(*parser.Ident); ok {
if ident.QuoteType == parser.Unquoted && isReservedSelectKeyword(ident.Name) {
hasReservedKeyword = true
return false
}
}
if funcExpr, ok := node.(*parser.FunctionExpr); ok {
if isFunctionPresentInStore(funcExpr.Name.Name, excludedFunctions) {
hasExcludedExpressions = true
return false
}
// Check for nested JSON extraction functions
if isFunctionPresentInStore(funcExpr.Name.Name, jsonExtractFunctions) {
// Check if any argument contains another JSON extraction function
if funcExpr.Params != nil && funcExpr.Params.Items != nil {
for _, arg := range funcExpr.Params.Items.Items {
if containsJSONExtractFunction(arg) {
hasExcludedExpressions = true
return false
}
}
}
}
}
if _, ok := node.(*parser.CaseExpr); ok {
hasExcludedExpressions = true
return false
}
return true
})
// If the expression contains reserved keywords, return error
if hasReservedKeyword {
return "", fmt.Errorf("reserved keyword found in query")
}
// If the expression contains excluded expressions, return empty string
if hasExcludedExpressions {
return "", nil
}
// Extract all column names from the expression
columns := extractColumns(expr)
// If we found exactly one unique column, return it
if len(columns) == 1 {
return columns[0], nil
}
// Multiple columns or no columns - return empty string
return "", nil
}
// containsJSONExtractFunction checks if an expression contains a JSON extraction function
func containsJSONExtractFunction(expr parser.Expr) bool {
if expr == nil {
return false
}
found := false
parser.Walk(expr, func(node parser.Expr) bool {
if funcExpr, ok := node.(*parser.FunctionExpr); ok {
if isFunctionPresentInStore(funcExpr.Name.Name, jsonExtractFunctions) {
found = true
return false
}
}
return true
})
return found
}
// extractColumns recursively extracts all unique column names from an expression.
// Note: String literals are also considered as origin fields and will be included in the result.
func extractColumns(expr parser.Expr) []string {
if expr == nil {
return nil
}
columnMap := make(map[string]bool)
extractColumnsHelper(expr, columnMap)
// Convert map to slice
columns := make([]string, 0, len(columnMap))
for col := range columnMap {
columns = append(columns, col)
}
return columns
}
// extractColumnsHelper is a recursive helper that finds all column references.
// Note: String literals are also considered as origin fields and will be added to the columnMap.
func extractColumnsHelper(expr parser.Expr, columnMap map[string]bool) {
if expr == nil {
return
}
switch n := expr.(type) {
case *parser.Ident:
// Add identifiers as column references
columnMap[n.Name] = true
case *parser.FunctionExpr:
// Special handling for JSON extraction functions
// In case of nested JSON extraction, we return blank values (handled at top level)
if isFunctionPresentInStore(n.Name.Name, jsonExtractFunctions) {
// For JSON functions, the first argument is the source column and the second
// is the JSON path/key being extracted; that second argument is treated as
// the origin field
if n.Params != nil && n.Params.Items != nil && len(n.Params.Items.Items) >= 2 {
secondArg := n.Params.Items.Items[1]
// If the second argument is a string literal, use its value as the origin field
// String literals are considered as origin fields
if strLit, ok := secondArg.(*parser.StringLiteral); ok {
columnMap[strLit.Literal] = true
} else {
// Otherwise, try to extract columns from it
extractColumnsHelper(secondArg, columnMap)
}
}
return
}
// For regular functions, recursively process all arguments
// Don't mark the function name itself as a column
if n.Params != nil && n.Params.Items != nil {
for _, item := range n.Params.Items.Items {
extractColumnsHelper(item, columnMap)
}
}
case *parser.BinaryOperation:
extractColumnsHelper(n.LeftExpr, columnMap)
extractColumnsHelper(n.RightExpr, columnMap)
case *parser.ColumnExpr:
extractColumnsHelper(n.Expr, columnMap)
case *parser.CastExpr:
extractColumnsHelper(n.Expr, columnMap)
case *parser.ParamExprList:
if n.Items != nil {
extractColumnsHelper(n.Items, columnMap)
}
case *parser.ColumnExprList:
for _, item := range n.Items {
extractColumnsHelper(item, columnMap)
}
case *parser.StringLiteral:
// String literals are considered as origin fields
columnMap[n.Literal] = true
// Support for qualified columns like table.column_name
case *parser.Path:
// Extract the last field, which is the column name
if len(n.Fields) > 0 {
extractColumnsHelper(n.Fields[len(n.Fields)-1], columnMap)
}
// Add more cases as needed for other expression types
default:
// For unknown expression types, extract nothing
}
}
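// Illustrative traces of the branches above ("parse" is a hypothetical helper
// standing in for the chparser plumbing; result order is not guaranteed):
//
//	extractColumns(parse(`JSONExtractString(labels, 'service.name')`)) // -> ["service.name"]
//	extractColumns(parse(`concat(first_name, last_name)`))             // -> ["first_name", "last_name"]
//	extractColumns(parse(`upper(lower(trim(column_name)))`))           // -> ["column_name"]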

View File

@@ -0,0 +1,252 @@
package queryfilterextractor
import (
"testing"
)
func TestExtractOriginField(t *testing.T) {
tests := []struct {
name string
query string
expected string
expectError bool
}{
// JSON extraction functions - should return the second argument (JSON path/key) as origin field
{
name: "JSONExtractString simple",
query: `SELECT JSONExtractString(labels, 'service.name')`,
expected: "service.name",
},
{
name: "JSONExtractInt",
query: `SELECT JSONExtractInt(labels, 'status.code')`,
expected: "status.code",
},
{
name: "JSONExtractFloat",
query: `SELECT JSONExtractFloat(labels, 'cpu.usage')`,
expected: "cpu.usage",
},
{
name: "JSONExtractBool",
query: `SELECT JSONExtractBool(labels, 'feature.enabled')`,
expected: "feature.enabled",
},
{
name: "JSONExtractString with function wrapper",
query: `SELECT lower(JSONExtractString(labels, 'user.email'))`,
expected: "user.email",
},
{
name: "Nested JSON extraction",
query: `SELECT JSONExtractInt(JSONExtractRaw(labels, 'meta'), 'status.code')`,
expected: "", // Nested JSON extraction should return blank
},
// Nested functions - should return the single underlying column, unless an excluded function is present
{
name: "Nested time functions with column",
query: `SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60))`,
expected: "", // Contains toStartOfInterval and toDateTime which are excluded
},
{
name: "Division with column",
query: `SELECT unix_milli / 1000`,
expected: "unix_milli",
},
{
name: "Function with single column",
query: `SELECT lower(unix_milli)`,
expected: "unix_milli",
},
{
name: "CAST with single column",
query: `SELECT CAST(unix_milli AS String)`,
expected: "unix_milli",
},
{
name: "intDiv with single column",
query: `SELECT intDiv(unix_milli, 1000)`,
expected: "unix_milli",
},
// Multiple columns - should return blank
{
name: "Multiple columns in coalesce",
query: `SELECT (coalesce(cpu_usage, 0) + coalesce(mem_usage, 0)) / 2`,
expected: "",
},
{
name: "Multiple columns in arithmetic",
query: `SELECT cpu_usage + mem_usage`,
expected: "",
},
{
name: "Multiple columns in function",
query: `SELECT concat(first_name, last_name)`,
expected: "",
},
// IF/CASE conditions - should return blank
{
name: "IF with single column in condition",
query: `SELECT IF(error_count > 0, service, 'healthy')`,
expected: "", // Multiple columns: error_count and service
},
{
name: "IF with JSON and multiple columns",
query: `SELECT if(JSONExtractInt(metadata, 'retry.count') > 3, toLower(JSONExtractString(metadata, 'user.id')), hostname)`,
expected: "", // Multiple columns: metadata and hostname
},
{
name: "String literal should return string",
query: `SELECT 'constant'`,
expected: "constant",
},
// No columns - should return blank
{
name: "Number literal",
query: `SELECT 42`,
expected: "",
},
{
name: "Multiple literals",
query: `SELECT 'constant', 42`,
expected: "",
},
{
name: "Multiple string literals",
query: `SELECT 'constant', '42'`,
expected: "",
},
// Excluded functions - should return blank
{
name: "now() function",
query: `SELECT now()`,
expected: "",
},
{
name: "today() function",
query: `SELECT today()`,
expected: "",
},
{
name: "count aggregate",
query: `SELECT count(user_id)`,
expected: "",
},
{
name: "sum aggregate",
query: `SELECT sum(amount)`,
expected: "",
},
// Single column simple cases
{
name: "Simple column reference",
query: `SELECT user_id`,
expected: "user_id",
},
{
name: "Column with alias",
query: `SELECT user_id AS id`,
expected: "user_id",
},
{
name: "Column in arithmetic with literals (multiplication)",
query: `SELECT unix_milli * 1000`,
expected: "unix_milli",
},
// Edge cases
{
name: "Nested functions with single column deep",
query: `SELECT upper(lower(trim(column_name)))`,
expected: "column_name",
},
// Qualified column names (Path)
{
name: "Column with table prefix",
query: `SELECT table.column_name`,
expected: "column_name", // Path: the final field is extracted as the column name
},
{
name: "Qualified column in function",
query: `SELECT lower(table.column_name)`,
expected: "column_name",
},
{
name: "Qualified column in arithmetic",
query: `SELECT table.column_name * 100`,
expected: "column_name",
},
{
name: "Nested qualified column (schema.table.column)",
query: `SELECT schema.table.column_name`,
expected: "column_name", // Should extract the final column name
},
{
name: "Multiple qualified columns",
query: `SELECT table1.column1 + table2.column2`,
expected: "", // Multiple columns: column1 and column2
},
{
name: "Qualified column with CAST",
query: `SELECT CAST(table.column_name AS String)`,
expected: "column_name",
},
{
name: "Multiple select items - return blank",
query: `SELECT JSONExtractString(labels, 'service.name'), unix_milli / 1000, cpu_usage + mem_usage`,
expected: "",
},
// Error cases
{
name: "Empty query",
query: ``,
expectError: true,
},
{
name: "Invalid SQL syntax",
query: `SELECT FROM table`,
expectError: true,
},
{
name: "Non-SELECT statement (CREATE TABLE)",
query: `CREATE TABLE test (id Int32)`,
expectError: true,
},
{
name: "Non-SELECT statement (INSERT)",
query: `INSERT INTO test VALUES (1)`,
expectError: true,
},
{
name: "Malformed query",
query: `SELECT * FROM`,
expectError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := extractCHOriginFieldFromQuery(tt.query)
if tt.expectError {
if err == nil {
t.Errorf("ExtractOriginField() expected error but got nil, result = %q", result)
}
} else {
if err != nil {
t.Errorf("ExtractOriginField() unexpected error: %v", err)
}
if result != tt.expected {
t.Errorf("ExtractOriginField() = %q, want %q", result, tt.expected)
}
}
})
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,129 @@
package queryfilterextractor
import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
)
// PromQLFilterExtractor extracts metric names and grouping keys from PromQL queries
type PromQLFilterExtractor struct{}
// NewPromQLFilterExtractor creates a new PromQL filter extractor
func NewPromQLFilterExtractor() *PromQLFilterExtractor {
return &PromQLFilterExtractor{}
}
// Extract parses a PromQL query and extracts metric names and grouping keys
func (e *PromQLFilterExtractor) Extract(query string) (*FilterResult, error) {
expr, err := parser.ParseExpr(query)
if err != nil {
return nil, err
}
result := &FilterResult{
MetricNames: []string{},
GroupByColumns: []ColumnInfo{},
}
// Use a visitor to traverse the AST
visitor := &promQLVisitor{
metricNames: make(map[string]bool),
groupBy: make(map[string]bool),
}
// Walk the AST
if err := parser.Walk(visitor, expr, nil); err != nil {
return result, err
}
// Convert sets to slices
for metric := range visitor.metricNames {
result.MetricNames = append(result.MetricNames, metric)
}
for groupKey := range visitor.groupBy {
result.GroupByColumns = append(result.GroupByColumns, ColumnInfo{Name: groupKey})
}
return result, nil
}
// promQLVisitor implements the parser.Visitor interface
type promQLVisitor struct {
metricNames map[string]bool
groupBy map[string]bool
// Track whether we've already captured grouping from the outermost aggregation
hasOutermostGrouping bool
}
func (v *promQLVisitor) Visit(node parser.Node, path []parser.Node) (parser.Visitor, error) {
if node == nil {
return nil, nil
}
switch n := node.(type) {
case *parser.VectorSelector:
v.visitVectorSelector(n)
case *parser.AggregateExpr:
v.visitAggregateExpr(n, path)
case *parser.Call:
// Function calls may contain VectorSelectors, continue traversal
return v, nil
case *parser.BinaryExpr:
// Binary expressions may contain VectorSelectors on both sides
return v, nil
case *parser.SubqueryExpr:
// Subqueries may contain VectorSelectors
return v, nil
case *parser.ParenExpr:
// Parentheses don't change semantics, continue traversal
return v, nil
case *parser.MatrixSelector:
// Matrix selectors wrap VectorSelectors
return v, nil
}
return v, nil
}
func (v *promQLVisitor) visitVectorSelector(vs *parser.VectorSelector) {
// Check if metric name is specified directly
if vs.Name != "" {
v.metricNames[vs.Name] = true
}
// Check for __name__ label matcher
for _, matcher := range vs.LabelMatchers {
if matcher.Name == labels.MetricName {
switch matcher.Type {
case labels.MatchEqual:
v.metricNames[matcher.Value] = true
// Negative and regex matchers do not identify a single metric name, so they are skipped:
// case labels.MatchNotEqual, labels.MatchRegexp, labels.MatchNotRegexp:
}
}
}
}
func (v *promQLVisitor) visitAggregateExpr(ae *parser.AggregateExpr, path []parser.Node) {
// Count how many AggregateExpr nodes are in the path (excluding current node)
// This tells us the nesting level
nestingLevel := 0
for _, p := range path {
if _, ok := p.(*parser.AggregateExpr); ok {
nestingLevel++
}
}
// Only capture grouping from the outermost aggregation (nesting level 0)
if nestingLevel == 0 && !v.hasOutermostGrouping {
// If Without is true, we skip grouping per spec
if !ae.Without && len(ae.Grouping) > 0 {
v.hasOutermostGrouping = true
for _, label := range ae.Grouping {
v.groupBy[label] = true
}
}
}
// Continue traversal to find metrics in the expression
}
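// examplePromQLExtract is an illustrative sketch (not part of this diff) showing
// that only the outermost grouping survives nested aggregations.
func examplePromQLExtract() ([]string, []ColumnInfo, error) {
	res, err := NewPromQLFilterExtractor().Extract(`sum by (region) (max by (pod, region) (cpu_usage_total{env="prod"}))`)
	if err != nil {
		return nil, nil, err
	}
	// res.MetricNames == ["cpu_usage_total"]; res.GroupByColumns == [{Name: "region"}]
	return res.MetricNames, res.GroupByColumns, nil
}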

View File

@@ -0,0 +1,175 @@
package queryfilterextractor
import (
"reflect"
"testing"
)
func TestPromQLFilterExtractor_Extract(t *testing.T) {
extractor := NewPromQLFilterExtractor()
tests := []struct {
name string
query string
wantMetrics []string
wantGroupByColumns []ColumnInfo
wantError bool
}{
{
name: "P1 - Simple vector selector",
query: `http_requests_total{job="api"}`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P2 - Function call",
query: `rate(cpu_usage_seconds_total[5m])`,
wantMetrics: []string{"cpu_usage_seconds_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P3 - Aggregation with by()",
query: `sum by (pod,region) (rate(http_requests_total[5m]))`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{{Name: "pod"}, {Name: "region"}},
},
{
name: "P4 - Aggregation with without()",
query: `sum without (instance) (rate(cpu_usage_total[1m]))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{}, // without() means no grouping keys per spec
},
{
name: "P5 - Invalid: metric name set twice",
query: `sum(rate(http_requests_total{__name__!="http_requests_error_total"}[5m]))`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
wantError: true,
},
{
name: "P6 - Regex negative label",
query: `sum(rate(http_requests_total{status!~"5.."}[5m]))`,
wantMetrics: []string{"http_requests_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P7 - Nested aggregations",
query: `sum by (region) (max by (pod, region) (cpu_usage_total{env="prod"}))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{{Name: "region"}}, // Only outermost grouping
},
{
name: "P7a - Nested aggregation: inner grouping ignored",
query: `sum(max by (pod) (cpu_usage_total{env="prod"}))`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{}, // Inner grouping is ignored when outer has no grouping (nestingLevel != 0 case)
},
{
name: "P8 - Arithmetic expression",
query: `(http_requests_total{job="api"} + http_errors_total{job="api"})`,
wantMetrics: []string{"http_requests_total", "http_errors_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P9 - Mix of positive metric & exclusion label",
query: `sum by (region)(rate(foo{job!="db"}[5m]))`,
wantMetrics: []string{"foo"},
wantGroupByColumns: []ColumnInfo{{Name: "region"}},
},
{
name: "P10 - Function + aggregation",
query: `histogram_quantile(0.9, sum(rate(http_request_duration_seconds_bucket[5m])) by (le))`,
wantMetrics: []string{"http_request_duration_seconds_bucket"},
wantGroupByColumns: []ColumnInfo{{Name: "le"}},
},
{
name: "P11 - Subquery",
query: `sum_over_time(cpu_usage_total[1h:5m])`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P12 - Nested aggregation inside subquery",
query: `max_over_time(sum(rate(cpu_usage_total[5m]))[1h:5m])`,
wantMetrics: []string{"cpu_usage_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P13 - Subquery with multiple metrics",
query: `avg_over_time((foo + bar)[10m:1m])`,
wantMetrics: []string{"foo", "bar"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P14 - Simple meta-metric",
query: `sum by (pod) (up)`,
wantMetrics: []string{"up"},
wantGroupByColumns: []ColumnInfo{{Name: "pod"}},
},
{
name: "P15 - Binary operator unless",
query: `sum(rate(http_requests_total[5m])) unless avg(rate(http_errors_total[5m]))`,
wantMetrics: []string{"http_requests_total", "http_errors_total"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P16 - Vector matching",
query: `sum(rate(foo[5m])) / ignoring(instance) group_left(job) sum(rate(bar[5m]))`,
wantMetrics: []string{"foo", "bar"},
wantGroupByColumns: []ColumnInfo{},
},
{
name: "P17 - Offset modifier with aggregation",
query: `sum by (env)(rate(cpu_usage_seconds_total{job="api"}[5m] offset 1h))`,
wantMetrics: []string{"cpu_usage_seconds_total"},
wantGroupByColumns: []ColumnInfo{{Name: "env"}},
},
{
name: "P18 - Invalid syntax",
query: `sum by ((foo)(bar))(http_requests_total)`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
wantError: true,
},
{
name: "P19 - Literal expression",
query: `2 + 3`,
wantMetrics: []string{},
wantGroupByColumns: []ColumnInfo{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := extractor.Extract(tt.query)
// Check error expectation
if tt.wantError {
if err == nil {
t.Errorf("Extract() expected error but got none, query: %s", tt.query)
}
return
}
if err != nil {
t.Errorf("Extract() unexpected error = %v, query: %s", err, tt.query)
return
}
// Sort for comparison
gotMetrics := sortStrings(result.MetricNames)
wantMetrics := sortStrings(tt.wantMetrics)
if !reflect.DeepEqual(gotMetrics, wantMetrics) {
t.Errorf("Extract() MetricNames = %v, want %v", gotMetrics, wantMetrics)
}
// Test GroupByColumns - need to normalize for comparison (order may vary)
gotGroupByColumns := sortColumnInfo(result.GroupByColumns)
wantGroupByColumns := sortColumnInfo(tt.wantGroupByColumns)
if !reflect.DeepEqual(gotGroupByColumns, wantGroupByColumns) {
t.Errorf("Extract() GroupByColumns = %v, want %v", gotGroupByColumns, wantGroupByColumns)
}
})
}
}

View File

@@ -0,0 +1,42 @@
// Package queryfilterextractor provides utilities for extracting metric names
// and grouping keys.
//
// This is useful for metrics discovery and query analysis.
package queryfilterextractor
import "fmt"
const (
ExtractorCH = "qfe_ch"
ExtractorPromQL = "qfe_promql"
)
// ColumnInfo represents a column in the query
type ColumnInfo struct {
Name string
Alias string
OriginExpr string
OriginField string
}
type FilterResult struct {
// MetricNames are the metrics that are being filtered on
MetricNames []string
// GroupByColumns are the columns that are being grouped by
GroupByColumns []ColumnInfo
}
type FilterExtractor interface {
Extract(query string) (*FilterResult, error)
}
func NewExtractor(extractorType string) (FilterExtractor, error) {
switch extractorType {
case ExtractorCH:
return NewClickHouseFilterExtractor(), nil
case ExtractorPromQL:
return NewPromQLFilterExtractor(), nil
default:
return nil, fmt.Errorf("invalid extractor type: %s", extractorType)
}
}
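// exampleNewExtractor is an illustrative sketch (not part of this diff) of the
// factory in use; ExtractorCH would select the ClickHouse implementation instead.
func exampleNewExtractor(query string) (*FilterResult, error) {
	ex, err := NewExtractor(ExtractorPromQL)
	if err != nil {
		return nil, err
	}
	return ex.Extract(query)
}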

View File

@@ -190,7 +190,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
if aggFunc.FuncCombinator {
// Map the predicate (last argument)
origPred := args[len(args)-1].String()
whereClause, err := PrepareWhereClause(
whereClause, err := PrepareWhereClause(
origPred,
FilterExprVisitorOpts{
Logger: v.logger,
@@ -200,7 +200,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
FullTextColumn: v.fullTextColumn,
JsonBodyPrefix: v.jsonBodyPrefix,
JsonKeyToKey: v.jsonKeyToKey,
},
}, 0, 0,
)
if err != nil {
return err

View File

@@ -45,7 +45,7 @@ func CollisionHandledFinalExpr(
addCondition := func(key *telemetrytypes.TelemetryFieldKey) error {
sb := sqlbuilder.NewSelectBuilder()
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb)
condition, err := cb.ConditionFor(ctx, key, qbtypes.FilterOperatorExists, nil, sb, 0, 0)
if err != nil {
return err
}

View File

@@ -48,6 +48,8 @@ func (b *defaultConditionBuilder) ConditionFor(
op qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
if key.FieldContext != telemetrytypes.FieldContextResource {

View File

@@ -206,7 +206,7 @@ func TestConditionBuilder(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(context.Background(), tc.key, tc.op, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedErr != nil {

View File

@@ -169,7 +169,7 @@ func (b *resourceFilterStatementBuilder[T]) addConditions(
// there is no need for "key" not found error for resource filtering
IgnoreNotFoundKeys: true,
Variables: variables,
})
}, start, end)
if err != nil {
return err

View File

@@ -42,6 +42,8 @@ type filterExpressionVisitor struct {
variables map[string]qbtypes.VariableItem
keysWithWarnings map[string]bool
startNs uint64
endNs uint64
}
type FilterExprVisitorOpts struct {
@@ -58,6 +60,8 @@ type FilterExprVisitorOpts struct {
SkipFunctionCalls bool
IgnoreNotFoundKeys bool
Variables map[string]qbtypes.VariableItem
StartNs uint64
EndNs uint64
}
// newFilterExpressionVisitor creates a new filterExpressionVisitor
@@ -77,6 +81,8 @@ func newFilterExpressionVisitor(opts FilterExprVisitorOpts) *filterExpressionVis
ignoreNotFoundKeys: opts.IgnoreNotFoundKeys,
variables: opts.Variables,
keysWithWarnings: make(map[string]bool),
startNs: opts.StartNs,
endNs: opts.EndNs,
}
}
@@ -87,7 +93,7 @@ type PreparedWhereClause struct {
}
// PrepareWhereClause generates a ClickHouse compatible WHERE clause from the filter query
func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWhereClause, error) {
func PrepareWhereClause(query string, opts FilterExprVisitorOpts, startNs uint64, endNs uint64) (*PreparedWhereClause, error) {
// Setup the ANTLR parsing pipeline
input := antlr.NewInputStream(query)
lexer := grammar.NewFilterQueryLexer(input)
@@ -120,6 +126,8 @@ func PrepareWhereClause(query string, opts FilterExprVisitorOpts) (*PreparedWher
}
tokens.Reset()
opts.StartNs = startNs
opts.EndNs = endNs
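// Note: the explicit parameters take precedence here; any StartNs/EndNs set
// directly on opts by the caller are overwritten before the visitor is built.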
visitor := newFilterExpressionVisitor(opts)
// Handle syntax errors
@@ -311,7 +319,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
// create a full text search condition on the body field
keyText := keyCtx.GetText()
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(keyText), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
@@ -331,7 +339,7 @@ func (v *filterExpressionVisitor) VisitPrimary(ctx *grammar.PrimaryContext) any
v.errors = append(v.errors, fmt.Sprintf("unsupported value type: %s", valCtx.GetText()))
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""
@@ -375,7 +383,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -443,7 +451,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, values, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -475,7 +483,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
if err != nil {
return ""
}
@@ -556,7 +564,7 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder)
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, value, v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
return ""
@@ -635,7 +643,7 @@ func (v *filterExpressionVisitor) VisitFullText(ctx *grammar.FullTextContext) an
v.errors = append(v.errors, "full text search is not supported")
return ""
}
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder)
cond, err := v.conditionBuilder.ConditionFor(context.Background(), v.fullTextColumn, qbtypes.FilterOperatorRegexp, FormatFullTextSearch(text), v.builder, v.startNs, v.endNs)
if err != nil {
v.errors = append(v.errors, fmt.Sprintf("failed to build full text search condition: %s", err.Error()))
return ""

View File

@@ -218,6 +218,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {

View File

@@ -276,7 +276,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {
@@ -331,7 +331,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
var err error
for _, key := range tc.keys {
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if err != nil {
t.Fatalf("Error getting condition for key %s: %v", key.Name, err)
@@ -528,7 +528,7 @@ func TestConditionForJSONBodySearch(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {

View File

@@ -34,7 +34,7 @@ func TestLikeAndILikeWithoutWildcards_Warns(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts)
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)
@@ -71,7 +71,7 @@ func TestLikeAndILikeWithWildcards_NoWarn(t *testing.T) {
for _, expr := range tests {
t.Run(expr, func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(expr, opts)
clause, err := querybuilder.PrepareWhereClause(expr, opts, 0, 0)
require.NoError(t, err)
require.NotNil(t, clause)

View File

@@ -163,7 +163,7 @@ func TestFilterExprLogsBodyJSON(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
if err != nil {

View File

@@ -2387,7 +2387,7 @@ func TestFilterExprLogs(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
if err != nil {
@@ -2506,7 +2506,7 @@ func TestFilterExprLogsConflictNegation(t *testing.T) {
for _, tc := range testCases {
t.Run(fmt.Sprintf("%s: %s", tc.category, limitString(tc.query, 50)), func(t *testing.T) {
clause, err := querybuilder.PrepareWhereClause(tc.query, opts)
clause, err := querybuilder.PrepareWhereClause(tc.query, opts, 0, 0)
if tc.shouldPass {
if err != nil {

View File

@@ -592,7 +592,7 @@ func (b *logQueryStatementBuilder) addFilterCondition(
JsonBodyPrefix: b.jsonBodyPrefix,
JsonKeyToKey: b.jsonKeyToKey,
Variables: variables,
})
}, start, end)
if err != nil {
return nil, err

View File

@@ -25,6 +25,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
switch operator {

View File

@@ -53,7 +53,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {

View File

@@ -978,7 +978,7 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
FieldMapper: t.fm,
ConditionBuilder: t.conditionBuilder,
FieldKeys: keys,
})
}, 0, 0)
if err == nil {
sb.AddWhereClause(whereClause.WhereClause)
} else {
@@ -1002,20 +1002,20 @@ func (t *telemetryMetaStore) getRelatedValues(ctx context.Context, fieldValueSel
// search on attributes
key.FieldContext = telemetrytypes.FieldContextAttribute
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
// search on resource
key.FieldContext = telemetrytypes.FieldContextResource
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err = t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}
key.FieldContext = origContext
} else {
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb)
cond, err := t.conditionBuilder.ConditionFor(ctx, key, qbtypes.FilterOperatorContains, fieldValueSelector.Value, sb, 0, 0)
if err == nil {
conds = append(conds, cond)
}

View File

@@ -148,7 +148,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
})
}, start, end)
if err != nil {
return "", []any{}, err
}
@@ -231,7 +231,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
})
}, start, end)
if err != nil {
return "", nil, err
}
@@ -295,7 +295,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
})
}, start, end)
if err != nil {
return "", nil, err
}

View File

@@ -136,6 +136,8 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
_ uint64,
_ uint64,
) (string, error) {
condition, err := c.conditionFor(ctx, key, operator, value, sb)
if err != nil {

View File

@@ -234,7 +234,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if tc.expectedError != nil {
@@ -289,7 +289,7 @@ func TestConditionForMultipleKeys(t *testing.T) {
t.Run(tc.name, func(t *testing.T) {
var err error
for _, key := range tc.keys {
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &key, tc.operator, tc.value, sb, 0, 0)
sb.Where(cond)
if err != nil {
t.Fatalf("Error getting condition for key %s: %v", key.Name, err)

View File

@@ -348,7 +348,7 @@ func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
FieldKeys: keys,
FullTextColumn: &telemetrytypes.TelemetryFieldKey{Name: "labels"},
Variables: variables,
})
}, start, end)
if err != nil {
return "", nil, err
}

View File

@@ -223,9 +223,11 @@ func (c *conditionBuilder) ConditionFor(
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
startNs uint64,
_ uint64,
) (string, error) {
if c.isSpanScopeField(key.Name) {
return c.buildSpanScopeCondition(key, operator, value)
return c.buildSpanScopeCondition(key, operator, value, startNs)
}
condition, err := c.conditionFor(ctx, key, operator, value, sb)
@@ -257,7 +259,7 @@ func (c *conditionBuilder) isSpanScopeField(name string) bool {
return keyName == SpanSearchScopeRoot || keyName == SpanSearchScopeEntryPoint
}
func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any) (string, error) {
func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.TelemetryFieldKey, operator qbtypes.FilterOperator, value any, startNs uint64) (string, error) {
if operator != qbtypes.FilterOperatorEqual {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "span scope field %s only supports '=' operator", key.Name)
}
@@ -281,6 +283,11 @@ func (c *conditionBuilder) buildSpanScopeCondition(key *telemetrytypes.Telemetry
case SpanSearchScopeRoot:
return "parent_span_id = ''", nil
case SpanSearchScopeEntryPoint:
if startNs > 0 { // only add the time filter when a valid start time is provided
startS := int64(startNs / 1_000_000_000)
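// e.g. startNs = 1761437108000000000 truncates to startS = 1761437108,
// yielding `time >= toDateTime(1761437108)` (see the bounded test case below)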
return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s WHERE time >= toDateTime(%d))) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName, startS), nil
}
return fmt.Sprintf("((name, resource_string_service$$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from %s.%s)) AND parent_span_id != ''",
DBName, TopLevelOperationsTableName), nil
default:

View File

@@ -289,7 +289,7 @@ func TestConditionFor(t *testing.T) {
for _, tc := range testCases {
sb := sqlbuilder.NewSelectBuilder()
t.Run(tc.name, func(t *testing.T) {
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb)
cond, err := conditionBuilder.ConditionFor(ctx, &tc.key, tc.operator, tc.value, sb, 1761437108000000000, 1761458708000000000)
sb.Where(cond)
if tc.expectedError != nil {

View File

@@ -21,26 +21,37 @@ func TestSpanScopeFilterExpression(t *testing.T) {
expression string
expectedCondition string
expectError bool
startNs uint64
}{
{
name: "simple isroot filter",
expression: "isroot = true",
expectedCondition: "parent_span_id = ''",
startNs: 1761437108000000000,
},
{
name: "simple isentrypoint filter",
name: "simple isentrypoint filter (unbounded)",
expression: "isentrypoint = true",
expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
startNs: 0,
},
{
name: "simple isentrypoint filter (bounded)",
expression: "isentrypoint = true",
expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1761437108))) AND parent_span_id != ''",
startNs: 1761437108000000000,
},
{
name: "combined filter with AND",
expression: "isroot = true AND has_error = true",
expectedCondition: "parent_span_id = ''",
startNs: 1761437108000000000,
},
{
name: "combined filter with OR",
expression: "isentrypoint = true OR has_error = true",
expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations)) AND parent_span_id != ''",
expectedCondition: "((name, resource_string_service$$name) GLOBAL IN (SELECT DISTINCT name, serviceName from signoz_traces.distributed_top_level_operations WHERE time >= toDateTime(1761437108))) AND parent_span_id != ''",
startNs: 1761437108000000000,
},
}
@@ -64,13 +75,13 @@ func TestSpanScopeFilterExpression(t *testing.T) {
FieldContext: telemetrytypes.FieldContextSpan,
}}
whereClause, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
whereClause, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: fieldKeys,
Builder: sb,
})
}, tt.startNs, 1761458708000000000)
if tt.expectError {
assert.Error(t, err)
@@ -131,13 +142,13 @@ func TestSpanScopeWithResourceFilter(t *testing.T) {
FieldContext: telemetrytypes.FieldContextResource,
}}
_, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
_, err := querybuilder.PrepareWhereClause(tt.expression, querybuilder.FilterExprVisitorOpts{
Logger: instrumentationtest.New().Logger(),
FieldMapper: fm,
ConditionBuilder: cb,
FieldKeys: fieldKeys,
SkipResourceFilter: false, // This would be set by the statement builder
})
}, 1761437108000000000, 1761458708000000000)
assert.NoError(t, err)
})

View File

@@ -746,7 +746,7 @@ func (b *traceQueryStatementBuilder) addFilterCondition(
FieldKeys: keys,
SkipResourceFilter: true,
Variables: variables,
})
}, start, end)
if err != nil {
return nil, err

View File

@@ -237,7 +237,7 @@ func (b *traceOperatorCTEBuilder) buildQueryCTE(ctx context.Context, queryName s
ConditionBuilder: b.stmtBuilder.cb,
FieldKeys: keys,
SkipResourceFilter: true,
},
}, b.start, b.end,
)
if err != nil {
b.stmtBuilder.logger.ErrorContext(ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)

View File

@@ -31,7 +31,8 @@ type FieldMapper interface {
// ConditionBuilder builds the condition for the filter.
type ConditionBuilder interface {
// ConditionFor returns the condition for the given key, operator and value.
ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error)
// TODO(srikanthccv,nikhilmantri0902): remove startNs, endNs when top_level_operations can be replaced with `is_remote`
ConditionFor(ctx context.Context, key *telemetrytypes.TelemetryFieldKey, operator FilterOperator, value any, sb *sqlbuilder.SelectBuilder, startNs uint64, endNs uint64) (string, error)
}
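// Note: call sites without a meaningful time range pass zeroes (as the updated
// call sites above do); a zero startNs disables the entry-point time filter.
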
type AggExprRewriter interface {