Compare commits
218 Commits
issue-5676...demo/trace
SHA1:
45d27304e1, 535eed828d, 6b9ada2a1e, d85c4cf9bd, 298647cf79, 254d174962, ec2c9f3d0a, ab26d6d3b2, 3275af484b, f2525fb293,
41716e16a2, c6dcdb8ba8, b6b3b5d6a6, 1a770f3b98, 28129fbcaf, dcba183872, ef0785fa69, 4a2ea9907c, 2419927f02, d2c94a82d6,
824576e176, f97001df91, 448a2533bb, b69583d017, f02ffeb4ff, d22211443d, b2cb00d993, 0896ed9da9, d900076a77, 56b153adc2,
4ae881f1e2, 439889400b, d33d4b2a2e, 7f3cf5e3c2, a28d94e790, b25d38e246, 4ce41aa586, c9e7a19cc8, b59c5af060, 0cdb0253cd,
33f05d0745, 13aa670972, 1185a981c3, 9a3feea008, 14c1e522fa, 3a349d096a, dfcbb40b62, 7cf0d841ea, 020b6c79d3, 2b67faa794,
55509ad5c4, e7ab38e947, 711b85f607, 8c19228f87, 1632ad0396, e1c14a1dab, 81e9b70842, 45923f9a9c, 82f81879c1, 363fdfd646,
31a917820c, 8fc9c09914, a1ca15fc81, 8357716c0a, ea54aae57a, 7ae2ca503f, a2f9eccc8b, e355c944c8, a805bdb637, fcfaf152b2,
3ad600c4df, 7e3d17ce5f, bc888539e0, 688867b708, 23948e72eb, d0e5f6b478, 8c75ba298a, bc217a2aa3, 6c0b5abbc0, 0234829492,
fba946bf78, 55f96ca95f, 20a87db5bc, ca774fe6a2, 451c4bdeb7, dcd0de35a4, 00829423bd, 585fadb867, 4a1e786f4e, 0454a92b80,
2d6d342ef0, fa06bac37b, deb821617b, f8b2bda431, 6d95095d2f, 28a2ed4273, 0fee724730, 6f99d54a50, 8dd3130701, 8eaa609076,
ac3c98b112, c0b96ed103, 608d1565c0, e665d7c352, a5f9273743, 7a79a16300, c39f48a41e, d492d00976, 4c0d2f0e6f, 02126b65b1,
5235f65d9a, 43457eedc0, 40c6458b31, f70f238b84, 43d0cee5b5, 33e7d852df, 5e968ec202, bbad7dca3e, 7ce278778f, f09b79e04f,
1c72861290, 9116c02e1c, 5d3254eeeb, 44a3fbfdd6, 0dd41a07bd, 6f8de8da4c, a5f57db0c7, 83f46aeff6, 7372bf0291, ee78805888,
f6547210b2, 7206bb82fe, a1ad2b7835, 4cb70ec07e, 0469233063, f3621e14bf, fd035d885e, c516825e41, 188ff014d1, a2ab97a347,
da7cdec01f, 7c1ca7544d, 1b0dcb86b5, cb49bc795b, 3f1aeb3077, cc2a905e0b, eba024fc5d, 561ec8fd40, aa1dfc6eb1, 3248012716,
4ce56ebab4, bb80d69819, 49aaecd02c, 98f4e840cd, 74824e7853, b574fee2d4, 675b66a7b9, f55aeb5b5a, ae3806ce64, 9c489ebc84,
f6d432cfce, 6ca6f615b0, 36e7820edd, f51cce844b, b2d3d61b44, 4e2c7c6309, 885045d704, 9dc2e82ce1, 19e60ee688, ea89714cb4,
4be618bcde, 2bfecce3cb, eefbcbd1eb, a3f366ee36, cff547c303, d6287cba52, 44b09fbef2, 081eb64893, 6338af55dd, 5450b92650,
a9179321e1, 90366975d8, 33f47993d3, 9170846111, 54baa9d76d, 0ed6aac74e, b994fed409, a9eb992f67, ed95815a6a, 2e2888346f,
525c5ac081, 66cede4c03, 33ea94991a, bae461d1f8, 9df82cc952, d3d927c84d, 36ab1ce8a2, 7bbf3ffba3, 6ab5c3cf2e, c2384e387d,
a00f263bad, 9d648915cc, e6bd7484fa, d780c7482e, ffa8d0267e, f0505a9c0e, 09e212bd64, 75f3131e65, b1b571ace9, 876f580f75,
7999f261ef, 66b8574f74, d7b8be11a4, aa3935cc31, 002c755ca5, 558739b4e7, efdfa48ad0, 693c4451ee
@@ -1,4 +1,5 @@
node_modules
build
*.typegen.ts
i18-generate-hash.js
i18-generate-hash.js
src/parser/TraceOperatorParser/**
@@ -10,4 +10,6 @@ public/
**/*.json

# Ignore all files in parser folder:
src/parser/**
src/parser/**

src/TraceOperator/parser/**
@@ -45,6 +45,7 @@
"@sentry/webpack-plugin": "2.22.6",
"@signozhq/badge": "0.0.2",
"@signozhq/calendar": "0.0.0",
"@signozhq/callout": "0.0.2",
"@signozhq/design-tokens": "1.1.4",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
@@ -92,6 +92,7 @@ describe('prepareQueryRangePayloadV5', () => {
builder: {
queryData: [baseBuilderQuery()],
queryFormulas: [baseFormula()],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TIME_SERIES,
@@ -215,7 +216,7 @@ describe('prepareQueryRangePayloadV5', () => {
},
],
clickhouse_sql: [],
builder: { queryData: [], queryFormulas: [] },
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TIME_SERIES,
originalGraphType: PANEL_TYPES.TABLE,
@@ -286,7 +287,7 @@ describe('prepareQueryRangePayloadV5', () => {
legend: 'LC',
},
],
builder: { queryData: [], queryFormulas: [] },
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TABLE,
selectedTime: 'GLOBAL_TIME',
@@ -345,7 +346,7 @@ describe('prepareQueryRangePayloadV5', () => {
unit: undefined,
promql: [],
clickhouse_sql: [],
builder: { queryData: [], queryFormulas: [] },
builder: { queryData: [], queryFormulas: [], queryTraceOperator: [] },
},
graphType: PANEL_TYPES.TIME_SERIES,
selectedTime: 'GLOBAL_TIME',
@@ -386,6 +387,7 @@ describe('prepareQueryRangePayloadV5', () => {
builder: {
queryData: [baseBuilderQuery()],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TABLE,
@@ -459,6 +461,7 @@ describe('prepareQueryRangePayloadV5', () => {
builder: {
queryData: [logsQuery],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.LIST,
@@ -572,6 +575,7 @@ describe('prepareQueryRangePayloadV5', () => {
},
],
queryFormulas: [],
queryTraceOperator: [],
},
},
graphType: PANEL_TYPES.TIME_SERIES,
@@ -1,11 +1,15 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable sonarjs/no-identical-functions */
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { mapQueryDataToApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataToApi';
import { isEmpty } from 'lodash-es';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
IBuilderTraceOperator,
} from 'types/api/queryBuilder/queryBuilderData';
import {
BaseBuilderQuery,
FieldContext,
@@ -332,6 +336,101 @@ export function convertBuilderQueriesToV5(
);
}

function createTraceOperatorBaseSpec(
queryData: IBuilderTraceOperator,
requestType: RequestType,
panelType?: PANEL_TYPES,
): BaseBuilderQuery {
const nonEmptySelectColumns = (queryData.selectColumns as (
| BaseAutocompleteData
| TelemetryFieldKey
)[])?.filter((c) => ('key' in c ? c?.key : c?.name));

return {
stepInterval: queryData?.stepInterval || undefined,
groupBy:
queryData.groupBy?.length > 0
? queryData.groupBy.map(
(item: any): GroupByKey => ({
name: item.key,
fieldDataType: item?.dataType,
fieldContext: item?.type,
description: item?.description,
unit: item?.unit,
signal: item?.signal,
materialized: item?.materialized,
}),
)
: undefined,
limit:
panelType === PANEL_TYPES.TABLE || panelType === PANEL_TYPES.LIST
? queryData.limit || queryData.pageSize || undefined
: queryData.limit || undefined,
offset:
requestType === 'raw' || requestType === 'trace'
? queryData.offset
: undefined,
order:
queryData.orderBy?.length > 0
? queryData.orderBy.map(
(order: any): OrderBy => ({
key: {
name: order.columnName,
},
direction: order.order,
}),
)
: undefined,
legend: isEmpty(queryData.legend) ? undefined : queryData.legend,
having: isEmpty(queryData.having) ? undefined : (queryData?.having as Having),
selectFields: isEmpty(nonEmptySelectColumns)
? undefined
: nonEmptySelectColumns?.map(
(column: any): TelemetryFieldKey => ({
name: column.name ?? column.key,
fieldDataType:
column?.fieldDataType ?? (column?.dataType as FieldDataType),
fieldContext: column?.fieldContext ?? (column?.type as FieldContext),
signal: column?.signal ?? undefined,
}),
),
};
}

export function convertTraceOperatorToV5(
traceOperator: Record<string, IBuilderTraceOperator>,
requestType: RequestType,
panelType?: PANEL_TYPES,
): QueryEnvelope[] {
return Object.entries(traceOperator).map(
([queryName, traceOperatorData]): QueryEnvelope => {
const baseSpec = createTraceOperatorBaseSpec(
traceOperatorData,
requestType,
panelType,
);

// Skip aggregation for raw request type
const aggregations =
requestType === 'raw'
? undefined
: createAggregation(traceOperatorData, panelType);

const spec: QueryEnvelope['spec'] = {
name: queryName,
...baseSpec,
expression: traceOperatorData.expression || '',
aggregations: aggregations as TraceAggregation[],
};

return {
type: 'builder_trace_operator' as QueryType,
spec,
};
},
);
}

/**
* Converts PromQL queries to V5 format
*/
@@ -413,14 +512,28 @@ export const prepareQueryRangePayloadV5 = ({
switch (query.queryType) {
case EQueryType.QUERY_BUILDER: {
const { queryData: data, queryFormulas } = query.builder;
const { queryData: data, queryFormulas, queryTraceOperator } = query.builder;
const currentQueryData = mapQueryDataToApi(data, 'queryName', tableParams);
const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');

const filteredTraceOperator =
queryTraceOperator && queryTraceOperator.length > 0
? queryTraceOperator.filter((traceOperator) =>
Boolean(traceOperator.expression.trim()),
)
: [];

const currentTraceOperator = mapQueryDataToApi(
filteredTraceOperator,
'queryName',
tableParams,
);

// Combine legend maps
legendMap = {
...currentQueryData.newLegendMap,
...currentFormulas.newLegendMap,
...currentTraceOperator.newLegendMap,
};

// Convert builder queries
@@ -453,8 +566,14 @@ export const prepareQueryRangePayloadV5 = ({
}),
);

// Combine both types
queries = [...builderQueries, ...formulaQueries];
const traceOperatorQueries = convertTraceOperatorToV5(
currentTraceOperator.data,
requestType,
graphType,
);

// Combine all query types
queries = [...builderQueries, ...formulaQueries, ...traceOperatorQueries];
break;
}
case EQueryType.PROM: {
@@ -125,6 +125,7 @@ export const getHostTracesQueryPayload = (
},
],
queryFormulas: [],
queryTraceOperator: [],
},
id: '572f1d91-6ac0-46c0-b726-c21488b34434',
queryType: EQueryType.QUERY_BUILDER,
@@ -51,6 +51,7 @@ export const getHostLogsQueryPayload = (
},
],
queryFormulas: [],
queryTraceOperator: [],
},
id: uuidv4(),
queryType: EQueryType.QUERY_BUILDER,
@@ -22,6 +22,10 @@
flex: 1;

position: relative;

.qb-trace-view-selector-container {
padding: 12px 8px 8px 8px;
}
}

.qb-content-section {
@@ -179,7 +183,7 @@
flex-direction: column;
gap: 8px;

margin-left: 32px;
margin-left: 26px;
padding-bottom: 16px;
padding-left: 8px;
@@ -195,8 +199,8 @@
}

.formula-container {
margin-left: 82px;
padding: 4px 0px;
padding: 8px;
margin-left: 74px;

.ant-col {
&::before {
@@ -291,6 +295,13 @@
);
}
}
.qb-trace-operator-button-container {
&-text {
display: flex;
align-items: center;
gap: 8px;
}
}
}
}

@@ -331,6 +342,12 @@
);
left: 15px;
}

&.has-trace-operator {
&::before {
height: 0px;
}
}
}

.formula-name {
@@ -347,7 +364,7 @@
&::before {
content: '';
height: 65px;
height: 128px;
content: '';
position: absolute;
left: 0;
@@ -5,11 +5,13 @@ import { Formula } from 'container/QueryBuilder/components/Formula';
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { memo, useEffect, useMemo, useRef } from 'react';
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import { QueryBuilderV2Provider } from './QueryBuilderV2Context';
import QueryFooter from './QueryV2/QueryFooter/QueryFooter';
import { QueryV2 } from './QueryV2/QueryV2';
import TraceOperator from './QueryV2/TraceOperator/TraceOperator';

export const QueryBuilderV2 = memo(function QueryBuilderV2({
config,
@@ -18,6 +20,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
queryComponents,
isListViewPanel = false,
showOnlyWhereClause = false,
showTraceOperator = false,
version,
}: QueryBuilderProps): JSX.Element {
const {
@@ -25,6 +28,7 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
addNewBuilderQuery,
addNewFormula,
handleSetConfig,
addTraceOperator,
panelType,
initialDataSource,
} = useQueryBuilder();
@@ -54,6 +58,11 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
newPanelType,
]);

const isMultiQueryAllowed = useMemo(
() => !isListViewPanel || showTraceOperator,
[showTraceOperator, isListViewPanel],
);

const listViewLogFilterConfigs: QueryBuilderProps['filterConfigs'] = useMemo(() => {
const config: QueryBuilderProps['filterConfigs'] = {
stepInterval: { isHidden: true, isDisabled: true },
@@ -97,11 +106,60 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
listViewTracesFilterConfigs,
]);

const traceOperator = useMemo((): IBuilderTraceOperator | undefined => {
if (
currentQuery.builder.queryTraceOperator &&
currentQuery.builder.queryTraceOperator.length > 0
) {
return currentQuery.builder.queryTraceOperator[0];
}

return undefined;
}, [currentQuery.builder.queryTraceOperator]);

const hasAtLeastOneTraceQuery = useMemo(
() =>
currentQuery.builder.queryData.some(
(query) => query.dataSource === DataSource.TRACES,
),
[currentQuery.builder.queryData],
);

const hasTraceOperator = useMemo(
() => showTraceOperator && hasAtLeastOneTraceQuery && Boolean(traceOperator),
[showTraceOperator, traceOperator, hasAtLeastOneTraceQuery],
);

const shouldShowFooter = useMemo(
() =>
(!showOnlyWhereClause && !isListViewPanel) ||
(currentDataSource === DataSource.TRACES && showTraceOperator),
[isListViewPanel, showTraceOperator, showOnlyWhereClause, currentDataSource],
);

const showQueryList = useMemo(
() => (!showOnlyWhereClause && !isListViewPanel) || showTraceOperator,
[isListViewPanel, showOnlyWhereClause, showTraceOperator],
);

const showFormula = useMemo(() => {
if (currentDataSource === DataSource.TRACES) {
return !isListViewPanel;
}

return true;
}, [isListViewPanel, currentDataSource]);

const showAddTraceOperator = useMemo(
() => showTraceOperator && !traceOperator && hasAtLeastOneTraceQuery,
[showTraceOperator, traceOperator, hasAtLeastOneTraceQuery],
);

return (
<QueryBuilderV2Provider>
<div className="query-builder-v2">
<div className="qb-content-container">
{isListViewPanel && (
{!isMultiQueryAllowed ? (
<QueryV2
ref={containerRef}
key={currentQuery.builder.queryData[0].queryName}
@@ -109,15 +167,16 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
query={currentQuery.builder.queryData[0]}
filterConfigs={queryFilterConfigs}
queryComponents={queryComponents}
isMultiQueryAllowed={isMultiQueryAllowed}
showTraceOperator={showTraceOperator}
hasTraceOperator={hasTraceOperator}
version={version}
isAvailableToDisable={false}
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
/>
)}

{!isListViewPanel &&
) : (
currentQuery.builder.queryData.map((query, index) => (
<QueryV2
ref={containerRef}
@@ -127,13 +186,17 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
filterConfigs={queryFilterConfigs}
queryComponents={queryComponents}
version={version}
isMultiQueryAllowed={isMultiQueryAllowed}
isAvailableToDisable={false}
showTraceOperator={showTraceOperator}
hasTraceOperator={hasTraceOperator}
queryVariant={config?.queryVariant || 'dropdown'}
showOnlyWhereClause={showOnlyWhereClause}
isListViewPanel={isListViewPanel}
signalSource={config?.signalSource || ''}
/>
))}
))
)}

{!showOnlyWhereClause && currentQuery.builder.queryFormulas.length > 0 && (
<div className="qb-formulas-container">
@@ -158,15 +221,25 @@ export const QueryBuilderV2 = memo(function QueryBuilderV2({
</div>
)}

{!showOnlyWhereClause && !isListViewPanel && (
{shouldShowFooter && (
<QueryFooter
showAddFormula={showFormula}
addNewBuilderQuery={addNewBuilderQuery}
addNewFormula={addNewFormula}
addTraceOperator={addTraceOperator}
showAddTraceOperator={showAddTraceOperator}
/>
)}

{hasTraceOperator && (
<TraceOperator
isListViewPanel={isListViewPanel}
traceOperator={traceOperator as IBuilderTraceOperator}
/>
)}
</div>

{!showOnlyWhereClause && !isListViewPanel && (
{showQueryList && (
<div className="query-names-section">
{currentQuery.builder.queryData.map((query) => (
<div key={query.queryName} className="query-name">
@@ -1,7 +1,11 @@
.query-add-ons {
width: 100%;
}

.add-ons-list {
display: flex;
justify-content: space-between;
align-items: center;
gap: 16px;

.add-ons-tabs {
display: flex;
@@ -144,6 +144,7 @@ function QueryAddOns({
showReduceTo,
panelType,
index,
isForTraceOperator = false,
}: {
query: IBuilderQuery;
version: string;
@@ -151,6 +152,7 @@ function QueryAddOns({
showReduceTo: boolean;
panelType: PANEL_TYPES | null;
index: number;
isForTraceOperator?: boolean;
}): JSX.Element {
const [addOns, setAddOns] = useState<AddOn[]>(ADD_ONS);

@@ -160,6 +162,7 @@ function QueryAddOns({
index,
query,
entityVersion: '',
isForTraceOperator,
});

const { handleSetQueryData } = useQueryBuilder();
@@ -4,7 +4,10 @@ import { Tooltip } from 'antd';
import InputWithLabel from 'components/InputWithLabel/InputWithLabel';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { useMemo } from 'react';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import {
IBuilderQuery,
IBuilderTraceOperator,
} from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import QueryAggregationSelect from './QueryAggregationSelect';
@@ -20,7 +23,7 @@ function QueryAggregationOptions({
panelType?: string;
onAggregationIntervalChange: (value: number) => void;
onChange?: (value: string) => void;
queryData: IBuilderQuery;
queryData: IBuilderQuery | IBuilderTraceOperator;
}): JSX.Element {
const showAggregationInterval = useMemo(() => {
// eslint-disable-next-line sonarjs/prefer-single-boolean-return
@@ -1,12 +1,20 @@
/* eslint-disable react/require-default-props */
import { Button, Tooltip, Typography } from 'antd';
import { Plus, Sigma } from 'lucide-react';
import { DraftingCompass, Plus, Sigma } from 'lucide-react';
import BetaTag from 'periscope/components/BetaTag/BetaTag';

export default function QueryFooter({
addNewBuilderQuery,
addNewFormula,
addTraceOperator,
showAddFormula = true,
showAddTraceOperator = false,
}: {
addNewBuilderQuery: () => void;
addNewFormula: () => void;
addTraceOperator?: () => void;
showAddTraceOperator: boolean;
showAddFormula?: boolean;
}): JSX.Element {
return (
<div className="qb-footer">
@@ -22,32 +30,65 @@ export default function QueryFooter({
</Tooltip>
</div>

<div className="qb-add-formula">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add New Formula
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-advanced-comparisons"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-formula-button periscope-btn secondary"
icon={<Sigma size={16} />}
onClick={addNewFormula}
{showAddFormula && (
<div className="qb-add-formula">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add New Formula
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-advanced-comparisons"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
Add Formula
</Button>
</Tooltip>
</div>
<Button
className="add-formula-button periscope-btn secondary"
icon={<Sigma size={16} />}
onClick={addNewFormula}
>
Add Formula
</Button>
</Tooltip>
</div>
)}
{showAddTraceOperator && (
<div className="qb-trace-operator-button-container">
<Tooltip
title={
<div style={{ textAlign: 'center' }}>
Add Trace Matching
<Typography.Link
href="https://signoz.io/docs/userguide/query-builder-v5/#multi-query-analysis-trace-operators"
target="_blank"
style={{ textDecoration: 'underline' }}
>
{' '}
<br />
Learn more
</Typography.Link>
</div>
}
>
<Button
className="add-trace-operator-button periscope-btn secondary"
icon={<DraftingCompass size={16} />}
onClick={(): void => addTraceOperator?.()}
>
<div className="qb-trace-operator-button-container-text">
Add Trace Matching
<BetaTag />
</div>
</Button>
</Tooltip>
</div>
)}
</div>
</div>
);
@@ -7,6 +7,7 @@
'Helvetica Neue', sans-serif;

.query-where-clause-editor-container {
position: relative;
display: flex;
flex-direction: row;
@@ -1,3 +1,4 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { Dropdown } from 'antd';
import cx from 'classnames';
import { ENTITY_VERSION_V4, ENTITY_VERSION_V5 } from 'constants/app';
@@ -26,9 +27,12 @@ export const QueryV2 = memo(function QueryV2({
query,
filterConfigs,
isListViewPanel = false,
showTraceOperator = false,
hasTraceOperator = false,
version,
showOnlyWhereClause = false,
signalSource = '',
isMultiQueryAllowed = false,
}: QueryProps & { ref: React.RefObject<HTMLDivElement> }): JSX.Element {
const { cloneQuery, panelType } = useQueryBuilder();
@@ -75,6 +79,15 @@ export const QueryV2 = memo(function QueryV2({
dataSource,
]);

const showInlineQuerySearch = useMemo(() => {
if (!showTraceOperator) {
return false;
}
return (
dataSource === DataSource.TRACES && (hasTraceOperator || isListViewPanel)
);
}, [hasTraceOperator, isListViewPanel, showTraceOperator, dataSource]);

const handleChangeAggregateEvery = useCallback(
(value: IBuilderQuery['stepInterval']) => {
handleChangeQueryData('stepInterval', value);
@@ -108,11 +121,12 @@ export const QueryV2 = memo(function QueryV2({
ref={ref}
>
<div className="qb-content-section">
{!showOnlyWhereClause && (
{(!showOnlyWhereClause || showTraceOperator) && (
<div className="qb-header-container">
<div className="query-actions-container">
<div className="query-actions-left-container">
<QBEntityOptions
hasTraceOperator={hasTraceOperator}
isMetricsDataSource={dataSource === DataSource.METRICS}
showFunctions={
(version && version === ENTITY_VERSION_V4) ||
@@ -122,6 +136,7 @@ export const QueryV2 = memo(function QueryV2({
false
}
isCollapsed={isCollapsed}
showTraceOperator={showTraceOperator}
entityType="query"
entityData={query}
onToggleVisibility={handleToggleDisableQuery}
@@ -139,7 +154,28 @@ export const QueryV2 = memo(function QueryV2({
/>
</div>

{!isListViewPanel && (
{!isCollapsed && showInlineQuerySearch && (
<div className="qb-search-filter-container" style={{ flex: 1 }}>
<div className="query-search-container">
<QuerySearch
key={`query-search-${query.queryName}-${query.dataSource}`}
onChange={handleSearchChange}
queryData={query}
dataSource={dataSource}
signalSource={signalSource}
/>
</div>

{showSpanScopeSelector && (
<div className="traces-search-filter-container">
<div className="traces-search-filter-in">in</div>
<SpanScopeSelector query={query} />
</div>
)}
</div>
)}

{isMultiQueryAllowed && (
<Dropdown
className="query-actions-dropdown"
menu={{
@@ -181,28 +217,31 @@ export const QueryV2 = memo(function QueryV2({
</div>
)}

<div className="qb-search-filter-container">
<div className="query-search-container">
<QuerySearch
key={`query-search-${query.queryName}-${query.dataSource}`}
onChange={handleSearchChange}
queryData={query}
dataSource={dataSource}
signalSource={signalSource}
/>
</div>

{showSpanScopeSelector && (
<div className="traces-search-filter-container">
<div className="traces-search-filter-in">in</div>
<SpanScopeSelector query={query} />
{!showInlineQuerySearch && (
<div className="qb-search-filter-container">
<div className="query-search-container">
<QuerySearch
key={`query-search-${query.queryName}-${query.dataSource}`}
onChange={handleSearchChange}
queryData={query}
dataSource={dataSource}
signalSource={signalSource}
/>
</div>
)}
</div>

{showSpanScopeSelector && (
<div className="traces-search-filter-container">
<div className="traces-search-filter-in">in</div>
<SpanScopeSelector query={query} />
</div>
)}
</div>
)}
</div>

{!showOnlyWhereClause &&
!isListViewPanel &&
!(hasTraceOperator && dataSource === DataSource.TRACES) &&
dataSource !== DataSource.METRICS && (
<QueryAggregation
dataSource={dataSource}
@@ -225,16 +264,17 @@ export const QueryV2 = memo(function QueryV2({
/>
)}

{!showOnlyWhereClause && (
<QueryAddOns
index={index}
query={query}
version="v3"
isListViewPanel={isListViewPanel}
showReduceTo={showReduceTo}
panelType={panelType}
/>
)}
{!showOnlyWhereClause &&
!(hasTraceOperator && query.dataSource === DataSource.TRACES) && (
<QueryAddOns
index={index}
query={query}
version="v3"
isListViewPanel={isListViewPanel}
showReduceTo={showReduceTo}
panelType={panelType}
/>
)}
</div>
)}
</div>
@@ -0,0 +1,185 @@
|
||||
.qb-trace-operator {
|
||||
padding: 8px;
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
|
||||
&.non-list-view {
|
||||
padding-left: 40px;
|
||||
position: relative;
|
||||
|
||||
&::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 24px;
|
||||
left: 12px;
|
||||
height: 88px;
|
||||
width: 1px;
|
||||
background: repeating-linear-gradient(
|
||||
to bottom,
|
||||
#1d212d,
|
||||
#1d212d 4px,
|
||||
transparent 4px,
|
||||
transparent 8px
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
&-span-source-label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
height: 24px;
|
||||
|
||||
&-query {
|
||||
font-size: 14px;
|
||||
font-weight: 400;
|
||||
color: var(--bg-vanilla-100);
|
||||
}
|
||||
&-query-name {
|
||||
width: 18px;
|
||||
height: 18px;
|
||||
display: grid;
|
||||
place-content: center;
|
||||
padding: 2px;
|
||||
|
||||
border-radius: 2px;
|
||||
border: 1px solid rgba(242, 71, 105, 0.2);
|
||||
background: rgba(242, 71, 105, 0.1);
|
||||
color: var(--Sakura-400, #f56c87);
|
||||
font-size: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
&-arrow {
|
||||
position: relative;
|
||||
&::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 16px;
|
||||
transform: translateY(-50%);
|
||||
left: -26px;
|
||||
height: 1px;
|
||||
width: 20px;
|
||||
background: repeating-linear-gradient(
|
||||
to right,
|
||||
#1d212d,
|
||||
#1d212d 4px,
|
||||
transparent 4px,
|
||||
transparent 8px
|
||||
);
|
||||
}
|
||||
|
||||
&::after {
|
||||
content: '';
|
||||
position: absolute;
|
||||
top: 16px;
|
||||
left: -10px;
|
||||
transform: translateY(-50%);
|
||||
height: 4px;
|
||||
width: 4px;
|
||||
border-radius: 50%;
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
|
||||
&-input {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
&-container {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&-aggregation-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
&-add-ons-container {
|
||||
width: 100%;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
&-label-with-input {
|
||||
position: relative;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-direction: row;
|
||||
|
||||
border-radius: 2px;
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
background: var(--bg-ink-300);
|
||||
|
||||
.qb-trace-operator-editor-container {
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
&.arrow-left {
|
||||
&::before {
|
||||
content: '';
|
||||
position: absolute;
|
||||
left: -16px;
|
||||
top: 50%;
|
||||
height: 1px;
|
||||
width: 16px;
|
||||
background-color: var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
|
||||
.label {
|
||||
color: var(--bg-vanilla-400);
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
padding: 0px 8px;
|
||||
border-right: 1px solid var(--bg-slate-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.qb-trace-operator {
|
||||
&-arrow {
|
||||
&::before {
|
||||
background: repeating-linear-gradient(
|
||||
to right,
|
||||
var(--bg-vanilla-300),
|
||||
var(--bg-vanilla-300) 4px,
|
||||
transparent 4px,
|
||||
transparent 8px
|
||||
);
|
||||
}
|
||||
&::after {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
&.non-list-view {
|
||||
&::before {
|
||||
background: repeating-linear-gradient(
|
||||
to bottom,
|
||||
var(--bg-vanilla-300),
|
||||
var(--bg-vanilla-300) 4px,
|
||||
transparent 4px,
|
||||
transparent 8px
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
&-label-with-input {
|
||||
border: 1px solid var(--bg-vanilla-300) !important;
|
||||
background: var(--bg-vanilla-100) !important;
|
||||
|
||||
.label {
|
||||
color: var(--bg-ink-500) !important;
|
||||
border-right: 1px solid var(--bg-vanilla-300) !important;
|
||||
background: var(--bg-vanilla-100) !important;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
/* eslint-disable react/require-default-props */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
|
||||
import './TraceOperator.styles.scss';
|
||||
|
||||
import { Button, Tooltip, Typography } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { Trash2 } from 'lucide-react';
|
||||
import { useCallback } from 'react';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import QueryAddOns from '../QueryAddOns/QueryAddOns';
|
||||
import QueryAggregation from '../QueryAggregation/QueryAggregation';
|
||||
import TraceOperatorEditor from './TraceOperatorEditor';
|
||||
|
||||
export default function TraceOperator({
|
||||
traceOperator,
|
||||
isListViewPanel = false,
|
||||
}: {
|
||||
traceOperator: IBuilderTraceOperator;
|
||||
isListViewPanel?: boolean;
|
||||
}): JSX.Element {
|
||||
const { panelType, removeTraceOperator } = useQueryBuilder();
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query: traceOperator,
|
||||
entityVersion: '',
|
||||
isForTraceOperator: true,
|
||||
});
|
||||
|
||||
const handleTraceOperatorChange = useCallback(
|
||||
(traceOperatorExpression: string) => {
|
||||
handleChangeQueryData('expression', traceOperatorExpression);
|
||||
},
|
||||
[handleChangeQueryData],
|
||||
);
|
||||
|
||||
const handleChangeAggregateEvery = useCallback(
|
||||
(value: IBuilderQuery['stepInterval']) => {
|
||||
handleChangeQueryData('stepInterval', value);
|
||||
},
|
||||
[handleChangeQueryData],
|
||||
);
|
||||
|
||||
const handleChangeAggregation = useCallback(
|
||||
(value: string) => {
|
||||
handleChangeQueryData('aggregations', [
|
||||
{
|
||||
expression: value,
|
||||
},
|
||||
]);
|
||||
},
|
||||
[handleChangeQueryData],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className={cx('qb-trace-operator', !isListViewPanel && 'non-list-view')}>
|
||||
<div className="qb-trace-operator-container">
|
||||
<div
|
||||
className={cx(
|
||||
'qb-trace-operator-label-with-input',
|
||||
!isListViewPanel && 'qb-trace-operator-arrow',
|
||||
)}
|
||||
>
|
||||
<Typography.Text className="label">TRACE OPERATOR</Typography.Text>
|
||||
<div className="qb-trace-operator-editor-container">
|
||||
<TraceOperatorEditor
|
||||
value={traceOperator?.expression || ''}
|
||||
traceOperator={traceOperator}
|
||||
onChange={handleTraceOperatorChange}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{!isListViewPanel && (
|
||||
<div className="qb-trace-operator-aggregation-container">
|
||||
<div className={cx(!isListViewPanel && 'qb-trace-operator-arrow')}>
|
||||
<QueryAggregation
|
||||
dataSource={DataSource.TRACES}
|
||||
key={`query-search-${traceOperator.queryName}`}
|
||||
panelType={panelType || undefined}
|
||||
onAggregationIntervalChange={handleChangeAggregateEvery}
|
||||
onChange={handleChangeAggregation}
|
||||
queryData={traceOperator}
|
||||
/>
|
||||
</div>
|
||||
<div
|
||||
className={cx(
|
||||
'qb-trace-operator-add-ons-container',
|
||||
!isListViewPanel && 'qb-trace-operator-arrow',
|
||||
)}
|
||||
>
|
||||
<QueryAddOns
|
||||
index={0}
|
||||
query={traceOperator}
|
||||
version="v3"
|
||||
isForTraceOperator
|
||||
isListViewPanel={false}
|
||||
showReduceTo={false}
|
||||
panelType={panelType}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<Tooltip title="Remove Trace Operator" placement="topLeft">
|
||||
<Button className="periscope-btn ghost" onClick={removeTraceOperator}>
|
||||
<Trash2 size={14} />
|
||||
</Button>
|
||||
</Tooltip>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,491 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable sonarjs/no-identical-functions */
|
||||
|
||||
import '../QuerySearch/QuerySearch.styles.scss';
|
||||
|
||||
import { CheckCircleFilled } from '@ant-design/icons';
|
||||
import {
|
||||
autocompletion,
|
||||
closeCompletion,
|
||||
CompletionContext,
|
||||
completionKeymap,
|
||||
CompletionResult,
|
||||
startCompletion,
|
||||
} from '@codemirror/autocomplete';
|
||||
import { javascript } from '@codemirror/lang-javascript';
|
||||
import { Color } from '@signozhq/design-tokens';
|
||||
import { copilot } from '@uiw/codemirror-theme-copilot';
|
||||
import { githubLight } from '@uiw/codemirror-theme-github';
|
||||
import CodeMirror, { EditorView, keymap, Prec } from '@uiw/react-codemirror';
|
||||
import { Button, Popover } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import {
|
||||
TRACE_OPERATOR_OPERATORS,
|
||||
TRACE_OPERATOR_OPERATORS_LABELS,
|
||||
TRACE_OPERATOR_OPERATORS_WITH_PRIORITY,
|
||||
} from 'constants/antlrQueryConstants';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { TriangleAlert } from 'lucide-react';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { IDetailedError, IValidationResult } from 'types/antlrQueryTypes';
|
||||
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { validateTraceOperatorQuery } from 'utils/queryValidationUtils';
|
||||
|
||||
import { getTraceOperatorContextAtCursor } from './utils/traceOperatorContextUtils';
|
||||
import { getInvolvedQueriesInTraceOperator } from './utils/utils';
|
||||
|
||||
// Custom extension to stop events
|
||||
const stopEventsExtension = EditorView.domEventHandlers({
|
||||
keydown: (event) => {
|
||||
// Stop all keyboard events from propagating to global shortcuts
|
||||
event.stopPropagation();
|
||||
event.stopImmediatePropagation();
|
||||
return false; // Important for CM to know you handled it
|
||||
},
|
||||
input: (event) => {
|
||||
event.stopPropagation();
|
||||
return false;
|
||||
},
|
||||
focus: (event) => {
|
||||
// Ensure focus events don't interfere with global shortcuts
|
||||
event.stopPropagation();
|
||||
return false;
|
||||
},
|
||||
blur: (event) => {
|
||||
// Ensure blur events don't interfere with global shortcuts
|
||||
event.stopPropagation();
|
||||
return false;
|
||||
},
|
||||
});
|
||||
|
||||
interface TraceOperatorEditorProps {
|
||||
value: string;
|
||||
traceOperator: IBuilderTraceOperator;
|
||||
onChange: (value: string) => void;
|
||||
placeholder?: string;
|
||||
onRun?: (query: string) => void;
|
||||
}
|
||||
|
||||
function TraceOperatorEditor({
|
||||
value,
|
||||
onChange,
|
||||
traceOperator,
|
||||
placeholder = 'Enter your trace operator query',
|
||||
onRun,
|
||||
}: TraceOperatorEditorProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const [isFocused, setIsFocused] = useState(false);
|
||||
const [cursorPos, setCursorPos] = useState({ line: 0, ch: 0 });
|
||||
const editorRef = useRef<EditorView | null>(null);
|
||||
const [validation, setValidation] = useState<IValidationResult>({
|
||||
isValid: false,
|
||||
message: '',
|
||||
errors: [],
|
||||
});
|
||||
// Track if the query was changed externally (from props) vs internally (user input)
|
||||
const [isExternalQueryChange, setIsExternalQueryChange] = useState(false);
|
||||
const [lastExternalValue, setLastExternalValue] = useState<string>('');
|
||||
const { currentQuery, handleRunQuery } = useQueryBuilder();
|
||||
|
||||
const queryOptions = useMemo(
|
||||
() =>
|
||||
currentQuery.builder.queryData
|
||||
.filter((query) => query.dataSource === DataSource.TRACES) // Only show trace queries
|
||||
.map((query) => ({
|
||||
label: query.queryName,
|
||||
type: 'atom',
|
||||
apply: query.queryName,
|
||||
})),
|
||||
[currentQuery.builder.queryData],
|
||||
);
|
||||
|
||||
const toggleSuggestions = useCallback(
|
||||
(timeout?: number) => {
|
||||
const timeoutId = setTimeout(() => {
|
||||
if (!editorRef.current) return;
|
||||
if (isFocused) {
|
||||
startCompletion(editorRef.current);
|
||||
} else {
|
||||
closeCompletion(editorRef.current);
|
||||
}
|
||||
}, timeout);
|
||||
|
||||
return (): void => clearTimeout(timeoutId);
|
||||
},
|
||||
[isFocused],
|
||||
);
|
||||
|
||||
const handleQueryValidation = (newQuery: string): void => {
|
||||
try {
|
||||
const validationResponse = validateTraceOperatorQuery(newQuery);
|
||||
setValidation(validationResponse);
|
||||
} catch (error) {
|
||||
setValidation({
|
||||
isValid: false,
|
||||
message: 'Failed to process trace operator',
|
||||
errors: [error as IDetailedError],
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
// Detect external value changes and mark for validation
|
||||
useEffect(() => {
|
||||
const newValue = value || '';
|
||||
if (newValue !== lastExternalValue) {
|
||||
setIsExternalQueryChange(true);
|
||||
setLastExternalValue(newValue);
|
||||
}
|
||||
}, [value, lastExternalValue]);
|
||||
|
||||
// Validate when the value changes externally (including on mount)
|
||||
useEffect(() => {
|
||||
if (isExternalQueryChange && value) {
|
||||
handleQueryValidation(value);
|
||||
setIsExternalQueryChange(false);
|
||||
}
|
||||
}, [isExternalQueryChange, value]);
|
||||
|
||||
// Enhanced autosuggestion function with context awareness
|
||||
function autoSuggestions(context: CompletionContext): CompletionResult | null {
|
||||
// This matches words before the cursor position
|
||||
// eslint-disable-next-line no-useless-escape
|
||||
const word = context.matchBefore(/[a-zA-Z0-9_.:/?&=#%\-\[\]]*/);
|
||||
if (word?.from === word?.to && !context.explicit) return null;
|
||||
|
||||
// Get the trace operator context at the cursor position
|
||||
const queryContext = getTraceOperatorContextAtCursor(value, cursorPos.ch);
|
||||
|
||||
// Define autocomplete options based on the context
|
||||
let options: {
|
||||
label: string;
|
||||
type: string;
|
||||
info?: string;
|
||||
apply:
|
||||
| string
|
||||
| ((view: EditorView, completion: any, from: number, to: number) => void);
|
||||
detail?: string;
|
||||
boost?: number;
|
||||
}[] = [];
|
||||
|
||||
// Helper function to add space after selection
|
||||
const addSpaceAfterSelection = (
|
||||
view: EditorView,
|
||||
completion: any,
|
||||
from: number,
|
||||
to: number,
|
||||
shouldAddSpace = true,
|
||||
): void => {
|
||||
view.dispatch({
|
||||
changes: {
|
||||
from,
|
||||
to,
|
||||
insert: shouldAddSpace ? `${completion.apply} ` : `${completion.apply}`,
|
||||
},
|
||||
selection: {
|
||||
anchor:
|
||||
from +
|
||||
(shouldAddSpace ? completion.apply.length + 1 : completion.apply.length),
|
||||
},
|
||||
});
|
||||
// Do not reopen here; onUpdate will handle reopening via toggleSuggestions
|
||||
};
|
||||
|
||||
// Helper function to add space after selection to options
|
||||
const addSpaceToOptions = (opts: typeof options): typeof options =>
|
||||
opts.map((option) => {
|
||||
const originalApply = option.apply || option.label;
|
||||
return {
|
||||
...option,
|
||||
apply: (
|
||||
view: EditorView,
|
||||
completion: any,
|
||||
from: number,
|
||||
to: number,
|
||||
): void => {
|
||||
addSpaceAfterSelection(view, { apply: originalApply }, from, to);
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
if (queryContext.isInAtom) {
|
||||
// Suggest atoms (identifiers) for trace operators
|
||||
|
||||
const involvedQueries = getInvolvedQueriesInTraceOperator([traceOperator]);
|
||||
|
||||
options = queryOptions.map((option) => ({
|
||||
...option,
|
||||
boost: !involvedQueries.includes(option.apply as string) ? 100 : -99,
|
||||
}));
|
||||
|
||||
// Filter options based on what user is typing
|
||||
const searchText = word?.text.toLowerCase().trim() ?? '';
|
||||
options = options.filter((option) =>
|
||||
option.label.toLowerCase().includes(searchText),
|
||||
);
|
||||
|
||||
// Add space after selection for atoms
|
||||
const optionsWithSpace = addSpaceToOptions(options);
|
||||
|
||||
return {
|
||||
from: word?.from ?? 0,
|
||||
to: word?.to ?? cursorPos.ch,
|
||||
options: optionsWithSpace,
|
||||
};
|
||||
}
|
||||
|
||||
if (queryContext.isInOperator) {
|
||||
// Suggest operators for trace operators
|
||||
const operators = Object.values(TRACE_OPERATOR_OPERATORS);
|
||||
options = operators.map((operator) => ({
|
||||
label: TRACE_OPERATOR_OPERATORS_LABELS[operator]
|
||||
? `${operator} (${TRACE_OPERATOR_OPERATORS_LABELS[operator]})`
|
||||
: operator,
|
||||
type: 'operator',
|
||||
apply: operator,
|
||||
boost: TRACE_OPERATOR_OPERATORS_WITH_PRIORITY[operator] * -10,
|
||||
}));
|
||||
|
||||
// Add space after selection for operators
|
||||
const optionsWithSpace = addSpaceToOptions(options);
|
||||
|
||||
return {
|
||||
from: word?.from ?? 0,
|
||||
to: word?.to ?? cursorPos.ch,
|
||||
options: optionsWithSpace,
|
||||
};
|
||||
}
|
||||
|
||||
if (queryContext.isInParenthesis) {
|
||||
// Different suggestions based on the context within parenthesis
|
||||
const curChar = value.charAt(cursorPos.ch - 1) || '';
|
||||
|
||||
if (curChar === '(') {
|
||||
// Right after opening parenthesis, suggest atoms or nested expressions
|
||||
options = [
|
||||
{ label: '(', type: 'parenthesis', apply: '(' },
|
||||
...queryOptions,
|
||||
];
|
||||
|
||||
// Add space after selection for opening parenthesis context
|
||||
const optionsWithSpace = addSpaceToOptions(options);
|
||||
|
||||
return {
|
||||
from: word?.from ?? 0,
|
||||
options: optionsWithSpace,
|
||||
};
|
||||
}
|
||||
|
||||
if (curChar === ')') {
|
||||
// After closing parenthesis, suggest operators
|
||||
const operators = Object.values(TRACE_OPERATOR_OPERATORS);
|
||||
options = operators.map((operator) => ({
|
||||
label: TRACE_OPERATOR_OPERATORS_LABELS[operator]
|
||||
? `${operator} (${TRACE_OPERATOR_OPERATORS_LABELS[operator]})`
|
||||
: operator,
|
||||
type: 'operator',
|
||||
apply: operator,
|
||||
boost: TRACE_OPERATOR_OPERATORS_WITH_PRIORITY[operator] * -10,
|
||||
}));
|
||||
|
||||
// Add space after selection for closing parenthesis context
|
||||
const optionsWithSpace = addSpaceToOptions(options);
|
||||
|
||||
return {
|
||||
from: word?.from ?? 0,
|
||||
options: optionsWithSpace,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Default: suggest atoms if no specific context
|
||||
options = [
|
||||
...queryOptions,
|
||||
{
|
||||
label: '(',
|
||||
type: 'parenthesis',
|
||||
apply: '(',
|
||||
},
|
||||
];
|
||||
|
||||
// Filter options based on what user is typing
|
||||
const searchText = word?.text.toLowerCase().trim() ?? '';
|
||||
options = options.filter((option) =>
|
||||
option.label.toLowerCase().includes(searchText),
|
||||
);
|
||||
|
||||
// Add space after selection
|
||||
const optionsWithSpace = addSpaceToOptions(options);
|
||||
|
||||
return {
|
||||
from: word?.from ?? 0,
|
||||
to: word?.to ?? context.pos,
|
||||
options: optionsWithSpace,
|
||||
};
|
||||
}
|
||||
|
||||
const handleUpdate = useCallback(
|
||||
(viewUpdate: { view: EditorView }): void => {
|
||||
if (!editorRef.current) {
|
||||
editorRef.current = viewUpdate.view;
|
||||
}
|
||||
|
||||
const selection = viewUpdate.view.state.selection.main;
|
||||
const pos = selection.head;
|
||||
|
||||
const lineInfo = viewUpdate.view.state.doc.lineAt(pos);
|
||||
const newPos = {
|
||||
line: lineInfo.number,
|
||||
ch: pos - lineInfo.from,
|
||||
};
|
||||
|
||||
if (newPos.line !== cursorPos.line || newPos.ch !== cursorPos.ch) {
|
||||
setCursorPos(newPos);
|
||||
// Trigger suggestions on context update
|
||||
toggleSuggestions(10);
|
||||
}
|
||||
},
|
||||
[cursorPos, toggleSuggestions],
|
||||
);
|
||||
|
||||
const handleChange = (newValue: string): void => {
|
||||
// Mark as internal change to avoid triggering external validation
|
||||
setIsExternalQueryChange(false);
|
||||
setLastExternalValue(newValue);
|
||||
onChange(newValue);
|
||||
};
|
||||
|
||||
const handleBlur = (): void => {
|
||||
handleQueryValidation(value);
|
||||
setIsFocused(false);
|
||||
};
|
||||
|
||||
// Effect to handle focus state and trigger suggestions on focus
|
||||
useEffect(() => {
|
||||
const clearTimeout = toggleSuggestions(10);
|
||||
return (): void => clearTimeout();
|
||||
}, [isFocused, toggleSuggestions]);
|
||||
|
||||
return (
|
||||
<div className="code-mirror-where-clause">
|
||||
<div className="query-where-clause-editor-container">
|
||||
<CodeMirror
|
||||
value={value}
|
||||
theme={isDarkMode ? copilot : githubLight}
|
||||
onChange={handleChange}
|
||||
onUpdate={handleUpdate}
|
||||
className={cx('query-where-clause-editor', {
|
||||
isValid: validation.isValid === true,
|
||||
hasErrors: validation.errors.length > 0,
|
||||
})}
|
||||
extensions={[
|
||||
autocompletion({
|
||||
override: [autoSuggestions],
|
||||
defaultKeymap: true,
|
||||
closeOnBlur: true,
|
||||
activateOnTyping: true,
|
||||
maxRenderedOptions: 50,
|
||||
}),
|
||||
javascript({ jsx: false, typescript: false }),
|
||||
EditorView.lineWrapping,
|
||||
stopEventsExtension,
|
||||
Prec.highest(
|
||||
keymap.of([
|
||||
...completionKeymap,
|
||||
{
|
||||
key: 'Escape',
|
||||
run: closeCompletion,
|
||||
},
|
||||
{
|
||||
key: 'Enter',
|
||||
preventDefault: true,
|
||||
// Prevent default behavior of Enter to add new line
|
||||
// and instead run a custom action
|
||||
run: (): boolean => true,
|
||||
},
|
||||
{
|
||||
key: 'Mod-Enter',
|
||||
preventDefault: true,
|
||||
run: (): boolean => {
|
||||
if (onRun && typeof onRun === 'function') {
|
||||
onRun(value);
|
||||
} else {
|
||||
handleRunQuery();
|
||||
}
|
||||
return true;
|
||||
},
|
||||
},
|
||||
{
|
||||
key: 'Shift-Enter',
|
||||
preventDefault: true,
|
||||
// Prevent default behavior of Shift-Enter to add new line
|
||||
run: (): boolean => true,
|
||||
},
|
||||
]),
|
||||
),
|
||||
]}
|
||||
placeholder={placeholder}
|
||||
basicSetup={{
|
||||
lineNumbers: false,
|
||||
}}
|
||||
onFocus={(): void => {
|
||||
setIsFocused(true);
|
||||
}}
|
||||
onBlur={handleBlur}
|
||||
/>
|
||||
{value && validation.isValid === false && !isFocused && (
|
||||
<div
|
||||
className={cx('query-status-container', {
|
||||
hasErrors: validation.errors.length > 0,
|
||||
})}
|
||||
>
|
||||
<Popover
|
||||
placement="bottomRight"
|
||||
showArrow={false}
|
||||
content={
|
||||
<div className="query-status-content">
|
||||
<div className="query-status-content-header">
|
||||
<div className="query-validation">
|
||||
<div className="query-validation-errors">
|
||||
{validation.errors.map((error) => (
|
||||
<div key={error.message} className="query-validation-error">
|
||||
<div className="query-validation-error">
|
||||
{error.line}:{error.column} - {error.message}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
overlayClassName="query-status-popover"
|
||||
>
|
||||
{validation.isValid ? (
|
||||
<Button
|
||||
type="text"
|
||||
icon={<CheckCircleFilled />}
|
||||
className="periscope-btn ghost"
|
||||
/>
|
||||
) : (
|
||||
<Button
|
||||
type="text"
|
||||
icon={<TriangleAlert size={14} color={Color.BG_CHERRY_500} />}
|
||||
className="periscope-btn ghost"
|
||||
/>
|
||||
)}
|
||||
</Popover>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
TraceOperatorEditor.defaultProps = {
|
||||
onRun: undefined,
|
||||
placeholder: 'Enter your trace operator query',
|
||||
};
|
||||
|
||||
export default TraceOperatorEditor;
|
||||
@@ -0,0 +1,425 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
|
||||
import { Token } from 'antlr4';
|
||||
import TraceOperatorGrammarLexer from 'parser/TraceOperatorParser/TraceOperatorGrammarLexer';
|
||||
|
||||
import {
|
||||
createTraceOperatorContext,
|
||||
extractTraceExpressionPairs,
|
||||
getTraceOperatorContextAtCursor,
|
||||
} from '../utils/traceOperatorContextUtils';
|
||||
|
||||
describe('traceOperatorContextUtils', () => {
|
||||
describe('createTraceOperatorContext', () => {
|
||||
it('should create a context object with all required properties', () => {
|
||||
const mockToken = {
|
||||
type: TraceOperatorGrammarLexer.IDENTIFIER,
|
||||
text: 'test',
|
||||
start: 0,
|
||||
stop: 3,
|
||||
} as Token;
|
||||
|
||||
const context = createTraceOperatorContext(
|
||||
mockToken,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
'atom',
|
||||
'operator',
|
||||
[],
|
||||
null,
|
||||
);
|
||||
|
||||
expect(context).toEqual({
|
||||
tokenType: TraceOperatorGrammarLexer.IDENTIFIER,
|
||||
text: 'test',
|
||||
start: 0,
|
||||
stop: 3,
|
||||
currentToken: 'test',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
atomToken: 'atom',
|
||||
operatorToken: 'operator',
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a context object with default values', () => {
|
||||
const mockToken = {
|
||||
type: TraceOperatorGrammarLexer.IDENTIFIER,
|
||||
text: 'test',
|
||||
start: 0,
|
||||
stop: 3,
|
||||
} as Token;
|
||||
|
||||
const context = createTraceOperatorContext(
|
||||
mockToken,
|
||||
false,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
|
||||
expect(context).toEqual({
|
||||
tokenType: TraceOperatorGrammarLexer.IDENTIFIER,
|
||||
text: 'test',
|
||||
start: 0,
|
||||
stop: 3,
|
||||
currentToken: 'test',
|
||||
isInAtom: false,
|
||||
isInOperator: true,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
atomToken: undefined,
|
||||
operatorToken: undefined,
|
||||
expressionPairs: [],
|
||||
currentPair: undefined,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractTraceExpressionPairs', () => {
|
||||
it('should extract simple expression pair', () => {
|
||||
const query = 'A => B';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].position.leftStart).toBe(0);
|
||||
expect(result[0].position.leftEnd).toBe(0);
|
||||
expect(result[0].operator).toBe('=>');
|
||||
expect(result[0].position.operatorStart).toBe(2);
|
||||
expect(result[0].position.operatorEnd).toBe(3);
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
expect(result[0].position.rightStart).toBe(5);
|
||||
expect(result[0].position.rightEnd).toBe(5);
|
||||
expect(result[0].isComplete).toBe(true);
|
||||
});
|
||||
|
||||
it('should extract multiple expression pairs', () => {
|
||||
const query = 'A => B && C => D';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
|
||||
// First pair: A => B
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].operator).toBe('=>');
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
|
||||
// Second pair: C => D
|
||||
expect(result[1].leftAtom).toBe('C');
|
||||
expect(result[1].operator).toBe('=>');
|
||||
expect(result[1].rightAtom).toBe('D');
|
||||
});
|
||||
|
||||
it('should handle NOT operator', () => {
|
||||
const query = 'NOT A => B';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].operator).toBe('=>');
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
});
|
||||
|
||||
it('should handle parentheses', () => {
|
||||
const query = '(A => B) && (C => D)';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
expect(result[1].leftAtom).toBe('C');
|
||||
expect(result[1].rightAtom).toBe('D');
|
||||
});
|
||||
|
||||
it('should handle incomplete expressions', () => {
|
||||
const query = 'A =>';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].operator).toBe('=>');
|
||||
expect(result[0].rightAtom).toBeUndefined();
|
||||
expect(result[0].isComplete).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle complex nested expressions', () => {
|
||||
const query = 'A => B && (C => D || E => F)';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(3);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
expect(result[1].leftAtom).toBe('C');
|
||||
expect(result[1].rightAtom).toBe('D');
|
||||
expect(result[2].leftAtom).toBe('E');
|
||||
expect(result[2].rightAtom).toBe('F');
|
||||
});
|
||||
|
||||
it('should handle whitespace variations', () => {
|
||||
const query = 'A=>B';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
expect(result).toHaveLength(1);
|
||||
expect(result[0].leftAtom).toBe('A');
|
||||
expect(result[0].operator).toBe('=>');
|
||||
expect(result[0].rightAtom).toBe('B');
|
||||
});
|
||||
|
||||
it('should handle error cases gracefully', () => {
|
||||
const query = 'invalid syntax @#$%';
|
||||
const result = extractTraceExpressionPairs(query);
|
||||
|
||||
// Should return an array (even if empty or with partial results)
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBeGreaterThanOrEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTraceOperatorContextAtCursor', () => {
|
||||
beforeEach(() => {
|
||||
// Mock console.error to keep test output clean
|
||||
jest.spyOn(console, 'error').mockImplementation(() => {});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('should return default context for empty query', () => {
|
||||
const result = getTraceOperatorContextAtCursor('', 0);
|
||||
|
||||
expect(result).toEqual({
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: 0,
|
||||
stop: 0,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return default context for null query', () => {
|
||||
const result = getTraceOperatorContextAtCursor(null as any, 0);
|
||||
|
||||
expect(result).toEqual({
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: 0,
|
||||
stop: 0,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
});
|
||||
});
|
||||
|
||||
it('should return default context for undefined query', () => {
|
||||
const result = getTraceOperatorContextAtCursor(undefined as any, 0);
|
||||
|
||||
expect(result).toEqual({
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: 0,
|
||||
stop: 0,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
});
|
||||
});
|
||||
|
||||
it('should identify atom context', () => {
|
||||
const query = 'A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 0); // cursor at 'A'
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(true);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(0);
|
||||
expect(result.stop).toBe(0);
|
||||
});
|
||||
|
||||
it('should identify operator context', () => {
|
||||
const query = 'A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 2); // cursor at '='
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBeUndefined();
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(true);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(2);
|
||||
expect(result.stop).toBe(2);
|
||||
});
|
||||
|
||||
it('should identify parenthesis context', () => {
|
||||
const query = '(A => B)';
|
||||
const result = getTraceOperatorContextAtCursor(query, 0); // cursor at '('
|
||||
|
||||
expect(result.atomToken).toBeUndefined();
|
||||
expect(result.operatorToken).toBeUndefined();
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(true);
|
||||
expect(result.start).toBe(0);
|
||||
expect(result.stop).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle cursor at space', () => {
|
||||
const query = 'A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 1); // cursor at space
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBeUndefined();
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(true);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle cursor at end of query', () => {
|
||||
const query = 'A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 5); // cursor at end
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(true);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(5);
|
||||
expect(result.stop).toBe(5);
|
||||
});
|
||||
|
||||
it('should handle complex query', () => {
|
||||
const query = 'A => B && C => D';
|
||||
const result = getTraceOperatorContextAtCursor(query, 8); // cursor at '&'
|
||||
|
||||
expect(result.atomToken).toBeUndefined();
|
||||
expect(result.operatorToken).toBe('&&');
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(true);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(7);
|
||||
expect(result.stop).toBe(8);
|
||||
});
|
||||
|
||||
it('should identify atom position in complex query', () => {
|
||||
const query = 'A => B && C => D';
|
||||
const result = getTraceOperatorContextAtCursor(query, 10); // cursor at 'C'
|
||||
|
||||
expect(result.atomToken).toBe('C');
|
||||
expect(result.operatorToken).toBe('&&');
|
||||
expect(result.isInAtom).toBe(true);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(10);
|
||||
expect(result.stop).toBe(10);
|
||||
});
|
||||
|
||||
it('should identify operator position in complex query', () => {
|
||||
const query = 'A => B && C => D';
|
||||
const result = getTraceOperatorContextAtCursor(query, 13); // cursor at '>'
|
||||
|
||||
expect(result.atomToken).toBe('C');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(true);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(12);
|
||||
expect(result.stop).toBe(13);
|
||||
});
|
||||
|
||||
it('should handle transition points', () => {
|
||||
const query = 'A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 4); // cursor at the space before 'B'
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(true);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(4);
|
||||
expect(result.stop).toBe(4);
|
||||
});
|
||||
|
||||
it('should handle whitespace in complex queries', () => {
|
||||
const query = 'A=>B && C=>D';
|
||||
const result = getTraceOperatorContextAtCursor(query, 6); // cursor at '&'
|
||||
|
||||
expect(result.atomToken).toBeUndefined();
|
||||
expect(result.operatorToken).toBe('&&');
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(true);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
expect(result.start).toBe(5);
|
||||
expect(result.stop).toBe(6);
|
||||
});
|
||||
|
||||
it('should handle NOT operator context', () => {
|
||||
const query = 'NOT A => B';
|
||||
const result = getTraceOperatorContextAtCursor(query, 0); // cursor at 'N'
|
||||
|
||||
expect(result.atomToken).toBeUndefined();
|
||||
expect(result.operatorToken).toBeUndefined();
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle parentheses context', () => {
|
||||
const query = '(A => B)';
|
||||
const result = getTraceOperatorContextAtCursor(query, 1); // cursor at 'A'
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(false);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(true);
|
||||
expect(result.start).toBe(0);
|
||||
expect(result.stop).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle expression pairs context', () => {
|
||||
const query = 'A => B && C => D';
|
||||
const result = getTraceOperatorContextAtCursor(query, 5); // cursor at 'B' in the first pair
|
||||
|
||||
expect(result.atomToken).toBe('A');
|
||||
expect(result.operatorToken).toBe('=>');
|
||||
expect(result.isInAtom).toBe(true);
|
||||
expect(result.isInOperator).toBe(false);
|
||||
expect(result.isInParenthesis).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle various cursor positions', () => {
|
||||
const query = 'A => B';
|
||||
|
||||
// Test cursor at each position
|
||||
for (let i = 0; i < query.length; i++) {
|
||||
const result = getTraceOperatorContextAtCursor(query, i);
|
||||
expect(result).toBeDefined();
|
||||
expect(typeof result.start).toBe('number');
|
||||
expect(typeof result.stop).toBe('number');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,46 @@
|
||||
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { getInvolvedQueriesInTraceOperator } from '../utils/utils';
|
||||
|
||||
const makeTraceOperator = (expression: string): IBuilderTraceOperator =>
|
||||
(({ expression } as unknown) as IBuilderTraceOperator);
|
||||
|
||||
describe('getInvolvedQueriesInTraceOperator', () => {
|
||||
it('returns empty array for empty input', () => {
|
||||
const result = getInvolvedQueriesInTraceOperator([]);
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
it('extracts identifiers from expression', () => {
|
||||
const result = getInvolvedQueriesInTraceOperator([
|
||||
makeTraceOperator('A => B'),
|
||||
]);
|
||||
expect(result).toEqual(['A', 'B']);
|
||||
});
|
||||
|
||||
it('extracts identifiers from complex expression', () => {
|
||||
const result = getInvolvedQueriesInTraceOperator([
|
||||
makeTraceOperator('A => (NOT B || C)'),
|
||||
]);
|
||||
expect(result).toEqual(['A', 'B', 'C']);
|
||||
});
|
||||
|
||||
it('extracts query names and filters out operator keywords from a complex expression', () => {
|
||||
const result = getInvolvedQueriesInTraceOperator([
|
||||
makeTraceOperator(
|
||||
'(A1 && (NOT B2 || (C3 -> (D4 && E5)))) => ((F6 || G7) && (NOT (H8 -> I9)))',
|
||||
),
|
||||
]);
|
||||
expect(result).toEqual([
|
||||
'A1',
|
||||
'B2',
|
||||
'C3',
|
||||
'D4',
|
||||
'E5',
|
||||
'F6',
|
||||
'G7',
|
||||
'H8',
|
||||
'I9',
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,562 @@
|
||||
/* eslint-disable sonarjs/cognitive-complexity */
|
||||
/* eslint-disable no-continue */
|
||||
|
||||
import { CharStreams, CommonTokenStream, Token } from 'antlr4';
|
||||
import TraceOperatorGrammarLexer from 'parser/TraceOperatorParser/TraceOperatorGrammarLexer';
|
||||
import { IToken } from 'types/antlrQueryTypes';
|
||||
|
||||
// Trace Operator Context Interface
|
||||
export interface ITraceOperatorContext {
|
||||
tokenType: number;
|
||||
text: string;
|
||||
start: number;
|
||||
stop: number;
|
||||
currentToken: string;
|
||||
isInAtom: boolean;
|
||||
isInOperator: boolean;
|
||||
isInParenthesis: boolean;
|
||||
isInExpression: boolean;
|
||||
atomToken?: string;
|
||||
operatorToken?: string;
|
||||
expressionPairs: ITraceExpressionPair[];
|
||||
currentPair?: ITraceExpressionPair | null;
|
||||
}
|
||||
|
||||
// Trace Expression Pair Interface
|
||||
export interface ITraceExpressionPair {
|
||||
leftAtom: string;
|
||||
operator: string;
|
||||
rightAtom?: string;
|
||||
rightExpression?: string;
|
||||
position: {
|
||||
leftStart: number;
|
||||
leftEnd: number;
|
||||
operatorStart: number;
|
||||
operatorEnd: number;
|
||||
rightStart?: number;
|
||||
rightEnd?: number;
|
||||
};
|
||||
isComplete: boolean;
|
||||
}
|
||||
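// Illustrative example (not part of the original change): per the unit tests
// earlier in this diff, extractTraceExpressionPairs('A => B') is expected to
// yield a single pair of this shape.
const exampleSimplePair: ITraceExpressionPair = {
  leftAtom: 'A',
  operator: '=>',
  rightAtom: 'B',
  position: {
    leftStart: 0,
    leftEnd: 0,
    operatorStart: 2,
    operatorEnd: 3,
    rightStart: 5,
    rightEnd: 5,
  },
  isComplete: true,
};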
|
||||
// Helper functions to determine token types
|
||||
function isAtomToken(tokenType: number): boolean {
|
||||
return tokenType === TraceOperatorGrammarLexer.IDENTIFIER;
|
||||
}
|
||||
|
||||
function isOperatorToken(tokenType: number): boolean {
|
||||
return [
|
||||
TraceOperatorGrammarLexer.T__2, // '=>'
|
||||
TraceOperatorGrammarLexer.T__3, // '&&'
|
||||
TraceOperatorGrammarLexer.T__4, // '||'
|
||||
TraceOperatorGrammarLexer.T__5, // 'NOT'
|
||||
TraceOperatorGrammarLexer.T__6, // '->'
|
||||
].includes(tokenType);
|
||||
}
|
||||
|
||||
function isParenthesisToken(tokenType: number): boolean {
|
||||
return (
|
||||
tokenType === TraceOperatorGrammarLexer.T__0 ||
|
||||
tokenType === TraceOperatorGrammarLexer.T__1
|
||||
);
|
||||
}
|
||||
|
||||
function isOpeningParenthesis(tokenType: number): boolean {
|
||||
return tokenType === TraceOperatorGrammarLexer.T__0;
|
||||
}
|
||||
|
||||
function isClosingParenthesis(tokenType: number): boolean {
|
||||
return tokenType === TraceOperatorGrammarLexer.T__1;
|
||||
}
|
||||
|
||||
// Function to create a context object
|
||||
export function createTraceOperatorContext(
|
||||
token: Token,
|
||||
isInAtom: boolean,
|
||||
isInOperator: boolean,
|
||||
isInParenthesis: boolean,
|
||||
isInExpression: boolean,
|
||||
atomToken?: string,
|
||||
operatorToken?: string,
|
||||
expressionPairs?: ITraceExpressionPair[],
|
||||
currentPair?: ITraceExpressionPair | null,
|
||||
): ITraceOperatorContext {
|
||||
return {
|
||||
tokenType: token.type,
|
||||
text: token.text || '',
|
||||
start: token.start,
|
||||
stop: token.stop,
|
||||
currentToken: token.text || '',
|
||||
isInAtom,
|
||||
isInOperator,
|
||||
isInParenthesis,
|
||||
isInExpression,
|
||||
atomToken,
|
||||
operatorToken,
|
||||
expressionPairs: expressionPairs || [],
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
|
||||
// Helper to determine token context
|
||||
function determineTraceTokenContext(
|
||||
token: IToken,
|
||||
): {
|
||||
isInAtom: boolean;
|
||||
isInOperator: boolean;
|
||||
isInParenthesis: boolean;
|
||||
isInExpression: boolean;
|
||||
} {
|
||||
const tokenType = token.type;
|
||||
|
||||
return {
|
||||
isInAtom: isAtomToken(tokenType),
|
||||
isInOperator: isOperatorToken(tokenType),
|
||||
isInParenthesis: isParenthesisToken(tokenType),
|
||||
isInExpression: false, // Will be determined by broader context
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts all expression pairs from a trace operator query string
|
||||
* This parses the query according to the TraceOperatorGrammar.g4 grammar
|
||||
*
|
||||
* @param query The trace operator query string to parse
|
||||
* @returns An array of ITraceExpressionPair objects representing the expression pairs
|
||||
*/
|
||||
export function extractTraceExpressionPairs(
|
||||
query: string,
|
||||
): ITraceExpressionPair[] {
|
||||
try {
|
||||
const input = query || '';
|
||||
const chars = CharStreams.fromString(input);
|
||||
const lexer = new TraceOperatorGrammarLexer(chars);
|
||||
|
||||
const tokenStream = new CommonTokenStream(lexer);
|
||||
tokenStream.fill();
|
||||
|
||||
const allTokens = tokenStream.tokens as IToken[];
|
||||
const expressionPairs: ITraceExpressionPair[] = [];
|
||||
let currentPair: Partial<ITraceExpressionPair> | null = null;
|
||||
|
||||
let i = 0;
|
||||
while (i < allTokens.length) {
|
||||
const token = allTokens[i];
|
||||
i++;
|
||||
|
||||
// Skip EOF and whitespace tokens
|
||||
if (token.type === TraceOperatorGrammarLexer.EOF || token.channel !== 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If token is an IDENTIFIER (atom), start or continue a pair
|
||||
if (isAtomToken(token.type)) {
|
||||
// If we don't have a current pair, start one
|
||||
if (!currentPair) {
|
||||
currentPair = {
|
||||
leftAtom: token.text,
|
||||
position: {
|
||||
leftStart: token.start,
|
||||
leftEnd: token.stop,
|
||||
operatorStart: 0,
|
||||
operatorEnd: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
// If we have a current pair but no operator yet, this is still the left atom
|
||||
else if (!currentPair.operator && currentPair.position) {
|
||||
currentPair.leftAtom = token.text;
|
||||
currentPair.position.leftStart = token.start;
|
||||
currentPair.position.leftEnd = token.stop;
|
||||
}
|
||||
// If we have an operator, this is the right atom
|
||||
else if (
|
||||
currentPair.operator &&
|
||||
!currentPair.rightAtom &&
|
||||
currentPair.position
|
||||
) {
|
||||
currentPair.rightAtom = token.text;
|
||||
currentPair.position.rightStart = token.start;
|
||||
currentPair.position.rightEnd = token.stop;
|
||||
currentPair.isComplete = true;
|
||||
|
||||
// Add the completed pair to the result
|
||||
expressionPairs.push(currentPair as ITraceExpressionPair);
|
||||
currentPair = null;
|
||||
}
|
||||
}
|
||||
// If token is an operator and we have a left atom
|
||||
else if (
|
||||
isOperatorToken(token.type) &&
|
||||
currentPair &&
|
||||
currentPair.leftAtom &&
|
||||
currentPair.position
|
||||
) {
|
||||
currentPair.operator = token.text;
|
||||
currentPair.position.operatorStart = token.start;
|
||||
currentPair.position.operatorEnd = token.stop;
|
||||
|
||||
// If this is a NOT operator, it might be followed by another operator
|
||||
if (token.type === TraceOperatorGrammarLexer.T__5 && i < allTokens.length) {
|
||||
// Look ahead for the next operator
|
||||
const nextToken = allTokens[i];
|
||||
if (isOperatorToken(nextToken.type) && nextToken.channel === 0) {
|
||||
currentPair.operator = `${token.text} ${nextToken.text}`;
|
||||
currentPair.position.operatorEnd = nextToken.stop;
|
||||
i++; // Skip the next token since we've consumed it
|
||||
}
|
||||
}
|
||||
}
|
||||
// If token is an opening parenthesis after an operator, this is a right expression
|
||||
else if (
|
||||
isOpeningParenthesis(token.type) &&
|
||||
currentPair &&
|
||||
currentPair.operator &&
|
||||
!currentPair.rightAtom &&
|
||||
currentPair.position
|
||||
) {
|
||||
// Find the matching closing parenthesis
|
||||
let parenCount = 1;
|
||||
let j = i;
|
||||
let rightExpression = '';
|
||||
const rightStart = token.start;
|
||||
let rightEnd = token.stop;
|
||||
|
||||
while (j < allTokens.length && parenCount > 0) {
|
||||
const parenToken = allTokens[j];
|
||||
if (parenToken.channel === 0) {
|
||||
if (isOpeningParenthesis(parenToken.type)) {
|
||||
parenCount++;
|
||||
} else if (isClosingParenthesis(parenToken.type)) {
|
||||
parenCount--;
|
||||
if (parenCount === 0) {
|
||||
rightEnd = parenToken.stop;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
rightExpression += parenToken.text;
|
||||
j++;
|
||||
}
|
||||
|
||||
if (parenCount === 0) {
|
||||
currentPair.rightExpression = rightExpression;
|
||||
currentPair.position.rightStart = rightStart;
|
||||
currentPair.position.rightEnd = rightEnd;
|
||||
currentPair.isComplete = true;
|
||||
|
||||
// Add the completed pair to the result
|
||||
expressionPairs.push(currentPair as ITraceExpressionPair);
|
||||
currentPair = null;
|
||||
|
||||
// Skip to the end of the expression
|
||||
i = j;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add any remaining incomplete pair
|
||||
if (currentPair && currentPair.leftAtom && currentPair.position) {
|
||||
expressionPairs.push({
|
||||
...currentPair,
|
||||
isComplete: !!(currentPair.leftAtom && currentPair.operator),
|
||||
} as ITraceExpressionPair);
|
||||
}
|
||||
|
||||
return expressionPairs;
|
||||
} catch (error) {
|
||||
console.error('Error in extractTraceExpressionPairs:', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current expression pair at the cursor position
|
||||
*
|
||||
* @param expressionPairs An array of ITraceExpressionPair objects
|
||||
* @param query The full query string
|
||||
* @param cursorIndex The position of the cursor in the query
|
||||
* @returns The expression pair at the cursor position, or null if not found
|
||||
*/
|
||||
export function getCurrentTraceExpressionPair(
|
||||
expressionPairs: ITraceExpressionPair[],
|
||||
cursorIndex: number,
|
||||
): ITraceExpressionPair | null {
|
||||
try {
|
||||
if (expressionPairs.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Find the rightmost pair whose end position is before or at the cursor
|
||||
let bestMatch: ITraceExpressionPair | null = null;
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const pair of expressionPairs) {
|
||||
const { position } = pair;
|
||||
const pairEnd =
|
||||
position.rightEnd || position.operatorEnd || position.leftEnd;
|
||||
const pairStart = position.leftStart;
|
||||
|
||||
// If the cursor falls within this pair (inclusive of the position just after it) and the pair extends further right than the previous best match
|
||||
if (
|
||||
pairStart <= cursorIndex &&
|
||||
cursorIndex <= pairEnd + 1 &&
|
||||
(!bestMatch ||
|
||||
pairEnd >
|
||||
(bestMatch.position.rightEnd ||
|
||||
bestMatch.position.operatorEnd ||
|
||||
bestMatch.position.leftEnd))
|
||||
) {
|
||||
bestMatch = pair;
|
||||
}
|
||||
}
|
||||
|
||||
return bestMatch;
|
||||
} catch (error) {
|
||||
console.error('Error in getCurrentTraceExpressionPair:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the current trace operator context at the cursor position
|
||||
* This is useful for determining what kind of suggestions to show
|
||||
*
|
||||
* @param query The trace operator query string
|
||||
* @param cursorIndex The position of the cursor in the query
|
||||
* @returns The trace operator context at the cursor position
|
||||
*/
|
||||
export function getTraceOperatorContextAtCursor(
|
||||
query: string,
|
||||
cursorIndex: number,
|
||||
): ITraceOperatorContext {
|
||||
try {
|
||||
// Guard against infinite recursion
|
||||
const stackTrace = new Error().stack || '';
|
||||
const callCount = (stackTrace.match(/getTraceOperatorContextAtCursor/g) || [])
|
||||
.length;
|
||||
if (callCount > 3) {
|
||||
console.warn(
|
||||
'Potential infinite recursion detected in getTraceOperatorContextAtCursor',
|
||||
);
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Create input stream and lexer
|
||||
const input = query || '';
|
||||
const chars = CharStreams.fromString(input);
|
||||
const lexer = new TraceOperatorGrammarLexer(chars);
|
||||
|
||||
const tokenStream = new CommonTokenStream(lexer);
|
||||
tokenStream.fill();
|
||||
|
||||
const allTokens = tokenStream.tokens as IToken[];
|
||||
|
||||
// Get expression pairs information
|
||||
const expressionPairs = extractTraceExpressionPairs(query);
|
||||
const currentPair = getCurrentTraceExpressionPair(
|
||||
expressionPairs,
|
||||
cursorIndex,
|
||||
);
|
||||
|
||||
// Find the token at or just before the cursor
|
||||
let lastTokenBeforeCursor: IToken | null = null;
|
||||
for (let i = 0; i < allTokens.length; i++) {
|
||||
const token = allTokens[i];
|
||||
if (token.type === TraceOperatorGrammarLexer.EOF) continue;
|
||||
|
||||
if (token.stop < cursorIndex || token.stop + 1 === cursorIndex) {
|
||||
lastTokenBeforeCursor = token;
|
||||
}
|
||||
|
||||
if (token.start > cursorIndex) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Find exact token at cursor
|
||||
let exactToken: IToken | null = null;
|
||||
for (let i = 0; i < allTokens.length; i++) {
|
||||
const token = allTokens[i];
|
||||
if (token.type === TraceOperatorGrammarLexer.EOF) continue;
|
||||
|
||||
if (token.start <= cursorIndex && cursorIndex <= token.stop + 1) {
|
||||
exactToken = token;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If we don't have any tokens, return default context
|
||||
if (!lastTokenBeforeCursor && !exactToken) {
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInAtom: true, // Default to atom context when input is empty
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs,
|
||||
currentPair: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Check if cursor is at a space after a token (transition point)
|
||||
const isAtSpace = cursorIndex < query.length && query[cursorIndex] === ' ';
|
||||
const isAfterSpace = cursorIndex > 0 && query[cursorIndex - 1] === ' ';
|
||||
const isAfterToken = cursorIndex > 0 && query[cursorIndex - 1] !== ' ';
|
||||
const isTransitionPoint =
|
||||
(isAtSpace && isAfterToken) ||
|
||||
(cursorIndex === query.length && isAfterToken);
|
||||
|
||||
// If we're at a transition point after a token, progress the context
|
||||
if (
|
||||
lastTokenBeforeCursor &&
|
||||
(isAtSpace || isAfterSpace || isTransitionPoint)
|
||||
) {
|
||||
const lastTokenContext = determineTraceTokenContext(lastTokenBeforeCursor);
|
||||
|
||||
// Apply context progression: atom → operator → atom/expression → operator → atom
|
||||
if (lastTokenContext.isInAtom) {
|
||||
// After atom + space, move to operator context
|
||||
return {
|
||||
tokenType: lastTokenBeforeCursor.type,
|
||||
text: lastTokenBeforeCursor.text,
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: lastTokenBeforeCursor.text,
|
||||
isInAtom: false,
|
||||
isInOperator: true,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
atomToken: lastTokenBeforeCursor.text,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
|
||||
if (lastTokenContext.isInOperator) {
|
||||
// After operator + space, move to atom/expression context
|
||||
return {
|
||||
tokenType: lastTokenBeforeCursor.type,
|
||||
text: lastTokenBeforeCursor.text,
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: lastTokenBeforeCursor.text,
|
||||
isInAtom: true, // Expecting an atom or expression after operator
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
operatorToken: lastTokenBeforeCursor.text,
|
||||
atomToken: currentPair?.leftAtom,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
lastTokenContext.isInParenthesis &&
|
||||
isClosingParenthesis(lastTokenBeforeCursor.type)
|
||||
) {
|
||||
// After closing parenthesis, move to operator context
|
||||
return {
|
||||
tokenType: lastTokenBeforeCursor.type,
|
||||
text: lastTokenBeforeCursor.text,
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: lastTokenBeforeCursor.text,
|
||||
isInAtom: false,
|
||||
isInOperator: true,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// If cursor is at the end of a token, return the current token context
|
||||
if (exactToken && cursorIndex === exactToken.stop + 1) {
|
||||
const tokenContext = determineTraceTokenContext(exactToken);
|
||||
|
||||
return {
|
||||
tokenType: exactToken.type,
|
||||
text: exactToken.text,
|
||||
start: exactToken.start,
|
||||
stop: exactToken.stop,
|
||||
currentToken: exactToken.text,
|
||||
...tokenContext,
|
||||
atomToken: tokenContext.isInAtom ? exactToken.text : currentPair?.leftAtom,
|
||||
operatorToken: tokenContext.isInOperator
|
||||
? exactToken.text
|
||||
: currentPair?.operator,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
|
||||
// Regular token-based context detection
|
||||
if (exactToken?.channel === 0) {
|
||||
const tokenContext = determineTraceTokenContext(exactToken);
|
||||
|
||||
return {
|
||||
tokenType: exactToken.type,
|
||||
text: exactToken.text,
|
||||
start: exactToken.start,
|
||||
stop: exactToken.stop,
|
||||
currentToken: exactToken.text,
|
||||
...tokenContext,
|
||||
atomToken: tokenContext.isInAtom ? exactToken.text : currentPair?.leftAtom,
|
||||
operatorToken: tokenContext.isInOperator
|
||||
? exactToken.text
|
||||
: currentPair?.operator,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
}
|
||||
|
||||
// Default fallback to atom context
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs,
|
||||
currentPair,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error('Error in getTraceOperatorContextAtCursor:', error);
|
||||
return {
|
||||
tokenType: -1,
|
||||
text: '',
|
||||
start: cursorIndex,
|
||||
stop: cursorIndex,
|
||||
currentToken: '',
|
||||
isInAtom: true,
|
||||
isInOperator: false,
|
||||
isInParenthesis: false,
|
||||
isInExpression: false,
|
||||
expressionPairs: [],
|
||||
currentPair: null,
|
||||
};
|
||||
}
|
||||
}
|
||||
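// Illustrative usage sketch (not part of the original change), mirroring the
// unit tests earlier in this diff: with the cursor on '=' in 'A => B', the
// returned context reports an operator position and carries the preceding atom,
// which is what a caller can use to decide which suggestions to show.
const exampleContext = getTraceOperatorContextAtCursor('A => B', 2);
// exampleContext.isInOperator === true and exampleContext.atomToken === 'A',
// so operator suggestions ('=>', '->', '&&', '||', 'NOT') would be appropriate here.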
@@ -0,0 +1,22 @@
|
||||
import { IBuilderTraceOperator } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
export const getInvolvedQueriesInTraceOperator = (
|
||||
traceOperators: IBuilderTraceOperator[],
|
||||
): string[] => {
|
||||
if (
|
||||
!traceOperators ||
|
||||
traceOperators.length === 0 ||
|
||||
traceOperators.length > 1
|
||||
)
|
||||
return [];
|
||||
|
||||
const currentTraceOperator = traceOperators[0];
|
||||
|
||||
// Match any word starting with letter or underscore
|
||||
const tokens =
|
||||
currentTraceOperator.expression.match(/\b[A-Za-z_][A-Za-z0-9_]*\b/g) || [];
|
||||
|
||||
// Filter out operator keywords
|
||||
const operators = new Set(['NOT']);
|
||||
return tokens.filter((t) => !operators.has(t));
|
||||
};
|
||||
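// Illustrative usage sketch (not part of the original change), mirroring the
// unit test earlier in this diff: operator keywords such as NOT are dropped
// while the referenced query names are returned in order of appearance.
const involvedQueries = getInvolvedQueriesInTraceOperator([
  ({ expression: 'A => (NOT B || C)' } as unknown) as IBuilderTraceOperator,
]);
// involvedQueries === ['A', 'B', 'C']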
@@ -17,6 +17,19 @@
|
||||
font-weight: var(--font-weight-normal);
|
||||
}
|
||||
|
||||
.view-title-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
justify-content: center;
|
||||
|
||||
.icon-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
}
|
||||
|
||||
.tab {
|
||||
border: 1px solid var(--bg-slate-400);
|
||||
&:hover {
|
||||
|
||||
@@ -6,6 +6,7 @@ import { RadioChangeEvent } from 'antd/es/radio';
|
||||
interface Option {
|
||||
value: string;
|
||||
label: string;
|
||||
icon?: React.ReactNode;
|
||||
}
|
||||
|
||||
interface SignozRadioGroupProps {
|
||||
@@ -37,7 +38,10 @@ function SignozRadioGroup({
|
||||
value={option.value}
|
||||
className={value === option.value ? 'selected_view tab' : 'tab'}
|
||||
>
|
||||
{option.label}
|
||||
<div className="view-title-container">
|
||||
{option.icon && <div className="icon-container">{option.icon}</div>}
|
||||
{option.label}
|
||||
</div>
|
||||
</Radio.Button>
|
||||
))}
|
||||
</Radio.Group>
|
||||
|
||||
@@ -17,6 +17,27 @@ export const OPERATORS = {
|
||||
'<': '<',
|
||||
};
|
||||
|
||||
export const TRACE_OPERATOR_OPERATORS = {
|
||||
AND: '&&',
|
||||
OR: '||',
|
||||
NOT: 'NOT',
|
||||
DIRECT_DESCENDENT: '=>',
|
||||
INDIRECT_DESCENDENT: '->',
|
||||
};
|
||||
|
||||
export const TRACE_OPERATOR_OPERATORS_WITH_PRIORITY = {
|
||||
[TRACE_OPERATOR_OPERATORS.DIRECT_DESCENDENT]: 1,
|
||||
[TRACE_OPERATOR_OPERATORS.AND]: 2,
|
||||
[TRACE_OPERATOR_OPERATORS.OR]: 3,
|
||||
[TRACE_OPERATOR_OPERATORS.NOT]: 4,
|
||||
[TRACE_OPERATOR_OPERATORS.INDIRECT_DESCENDENT]: 5,
|
||||
};
|
||||
|
||||
export const TRACE_OPERATOR_OPERATORS_LABELS = {
|
||||
[TRACE_OPERATOR_OPERATORS.DIRECT_DESCENDENT]: 'Direct Descendant',
|
||||
[TRACE_OPERATOR_OPERATORS.INDIRECT_DESCENDENT]: 'Indirect Descendant',
|
||||
};
|
||||
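// Illustrative sketch (not part of the original change; how these maps are read
// elsewhere is an assumption): resolve a display label for a descendant
// operator, falling back to the raw token for operators without a label.
const getTraceOperatorLabel = (operator: string): string =>
  TRACE_OPERATOR_OPERATORS_LABELS[operator] ?? operator;
// getTraceOperatorLabel('=>') === 'Direct Descendant'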
|
||||
export const QUERY_BUILDER_FUNCTIONS = {
|
||||
HAS: 'has',
|
||||
HASANY: 'hasAny',
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
HavingForm,
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
IClickHouseQuery,
|
||||
IPromQLQuery,
|
||||
Query,
|
||||
@@ -50,6 +51,8 @@ import {
|
||||
export const MAX_FORMULAS = 20;
|
||||
export const MAX_QUERIES = 26;
|
||||
|
||||
export const TRACE_OPERATOR_QUERY_NAME = 'Trace Operator';
|
||||
|
||||
export const idDivider = '--';
|
||||
export const selectValueDivider = '__';
|
||||
|
||||
@@ -263,6 +266,11 @@ export const initialFormulaBuilderFormValues: IBuilderFormula = {
|
||||
legend: '',
|
||||
};
|
||||
|
||||
export const initialQueryBuilderFormTraceOperatorValues: IBuilderTraceOperator = {
|
||||
...initialQueryBuilderFormTracesValues,
|
||||
queryName: TRACE_OPERATOR_QUERY_NAME,
|
||||
};
|
||||
|
||||
export const initialQueryPromQLData: IPromQLQuery = {
|
||||
name: createNewBuilderItemName({ existNames: [], sourceNames: alphabet }),
|
||||
query: '',
|
||||
@@ -280,6 +288,7 @@ export const initialClickHouseData: IClickHouseQuery = {
|
||||
export const initialQueryBuilderData: QueryBuilderData = {
|
||||
queryData: [initialQueryBuilderFormValues],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
};
|
||||
|
||||
export const initialSingleQueryMap: Record<
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { TRACE_OPERATOR_QUERY_NAME } from './queryBuilder';
|
||||
|
||||
export const FORMULA_REGEXP = /F\d+/;
|
||||
|
||||
export const HAVING_FILTER_REGEXP = /^[-\d.,\s]+$/;
|
||||
@@ -5,3 +7,5 @@ export const HAVING_FILTER_REGEXP = /^[-\d.,\s]+$/;
|
||||
export const TYPE_ADDON_REGEXP = /_(.+)/;
|
||||
|
||||
export const SPLIT_FIRST_UNDERSCORE = /(?<!^)_/;
|
||||
|
||||
export const TRACE_OPERATOR_REGEXP = new RegExp(TRACE_OPERATOR_QUERY_NAME);
|
||||
|
||||
@@ -507,6 +507,7 @@ export const getDomainMetricsQueryPayload = (
|
||||
legend: '',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -816,6 +817,7 @@ export const getEndPointsQueryPayload = (
|
||||
legend: 'error percentage',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -965,6 +967,7 @@ export const getTopErrorsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1729,6 +1732,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
legend: 'error percentage',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1928,6 +1932,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2016,6 +2021,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2287,6 +2293,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
legend: 'error percentage',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2376,6 +2383,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2464,6 +2472,7 @@ export const getEndPointDetailsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2558,6 +2567,7 @@ export const getEndPointZeroStateQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -3135,6 +3145,7 @@ export const getStatusCodeBarChartWidgetData = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -54,6 +54,7 @@ function QuerySection({
|
||||
queryVariant: 'static',
|
||||
initialDataSource: ALERTS_DATA_SOURCE_MAP[alertType],
|
||||
}}
|
||||
showTraceOperator={alertType === AlertTypes.TRACES_BASED_ALERT}
|
||||
showFunctions={
|
||||
(alertType === AlertTypes.METRICS_BASED_ALERT &&
|
||||
alertDef.version === ENTITY_VERSION_V4) ||
|
||||
|
||||
@@ -5,6 +5,7 @@ import { Button, FormInstance, Modal, SelectProps, Typography } from 'antd';
|
||||
import saveAlertApi from 'api/alerts/save';
|
||||
import testAlertApi from 'api/alerts/testAlert';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { getInvolvedQueriesInTraceOperator } from 'components/QueryBuilderV2/QueryV2/TraceOperator/utils/utils';
|
||||
import { ALERTS_DATA_SOURCE_MAP } from 'constants/alerts';
|
||||
import { FeatureKeys } from 'constants/features';
|
||||
import { QueryParams } from 'constants/query';
|
||||
@@ -149,10 +150,17 @@ function FormAlertRules({
|
||||
]);
|
||||
|
||||
const queryOptions = useMemo(() => {
|
||||
const involvedQueriesInTraceOperator = getInvolvedQueriesInTraceOperator(
|
||||
currentQuery.builder.queryTraceOperator,
|
||||
);
|
||||
const queryConfig: Record<EQueryType, () => SelectProps['options']> = {
|
||||
[EQueryType.QUERY_BUILDER]: () => [
|
||||
...(getSelectedQueryOptions(currentQuery.builder.queryData) || []),
|
||||
...(getSelectedQueryOptions(currentQuery.builder.queryData)?.filter(
|
||||
(option) =>
|
||||
!involvedQueriesInTraceOperator.includes(option.value as string),
|
||||
) || []),
|
||||
...(getSelectedQueryOptions(currentQuery.builder.queryFormulas) || []),
|
||||
...(getSelectedQueryOptions(currentQuery.builder.queryTraceOperator) || []),
|
||||
],
|
||||
[EQueryType.PROM]: () => getSelectedQueryOptions(currentQuery.promql),
|
||||
[EQueryType.CLICKHOUSE]: () =>
|
||||
|
||||
@@ -5,6 +5,7 @@ import getStep from 'lib/getStep';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
IClickHouseQuery,
|
||||
IPromQLQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -53,7 +54,11 @@ export const getUpdatedStepInterval = (evalWindow?: string): number => {
|
||||
|
||||
export const getSelectedQueryOptions = (
|
||||
queries: Array<
|
||||
IBuilderQuery | IBuilderFormula | IClickHouseQuery | IPromQLQuery
|
||||
| IBuilderQuery
|
||||
| IBuilderTraceOperator
|
||||
| IBuilderFormula
|
||||
| IClickHouseQuery
|
||||
| IPromQLQuery
|
||||
>,
|
||||
): SelectProps['options'] =>
|
||||
queries
|
||||
|
||||
@@ -90,6 +90,7 @@ const mockProps: WidgetGraphComponentProps = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -131,6 +131,7 @@ describe('GridCardLayout Utils', () => {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
promql: [],
|
||||
@@ -171,6 +172,7 @@ describe('GridCardLayout Utils', () => {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -195,6 +197,7 @@ describe('GridCardLayout Utils', () => {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -240,6 +243,7 @@ describe('GridCardLayout Utils', () => {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -268,6 +272,7 @@ describe('GridCardLayout Utils', () => {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -162,6 +162,7 @@ export const widgetQueryWithLegend = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
id: '48ad5a67-9a3c-49d4-a886-d7a34f8b875d',
|
||||
queryType: 'builder',
|
||||
@@ -457,6 +458,7 @@ export const widgetQueryQBv5MultiAggregations = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
id: 'qb-v5-multi-aggregations-test',
|
||||
queryType: 'builder',
|
||||
|
||||
@@ -301,6 +301,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -490,6 +491,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -575,6 +577,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -660,6 +663,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -797,6 +801,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1050,6 +1055,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1257,6 +1263,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1522,6 +1529,7 @@ export const getClusterMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -233,6 +233,7 @@ export const getDaemonSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -416,6 +417,7 @@ export const getDaemonSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -512,6 +514,7 @@ export const getDaemonSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -608,6 +611,7 @@ export const getDaemonSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -196,6 +196,7 @@ export const getDeploymentMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -346,6 +347,7 @@ export const getDeploymentMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -431,6 +433,7 @@ export const getDeploymentMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -516,6 +519,7 @@ export const getDeploymentMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -79,6 +79,7 @@ export const getEntityEventsOrLogsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
id: uuidv4(),
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
@@ -226,6 +227,7 @@ export const getEntityTracesQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
id: '572f1d91-6ac0-46c0-b726-c21488b34434',
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
|
||||
@@ -108,6 +108,7 @@ export const getJobMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -191,6 +192,7 @@ export const getJobMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -287,6 +289,7 @@ export const getJobMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -383,6 +386,7 @@ export const getJobMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -309,6 +309,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -576,6 +577,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -655,6 +657,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -734,6 +737,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -819,6 +823,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -904,6 +909,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1075,6 +1081,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1212,6 +1219,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1429,6 +1437,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1561,6 +1570,7 @@ export const getNamespaceMetricsQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -341,6 +341,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -647,6 +648,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -810,6 +812,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
queryName: 'F2',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -973,6 +976,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
queryName: 'F2',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1052,6 +1056,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1131,6 +1136,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1216,6 +1222,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1301,6 +1308,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1451,6 +1459,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1569,6 +1578,7 @@ export const getNodeMetricsQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -335,6 +335,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -668,6 +669,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -851,6 +853,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1184,6 +1187,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1324,6 +1328,7 @@ export const getPodMetricsQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1407,6 +1412,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1497,6 +1503,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1714,6 +1721,7 @@ export const getPodMetricsQueryPayload = (
|
||||
queryName: 'F2',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -1918,6 +1926,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2135,6 +2144,7 @@ export const getPodMetricsQueryPayload = (
|
||||
queryName: 'F2',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2231,6 +2241,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2327,6 +2338,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
@@ -2510,6 +2522,7 @@ export const getPodMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [
|
||||
{
|
||||
|
||||
@@ -246,6 +246,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -365,6 +366,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -534,6 +536,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -653,6 +656,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -735,6 +739,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -817,6 +822,7 @@ export const getStatefulSetMetricsQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
|
||||
@@ -148,6 +148,7 @@ export const getVolumeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -239,6 +240,7 @@ export const getVolumeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -330,6 +332,7 @@ export const getVolumeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -421,6 +424,7 @@ export const getVolumeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
@@ -512,6 +516,7 @@ export const getVolumeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: v4(),
|
||||
|
||||
@@ -58,6 +58,7 @@ export const mockQuery: Query = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: 'test-query-id',
|
||||
|
||||
@@ -121,6 +121,7 @@ export const getPodQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '9b92756a-b445-45f8-90f4-d26f3ef28f8f',
|
||||
@@ -197,6 +198,7 @@ export const getPodQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'a22c1e03-4876-4b3e-9a96-a3c3a28f9c0f',
|
||||
@@ -337,6 +339,7 @@ export const getPodQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '7bb3a6f5-d1c6-4f2e-9cc9-7dcc46db398f',
|
||||
@@ -477,6 +480,7 @@ export const getPodQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '6d5ccd81-0ea1-4fb9-a66b-7f0fe2f15165',
|
||||
@@ -624,6 +628,7 @@ export const getPodQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '4d03a0ff-4fa5-4b19-b397-97f80ba9e0ac',
|
||||
@@ -772,6 +777,7 @@ export const getPodQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'ad491f19-0f83-4dd4-bb8f-bec295c18d1b',
|
||||
@@ -920,6 +926,7 @@ export const getPodQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '16908d4e-1565-4847-8d87-01ebb8fc494a',
|
||||
@@ -1001,6 +1008,7 @@ export const getPodQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '4b255d6d-4cde-474d-8866-f4418583c18b',
|
||||
@@ -1177,6 +1185,7 @@ export const getNodeQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '259295b5-774d-4b2e-8a4f-e5dd63e6c38d',
|
||||
@@ -1314,6 +1323,7 @@ export const getNodeQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '486af4da-2a1a-4b8f-992c-eba098d3a6f9',
|
||||
@@ -1409,6 +1419,7 @@ export const getNodeQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'b56143c0-7d2f-4425-97c5-65ad6fc87366',
|
||||
@@ -1557,6 +1568,7 @@ export const getNodeQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '57eeac15-615c-4a71-9c61-8e0c0c76b045',
|
||||
@@ -1718,6 +1730,7 @@ export const getHostQueryPayload = (
|
||||
queryName: 'F1',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '315b15fa-ff0c-442f-89f8-2bf4fb1af2f2',
|
||||
@@ -1786,6 +1799,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '40218bfb-a9b7-4974-aead-5bf666e139bf',
|
||||
@@ -1928,6 +1942,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '8e6485ea-7018-43b0-ab27-b210f77b59ad',
|
||||
@@ -2009,6 +2024,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '47173220-44df-4ef6-87f4-31e333c180c7',
|
||||
@@ -2084,6 +2100,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '62eedbc6-c8ad-4d13-80a8-129396e1d1dc',
|
||||
@@ -2159,6 +2176,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '5ddb1b38-53bb-46f5-b4fe-fe832d6b9b24',
|
||||
@@ -2234,6 +2252,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'a849bcce-7684-4852-9134-530b45419b8f',
|
||||
@@ -2309,6 +2328,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'ab685a3d-fa4c-4663-8d94-c452e59038f3',
|
||||
@@ -2369,6 +2389,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '9bd40b51-0790-4cdd-9718-551b2ded5926',
|
||||
@@ -2450,6 +2471,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: '9c6d18ad-89ff-4e38-a15a-440e72ed6ca8',
|
||||
@@ -2524,6 +2546,7 @@ export const getHostQueryPayload = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'f4cfc2a5-78fc-42cc-8f4a-194c8c916132',
|
||||
|
||||
@@ -178,6 +178,10 @@ export const mockQueryBuilderContextValue = {
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
isEnabledQuery: false,
|
||||
lastUsedQuery: 0,
|
||||
handleSetTraceOperatorData: noop,
|
||||
removeAllQueryBuilderEntities: noop,
|
||||
removeTraceOperator: noop,
|
||||
addTraceOperator: noop,
|
||||
setLastUsedQuery: noop,
|
||||
handleSetQueryData: noop,
|
||||
handleSetFormulaData: noop,
|
||||
|
||||
@@ -71,6 +71,7 @@ export function getWidgetQuery(
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
|
||||
@@ -64,6 +64,7 @@ export const getQueryBuilderQueries = ({
|
||||
|
||||
return newQueryData;
|
||||
}),
|
||||
queryTraceOperator: [],
|
||||
});
|
||||
|
||||
export const getQueryBuilderQuerieswithFormula = ({
|
||||
@@ -106,4 +107,5 @@ export const getQueryBuilderQuerieswithFormula = ({
|
||||
}),
|
||||
dataSource,
|
||||
})),
|
||||
queryTraceOperator: [],
|
||||
});
|
||||
|
||||
@@ -71,6 +71,7 @@ export const useGetRelatedMetricsGraphs = ({
|
||||
builder: {
|
||||
queryData: [metric.query],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuidv4(),
|
||||
|
||||
@@ -150,6 +150,7 @@ export function getMetricDetailsQuery(
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -164,6 +164,7 @@ function QuerySection({
|
||||
<QueryBuilderV2
|
||||
panelType={selectedGraph}
|
||||
filterConfigs={filterConfigs}
|
||||
showTraceOperator={selectedGraph !== PANEL_TYPES.LIST}
|
||||
version={selectedDashboard?.data?.version || 'v3'}
|
||||
isListViewPanel={selectedGraph === PANEL_TYPES.LIST}
|
||||
queryComponents={queryComponents}
|
||||
|
||||
@@ -53,6 +53,7 @@ const compositeQueryParam = {
|
||||
legend: '',
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [
|
||||
{
|
||||
|
||||
@@ -33,6 +33,7 @@ const buildSupersetQuery = (extras?: Record<string, unknown>): Query => ({
|
||||
...(extras || {}),
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -528,6 +528,10 @@ export function handleQueryChange(

				return tempQuery;
			}),
			queryTraceOperator:
				newPanelType === PANEL_TYPES.LIST
					? []
					: supersetQuery.builder.queryTraceOperator,
		},
	};
}
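A condensed sketch of what the queryTraceOperator handling above amounts to (illustrative only; the string values standing in for PANEL_TYPES and the query name 'T1' are assumptions, not taken from the diff): switching to the list panel drops any trace operator, every other panel type carries it over unchanged.

// Sketch, not part of the diff: trace operators are cleared for list panels only.
type TraceOperatorLike = { queryName: string; expression?: string };

function nextTraceOperators(
	newPanelType: string,
	current: TraceOperatorLike[],
): TraceOperatorLike[] {
	// 'list' mirrors PANEL_TYPES.LIST in the real code (assumed value).
	return newPanelType === 'list' ? [] : current;
}

nextTraceOperators('list', [{ queryName: 'T1' }]); // => []
nextTraceOperators('graph', [{ queryName: 'T1' }]); // => [{ queryName: 'T1' }]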
@@ -77,6 +77,7 @@ export default function LogsConnectionStatus(): JSX.Element {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: '',
|
||||
|
||||
@@ -30,5 +30,14 @@ export type QueryBuilderProps = {
	isListViewPanel?: boolean;
	showFunctions?: boolean;
	showOnlyWhereClause?: boolean;
	showOnlyTraceOperator?: boolean;
	showTraceViewSelector?: boolean;
	showTraceOperator?: boolean;
	version: string;
	onChangeTraceView?: (view: TraceView) => void;
};

export enum TraceView {
	SPANS = 'spans',
	TRACES = 'traces',
}
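The new TraceView enum and the optional onChangeTraceView prop give the list view a span/trace toggle. A minimal handler sketch follows; only the enum values and the callback type come from the props above, what a consumer does inside the handler is an assumption.

// Local copy of the enum above so the sketch is self-contained.
enum TraceView {
	SPANS = 'spans',
	TRACES = 'traces',
}

// Handler matching the new optional prop: onChangeTraceView?: (view: TraceView) => void
const onChangeTraceView = (view: TraceView): void => {
	if (view === TraceView.TRACES) {
		// e.g. re-run the list query at trace granularity (assumed consumer behaviour)
	} else {
		// TraceView.SPANS: keep span-level rows
	}
};

onChangeTraceView(TraceView.TRACES);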
@@ -1,3 +1,4 @@
|
||||
/* eslint-disable react/require-default-props */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import './QBEntityOptions.styles.scss';
|
||||
|
||||
@@ -39,6 +40,8 @@ interface QBEntityOptionsProps {
|
||||
showCloneOption?: boolean;
|
||||
isListViewPanel?: boolean;
|
||||
index?: number;
|
||||
showTraceOperator?: boolean;
|
||||
hasTraceOperator?: boolean;
|
||||
queryVariant?: 'dropdown' | 'static';
|
||||
onChangeDataSource?: (value: DataSource) => void;
|
||||
}
|
||||
@@ -61,6 +64,8 @@ export default function QBEntityOptions({
|
||||
onCloneQuery,
|
||||
index,
|
||||
queryVariant,
|
||||
hasTraceOperator = false,
|
||||
showTraceOperator = false,
|
||||
onChangeDataSource,
|
||||
}: QBEntityOptionsProps): JSX.Element {
|
||||
const handleCloneEntity = (): void => {
|
||||
@@ -97,7 +102,7 @@ export default function QBEntityOptions({
|
||||
value="query-builder"
|
||||
className="periscope-btn visibility-toggle"
|
||||
onClick={onToggleVisibility}
|
||||
disabled={isListViewPanel}
|
||||
disabled={isListViewPanel && !showTraceOperator}
|
||||
>
|
||||
{entityData.disabled ? <EyeOff size={16} /> : <Eye size={16} />}
|
||||
</Button>
|
||||
@@ -115,6 +120,10 @@ export default function QBEntityOptions({
|
||||
className={cx(
|
||||
'periscope-btn',
|
||||
entityType === 'query' ? 'query-name' : 'formula-name',
|
||||
query?.dataSource === DataSource.TRACES &&
|
||||
(hasTraceOperator || (showTraceOperator && isListViewPanel))
|
||||
? 'has-trace-operator'
|
||||
: '',
|
||||
isLogsExplorerPage && lastUsedQuery === index ? 'sync-btn' : '',
|
||||
)}
|
||||
>
|
||||
@@ -183,4 +192,6 @@ QBEntityOptions.defaultProps = {
|
||||
showCloneOption: true,
|
||||
queryVariant: 'static',
|
||||
onChangeDataSource: noop,
|
||||
hasTraceOperator: false,
|
||||
showTraceOperator: false,
|
||||
};
|
||||
|
||||
@@ -11,5 +11,8 @@ export type QueryProps = {
|
||||
version: string;
|
||||
showSpanScopeSelector?: boolean;
|
||||
showOnlyWhereClause?: boolean;
|
||||
showTraceOperator?: boolean;
|
||||
hasTraceOperator?: boolean;
|
||||
signalSource?: string;
|
||||
isMultiQueryAllowed?: boolean;
|
||||
} & Pick<QueryBuilderProps, 'filterConfigs' | 'queryComponents'>;
|
||||
|
||||
@@ -67,6 +67,7 @@ export const getTraceToLogsQuery = (
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -106,6 +106,16 @@ function ListView({
			];
		}

		// add order by to trace operator
		if (query.builder.queryTraceOperator.length > 0) {
			query.builder.queryTraceOperator[0].orderBy = [
				{
					columnName: orderBy.split(':')[0],
					order: orderBy.split(':')[1] as 'asc' | 'desc',
				},
			];
		}

		return query;
	}, [stagedQuery, orderBy]);
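For clarity, the orderBy value consumed above is a '<column>:<direction>' string; a self-contained sketch of the mapping applied to queryTraceOperator[0].orderBy (the 'timestamp:desc' value is an example, not taken from the diff):

// Sketch mirroring the split logic above.
function toTraceOperatorOrderBy(
	orderBy: string,
): { columnName: string; order: 'asc' | 'desc' }[] {
	return [
		{
			columnName: orderBy.split(':')[0],
			order: orderBy.split(':')[1] as 'asc' | 'desc',
		},
	];
}

toTraceOperatorOrderBy('timestamp:desc');
// => [{ columnName: 'timestamp', order: 'desc' }]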
@@ -37,11 +37,15 @@ function QuerySection(): JSX.Element {
|
||||
};
|
||||
}, [panelTypes, renderOrderBy]);
|
||||
|
||||
const isListViewPanel = useMemo(
|
||||
() => panelTypes === PANEL_TYPES.LIST || panelTypes === PANEL_TYPES.TRACE,
|
||||
[panelTypes],
|
||||
);
|
||||
|
||||
return (
|
||||
<QueryBuilderV2
|
||||
isListViewPanel={
|
||||
panelTypes === PANEL_TYPES.LIST || panelTypes === PANEL_TYPES.TRACE
|
||||
}
|
||||
isListViewPanel={isListViewPanel}
|
||||
showTraceOperator
|
||||
config={{ initialDataSource: DataSource.TRACES, queryVariant: 'static' }}
|
||||
queryComponents={queryComponents}
|
||||
panelType={panelTypes}
|
||||
frontend/src/grammer/TraceOperatorGrammar.g4 (new file, 39 lines)
@@ -0,0 +1,39 @@
grammar TraceOperatorGrammar;

// Entry point of the grammar (the root of the parse tree)
query : expression+ EOF;

// Expression rules
expression
    : 'NOT' expression                            // NOT prefix expression
    | '(' expression ')' operator expression      // Parenthesized operator expression
    | '(' expression ')'                          // Parenthesized expression
    | left=atom operator right=expression         // Binary operator with expression on right
    | left=atom operator '(' expr=expression ')'  // Expression with parentheses inside
    | atom                                        // Simple expression (atom)
    ;

// Atom definition: atoms are identifiers (letters and optional numbers)
atom
    : IDENTIFIER // General atom (combination of letters and numbers)
    ;

// Operator definition
operator
    : '=>'  // Implication
    | '&&'  // AND
    | '||'  // OR
    | 'NOT' // NOT
    | '->'  // Implication
    ;

// Lexer rules

// IDENTIFIER can be a sequence of letters followed by optional numbers
IDENTIFIER
    : [a-zA-Z]+[0-9]* // Letters followed by optional numbers (e.g., A1, B123, C99)
    ;

// Whitespace (to be skipped)
WS
    : [ \t\r\n]+ -> skip; // Skip whitespace
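To make the grammar concrete: expressions such as `A => B`, `NOT C`, or `A && (B || C)` are valid, where A, B, and C are IDENTIFIER atoms referring to query names. Below is a minimal sketch of driving the generated lexer and parser from TypeScript; the imports match the generated files later in this diff, but the helper itself is illustrative, not part of the change.

import { CharStream, CommonTokenStream } from 'antlr4';

import TraceOperatorGrammarLexer from './TraceOperatorGrammarLexer';
import TraceOperatorGrammarParser from './TraceOperatorGrammarParser';

// Parse a trace operator expression, e.g. "A => B && C".
// parser.query() applies the root rule (expression+ EOF) and returns the parse tree.
function parseTraceOperator(
	expression: string,
): ReturnType<TraceOperatorGrammarParser['query']> {
	const chars = new CharStream(expression);
	const lexer = new TraceOperatorGrammarLexer(chars);
	const tokens = new CommonTokenStream(lexer);
	const parser = new TraceOperatorGrammarParser(tokens);
	return parser.query();
}

parseTraceOperator('A => B && C');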
@@ -54,9 +54,11 @@ export const useQueryOperations: UseQueryOperations = ({
	formula,
	isListViewPanel = false,
	entityVersion,
	isForTraceOperator = false,
}) => {
	const {
		handleSetQueryData,
		handleSetTraceOperatorData,
		handleSetFormulaData,
		removeQueryBuilderEntityByIndex,
		panelType,
@@ -400,9 +402,19 @@ export const useQueryOperations: UseQueryOperations = ({
					: value,
			};

			handleSetQueryData(index, newQuery);
			if (isForTraceOperator) {
				handleSetTraceOperatorData(index, newQuery);
			} else {
				handleSetQueryData(index, newQuery);
			}
		},
		[query, index, handleSetQueryData],
		[
			query,
			index,
			handleSetQueryData,
			handleSetTraceOperatorData,
			isForTraceOperator,
		],
	);

	const handleChangeFormulaData: HandleChangeFormulaData = useCallback(
@@ -78,6 +78,7 @@ export const stepIntervalUnchanged = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [{ name: 'A', query: '', legend: '', disabled: false }],
|
||||
clickhouse_sql: [{ name: 'A', legend: '', disabled: false, query: '' }],
|
||||
@@ -242,6 +243,7 @@ export const replaceVariables = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [{ name: 'A', query: '', legend: '', disabled: false }],
|
||||
clickhouse_sql: [{ name: 'A', legend: '', disabled: false, query: '' }],
|
||||
@@ -292,6 +294,7 @@ export const defaultOutput = {
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [{ disabled: false, legend: '', name: 'A', query: '' }],
|
||||
id: 'test-id',
|
||||
@@ -469,6 +472,7 @@ export const outputWithFunctions = {
|
||||
ShiftBy: 0,
|
||||
},
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
promql: [{ name: 'A', query: '', legend: '', disabled: false }],
|
||||
clickhouse_sql: [{ name: 'A', legend: '', disabled: false, query: '' }],
|
||||
|
||||
@@ -25,12 +25,17 @@ const buildBuilderQuery = (
	query: Query,
	panelType: PANEL_TYPES | null,
): ICompositeMetricQuery => {
	const { queryData, queryFormulas } = query.builder;
	const { queryData, queryFormulas, queryTraceOperator } = query.builder;
	const currentQueryData = mapQueryDataToApi(queryData, 'queryName');
	const currentFormulas = mapQueryDataToApi(queryFormulas, 'queryName');
	const currentTraceOperator = mapQueryDataToApi(
		queryTraceOperator,
		'queryName',
	);
	const builderQueries = {
		...currentQueryData.data,
		...currentFormulas.data,
		...currentTraceOperator.data,
	};

	const compositeQuery = defaultCompositeQuery;
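The merged builderQueries map therefore keys every builder entity by its queryName, regardless of kind. A small illustration of the resulting shape (the names A, F1, T1 and the expressions are hypothetical examples, not taken from the diff):

// Sketch of the object produced by the three mapQueryDataToApi calls above.
const builderQueriesExample = {
	A: { queryName: 'A' /* ...builder query fields */ },
	F1: { queryName: 'F1', expression: 'A/2' /* ...formula fields */ },
	T1: { queryName: 'T1', expression: 'A => B' /* ...trace operator fields */ },
};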
@@ -1,8 +1,10 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { initialQueryState } from 'constants/queryBuilder';
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
import {
	IBuilderFormula,
	IBuilderQuery,
	IBuilderTraceOperator,
	IClickHouseQuery,
	IPromQLQuery,
	Query,
@@ -22,10 +24,13 @@ import { v4 as uuid } from 'uuid';
import { transformQueryBuilderDataModel } from '../transformQueryBuilderDataModel';

const mapQueryFromV5 = (compositeQuery: ICompositeMetricQuery): Query => {
	const builderQueries: Record<string, IBuilderQuery | IBuilderFormula> = {};
	const builderQueries: Record<
		string,
		IBuilderQuery | IBuilderFormula | IBuilderTraceOperator
	> = {};
	const builderQueryTypes: Record<
		string,
		'builder_query' | 'builder_formula'
		'builder_query' | 'builder_formula' | 'builder_trace_operator'
	> = {};
	const promQueries: IPromQLQuery[] = [];
	const clickhouseQueries: IClickHouseQuery[] = [];
@@ -46,6 +51,11 @@ const mapQueryFromV5 = (compositeQuery: ICompositeMetricQuery): Query => {
				);
				builderQueryTypes[spec.name] = 'builder_formula';
			}
		} else if (q.type === 'builder_trace_operator') {
			if (spec.name) {
				builderQueries[spec.name] = (spec as unknown) as IBuilderTraceOperator;
				builderQueryTypes[spec.name] = 'builder_trace_operator';
			}
		} else if (q.type === 'promql') {
			const promSpec = spec as PromQuery;
			promQueries.push({
@@ -2,29 +2,41 @@ import {
	initialFormulaBuilderFormValues,
	initialQueryBuilderFormValuesMap,
} from 'constants/queryBuilder';
import { FORMULA_REGEXP } from 'constants/regExp';
import { FORMULA_REGEXP, TRACE_OPERATOR_REGEXP } from 'constants/regExp';
import {
	BuilderQueryDataResourse,
	IBuilderFormula,
	IBuilderQuery,
	IBuilderTraceOperator,
} from 'types/api/queryBuilder/queryBuilderData';
import { QueryBuilderData } from 'types/common/queryBuilder';

export const transformQueryBuilderDataModel = (
	data: BuilderQueryDataResourse,
	queryTypes?: Record<string, 'builder_query' | 'builder_formula'>,
	queryTypes?: Record<
		string,
		'builder_query' | 'builder_formula' | 'builder_trace_operator'
	>,
): QueryBuilderData => {
	const queryData: QueryBuilderData['queryData'] = [];
	const queryFormulas: QueryBuilderData['queryFormulas'] = [];
	const queryTraceOperator: QueryBuilderData['queryTraceOperator'] = [];

	Object.entries(data).forEach(([key, value]) => {
		const isFormula = queryTypes
			? queryTypes[key] === 'builder_formula'
			: FORMULA_REGEXP.test(value.queryName);

		const isTraceOperator = queryTypes
			? queryTypes[key] === 'builder_trace_operator'
			: TRACE_OPERATOR_REGEXP.test(value.queryName);

		if (isFormula) {
			const formula = value as IBuilderFormula;
			queryFormulas.push({ ...initialFormulaBuilderFormValues, ...formula });
		} else if (isTraceOperator) {
			const traceOperator = value as IBuilderTraceOperator;
			queryTraceOperator.push({ ...traceOperator });
		} else {
			const queryFromData = value as IBuilderQuery;
			queryData.push({
@@ -34,5 +46,5 @@ export const transformQueryBuilderDataModel = (
		}
	});

	return { queryData, queryFormulas };
	return { queryData, queryFormulas, queryTraceOperator };
};
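When the explicit queryTypes map is absent, classification falls back to name-based regexes. A hedged sketch of that fallback follows; the regex patterns shown are assumptions standing in for FORMULA_REGEXP and TRACE_OPERATOR_REGEXP, whose exact definitions are not part of this diff.

// Assumed stand-in: formulas are conventionally named F1, F2, ... in the query builder.
const ASSUMED_FORMULA_REGEXP = /^F[0-9]+$/;
// The real TRACE_OPERATOR_REGEXP lives in constants/regExp; this pattern is only illustrative.
const ASSUMED_TRACE_OPERATOR_REGEXP = /^T[0-9]+$/;

function classify(queryName: string): 'formula' | 'trace_operator' | 'query' {
	if (ASSUMED_FORMULA_REGEXP.test(queryName)) return 'formula';
	if (ASSUMED_TRACE_OPERATOR_REGEXP.test(queryName)) return 'trace_operator';
	return 'query';
}

classify('F1'); // => 'formula'
classify('A'); // => 'query'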
@@ -206,6 +206,7 @@ describe('Logs Explorer Tests', () => {
|
||||
initialQueryBuilderFormValues,
|
||||
initialQueryBuilderFormValues,
|
||||
],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
},
|
||||
setSupersetQuery: jest.fn(),
|
||||
@@ -215,6 +216,10 @@ describe('Logs Explorer Tests', () => {
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
isEnabledQuery: false,
|
||||
lastUsedQuery: 0,
|
||||
handleSetTraceOperatorData: noop,
|
||||
removeAllQueryBuilderEntities: noop,
|
||||
removeTraceOperator: noop,
|
||||
addTraceOperator: noop,
|
||||
setLastUsedQuery: noop,
|
||||
handleSetQueryData: noop,
|
||||
handleSetFormulaData: noop,
|
||||
|
||||
@@ -72,6 +72,7 @@ export function getWidgetQuery(
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
|
||||
@@ -155,6 +155,7 @@ export function getWidgetQuery({
|
||||
},
|
||||
],
|
||||
queryFormulas: [],
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import './TracesExplorer.styles.scss';
|
||||
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Callout } from '@signozhq/callout';
|
||||
import { Card } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import cx from 'classnames';
|
||||
@@ -35,7 +36,10 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { useSearchParams } from 'react-router-dom-v5-compat';
|
||||
import { Warning } from 'types/api';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
IBuilderTraceOperator,
|
||||
Query,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { generateExportToDashboardLink } from 'utils/dashboard/generateExportToDashboardLink';
|
||||
import {
|
||||
@@ -52,7 +56,6 @@ function TracesExplorer(): JSX.Element {
|
||||
handleRunQuery,
|
||||
stagedQuery,
|
||||
handleSetConfig,
|
||||
updateQueriesData,
|
||||
} = useQueryBuilder();
|
||||
|
||||
const { options } = useOptionsMenu({
|
||||
@@ -103,32 +106,14 @@ function TracesExplorer(): JSX.Element {
|
||||
handleSetConfig(PANEL_TYPES.LIST, DataSource.TRACES);
|
||||
}
|
||||
|
||||
if (view === ExplorerViews.LIST) {
|
||||
if (
|
||||
selectedView !== ExplorerViews.LIST &&
|
||||
currentQuery?.builder?.queryData?.[0]
|
||||
) {
|
||||
const filterToRetain = currentQuery.builder.queryData[0].filter;
|
||||
|
||||
const newDefaultQuery = updateAllQueriesOperators(
|
||||
initialQueriesMap.traces,
|
||||
PANEL_TYPES.LIST,
|
||||
DataSource.TRACES,
|
||||
);
|
||||
|
||||
const newListQuery = updateQueriesData(
|
||||
newDefaultQuery,
|
||||
'queryData',
|
||||
(item, index) => {
|
||||
if (index === 0) {
|
||||
return { ...item, filter: filterToRetain };
|
||||
}
|
||||
return item;
|
||||
},
|
||||
);
|
||||
setDefaultQuery(newListQuery);
|
||||
}
|
||||
setShouldReset(true);
|
||||
if (
|
||||
(selectedView === ExplorerViews.TRACE ||
|
||||
selectedView === ExplorerViews.LIST) &&
|
||||
stagedQuery?.builder?.queryTraceOperator &&
|
||||
stagedQuery.builder.queryTraceOperator.length > 0
|
||||
) {
|
||||
// remove order by from trace operator
|
||||
stagedQuery.builder.queryTraceOperator[0].orderBy = [];
|
||||
}
|
||||
|
||||
setSelectedView(view);
|
||||
@@ -141,10 +126,8 @@ function TracesExplorer(): JSX.Element {
|
||||
handleSetConfig,
|
||||
handleExplorerTabChange,
|
||||
selectedView,
|
||||
currentQuery,
|
||||
updateAllQueriesOperators,
|
||||
updateQueriesData,
|
||||
setSelectedView,
|
||||
stagedQuery,
|
||||
],
|
||||
);
|
||||
|
||||
@@ -211,19 +194,44 @@ function TracesExplorer(): JSX.Element {
|
||||
|
||||
useShareBuilderUrl({ defaultValue: defaultQuery, forceReset: shouldReset });
|
||||
|
||||
const isMultipleQueries = useMemo(() => {
|
||||
const builder = currentQuery?.builder;
|
||||
const queriesLen = builder?.queryData?.length ?? 0;
|
||||
const formulasLen = builder?.queryFormulas?.length ?? 0;
|
||||
return queriesLen > 1 || formulasLen > 0;
|
||||
}, [currentQuery]);
|
||||
|
||||
const isGroupByExist = useMemo(() => {
|
||||
const queryData = currentQuery?.builder?.queryData ?? [];
|
||||
return queryData.some((q) => (q?.groupBy?.length ?? 0) > 0);
|
||||
}, [currentQuery]);
|
||||
|
||||
const hasMultipleQueries = useMemo(
|
||||
() => currentQuery?.builder?.queryData?.length > 1,
|
||||
[currentQuery],
|
||||
);
|
||||
|
||||
const traceOperator = useMemo((): IBuilderTraceOperator | undefined => {
|
||||
if (
|
||||
currentQuery.builder.queryTraceOperator &&
|
||||
currentQuery.builder.queryTraceOperator.length > 0
|
||||
) {
|
||||
return currentQuery.builder.queryTraceOperator[0];
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}, [currentQuery.builder.queryTraceOperator]);
|
||||
|
||||
const showTraceOperatorCallout = useMemo(
|
||||
() =>
|
||||
(selectedView === ExplorerViews.LIST ||
|
||||
selectedView === ExplorerViews.TRACE) &&
|
||||
hasMultipleQueries &&
|
||||
!traceOperator,
|
||||
[selectedView, hasMultipleQueries, traceOperator],
|
||||
);
|
||||
|
||||
const traceOperatorCalloutDescription = useMemo(() => {
|
||||
if (currentQuery.builder.queryData.length === 0) return '';
|
||||
const firstQuery = currentQuery.builder.queryData[0];
|
||||
return `Please use a Trace Operator to combine results of multiple span queries. Else you'd only see the results from query "${firstQuery.queryName}"`;
|
||||
}, [currentQuery]);
|
||||
|
||||
useEffect(() => {
|
||||
const shouldChangeView = isMultipleQueries || isGroupByExist;
|
||||
const shouldChangeView = isGroupByExist;
|
||||
|
||||
if (
|
||||
(selectedView === ExplorerViews.LIST ||
|
||||
@@ -233,12 +241,7 @@ function TracesExplorer(): JSX.Element {
|
||||
// Switch to timeseries view automatically
|
||||
handleChangeSelectedView(ExplorerViews.TIMESERIES);
|
||||
}
|
||||
}, [
|
||||
selectedView,
|
||||
isMultipleQueries,
|
||||
isGroupByExist,
|
||||
handleChangeSelectedView,
|
||||
]);
|
||||
}, [selectedView, isGroupByExist, handleChangeSelectedView]);
|
||||
|
||||
useEffect(() => {
|
||||
if (shouldReset) {
|
||||
@@ -365,6 +368,15 @@ function TracesExplorer(): JSX.Element {
|
||||
/>
|
||||
</div>
|
||||
|
||||
{showTraceOperatorCallout && (
|
||||
<Callout
|
||||
type="info"
|
||||
size="small"
|
||||
showIcon
|
||||
description={traceOperatorCalloutDescription}
|
||||
/>
|
||||
)}
|
||||
|
||||
{selectedView === ExplorerViews.LIST && (
|
||||
<div className="trace-explorer-list-view">
|
||||
<ListView
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
token literal names:
|
||||
null
|
||||
'NOT'
|
||||
'('
|
||||
')'
|
||||
'=>'
|
||||
'&&'
|
||||
'||'
|
||||
'->'
|
||||
null
|
||||
null
|
||||
|
||||
token symbolic names:
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
IDENTIFIER
|
||||
WS
|
||||
|
||||
rule names:
|
||||
query
|
||||
expression
|
||||
atom
|
||||
operator
|
||||
|
||||
|
||||
atn:
|
||||
[4, 1, 9, 45, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 1, 0, 4, 0, 10, 8, 0, 11, 0, 12, 0, 11, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 39, 8, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 0, 0, 4, 0, 2, 4, 6, 0, 1, 2, 0, 1, 1, 4, 7, 46, 0, 9, 1, 0, 0, 0, 2, 38, 1, 0, 0, 0, 4, 40, 1, 0, 0, 0, 6, 42, 1, 0, 0, 0, 8, 10, 3, 2, 1, 0, 9, 8, 1, 0, 0, 0, 10, 11, 1, 0, 0, 0, 11, 9, 1, 0, 0, 0, 11, 12, 1, 0, 0, 0, 12, 13, 1, 0, 0, 0, 13, 14, 5, 0, 0, 1, 14, 1, 1, 0, 0, 0, 15, 16, 5, 1, 0, 0, 16, 39, 3, 2, 1, 0, 17, 18, 5, 2, 0, 0, 18, 19, 3, 2, 1, 0, 19, 20, 5, 3, 0, 0, 20, 21, 3, 6, 3, 0, 21, 22, 3, 2, 1, 0, 22, 39, 1, 0, 0, 0, 23, 24, 5, 2, 0, 0, 24, 25, 3, 2, 1, 0, 25, 26, 5, 3, 0, 0, 26, 39, 1, 0, 0, 0, 27, 28, 3, 4, 2, 0, 28, 29, 3, 6, 3, 0, 29, 30, 3, 2, 1, 0, 30, 39, 1, 0, 0, 0, 31, 32, 3, 4, 2, 0, 32, 33, 3, 6, 3, 0, 33, 34, 5, 2, 0, 0, 34, 35, 3, 2, 1, 0, 35, 36, 5, 3, 0, 0, 36, 39, 1, 0, 0, 0, 37, 39, 3, 4, 2, 0, 38, 15, 1, 0, 0, 0, 38, 17, 1, 0, 0, 0, 38, 23, 1, 0, 0, 0, 38, 27, 1, 0, 0, 0, 38, 31, 1, 0, 0, 0, 38, 37, 1, 0, 0, 0, 39, 3, 1, 0, 0, 0, 40, 41, 5, 8, 0, 0, 41, 5, 1, 0, 0, 0, 42, 43, 7, 0, 0, 0, 43, 7, 1, 0, 0, 0, 2, 11, 38]
|
||||
@@ -0,0 +1,16 @@
|
||||
T__0=1
|
||||
T__1=2
|
||||
T__2=3
|
||||
T__3=4
|
||||
T__4=5
|
||||
T__5=6
|
||||
T__6=7
|
||||
IDENTIFIER=8
|
||||
WS=9
|
||||
'NOT'=1
|
||||
'('=2
|
||||
')'=3
|
||||
'=>'=4
|
||||
'&&'=5
|
||||
'||'=6
|
||||
'->'=7
|
||||
@@ -0,0 +1,44 @@
|
||||
token literal names:
|
||||
null
|
||||
'NOT'
|
||||
'('
|
||||
')'
|
||||
'=>'
|
||||
'&&'
|
||||
'||'
|
||||
'->'
|
||||
null
|
||||
null
|
||||
|
||||
token symbolic names:
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
IDENTIFIER
|
||||
WS
|
||||
|
||||
rule names:
|
||||
T__0
|
||||
T__1
|
||||
T__2
|
||||
T__3
|
||||
T__4
|
||||
T__5
|
||||
T__6
|
||||
IDENTIFIER
|
||||
WS
|
||||
|
||||
channel names:
|
||||
DEFAULT_TOKEN_CHANNEL
|
||||
HIDDEN
|
||||
|
||||
mode names:
|
||||
DEFAULT_MODE
|
||||
|
||||
atn:
|
||||
[4, 0, 9, 57, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 7, 4, 7, 41, 8, 7, 11, 7, 12, 7, 42, 1, 7, 5, 7, 46, 8, 7, 10, 7, 12, 7, 49, 9, 7, 1, 8, 4, 8, 52, 8, 8, 11, 8, 12, 8, 53, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 1, 0, 3, 2, 0, 65, 90, 97, 122, 1, 0, 48, 57, 3, 0, 9, 10, 13, 13, 32, 32, 59, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 1, 19, 1, 0, 0, 0, 3, 23, 1, 0, 0, 0, 5, 25, 1, 0, 0, 0, 7, 27, 1, 0, 0, 0, 9, 30, 1, 0, 0, 0, 11, 33, 1, 0, 0, 0, 13, 36, 1, 0, 0, 0, 15, 40, 1, 0, 0, 0, 17, 51, 1, 0, 0, 0, 19, 20, 5, 78, 0, 0, 20, 21, 5, 79, 0, 0, 21, 22, 5, 84, 0, 0, 22, 2, 1, 0, 0, 0, 23, 24, 5, 40, 0, 0, 24, 4, 1, 0, 0, 0, 25, 26, 5, 41, 0, 0, 26, 6, 1, 0, 0, 0, 27, 28, 5, 61, 0, 0, 28, 29, 5, 62, 0, 0, 29, 8, 1, 0, 0, 0, 30, 31, 5, 38, 0, 0, 31, 32, 5, 38, 0, 0, 32, 10, 1, 0, 0, 0, 33, 34, 5, 124, 0, 0, 34, 35, 5, 124, 0, 0, 35, 12, 1, 0, 0, 0, 36, 37, 5, 45, 0, 0, 37, 38, 5, 62, 0, 0, 38, 14, 1, 0, 0, 0, 39, 41, 7, 0, 0, 0, 40, 39, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 40, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 47, 1, 0, 0, 0, 44, 46, 7, 1, 0, 0, 45, 44, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 47, 48, 1, 0, 0, 0, 48, 16, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 50, 52, 7, 2, 0, 0, 51, 50, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 51, 1, 0, 0, 0, 53, 54, 1, 0, 0, 0, 54, 55, 1, 0, 0, 0, 55, 56, 6, 8, 0, 0, 56, 18, 1, 0, 0, 0, 4, 0, 42, 47, 53, 1, 6, 0, 0]
|
||||
@@ -0,0 +1,16 @@
|
||||
T__0=1
|
||||
T__1=2
|
||||
T__2=3
|
||||
T__3=4
|
||||
T__4=5
|
||||
T__5=6
|
||||
T__6=7
|
||||
IDENTIFIER=8
|
||||
WS=9
|
||||
'NOT'=1
|
||||
'('=2
|
||||
')'=3
|
||||
'=>'=4
|
||||
'&&'=5
|
||||
'||'=6
|
||||
'->'=7
|
||||
@@ -0,0 +1,92 @@
|
||||
// Generated from ./TraceOperatorGrammar.g4 by ANTLR 4.13.1
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
import {
|
||||
ATN,
|
||||
ATNDeserializer,
|
||||
CharStream,
|
||||
DecisionState, DFA,
|
||||
Lexer,
|
||||
LexerATNSimulator,
|
||||
RuleContext,
|
||||
PredictionContextCache,
|
||||
Token
|
||||
} from "antlr4";
|
||||
export default class TraceOperatorGrammarLexer extends Lexer {
|
||||
public static readonly T__0 = 1;
|
||||
public static readonly T__1 = 2;
|
||||
public static readonly T__2 = 3;
|
||||
public static readonly T__3 = 4;
|
||||
public static readonly T__4 = 5;
|
||||
public static readonly T__5 = 6;
|
||||
public static readonly T__6 = 7;
|
||||
public static readonly IDENTIFIER = 8;
|
||||
public static readonly WS = 9;
|
||||
public static readonly EOF = Token.EOF;
|
||||
|
||||
public static readonly channelNames: string[] = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN" ];
|
||||
public static readonly literalNames: (string | null)[] = [ null, "'NOT'",
|
||||
"'('", "')'",
|
||||
"'=>'", "'&&'",
|
||||
"'||'", "'->'" ];
|
||||
public static readonly symbolicNames: (string | null)[] = [ null, null,
|
||||
null, null,
|
||||
null, null,
|
||||
null, null,
|
||||
"IDENTIFIER",
|
||||
"WS" ];
|
||||
public static readonly modeNames: string[] = [ "DEFAULT_MODE", ];
|
||||
|
||||
public static readonly ruleNames: string[] = [
|
||||
"T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", "IDENTIFIER",
|
||||
"WS",
|
||||
];
|
||||
|
||||
|
||||
constructor(input: CharStream) {
|
||||
super(input);
|
||||
this._interp = new LexerATNSimulator(this, TraceOperatorGrammarLexer._ATN, TraceOperatorGrammarLexer.DecisionsToDFA, new PredictionContextCache());
|
||||
}
|
||||
|
||||
public get grammarFileName(): string { return "TraceOperatorGrammar.g4"; }
|
||||
|
||||
public get literalNames(): (string | null)[] { return TraceOperatorGrammarLexer.literalNames; }
|
||||
public get symbolicNames(): (string | null)[] { return TraceOperatorGrammarLexer.symbolicNames; }
|
||||
public get ruleNames(): string[] { return TraceOperatorGrammarLexer.ruleNames; }
|
||||
|
||||
public get serializedATN(): number[] { return TraceOperatorGrammarLexer._serializedATN; }
|
||||
|
||||
public get channelNames(): string[] { return TraceOperatorGrammarLexer.channelNames; }
|
||||
|
||||
public get modeNames(): string[] { return TraceOperatorGrammarLexer.modeNames; }
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,0,9,57,6,-1,2,0,7,
|
||||
0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7,6,2,7,7,7,2,8,7,8,1,0,1,
|
||||
0,1,0,1,0,1,1,1,1,1,2,1,2,1,3,1,3,1,3,1,4,1,4,1,4,1,5,1,5,1,5,1,6,1,6,1,
|
||||
6,1,7,4,7,41,8,7,11,7,12,7,42,1,7,5,7,46,8,7,10,7,12,7,49,9,7,1,8,4,8,52,
|
||||
8,8,11,8,12,8,53,1,8,1,8,0,0,9,1,1,3,2,5,3,7,4,9,5,11,6,13,7,15,8,17,9,
|
||||
1,0,3,2,0,65,90,97,122,1,0,48,57,3,0,9,10,13,13,32,32,59,0,1,1,0,0,0,0,
|
||||
3,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,
|
||||
0,15,1,0,0,0,0,17,1,0,0,0,1,19,1,0,0,0,3,23,1,0,0,0,5,25,1,0,0,0,7,27,1,
|
||||
0,0,0,9,30,1,0,0,0,11,33,1,0,0,0,13,36,1,0,0,0,15,40,1,0,0,0,17,51,1,0,
|
||||
0,0,19,20,5,78,0,0,20,21,5,79,0,0,21,22,5,84,0,0,22,2,1,0,0,0,23,24,5,40,
|
||||
0,0,24,4,1,0,0,0,25,26,5,41,0,0,26,6,1,0,0,0,27,28,5,61,0,0,28,29,5,62,
|
||||
0,0,29,8,1,0,0,0,30,31,5,38,0,0,31,32,5,38,0,0,32,10,1,0,0,0,33,34,5,124,
|
||||
0,0,34,35,5,124,0,0,35,12,1,0,0,0,36,37,5,45,0,0,37,38,5,62,0,0,38,14,1,
|
||||
0,0,0,39,41,7,0,0,0,40,39,1,0,0,0,41,42,1,0,0,0,42,40,1,0,0,0,42,43,1,0,
|
||||
0,0,43,47,1,0,0,0,44,46,7,1,0,0,45,44,1,0,0,0,46,49,1,0,0,0,47,45,1,0,0,
|
||||
0,47,48,1,0,0,0,48,16,1,0,0,0,49,47,1,0,0,0,50,52,7,2,0,0,51,50,1,0,0,0,
|
||||
52,53,1,0,0,0,53,51,1,0,0,0,53,54,1,0,0,0,54,55,1,0,0,0,55,56,6,8,0,0,56,
|
||||
18,1,0,0,0,4,0,42,47,53,1,6,0,0];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
if (!TraceOperatorGrammarLexer.__ATN) {
|
||||
TraceOperatorGrammarLexer.__ATN = new ATNDeserializer().deserialize(TraceOperatorGrammarLexer._serializedATN);
|
||||
}
|
||||
|
||||
return TraceOperatorGrammarLexer.__ATN;
|
||||
}
|
||||
|
||||
|
||||
static DecisionsToDFA = TraceOperatorGrammarLexer._ATN.decisionToState.map( (ds: DecisionState, index: number) => new DFA(ds, index) );
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
// Generated from ./TraceOperatorGrammar.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeListener} from "antlr4";
|
||||
|
||||
|
||||
import { QueryContext } from "./TraceOperatorGrammarParser";
|
||||
import { ExpressionContext } from "./TraceOperatorGrammarParser";
|
||||
import { AtomContext } from "./TraceOperatorGrammarParser";
|
||||
import { OperatorContext } from "./TraceOperatorGrammarParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete listener for a parse tree produced by
|
||||
* `TraceOperatorGrammarParser`.
|
||||
*/
|
||||
export default class TraceOperatorGrammarListener extends ParseTreeListener {
|
||||
/**
|
||||
* Enter a parse tree produced by `TraceOperatorGrammarParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `TraceOperatorGrammarParser.query`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitQuery?: (ctx: QueryContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `TraceOperatorGrammarParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `TraceOperatorGrammarParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitExpression?: (ctx: ExpressionContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `TraceOperatorGrammarParser.atom`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterAtom?: (ctx: AtomContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `TraceOperatorGrammarParser.atom`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitAtom?: (ctx: AtomContext) => void;
|
||||
/**
|
||||
* Enter a parse tree produced by `TraceOperatorGrammarParser.operator`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
enterOperator?: (ctx: OperatorContext) => void;
|
||||
/**
|
||||
* Exit a parse tree produced by `TraceOperatorGrammarParser.operator`.
|
||||
* @param ctx the parse tree
|
||||
*/
|
||||
exitOperator?: (ctx: OperatorContext) => void;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,423 @@
|
||||
// Generated from ./TraceOperatorGrammar.g4 by ANTLR 4.13.1
|
||||
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
|
||||
|
||||
import {
|
||||
ATN,
|
||||
ATNDeserializer, DecisionState, DFA, FailedPredicateException,
|
||||
RecognitionException, NoViableAltException, BailErrorStrategy,
|
||||
Parser, ParserATNSimulator,
|
||||
RuleContext, ParserRuleContext, PredictionMode, PredictionContextCache,
|
||||
TerminalNode, RuleNode,
|
||||
Token, TokenStream,
|
||||
Interval, IntervalSet
|
||||
} from 'antlr4';
|
||||
import TraceOperatorGrammarListener from "./TraceOperatorGrammarListener.js";
|
||||
import TraceOperatorGrammarVisitor from "./TraceOperatorGrammarVisitor.js";
|
||||
|
||||
// for running tests with parameters, TODO: discuss strategy for typed parameters in CI
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
type int = number;
|
||||
|
||||
export default class TraceOperatorGrammarParser extends Parser {
|
||||
public static readonly T__0 = 1;
|
||||
public static readonly T__1 = 2;
|
||||
public static readonly T__2 = 3;
|
||||
public static readonly T__3 = 4;
|
||||
public static readonly T__4 = 5;
|
||||
public static readonly T__5 = 6;
|
||||
public static readonly T__6 = 7;
|
||||
public static readonly IDENTIFIER = 8;
|
||||
public static readonly WS = 9;
|
||||
public static readonly EOF = Token.EOF;
|
||||
public static readonly RULE_query = 0;
|
||||
public static readonly RULE_expression = 1;
|
||||
public static readonly RULE_atom = 2;
|
||||
public static readonly RULE_operator = 3;
|
||||
public static readonly literalNames: (string | null)[] = [ null, "'NOT'",
|
||||
"'('", "')'",
|
||||
"'=>'", "'&&'",
|
||||
"'||'", "'->'" ];
|
||||
public static readonly symbolicNames: (string | null)[] = [ null, null,
|
||||
null, null,
|
||||
null, null,
|
||||
null, null,
|
||||
"IDENTIFIER",
|
||||
"WS" ];
|
||||
// tslint:disable:no-trailing-whitespace
|
||||
public static readonly ruleNames: string[] = [
|
||||
"query", "expression", "atom", "operator",
|
||||
];
|
||||
public get grammarFileName(): string { return "TraceOperatorGrammar.g4"; }
|
||||
public get literalNames(): (string | null)[] { return TraceOperatorGrammarParser.literalNames; }
|
||||
public get symbolicNames(): (string | null)[] { return TraceOperatorGrammarParser.symbolicNames; }
|
||||
public get ruleNames(): string[] { return TraceOperatorGrammarParser.ruleNames; }
|
||||
public get serializedATN(): number[] { return TraceOperatorGrammarParser._serializedATN; }
|
||||
|
||||
protected createFailedPredicateException(predicate?: string, message?: string): FailedPredicateException {
|
||||
return new FailedPredicateException(this, predicate, message);
|
||||
}
|
||||
|
||||
constructor(input: TokenStream) {
|
||||
super(input);
|
||||
this._interp = new ParserATNSimulator(this, TraceOperatorGrammarParser._ATN, TraceOperatorGrammarParser.DecisionsToDFA, new PredictionContextCache());
|
||||
}
|
||||
// @RuleVersion(0)
|
||||
public query(): QueryContext {
|
||||
let localctx: QueryContext = new QueryContext(this, this._ctx, this.state);
|
||||
this.enterRule(localctx, 0, TraceOperatorGrammarParser.RULE_query);
|
||||
let _la: number;
|
||||
try {
|
||||
this.enterOuterAlt(localctx, 1);
|
||||
{
|
||||
this.state = 9;
|
||||
this._errHandler.sync(this);
|
||||
_la = this._input.LA(1);
|
||||
do {
|
||||
{
|
||||
{
|
||||
this.state = 8;
|
||||
this.expression();
|
||||
}
|
||||
}
|
||||
this.state = 11;
|
||||
this._errHandler.sync(this);
|
||||
_la = this._input.LA(1);
|
||||
} while ((((_la) & ~0x1F) === 0 && ((1 << _la) & 262) !== 0));
|
||||
this.state = 13;
|
||||
this.match(TraceOperatorGrammarParser.EOF);
|
||||
}
|
||||
}
|
||||
catch (re) {
|
||||
if (re instanceof RecognitionException) {
|
||||
localctx.exception = re;
|
||||
this._errHandler.reportError(this, re);
|
||||
this._errHandler.recover(this, re);
|
||||
} else {
|
||||
throw re;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
this.exitRule();
|
||||
}
|
||||
return localctx;
|
||||
}
|
||||
// @RuleVersion(0)
|
||||
public expression(): ExpressionContext {
|
||||
let localctx: ExpressionContext = new ExpressionContext(this, this._ctx, this.state);
|
||||
this.enterRule(localctx, 2, TraceOperatorGrammarParser.RULE_expression);
|
||||
try {
|
||||
this.state = 38;
|
||||
this._errHandler.sync(this);
|
||||
switch ( this._interp.adaptivePredict(this._input, 1, this._ctx) ) {
|
||||
case 1:
|
||||
this.enterOuterAlt(localctx, 1);
|
||||
{
|
||||
this.state = 15;
|
||||
this.match(TraceOperatorGrammarParser.T__0);
|
||||
this.state = 16;
|
||||
this.expression();
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
this.enterOuterAlt(localctx, 2);
|
||||
{
|
||||
this.state = 17;
|
||||
this.match(TraceOperatorGrammarParser.T__1);
|
||||
this.state = 18;
|
||||
this.expression();
|
||||
this.state = 19;
|
||||
this.match(TraceOperatorGrammarParser.T__2);
|
||||
this.state = 20;
|
||||
this.operator();
|
||||
this.state = 21;
|
||||
this.expression();
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
this.enterOuterAlt(localctx, 3);
|
||||
{
|
||||
this.state = 23;
|
||||
this.match(TraceOperatorGrammarParser.T__1);
|
||||
this.state = 24;
|
||||
this.expression();
|
||||
this.state = 25;
|
||||
this.match(TraceOperatorGrammarParser.T__2);
|
||||
}
|
||||
break;
|
||||
case 4:
|
||||
this.enterOuterAlt(localctx, 4);
|
||||
{
|
||||
this.state = 27;
|
||||
localctx._left = this.atom();
|
||||
this.state = 28;
|
||||
this.operator();
|
||||
this.state = 29;
|
||||
localctx._right = this.expression();
|
||||
}
|
||||
break;
|
||||
case 5:
|
||||
this.enterOuterAlt(localctx, 5);
|
||||
{
|
||||
this.state = 31;
|
||||
localctx._left = this.atom();
|
||||
this.state = 32;
|
||||
this.operator();
|
||||
this.state = 33;
|
||||
this.match(TraceOperatorGrammarParser.T__1);
|
||||
this.state = 34;
|
||||
localctx._expr = this.expression();
|
||||
this.state = 35;
|
||||
this.match(TraceOperatorGrammarParser.T__2);
|
||||
}
|
||||
break;
|
||||
case 6:
|
||||
this.enterOuterAlt(localctx, 6);
|
||||
{
|
||||
this.state = 37;
|
||||
this.atom();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
catch (re) {
|
||||
if (re instanceof RecognitionException) {
|
||||
localctx.exception = re;
|
||||
this._errHandler.reportError(this, re);
|
||||
this._errHandler.recover(this, re);
|
||||
} else {
|
||||
throw re;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
this.exitRule();
|
||||
}
|
||||
return localctx;
|
||||
}
|
||||
// @RuleVersion(0)
|
||||
public atom(): AtomContext {
|
||||
let localctx: AtomContext = new AtomContext(this, this._ctx, this.state);
|
||||
this.enterRule(localctx, 4, TraceOperatorGrammarParser.RULE_atom);
|
||||
try {
|
||||
this.enterOuterAlt(localctx, 1);
|
||||
{
|
||||
this.state = 40;
|
||||
this.match(TraceOperatorGrammarParser.IDENTIFIER);
|
||||
}
|
||||
}
|
||||
catch (re) {
|
||||
if (re instanceof RecognitionException) {
|
||||
localctx.exception = re;
|
||||
this._errHandler.reportError(this, re);
|
||||
this._errHandler.recover(this, re);
|
||||
} else {
|
||||
throw re;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
this.exitRule();
|
||||
}
|
||||
return localctx;
|
||||
}
|
||||
// @RuleVersion(0)
|
||||
public operator(): OperatorContext {
|
||||
let localctx: OperatorContext = new OperatorContext(this, this._ctx, this.state);
|
||||
this.enterRule(localctx, 6, TraceOperatorGrammarParser.RULE_operator);
|
||||
let _la: number;
|
||||
try {
|
||||
this.enterOuterAlt(localctx, 1);
|
||||
{
|
||||
this.state = 42;
|
||||
_la = this._input.LA(1);
|
||||
if(!((((_la) & ~0x1F) === 0 && ((1 << _la) & 242) !== 0))) {
|
||||
this._errHandler.recoverInline(this);
|
||||
}
|
||||
else {
|
||||
this._errHandler.reportMatch(this);
|
||||
this.consume();
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (re) {
|
||||
if (re instanceof RecognitionException) {
|
||||
localctx.exception = re;
|
||||
this._errHandler.reportError(this, re);
|
||||
this._errHandler.recover(this, re);
|
||||
} else {
|
||||
throw re;
|
||||
}
|
||||
}
|
||||
finally {
|
||||
this.exitRule();
|
||||
}
|
||||
return localctx;
|
||||
}
|
||||
|
||||
public static readonly _serializedATN: number[] = [4,1,9,45,2,0,7,0,2,1,
|
||||
7,1,2,2,7,2,2,3,7,3,1,0,4,0,10,8,0,11,0,12,0,11,1,0,1,0,1,1,1,1,1,1,1,1,
|
||||
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
|
||||
1,1,3,1,39,8,1,1,2,1,2,1,3,1,3,1,3,0,0,4,0,2,4,6,0,1,2,0,1,1,4,7,46,0,9,
|
||||
1,0,0,0,2,38,1,0,0,0,4,40,1,0,0,0,6,42,1,0,0,0,8,10,3,2,1,0,9,8,1,0,0,0,
|
||||
10,11,1,0,0,0,11,9,1,0,0,0,11,12,1,0,0,0,12,13,1,0,0,0,13,14,5,0,0,1,14,
|
||||
1,1,0,0,0,15,16,5,1,0,0,16,39,3,2,1,0,17,18,5,2,0,0,18,19,3,2,1,0,19,20,
|
||||
5,3,0,0,20,21,3,6,3,0,21,22,3,2,1,0,22,39,1,0,0,0,23,24,5,2,0,0,24,25,3,
|
||||
2,1,0,25,26,5,3,0,0,26,39,1,0,0,0,27,28,3,4,2,0,28,29,3,6,3,0,29,30,3,2,
|
||||
1,0,30,39,1,0,0,0,31,32,3,4,2,0,32,33,3,6,3,0,33,34,5,2,0,0,34,35,3,2,1,
|
||||
0,35,36,5,3,0,0,36,39,1,0,0,0,37,39,3,4,2,0,38,15,1,0,0,0,38,17,1,0,0,0,
|
||||
38,23,1,0,0,0,38,27,1,0,0,0,38,31,1,0,0,0,38,37,1,0,0,0,39,3,1,0,0,0,40,
|
||||
41,5,8,0,0,41,5,1,0,0,0,42,43,7,0,0,0,43,7,1,0,0,0,2,11,38];
|
||||
|
||||
private static __ATN: ATN;
|
||||
public static get _ATN(): ATN {
|
||||
if (!TraceOperatorGrammarParser.__ATN) {
|
||||
TraceOperatorGrammarParser.__ATN = new ATNDeserializer().deserialize(TraceOperatorGrammarParser._serializedATN);
|
||||
}
|
||||
|
||||
return TraceOperatorGrammarParser.__ATN;
|
||||
}
|
||||
|
||||
|
||||
static DecisionsToDFA = TraceOperatorGrammarParser._ATN.decisionToState.map( (ds: DecisionState, index: number) => new DFA(ds, index) );
|
||||
|
||||
}
|
||||
|
||||
export class QueryContext extends ParserRuleContext {
|
||||
constructor(parser?: TraceOperatorGrammarParser, parent?: ParserRuleContext, invokingState?: number) {
|
||||
super(parent, invokingState);
|
||||
this.parser = parser;
|
||||
}
|
||||
public EOF(): TerminalNode {
|
||||
return this.getToken(TraceOperatorGrammarParser.EOF, 0);
|
||||
}
|
||||
public expression_list(): ExpressionContext[] {
|
||||
return this.getTypedRuleContexts(ExpressionContext) as ExpressionContext[];
|
||||
}
|
||||
public expression(i: number): ExpressionContext {
|
||||
return this.getTypedRuleContext(ExpressionContext, i) as ExpressionContext;
|
||||
}
|
||||
public get ruleIndex(): number {
|
||||
return TraceOperatorGrammarParser.RULE_query;
|
||||
}
|
||||
public enterRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.enterQuery) {
|
||||
listener.enterQuery(this);
|
||||
}
|
||||
}
|
||||
public exitRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.exitQuery) {
|
||||
listener.exitQuery(this);
|
||||
}
|
||||
}
|
||||
// @Override
|
||||
public accept<Result>(visitor: TraceOperatorGrammarVisitor<Result>): Result {
|
||||
if (visitor.visitQuery) {
|
||||
return visitor.visitQuery(this);
|
||||
} else {
|
||||
return visitor.visitChildren(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class ExpressionContext extends ParserRuleContext {
|
||||
public _left!: AtomContext;
|
||||
public _right!: ExpressionContext;
|
||||
public _expr!: ExpressionContext;
|
||||
constructor(parser?: TraceOperatorGrammarParser, parent?: ParserRuleContext, invokingState?: number) {
|
||||
super(parent, invokingState);
|
||||
this.parser = parser;
|
||||
}
|
||||
public expression_list(): ExpressionContext[] {
|
||||
return this.getTypedRuleContexts(ExpressionContext) as ExpressionContext[];
|
||||
}
|
||||
public expression(i: number): ExpressionContext {
|
||||
return this.getTypedRuleContext(ExpressionContext, i) as ExpressionContext;
|
||||
}
|
||||
public operator(): OperatorContext {
|
||||
return this.getTypedRuleContext(OperatorContext, 0) as OperatorContext;
|
||||
}
|
||||
public atom(): AtomContext {
|
||||
return this.getTypedRuleContext(AtomContext, 0) as AtomContext;
|
||||
}
|
||||
public get ruleIndex(): number {
|
||||
return TraceOperatorGrammarParser.RULE_expression;
|
||||
}
|
||||
public enterRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.enterExpression) {
|
||||
listener.enterExpression(this);
|
||||
}
|
||||
}
|
||||
public exitRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.exitExpression) {
|
||||
listener.exitExpression(this);
|
||||
}
|
||||
}
|
||||
// @Override
|
||||
public accept<Result>(visitor: TraceOperatorGrammarVisitor<Result>): Result {
|
||||
if (visitor.visitExpression) {
|
||||
return visitor.visitExpression(this);
|
||||
} else {
|
||||
return visitor.visitChildren(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class AtomContext extends ParserRuleContext {
|
||||
constructor(parser?: TraceOperatorGrammarParser, parent?: ParserRuleContext, invokingState?: number) {
|
||||
super(parent, invokingState);
|
||||
this.parser = parser;
|
||||
}
|
||||
public IDENTIFIER(): TerminalNode {
|
||||
return this.getToken(TraceOperatorGrammarParser.IDENTIFIER, 0);
|
||||
}
|
||||
public get ruleIndex(): number {
|
||||
return TraceOperatorGrammarParser.RULE_atom;
|
||||
}
|
||||
public enterRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.enterAtom) {
|
||||
listener.enterAtom(this);
|
||||
}
|
||||
}
|
||||
public exitRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.exitAtom) {
|
||||
listener.exitAtom(this);
|
||||
}
|
||||
}
|
||||
// @Override
|
||||
public accept<Result>(visitor: TraceOperatorGrammarVisitor<Result>): Result {
|
||||
if (visitor.visitAtom) {
|
||||
return visitor.visitAtom(this);
|
||||
} else {
|
||||
return visitor.visitChildren(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export class OperatorContext extends ParserRuleContext {
|
||||
constructor(parser?: TraceOperatorGrammarParser, parent?: ParserRuleContext, invokingState?: number) {
|
||||
super(parent, invokingState);
|
||||
this.parser = parser;
|
||||
}
|
||||
public get ruleIndex(): number {
|
||||
return TraceOperatorGrammarParser.RULE_operator;
|
||||
}
|
||||
public enterRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.enterOperator) {
|
||||
listener.enterOperator(this);
|
||||
}
|
||||
}
|
||||
public exitRule(listener: TraceOperatorGrammarListener): void {
|
||||
if(listener.exitOperator) {
|
||||
listener.exitOperator(this);
|
||||
}
|
||||
}
|
||||
// @Override
|
||||
public accept<Result>(visitor: TraceOperatorGrammarVisitor<Result>): Result {
|
||||
if (visitor.visitOperator) {
|
||||
return visitor.visitOperator(this);
|
||||
} else {
|
||||
return visitor.visitChildren(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,45 @@
|
||||
// Generated from ./TraceOperatorGrammar.g4 by ANTLR 4.13.1
|
||||
|
||||
import {ParseTreeVisitor} from 'antlr4';
|
||||
|
||||
|
||||
import { QueryContext } from "./TraceOperatorGrammarParser";
|
||||
import { ExpressionContext } from "./TraceOperatorGrammarParser";
|
||||
import { AtomContext } from "./TraceOperatorGrammarParser";
|
||||
import { OperatorContext } from "./TraceOperatorGrammarParser";
|
||||
|
||||
|
||||
/**
|
||||
* This interface defines a complete generic visitor for a parse tree produced
|
||||
* by `TraceOperatorGrammarParser`.
|
||||
*
|
||||
* @param <Result> The return type of the visit operation. Use `void` for
|
||||
* operations with no return type.
|
||||
*/
|
||||
export default class TraceOperatorGrammarVisitor<Result> extends ParseTreeVisitor<Result> {
|
||||
/**
|
||||
* Visit a parse tree produced by `TraceOperatorGrammarParser.query`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitQuery?: (ctx: QueryContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `TraceOperatorGrammarParser.expression`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitExpression?: (ctx: ExpressionContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `TraceOperatorGrammarParser.atom`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitAtom?: (ctx: AtomContext) => Result;
|
||||
/**
|
||||
* Visit a parse tree produced by `TraceOperatorGrammarParser.operator`.
|
||||
* @param ctx the parse tree
|
||||
* @return the visitor result
|
||||
*/
|
||||
visitOperator?: (ctx: OperatorContext) => Result;
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
initialClickHouseData,
|
||||
initialFormulaBuilderFormValues,
|
||||
initialQueriesMap,
|
||||
initialQueryBuilderFormTraceOperatorValues,
|
||||
initialQueryBuilderFormValuesMap,
|
||||
initialQueryPromQLData,
|
||||
initialQueryState,
|
||||
@@ -14,6 +15,7 @@ import {
|
||||
MAX_FORMULAS,
|
||||
MAX_QUERIES,
|
||||
PANEL_TYPES,
|
||||
TRACE_OPERATOR_QUERY_NAME,
|
||||
} from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import {
|
||||
@@ -45,6 +47,7 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
IClickHouseQuery,
|
||||
IPromQLQuery,
|
||||
Query,
|
||||
@@ -72,14 +75,18 @@ export const QueryBuilderContext = createContext<QueryBuilderContextType>({
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
isEnabledQuery: false,
|
||||
handleSetQueryData: () => {},
|
||||
handleSetTraceOperatorData: () => {},
|
||||
handleSetFormulaData: () => {},
|
||||
handleSetQueryItemData: () => {},
|
||||
handleSetConfig: () => {},
|
||||
removeQueryBuilderEntityByIndex: () => {},
|
||||
removeAllQueryBuilderEntities: () => {},
|
||||
removeQueryTypeItemByIndex: () => {},
|
||||
addNewBuilderQuery: () => {},
|
||||
cloneQuery: () => {},
|
||||
addNewFormula: () => {},
|
||||
addTraceOperator: () => {},
|
||||
removeTraceOperator: () => {},
|
||||
addNewQueryItem: () => {},
|
||||
redirectWithQueryBuilderData: () => {},
|
||||
handleRunQuery: () => {},
|
||||
@@ -166,6 +173,10 @@ export function QueryBuilderProvider({
|
||||
...initialFormulaBuilderFormValues,
|
||||
...item,
|
||||
})),
|
||||
queryTraceOperator: query.builder.queryTraceOperator?.map((item) => ({
|
||||
...initialQueryBuilderFormTraceOperatorValues,
|
||||
...item,
|
||||
})),
|
||||
};
|
||||
|
||||
const setupedQueryData = builder.queryData.map((item) => {
|
||||
@@ -378,8 +389,11 @@ export function QueryBuilderProvider({
|
||||
const removeQueryBuilderEntityByIndex = useCallback(
|
||||
(type: keyof QueryBuilderData, index: number) => {
|
||||
setCurrentQuery((prevState) => {
|
||||
const currentArray: (IBuilderQuery | IBuilderFormula)[] =
|
||||
prevState.builder[type];
|
||||
const currentArray: (
|
||||
| IBuilderQuery
|
||||
| IBuilderFormula
|
||||
| IBuilderTraceOperator
|
||||
)[] = prevState.builder[type];
|
||||
|
||||
const filteredArray = currentArray.filter((_, i) => index !== i);
|
||||
|
||||
@@ -393,8 +407,11 @@ export function QueryBuilderProvider({
|
||||
});
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSupersetQuery((prevState) => {
|
||||
const currentArray: (IBuilderQuery | IBuilderFormula)[] =
|
||||
prevState.builder[type];
|
||||
const currentArray: (
|
||||
| IBuilderQuery
|
||||
| IBuilderFormula
|
||||
| IBuilderTraceOperator
|
||||
)[] = prevState.builder[type];
|
||||
|
||||
const filteredArray = currentArray.filter((_, i) => index !== i);
|
||||
|
||||
@@ -410,6 +427,20 @@ export function QueryBuilderProvider({
|
||||
[],
|
||||
);
|
||||
|
||||
const removeAllQueryBuilderEntities = useCallback(
|
||||
(type: keyof QueryBuilderData) => {
|
||||
setCurrentQuery((prevState) => ({
|
||||
...prevState,
|
||||
builder: { ...prevState.builder, [type]: [] },
|
||||
}));
|
||||
setSupersetQuery((prevState) => ({
|
||||
...prevState,
|
||||
builder: { ...prevState.builder, [type]: [] },
|
||||
}));
|
||||
},
|
||||
[setCurrentQuery, setSupersetQuery],
|
||||
);
|
||||
|
||||
const removeQueryTypeItemByIndex = useCallback(
|
||||
(type: EQueryType.PROM | EQueryType.CLICKHOUSE, index: number) => {
|
||||
setCurrentQuery((prevState) => {
|
||||
@@ -632,6 +663,68 @@ export function QueryBuilderProvider({
|
||||
});
|
||||
}, [createNewBuilderFormula]);
|
||||
|
||||
const addTraceOperator = useCallback((expression = '') => {
|
||||
const trimmed = (expression || '').trim();
|
||||
|
||||
setCurrentQuery((prevState) => {
|
||||
const existing = prevState.builder.queryTraceOperator?.[0] || null;
|
||||
const updated: IBuilderTraceOperator = existing
|
||||
? { ...existing, expression: trimmed }
|
||||
: {
|
||||
...initialQueryBuilderFormTraceOperatorValues,
|
||||
queryName: TRACE_OPERATOR_QUERY_NAME,
|
||||
expression: trimmed,
|
||||
};
|
||||
|
||||
return {
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
// enforce single trace operator and replace only expression
|
||||
queryTraceOperator: [updated],
|
||||
},
|
||||
};
|
||||
});
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSupersetQuery((prevState) => {
|
||||
const existing = prevState.builder.queryTraceOperator?.[0] || null;
|
||||
const updated: IBuilderTraceOperator = existing
|
||||
? { ...existing, expression: trimmed }
|
||||
: {
|
||||
...initialQueryBuilderFormTraceOperatorValues,
|
||||
queryName: TRACE_OPERATOR_QUERY_NAME,
|
||||
expression: trimmed,
|
||||
};
|
||||
|
||||
return {
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
// enforce single trace operator and replace only expression
|
||||
queryTraceOperator: [updated],
|
||||
},
|
||||
};
|
||||
});
|
||||
}, []);
|
||||
|
||||
const removeTraceOperator = useCallback(() => {
|
||||
setCurrentQuery((prevState) => ({
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
}));
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSupersetQuery((prevState) => ({
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
queryTraceOperator: [],
|
||||
},
|
||||
}));
|
||||
}, []);
|
||||
|
||||
const updateQueryBuilderData: <T>(
|
||||
arr: T[],
|
||||
index: number,
|
||||
@@ -738,6 +831,44 @@ export function QueryBuilderProvider({
|
||||
},
|
||||
[updateQueryBuilderData, updateSuperSetQueryBuilderData],
|
||||
);
|
||||
|
||||
const handleSetTraceOperatorData = useCallback(
|
||||
(index: number, traceOperatorData: IBuilderTraceOperator): void => {
|
||||
setCurrentQuery((prevState) => {
|
||||
const updatedTraceOperatorBuilderData = updateQueryBuilderData(
|
||||
prevState.builder.queryTraceOperator,
|
||||
index,
|
||||
traceOperatorData,
|
||||
);
|
||||
|
||||
return {
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
queryTraceOperator: updatedTraceOperatorBuilderData,
|
||||
},
|
||||
};
|
||||
});
|
||||
// eslint-disable-next-line sonarjs/no-identical-functions
|
||||
setSupersetQuery((prevState) => {
|
||||
const updatedTraceOperatorBuilderData = updateQueryBuilderData(
|
||||
prevState.builder.queryTraceOperator,
|
||||
index,
|
||||
traceOperatorData,
|
||||
);
|
||||
|
||||
return {
|
||||
...prevState,
|
||||
builder: {
|
||||
...prevState.builder,
|
||||
queryTraceOperator: updatedTraceOperatorBuilderData,
|
||||
},
|
||||
};
|
||||
});
|
||||
},
|
||||
[updateQueryBuilderData],
|
||||
);
|
||||
|
||||
const handleSetFormulaData = useCallback(
|
||||
(index: number, formulaData: IBuilderFormula): void => {
|
||||
setCurrentQuery((prevState) => {
|
||||
@@ -1020,14 +1151,18 @@ export function QueryBuilderProvider({
|
||||
panelType,
|
||||
isEnabledQuery,
|
||||
handleSetQueryData,
|
||||
handleSetTraceOperatorData,
|
||||
handleSetFormulaData,
|
||||
handleSetQueryItemData,
|
||||
handleSetConfig,
|
||||
removeQueryBuilderEntityByIndex,
|
||||
removeQueryTypeItemByIndex,
|
||||
removeAllQueryBuilderEntities,
|
||||
cloneQuery,
|
||||
addNewBuilderQuery,
|
||||
addNewFormula,
|
||||
addTraceOperator,
|
||||
removeTraceOperator,
|
||||
addNewQueryItem,
|
||||
redirectWithQueryBuilderData,
|
||||
handleRunQuery,
|
||||
@@ -1048,14 +1183,18 @@ export function QueryBuilderProvider({
|
||||
panelType,
|
||||
isEnabledQuery,
|
||||
handleSetQueryData,
|
||||
handleSetTraceOperatorData,
|
||||
handleSetFormulaData,
|
||||
handleSetQueryItemData,
|
||||
handleSetConfig,
|
||||
removeQueryBuilderEntityByIndex,
|
||||
removeQueryTypeItemByIndex,
|
||||
removeAllQueryBuilderEntities,
|
||||
cloneQuery,
|
||||
addNewBuilderQuery,
|
||||
addNewFormula,
|
||||
addTraceOperator,
|
||||
removeTraceOperator,
|
||||
addNewQueryItem,
|
||||
redirectWithQueryBuilderData,
|
||||
handleRunQuery,
|
||||
|
||||
@@ -29,6 +29,8 @@ export interface IBuilderFormula {
|
||||
orderBy?: OrderByPayload[];
|
||||
}
|
||||
|
||||
export type IBuilderTraceOperator = IBuilderQuery;
|
||||
|
||||
export interface TagFilterItem {
|
||||
id: string;
|
||||
key?: BaseAutocompleteData;
|
||||
@@ -118,12 +120,13 @@ export type BuilderClickHouseResource = Record<string, IClickHouseQuery>;
|
||||
export type BuilderPromQLResource = Record<string, IPromQLQuery>;
|
||||
export type BuilderQueryDataResourse = Record<
|
||||
string,
|
||||
IBuilderQuery | IBuilderFormula
|
||||
IBuilderQuery | IBuilderFormula | IBuilderTraceOperator
|
||||
>;
|
||||
|
||||
export type MapData =
|
||||
| IBuilderQuery
|
||||
| IBuilderFormula
|
||||
| IBuilderTraceOperator
|
||||
| IClickHouseQuery
|
||||
| IPromQLQuery;
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ export type RequestType =
|
||||
|
||||
export type QueryType =
|
||||
| 'builder_query'
|
||||
| 'builder_trace_operator'
|
||||
| 'builder_formula'
|
||||
| 'builder_sub_query'
|
||||
| 'builder_join'
|
||||
@@ -220,6 +221,7 @@ export interface BaseBuilderQuery {
|
||||
secondaryAggregations?: SecondaryAggregation[];
|
||||
functions?: QueryFunction[];
|
||||
legend?: string;
|
||||
expression?: string; // for trace operator
|
||||
}
|
||||
|
||||
export interface TraceBuilderQuery extends BaseBuilderQuery {
|
||||
|
||||
@@ -4,6 +4,7 @@ import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteRe
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
BaseBuilderQuery,
|
||||
@@ -18,6 +19,7 @@ import { SelectOption } from './select';
|
||||
|
||||
type UseQueryOperationsParams = Pick<QueryProps, 'index' | 'query'> &
|
||||
Pick<QueryBuilderProps, 'filterConfigs'> & {
|
||||
isForTraceOperator?: boolean;
|
||||
formula?: IBuilderFormula;
|
||||
isListViewPanel?: boolean;
|
||||
entityVersion: string;
|
||||
@@ -32,6 +34,14 @@ export type HandleChangeQueryData<T = IBuilderQuery> = <
|
||||
value: Value,
|
||||
) => void;
|
||||
|
||||
export type HandleChangeTraceOperatorData<T = IBuilderTraceOperator> = <
|
||||
Key extends keyof T,
|
||||
Value extends T[Key]
|
||||
>(
|
||||
key: Key,
|
||||
value: Value,
|
||||
) => void;
|
||||
|
||||
// Legacy version for backward compatibility
|
||||
export type HandleChangeQueryDataLegacy = HandleChangeQueryData<IBuilderQuery>;
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import { Dispatch, SetStateAction } from 'react';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
IBuilderTraceOperator,
|
||||
IClickHouseQuery,
|
||||
IPromQLQuery,
|
||||
Query,
|
||||
@@ -222,6 +223,7 @@ export type ReduceOperators = 'last' | 'sum' | 'avg' | 'max' | 'min';
|
||||
export type QueryBuilderData = {
|
||||
queryData: IBuilderQuery[];
|
||||
queryFormulas: IBuilderFormula[];
|
||||
queryTraceOperator: IBuilderTraceOperator[];
|
||||
};
|
||||
|
||||
export type QueryBuilderContextType = {
|
||||
@@ -235,6 +237,10 @@ export type QueryBuilderContextType = {
|
||||
panelType: PANEL_TYPES | null;
|
||||
isEnabledQuery: boolean;
|
||||
handleSetQueryData: (index: number, queryData: IBuilderQuery) => void;
|
||||
handleSetTraceOperatorData: (
|
||||
index: number,
|
||||
traceOperatorData: IBuilderTraceOperator,
|
||||
) => void;
|
||||
handleSetFormulaData: (index: number, formulaData: IBuilderFormula) => void;
|
||||
handleSetQueryItemData: (
|
||||
index: number,
|
||||
@@ -249,12 +255,15 @@ export type QueryBuilderContextType = {
|
||||
type: keyof QueryBuilderData,
|
||||
index: number,
|
||||
) => void;
|
||||
removeAllQueryBuilderEntities: (type: keyof QueryBuilderData) => void;
|
||||
removeQueryTypeItemByIndex: (
|
||||
type: EQueryType.PROM | EQueryType.CLICKHOUSE,
|
||||
index: number,
|
||||
) => void;
|
||||
addNewBuilderQuery: () => void;
|
||||
addNewFormula: () => void;
|
||||
removeTraceOperator: () => void;
|
||||
addTraceOperator: (expression?: string) => void;
|
||||
cloneQuery: (type: string, query: IBuilderQuery) => void;
|
||||
addNewQueryItem: (type: EQueryType.PROM | EQueryType.CLICKHOUSE) => void;
|
||||
redirectWithQueryBuilderData: (
|
||||
|
||||
@@ -2,10 +2,15 @@ import {
|
||||
convertBuilderQueriesToV5,
|
||||
convertClickHouseQueriesToV5,
|
||||
convertPromQueriesToV5,
|
||||
convertTraceOperatorToV5,
|
||||
mapPanelTypeToRequestType,
|
||||
} from 'api/v5/queryRange/prepareQueryRangePayloadV5';
|
||||
import { TRACE_OPERATOR_QUERY_NAME } from 'constants/queryBuilder';
|
||||
import { ICompositeMetricQuery } from 'types/api/alerts/compositeQuery';
|
||||
import { BuilderQueryDataResourse } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import {
|
||||
BuilderQueryDataResourse,
|
||||
IBuilderTraceOperator,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { OrderBy, QueryEnvelope } from 'types/api/v5/queryRange';
|
||||
|
||||
function convertFormulasToV5(
|
||||
@@ -46,9 +51,12 @@ export function compositeQueryToQueryEnvelope(
|
||||
|
||||
const regularQueries: BuilderQueryDataResourse = {};
|
||||
const formulaQueries: BuilderQueryDataResourse = {};
|
||||
const traceOperatorQueries: BuilderQueryDataResourse = {};
|
||||
|
||||
Object.entries(builderQueries || {}).forEach(([queryName, queryData]) => {
|
||||
if ('dataSource' in queryData) {
|
||||
if (queryData.queryName === TRACE_OPERATOR_QUERY_NAME) {
|
||||
traceOperatorQueries[queryName] = queryData;
|
||||
} else if ('dataSource' in queryData) {
|
||||
regularQueries[queryName] = queryData;
|
||||
} else {
|
||||
formulaQueries[queryName] = queryData;
|
||||
@@ -64,6 +72,12 @@ export function compositeQueryToQueryEnvelope(
|
||||
);
|
||||
const formulaQueriesV5 = convertFormulasToV5(formulaQueries);
|
||||
|
||||
const traceOperatorQueriesV5 = convertTraceOperatorToV5(
|
||||
traceOperatorQueries as Record<string, IBuilderTraceOperator>,
|
||||
requestType,
|
||||
panelType,
|
||||
);
|
||||
|
||||
const promQueriesV5 = convertPromQueriesToV5(promQueries || {});
|
||||
const chQueriesV5 = convertClickHouseQueriesToV5(chQueries || {});
|
||||
|
||||
@@ -72,7 +86,11 @@ export function compositeQueryToQueryEnvelope(
|
||||
|
||||
switch (queryType) {
|
||||
case 'builder':
|
||||
queries = [...builderQueriesV5, ...formulaQueriesV5];
|
||||
queries = [
|
||||
...builderQueriesV5,
|
||||
...formulaQueriesV5,
|
||||
...traceOperatorQueriesV5,
|
||||
];
|
||||
break;
|
||||
case 'promql':
|
||||
queries = [...promQueriesV5];
|
||||
@@ -85,6 +103,7 @@ export function compositeQueryToQueryEnvelope(
|
||||
queries = [
|
||||
...builderQueriesV5,
|
||||
...formulaQueriesV5,
|
||||
...traceOperatorQueriesV5,
|
||||
...promQueriesV5,
|
||||
...chQueriesV5,
|
||||
];
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
import { CharStreams, CommonTokenStream } from 'antlr4';
|
||||
import FilterQueryLexer from 'parser/FilterQueryLexer';
|
||||
import FilterQueryParser from 'parser/FilterQueryParser';
|
||||
import TraceOperatorGrammarLexer from 'parser/TraceOperatorParser/TraceOperatorGrammarLexer';
|
||||
import TraceOperatorGrammarParser from 'parser/TraceOperatorParser/TraceOperatorGrammarParser';
|
||||
import { IDetailedError, IValidationResult } from 'types/antlrQueryTypes';
|
||||
|
||||
// Custom error listener to capture ANTLR errors
|
||||
@@ -169,3 +171,66 @@ export const validateQuery = (query: string): IValidationResult => {
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
export const validateTraceOperatorQuery = (
	query: string,
): IValidationResult => {
	// Empty query is considered valid
	if (!query.trim()) {
		return {
			isValid: true,
			message: 'Trace operator query is empty',
			errors: [],
		};
	}

	try {
		const errorListener = new QueryErrorListener();
		const inputStream = CharStreams.fromString(query);

		// Setup lexer
		const lexer = new TraceOperatorGrammarLexer(inputStream);
		lexer.removeErrorListeners(); // Remove default error listeners
		lexer.addErrorListener(errorListener);

		// Setup parser
		const tokenStream = new CommonTokenStream(lexer);
		const parser = new TraceOperatorGrammarParser(tokenStream);
		parser.removeErrorListeners(); // Remove default error listeners
		parser.addErrorListener(errorListener);

		// Try parsing
		parser.query();

		// Check if any errors were captured
		if (errorListener.hasErrors()) {
			return {
				isValid: false,
				message: 'Trace operator syntax error',
				errors: errorListener.getErrors(),
			};
		}

		return {
			isValid: true,
			message: 'Trace operator is valid!',
			errors: [],
		};
	} catch (error) {
		const errorMessage =
			error instanceof Error ? error.message : 'Invalid trace operator syntax';

		const detailedError: IDetailedError = {
			message: errorMessage,
			line: 0,
			column: 0,
			offendingSymbol: '',
			expectedTokens: [],
		};
		return {
			isValid: false,
			message: 'Invalid trace operator syntax',
			errors: [detailedError],
		};
	}
};

@@ -31,7 +31,7 @@
|
||||
],
|
||||
"types": ["node", "jest"]
|
||||
},
|
||||
"exclude": ["node_modules", "src/parser/*.ts"],
|
||||
"exclude": ["node_modules", "src/parser/*.ts", "src/parser/TraceOperatorParser/*.ts"],
|
||||
"include": [
|
||||
"./src",
|
||||
"./src/**/*.ts",
|
||||
|
||||
@@ -4276,6 +4276,20 @@
|
||||
tailwind-merge "^2.5.2"
|
||||
tailwindcss-animate "^1.0.7"
|
||||
|
||||
"@signozhq/callout@0.0.2":
|
||||
version "0.0.2"
|
||||
resolved "https://registry.yarnpkg.com/@signozhq/callout/-/callout-0.0.2.tgz#131ca15f89a8ee6729fecc4d322f11359c02e5cf"
|
||||
integrity sha512-tmguHm+/JVRKjMElJOFyG7LJcdqCW1hHnFfp8ZkjQ+Gi7MfFt/r2foLZG2DNdOcfxSvhf2zhzr7D+epgvmbQ1A==
|
||||
dependencies:
|
||||
"@radix-ui/react-icons" "^1.3.0"
|
||||
"@radix-ui/react-slot" "^1.1.0"
|
||||
class-variance-authority "^0.7.0"
|
||||
clsx "^2.1.1"
|
||||
lucide-react "^0.445.0"
|
||||
lucide-solid "^0.510.0"
|
||||
tailwind-merge "^2.5.2"
|
||||
tailwindcss-animate "^1.0.7"
|
||||
|
||||
"@signozhq/design-tokens@1.1.4":
|
||||
version "1.1.4"
|
||||
resolved "https://registry.yarnpkg.com/@signozhq/design-tokens/-/design-tokens-1.1.4.tgz#5d5de5bd9d19b6a3631383db015cc4b70c3f7661"
|
||||
@@ -12370,6 +12384,11 @@ lucide-react@^0.445.0:
|
||||
resolved "https://registry.yarnpkg.com/lucide-react/-/lucide-react-0.445.0.tgz#35c42341e98fbf0475b2a6cf74fd25ef7cbfcd62"
|
||||
integrity sha512-YrLf3aAHvmd4dZ8ot+mMdNFrFpJD7YRwQ2pUcBhgqbmxtrMP4xDzIorcj+8y+6kpuXBF4JB0NOCTUWIYetJjgA==
|
||||
|
||||
lucide-solid@^0.510.0:
|
||||
version "0.510.0"
|
||||
resolved "https://registry.yarnpkg.com/lucide-solid/-/lucide-solid-0.510.0.tgz#f5b17397ef1df3017f62f96f4d00e080abfb492f"
|
||||
integrity sha512-G6rKYxURfSLG/zeOCN/BEl2dq2ezujFKPbcHjl7RLJ4bBQwWk4ZF2Swga/8anWglSVZyqYz7HMrrpb8/+vOcXw==
|
||||
|
||||
lz-string@^1.4.4:
|
||||
version "1.5.0"
|
||||
resolved "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz"
|
||||
|
||||
@@ -29,6 +29,8 @@ func getqueryInfo(spec any) queryInfo {
|
||||
return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
|
||||
case qbtypes.QueryBuilderTraceOperator:
|
||||
return queryInfo{Name: s.Name, Disabled: s.Disabled, Step: s.StepInterval}
|
||||
case qbtypes.QueryBuilderFormula:
|
||||
return queryInfo{Name: s.Name, Disabled: s.Disabled}
|
||||
case qbtypes.PromQuery:
|
||||
@@ -70,6 +72,11 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any
|
||||
result = postProcessMetricQuery(q, result, spec, req)
|
||||
typedResults[spec.Name] = result
|
||||
}
|
||||
case qbtypes.QueryBuilderTraceOperator:
|
||||
if result, ok := typedResults[spec.Name]; ok {
|
||||
result = postProcessTraceOperator(q, result, spec, req)
|
||||
typedResults[spec.Name] = result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -210,6 +217,27 @@ func postProcessMetricQuery(
|
||||
return result
|
||||
}
|
||||
|
||||
// postProcessTraceOperator applies postprocessing to a trace operator query result
|
||||
func postProcessTraceOperator(
|
||||
q *querier,
|
||||
result *qbtypes.Result,
|
||||
query qbtypes.QueryBuilderTraceOperator,
|
||||
req *qbtypes.QueryRangeRequest,
|
||||
) *qbtypes.Result {
|
||||
|
||||
result = q.applySeriesLimit(result, query.Limit, query.Order)
|
||||
|
||||
// Apply functions if any
|
||||
if len(query.Functions) > 0 {
|
||||
step := query.StepInterval.Duration.Milliseconds()
|
||||
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)
|
||||
result = q.applyFunctions(result, functions)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
|
||||
// applyMetricReduceTo applies reduce to operation using the metric's ReduceTo field
|
||||
func (q *querier) applyMetricReduceTo(result *qbtypes.Result, reduceOp qbtypes.ReduceTo) *qbtypes.Result {
|
||||
tsData, ok := result.Value.(*qbtypes.TimeSeriesData)
|
||||
|
||||
@@ -29,16 +29,17 @@ var (
|
||||
)
|
||||
|
||||
type querier struct {
|
||||
logger *slog.Logger
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
promEngine prometheus.Prometheus
|
||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
||||
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||
bucketCache BucketCache
|
||||
liveDataRefreshSeconds time.Duration
|
||||
logger *slog.Logger
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
promEngine prometheus.Prometheus
|
||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
||||
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder
|
||||
bucketCache BucketCache
|
||||
liveDataRefreshSeconds time.Duration
|
||||
}
|
||||
|
||||
var _ Querier = (*querier)(nil)
|
||||
@@ -52,20 +53,22 @@ func New(
|
||||
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
||||
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
||||
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder,
|
||||
bucketCache BucketCache,
|
||||
) *querier {
|
||||
querierSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querier")
|
||||
return &querier{
|
||||
logger: querierSettings.Logger(),
|
||||
telemetryStore: telemetryStore,
|
||||
metadataStore: metadataStore,
|
||||
promEngine: promEngine,
|
||||
traceStmtBuilder: traceStmtBuilder,
|
||||
logStmtBuilder: logStmtBuilder,
|
||||
metricStmtBuilder: metricStmtBuilder,
|
||||
meterStmtBuilder: meterStmtBuilder,
|
||||
bucketCache: bucketCache,
|
||||
liveDataRefreshSeconds: 5,
|
||||
logger: querierSettings.Logger(),
|
||||
telemetryStore: telemetryStore,
|
||||
metadataStore: metadataStore,
|
||||
promEngine: promEngine,
|
||||
traceStmtBuilder: traceStmtBuilder,
|
||||
logStmtBuilder: logStmtBuilder,
|
||||
metricStmtBuilder: metricStmtBuilder,
|
||||
meterStmtBuilder: meterStmtBuilder,
|
||||
traceOperatorStmtBuilder: traceOperatorStmtBuilder,
|
||||
bucketCache: bucketCache,
|
||||
liveDataRefreshSeconds: 5,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -127,9 +130,28 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
NumberOfQueries: len(req.CompositeQuery.Queries),
|
||||
PanelType: req.RequestType.StringValue(),
|
||||
}
|
||||
|
||||
intervalWarnings := []string{}
|
||||
|
||||
dependencyQueries := make(map[string]bool)
|
||||
traceOperatorQueries := make(map[string]qbtypes.QueryBuilderTraceOperator)
|
||||
|
||||
for _, query := range req.CompositeQuery.Queries {
|
||||
if query.Type == qbtypes.QueryTypeTraceOperator {
|
||||
if spec, ok := query.Spec.(qbtypes.QueryBuilderTraceOperator); ok {
|
||||
// Parse expression to find dependencies
|
||||
if err := spec.ParseExpression(); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse trace operator expression: %w", err)
|
||||
}
|
||||
|
||||
deps := spec.CollectReferencedQueries(spec.ParsedExpression)
|
||||
for _, dep := range deps {
|
||||
dependencyQueries[dep] = true
|
||||
}
|
||||
traceOperatorQueries[spec.Name] = spec
|
||||
}
|
||||
}
|
||||
}
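// Illustration (not part of the diff): for a composite query that contains
// builder queries A and B plus a trace operator whose Expression is "A => B",
// the loop above marks A and B in dependencyQueries, so later in this function
// they are executed only as inputs to the operator's CTEs instead of as
// standalone result queries. A minimal hedged sketch of the calls involved:
//
//	spec.Expression = "A => B"
//	_ = spec.ParseExpression()                                   // fills spec.ParsedExpression
//	deps := spec.CollectReferencedQueries(spec.ParsedExpression) // e.g. ["A", "B"]
//	for _, dep := range deps { dependencyQueries[dep] = true }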
|
||||
|
||||
// First pass: collect all metric names that need temporality
|
||||
metricNames := make([]string, 0)
|
||||
for idx, query := range req.CompositeQuery.Queries {
|
||||
@@ -223,6 +245,23 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
event.TracesUsed = strings.Contains(spec.Query, "signoz_traces")
|
||||
}
|
||||
}
|
||||
} else if query.Type == qbtypes.QueryTypeTraceOperator {
|
||||
if spec, ok := query.Spec.(qbtypes.QueryBuilderTraceOperator); ok {
|
||||
if spec.StepInterval.Seconds() == 0 {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.RecommendedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
}
|
||||
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -243,6 +282,38 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
steps := make(map[string]qbtypes.Step)
|
||||
|
||||
for _, query := range req.CompositeQuery.Queries {
|
||||
var queryName string
|
||||
var isTraceOperator bool
|
||||
|
||||
switch query.Type {
|
||||
case qbtypes.QueryTypeTraceOperator:
|
||||
if spec, ok := query.Spec.(qbtypes.QueryBuilderTraceOperator); ok {
|
||||
queryName = spec.Name
|
||||
isTraceOperator = true
|
||||
}
|
||||
case qbtypes.QueryTypePromQL:
|
||||
if spec, ok := query.Spec.(qbtypes.PromQuery); ok {
|
||||
queryName = spec.Name
|
||||
}
|
||||
case qbtypes.QueryTypeClickHouseSQL:
|
||||
if spec, ok := query.Spec.(qbtypes.ClickHouseQuery); ok {
|
||||
queryName = spec.Name
|
||||
}
|
||||
case qbtypes.QueryTypeBuilder:
|
||||
switch spec := query.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
queryName = spec.Name
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
queryName = spec.Name
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
queryName = spec.Name
|
||||
}
|
||||
}
|
||||
|
||||
if !isTraceOperator && dependencyQueries[queryName] {
|
||||
continue
|
||||
}
|
||||
|
||||
switch query.Type {
|
||||
case qbtypes.QueryTypePromQL:
|
||||
promQuery, ok := query.Spec.(qbtypes.PromQuery)
|
||||
@@ -259,6 +330,22 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
chSQLQuery := newchSQLQuery(q.logger, q.telemetryStore, chQuery, nil, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType, tmplVars)
|
||||
queries[chQuery.Name] = chSQLQuery
|
||||
case qbtypes.QueryTypeTraceOperator:
|
||||
traceOpQuery, ok := query.Spec.(qbtypes.QueryBuilderTraceOperator)
|
||||
if !ok {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid trace operator query spec %T", query.Spec)
|
||||
}
|
||||
toq := &traceOperatorQuery{
|
||||
telemetryStore: q.telemetryStore,
|
||||
stmtBuilder: q.traceOperatorStmtBuilder,
|
||||
spec: traceOpQuery,
|
||||
compositeQuery: &req.CompositeQuery,
|
||||
fromMS: uint64(req.Start),
|
||||
toMS: uint64(req.End),
|
||||
kind: req.RequestType,
|
||||
}
|
||||
queries[traceOpQuery.Name] = toq
|
||||
steps[traceOpQuery.Name] = traceOpQuery.StepInterval
|
||||
case qbtypes.QueryTypeBuilder:
|
||||
switch spec := query.Spec.(type) {
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]:
|
||||
@@ -676,7 +763,17 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
|
||||
return newBuilderQuery(q.telemetryStore, q.meterStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
|
||||
}
|
||||
return newBuilderQuery(q.telemetryStore, q.metricStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
|
||||
|
||||
case *traceOperatorQuery:
|
||||
specCopy := qt.spec.Copy()
|
||||
return &traceOperatorQuery{
|
||||
telemetryStore: q.telemetryStore,
|
||||
stmtBuilder: q.traceOperatorStmtBuilder,
|
||||
spec: specCopy,
|
||||
fromMS: uint64(timeRange.From),
|
||||
toMS: uint64(timeRange.To),
|
||||
compositeQuery: qt.compositeQuery,
|
||||
kind: qt.kind,
|
||||
}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -89,6 +89,17 @@ func newProvider(
|
||||
telemetryStore,
|
||||
)
|
||||
|
||||
// ADD: Create trace operator statement builder
|
||||
traceOperatorStmtBuilder := telemetrytraces.NewTraceOperatorStatementBuilder(
|
||||
settings,
|
||||
telemetryMetadataStore,
|
||||
traceFieldMapper,
|
||||
traceConditionBuilder,
|
||||
traceStmtBuilder, // Pass the regular trace statement builder
|
||||
resourceFilterStmtBuilder, // Pass the resource filter statement builder
|
||||
traceAggExprRewriter,
|
||||
)
|
||||
|
||||
// Create log statement builder
|
||||
logFieldMapper := telemetrylogs.NewFieldMapper()
|
||||
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
|
||||
@@ -157,6 +168,7 @@ func newProvider(
|
||||
logStmtBuilder,
|
||||
metricStmtBuilder,
|
||||
meterStmtBuilder,
|
||||
traceOperatorStmtBuilder,
|
||||
bucketCache,
|
||||
), nil
|
||||
}
|
||||
|
||||
99 pkg/querier/trace_operator_query.go Normal file
@@ -0,0 +1,99 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
type traceOperatorQuery struct {
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
stmtBuilder qbtypes.TraceOperatorStatementBuilder
|
||||
spec qbtypes.QueryBuilderTraceOperator
|
||||
compositeQuery *qbtypes.CompositeQuery
|
||||
fromMS uint64
|
||||
toMS uint64
|
||||
kind qbtypes.RequestType
|
||||
}
|
||||
|
||||
var _ qbtypes.Query = (*traceOperatorQuery)(nil)
|
||||
|
||||
func (q *traceOperatorQuery) Fingerprint() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (q *traceOperatorQuery) Window() (uint64, uint64) {
|
||||
return q.fromMS, q.toMS
|
||||
}
|
||||
|
||||
func (q *traceOperatorQuery) Execute(ctx context.Context) (*qbtypes.Result, error) {
|
||||
stmt, err := q.stmtBuilder.Build(
|
||||
ctx,
|
||||
q.fromMS,
|
||||
q.toMS,
|
||||
q.kind,
|
||||
q.spec,
|
||||
q.compositeQuery,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Execute the query with proper context
|
||||
result, err := q.executeWithContext(ctx, stmt.Query, stmt.Args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.Warnings = stmt.Warnings
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (q *traceOperatorQuery) executeWithContext(ctx context.Context, query string, args []any) (*qbtypes.Result, error) {
|
||||
totalRows := uint64(0)
|
||||
totalBytes := uint64(0)
|
||||
elapsed := time.Duration(0)
|
||||
|
||||
ctx = clickhouse.Context(ctx, clickhouse.WithProgress(func(p *clickhouse.Progress) {
|
||||
totalRows += p.Rows
|
||||
totalBytes += p.Bytes
|
||||
elapsed += p.Elapsed
|
||||
}))
|
||||
|
||||
rows, err := q.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Pass query window and step for partial value detection
|
||||
queryWindow := &qbtypes.TimeRange{From: q.fromMS, To: q.toMS}
|
||||
|
||||
// Use the consume function like builderQuery does
|
||||
payload, err := consume(rows, q.kind, queryWindow, q.spec.StepInterval, q.spec.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &qbtypes.Result{
|
||||
Type: q.kind,
|
||||
Value: payload,
|
||||
Stats: qbtypes.ExecStats{
|
||||
RowsScanned: totalRows,
|
||||
BytesScanned: totalBytes,
|
||||
DurationMS: uint64(elapsed.Milliseconds()),
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
// contains checks if a slice contains a specific string
|
||||
func contains(slice []string, item string) bool {
|
||||
for _, s := range slice {
|
||||
if s == item {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
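For orientation, the type above satisfies the same qbtypes.Query contract the querier already uses for builder, PromQL and ClickHouse queries (note the `var _ qbtypes.Query = (*traceOperatorQuery)(nil)` assertion), which is what lets QueryRange schedule it alongside other queries. A minimal, hedged usage sketch; the construction values are illustrative and mirror the QueryTypeTraceOperator case in querier.go:

	var q qbtypes.Query = &traceOperatorQuery{ /* fields filled as in QueryRange */ }
	fromMS, toMS := q.Window()     // millisecond window taken from the request
	result, err := q.Execute(ctx)  // builds the CTE statement and runs it on ClickHouse
	_, _, _, _ = fromMS, toMS, result, err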
||||
@@ -56,6 +56,27 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
"duration_nano": {
|
||||
{
|
||||
Name: "duration_nano",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeInt64,
|
||||
},
|
||||
},
|
||||
"http.method": {
|
||||
{
|
||||
Name: "http.method",
|
||||
FieldContext: telemetrytypes.FieldContextAttribute,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
"response_status_code": {
|
||||
{
|
||||
Name: "response_status_code",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeInt64,
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, keys := range keysMap {
|
||||
for _, key := range keys {
|
||||
|
||||
928 pkg/telemetrytraces/trace_operator_cte_builder.go Normal file
@@ -0,0 +1,928 @@
|
||||
package telemetrytraces
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type cteNode struct {
|
||||
name string
|
||||
sql string
|
||||
args []any
|
||||
dependsOn []string
|
||||
}
|
||||
|
||||
type traceOperatorCTEBuilder struct {
|
||||
ctx context.Context
|
||||
start uint64
|
||||
end uint64
|
||||
operator *qbtypes.QueryBuilderTraceOperator
|
||||
stmtBuilder *traceOperatorStatementBuilder
|
||||
queries map[string]*qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]
|
||||
ctes []cteNode
|
||||
cteNameToIndex map[string]int
|
||||
queryToCTEName map[string]string
|
||||
compositeQuery *qbtypes.CompositeQuery
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) collectQueries() error {
|
||||
referencedQueries := b.operator.CollectReferencedQueries(b.operator.ParsedExpression)
|
||||
|
||||
for _, queryEnv := range b.compositeQuery.Queries {
|
||||
if queryEnv.Type == qbtypes.QueryTypeBuilder {
|
||||
if traceQuery, ok := queryEnv.Spec.(qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]); ok {
|
||||
for _, refName := range referencedQueries {
|
||||
if traceQuery.Name == refName {
|
||||
queryCopy := traceQuery
|
||||
b.queries[refName] = &queryCopy
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, refName := range referencedQueries {
|
||||
if _, found := b.queries[refName]; !found {
|
||||
return errors.NewInvalidInputf(errors.CodeInvalidInput, "referenced query '%s' not found", refName)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) build(requestType qbtypes.RequestType) (*qbtypes.Statement, error) {
|
||||
|
||||
err := b.buildAllSpansCTE()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rootCTEName, err := b.buildExpressionCTEs(b.operator.ParsedExpression)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
selectFromCTE := rootCTEName
|
||||
if b.operator.ReturnSpansFrom != "" {
|
||||
selectFromCTE = b.queryToCTEName[b.operator.ReturnSpansFrom]
|
||||
if selectFromCTE == "" {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput,
|
||||
"returnSpansFrom references query '%s' which has no corresponding CTE",
|
||||
b.operator.ReturnSpansFrom)
|
||||
}
|
||||
}
|
||||
|
||||
finalStmt, err := b.buildFinalQuery(selectFromCTE, requestType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var cteFragments []string
|
||||
var cteArgs [][]any
|
||||
|
||||
timeConstantsCTE := b.buildTimeConstantsCTE()
|
||||
cteFragments = append(cteFragments, timeConstantsCTE)
|
||||
|
||||
for _, cte := range b.ctes {
|
||||
cteFragments = append(cteFragments, fmt.Sprintf("%s AS (%s)", cte.name, cte.sql))
|
||||
cteArgs = append(cteArgs, cte.args)
|
||||
}
|
||||
|
||||
finalSQL := querybuilder.CombineCTEs(cteFragments) + finalStmt.Query + " SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000"
|
||||
finalArgs := querybuilder.PrependArgs(cteArgs, finalStmt.Args)
|
||||
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Final trace operator query built",
|
||||
"operator_expression", b.operator.Expression,
|
||||
"cte_count", len(cteFragments),
|
||||
"args_count", len(finalArgs))
|
||||
|
||||
return &qbtypes.Statement{
|
||||
Query: finalSQL,
|
||||
Args: finalArgs,
|
||||
Warnings: finalStmt.Warnings,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Will be used in Indirect descendant Query, will not be used in any other query
|
||||
func (b *traceOperatorCTEBuilder) buildAllSpansCTE() error {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select("*")
|
||||
sb.SelectMore(sqlbuilder.Escape("resource_string_service$$name") + " AS `service.name`")
|
||||
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
||||
startBucket := b.start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||
endBucket := b.end / querybuilder.NsToSeconds
|
||||
sb.Where(
|
||||
sb.GE("timestamp", fmt.Sprintf("%d", b.start)),
|
||||
sb.L("timestamp", fmt.Sprintf("%d", b.end)),
|
||||
sb.GE("ts_bucket_start", startBucket),
|
||||
sb.LE("ts_bucket_start", endBucket),
|
||||
)
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Built all_spans CTE")
|
||||
b.addCTE("all_spans", sql, args, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildTimeConstantsCTE() string {
|
||||
startBucket := b.start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||
endBucket := b.end / querybuilder.NsToSeconds
|
||||
|
||||
return fmt.Sprintf(`toDateTime64(%d, 9) AS t_from, toDateTime64(%d, 9) AS t_to, %d AS bucket_from, %d AS bucket_to`, b.start, b.end, startBucket, endBucket)
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildResourceFilterCTE(query qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]) (*qbtypes.Statement, error) {
|
||||
return b.stmtBuilder.resourceFilterStmtBuilder.Build(
|
||||
b.ctx,
|
||||
b.start,
|
||||
b.end,
|
||||
qbtypes.RequestTypeRaw,
|
||||
query,
|
||||
nil,
|
||||
)
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildExpressionCTEs(expr *qbtypes.TraceOperand) (string, error) {
|
||||
if expr == nil {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "expression is nil")
|
||||
}
|
||||
|
||||
if expr.QueryRef != nil {
|
||||
return b.buildQueryCTE(expr.QueryRef.Name)
|
||||
}
|
||||
|
||||
var leftCTE, rightCTE string
|
||||
var err error
|
||||
|
||||
if expr.Left != nil {
|
||||
leftCTE, err = b.buildExpressionCTEs(expr.Left)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
if expr.Right != nil {
|
||||
rightCTE, err = b.buildExpressionCTEs(expr.Right)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
return b.buildOperatorCTE(*expr.Operator, leftCTE, rightCTE)
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildQueryCTE(queryName string) (string, error) {
|
||||
query, exists := b.queries[queryName]
|
||||
if !exists {
|
||||
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "query %s not found", queryName)
|
||||
}
|
||||
|
||||
cteName := queryName
|
||||
b.queryToCTEName[queryName] = cteName
|
||||
|
||||
if _, exists := b.cteNameToIndex[cteName]; exists {
|
||||
return cteName, nil
|
||||
}
|
||||
|
||||
keySelectors := getKeySelectors(*query)
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Key selectors for query", "queryName", queryName, "keySelectors", keySelectors)
|
||||
keys, _, err := b.stmtBuilder.metadataStore.GetKeysMulti(b.ctx, keySelectors)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Retrieved keys for query", "queryName", queryName, "keysCount", len(keys))
|
||||
|
||||
// Build resource filter CTE for this specific query
|
||||
resourceFilterCTEName := fmt.Sprintf("__resource_filter_%s", cteName)
|
||||
resourceStmt, err := b.buildResourceFilterCTE(*query)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if resourceStmt != nil && resourceStmt.Query != "" {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Built resource filter CTE for query",
|
||||
"queryName", queryName,
|
||||
"resourceFilterCTEName", resourceFilterCTEName)
|
||||
b.addCTE(resourceFilterCTEName, resourceStmt.Query, resourceStmt.Args, nil)
|
||||
} else {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "No resource filter needed for query", "queryName", queryName)
|
||||
resourceFilterCTEName = ""
|
||||
}
|
||||
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
sb.Select("*")
|
||||
sb.From(fmt.Sprintf("%s.%s", DBName, SpanIndexV3TableName))
|
||||
if resourceFilterCTEName != "" {
|
||||
sb.Where(fmt.Sprintf("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM %s)", resourceFilterCTEName))
|
||||
}
|
||||
startBucket := b.start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
|
||||
endBucket := b.end / querybuilder.NsToSeconds
|
||||
sb.Where(
|
||||
sb.GE("timestamp", fmt.Sprintf("%d", b.start)),
|
||||
sb.L("timestamp", fmt.Sprintf("%d", b.end)),
|
||||
sb.GE("ts_bucket_start", startBucket),
|
||||
sb.LE("ts_bucket_start", endBucket),
|
||||
)
|
||||
|
||||
if query.Filter != nil && query.Filter.Expression != "" {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Applying filter to query CTE", "queryName", queryName, "filter", query.Filter.Expression)
|
||||
filterWhereClause, err := querybuilder.PrepareWhereClause(
|
||||
query.Filter.Expression,
|
||||
querybuilder.FilterExprVisitorOpts{
|
||||
Logger: b.stmtBuilder.logger,
|
||||
FieldMapper: b.stmtBuilder.fm,
|
||||
ConditionBuilder: b.stmtBuilder.cb,
|
||||
FieldKeys: keys,
|
||||
SkipResourceFilter: true,
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
b.stmtBuilder.logger.ErrorContext(b.ctx, "Failed to prepare where clause", "error", err, "filter", query.Filter.Expression)
|
||||
return "", err
|
||||
}
|
||||
if filterWhereClause != nil {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Adding where clause", "whereClause", filterWhereClause.WhereClause)
|
||||
sb.AddWhereClause(filterWhereClause.WhereClause)
|
||||
} else {
|
||||
b.stmtBuilder.logger.WarnContext(b.ctx, "PrepareWhereClause returned nil", "filter", query.Filter.Expression)
|
||||
}
|
||||
} else {
|
||||
if query.Filter == nil {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "No filter for query CTE", "queryName", queryName, "reason", "filter is nil")
|
||||
} else {
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "No filter for query CTE", "queryName", queryName, "reason", "filter expression is empty")
|
||||
}
|
||||
}
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
b.stmtBuilder.logger.DebugContext(b.ctx, "Built query CTE",
|
||||
"queryName", queryName,
|
||||
"cteName", cteName)
|
||||
dependencies := []string{}
|
||||
if resourceFilterCTEName != "" {
|
||||
dependencies = append(dependencies, resourceFilterCTEName)
|
||||
}
|
||||
b.addCTE(cteName, sql, args, dependencies)
|
||||
|
||||
return cteName, nil
|
||||
}
|
||||
|
||||
func sanitizeForSQL(s string) string {
	replacements := map[string]string{
		"=>":  "DIR_DESC",
		"->":  "INDIR_DESC",
		"&&":  "AND",
		"||":  "OR",
		"NOT": "NOT",
		" ":   "_",
	}

	result := s
	for old, new := range replacements {
		result = strings.ReplaceAll(result, old, new)
	}
	return result
}

func (b *traceOperatorCTEBuilder) buildOperatorCTE(op qbtypes.TraceOperatorType, leftCTE, rightCTE string) (string, error) {
	sanitizedOp := sanitizeForSQL(op.StringValue())
	cteName := fmt.Sprintf("%s_%s_%s", leftCTE, sanitizedOp, rightCTE)

	if _, exists := b.cteNameToIndex[cteName]; exists {
		return cteName, nil
	}

	var sql string
	var args []any
	var dependsOn []string

	switch op {
	case qbtypes.TraceOperatorDirectDescendant:
		sql, args, dependsOn = b.buildDirectDescendantCTE(leftCTE, rightCTE)
	case qbtypes.TraceOperatorIndirectDescendant:
		sql, args, dependsOn = b.buildIndirectDescendantCTE(leftCTE, rightCTE)
	case qbtypes.TraceOperatorAnd:
		sql, args, dependsOn = b.buildAndCTE(leftCTE, rightCTE)
	case qbtypes.TraceOperatorOr:
		sql, dependsOn = b.buildOrCTE(leftCTE, rightCTE)
		args = nil
	case qbtypes.TraceOperatorNot, qbtypes.TraceOperatorExclude:
		sql, args, dependsOn = b.buildNotCTE(leftCTE, rightCTE)
	default:
		return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported operator: %s", op.StringValue())
	}

	b.stmtBuilder.logger.DebugContext(b.ctx, "Built operator CTE",
		"operator", op.StringValue(),
		"cteName", cteName,
		"leftCTE", leftCTE,
		"rightCTE", rightCTE)
	b.addCTE(cteName, sql, args, dependsOn)
	return cteName, nil
}
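As a worked illustration (derived from the code above, not copied from the diff), the CTE naming composes recursively:

// expression "A => B":
//   buildQueryCTE creates CTEs A and B, then buildOperatorCTE names the
//   combination A_DIR_DESC_B (operator token "=>" sanitized to "DIR_DESC").
// expression "(A => B) && C":
//   the left operand resolves to A_DIR_DESC_B, so the root CTE becomes
//   A_DIR_DESC_B_AND_C; the final SELECT reads from this root CTE unless
//   returnSpansFrom points at one of the named query CTEs instead.
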
func (b *traceOperatorCTEBuilder) buildDirectDescendantCTE(parentCTE, childCTE string) (string, []any, []string) {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("p.*")

	sb.From(fmt.Sprintf("%s AS p", parentCTE))
	sb.JoinWithOption(
		sqlbuilder.InnerJoin,
		fmt.Sprintf("%s AS c", childCTE),
		"p.trace_id = c.trace_id AND p.span_id = c.parent_span_id",
	)

	sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return sql, args, []string{parentCTE, childCTE}
}

func (b *traceOperatorCTEBuilder) buildIndirectDescendantCTE(ancestorCTE, descendantCTE string) (string, []any, []string) {
	sql := fmt.Sprintf(`WITH RECURSIVE up AS (SELECT d.trace_id, d.span_id, d.parent_span_id, 0 AS depth FROM %s AS d UNION ALL SELECT p.trace_id, p.span_id, p.parent_span_id, up.depth + 1 FROM all_spans AS p JOIN up ON p.trace_id = up.trace_id AND p.span_id = up.parent_span_id WHERE up.depth < 100) SELECT DISTINCT a.* FROM %s AS a GLOBAL INNER JOIN (SELECT DISTINCT trace_id, span_id FROM up WHERE depth > 0 ) AS ancestors ON ancestors.trace_id = a.trace_id AND ancestors.span_id = a.span_id`, descendantCTE, ancestorCTE)
	return sql, nil, []string{ancestorCTE, descendantCTE, "all_spans"}
}
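Unlike the direct-descendant join above, the indirect-descendant CTE walks the whole ancestry: starting from each span matched by the descendant CTE it recursively follows parent_span_id through all_spans (bounded at depth 100) and keeps the ancestor-CTE spans that appear anywhere on those upward paths. A small hedged illustration with a hypothetical trace:

// trace: A (span 1) -> X (span 2, parent = 1) -> B (span 3, parent = 2)
// "A -> B": the recursive `up` walk from B visits X (depth 1) and A (depth 2),
//           so the A span is returned even though B is not its direct child.
// "A => B": does NOT match this trace, because B's parent_span_id is X, not A.
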
func (b *traceOperatorCTEBuilder) buildAndCTE(leftCTE, rightCTE string) (string, []any, []string) {
	sb := sqlbuilder.NewSelectBuilder()
	// Select all columns from left CTE
	sb.Select("l.*")
	sb.From(fmt.Sprintf("%s AS l", leftCTE))
	sb.JoinWithOption(
		sqlbuilder.InnerJoin,
		fmt.Sprintf("%s AS r", rightCTE),
		"l.trace_id = r.trace_id",
	)

	sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return sql, args, []string{leftCTE, rightCTE}
}

func (b *traceOperatorCTEBuilder) buildOrCTE(leftCTE, rightCTE string) (string, []string) {
	sql := fmt.Sprintf(`SELECT * FROM %s UNION DISTINCT SELECT * FROM %s`, leftCTE, rightCTE)

	return sql, []string{leftCTE, rightCTE}
}

func (b *traceOperatorCTEBuilder) buildNotCTE(leftCTE, rightCTE string) (string, []any, []string) {
	sb := sqlbuilder.NewSelectBuilder()

	// Handle unary NOT case (rightCTE is empty)
	if rightCTE == "" {
		sb.Select("b.*")
		sb.From("all_spans AS b")
		sb.Where(fmt.Sprintf(
			"b.trace_id GLOBAL NOT IN (SELECT DISTINCT trace_id FROM %s)",
			leftCTE,
		))
		sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
		return sql, args, []string{"all_spans", leftCTE}
	}

	sb.Select("l.*")
	sb.From(fmt.Sprintf("%s AS l", leftCTE))
	sb.Where(fmt.Sprintf(
		"l.trace_id GLOBAL NOT IN (SELECT DISTINCT trace_id FROM %s)",
		rightCTE,
	))

	sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return sql, args, []string{leftCTE, rightCTE}
}
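Taken together, the set operators above resolve membership at the trace level rather than the span level; a brief illustration (hypothetical, not from the diff) with operand CTEs A and B:

// A && B  -> the A spans whose trace_id also has at least one B span (INNER JOIN on trace_id)
// A || B  -> UNION DISTINCT of the A spans and the B spans
// A NOT B -> the A spans from traces that contain no B span
// NOT A   -> spans from all_spans belonging to traces that contain no A span (unary form)
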
func (b *traceOperatorCTEBuilder) buildFinalQuery(selectFromCTE string, requestType qbtypes.RequestType) (*qbtypes.Statement, error) {
|
||||
switch requestType {
|
||||
case qbtypes.RequestTypeRaw:
|
||||
return b.buildListQuery(selectFromCTE)
|
||||
case qbtypes.RequestTypeTimeSeries:
|
||||
return b.buildTimeSeriesQuery(selectFromCTE)
|
||||
case qbtypes.RequestTypeTrace:
|
||||
return b.buildTraceQuery(selectFromCTE)
|
||||
case qbtypes.RequestTypeScalar:
|
||||
return b.buildScalarQuery(selectFromCTE)
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported request type: %s", requestType)
|
||||
}
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildListQuery(selectFromCTE string) (*qbtypes.Statement, error) {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
// Select core fields
|
||||
sb.Select(
|
||||
"timestamp",
|
||||
"trace_id",
|
||||
"span_id",
|
||||
"name",
|
||||
"duration_nano",
|
||||
"parent_span_id",
|
||||
)
|
||||
|
||||
selectedFields := map[string]bool{
|
||||
"timestamp": true,
|
||||
"trace_id": true,
|
||||
"span_id": true,
|
||||
"name": true,
|
||||
"duration_nano": true,
|
||||
"parent_span_id": true,
|
||||
}
|
||||
|
||||
// Get keys for selectFields
|
||||
keySelectors := b.getKeySelectors()
|
||||
for _, field := range b.operator.SelectFields {
|
||||
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||
Name: field.Name,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: field.FieldContext,
|
||||
FieldDataType: field.FieldDataType,
|
||||
})
|
||||
}
|
||||
|
||||
keys, _, err := b.stmtBuilder.metadataStore.GetKeysMulti(b.ctx, keySelectors)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Add selectFields using ColumnExpressionFor since we now have all base table columns
|
||||
for _, field := range b.operator.SelectFields {
|
||||
if selectedFields[field.Name] {
|
||||
continue
|
||||
}
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(b.ctx, &field, keys)
|
||||
if err != nil {
|
||||
b.stmtBuilder.logger.WarnContext(b.ctx, "failed to map select field",
|
||||
"field", field.Name, "error", err)
|
||||
continue
|
||||
}
|
||||
sb.SelectMore(colExpr)
|
||||
selectedFields[field.Name] = true
|
||||
}
|
||||
|
||||
sb.From(selectFromCTE)
|
||||
|
||||
// Add order by support using ColumnExpressionFor
|
||||
orderApplied := false
|
||||
for _, orderBy := range b.operator.Order {
|
||||
colExpr, err := b.stmtBuilder.fm.ColumnExpressionFor(b.ctx, &orderBy.Key.TelemetryFieldKey, keys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sb.OrderBy(fmt.Sprintf("%s %s", colExpr, orderBy.Direction.StringValue()))
|
||||
orderApplied = true
|
||||
}
|
||||
|
||||
if !orderApplied {
|
||||
sb.OrderBy("timestamp DESC")
|
||||
}
|
||||
|
||||
if b.operator.Limit > 0 {
|
||||
sb.Limit(b.operator.Limit)
|
||||
} else {
|
||||
sb.Limit(100)
|
||||
}
|
||||
|
||||
if b.operator.Offset > 0 {
|
||||
sb.Offset(b.operator.Offset)
|
||||
}
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
return &qbtypes.Statement{
|
||||
Query: sql,
|
||||
Args: args,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) getKeySelectors() []*telemetrytypes.FieldKeySelector {
|
||||
var keySelectors []*telemetrytypes.FieldKeySelector
|
||||
|
||||
for _, agg := range b.operator.Aggregations {
|
||||
selectors := querybuilder.QueryStringToKeysSelectors(agg.Expression)
|
||||
keySelectors = append(keySelectors, selectors...)
|
||||
}
|
||||
|
||||
if b.operator.Filter != nil && b.operator.Filter.Expression != "" {
|
||||
selectors := querybuilder.QueryStringToKeysSelectors(b.operator.Filter.Expression)
|
||||
keySelectors = append(keySelectors, selectors...)
|
||||
}
|
||||
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
selectors := querybuilder.QueryStringToKeysSelectors(gb.TelemetryFieldKey.Name)
|
||||
keySelectors = append(keySelectors, selectors...)
|
||||
}
|
||||
|
||||
for _, order := range b.operator.Order {
|
||||
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||
Name: order.Key.Name,
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
FieldContext: order.Key.FieldContext,
|
||||
FieldDataType: order.Key.FieldDataType,
|
||||
})
|
||||
}
|
||||
|
||||
for i := range keySelectors {
|
||||
keySelectors[i].Signal = telemetrytypes.SignalTraces
|
||||
}
|
||||
|
||||
return keySelectors
|
||||
}
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildTimeSeriesQuery(selectFromCTE string) (*qbtypes.Statement, error) {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
sb.Select(fmt.Sprintf(
|
||||
"toStartOfInterval(timestamp, INTERVAL %d SECOND) AS ts",
|
||||
int64(b.operator.StepInterval.Seconds()),
|
||||
))
|
||||
|
||||
keySelectors := b.getKeySelectors()
|
||||
keys, _, err := b.stmtBuilder.metadataStore.GetKeysMulti(b.ctx, keySelectors)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
b.ctx,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
keys,
|
||||
telemetrytypes.FieldDataTypeString,
|
||||
"",
|
||||
nil,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to map group by field '%s': %v",
|
||||
gb.TelemetryFieldKey.Name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
}
|
||||
|
||||
var allAggChArgs []any
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
b.ctx,
|
||||
agg.Expression,
|
||||
uint64(b.operator.StepInterval.Seconds()),
|
||||
keys,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to rewrite aggregation expression '%s': %v",
|
||||
agg.Expression,
|
||||
err,
|
||||
)
|
||||
}
|
||||
allAggChArgs = append(allAggChArgs, chArgs...)
|
||||
|
||||
alias := fmt.Sprintf("__result_%d", i)
|
||||
|
||||
sb.SelectMore(fmt.Sprintf("%s AS %s", rewritten, alias))
|
||||
}
|
||||
|
||||
sb.From(selectFromCTE)
|
||||
|
||||
sb.GroupBy("ts")
|
||||
if len(b.operator.GroupBy) > 0 {
|
||||
groupByKeys := make([]string, len(b.operator.GroupBy))
|
||||
for i, gb := range b.operator.GroupBy {
|
||||
groupByKeys[i] = fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)
|
||||
}
|
||||
sb.GroupBy(groupByKeys...)
|
||||
}
|
||||
|
||||
// Add order by support
|
||||
for _, orderBy := range b.operator.Order {
|
||||
idx, ok := b.aggOrderBy(orderBy)
|
||||
if ok {
|
||||
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
|
||||
} else {
|
||||
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||
}
|
||||
}
|
||||
sb.OrderBy("ts desc")
|
||||
|
||||
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||
|
||||
// Add HAVING clause if specified
|
||||
if err := b.addHavingClause(sb); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||
return &qbtypes.Statement{
|
||||
Query: sql,
|
||||
Args: args,
|
||||
}, nil
|
||||
}
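For time-series panels, the statement assembled above should come out roughly in the following shape (a hedged reconstruction of the sqlbuilder output, not copied from the diff):

// SELECT toStartOfInterval(timestamp, INTERVAL <step> SECOND) AS ts,
//        toString(<group-by expression>) AS `<group-by name>`,
//        <rewritten aggregation> AS __result_0
// FROM <root operator CTE or returnSpansFrom CTE>
// GROUP BY ts, `<group-by name>`
// [HAVING ...]   -- only when addHavingClause adds one
// ORDER BY ts desc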
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildTraceSummaryCTE(selectFromCTE string) error {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
sb.Select(
|
||||
"trace_id",
|
||||
"count() AS total_span_count",
|
||||
"any(timestamp) AS first_timestamp",
|
||||
)
|
||||
|
||||
sb.From("all_spans")
|
||||
sb.Where(fmt.Sprintf("trace_id GLOBAL IN (SELECT DISTINCT trace_id FROM %s)", selectFromCTE))
|
||||
sb.GroupBy("trace_id")
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||
b.addCTE("trace_summary", sql, args, []string{"all_spans", selectFromCTE})
|
||||
|
||||
return nil
|
||||
}
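`buildTraceSummaryCTE` widens the result from matched spans to whole traces: it counts every span of any trace whose `trace_id` appears in the operator's result CTE. A short sketch of that query under the same go-sqlbuilder assumption as above; `A_DIR_DESC_B` is a stand-in CTE name.

```go
// Sketch only (not project code): the per-trace summary query registered by buildTraceSummaryCTE.
package main

import (
	"fmt"

	sqlbuilder "github.com/huandu/go-sqlbuilder"
)

func main() {
	matchCTE := "A_DIR_DESC_B" // placeholder name of the CTE holding matched spans

	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("trace_id", "count() AS total_span_count", "any(timestamp) AS first_timestamp")
	sb.From("all_spans")
	// GLOBAL IN pulls in every span of a matched trace, not only the spans
	// that satisfied the operator expression.
	sb.Where(fmt.Sprintf("trace_id GLOBAL IN (SELECT DISTINCT trace_id FROM %s)", matchCTE))
	sb.GroupBy("trace_id")

	sql, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(sql)
}
```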
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildTraceQuery(selectFromCTE string) (*qbtypes.Statement, error) {
|
||||
err := b.buildTraceSummaryCTE(selectFromCTE)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
keySelectors := b.getKeySelectors()
|
||||
keys, _, err := b.stmtBuilder.metadataStore.GetKeysMulti(b.ctx, keySelectors)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
b.ctx,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
keys,
|
||||
telemetrytypes.FieldDataTypeString,
|
||||
"",
|
||||
nil,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to map group by field '%s': %v",
|
||||
gb.TelemetryFieldKey.Name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
}
|
||||
|
||||
rateInterval := (b.end - b.start) / querybuilder.NsToSeconds
|
||||
|
||||
var allAggChArgs []any
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
b.ctx,
|
||||
agg.Expression,
|
||||
rateInterval,
|
||||
keys,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to rewrite aggregation expression '%s': %v",
|
||||
agg.Expression,
|
||||
err,
|
||||
)
|
||||
}
|
||||
allAggChArgs = append(allAggChArgs, chArgs...)
|
||||
|
||||
alias := fmt.Sprintf("__result_%d", i)
|
||||
|
||||
sb.SelectMore(fmt.Sprintf("%s AS %s", rewritten, alias))
|
||||
}
|
||||
|
||||
sb.Select(
|
||||
"any(root.timestamp) as timestamp",
|
||||
"any(root.`service.name`) as `service.name`",
|
||||
"any(root.name) as `name`",
|
||||
"summary.total_span_count as span_count", // Updated column name
|
||||
"any(root.duration_nano) as `duration_nano`",
|
||||
"root.trace_id as `trace_id`",
|
||||
)
|
||||
|
||||
sb.From("all_spans as root")
|
||||
sb.JoinWithOption(
|
||||
sqlbuilder.InnerJoin,
|
||||
"trace_summary as summary",
|
||||
"root.trace_id = summary.trace_id",
|
||||
)
|
||||
sb.Where("root.parent_span_id = ''")
|
||||
|
||||
sb.GroupBy("root.trace_id", "summary.total_span_count")
|
||||
if len(b.operator.GroupBy) > 0 {
|
||||
groupByKeys := make([]string, len(b.operator.GroupBy))
|
||||
for i, gb := range b.operator.GroupBy {
|
||||
groupByKeys[i] = fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)
|
||||
}
|
||||
sb.GroupBy(groupByKeys...)
|
||||
}
|
||||
|
||||
if err := b.addHavingClause(sb); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
orderApplied := false
|
||||
for _, orderBy := range b.operator.Order {
|
||||
switch orderBy.Key.Name {
|
||||
case qbtypes.OrderByTraceDuration.StringValue():
|
||||
sb.OrderBy(fmt.Sprintf("`duration_nano` %s", orderBy.Direction.StringValue()))
|
||||
orderApplied = true
|
||||
case qbtypes.OrderBySpanCount.StringValue():
|
||||
sb.OrderBy(fmt.Sprintf("span_count %s", orderBy.Direction.StringValue()))
|
||||
orderApplied = true
|
||||
case "timestamp":
|
||||
sb.OrderBy(fmt.Sprintf("timestamp %s", orderBy.Direction.StringValue()))
|
||||
orderApplied = true
|
||||
default:
|
||||
aggIndex := -1
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
if orderBy.Key.Name == agg.Alias || orderBy.Key.Name == fmt.Sprintf("__result_%d", i) {
|
||||
aggIndex = i
|
||||
break
|
||||
}
|
||||
}
|
||||
if aggIndex >= 0 {
|
||||
alias := fmt.Sprintf("__result_%d", aggIndex)
|
||||
if b.operator.Aggregations[aggIndex].Alias != "" {
|
||||
alias = b.operator.Aggregations[aggIndex].Alias
|
||||
}
|
||||
sb.OrderBy(fmt.Sprintf("%s %s", alias, orderBy.Direction.StringValue()))
|
||||
orderApplied = true
|
||||
} else {
|
||||
b.stmtBuilder.logger.WarnContext(b.ctx,
|
||||
"ignoring order by field that's not available in trace context",
|
||||
"field", orderBy.Key.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !orderApplied {
|
||||
sb.OrderBy("`duration_nano` DESC")
|
||||
}
|
||||
|
||||
if b.operator.Limit > 0 {
|
||||
sb.Limit(b.operator.Limit)
|
||||
}
|
||||
|
||||
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||
return &qbtypes.Statement{
|
||||
Query: sql,
|
||||
Args: args,
|
||||
}, nil
|
||||
}
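For trace-level results there is no step bucketing, so the rate window handed to the aggregation rewriter is the whole query range, `(end - start) / NsToSeconds`. A tiny sketch of that arithmetic, assuming `start`/`end` are nanosecond timestamps and that `querybuilder.NsToSeconds` is the nanoseconds-per-second constant (an assumption; the constant's definition is not shown in this diff).

```go
// Sketch only (not project code): the rate window used for trace-level aggregations.
package main

import "fmt"

const nsToSeconds = uint64(1_000_000_000) // stand-in for querybuilder.NsToSeconds

// rateInterval returns the query window in seconds; with no step bucketing,
// rate()-style aggregations are normalised over the whole range.
func rateInterval(startNs, endNs uint64) uint64 {
	return (endNs - startNs) / nsToSeconds
}

func main() {
	fmt.Println(rateInterval(1747947419000000000, 1747983448000000000)) // 36029
}
```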
|
||||
|
||||
func (b *traceOperatorCTEBuilder) buildScalarQuery(selectFromCTE string) (*qbtypes.Statement, error) {
|
||||
sb := sqlbuilder.NewSelectBuilder()
|
||||
|
||||
keySelectors := b.getKeySelectors()
|
||||
keys, _, err := b.stmtBuilder.metadataStore.GetKeysMulti(b.ctx, keySelectors)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var allGroupByArgs []any
|
||||
|
||||
for _, gb := range b.operator.GroupBy {
|
||||
expr, args, err := querybuilder.CollisionHandledFinalExpr(
|
||||
b.ctx,
|
||||
&gb.TelemetryFieldKey,
|
||||
b.stmtBuilder.fm,
|
||||
b.stmtBuilder.cb,
|
||||
keys,
|
||||
telemetrytypes.FieldDataTypeString,
|
||||
"",
|
||||
nil,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to map group by field '%s': %v",
|
||||
gb.TelemetryFieldKey.Name,
|
||||
err,
|
||||
)
|
||||
}
|
||||
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.Name)
|
||||
allGroupByArgs = append(allGroupByArgs, args...)
|
||||
sb.SelectMore(colExpr)
|
||||
}
|
||||
|
||||
var allAggChArgs []any
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
rewritten, chArgs, err := b.stmtBuilder.aggExprRewriter.Rewrite(
|
||||
b.ctx,
|
||||
agg.Expression,
|
||||
uint64((b.end-b.start)/querybuilder.NsToSeconds),
|
||||
keys,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to rewrite aggregation expression '%s': %v",
|
||||
agg.Expression,
|
||||
err,
|
||||
)
|
||||
}
|
||||
allAggChArgs = append(allAggChArgs, chArgs...)
|
||||
|
||||
alias := fmt.Sprintf("__result_%d", i)
|
||||
|
||||
sb.SelectMore(fmt.Sprintf("%s AS %s", rewritten, alias))
|
||||
}
|
||||
|
||||
sb.From(selectFromCTE)
|
||||
|
||||
if len(b.operator.GroupBy) > 0 {
|
||||
groupByKeys := make([]string, len(b.operator.GroupBy))
|
||||
for i, gb := range b.operator.GroupBy {
|
||||
groupByKeys[i] = fmt.Sprintf("`%s`", gb.TelemetryFieldKey.Name)
|
||||
}
|
||||
sb.GroupBy(groupByKeys...)
|
||||
}
|
||||
|
||||
// Add order by support
|
||||
for _, orderBy := range b.operator.Order {
|
||||
idx, ok := b.aggOrderBy(orderBy)
|
||||
if ok {
|
||||
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
|
||||
} else {
|
||||
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||
}
|
||||
}
|
||||
|
||||
// Add default ordering if no orderBy specified
|
||||
if len(b.operator.Order) == 0 {
|
||||
sb.OrderBy("__result_0 DESC")
|
||||
}
|
||||
|
||||
// Note: Do not apply limit in SQL for scalar queries - it should be applied post-processing
|
||||
// to limit series count, not data points. The current SQL LIMIT would limit the number
|
||||
// of data points returned, but we want to limit the number of series instead.
|
||||
|
||||
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||
|
||||
// Add HAVING clause if specified
|
||||
if err := b.addHavingClause(sb); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sql, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||
return &qbtypes.Statement{
|
||||
Query: sql,
|
||||
Args: args,
|
||||
}, nil
|
||||
}
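The note above keeps LIMIT out of the scalar SQL so that the cap can be applied to the number of series, not the number of rows. A hypothetical sketch of what that post-processing step could look like; the `series` type is invented for illustration and is not the project's result type.

```go
// Sketch only (not project code): applying the scalar-query limit to series after the query runs.
package main

import "fmt"

// series is a hypothetical result row keyed by its group-by labels.
type series struct {
	Labels map[string]string
	Value  float64
}

// limitSeries caps the number of series (groups) returned, leaving each
// group's value intact — which is why the SQL LIMIT is left out above.
func limitSeries(all []series, n int) []series {
	if n <= 0 || n >= len(all) {
		return all
	}
	return all[:n]
}

func main() {
	all := []series{{Value: 3}, {Value: 2}, {Value: 1}}
	fmt.Println(len(limitSeries(all, 2))) // 2
}
```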
|
||||
|
||||
func (b *traceOperatorCTEBuilder) addHavingClause(sb *sqlbuilder.SelectBuilder) error {
|
||||
if b.operator.Having != nil && b.operator.Having.Expression != "" {
|
||||
rewriter := querybuilder.NewHavingExpressionRewriter()
|
||||
rewrittenExpr := rewriter.RewriteForTraces(b.operator.Having.Expression, b.operator.Aggregations)
|
||||
sb.Having(rewrittenExpr)
|
||||
}
|
||||
return nil
|
||||
}
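`addHavingClause` relies on the having-expression rewriter to translate user-facing aggregation aliases into the generated `__result_N` column names before the HAVING is attached. The real `NewHavingExpressionRewriter` is not part of this diff, so the sketch below only illustrates the alias-substitution idea, with a hypothetical aggregation type standing in for `qbtypes.TraceAggregation`.

```go
// Sketch only (not project code): the alias substitution idea behind the HAVING rewriter.
package main

import (
	"fmt"
	"strings"
)

// aggregation is a hypothetical stand-in for qbtypes.TraceAggregation.
type aggregation struct {
	Expression string
	Alias      string
}

// rewriteHaving replaces user-facing aliases (and raw expressions) with the
// generated __result_N column names so the HAVING clause matches the SELECT.
func rewriteHaving(expr string, aggs []aggregation) string {
	for i, agg := range aggs {
		col := fmt.Sprintf("__result_%d", i)
		if agg.Alias != "" {
			expr = strings.ReplaceAll(expr, agg.Alias, col)
		}
		expr = strings.ReplaceAll(expr, agg.Expression, col)
	}
	return expr
}

func main() {
	aggs := []aggregation{{Expression: "count()", Alias: "span_total"}}
	fmt.Println(rewriteHaving("span_total > 100", aggs)) // __result_0 > 100
}
```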
|
||||
|
||||
func (b *traceOperatorCTEBuilder) addCTE(name, sql string, args []any, dependsOn []string) {
|
||||
b.ctes = append(b.ctes, cteNode{
|
||||
name: name,
|
||||
sql: sql,
|
||||
args: args,
|
||||
dependsOn: dependsOn,
|
||||
})
|
||||
b.cteNameToIndex[name] = len(b.ctes) - 1
|
||||
}
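`addCTE` keeps CTEs in a slice, so the final WITH clause preserves dependency order, plus a name-to-index map for constant-time lookups when a CTE is referenced again. A self-contained sketch of that bookkeeping; the `cteNode` type here mirrors the field names used above but is a local stand-in, since the project's definition sits outside this diff.

```go
// Sketch only (not project code): the ordered CTE registry that addCTE maintains.
package main

import "fmt"

// cteNode mirrors the fields used above; the project's definition is not in this diff.
type cteNode struct {
	name      string
	sql       string
	args      []any
	dependsOn []string
}

type cteRegistry struct {
	ctes           []cteNode      // preserves dependency order for the WITH clause
	cteNameToIndex map[string]int // O(1) lookup when a CTE is referenced again
}

func (r *cteRegistry) addCTE(name, sql string, args []any, dependsOn []string) {
	r.ctes = append(r.ctes, cteNode{name: name, sql: sql, args: args, dependsOn: dependsOn})
	r.cteNameToIndex[name] = len(r.ctes) - 1
}

func main() {
	r := &cteRegistry{cteNameToIndex: map[string]int{}}
	r.addCTE("trace_summary", "SELECT trace_id, count() ...", nil, []string{"all_spans"})
	fmt.Println(r.cteNameToIndex["trace_summary"], len(r.ctes)) // 0 1
}
```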
|
||||
|
||||
func (b *traceOperatorCTEBuilder) aggOrderBy(k qbtypes.OrderBy) (int, bool) {
|
||||
for i, agg := range b.operator.Aggregations {
|
||||
if k.Key.Name == agg.Alias ||
|
||||
k.Key.Name == agg.Expression ||
|
||||
k.Key.Name == fmt.Sprintf("__result_%d", i) {
|
||||
return i, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
551
pkg/telemetrytraces/trace_operator_cte_builder_test.go
Normal file
@@ -0,0 +1,551 @@
|
||||
package telemetrytraces
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestTraceOperatorStatementBuilder(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
operator qbtypes.QueryBuilderTraceOperator
|
||||
compositeQuery *qbtypes.CompositeQuery
|
||||
expected qbtypes.Statement
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
name: "simple direct descendant operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A => B",
|
||||
SelectFields: []telemetrytypes.TelemetryFieldKey{
|
||||
{
|
||||
Name: "service.name",
|
||||
FieldContext: telemetrytypes.FieldContextResource,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
{
|
||||
Name: "name",
|
||||
FieldContext: telemetrytypes.FieldContextSpan,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
Limit: 10,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id, resources_string['service.name'] AS `service.name` FROM A_DIR_DESC_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "indirect descendant operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A -> B",
|
||||
Limit: 5,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'gateway'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'database'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_INDIR_DESC_B AS (WITH RECURSIVE up AS (SELECT d.trace_id, d.span_id, d.parent_span_id, 0 AS depth FROM B AS d UNION ALL SELECT p.trace_id, p.span_id, p.parent_span_id, up.depth + 1 FROM all_spans AS p JOIN up ON p.trace_id = up.trace_id AND p.span_id = up.parent_span_id WHERE up.depth < 100) SELECT DISTINCT a.* FROM A AS a GLOBAL INNER JOIN (SELECT DISTINCT trace_id, span_id FROM up WHERE depth > 0 ) AS ancestors ON ancestors.trace_id = a.trace_id AND ancestors.span_id = a.span_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id FROM A_INDIR_DESC_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "gateway", "%service.name%", "%service.name\":\"gateway%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "database", "%service.name%", "%service.name\":\"database%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 5},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "AND operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A && B",
|
||||
Limit: 15,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_AND_B AS (SELECT l.* FROM A AS l INNER JOIN B AS r ON l.trace_id = r.trace_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id FROM A_AND_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 15},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "OR operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A || B",
|
||||
Limit: 20,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_OR_B AS (SELECT * FROM A UNION DISTINCT SELECT * FROM B) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id FROM A_OR_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 20},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "NOT operator",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A NOT B",
|
||||
Limit: 10,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_not_B AS (SELECT l.* FROM A AS l WHERE l.trace_id GLOBAL NOT IN (SELECT DISTINCT trace_id FROM B)) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id FROM A_not_B ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "time series query with aggregations",
|
||||
requestType: qbtypes.RequestTypeTimeSeries,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A => B",
|
||||
StepInterval: qbtypes.Step{Duration: 60 * time.Second},
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{
|
||||
Expression: "count()",
|
||||
},
|
||||
},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, count() AS __result_0 FROM A_DIR_DESC_B GROUP BY ts, `service.name` ORDER BY ts desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), true},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "scalar query with aggregation and group by",
|
||||
requestType: qbtypes.RequestTypeScalar,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A && B",
|
||||
Aggregations: []qbtypes.TraceAggregation{
|
||||
{
|
||||
Expression: "avg(duration_nano)",
|
||||
},
|
||||
},
|
||||
GroupBy: []qbtypes.GroupByKey{
|
||||
{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "service.name",
|
||||
},
|
||||
},
|
||||
},
|
||||
Order: []qbtypes.OrderBy{
|
||||
{
|
||||
Key: qbtypes.OrderByKey{
|
||||
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||
Name: "__result_0",
|
||||
},
|
||||
},
|
||||
Direction: qbtypes.OrderDirectionDesc,
|
||||
},
|
||||
},
|
||||
Limit: 10,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "response_status_code < 400",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND toFloat64(response_status_code) < ?), A_AND_B AS (SELECT l.* FROM A AS l INNER JOIN B AS r ON l.trace_id = r.trace_id) SELECT toString(multiIf(mapContains(resources_string, 'service.name') = ?, resources_string['service.name'], NULL)) AS `service.name`, avg(multiIf(duration_nano <> ?, duration_nano, NULL)) AS __result_0 FROM A_AND_B GROUP BY `service.name` ORDER BY __result_0 desc SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), float64(400), true, 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "complex nested expression",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "(A => B) && (C => D)",
|
||||
Limit: 5,
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'frontend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'backend'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "C",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'auth'",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "D",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
Filter: &qbtypes.Filter{
|
||||
Expression: "service.name = 'database'",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH toDateTime64(1747947419000000000, 9) AS t_from, toDateTime64(1747983448000000000, 9) AS t_to, 1747945619 AS bucket_from, 1747983448 AS bucket_to, all_spans AS (SELECT *, resource_string_service$$name AS `service.name` FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ?), __resource_filter_A AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), A AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_A) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_B AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), B AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_B) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), A_DIR_DESC_B AS (SELECT p.* FROM A AS p INNER JOIN B AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id), __resource_filter_C AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), C AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_C) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), __resource_filter_D AS (SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), D AS (SELECT * FROM signoz_traces.distributed_signoz_index_v3 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter_D) AND timestamp >= ? AND timestamp < ? AND ts_bucket_start >= ? AND ts_bucket_start <= ? AND true), C_DIR_DESC_D AS (SELECT p.* FROM C AS p INNER JOIN D AS c ON p.trace_id = c.trace_id AND p.span_id = c.parent_span_id), A_DIR_DESC_B_AND_C_DIR_DESC_D AS (SELECT l.* FROM A_DIR_DESC_B AS l INNER JOIN C_DIR_DESC_D AS r ON l.trace_id = r.trace_id) SELECT timestamp, trace_id, span_id, name, duration_nano, parent_span_id FROM A_DIR_DESC_B_AND_C_DIR_DESC_D ORDER BY timestamp DESC LIMIT ? SETTINGS distributed_product_mode='allow', max_memory_usage=10000000000",
|
||||
Args: []any{"1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "frontend", "%service.name%", "%service.name\":\"frontend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "backend", "%service.name%", "%service.name\":\"backend%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "auth", "%service.name%", "%service.name\":\"auth%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), "database", "%service.name%", "%service.name\":\"database%", uint64(1747945619), uint64(1747983448), "1747947419000000000", "1747983448000000000", uint64(1747945619), uint64(1747983448), 5},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
traceStmtBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
|
||||
statementBuilder := NewTraceOperatorStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
traceStmtBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
)
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
// Parse the operator expression
|
||||
err := c.operator.ParseExpression()
|
||||
require.NoError(t, err)
|
||||
|
||||
q, err := statementBuilder.Build(
|
||||
context.Background(),
|
||||
1747947419000,
|
||||
1747983448000,
|
||||
c.requestType,
|
||||
c.operator,
|
||||
c.compositeQuery,
|
||||
)
|
||||
|
||||
if c.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErr.Error())
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
operator qbtypes.QueryBuilderTraceOperator
|
||||
compositeQuery *qbtypes.CompositeQuery
|
||||
expectedErr string
|
||||
}{
|
||||
{
|
||||
name: "missing referenced query",
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A => B",
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
// Missing query B
|
||||
},
|
||||
},
|
||||
expectedErr: "referenced query 'B' not found",
|
||||
},
|
||||
{
|
||||
name: "nil composite query",
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A => B",
|
||||
},
|
||||
compositeQuery: nil,
|
||||
expectedErr: "compositeQuery cannot be nil",
|
||||
},
|
||||
{
|
||||
name: "unsupported operator",
|
||||
operator: qbtypes.QueryBuilderTraceOperator{
|
||||
Expression: "A XOR B", // Assuming XOR is not supported
|
||||
},
|
||||
compositeQuery: &qbtypes.CompositeQuery{
|
||||
Queries: []qbtypes.QueryEnvelope{
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: qbtypes.QueryTypeBuilder,
|
||||
Spec: qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedErr: "invalid query reference 'A XOR B'",
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
traceStmtBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
|
||||
statementBuilder := NewTraceOperatorStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
traceStmtBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
)
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
// Parse the operator expression
|
||||
err := c.operator.ParseExpression()
|
||||
if err == nil { // Only proceed if parsing succeeded
|
||||
_, err = statementBuilder.Build(
|
||||
context.Background(),
|
||||
1747947419000,
|
||||
1747983448000,
|
||||
qbtypes.RequestTypeRaw,
|
||||
c.operator,
|
||||
c.compositeQuery,
|
||||
)
|
||||
}
|
||||
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErr)
|
||||
})
|
||||
}
|
||||
}
|
||||
96
pkg/telemetrytraces/trace_operator_statement_builder.go
Normal file
@@ -0,0 +1,96 @@
|
||||
package telemetrytraces
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"log/slog"
|
||||
)
|
||||
|
||||
type traceOperatorStatementBuilder struct {
|
||||
logger *slog.Logger
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
fm qbtypes.FieldMapper
|
||||
cb qbtypes.ConditionBuilder
|
||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||
aggExprRewriter qbtypes.AggExprRewriter
|
||||
}
|
||||
|
||||
var _ qbtypes.TraceOperatorStatementBuilder = (*traceOperatorStatementBuilder)(nil)
|
||||
|
||||
func NewTraceOperatorStatementBuilder(
|
||||
settings factory.ProviderSettings,
|
||||
metadataStore telemetrytypes.MetadataStore,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
) *traceOperatorStatementBuilder {
|
||||
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
|
||||
return &traceOperatorStatementBuilder{
|
||||
logger: tracesSettings.Logger(),
|
||||
metadataStore: metadataStore,
|
||||
fm: fieldMapper,
|
||||
cb: conditionBuilder,
|
||||
traceStmtBuilder: traceStmtBuilder,
|
||||
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
|
||||
aggExprRewriter: aggExprRewriter,
|
||||
}
|
||||
}
|
||||
|
||||
// Build builds a SQL query based on the given parameters.
|
||||
func (b *traceOperatorStatementBuilder) Build(
|
||||
ctx context.Context,
|
||||
start uint64,
|
||||
end uint64,
|
||||
requestType qbtypes.RequestType,
|
||||
query qbtypes.QueryBuilderTraceOperator,
|
||||
compositeQuery *qbtypes.CompositeQuery,
|
||||
) (*qbtypes.Statement, error) {
|
||||
|
||||
start = querybuilder.ToNanoSecs(start)
|
||||
end = querybuilder.ToNanoSecs(end)
|
||||
|
||||
// Parse the expression if not already parsed
|
||||
if query.ParsedExpression == nil {
|
||||
if err := query.ParseExpression(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Validate compositeQuery parameter
|
||||
if compositeQuery == nil {
|
||||
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "compositeQuery cannot be nil")
|
||||
}
|
||||
|
||||
b.logger.DebugContext(ctx, "Building trace operator query",
|
||||
"expression", query.Expression,
|
||||
"requestType", requestType)
|
||||
|
||||
// Build the CTE-based query
|
||||
builder := &traceOperatorCTEBuilder{
|
||||
ctx: ctx,
|
||||
start: start,
|
||||
end: end,
|
||||
operator: &query,
|
||||
stmtBuilder: b,
|
||||
queries: make(map[string]*qbtypes.QueryBuilderQuery[qbtypes.TraceAggregation]),
|
||||
ctes: []cteNode{}, // Use slice to maintain order
|
||||
cteNameToIndex: make(map[string]int),
|
||||
queryToCTEName: make(map[string]string),
|
||||
compositeQuery: compositeQuery, // Now passed as explicit parameter
|
||||
}
|
||||
|
||||
// Collect all referenced queries
|
||||
if err := builder.collectQueries(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build the query
|
||||
return builder.build(requestType)
|
||||
}
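`querybuilder.ToNanoSecs` normalises the caller's epoch values: the tests below invoke `Build` with millisecond timestamps (1747947419000) while the generated SQL carries nanoseconds (1747947419000000000). Its implementation is not part of this diff, so the sketch below is a hypothetical stand-in that infers the unit from the value's magnitude.

```go
// Sketch only (not project code): a hypothetical stand-in for querybuilder.ToNanoSecs.
package main

import "fmt"

const nanoThreshold = uint64(1_000_000_000_000_000_000) // ~1e18: present-day epochs in nanoseconds

// toNanoSecs scales an epoch given in seconds, milliseconds, or microseconds
// up to nanoseconds by multiplying until the value reaches nanosecond magnitude.
func toNanoSecs(epoch uint64) uint64 {
	for epoch != 0 && epoch < nanoThreshold {
		epoch *= 1000
	}
	return epoch
}

func main() {
	fmt.Println(toNanoSecs(1747947419000)) // 1747947419000000000
}
```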
|
||||
@@ -52,3 +52,8 @@ type StatementBuilder[T any] interface {
|
||||
// Build builds the query.
|
||||
Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderQuery[T], variables map[string]VariableItem) (*Statement, error)
|
||||
}
|
||||
|
||||
type TraceOperatorStatementBuilder interface {
|
||||
// Build builds the trace operator query.
|
||||
Build(ctx context.Context, start, end uint64, requestType RequestType, query QueryBuilderTraceOperator, compositeQuery *CompositeQuery) (*Statement, error)
|
||||
}
|
||||
|
||||
@@ -88,8 +88,8 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
|
||||
case QueryTypeTraceOperator:
|
||||
var spec QueryBuilderTraceOperator
|
||||
if err := json.Unmarshal(shadow.Spec, &spec); err != nil {
|
||||
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "invalid trace operator spec")
|
||||
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "trace operator spec"); err != nil {
|
||||
return wrapUnmarshalError(err, "invalid trace operator spec: %v", err)
|
||||
}
|
||||
q.Spec = spec
|
||||
|
||||
@@ -113,7 +113,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
"unknown query type %q",
|
||||
shadow.Type,
|
||||
).WithAdditional(
|
||||
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, promql, clickhouse_sql",
|
||||
"Valid query types are: builder_query, builder_sub_query, builder_formula, builder_join, builder_trace_operator, promql, clickhouse_sql",
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -132,7 +132,7 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
|
||||
"filter": {
|
||||
"expression": "trace_duration > 200ms AND span_count >= 5"
|
||||
},
|
||||
"orderBy": [{
|
||||
"order": [{
|
||||
"key": {
|
||||
"name": "trace_duration"
|
||||
},
|
||||
@@ -231,7 +231,7 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
|
||||
"name": "complex_trace_analysis",
|
||||
"expression": "A => (B && NOT C)",
|
||||
"filter": { "expression": "trace_duration BETWEEN 100ms AND 5s AND span_count IN (5, 10, 15)" },
|
||||
"orderBy": [{
|
||||
"order": [{
|
||||
"key": { "name": "span_count" },
|
||||
"direction": "asc"
|
||||
}],
|
||||
@@ -1029,15 +1029,17 @@ func TestQueryRangeRequest_UnmarshalJSON(t *testing.T) {
|
||||
|
||||
func TestParseTraceExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expectError bool
|
||||
checkResult func(t *testing.T, result *TraceOperand)
|
||||
name string
|
||||
expression string
|
||||
expectError bool
|
||||
expectedOpCount int
|
||||
checkResult func(t *testing.T, result *TraceOperand)
|
||||
}{
|
||||
{
|
||||
name: "simple query reference",
|
||||
expression: "A",
|
||||
expectError: false,
|
||||
name: "simple query reference",
|
||||
expression: "A",
|
||||
expectError: false,
|
||||
expectedOpCount: 0,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.QueryRef)
|
||||
assert.Equal(t, "A", result.QueryRef.Name)
|
||||
@@ -1045,9 +1047,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "simple implication",
|
||||
expression: "A => B",
|
||||
expectError: false,
|
||||
name: "simple implication",
|
||||
expression: "A => B",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||
@@ -1058,9 +1061,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "and operation",
|
||||
expression: "A && B",
|
||||
expectError: false,
|
||||
name: "and operation",
|
||||
expression: "A && B",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorAnd, *result.Operator)
|
||||
@@ -1069,9 +1073,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "or operation",
|
||||
expression: "A || B",
|
||||
expectError: false,
|
||||
name: "or operation",
|
||||
expression: "A || B",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorOr, *result.Operator)
|
||||
@@ -1080,9 +1085,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "unary NOT operation",
|
||||
expression: "NOT A",
|
||||
expectError: false,
|
||||
name: "unary NOT operation",
|
||||
expression: "NOT A",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorNot, *result.Operator)
|
||||
@@ -1092,9 +1098,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "binary NOT operation",
|
||||
expression: "A NOT B",
|
||||
expectError: false,
|
||||
name: "binary NOT operation",
|
||||
expression: "A NOT B",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorExclude, *result.Operator)
|
||||
@@ -1105,9 +1112,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "complex expression with precedence",
|
||||
expression: "A => B && C || D",
|
||||
expectError: false,
|
||||
name: "complex expression with precedence",
|
||||
expression: "A => B && C || D",
|
||||
expectError: false,
|
||||
expectedOpCount: 3, // Three operators: =>, &&, ||
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
// Should parse as: A => (B && (C || D)) due to precedence: NOT > || > && > =>
|
||||
// The parsing finds operators from lowest precedence first
|
||||
@@ -1121,9 +1129,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "simple parentheses",
|
||||
expression: "(A)",
|
||||
expectError: false,
|
||||
name: "simple parentheses",
|
||||
expression: "(A)",
|
||||
expectError: false,
|
||||
expectedOpCount: 0,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.QueryRef)
|
||||
assert.Equal(t, "A", result.QueryRef.Name)
|
||||
@@ -1131,9 +1140,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "parentheses expression",
|
||||
expression: "A => (B || C)",
|
||||
expectError: false,
|
||||
name: "parentheses expression",
|
||||
expression: "A => (B || C)",
|
||||
expectError: false,
|
||||
expectedOpCount: 2, // Two operators: =>, ||
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||
@@ -1147,9 +1157,10 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nested NOT with parentheses",
|
||||
expression: "NOT (A && B)",
|
||||
expectError: false,
|
||||
name: "nested NOT with parentheses",
|
||||
expression: "NOT (A && B)",
|
||||
expectError: false,
|
||||
expectedOpCount: 2, // Two operators: NOT, &&
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorNot, *result.Operator)
|
||||
@@ -1160,6 +1171,13 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
assert.Equal(t, TraceOperatorAnd, *result.Left.Operator)
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "complex expression exceeding operator limit",
|
||||
expression: "A => B => C => D => E => F => G => H => I => J => K => L",
|
||||
expectError: false, // parseTraceExpression doesn't validate count, ParseExpression does
|
||||
expectedOpCount: 11, // 11 => operators
|
||||
checkResult: nil,
|
||||
},
|
||||
{
|
||||
name: "invalid query reference with numbers",
|
||||
expression: "123",
|
||||
@@ -1175,11 +1193,11 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
expression: "",
|
||||
expectError: true,
|
||||
},
|
||||
|
||||
{
|
||||
name: "expression with extra whitespace",
|
||||
expression: " A => B ",
|
||||
expectError: false,
|
||||
name: "expression with extra whitespace",
|
||||
expression: " A => B ",
|
||||
expectError: false,
|
||||
expectedOpCount: 1,
|
||||
checkResult: func(t *testing.T, result *TraceOperand) {
|
||||
assert.NotNil(t, result.Operator)
|
||||
assert.Equal(t, TraceOperatorDirectDescendant, *result.Operator)
|
||||
@@ -1191,7 +1209,7 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result, err := parseTraceExpression(tt.expression)
|
||||
result, opCount, err := parseTraceExpression(tt.expression)
|
||||
|
||||
if tt.expectError {
|
||||
assert.Error(t, err)
|
||||
@@ -1201,6 +1219,8 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, result)
|
||||
assert.Equal(t, tt.expectedOpCount, opCount, "operator count mismatch")
|
||||
|
||||
if tt.checkResult != nil {
|
||||
tt.checkResult(t, result)
|
||||
}
|
||||
@@ -1208,6 +1228,63 @@ func TestParseTraceExpression(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryBuilderTraceOperator_ParseExpression_OperatorLimit(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expectError bool
|
||||
errorContains string
|
||||
}{
|
||||
{
|
||||
name: "within operator limit",
|
||||
expression: "A => B => C",
|
||||
expectError: false,
|
||||
},
|
||||
{
|
||||
name: "exceeding operator limit",
|
||||
expression: "A => B => C => D => E => F => G => H => I => J => K => L",
|
||||
expectError: true,
|
||||
errorContains: "expression contains 11 operators, which exceeds the maximum allowed 10 operators",
|
||||
},
|
||||
{
|
||||
name: "exactly at limit",
|
||||
expression: "A => B => C => D => E => F => G => H => I => J => K",
|
||||
expectError: false, // 10 operators, exactly at limit
|
||||
},
|
||||
{
|
||||
name: "complex expression at limit",
|
||||
expression: "(A && B) => (C || D) => (E && F) => (G || H) => (I && J) => K",
|
||||
expectError: false, // 10 operators: 3 &&, 2 ||, 5 => = 10 total
|
||||
},
|
||||
{
|
||||
name: "complex expression exceeding limit",
|
||||
expression: "(A && B) => (C || D) => (E && F) => (G || H) => (I && J) => (K || L)",
|
||||
expectError: true,
|
||||
errorContains: "expression contains 11 operators, which exceeds the maximum allowed 10 operators",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
op := &QueryBuilderTraceOperator{
|
||||
Expression: tt.expression,
|
||||
}
|
||||
|
||||
err := op.ParseExpression()
|
||||
|
||||
if tt.expectError {
|
||||
assert.Error(t, err)
|
||||
if tt.errorContains != "" {
|
||||
assert.Contains(t, err.Error(), tt.errorContains)
|
||||
}
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, op.ParsedExpression)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryBuilderTraceOperator_ValidateTraceOperator(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.